@inproceedings{salton-kelleher-2019-persistence,
title = "Persistence pays off: Paying attention to what the LSTM gating mechanism persists",
abstract = "Language Models (LMs) are important components in several Natural Language Processing systems. Recurrent Neural Network LMs composed of LSTM units, especially those augmented with an external memory, have achieved state-of-the-art results. However, these models still struggle to process long sequences, which are more likely to contain long-distance dependencies, because of information fading and a bias towards more recent information. In this paper we demonstrate an effective mechanism for retrieving information in a memory-augmented LSTM LM that attends to information in memory in proportion to the number of timesteps for which the LSTM gating mechanism persisted that information.",
author = "Salton, {Giancarlo D.} and Kelleher, {John D.}",
note = "Publisher Copyright: {\textcopyright} 2019 Association for Computational Linguistics (ACL). All rights reserved.; 12th International Conference on Recent Advances in Natural Language Processing, RANLP 2019; Conference date: 02-09-2019 through 04-09-2019",
year = "2019",
doi = "10.26615/978-954-452-056-4_121",
language = "English",
series = "International Conference Recent Advances in Natural Language Processing, RANLP",
publisher = "INCOMA Ltd.",
pages = "1052--1059",
editor = "Galia Angelova and Ruslan Mitkov and Ivelina Nikolova and Irina Temnikova",
booktitle = "International Conference on Recent Advances in Natural Language Processing in a Deep Learning World, RANLP 2019 - Proceedings",
}