We present a simple regularization technique for Recurrent Neural Networks
(RNNs) with Long Short-Term Memory (LSTM) units. Dropout, the most successful
technique for regularizing neural networks, does not work well with RNNs and
LSTMs. In this paper, we show how to correctly apply dropout to LSTMs, and show
that it substantially reduces overfitting on a variety of tasks. These tasks
include language modeling, speech recognition, image caption generation, and
machine translation.
%0 Generic
%1 zaremba2014recurrent
%A Zaremba, Wojciech
%A Sutskever, Ilya
%A Vinyals, Oriol
%D 2014
%K lstm rnn
%T Recurrent Neural Network Regularization
%U http://arxiv.org/abs/1409.2329
%X We present a simple regularization technique for Recurrent Neural Networks
(RNNs) with Long Short-Term Memory (LSTM) units. Dropout, the most successful
technique for regularizing neural networks, does not work well with RNNs and
LSTMs. In this paper, we show how to correctly apply dropout to LSTMs, and show
that it substantially reduces overfitting on a variety of tasks. These tasks
include language modeling, speech recognition, image caption generation, and
machine translation.
@misc{zaremba2014recurrent,
  abstract      = {We present a simple regularization technique for Recurrent Neural Networks
(RNNs) with Long Short-Term Memory (LSTM) units. Dropout, the most successful
technique for regularizing neural networks, does not work well with RNNs and
LSTMs. In this paper, we show how to correctly apply dropout to LSTMs, and show
that it substantially reduces overfitting on a variety of tasks. These tasks
include language modeling, speech recognition, image caption generation, and
machine translation.},
  added-at      = {2016-09-13T01:59:59.000+0200},
  archiveprefix = {arXiv},
  author        = {Zaremba, Wojciech and Sutskever, Ilya and Vinyals, Oriol},
  biburl        = {https://www.bibsonomy.org/bibtex/25ec0f2efa51784af352ae940d05cf668/jkan},
  description   = {Recurrent Neural Network Regularization},
  eprint        = {1409.2329},
  interhash     = {4ae558af0d3b3ded8991be5d122c4afa},
  intrahash     = {5ec0f2efa51784af352ae940d05cf668},
  keywords      = {lstm rnn},
  note          = {cite arxiv:1409.2329},
  timestamp     = {2016-09-13T01:59:59.000+0200},
  title         = {Recurrent Neural Network Regularization},
  url           = {http://arxiv.org/abs/1409.2329},
  year          = {2014},
}