@article{vaswani2017attention,
author = {Vaswani, Ashish and Shazeer, Noam and Parmar, Niki and Uszkoreit, Jakob and Jones, Llion and Gomez, Aidan N. and Kaiser, Łukasz and Polosukhin, Illia},
description = {More recent paper on the use of attention for neural machine translation},
ee = {http://arxiv.org/abs/1706.03762},
journal = {CoRR},
keywords = {attention networks neural seq2seq transformer},
note = {cite arxiv:1706.03762; Comment: 15 pages, 5 figures},
title = {Attention Is All You Need},
url = {http://arxiv.org/abs/1706.03762},
volume = {abs/1706.03762},
year = 2017
}