@inproceedings{ec447319f33a494f9dfd616d07004df9,
title = "Deep Residual and Deep Dense Attentions in English Chinese Translation",
abstract = "Neural Machine Translation (NMT) with attention mechanism has achieved impressively improvement for automated translation. However, such models may lose information during multiple times of attention representations. This paper focuses on dealing with the over-attention problem. In our English-Chinese translation experimental results, the proposed model reduces the error rate of information in output sentences about 0.5%.",
author = "Lin, {Yi Xing} and Liang, {Kai Wen} and Yang, {Chih Hsuan} and Wang, {Jia Ching}",
note = "Publisher Copyright: {\textcopyright} 2021 IEEE.; 8th IEEE International Conference on Consumer Electronics-Taiwan, ICCE-TW 2021 ; Conference date: 15-09-2021 Through 17-09-2021",
year = "2021",
doi = "10.1109/ICCE-TW52618.2021.9603143",
language = "???core.languages.en_GB???",
series = "2021 IEEE International Conference on Consumer Electronics-Taiwan, ICCE-TW 2021",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
booktitle = "2021 IEEE International Conference on Consumer Electronics-Taiwan, ICCE-TW 2021",
}