@inproceedings{fa62da9d4e064627bb1551b96b539155,
title = "QNet: A Quantum-Native Sequence Encoder Architecture",
abstract = "This work proposes QNet, a novel sequence encoder model that entirely inferences on the quantum computer using a minimum number of qubits. Let n and d represent the length of the sequence and the embedding size, respectively. The dot-product attention mechanism requires a time complexity of O(n2 ·d), while QNet has merely O(n+d) quantum circuit depth. In addition, we introduce ResQNet, a quantum-classical hybrid model composed of several QNet blocks linked by residual connections, as an isomorph Transformer Encoder. We evaluated our work on various natural language processing tasks, including text classification, rating score prediction, and named entity recognition. Our models exhibit compelling performance over classical state-of-the-art models with a thousand times fewer parameters. In summary, this work investigates the advantage of machine learning on near-term quantum computers in sequential data by experimenting with natural language processing tasks.",
keywords = "deep learning model, natural language processing, quantum machine learning",
author = "Wei Day and Chen, {Hao Sheng} and Sun, {Min Te}",
note = "Publisher Copyright: {\textcopyright} 2023 IEEE.; 4th IEEE International Conference on Quantum Computing and Engineering, QCE 2023 ; Conference date: 17-09-2023 Through 22-09-2023",
year = "2023",
doi = "10.1109/QCE57702.2023.00035",
language = "???core.languages.en_GB???",
series = "Proceedings - 2023 IEEE International Conference on Quantum Computing and Engineering, QCE 2023",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "246--255",
editor = "Hausi Muller and Yuri Alexev and Andrea Delgado and Greg Byrd",
booktitle = "Proceedings - 2023 IEEE International Conference on Quantum Computing and Engineering, QCE 2023",
}