@inproceedings{12ae81abb73041f1a88957a5213798b7,
title = "An Efficient and Fast Softmax Hardware Architecture (EFSHA) for Deep Neural Networks",
abstract = "Deep neural networks are widely used in computer vision applications due to their high performance. However, DNNs involve a large number of computations in the training and inference phase. Among the different layers of a DNN, the softmax layer has one of the most complex computations as it involves exponent and division operations. So, a hardware-efficient implementation is required to reduce the on-chip resources. In this paper, we propose a new hardware-efficient and fast implementation of the softmax activation function. The proposed hardware implementation consumes fewer hardware resources and works at high speed as compared to the state-of-the-art techniques.",
keywords = "FPGA, Softmax layer, area-efficient implementation, deep neural networks, learning on-chip",
author = "Hussain, {Muhammad Awais} and Tsai, {Tsung Han}",
note = "Publisher Copyright: {\textcopyright} 2021 IEEE.; 3rd IEEE International Conference on Artificial Intelligence Circuits and Systems, AICAS 2021 ; Conference date: 06-06-2021 Through 09-06-2021",
year = "2021",
month = jun,
day = "6",
doi = "10.1109/AICAS51828.2021.9458541",
language = "???core.languages.en_GB???",
series = "2021 IEEE 3rd International Conference on Artificial Intelligence Circuits and Systems, AICAS 2021",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
booktitle = "2021 IEEE 3rd International Conference on Artificial Intelligence Circuits and Systems, AICAS 2021",
}