@inproceedings{5a14982f69d846ed84508741ccc87dd4,
title = "Exploiting Style Transfer and Semantic Segmentation to Facilitate Infrared and Visible Image Fusion",
abstract = "Image fusion integrates different imaging sources to generate one with improved scene representation or visual perception, supporting advanced vision tasks such as object detection and semantic analysis. Fusing infrared and visible images is a widely studied subject, and the current trend is to adopt deep learning models. It is well known that training a deep fusion model often requires many labeled data. Nevertheless, existing datasets only provide images without precise annotations, affecting the fusion presentation and limiting further development. This research creates a dataset for infrared and visible image fusion with semantic segmentation information. We utilize existing image datasets specific to semantic segmentation and generate corresponding infrared images by style transferring. A labeled dataset for image fusion is formed, in which each pair of infrared and visible images is accompanied by their semantic segmentation labels. The performance of image fusion in target datasets can thus be improved.",
keywords = "Image Fusion, Semantic Segmentation, Style Transfer",
author = "Chang, {Hsing Wei} and Su, {Po Chyi} and Lin, {Si Ting}",
note = "Publisher Copyright: {\textcopyright} The Author(s), under exclusive license to Springer Nature Singapore Pte Ltd. 2024.; 28th International Conference on Technologies and Applications of Artificial Intelligence, TAAI 2023 ; Conference date: 01-12-2023 Through 02-12-2023",
year = "2024",
doi = "10.1007/978-981-97-1711-8_21",
language = "???core.languages.en_GB???",
isbn = "9789819717101",
series = "Communications in Computer and Information Science",
publisher = "Springer Science and Business Media Deutschland GmbH",
pages = "269--283",
editor = "Chao-Yang Lee and Chun-Li Lin and Hsuan-Ting Chang",
booktitle = "Technologies and Applications of Artificial Intelligence - 28th International Conference, TAAI 2023, Proceedings",
}