@inproceedings{df581823e4564c558c82efd337cd30d3,
title = "CycleGAN-Based Image Translation for Near-Infrared Camera-Trap Image Recognition",
abstract = "Due to its invisibility, NIR (near-infrared) flash has been widely used to capture images of wild animals at night. Although the animals can be captured without notice, the gray NIR images lack color and texture information and are thus difficult to analyze, for both humans and machines. In this paper, we propose to use CycleGAN (Cycle-Consistent Generative Adversarial Network) to translate NIR images to the incandescent domain for visual quality enhancement. Example translations show that both color and texture can be well recovered by the proposed CycleGAN model. The recognition performance of an SSD-based detector on the translated incandescent images is also significantly better than that on the original NIR images. Taking wildebeest and zebra as examples, increases of 16% and 8% in recognition accuracy have been observed.",
keywords = "Deep learning, Generative adversarial networks, Image translation",
author = "Renwu Gao and Siting Zheng and Jia He and Linlin Shen",
note = "Publisher Copyright: {\textcopyright} 2020, Springer Nature Switzerland AG.; 2nd International Conference on Pattern Recognition and Artificial Intelligence, ICPRAI 2020 ; Conference date: 19-10-2020 Through 23-10-2020",
year = "2020",
doi = "10.1007/978-3-030-59830-3_39",
language = "English",
isbn = "9783030598297",
series = "Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)",
publisher = "Springer Science and Business Media Deutschland GmbH",
pages = "453--464",
editor = "Yue Lu and Nicole Vincent and Yuen, {Pong Chi} and Wei-Shi Zheng and Farida Cheriet and Suen, {Ching Y.}",
booktitle = "Pattern Recognition and Artificial Intelligence - International Conference, ICPRAI 2020, Proceedings",
address = "Germany",
}