@inproceedings{1d5ff8e06f03409fa8f2e0f9adee35c9,
  title     = {Mitigating Catastrophic Forgetting Through Knowledge Transfer and Weighted Loss Integration in Continual Learning},
  abstract  = {Continual learning is an emerging field of artificial intelligence (AI) that focuses on equipping models with the ability to adjust to new tasks while retaining previously acquired knowledge. This capability is critical for the development of versatile AI systems. It enables AI to handle dynamic real-world data effectively. Traditional machine learning models often struggle to meet efficiency and resource demands when dealing with changing datasets. Thus, continual learning has become a promising alternative. In this paper, we introduce Knowledge Distillation and Combined Loss Enhanced Continual Learning Network (KDCL), which aims to mitigate catastrophic forgetting and balance the stability and plasticity of continual learning. KDCL combines knowledge distillation and combined loss functions to improve learning efficiency. Through experiments on the CIFAR-100 dataset, KDCL significantly improves the average accuracy compared to existing models, highlighting its capability to retain past knowledge and effectively integrate new information.},
  keywords  = {Catastrophic forgetting, Combined loss, Continual learning, Knowledge transfer},
  author    = {Zhong, Lin and Sui, Qingya and Todo, Yuki and Tang, Jun and Gao, Shangce},
  note      = {Publisher Copyright: {\textcopyright} 2024 IEEE.; 21st International Conference on Networking, Sensing and Control, ICNSC 2024 ; Conference date: 18-10-2024 Through 20-10-2024},
  year      = {2024},
  doi       = {10.1109/ICNSC62968.2024.10760225},
  language  = {English},
  series    = {{ICNSC} 2024 - 21st International Conference on Networking, Sensing and Control: Artificial Intelligence for the Next Industrial Revolution},
  publisher = {Institute of Electrical and Electronics Engineers Inc.},
  booktitle = {{ICNSC} 2024 - 21st International Conference on Networking, Sensing and Control},
}