@inproceedings{727d20a562d0412e8faaa260c5669fdd,
title = "Knowledge distillation based online learning methodology using unlabeled data stream",
abstract = "In supervised learning, the performance of a learning model degrades over time due to concept drift, as the model overfits the training data from earlier time steps. To mitigate such concept drift, online learning methodologies have been proposed that train the learning model on a continuously arriving data stream. In this paper, we propose an online learning methodology in which a teacher model continuously trains a student model based on knowledge distillation theory. The teacher model generates an output distribution, called a soft label, to label the unlabeled data stream, and the student model is trained on the unlabeled data stream with the soft labels from the teacher model. Experimental results show that the proposed method achieves better performance, such as higher classification accuracy, than a batch learning model trained only on the labeled data stream.",
keywords = "Concept Drift, Knowledge Distillation, Knowledge Transfer, Online Learning",
author = "Sanghyun Seo and Changhoon Jeong and Seongchul Park and Juntae Kim",
note = "Publisher Copyright: {\textcopyright} 2018 Association for Computing Machinery; 2018 International Conference on Machine Learning and Machine Intelligence, MLMI 2018; Conference date: 28-09-2018 through 30-09-2018",
year = "2018",
month = sep,
day = "28",
doi = "10.1145/3278312.3278319",
language = "English",
series = "ACM International Conference Proceeding Series",
publisher = "Association for Computing Machinery",
pages = "68--71",
booktitle = "Proceedings of the International Conference on Machine Learning and Machine Intelligence, MLMI 2018",
address = "United States",
}