@inproceedings{ChenshenWu2023,
  author    = {Wu, Chenshen and van de Weijer, Joost},
  title     = {Density Map Distillation for Incremental Object Counting},
  booktitle = {Proceedings of the {IEEE/CVF} Conference on Computer Vision and Pattern Recognition Workshops},
  year      = {2023},
  pages     = {2505--2514},
  abstract  = {We investigate the problem of incremental learning for object counting, where a method must learn to count a variety of object classes from a sequence of datasets. A na{\"i}ve approach to incremental object counting would suffer from catastrophic forgetting, where it would suffer from a dramatic performance drop on previous tasks. In this paper, we propose a new exemplar-free functional regularization method, called Density Map Distillation (DMD). During training, we introduce a new counter head for each task and introduce a distillation loss to prevent forgetting of previous tasks. Additionally, we introduce a cross-task adaptor that projects the features of the current backbone to the previous backbone. This projector allows for the learning of new features while the backbone retains the relevant features for previous tasks. Finally, we set up experiments of incremental learning for counting new objects. Results confirm that our method greatly reduces catastrophic forgetting and outperforms existing methods.},
  optnote   = {MSIAU; exported from refbase (http://158.109.8.37/show.php?record=3916), last updated on Mon, 22 Jan 2024 10:44:53 +0100},
  doi       = {10.1109/CVPRW59228.2023.00249},
  opturl    = {https://ieeexplore.ieee.org/document/10208935},
}