@inproceedings{Kaeding16_FDN,
  title     = {Fine-tuning Deep Neural Networks in Continuous Learning Scenarios},
  author    = {Christoph Käding and Erik Rodner and Alexander Freytag and Joachim Denzler},
  booktitle = {ACCV Workshop on Interpretation and Visualization of Deep Neural Nets (ACCV-WS)},
  year      = {2016},
  abstract  = {The revival of deep neural networks and the availability of ImageNet laid the foundation for recent success in highly complex recognition tasks. However, ImageNet does not cover all visual concepts of all possible application scenarios. Hence, application experts still record new data constantly and expect the data to be used upon its availability. In this paper, we follow this observation and apply the classical concept of fine-tuning deep neural networks to scenarios where data from known or completely new classes is continuously added. Besides a straightforward realization of continuous fine-tuning, we empirically analyze how the computational burden of training can be further reduced. Finally, we visualize how the network's attention maps evolve over time, which allows for visually investigating what the network learned during continuous fine-tuning.},
  groups    = {deeplearning,lifelonglearning,incrementallearning},
  url       = {http://www.interpretable-ml.org/accv2016workshop/},
}