@misc{simon2016imagenet,
  title         = {{ImageNet} Pre-trained Models with Batch Normalization},
  author        = {Simon, Marcel and Rodner, Erik and Denzler, Joachim},
  year          = {2016},
  eprint        = {1612.01452},
  archiveprefix = {arXiv},
  primaryclass  = {cs.CV},
  url           = {https://arxiv.org/abs/1612.01452},
  abstract      = {Convolutional neural networks (CNN) pre-trained on ImageNet are the backbone of most state-of-the-art approaches. In this paper, we present a new set of pretrained models with popular state-of-the-art architectures for the Caffe framework. The first release includes Residual Networks (ResNets) with generation script as well as the batch-normalization-variants of AlexNet and VGG19. All models outperform previous models with the same architecture. The models and training code are available at http://www.inf-cv.uni-jena.de/Research/CNN+Models.html and https://github.com/cvjena/cnn-models.},
}