@inproceedings{Barz18:GoodTraining,
  title     = {Deep Learning is not a Matter of Depth but of Good Training},
  author    = {Barz, Bj{\"o}rn and Denzler, Joachim},
  booktitle = {International Conference on Pattern Recognition and Artificial Intelligence ({ICPRAI})},
  year      = {2018},
  pages     = {683--687},
  publisher = {CENPARMI, Concordia University},
  venue     = {Montreal, Canada},
  isbn      = {1-895193-06-0},
  abstract  = {In the past few years, deep neural networks have often been claimed to provide greater representational power than shallow networks. In this work, we propose a wide, shallow, and strictly sequential network architecture without any residual connections. When trained with cyclical learning rate schedules, this simple network achieves a classification accuracy on CIFAR-100 competitive to a 10 times deeper residual network, while it can be trained 4 times faster. This provides evidence that neither depth nor residual connections are crucial for deep learning. Instead, residual connections just seem to facilitate training using plain SGD by avoiding bad local minima. We believe that our work can hence point the research community to the actual bottleneck of contemporary deep learning: the optimization algorithms.},
  groups    = {deeplearning},
}