@inproceedings{icml2014c2_pandey14,
    Publisher = {JMLR Workshop and Conference Proceedings},
    Title = {Learning by Stretching Deep Networks},
    Url = {http://jmlr.org/proceedings/papers/v32/pandey14.pdf},
    Abstract = {In recent years, deep architectures have gained a lot of prominence for learning complex AI tasks because of their capability to incorporate complex variations in data within the model. However, these models often need to be trained for a long time in order to obtain good results. In this paper, we propose a technique, called `stretching', that allows the same models to perform considerably better with very little training. We show that learning can be done tractably, even when the weight matrix is stretched to infinity, for some specific models. We also study tractable algorithms for implementing stretching in deep convolutional architectures in an iterative manner and derive bounds for its convergence. Our experimental results suggest that the proposed stretched deep convolutional networks are capable of achieving good performance for many object recognition tasks. More importantly, for a fixed network architecture, one can achieve much better accuracy using stretching rather than learning the weights using backpropagation.},
    Author = {Gaurav Pandey and Ambedkar Dukkipati},
    Editor = {Tony Jebara and Eric P. Xing},
    Year = {2014},
    Booktitle = {Proceedings of the 31st International Conference on Machine Learning (ICML-14)},
    Pages = {1719--1727}
}