@inproceedings{icml2014c2_yangc14,
  author    = {Yang, Eunho and Lozano, Aurelie and Ravikumar, Pradeep},
  title     = {Elementary Estimators for High-Dimensional Linear Regression},
  booktitle = {Proceedings of the 31st International Conference on Machine Learning ({ICML}-14)},
  editor    = {Jebara, Tony and Xing, Eric P.},
  publisher = {JMLR Workshop and Conference Proceedings},
  year      = {2014},
  pages     = {388--396},
  url       = {http://jmlr.org/proceedings/papers/v32/yangc14.pdf},
  abstract  = {We consider the problem of structurally constrained high-dimensional linear regression. This has attracted considerable attention over the last decade, with state of the art statistical estimators based on solving regularized convex programs. While these typically non-smooth convex programs can be solved in polynomial time, scaling the state of the art optimization methods to very large-scale problems is an ongoing and rich area of research. In this paper, we attempt to address this scaling issue at the source, by asking whether one can build \emph{simpler} possibly closed-form estimators, that yet come with statistical guarantees that are nonetheless comparable to regularized likelihood estimators! We answer this question in the affirmative, with variants of the classical ridge and OLS (ordinary least squares estimators) for linear regression. We analyze our estimators in the high-dimensional setting, and moreover provide empirical corroboration of its performance on simulated as well as real world microarray data.},
}