% Source: Search Machine Learning Repository (text outside entries is ignored by BibTeX)
@inproceedings{icml2014c2_hsu14,
    author    = {Hsu, Daniel and Sabato, Sivan},
    title     = {Heavy-Tailed Regression with a Generalized Median-of-Means},
    booktitle = {Proceedings of the 31st International Conference on Machine Learning ({ICML}-14)},
    editor    = {Jebara, Tony and Xing, Eric P.},
    publisher = {JMLR Workshop and Conference Proceedings},
    pages     = {37--45},
    year      = {2014},
    url       = {http://jmlr.org/proceedings/papers/v32/hsu14.pdf},
    abstract  = {This work proposes a simple and computationally efficient estimator for linear regression, and other smooth and strongly convex loss minimization problems. We prove loss approximation guarantees that hold for general distributions, including those with heavy tails. All prior results only hold for estimators which either assume bounded or subgaussian distributions, require prior knowledge of distributional properties, or are not known to be computationally tractable. In the special case of linear regression with possibly heavy-tailed responses and with bounded and well-conditioned covariates in $d$-dimensions, we show that a random sample of size $\tilde{O}(d\log(1/\delta))$ suffices to obtain a constant factor approximation to the optimal loss with probability $1-\delta$, a minimax optimal sample complexity up to log factors. The core technique used in the proposed estimator is a new generalization of the median-of-means estimator to arbitrary metric spaces.},
}