@inproceedings{icml2014c2_sunc14,
    Publisher = {JMLR Workshop and Conference Proceedings},
    Title = {A Convergence Rate Analysis for LogitBoost, MART and Their Variant},
    Url = {http://jmlr.org/proceedings/papers/v32/sunc14.pdf},
    Abstract = {LogitBoost, MART and their variant can be viewed as additive tree regression using logistic loss and boosting-style optimization. We analyze their convergence rates based on a new weak learnability formulation. We show that the convergence rate is $O(\frac{1}{T})$ when using gradient descent only, while a linear rate is achieved when using Newton descent. Moreover, introducing Newton descent when growing the trees, as LogitBoost does, leads to a faster linear rate. Empirical results on UCI datasets support our analysis.},
    Author = {Peng Sun and Tong Zhang and Jie Zhou},
    Editor = {Tony Jebara and Eric P. Xing},
    Year = {2014},
    Booktitle = {Proceedings of the 31st International Conference on Machine Learning (ICML-14)},
    Pages = {1251--1259}
   }
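
The abstract contrasts gradient-only updates (MART-style) with Newton updates (LogitBoost-style) for additive tree regression under logistic loss. A minimal sketch of that contrast, assuming scikit-learn regression trees as the weak learners; this is our illustration, not the authors' code, and the function and parameter names are ours:

    # Sketch: additive tree boosting on logistic loss, gradient vs. Newton leaf steps.
    # Assumes binary labels y in {0, 1}; not the paper's reference implementation.
    import numpy as np
    from sklearn.tree import DecisionTreeRegressor

    def boost(X, y, T=100, eta=0.1, newton=True):
        F = np.zeros(len(y))                # current additive model F(x) on the training data
        trees, leaf_values = [], []
        for _ in range(T):
            p = 1.0 / (1.0 + np.exp(-F))    # sigmoid(F), predicted probability
            g = p - y                       # gradient of logistic loss w.r.t. F
            h = p * (1.0 - p)               # Hessian (curvature) of logistic loss
            # MART-style step: fit a regression tree to the negative gradient
            tree = DecisionTreeRegressor(max_depth=3).fit(X, -g)
            leaf = tree.apply(X)            # leaf index of every training point
            vals = {}
            for l in np.unique(leaf):
                m = leaf == l
                if newton:                  # per-leaf Newton step: -sum(g)/sum(h)
                    vals[l] = -g[m].sum() / (h[m].sum() + 1e-12)
                else:                       # plain gradient step: mean of -g in the leaf
                    vals[l] = -g[m].mean()
            F += eta * np.array([vals[l] for l in leaf])
            trees.append(tree)
            leaf_values.append(vals)
        return trees, leaf_values

Comparing the training-loss curves of boost(X, y, newton=False) and boost(X, y, newton=True) should show the qualitative gap the paper analyzes: roughly $O(\frac{1}{T})$ decay with gradient steps versus linear (geometric) decay with Newton steps.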