Search Machine Learning Repository:
@inproceedings{icml2014c1_liua14,
    Author    = {Liu, Yong and Jiang, Shali and Liao, Shizhong},
    Title     = {Efficient Approximation of Cross-Validation for Kernel Methods using {Bouligand} Influence Function},
    Booktitle = {Proceedings of the 31st International Conference on Machine Learning ({ICML}-14)},
    Editor    = {Jebara, Tony and Xing, Eric P.},
    Publisher = {JMLR Workshop and Conference Proceedings},
    Pages     = {324--332},
    Year      = {2014},
    Url       = {http://jmlr.org/proceedings/papers/v32/liua14.pdf},
    Abstract  = {Model selection is one of the key issues both in recent research and application of kernel methods. Cross-validation is a commonly employed and widely accepted model selection criterion. However, it requires multiple times of training the algorithm under consideration, which is computationally intensive. In this paper, we present a novel strategy for approximating the cross-validation based on the Bouligand influence function (BIF), which only requires the solution of the algorithm once. The BIF measures the impact of an infinitesimal small amount of contamination of the original distribution. We first establish the link between the concept of BIF and the concept of cross-validation. The BIF is related to the first order term of a Taylor expansion. Then, we calculate the BIF and higher order BIFs, and apply these theoretical results to approximate the cross-validation error in practice. Experimental results demonstrate that our approximate cross-validation criterion is sound and efficient.},
}