@inproceedings{icml2014c2_bai14,
    author    = {Bai, Qinxun and Lam, Henry and Sclaroff, Stan},
    title     = {A {Bayesian} Framework for Online Classifier Ensemble},
    booktitle = {Proceedings of the 31st International Conference on Machine Learning ({ICML}-14)},
    editor    = {Jebara, Tony and Xing, Eric P.},
    publisher = {{JMLR} Workshop and Conference Proceedings},
    year      = {2014},
    pages     = {1584--1592},
    url       = {http://jmlr.org/proceedings/papers/v32/bai14.pdf},
    abstract  = {We propose a Bayesian framework for recursively estimating the classifier weights in online learning of a classifier ensemble. In contrast with past methods, such as stochastic gradient descent or online boosting, our framework estimates the weights in terms of evolving posterior distributions. For a specified class of loss functions, we show that it is possible to formulate a suitably defined likelihood function and hence use the posterior distribution as an approximation to the global empirical loss minimizer. If the stream of training data is sampled from a stationary process, we can also show that our framework admits a superior rate of convergence to the expected loss minimizer than is possible with standard stochastic gradient descent. In experiments with real-world datasets, our formulation often performs better than online boosting algorithms.},
}