@inproceedings{icml2014c2_johnson14,
  author    = {Johnson, Matthew and Willsky, Alan},
  title     = {Stochastic Variational Inference for {Bayesian} Time Series Models},
  booktitle = {Proceedings of the 31st International Conference on Machine Learning ({ICML}-14)},
  editor    = {Jebara, Tony and Xing, Eric P.},
  publisher = {JMLR Workshop and Conference Proceedings},
  year      = {2014},
  pages     = {1854--1862},
  url       = {http://jmlr.org/proceedings/papers/v32/johnson14.pdf},
  abstract  = {Bayesian models provide powerful tools for analyzing complex time series data, but performing inference with large datasets is a challenge. Stochastic variational inference (SVI) provides a new framework for approximating model posteriors with only a small number of passes through the data, enabling such models to be fit at scale. However, its application to time series models has not been studied. In this paper we develop SVI algorithms for several common Bayesian time series models, namely the hidden Markov model (HMM), hidden semi-Markov model (HSMM), and the nonparametric HDP-HMM and HDP-HSMM. In addition, because HSMM inference can be expensive even in the minibatch setting of SVI, we develop fast approximate updates for HSMMs with durations distributions that are negative binomials or mixtures of negative binomials.},
}