@comment{Search Machine Learning Repository}
@inproceedings{icml2014c2_balle14,
  author    = {Balle, Borja and Hamilton, William and Pineau, Joelle},
  editor    = {Jebara, Tony and Xing, Eric P.},
  title     = {Methods of Moments for Learning Stochastic Languages: Unified Presentation and Empirical Comparison},
  booktitle = {Proceedings of the 31st International Conference on Machine Learning ({ICML}-14)},
  publisher = {JMLR Workshop and Conference Proceedings},
  year      = {2014},
  pages     = {1386--1394},
  url       = {http://jmlr.org/proceedings/papers/v32/balle14.pdf},
  abstract  = {Probabilistic latent-variable models are a powerful tool for modelling structured data. However, traditional expectation-maximization methods of learning such models are both computationally expensive and prone to local-minima. In contrast to these traditional methods, recently developed learning algorithms based upon the method of moments are both computationally efficient and provide strong statistical guarantees. In this work, we provide a unified presentation and empirical comparison of three general moment-based methods in the context of modelling stochastic languages. By rephrasing these methods upon a common theoretical ground, introducing novel theoretical results where necessary, we provide a clear comparison, making explicit the statistical assumptions upon which each method relies. With this theoretical grounding, we then provide an in-depth empirical analysis of the methods on both real and synthetic data with the goal of elucidating performance trends and highlighting important implementation details.},
}