@comment{Search Machine Learning Repository}
@inproceedings{icml2014c2_suna14,
    Author = {Sun, Ke and Marchand-Maillet, St{\'e}phane},
    Title = {An Information Geometry of Statistical Manifold Learning},
    Booktitle = {Proceedings of the 31st International Conference on Machine Learning ({ICML}-14)},
    Editor = {Jebara, Tony and Xing, Eric P.},
    Publisher = {JMLR Workshop and Conference Proceedings},
    Pages = {1--9},
    Year = {2014},
    Url = {http://jmlr.org/proceedings/papers/v32/suna14.pdf},
    Abstract = {Manifold learning seeks low-dimensional representations of high-dimensional data. The main tactics have been exploring the geometry in an input data space and an output embedding space. We develop a manifold learning theory in a hypothesis space consisting of models. A model means a specific instance of a collection of points, e.g., the input data collectively or the output embedding collectively. The semi-Riemannian metric of this hypothesis space is uniquely derived in closed form based on the information geometry of probability distributions. There, manifold learning is interpreted as a trajectory of intermediate models. The volume of a continuous region reveals an amount of information. It can be measured to define model complexity and embedding quality. This provides deep unified perspectives of manifold learning theory.},
}