Search Machine Learning Repository
@inproceedings{icml2014c1_ermon14,
    author    = {Ermon, Stefano and Gomes, Carla and Sabharwal, Ashish and Selman, Bart},
    editor    = {Jebara, Tony and Xing, Eric P.},
    title     = {Low-density Parity Constraints for Hashing-Based Discrete Integration},
    booktitle = {Proceedings of the 31st International Conference on Machine Learning ({ICML}-14)},
    publisher = {JMLR Workshop and Conference Proceedings},
    year      = {2014},
    pages     = {271--279},
    url       = {http://jmlr.org/proceedings/papers/v32/ermon14.pdf},
    abstract  = {In recent years, a number of probabilistic inference and counting techniques have been proposed that exploit pairwise independent hash functions to infer properties of succinctly defined high-dimensional sets. While providing desirable statistical guarantees, typical constructions of such hash functions are themselves not amenable to efficient inference. Inspired by the success of LDPC codes, we propose the use of low-density parity constraints to make inference more tractable in practice. While not strongly universal, we show that such sparse constraints belong to a new class of hash functions that we call Average Universal. These weaker hash functions retain the desirable statistical guarantees needed by most such probabilistic inference methods. Thus, they continue to provide provable accuracy guarantees while at the same time making a number of algorithms significantly more scalable in practice. Using this technique, we provide new, tighter bounds for challenging discrete integration and model counting problems.},
}