For distributions over discrete product spaces ∏_{i=1}^n Ω_i', Glauber dynamics is a Markov chain that, at each step, resamples a random coordinate conditioned on the other coordinates. We show that k-Glauber dynamics, which resamples a random subset of k coordinates, mixes k times faster in χ²-divergence, and, assuming approximate tensorization of entropy, mixes k times faster in KL-divergence. We apply this to obtain parallel algorithms in two settings: (1) For the Ising model μ_{J,h}(x) ∝ exp(1/2 ⟨x,Jx⟩ + ⟨h,x⟩) with ‖J‖ < 1-c (the regime where fast mixing is known), we show that each step of Θ(n/‖J‖_F)-Glauber dynamics can be implemented efficiently in parallel, yielding a parallel sampling algorithm with running time Õ(‖J‖_F) = Õ(√n). (2) For the mixed p-spin model at high enough temperature, we show that with high probability each step of Θ(√n)-Glauber dynamics can be implemented efficiently, again giving running time Õ(√n).
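To make the chain concrete, the following Python sketch (not taken from the paper) implements one step of k-Glauber dynamics for the Ising model μ_{J,h} by brute-force enumeration of the 2^k configurations of the chosen block. This exact conditional resampling is only practical for small k; it is a minimal illustration of the dynamics being analyzed, not the efficient parallel per-step implementation that the paper constructs.

```python
import itertools
import numpy as np

def k_glauber_step(x, J, h, k, rng):
    """One step of k-Glauber dynamics for the Ising model
    mu(x) ∝ exp(0.5 <x, J x> + <h, x>) on {-1, +1}^n.

    A uniformly random subset S of k coordinates is resampled from its
    conditional distribution given x outside S, by enumerating all 2^k
    sub-configurations (feasible only for small k)."""
    n = len(x)
    S = rng.choice(n, size=k, replace=False)
    rest = np.setdiff1d(np.arange(n), S)
    # Effective field on the resampled block coming from the frozen coordinates.
    field = h[S] + J[np.ix_(S, rest)] @ x[rest]
    J_SS = J[np.ix_(S, S)]
    configs = np.array(list(itertools.product([-1, 1], repeat=k)))
    # Unnormalized log-probability of each sub-configuration y:
    # 0.5 <y, J_SS y> + <y, field>.
    log_w = 0.5 * np.einsum('ci,ij,cj->c', configs, J_SS, configs) + configs @ field
    log_w -= log_w.max()  # numerical stability before exponentiating
    p = np.exp(log_w)
    p /= p.sum()
    x_new = x.copy()
    x_new[S] = configs[rng.choice(len(configs), p=p)]
    return x_new

# Example usage on a small random symmetric instance (all parameters illustrative).
rng = np.random.default_rng(0)
n, k = 20, 4
A = rng.normal(size=(n, n)) / np.sqrt(n)
J = (A + A.T) / 2
np.fill_diagonal(J, 0.0)
h = 0.1 * rng.normal(size=n)
x = rng.choice([-1, 1], size=n)
for _ in range(1000):
    x = k_glauber_step(x, J, h, k, rng)
```

The speed-up claim in the abstract is that running this block dynamics with k coordinates per step mixes k times faster than single-site Glauber dynamics; the algorithmic contribution is implementing each such block update in parallel rather than by the exponential-in-k enumeration used above.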
@InProceedings{lee:LIPIcs.APPROX/RANDOM.2024.49,
  author    = {Lee, Holden},
  title     = {{Parallelising Glauber Dynamics}},
  booktitle = {Approximation, Randomization, and Combinatorial Optimization. Algorithms and Techniques (APPROX/RANDOM 2024)},
  pages     = {49:1--49:24},
  series    = {Leibniz International Proceedings in Informatics (LIPIcs)},
  ISBN      = {978-3-95977-348-5},
  ISSN      = {1868-8969},
  year      = {2024},
  volume    = {317},
  editor    = {Kumar, Amit and Ron-Zewi, Noga},
  publisher = {Schloss Dagstuhl -- Leibniz-Zentrum f{\"u}r Informatik},
  address   = {Dagstuhl, Germany},
  URL       = {https://drops.dagstuhl.de/entities/document/10.4230/LIPIcs.APPROX/RANDOM.2024.49},
  URN       = {urn:nbn:de:0030-drops-210424},
  doi       = {10.4230/LIPIcs.APPROX/RANDOM.2024.49},
  annote    = {Keywords: sampling, Ising model, parallel algorithm, Markov chain, Glauber dynamics}
}