We prove nearly matching upper and lower bounds on the randomized communication complexity of the following problem: Alice and Bob are each given a probability distribution over $n$ elements, and they wish to estimate within $\pm\epsilon$ the statistical (total variation) distance between their distributions. For some range of parameters, there is up to a $\log(n)$ factor gap between the upper and lower bounds, and we identify a barrier to using information complexity techniques to improve the lower bound in this case. We also prove a side result that we discovered along the way: the randomized communication complexity of $n$-bit Majority composed with $n$-bit Greater-Than is $\Theta(n \log n)$.
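For concreteness, the quantity the players must estimate can be written via the standard definition of statistical (total variation) distance; the symbols $P$, $Q$, and $S$ below are our notation for Alice's distribution, Bob's distribution, and an event, and are not taken from the abstract itself:

\[
  \mathrm{TV}(P, Q) \;=\; \frac{1}{2} \sum_{i=1}^{n} \bigl| P(i) - Q(i) \bigr| \;=\; \max_{S \subseteq [n]} \bigl| P(S) - Q(S) \bigr|,
\]

so a protocol for the problem must output a value within $\pm\epsilon$ of $\mathrm{TV}(P, Q)$.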
@InProceedings{watson:LIPIcs.APPROX-RANDOM.2017.49,
  author    = {Watson, Thomas},
  title     = {{Communication Complexity of Statistical Distance}},
  booktitle = {Approximation, Randomization, and Combinatorial Optimization. Algorithms and Techniques (APPROX/RANDOM 2017)},
  pages     = {49:1--49:10},
  series    = {Leibniz International Proceedings in Informatics (LIPIcs)},
  ISBN      = {978-3-95977-044-6},
  ISSN      = {1868-8969},
  year      = {2017},
  volume    = {81},
  editor    = {Jansen, Klaus and Rolim, Jos\'{e} D. P. and Williamson, David P. and Vempala, Santosh S.},
  publisher = {Schloss Dagstuhl -- Leibniz-Zentrum f{\"u}r Informatik},
  address   = {Dagstuhl, Germany},
  URL       = {https://drops.dagstuhl.de/entities/document/10.4230/LIPIcs.APPROX-RANDOM.2017.49},
  URN       = {urn:nbn:de:0030-drops-75984},
  doi       = {10.4230/LIPIcs.APPROX-RANDOM.2017.49},
  annote    = {Keywords: Communication, complexity, statistical, distance}
}