The Hamming distance function Ham_{n,d} returns 1 on all pairs of inputs x and y that differ in at most d coordinates and returns 0 otherwise. We initiate the study of the information complexity of the Hamming distance function.
We give a new optimal lower bound for the information complexity of the Ham_{n,d} function in the small-error regime, where the protocol is required to err with probability at most epsilon < d/n. We also give a new conditional lower bound for the information complexity of Ham_{n,d} that is optimal in all regimes. These results imply the first new lower bounds on the communication complexity of the Hamming distance function in the shared-randomness two-way communication model since Pang and El-Gamal (1986). They also imply new lower bounds in the areas of property testing and parity decision tree complexity.
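To make the definition above concrete, the following minimal Python sketch computes Ham_{n,d} exactly as stated: it returns 1 when the two length-n inputs differ in at most d coordinates and 0 otherwise. The function name and the example strings are illustrative and not taken from the paper.

def ham(x, y, d):
    """Return 1 if x and y differ in at most d coordinates, else 0 (Ham_{n,d})."""
    assert len(x) == len(y)  # both inputs have the same length n
    distance = sum(1 for xi, yi in zip(x, y) if xi != yi)  # Hamming distance
    return 1 if distance <= d else 0

# Example (hypothetical inputs): the strings below differ in exactly 2 coordinates,
# so Ham_{5,2} evaluates to 1 while Ham_{5,1} evaluates to 0.
assert ham("01101", "11100", d=2) == 1
assert ham("01101", "11100", d=1) == 0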
@InProceedings{blais_et_al:LIPIcs.APPROX-RANDOM.2014.465,
author = {Blais, Eric and Brody, Joshua and Ghazi, Badih},
title = {{The Information Complexity of Hamming Distance}},
booktitle = {Approximation, Randomization, and Combinatorial Optimization. Algorithms and Techniques (APPROX/RANDOM 2014)},
pages = {465--489},
series = {Leibniz International Proceedings in Informatics (LIPIcs)},
ISBN = {978-3-939897-74-3},
ISSN = {1868-8969},
year = {2014},
volume = {28},
editor = {Jansen, Klaus and Rolim, Jos\'{e} and Devanur, Nikhil R. and Moore, Cristopher},
publisher = {Schloss Dagstuhl -- Leibniz-Zentrum f{\"u}r Informatik},
address = {Dagstuhl, Germany},
URL = {https://drops.dagstuhl.de/entities/document/10.4230/LIPIcs.APPROX-RANDOM.2014.465},
URN = {urn:nbn:de:0030-drops-47174},
doi = {10.4230/LIPIcs.APPROX-RANDOM.2014.465},
annote = {Keywords: Hamming distance, communication complexity, information complexity}
}