We study a direct-sum question for read-once branching programs. If M(f) denotes the minimum average memory required to compute a function f(x_1, x_2, ..., x_n), how much memory is required to compute f on k independent inputs that arrive in parallel? We show that when the inputs are sampled independently from some domain X and M(f) = Omega(n), then computing the value of f on k streams requires average memory at least Omega(k * M(f)/n). Our results are obtained by defining new ways to measure the information complexity of read-once branching programs. We define two such measures: the transitional and cumulative information content. We prove that any read-once branching program with transitional information content I can be simulated using average memory O(n(I+1)). On the other hand, if every read-once branching program with cumulative information content I can be simulated with average memory O(I+1), then computing f on k inputs requires average memory at least Omega(k * (M(f)-1)).
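The main direct-sum bound can be read compactly as follows; this is a sketch in LaTeX display form, where M_k(f) is notation introduced here (not in the abstract) for the minimum average memory needed to compute f on k independent parallel inputs, with M(f) = M_1(f):

% Sketch of the main direct-sum bound stated in the abstract.
% M_k(f) is assumed notation for the k-fold problem; M(f) = M_1(f).
\[
  M(f) = \Omega(n)
  \quad\Longrightarrow\quad
  M_k(f) = \Omega\!\left(\frac{k \cdot M(f)}{n}\right).
\]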
@InProceedings{rao_et_al:LIPIcs.APPROX-RANDOM.2016.44,
  author =    {Rao, Anup and Sinha, Makrand},
  title =     {{A Direct-Sum Theorem for Read-Once Branching Programs}},
  booktitle = {Approximation, Randomization, and Combinatorial Optimization. Algorithms and Techniques (APPROX/RANDOM 2016)},
  pages =     {44:1--44:15},
  series =    {Leibniz International Proceedings in Informatics (LIPIcs)},
  ISBN =      {978-3-95977-018-7},
  ISSN =      {1868-8969},
  year =      {2016},
  volume =    {60},
  editor =    {Jansen, Klaus and Mathieu, Claire and Rolim, Jos\'{e} D. P. and Umans, Chris},
  publisher = {Schloss Dagstuhl -- Leibniz-Zentrum f{\"u}r Informatik},
  address =   {Dagstuhl, Germany},
  URL =       {https://drops.dagstuhl.de/entities/document/10.4230/LIPIcs.APPROX-RANDOM.2016.44},
  URN =       {urn:nbn:de:0030-drops-66676},
  doi =       {10.4230/LIPIcs.APPROX-RANDOM.2016.44},
  annote =    {Keywords: Direct-sum, Information complexity, Streaming Algorithms}
}