We propose a randomized multiplicative weight update (MWU) algorithm for ℓ_∞ regression that runs in Õ(n^{2+1/22.5} poly(1/ε)) time when the matrix multiplication exponent ω = 2+o(1), improving upon the previous best Õ(n^{2+1/18} polylog(1/ε)) runtime in the low-accuracy regime. Our algorithm combines state-of-the-art inverse maintenance data structures with acceleration. To do so, we propose a novel acceleration scheme for MWU that exhibits the stability and robustness required for efficient implementations of the inverse maintenance data structures. We also design a faster deterministic MWU algorithm that runs in Õ(n^{2+1/12} poly(1/ε)) time when ω = 2+o(1), improving upon the previous best Õ(n^{2+1/6} polylog(1/ε)) runtime in the low-accuracy regime. We achieve this by proving a novel stability result that goes beyond previously known works based on interior point methods (IPMs). Our work is the first to use acceleration and inverse maintenance together efficiently, finally making the two most important building blocks of modern structured convex optimization compatible.
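To ground the MWU template the abstract refers to, here is a minimal, unaccelerated sketch of the classic multiplicative-weights approach to ℓ_∞ regression: each iteration solves a weighted least-squares subproblem and exponentially boosts the weights of rows with large residuals. The function name `linf_regression_mwu`, the step size `eps`, and the particular update rule are illustrative assumptions, not the paper's algorithm; the paper's contributions (the stable acceleration scheme and the inverse maintenance data structures) are not implemented here.

```python
import numpy as np

def linf_regression_mwu(A, b, eps=0.1, iters=200):
    """Heuristic sketch: approximately minimize ||Ax - b||_inf via MWU.

    Each round solves x = argmin sum_i w_i (a_i^T x - b_i)^2 and then
    multiplicatively reweights rows by their residual magnitude, so the
    weights concentrate on the rows attaining the l_inf objective.
    """
    n, d = A.shape
    w = np.ones(n) / n          # uniform initial weights over the rows
    x_best, err_best = None, np.inf
    for _ in range(iters):
        s = np.sqrt(w)
        # Weighted least-squares subproblem (the "linear system solve"
        # whose cost inverse maintenance amortizes in the paper)
        x, *_ = np.linalg.lstsq(s[:, None] * A, s * b, rcond=None)
        r = A @ x - b
        err = np.abs(r).max()
        if err < err_best:
            x_best, err_best = x, err
        # Multiplicative update: rows with large |residual| gain weight
        w = w * np.exp(eps * np.abs(r) / max(err, 1e-12))
        w = w / w.sum()
    return x_best, err_best

# Example usage on a random instance:
# A = np.random.randn(500, 20); b = np.random.randn(500)
# x, err = linf_regression_mwu(A, b)
```

On such a random instance the returned error decreases toward the optimum as `iters` grows; the paper's improvement comes from accelerating this outer loop while keeping the iterates stable enough that the per-iteration solves can be maintained cheaply.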
@InProceedings{adil_et_al:LIPIcs.ICALP.2025.5,
  author =    {Adil, Deeksha and Jiang, Shunhua and Kyng, Rasmus},
  title =     {{Acceleration Meets Inverse Maintenance: Faster ℓ\underline{∞}-Regression}},
  booktitle = {52nd International Colloquium on Automata, Languages, and Programming (ICALP 2025)},
  pages =     {5:1--5:16},
  series =    {Leibniz International Proceedings in Informatics (LIPIcs)},
  ISBN =      {978-3-95977-372-0},
  ISSN =      {1868-8969},
  year =      {2025},
  volume =    {334},
  editor =    {Censor-Hillel, Keren and Grandoni, Fabrizio and Ouaknine, Jo\"{e}l and Puppis, Gabriele},
  publisher = {Schloss Dagstuhl -- Leibniz-Zentrum f{\"u}r Informatik},
  address =   {Dagstuhl, Germany},
  URL =       {https://drops.dagstuhl.de/entities/document/10.4230/LIPIcs.ICALP.2025.5},
  URN =       {urn:nbn:de:0030-drops-233823},
  doi =       {10.4230/LIPIcs.ICALP.2025.5},
  annote =    {Keywords: Regression, Inverse Maintenance, Multiplicative Weights Update}
}