We show that standard extragradient methods (i.e., mirror prox [Arkadi Nemirovski, 2004] and dual extrapolation [Yurii Nesterov, 2007]) recover optimal accelerated rates for first-order minimization of smooth convex functions. To obtain this result, we provide a fine-grained characterization of the convergence rates of extragradient methods for solving monotone variational inequalities in terms of a natural condition we call relative Lipschitzness. We further generalize this framework to handle local and randomized notions of relative Lipschitzness, and thereby recover rates for box-constrained 𝓁_∞ regression based on area convexity [Jonah Sherman, 2017] and complexity bounds achieved by accelerated (randomized) coordinate descent [Zeyuan Allen-Zhu et al., 2016; Yurii Nesterov and Sebastian U. Stich, 2017] for smooth convex function minimization.
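For readers unfamiliar with the extragradient template the abstract refers to, the following is a minimal Euclidean-norm sketch of the mirror prox step applied to a toy bilinear saddle-point problem min_x max_y x^T A y, whose associated monotone operator is g(x, y) = (A y, -A^T x). The problem instance, step size, iteration count, and averaging choices are illustrative assumptions, not the paper's setup or its relative-Lipschitzness analysis.

# Minimal extragradient (Euclidean mirror prox) sketch; all parameters are
# illustrative assumptions, not taken from the paper.
import numpy as np

def extragradient_bilinear(A, steps=1000, eta=0.1):
    m, n = A.shape
    x, y = np.ones(m), np.ones(n)
    x_avg, y_avg = np.zeros(m), np.zeros(n)
    for _ in range(steps):
        # Half step: move from (x, y) along the operator evaluated at (x, y).
        xh = x - eta * (A @ y)
        yh = y + eta * (A.T @ x)
        # Full step: move from (x, y) along the operator evaluated at (xh, yh).
        x = x - eta * (A @ yh)
        y = y + eta * (A.T @ xh)
        # Standard mirror prox guarantees bound the gap at the averaged
        # half-step iterates.
        x_avg += xh / steps
        y_avg += yh / steps
    return x_avg, y_avg

if __name__ == "__main__":
    rng = np.random.default_rng(0)
    A = rng.standard_normal((5, 5))
    x, y = extragradient_bilinear(A)
    # Operator residuals ||A y|| and ||A^T x|| should be small near the saddle point.
    print(np.linalg.norm(A @ y), np.linalg.norm(A.T @ x))

The two-step structure (a half step to probe the operator, then a full step using that probe) is what the paper's relative Lipschitzness condition analyzes; the Euclidean distance and fixed step size above stand in for the general Bregman-divergence setup.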
@InProceedings{cohen_et_al:LIPIcs.ITCS.2021.62,
  author =    {Cohen, Michael B. and Sidford, Aaron and Tian, Kevin},
  title =     {{Relative Lipschitzness in Extragradient Methods and a Direct Recipe for Acceleration}},
  booktitle = {12th Innovations in Theoretical Computer Science Conference (ITCS 2021)},
  pages =     {62:1--62:18},
  series =    {Leibniz International Proceedings in Informatics (LIPIcs)},
  ISBN =      {978-3-95977-177-1},
  ISSN =      {1868-8969},
  year =      {2021},
  volume =    {185},
  editor =    {Lee, James R.},
  publisher = {Schloss Dagstuhl -- Leibniz-Zentrum f{\"u}r Informatik},
  address =   {Dagstuhl, Germany},
  URL =       {https://drops.dagstuhl.de/entities/document/10.4230/LIPIcs.ITCS.2021.62},
  URN =       {urn:nbn:de:0030-drops-136011},
  doi =       {10.4230/LIPIcs.ITCS.2021.62},
  annote =    {Keywords: Variational inequalities, minimax optimization, acceleration, 𝓁\_∞ regression}
}