We study the first-order convex optimization problem, where we have black-box access to a (not necessarily smooth) function f:ℝⁿ → ℝ and its (sub)gradient. Our goal is to find an ε-approximate minimum of f starting from a point that is distance at most R from the true minimum. If f is G-Lipschitz, then the classic gradient descent algorithm solves this problem with O((GR/ε)²) queries. Importantly, the number of queries is independent of the dimension n, and gradient descent is optimal in this regard: no deterministic or randomized algorithm can achieve a better query complexity that remains independent of the dimension n. In this paper we reprove the randomized lower bound of Ω((GR/ε)²) using a simpler argument than previous lower bounds. We then show that although the function family used in this lower bound is hard for randomized algorithms, it can be solved using O(GR/ε) quantum queries. Using a different family of instances, we establish an improved lower bound against quantum algorithms and prove our main result: in general, even quantum algorithms need Ω((GR/ε)²) queries to solve the problem. Hence there is no quantum speedup over gradient descent for black-box first-order convex optimization without further assumptions on the function family.
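For concreteness, here is a minimal sketch (ours, not from the paper) of the classic subgradient method the abstract refers to, run for T = ⌈(GR/ε)²⌉ queries with the standard fixed step size η = R/(G√T), which yields the guarantee f(x̄) − f* ≤ GR/√T ≤ ε for the averaged iterate x̄. The example oracle f(x) = G·‖x‖₂ and all names below are illustrative assumptions, not anything defined in the paper.

```python
import numpy as np

def subgradient_descent(subgrad, x0, G, R, eps):
    """Sketch of the classic subgradient method for a convex, G-Lipschitz f
    whose minimizer lies within distance R of x0 (assumptions as in the abstract)."""
    T = int(np.ceil((G * R / eps) ** 2))   # query budget: O((GR/eps)^2)
    eta = R / (G * np.sqrt(T))             # standard fixed step size
    x = np.asarray(x0, dtype=float)
    avg = np.zeros_like(x)
    for _ in range(T):
        avg += x
        x = x - eta * subgrad(x)           # one (sub)gradient query per step
    return avg / T                         # averaged iterate: f(avg/T) - f* <= GR/sqrt(T) <= eps

# Illustrative non-smooth instance: f(x) = G * ||x||_2, minimized at the origin.
def subgrad_example(x, G=1.0):
    n = np.linalg.norm(x)
    return G * x / n if n > 0 else np.zeros_like(x)   # a valid subgradient everywhere

x0 = np.array([0.6, -0.8])                            # distance 1.0 = R from the minimum
x_hat = subgradient_descent(subgrad_example, x0, G=1.0, R=1.0, eps=0.1)
print(np.linalg.norm(x_hat))                          # f(x_hat) - f* is at most eps = 0.1
```

Averaging the iterates (rather than returning the last one) is what the standard analysis of the non-smooth case bounds; the last iterate alone need not satisfy the ε guarantee.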
@InProceedings{garg_et_al:LIPIcs.ITCS.2021.53,
  author    = {Garg, Ankit and Kothari, Robin and Netrapalli, Praneeth and Sherif, Suhail},
  title     = {{No Quantum Speedup over Gradient Descent for Non-Smooth Convex Optimization}},
  booktitle = {12th Innovations in Theoretical Computer Science Conference (ITCS 2021)},
  pages     = {53:1--53:20},
  series    = {Leibniz International Proceedings in Informatics (LIPIcs)},
  ISBN      = {978-3-95977-177-1},
  ISSN      = {1868-8969},
  year      = {2021},
  volume    = {185},
  editor    = {Lee, James R.},
  publisher = {Schloss Dagstuhl -- Leibniz-Zentrum f{\"u}r Informatik},
  address   = {Dagstuhl, Germany},
  URL       = {https://drops.dagstuhl.de/entities/document/10.4230/LIPIcs.ITCS.2021.53},
  URN       = {urn:nbn:de:0030-drops-135921},
  doi       = {10.4230/LIPIcs.ITCS.2021.53},
  annote    = {Keywords: Quantum algorithms, Gradient descent, Convex optimization}
}