We consider differentiable games where the goal is to find a Nash
equilibrium. The machine learning community has recently started using variants
of the gradient method (GD). Prime examples are extragradient (EG), the
optimistic gradient method (OG) and consensus optimization (CO), which enjoy
linear convergence in cases like bilinear games, where the standard GD fails.
The full benefits of these relatively new methods are not known as there is no
unified analysis for both strongly monotone and bilinear games. We provide new
analyses of the EG's local and global convergence properties and use it to get
a tighter global convergence rate for OG and CO. Our analysis covers the whole
range of settings between bilinear and strongly monotone games. It reveals that
these methods converge via different mechanisms at these extremes; in between,
it exploits the most favorable mechanism for the given problem. We then prove
that EG achieves the optimal rate for a wide class of algorithms with any
number of extrapolations. Our tight analysis of EG's convergence rate in games
shows that, unlike in convex minimization, EG may be much faster than GD.
Description
[1906.05945] A Unified Analysis of Gradient-Based Methods for a Whole Spectrum of Games
%0 Journal Article
%1 azizian2019unified
%A Azizian, Waïss
%A Mitliagkas, Ioannis
%A Lacoste-Julien, Simon
%A Gidel, Gauthier
%D 2019
%K game-theory generative-models optimization readings
%T A Unified Analysis of Gradient-Based Methods for a Whole Spectrum of
Games
%U http://arxiv.org/abs/1906.05945
%X We consider differentiable games where the goal is to find a Nash
equilibrium. The machine learning community has recently started using variants
of the gradient method (GD). Prime examples are extragradient (EG), the
optimistic gradient method (OG) and consensus optimization (CO), which enjoy
linear convergence in cases like bilinear games, where the standard GD fails.
The full benefits of these relatively new methods are not known as there is no
unified analysis for both strongly monotone and bilinear games. We provide new
analyses of the EG's local and global convergence properties and use it to get
a tighter global convergence rate for OG and CO. Our analysis covers the whole
range of settings between bilinear and strongly monotone games. It reveals that
these methods converge via different mechanisms at these extremes; in between,
it exploits the most favorable mechanism for the given problem. We then prove
that EG achieves the optimal rate for a wide class of algorithms with any
number of extrapolations. Our tight analysis of EG's convergence rate in games
shows that, unlike in convex minimization, EG may be much faster than GD.
@comment{arXiv preprint; identifier moved from the legacy `note = {cite arxiv:...}`
         into proper eprint/archiveprefix fields. BibSonomy bookkeeping fields
         (added-at, biburl, interhash, intrahash, timestamp) kept verbatim.}
@article{azizian2019unified,
  abstract      = {We consider differentiable games where the goal is to find a Nash
equilibrium. The machine learning community has recently started using variants
of the gradient method (GD). Prime examples are extragradient (EG), the
optimistic gradient method (OG) and consensus optimization (CO), which enjoy
linear convergence in cases like bilinear games, where the standard GD fails.
The full benefits of these relatively new methods are not known as there is no
unified analysis for both strongly monotone and bilinear games. We provide new
analyses of the EG's local and global convergence properties and use it to get
a tighter global convergence rate for OG and CO. Our analysis covers the whole
range of settings between bilinear and strongly monotone games. It reveals that
these methods converge via different mechanisms at these extremes; in between,
it exploits the most favorable mechanism for the given problem. We then prove
that EG achieves the optimal rate for a wide class of algorithms with any
number of extrapolations. Our tight analysis of EG's convergence rate in games
shows that, unlike in convex minimization, EG may be much faster than GD.},
  added-at      = {2020-01-09T18:33:15.000+0100},
  archiveprefix = {arXiv},
  author        = {Azizian, Waïss and Mitliagkas, Ioannis and Lacoste-Julien, Simon and Gidel, Gauthier},
  biburl        = {https://www.bibsonomy.org/bibtex/2c9ed916f2f1d1ce2f7ce5419d78367f6/kirk86},
  description   = {[1906.05945] A Unified Analysis of Gradient-Based Methods for a Whole Spectrum of Games},
  eprint        = {1906.05945},
  interhash     = {267f63c0676ef2e933fdf48f158d877a},
  intrahash     = {c9ed916f2f1d1ce2f7ce5419d78367f6},
  keywords      = {game-theory generative-models optimization readings},
  timestamp     = {2020-01-09T18:33:54.000+0100},
  title         = {A Unified Analysis of Gradient-Based Methods for a Whole Spectrum of Games},
  url           = {http://arxiv.org/abs/1906.05945},
  year          = {2019},
}