The usual approach to developing and analyzing first-order methods for
non-smooth (stochastic or deterministic) convex optimization assumes that the
objective function is uniformly Lipschitz continuous with parameter $M_f$.
However, in many settings the non-differentiable convex function $f(\cdot)$ is
not uniformly Lipschitz continuous -- for example (i) the classical support
vector machine (SVM) problem, (ii) the problem of minimizing the maximum of
convex quadratic functions, and even (iii) the univariate setting with $f(x) :=
\max\0, x\ + x^2$. Herein we develop a notion of "relative continuity" that
is determined relative to a user-specified "reference function" $h(\cdot)$
(that should be computationally tractable for algorithms), and we show that
many non-differentiable convex functions are relatively continuous with respect
to a correspondingly fairly simple reference function $h(\cdot)$. We also
similarly develop a notion of "relative stochastic continuity" for the
stochastic setting. We analyze two standard algorithms -- the (deterministic)
mirror descent algorithm and the stochastic mirror descent algorithm -- for
solving optimization problems in these two new settings, and we develop for the
first time computational guarantees for instances where the objective function
is not uniformly Lipschitz continuous. This paper is a companion paper for
non-differentiable convex optimization to the recent paper by Lu, Freund, and
Nesterov, which developed similar sorts of results for differentiable convex
optimization.
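To make the setting concrete, here is a minimal Python sketch of a plain mirror descent loop, run on the univariate example f(x) = max{0, x} + x^2 from the abstract. The reference function h, the step-size rule, and every name in the snippet are illustrative assumptions for this note, not the exact scheme, constants, or guarantees developed in the paper.

import numpy as np

# Minimal illustrative sketch (not the paper's exact scheme) of mirror descent,
#   x_{k+1} = (grad h)^{-1}( grad h(x_k) - t_k * g_k ),  g_k a subgradient of f,
# applied to f(x) = max{0, x} + x^2, which is convex but not uniformly
# Lipschitz on R because the magnitude of its subgradients grows like 2|x|.
# Assumptions for illustration only: the reference function h(x) = x^2/2 + x^4/4,
# chosen so that h's curvature grows roughly like the square of f's subgradient
# magnitude, and the diminishing step sizes t_k = 1/sqrt(k).

def f(x):
    return max(0.0, x) + x ** 2

def subgrad_f(x):
    # one valid subgradient of max{0, x} + x^2
    return (1.0 if x > 0.0 else 0.0) + 2.0 * x

def grad_h(x):
    # gradient of the reference function h(x) = x^2/2 + x^4/4
    return x + x ** 3

def grad_h_inv(z):
    # invert z = x + x^3; x^3 + x - z is strictly increasing in x, so it has
    # exactly one real root, which we pick out of np.roots' output
    roots = np.roots([1.0, 0.0, 1.0, -float(z)])
    return float(roots[np.abs(roots.imag) < 1e-8].real[0])

def mirror_descent(x0, num_iters=5000):
    x = float(x0)
    best_x, best_f = x, f(x)
    for k in range(1, num_iters + 1):
        g = subgrad_f(x)
        t = 1.0 / np.sqrt(k)                  # illustrative step size
        x = grad_h_inv(grad_h(x) - t * g)     # mirror descent update
        if f(x) < best_f:
            best_x, best_f = x, f(x)
    return best_x, best_f

if __name__ == "__main__":
    x_best, f_best = mirror_descent(x0=5.0)
    # the true minimizer is x = 0 with f(0) = 0
    print(f"best iterate: {x_best:.5f}   f(best): {f_best:.6f}")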
Description
[1710.04718] "Relative-Continuity" for Non-Lipschitz Non-Smooth Convex Optimization using Stochastic (or Deterministic) Mirror Descent
@article{lu2017relativecontinuity,
added-at = {2019-10-09T01:56:31.000+0200},
author = {Lu, Haihao},
biburl = {https://www.bibsonomy.org/bibtex/2e562a46f27ed376db7f759962b4c4ae8/kirk86},
description = {[1710.04718] "Relative-Continuity" for Non-Lipschitz Non-Smooth Convex Optimization using Stochastic (or Deterministic) Mirror Descent},
interhash = {3736d75599f4e01abc21b37d4e5fe945},
intrahash = {e562a46f27ed376db7f759962b4c4ae8},
keywords = {mathematics optimization},
note = {cite arxiv:1710.04718},
timestamp = {2019-10-09T01:56:31.000+0200},
title = {"Relative-Continuity" for Non-Lipschitz Non-Smooth Convex Optimization
using Stochastic (or Deterministic) Mirror Descent},
url = {http://arxiv.org/abs/1710.04718},
year = 2017
}