The goal of the paper is to design sequential strategies which lead to
efficient optimization of an unknown function under the only assumption that it
has a finite Lipschitz constant. We first identify sufficient conditions for
the consistency of generic sequential algorithms and formulate the expected
minimax rate for their performance. We introduce and analyze a first algorithm
called LIPO which assumes the Lipschitz constant to be known. Consistency,
minimax rates for LIPO are proved, as well as fast rates under an additional
Hölder like condition. An adaptive version of LIPO is also introduced for the
more realistic setup where the Lipschitz constant is unknown and has to be
estimated along with the optimization. Similar theoretical guarantees are shown
to hold for the adaptive LIPO algorithm and a numerical assessment is provided
at the end of the paper to illustrate the potential of this strategy with
respect to state-of-the-art methods over typical benchmark problems for global
optimization.
Description
[1703.02628] Global optimization of Lipschitz functions
%0 Generic
%1 malherbe2017global
%A Malherbe, Cédric
%A Vayatis, Nicolas
%D 2017
%K 2017 arxiv icml machine-learning optimization
%T Global optimization of Lipschitz functions
%U http://arxiv.org/abs/1703.02628
%X The goal of the paper is to design sequential strategies which lead to
efficient optimization of an unknown function under the only assumption that it
has a finite Lipschitz constant. We first identify sufficient conditions for
the consistency of generic sequential algorithms and formulate the expected
minimax rate for their performance. We introduce and analyze a first algorithm
called LIPO which assumes the Lipschitz constant to be known. Consistency,
minimax rates for LIPO are proved, as well as fast rates under an additional
Hölder like condition. An adaptive version of LIPO is also introduced for the
more realistic setup where the Lipschitz constant is unknown and has to be
estimated along with the optimization. Similar theoretical guarantees are shown
to hold for the adaptive LIPO algorithm and a numerical assessment is provided
at the end of the paper to illustrate the potential of this strategy with
respect to state-of-the-art methods over typical benchmark problems for global
optimization.
@misc{malherbe2017global,
  abstract      = {The goal of the paper is to design sequential strategies which lead to
efficient optimization of an unknown function under the only assumption that it
has a finite Lipschitz constant. We first identify sufficient conditions for
the consistency of generic sequential algorithms and formulate the expected
minimax rate for their performance. We introduce and analyze a first algorithm
called LIPO which assumes the Lipschitz constant to be known. Consistency,
minimax rates for LIPO are proved, as well as fast rates under an additional
H{\"o}lder like condition. An adaptive version of LIPO is also introduced for the
more realistic setup where the Lipschitz constant is unknown and has to be
estimated along with the optimization. Similar theoretical guarantees are shown
to hold for the adaptive LIPO algorithm and a numerical assessment is provided
at the end of the paper to illustrate the potential of this strategy with
respect to state-of-the-art methods over typical benchmark problems for global
optimization.},
  added-at      = {2018-01-02T16:41:48.000+0100},
  archiveprefix = {arXiv},
  author        = {Malherbe, C{\'e}dric and Vayatis, Nicolas},
  biburl        = {https://www.bibsonomy.org/bibtex/2cbd925d9fad6c3efeb60a2d0f2a3a7c4/achakraborty},
  description   = {[1703.02628] Global optimization of Lipschitz functions},
  eprint        = {1703.02628},
  interhash     = {7a3ec636439db52260d9fb784b750cf9},
  intrahash     = {cbd925d9fad6c3efeb60a2d0f2a3a7c4},
  keywords      = {2017 arxiv icml machine-learning optimization},
  timestamp     = {2018-01-02T16:41:48.000+0100},
  title         = {Global Optimization of {Lipschitz} Functions},
  url           = {http://arxiv.org/abs/1703.02628},
  year          = {2017},
}