In Valiant's model of evolution, a class of representations is evolvable iff
a polynomial-time process of random mutations guided by selection converges
with high probability to a representation as $\epsilon$-close as desired from
the optimal one, for any required $\epsilon>0$. Several previous positive
results exist that can be related to evolving a vector space, but each former
result imposes disproportionate representations or restrictions on
(re)initialisations, distributions, performance functions and/or the mutator.
In this paper, we show that all it takes to evolve a normed vector space is
merely a set that generates the space. Furthermore, it takes only
$\tilde{O}(1/\epsilon^2)$ steps and it is essentially stable, agnostic and
handles target drifts that rival some proven in fairly restricted settings. Our
algorithm can be viewed as a close relative to a popular fifty-years old
gradient-free optimization method for which little is still known from the
convergence standpoint: Nelder-Mead simplex method.
Description
[1704.02708] Evolving a Vector Space with any Generating Set
%0 Journal Article
%1 nock2017evolving
%A Nock, Richard
%A Nielsen, Frank
%D 2017
%K learning sets
%T Evolving a Vector Space with any Generating Set
%U http://arxiv.org/abs/1704.02708
%X In Valiant's model of evolution, a class of representations is evolvable iff
a polynomial-time process of random mutations guided by selection converges
with high probability to a representation as $\epsilon$-close as desired from
the optimal one, for any required $\epsilon>0$. Several previous positive
results exist that can be related to evolving a vector space, but each former
result imposes disproportionate representations or restrictions on
(re)initialisations, distributions, performance functions and/or the mutator.
In this paper, we show that all it takes to evolve a normed vector space is
merely a set that generates the space. Furthermore, it takes only
$\tilde{O}(1/\epsilon^2)$ steps and it is essentially stable, agnostic and
handles target drifts that rival some proven in fairly restricted settings. Our
algorithm can be viewed as a close relative to a popular fifty-years old
gradient-free optimization method for which little is still known from the
convergence standpoint: Nelder-Mead simplex method.
@article{nock2017evolving,
  abstract      = {In Valiant's model of evolution, a class of representations is evolvable iff
a polynomial-time process of random mutations guided by selection converges
with high probability to a representation as $\epsilon$-close as desired from
the optimal one, for any required $\epsilon>0$. Several previous positive
results exist that can be related to evolving a vector space, but each former
result imposes disproportionate representations or restrictions on
(re)initialisations, distributions, performance functions and/or the mutator.
In this paper, we show that all it takes to evolve a normed vector space is
merely a set that generates the space. Furthermore, it takes only
$\tilde{O}(1/\epsilon^2)$ steps and it is essentially stable, agnostic and
handles target drifts that rival some proven in fairly restricted settings. Our
algorithm can be viewed as a close relative to a popular fifty-years old
gradient-free optimization method for which little is still known from the
convergence standpoint: Nelder-Mead simplex method.},
  added-at      = {2019-12-11T14:31:27.000+0100},
  archiveprefix = {arXiv},
  author        = {Nock, Richard and Nielsen, Frank},
  biburl        = {https://www.bibsonomy.org/bibtex/299f2898e63642437a2f4824f5e2a0cf5/kirk86},
  description   = {[1704.02708] Evolving a Vector Space with any Generating Set},
  eprint        = {1704.02708},
  interhash     = {645009aeae487d7f997957e16e886e68},
  intrahash     = {99f2898e63642437a2f4824f5e2a0cf5},
  keywords      = {learning sets},
  note          = {cite arxiv:1704.02708},
  timestamp     = {2019-12-11T14:31:27.000+0100},
  title         = {Evolving a Vector Space with any Generating Set},
  url           = {http://arxiv.org/abs/1704.02708},
  year          = {2017}
}