We study learnability in the online learning model. We define several
complexity measures which capture the difficulty of learning in a sequential
manner. Among these measures are analogues of Rademacher complexity, covering
numbers and fat shattering dimension from statistical learning theory.
Relationships among these complexity measures, their connection to online
learning, and tools for bounding them are provided. In the setting of
supervised learning, finiteness of the introduced scale-sensitive parameters is
shown to be equivalent to learnability. The complexities we define also ensure
uniform convergence for non-i.i.d. data, extending the uniform Glivenko-Cantelli
type results. We conclude by showing online learnability for an array of
examples.
Description
Online Learning: Random Averages, Combinatorial Parameters, and
Learnability
%0 Generic
%1 rakhlin2010online
%A Rakhlin, Alexander
%A Sridharan, Karthik
%A Tewari, Ambuj
%D 2010
%K Learnability learning online
%T Online Learning: Random Averages, Combinatorial Parameters, and
Learnability
%U http://arxiv.org/abs/1006.1138
%X We study learnability in the online learning model. We define several
complexity measures which capture the difficulty of learning in a sequential
manner. Among these measures are analogues of Rademacher complexity, covering
numbers and fat shattering dimension from statistical learning theory.
Relationships among these complexity measures, their connection to online
learning, and tools for bounding them are provided. In the setting of
supervised learning, finiteness of the introduced scale-sensitive parameters is
shown to be equivalent to learnability. The complexities we define also ensure
uniform convergence for non-i.i.d. data, extending the uniform Glivenko-Cantelli
type results. We conclude by showing online learnability for an array of
examples.
@misc{rakhlin2010online,
  abstract      = {We study learnability in the online learning model. We define several complexity measures which capture the difficulty of learning in a sequential manner. Among these measures are analogues of Rademacher complexity, covering numbers and fat shattering dimension from statistical learning theory. Relationships among these complexity measures, their connection to online learning, and tools for bounding them are provided. In the setting of supervised learning, finiteness of the introduced scale-sensitive parameters is shown to be equivalent to learnability. The complexities we define also ensure uniform convergence for non-i.i.d. data, extending the uniform Glivenko-Cantelli type results. We conclude by showing online learnability for an array of examples.},
  added-at      = {2013-05-09T12:19:56.000+0200},
  archiveprefix = {arXiv},
  author        = {Rakhlin, Alexander and Sridharan, Karthik and Tewari, Ambuj},
  biburl        = {https://www.bibsonomy.org/bibtex/2ab22f66a1a26d003133bacadb38ee14b/sidyr},
  description   = {Online Learning: Random Averages, Combinatorial Parameters, and Learnability},
  eprint        = {1006.1138},
  interhash     = {097dcadab4d27f36bcd75b3034e9a00c},
  intrahash     = {ab22f66a1a26d003133bacadb38ee14b},
  keywords      = {Learnability learning online},
  note          = {cite arxiv:1006.1138},
  timestamp     = {2013-05-09T12:19:56.000+0200},
  title         = {Online Learning: Random Averages, Combinatorial Parameters, and Learnability},
  url           = {http://arxiv.org/abs/1006.1138},
  year          = {2010},
}