Deep neural networks require large training sets, and training on them incurs
high computational cost and long training times. Training on much smaller sets
while maintaining nearly the same accuracy would therefore be highly
beneficial. In the few-shot learning setting, a model must learn a new class
from only a small number of samples of that class. One-shot learning is an
extreme form of few-shot learning in which the model must learn a new class
from a single example. We propose the `less than one'-shot learning task, in
which models must learn $N$ new classes given only $M<N$ examples, and we show
that this is achievable with the help of soft labels. We use a soft-label
generalization of the k-Nearest Neighbors classifier to explore the intricate
decision landscapes that can be created in the `less than one'-shot learning
setting. We analyze these decision landscapes to derive theoretical lower
bounds for separating $N$ classes using $M<N$ soft-label samples, and we
investigate the robustness of the resulting systems.
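
To make the soft-label mechanism concrete, below is a minimal sketch of a
soft-label k-Nearest Neighbors classifier in the spirit of the one described
above. The inverse-distance weighting, the prototype locations, and the
soft-label values are illustrative assumptions, not details taken from the
paper.

import numpy as np

def soft_label_knn_predict(queries, prototypes, soft_labels, k=2, eps=1e-9):
    # Combine the soft labels of the k nearest prototypes, weighting
    # each by inverse distance, and predict the class with the largest
    # combined score. (The weighting scheme is an assumption here.)
    preds = []
    for x in np.atleast_2d(queries):
        dists = np.linalg.norm(prototypes - x, axis=1)
        nearest = np.argsort(dists)[:k]
        weights = 1.0 / (dists[nearest] + eps)      # closer prototypes count more
        combined = weights @ soft_labels[nearest]   # blend the label distributions
        preds.append(int(np.argmax(combined)))
    return np.array(preds)

# Toy 'less than one'-shot setup: M=2 soft-label prototypes, N=3 classes.
# The locations and soft labels below are illustrative, not from the paper.
prototypes = np.array([[0.0], [1.0]])
soft_labels = np.array([
    [0.6, 0.4, 0.0],   # prototype at x=0: mostly class 0, partly class 1
    [0.0, 0.4, 0.6],   # prototype at x=1: mostly class 2, partly class 1
])
queries = np.array([[0.1], [0.5], [0.9]])
print(soft_label_knn_predict(queries, prototypes, soft_labels, k=2))
# -> [0 1 2]: three classes recovered from only two samples

Under this weighting, the interval between the two prototypes splits into
three decision regions (class 0 for $x < 1/3$, class 1 in the middle, class 2
for $x > 2/3$): an instance of the $M<N$ separation the abstract describes.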
@article{sucholutsky2020oneshot,
author = {Sucholutsky, Ilia and Schonlau, Matthias},
keywords = {few-shot learning},
note = {arXiv:2009.08449},
title = {'Less Than One'-Shot Learning: Learning N Classes From M<N Samples},
url = {http://arxiv.org/abs/2009.08449},
year = 2020
}