Representing examples in a way that is compatible with the underlying
classifier can greatly enhance the performance of a learning system. In this
paper we investigate scalable techniques for inducing discriminative features
by taking advantage of simple second order structure in the data. We focus on
multiclass classification and show that features extracted from the generalized
eigenvectors of the class conditional second moments lead to classifiers with
excellent empirical performance. Moreover, these features have attractive
theoretical properties, such as inducing representations that are invariant to
linear transformations of the input. We evaluate classifiers built from these
features on three different tasks, obtaining state-of-the-art results.
Description
Discriminative Features via Generalized Eigenvectors
%0 Generic
%1 karampatziakis2013discriminative
%A Karampatziakis, Nikos
%A Mineiro, Paul
%D 2013
%K analysis eigenvector feature kallimachos
%T Discriminative Features via Generalized Eigenvectors
%U http://arxiv.org/abs/1310.1934
%X Representing examples in a way that is compatible with the underlying
classifier can greatly enhance the performance of a learning system. In this
paper we investigate scalable techniques for inducing discriminative features
by taking advantage of simple second order structure in the data. We focus on
multiclass classification and show that features extracted from the generalized
eigenvectors of the class conditional second moments lead to classifiers with
excellent empirical performance. Moreover, these features have attractive
theoretical properties, such as inducing representations that are invariant to
linear transformations of the input. We evaluate classifiers built from these
features on three different tasks, obtaining state-of-the-art results.
@misc{karampatziakis2013discriminative,
  abstract      = {Representing examples in a way that is compatible with the underlying
classifier can greatly enhance the performance of a learning system. In this
paper we investigate scalable techniques for inducing discriminative features
by taking advantage of simple second order structure in the data. We focus on
multiclass classification and show that features extracted from the generalized
eigenvectors of the class conditional second moments lead to classifiers with
excellent empirical performance. Moreover, these features have attractive
theoretical properties, such as inducing representations that are invariant to
linear transformations of the input. We evaluate classifiers built from these
features on three different tasks, obtaining state of the art results.},
  added-at      = {2015-03-03T18:19:43.000+0100},
  archiveprefix = {arXiv},
  author        = {Karampatziakis, Nikos and Mineiro, Paul},
  biburl        = {https://www.bibsonomy.org/bibtex/247512dd90370c769bfd328d8fd8179ef/hotho},
  description   = {Discriminative Features via Generalized Eigenvectors},
  eprint        = {1310.1934},
  interhash     = {befee5ff60893632b4a38edb54e7c975},
  intrahash     = {47512dd90370c769bfd328d8fd8179ef},
  keywords      = {analysis eigenvector feature kallimachos},
  note          = {cite arxiv:1310.1934},
  timestamp     = {2015-03-03T18:19:43.000+0100},
  title         = {Discriminative Features via Generalized Eigenvectors},
  url           = {http://arxiv.org/abs/1310.1934},
  year          = {2013},
}