Factor analysis, principal component analysis, mixtures of Gaussian clusters,
vector quantization, Kalman filter models, and hidden Markov models
can all be unified as variations of unsupervised learning under a single
basic generative model. This is achieved by collecting together disparate
observations and derivations made by many previous authors and introducing
a new way of linking discrete and continuous state models using
a simple nonlinearity. Through the use of other nonlinearities, we show
how independent component analysis is also a variation of the same basic
generative model. We show that factor analysis and mixtures of Gaussians
can be implemented in autoencoder neural networks and learned using
squared error plus the same regularization term. We introduce a new
model for static data, known as sensible principal component analysis,
as well as a novel concept of spatially adaptive observation noise. We also
review some of the literature involving global and local mixtures of the
basic models and provide pseudocode for inference and learning for all
the basic models.
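
To make the unification concrete, here is a minimal sketch of the basic linear Gaussian generative model the review builds on: x_{t+1} = A x_t + w_t with w_t ~ N(0, Q), and y_t = C x_t + v_t with v_t ~ N(0, R). This is not the paper's pseudocode; the function name sample_lgm and all parameter values below are illustrative assumptions.

import numpy as np

def sample_lgm(A, C, Q, R, x0, T, rng=None):
    # Draw T observations y_1..y_T from a linear Gaussian state-space model.
    rng = rng if rng is not None else np.random.default_rng(0)
    k, p = A.shape[0], C.shape[0]
    x, ys = np.asarray(x0, dtype=float), []
    for _ in range(T):
        ys.append(C @ x + rng.multivariate_normal(np.zeros(p), R))   # y_t = C x_t + v_t
        x = A @ x + rng.multivariate_normal(np.zeros(k), Q)          # x_{t+1} = A x_t + w_t
    return np.array(ys)

# Illustrative dimensions and parameters (assumptions, not taken from the paper):
k, p, T = 2, 4, 100
A = 0.9 * np.eye(k)                              # state dynamics; set A = 0 for the static models
C = np.array([[1.0, 0.0], [0.0, 1.0],
              [1.0, 1.0], [1.0, -1.0]])          # observation (loading) matrix
Q = np.eye(k)                                    # state noise covariance
R = 0.1 * np.eye(p)                              # observation noise covariance
Y = sample_lgm(A, C, Q, R, x0=np.zeros(k), T=T)  # Y has shape (T, p)

Setting A = 0 recovers the static models (factor analysis with diagonal R, sensible PCA with R proportional to the identity, and PCA in the zero-noise limit), while keeping the dynamics gives the Kalman filter model; the "simple nonlinearity" the abstract mentions is a winner-take-all operation on the state, which converts these continuous-state models into mixtures of Gaussians, vector quantization, and hidden Markov models.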
@article{roweis1999unifying,
author = {Roweis, S. and Ghahramani, Z.},
journal = {Neural Computation},
keywords = {HiddenMarkovFields assimilation bayes bayesian ensembles kalmanfilter statistics uncertainty},
number = 2,
pages = {305--345},
publisher = {MIT Press},
title = {{A unifying review of linear Gaussian models}},
url = {http://www.mitpressjournals.org/doi/abs/10.1162/089976699300016674},
volume = 11,
year = 1999
}