High-dimensional data can be converted to low-dimensional codes by training a multilayer neural network with a small central layer to reconstruct high-dimensional input vectors. Gradient descent can be used for fine-tuning the weights in such "autoencoder" networks, but this works well only if the initial weights are close to a good solution. We describe an effective way of initializing the weights that allows deep autoencoder networks to learn low-dimensional codes that work much better than principal components analysis as a tool to reduce the dimensionality of data.
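As an illustration of the idea summarized in the abstract, below is a minimal, self-contained sketch of an autoencoder with a small central code layer trained by plain gradient descent on synthetic data. The layer sizes, learning rate, synthetic data, and use of NumPy are assumptions chosen for illustration; the paper's layer-wise pretraining procedure for initializing deep networks is not reproduced here.

import numpy as np

# Minimal single-hidden-layer autoencoder trained by plain gradient descent.
# This only illustrates the "small central layer" idea from the abstract;
# the paper's layer-wise pretraining for deep networks is not reproduced here.

rng = np.random.default_rng(0)
n_samples, n_features, n_code = 256, 64, 8      # hypothetical sizes

X = rng.normal(size=(n_samples, n_features))    # stand-in data
W_enc = rng.normal(scale=0.1, size=(n_features, n_code))
b_enc = np.zeros(n_code)
W_dec = rng.normal(scale=0.1, size=(n_code, n_features))
b_dec = np.zeros(n_features)

lr = 1e-2
for step in range(500):
    # Forward pass: encode into the small central code, then decode.
    H = np.tanh(X @ W_enc + b_enc)              # codes, shape (n_samples, n_code)
    X_hat = H @ W_dec + b_dec                   # linear reconstruction
    err = X_hat - X                             # reconstruction error

    # Backward pass for the mean-squared reconstruction loss.
    grad_W_dec = H.T @ err / n_samples
    grad_b_dec = err.mean(axis=0)
    dH = (err @ W_dec.T) * (1 - H**2)           # tanh derivative
    grad_W_enc = X.T @ dH / n_samples
    grad_b_enc = dH.mean(axis=0)

    # Gradient descent update.
    W_dec -= lr * grad_W_dec; b_dec -= lr * grad_b_dec
    W_enc -= lr * grad_W_enc; b_enc -= lr * grad_b_enc

codes = np.tanh(X @ W_enc + b_enc)              # low-dimensional codes
print("final reconstruction MSE:", float((err**2).mean()))

Running the sketch yields a low-dimensional code matrix of shape (256, 8) and a decreasing reconstruction error, which is the role played by the small central layer described above.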
Description
Reducing the dimensionality of data with neural ne...[Science. 2006] - PubMed Result
%0 Journal Article
%1 HintonSalakhutdinov2006b
%A Hinton, G E
%A Salakhutdinov, R R
%D 2006
%J Science
%K dimensionalityreduction neuralnetworks parameterestimation
%N 5786
%P 504-507
%R 10.1126/science.1127647
%T Reducing the dimensionality of data with neural networks
%U http://www.ncbi.nlm.nih.gov/sites/entrez?db=pubmed&uid=16873662&cmd=showdetailview&indexed=google
%V 313
%X High-dimensional data can be converted to low-dimensional codes by training a multilayer neural network with a small central layer to reconstruct high-dimensional input vectors. Gradient descent can be used for fine-tuning the weights in such "autoencoder" networks, but this works well only if the initial weights are close to a good solution. We describe an effective way of initializing the weights that allows deep autoencoder networks to learn low-dimensional codes that work much better than principal components analysis as a tool to reduce the dimensionality of data.
@article{HintonSalakhutdinov2006b,
abstract = {High-dimensional data can be converted to low-dimensional codes by training a multilayer neural network with a small central layer to reconstruct high-dimensional input vectors. Gradient descent can be used for fine-tuning the weights in such "autoencoder" networks, but this works well only if the initial weights are close to a good solution. We describe an effective way of initializing the weights that allows deep autoencoder networks to learn low-dimensional codes that work much better than principal components analysis as a tool to reduce the dimensionality of data.},
added-at = {2008-07-15T10:05:18.000+0200},
author = {Hinton, G E and Salakhutdinov, R R},
biburl = {https://www.bibsonomy.org/bibtex/2135bbce97b449ddf5fca7be88102b53c/tmalsburg},
description = {Reducing the dimensionality of data with neural ne...[Science. 2006] - PubMed Result},
doi = {10.1126/science.1127647},
interhash = {019918b82518b74f443a22dc58a0117f},
intrahash = {135bbce97b449ddf5fca7be88102b53c},
journal = {Science},
keywords = {dimensionalityreduction neuralnetworks parameterestimation},
month = jul,
number = 5786,
pages = {504--507},
pmid = {16873662},
timestamp = {2008-07-15T10:05:18.000+0200},
title = {Reducing the dimensionality of data with neural networks},
url = {http://www.ncbi.nlm.nih.gov/sites/entrez?db=pubmed&uid=16873662&cmd=showdetailview&indexed=google},
volume = 313,
year = 2006
}