Backpropagation is often viewed as a method for adapting artificial neural networks to classify patterns. Based on parts of the book by Rumelhart and colleagues, many authors equate backpropagation with the generalized delta rule applied to fully-connected feedforward networks. This paper will summarize a more general formulation of backpropagation, developed in 1974, which does more justice to the roots of the method in numerical analysis and statistics, and also does more justice to creative approaches expressed by neural modelers in the past year or two. It will discuss applications of backpropagation to forecasting over time (where errors have been halved by using methods other than least squares), to optimization, to sensitivity analysis, and to brain research. This paper will go on to derive a generalization of backpropagation to recurrent systems (which input their own output), such as hybrids of perceptron-style networks and Grossberg/Hopfield networks. Unlike the proposal of Rumelhart, Hinton, and Williams, this generalization does not require the storage of intermediate iterations to deal with continuous recurrence. This generalization was applied in 1981 to a model of natural gas markets, where it located sources of forecast uncertainty related to the use of least squares to estimate the model parameters in the first place.
Description
Generalization of backpropagation with application to a recurrent gas market model - ScienceDirect
%0 Journal Article
%1 WERBOS1988339
%A Werbos, Paul J.
%D 1988
%J Neural Networks
%K Backpropagationthroughtime sem_wise2223
%N 4
%P 339-356
%R 10.1016/0893-6080(88)90007-X
%T Generalization of backpropagation with application to a recurrent gas market model
%U https://www.sciencedirect.com/science/article/pii/089360808890007X
%V 1
%X Backpropagation is often viewed as a method for adapting artificial neural networks to classify patterns. Based on parts of the book by Rumelhart and colleagues, many authors equate backpropagation with the generalized delta rule applied to fully-connected feedforward networks. This paper will summarize a more general formulation of backpropagation, developed in 1974, which does more justice to the roots of the method in numerical analysis and statistics, and also does more justice to creative approaches expressed by neural modelers in the past year or two. It will discuss applications of backpropagation to forecasting over time (where errors have been halved by using methods other than least squares), to optimization, to sensitivity analysis, and to brain research. This paper will go on to derive a generalization of backpropagation to recurrent systems (which input their own output), such as hybrids of perceptron-style networks and Grossberg/Hopfield networks. Unlike the proposal of Rumelhart, Hinton, and Williams, this generalization does not require the storage of intermediate iterations to deal with continuous recurrence. This generalization was applied in 1981 to a model of natural gas markets, where it located sources of forecast uncertainty related to the use of least squares to estimate the model parameters in the first place.
@article{WERBOS1988339,
  abstract    = {Backpropagation is often viewed as a method for adapting artificial neural networks to classify patterns. Based on parts of the book by Rumelhart and colleagues, many authors equate backpropagation with the generalized delta rule applied to fully-connected feedforward networks. This paper will summarize a more general formulation of backpropagation, developed in 1974, which does more justice to the roots of the method in numerical analysis and statistics, and also does more justice to creative approaches expressed by neural modelers in the past year or two. It will discuss applications of backpropagation to forecasting over time (where errors have been halved by using methods other than least squares), to optimization, to sensitivity analysis, and to brain research. This paper will go on to derive a generalization of backpropagation to recurrent systems (which input their own output), such as hybrids of perceptron-style networks and Grossberg/Hopfield networks. Unlike the proposal of Rumelhart, Hinton, and Williams, this generalization does not require the storage of intermediate iterations to deal with continuous recurrence. This generalization was applied in 1981 to a model of natural gas markets, where it located sources of forecast uncertainty related to the use of least squares to estimate the model parameters in the first place.},
  added-at    = {2022-09-13T17:03:36.000+0200},
  author      = {Werbos, Paul J.},
  biburl      = {https://www.bibsonomy.org/bibtex/2340b509e82602498e68f5eb45d0ef851/annakrause},
  description = {Generalization of backpropagation with application to a recurrent gas market model - ScienceDirect},
  doi         = {10.1016/0893-6080(88)90007-X},
  interhash   = {924e8fa02b9b23ab9d327421262f332f},
  intrahash   = {340b509e82602498e68f5eb45d0ef851},
  issn        = {0893-6080},
  journal     = {Neural Networks},
  keywords    = {Backpropagationthroughtime sem_wise2223},
  number      = {4},
  pages       = {339--356},
  timestamp   = {2022-10-10T17:05:18.000+0200},
  title       = {Generalization of backpropagation with application to a recurrent gas market model},
  url         = {https://www.sciencedirect.com/science/article/pii/089360808890007X},
  volume      = {1},
  year        = {1988}
}