Let $\{x_t\}$ be a linear stationary process of the form $x_t + \Sigma_{1 \leqslant i < \infty} a_i x_{t-i} = e_t$, where $\{e_t\}$ is a sequence of i.i.d. normal random variables with mean 0 and variance $\sigma^2$. Given observations $x_1, \cdots, x_n$, least squares estimates $\hat{a}(k)$ of $a' = (a_1, a_2, \cdots)$, and $\hat{\sigma}^2_k$ of $\sigma^2$ are obtained if the $k$th order autoregressive model is assumed. By using $\hat{a}(k)$, we can also estimate coefficients of the best predictor based on $k$ successive realizations. An asymptotic lower bound is obtained for the mean squared error of the estimated predictor when $k$ is selected from the data. If $k$ is selected so as to minimize $S_n(k) = (n + 2k)\hat{\sigma}^2_k$, then the bound is attained in the limit. The key assumption is that the order of the autoregression of $\{x_t\}$ is infinite.
%0 Journal Article
%1 shibata_asymptotically_1980
%A Shibata, Ritei
%D 1980
%J The Annals of Statistics
%K Autoregression, Model efficiency, models prediction, selection, series time
%N 1
%P 147--164
%R 10.1214/aos/1176344897
%T Asymptotically efficient selection of the order of the model for estimating parameters of a linear process
%U http://projecteuclid.org/euclid.aos/1176344897
%V 8
%X Let \textbackslash\x\_t\textbackslash\ be a linear stationary process of the form Math Processing Errorx\_t + \textbackslashSigma\_\1\textbackslashleqslant i\textless\textbackslashinfty\a\_ix\_\t-i\ = e\_t, where Math Processing Error\textbackslash\e\_t\textbackslash\ is a sequence of i.i.d. normal random variables with mean 0 and variance Math Processing Error\textbackslashsigmaˆ2. Given observations Math Processing Errorx\_1, \textbackslashcdots, x\_n, least squares estimates Math Processing Error\textbackslashhat\a\(k) of Math Processing Errora' = (a\_1, a\_2, \textbackslashcdots), and Math Processing Error\textbackslashhat\\textbackslashsigma\ˆ2\_k of Math Processing Error\textbackslashsigmaˆ2 are obtained if the Math Processing Errorkth order autoregressive model is assumed. By using Math Processing Error\textbackslashhat\a\(k), we can also estimate coefficients of the best predictor based on Math Processing Errork successive realizations. An asymptotic lower bound is obtained for the mean squared error of the estimated predictor when Math Processing Errork is selected from the data. If Math Processing Errork is selected so as to minimize Math Processing ErrorS\_n(k) = (n + 2k)\textbackslashhat\\textbackslashsigma\ˆ2\_k, then the bound is attained in the limit. The key assumption is that the order of the autoregression of Math Processing Error\textbackslash\x\_t\textbackslash\ is infinite.
@article{shibata_asymptotically_1980,
  abstract  = {Let $\{x_t\}$ be a linear stationary process of the form $x_t + \Sigma_{1 \leqslant i < \infty} a_i x_{t-i} = e_t$, where $\{e_t\}$ is a sequence of i.i.d. normal random variables with mean 0 and variance $\sigma^2$. Given observations $x_1, \cdots, x_n$, least squares estimates $\hat{a}(k)$ of $a' = (a_1, a_2, \cdots)$, and $\hat{\sigma}^2_k$ of $\sigma^2$ are obtained if the $k$th order autoregressive model is assumed. By using $\hat{a}(k)$, we can also estimate coefficients of the best predictor based on $k$ successive realizations. An asymptotic lower bound is obtained for the mean squared error of the estimated predictor when $k$ is selected from the data. If $k$ is selected so as to minimize $S_n(k) = (n + 2k)\hat{\sigma}^2_k$, then the bound is attained in the limit. The key assumption is that the order of the autoregression of $\{x_t\}$ is infinite.},
  added-at  = {2017-01-09T13:57:26.000+0100},
  author    = {Shibata, Ritei},
  biburl    = {https://www.bibsonomy.org/bibtex/213a12971ed7c745aeeed6f951ab77d70/yourwelcome},
  doi       = {10.1214/aos/1176344897},
  interhash = {bbbab69f1dc0138e78f5d605db62acd6},
  intrahash = {13a12971ed7c745aeeed6f951ab77d70},
  issn      = {0090-5364, 2168-8966},
  journal   = {The Annals of Statistics},
  keywords  = {Autoregression, Model efficiency, models prediction, selection, series time},
  language  = {EN},
  month     = jan,
  mrnumber  = {MR557560},
  number    = {1},
  pages     = {147--164},
  timestamp = {2017-01-09T14:01:11.000+0100},
  title     = {Asymptotically efficient selection of the order of the model for estimating parameters of a linear process},
  url       = {http://projecteuclid.org/euclid.aos/1176344897},
  urldate   = {2016-06-19},
  volume    = {8},
  year      = {1980},
  zmnumber  = {0425.62069}
}