In order to compute the log-likelihood for high dimensional Gaussian models, it is necessary to compute the determinant of the large, sparse, symmetric positive definite precision matrix. Traditional methods for evaluating the log-likelihood, which are typically based on Cholesky factorisations, are not feasible for very large models due to the massive memory requirements. We present a novel approach for evaluating such likelihoods that only requires the computation of matrix-vector products. In this approach we utilise matrix functions, Krylov subspaces, and probing vectors to construct an iterative numerical method for computing the log-likelihood.
%0 Journal Article
%1 aune2014parameter
%A Aune, Erlend
%A Simpson, Daniel P.
%A Eidsvik, Jo
%D 2014
%J Statistics and Computing
%K Gaussian_processes linear_algebra methods statistics
%N 2
%P 247--263
%R 10.1007/s11222-012-9368-y
%T Parameter estimation in high dimensional Gaussian distributions
%U https://doi.org/10.1007/s11222-012-9368-y
%V 24
%X In order to compute the log-likelihood for high dimensional Gaussian models, it is necessary to compute the determinant of the large, sparse, symmetric positive definite precision matrix. Traditional methods for evaluating the log-likelihood, which are typically based on Cholesky factorisations, are not feasible for very large models due to the massive memory requirements. We present a novel approach for evaluating such likelihoods that only requires the computation of matrix-vector products. In this approach we utilise matrix functions, Krylov subspaces, and probing vectors to construct an iterative numerical method for computing the log-likelihood.
@article{aune2014parameter,
  author    = {Aune, Erlend and Simpson, Daniel P. and Eidsvik, Jo},
  title     = {Parameter estimation in high dimensional {Gaussian} distributions},
  journal   = {Statistics and Computing},
  year      = {2014},
  month     = mar,
  day       = {01},
  volume    = {24},
  number    = {2},
  pages     = {247--263},
  issn      = {1573-1375},
  doi       = {10.1007/s11222-012-9368-y},
  url       = {https://doi.org/10.1007/s11222-012-9368-y},
  abstract  = {In order to compute the log-likelihood for high dimensional Gaussian models, it is necessary to compute the determinant of the large, sparse, symmetric positive definite precision matrix. Traditional methods for evaluating the log-likelihood, which are typically based on Cholesky factorisations, are not feasible for very large models due to the massive memory requirements. We present a novel approach for evaluating such likelihoods that only requires the computation of matrix-vector products. In this approach we utilise matrix functions, Krylov subspaces, and probing vectors to construct an iterative numerical method for computing the log-likelihood.},
  keywords  = {Gaussian_processes linear_algebra methods statistics},
  added-at  = {2021-01-20T22:17:19.000+0100},
  timestamp = {2021-01-20T22:17:19.000+0100},
  biburl    = {https://www.bibsonomy.org/bibtex/2e41fee053c0b05368b4aa0f2531b5e46/peter.ralph},
  interhash = {766a33cc47165f0a8f047a1e90d0055b},
  intrahash = {e41fee053c0b05368b4aa0f2531b5e46},
}