Latent Gaussian models are a popular class of hierarchical models with
applications in many fields. Performing Bayesian inference on such models can
be challenging. Markov chain Monte Carlo algorithms struggle with the geometry
of the resulting posterior distribution and can be prohibitively slow. An
alternative is to use an integrated nested Laplace approximation, whereby we
marginalize out the latent Gaussian variables and estimate the hyperparameters
with a deterministic scheme. This type of inference typically works only when
the hyperparameters are low-dimensional and their posterior distribution is
unimodal. We bypass these limitations by
coupling dynamic Hamiltonian Monte Carlo with an embedded Laplace
approximation. Our implementation features a novel adjoint method to
differentiate the marginal likelihood, which scales efficiently to high-dimensional
hyperparameters. We prototype the method in the probabilistic programming
framework Stan and test the utility of the embedded Laplace approximation on
several models: a classic Gaussian process, a general linear regression model
with a sparsity-inducing horseshoe prior, and a sparse kernel interaction
model. The last two models are characterized by a high-dimensional and
multimodal posterior distribution of the hyperparameters, and as such present
novel applications of the embedded Laplace approximation. Depending on the
case, the benefits are either a dramatic speed-up or an alleviation of the
geometric pathologies that frustrate Hamiltonian Monte Carlo.
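
For orientation, the approximation named in the abstract has a standard
textbook form. Writing eta for the latent Gaussian variables, theta for the
hyperparameters, and K = K(theta) for the prior covariance (our notation, not
necessarily the paper's), a latent Gaussian model is

\[
\eta \mid \theta \sim \mathcal{N}\big(0, K(\theta)\big), \qquad
y \mid \eta \sim \textstyle\prod_i p(y_i \mid \eta_i), \qquad
\theta \sim p(\theta).
\]

The embedded Laplace approximation replaces the intractable marginal
\( p(y \mid \theta) = \int p(y \mid \eta)\, p(\eta \mid \theta)\, d\eta \)
with its Gaussian-at-the-mode estimate

\[
\log \hat p(y \mid \theta)
  = \log p(y \mid \hat\eta)
  - \tfrac{1}{2}\, \hat\eta^\top K^{-1} \hat\eta
  - \tfrac{1}{2}\, \log \big| I + W^{1/2} K\, W^{1/2} \big|,
\]

where \( \hat\eta = \arg\max_\eta \log p(\eta \mid y, \theta) \) and
\( W = -\nabla^2_\eta \log p(y \mid \eta) \big|_{\eta = \hat\eta} \).
Hamiltonian Monte Carlo then targets
\( p(\theta \mid y) \propto p(\theta)\, \hat p(y \mid \theta) \), which
requires \( \nabla_\theta \log \hat p(y \mid \theta) \); this is the gradient
the paper's adjoint method is designed to compute efficiently.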
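To make the inner step concrete, below is a minimal sketch of the Newton
iteration that locates the mode and evaluates the approximate log marginal
likelihood, following the standard algorithm for Gaussian process models
(Rasmussen and Williams, Algorithm 3.1). It is an illustration in NumPy under
our own naming, not the paper's Stan implementation; the Bernoulli-logit
example at the end is likewise ours.

import numpy as np

def laplace_log_marginal(K, log_lik, grad_log_lik, neg_hess_diag,
                         n_newton=100, tol=1e-8):
    # Newton iteration for the mode of p(eta | y, theta), arranged so that
    # each step needs a single Cholesky factorization of
    # B = I + W^{1/2} K W^{1/2} (assumes a log-concave likelihood, W >= 0).
    n = K.shape[0]
    eta = np.zeros(n)                 # start at the prior mean
    a = np.zeros(n)
    for _ in range(n_newton):
        W = neg_hess_diag(eta)        # W = -d^2 log p(y|eta) / d eta^2
        sW = np.sqrt(W)
        B = np.eye(n) + sW[:, None] * K * sW[None, :]
        L = np.linalg.cholesky(B)
        b = W * eta + grad_log_lik(eta)
        a = b - sW * np.linalg.solve(L.T, np.linalg.solve(L, sW * (K @ b)))
        eta_new = K @ a               # Newton update: eta_new = K a
        if np.max(np.abs(eta_new - eta)) < tol:
            eta = eta_new
            break
        eta = eta_new
    # log p(y|theta) ~= log p(y|eta_hat) - (1/2) eta_hat' K^{-1} eta_hat
    #                   - (1/2) log |I + W^{1/2} K W^{1/2}|,
    # using a = K^{-1} eta_hat and log|B| = 2 sum(log diag(L)).
    return log_lik(eta) - 0.5 * a @ eta - np.sum(np.log(np.diag(L)))

# Example with a Bernoulli-logit observation model, y_i in {0, 1}:
# sig = lambda x: 1.0 / (1.0 + np.exp(-x))
# log_lik       = lambda eta: np.sum(y * eta - np.logaddexp(0.0, eta))
# grad_log_lik  = lambda eta: y - sig(eta)
# neg_hess_diag = lambda eta: sig(eta) * (1.0 - sig(eta))

In the paper's setting, HMC also needs the gradient of this quantity with
respect to theta, propagated through K(theta) and the converged mode;
differentiating through the Newton solve naively becomes expensive as the
number of hyperparameters grows, which is the cost the adjoint method avoids.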
@article{margossian2020hamiltonian,
  author   = {Margossian, Charles C. and Vehtari, Aki and Simpson, Daniel and Agrawal, Raj},
  title    = {Hamiltonian Monte Carlo using an embedded Laplace approximation},
  year     = {2020},
  url      = {http://arxiv.org/abs/2004.12550},
  note     = {arXiv:2004.12550; 16 pages, 12 figures},
  keywords = {approximate bayesian geometry mcmc optimization}
}