We investigate the Student-t process as an alternative to the Gaussian
process as a nonparametric prior over functions. We derive closed form
expressions for the marginal likelihood and predictive distribution of a
Student-t process, by integrating away an inverse Wishart process prior over
the covariance kernel of a Gaussian process model. We show surprising
equivalences between different hierarchical Gaussian process models leading to
Student-t processes, and derive a new sampling scheme for the inverse Wishart
process, which helps elucidate these equivalences. Overall, we show that a
Student-t process can retain the attractive properties of a Gaussian process --
a nonparametric representation, analytic marginal and predictive distributions,
and easy model selection through covariance kernels -- but has enhanced
flexibility, and predictive covariances that, unlike a Gaussian process,
explicitly depend on the values of training observations. We verify empirically
that a Student-t process is especially useful in situations where there are
changes in covariance structure, or in applications like Bayesian optimization,
where accurate predictive covariances are critical for good performance. These
advantages come at no additional computational cost over Gaussian processes.
%0 Generic
%1 shah2014studentt
%A Shah, Amar
%A Wilson, Andrew Gordon
%A Ghahramani, Zoubin
%D 2014
%K Gaussian_processes stable_processes
%T Student-t Processes as Alternatives to Gaussian Processes
%U http://arxiv.org/abs/1402.4306
%X We investigate the Student-t process as an alternative to the Gaussian
process as a nonparametric prior over functions. We derive closed form
expressions for the marginal likelihood and predictive distribution of a
Student-t process, by integrating away an inverse Wishart process prior over
the covariance kernel of a Gaussian process model. We show surprising
equivalences between different hierarchical Gaussian process models leading to
Student-t processes, and derive a new sampling scheme for the inverse Wishart
process, which helps elucidate these equivalences. Overall, we show that a
Student-t process can retain the attractive properties of a Gaussian process --
a nonparametric representation, analytic marginal and predictive distributions,
and easy model selection through covariance kernels -- but has enhanced
flexibility, and predictive covariances that, unlike a Gaussian process,
explicitly depend on the values of training observations. We verify empirically
that a Student-t process is especially useful in situations where there are
changes in covariance structure, or in applications like Bayesian optimization,
where accurate predictive covariances are critical for good performance. These
advantages come at no additional computational cost over Gaussian processes.
@misc{shah2014studentt,
  abstract      = {We investigate the Student-t process as an alternative to the Gaussian
process as a nonparametric prior over functions. We derive closed form
expressions for the marginal likelihood and predictive distribution of a
Student-t process, by integrating away an inverse Wishart process prior over
the covariance kernel of a Gaussian process model. We show surprising
equivalences between different hierarchical Gaussian process models leading to
Student-t processes, and derive a new sampling scheme for the inverse Wishart
process, which helps elucidate these equivalences. Overall, we show that a
Student-t process can retain the attractive properties of a Gaussian process --
a nonparametric representation, analytic marginal and predictive distributions,
and easy model selection through covariance kernels -- but has enhanced
flexibility, and predictive covariances that, unlike a Gaussian process,
explicitly depend on the values of training observations. We verify empirically
that a Student-t process is especially useful in situations where there are
changes in covariance structure, or in applications like Bayesian optimization,
where accurate predictive covariances are critical for good performance. These
advantages come at no additional computational cost over Gaussian processes.},
  added-at      = {2021-06-09T19:37:08.000+0200},
  archiveprefix = {arXiv},
  author        = {Shah, Amar and Wilson, Andrew Gordon and Ghahramani, Zoubin},
  biburl        = {https://www.bibsonomy.org/bibtex/2ce4f912e1671621ca413e32f39cac9c0/peter.ralph},
  eprint        = {1402.4306},
  interhash     = {8ebbd8de83dc2fa74bdcd8d661a1723c},
  intrahash     = {ce4f912e1671621ca413e32f39cac9c0},
  keywords      = {Gaussian_processes stable_processes},
  primaryclass  = {stat.ML},
  timestamp     = {2021-06-09T19:37:08.000+0200},
  title         = {{Student-t} Processes as Alternatives to {Gaussian} Processes},
  url           = {http://arxiv.org/abs/1402.4306},
  year          = {2014}
}