A promising class of generative models maps points from a simple distribution
to a complex distribution through an invertible neural network.
Likelihood-based training of these models requires restricting their
architectures to allow cheap computation of Jacobian determinants.
Alternatively, the Jacobian trace can be used if the transformation is
specified by an ordinary differential equation. In this paper, we use
Hutchinson's trace estimator to give a scalable unbiased estimate of the
log-density. The result is a continuous-time invertible generative model with
unbiased density estimation and one-pass sampling, while allowing unrestricted
neural network architectures. We demonstrate our approach on high-dimensional
density estimation, image generation, and variational inference, achieving the
state-of-the-art among exact likelihood methods with efficient sampling.
Description
[1810.01367] FFJORD: Free-form Continuous Dynamics for Scalable Reversible Generative Models
%0 Generic
%1 grathwohl2018ffjord
%A Grathwohl, Will
%A Chen, Ricky T. Q.
%A Bettencourt, Jesse
%A Sutskever, Ilya
%A Duvenaud, David
%D 2018
%K deep-implicit-learning from:adulny generative-models neural-ode
%T FFJORD: Free-form Continuous Dynamics for Scalable Reversible Generative
Models
%U http://arxiv.org/abs/1810.01367
%X A promising class of generative models maps points from a simple distribution
to a complex distribution through an invertible neural network.
Likelihood-based training of these models requires restricting their
architectures to allow cheap computation of Jacobian determinants.
Alternatively, the Jacobian trace can be used if the transformation is
specified by an ordinary differential equation. In this paper, we use
Hutchinson's trace estimator to give a scalable unbiased estimate of the
log-density. The result is a continuous-time invertible generative model with
unbiased density estimation and one-pass sampling, while allowing unrestricted
neural network architectures. We demonstrate our approach on high-dimensional
density estimation, image generation, and variational inference, achieving the
state-of-the-art among exact likelihood methods with efficient sampling.
@misc{grathwohl2018ffjord,
  abstract      = {A promising class of generative models maps points from a simple distribution
to a complex distribution through an invertible neural network.
Likelihood-based training of these models requires restricting their
architectures to allow cheap computation of Jacobian determinants.
Alternatively, the Jacobian trace can be used if the transformation is
specified by an ordinary differential equation. In this paper, we use
Hutchinson's trace estimator to give a scalable unbiased estimate of the
log-density. The result is a continuous-time invertible generative model with
unbiased density estimation and one-pass sampling, while allowing unrestricted
neural network architectures. We demonstrate our approach on high-dimensional
density estimation, image generation, and variational inference, achieving the
state-of-the-art among exact likelihood methods with efficient sampling.},
  added-at      = {2021-03-26T12:04:18.000+0100},
  author        = {Grathwohl, Will and Chen, Ricky T. Q. and Bettencourt, Jesse and Sutskever, Ilya and Duvenaud, David},
  biburl        = {https://www.bibsonomy.org/bibtex/2f0cbaf99f9990fc0c3d99847c6352f9e/adulny},
  description   = {[1810.01367] FFJORD: Free-form Continuous Dynamics for Scalable Reversible Generative Models},
  eprint        = {1810.01367},
  archiveprefix = {arXiv},
  interhash     = {36366306798a1de4c7bb5e1acee0e668},
  intrahash     = {f0cbaf99f9990fc0c3d99847c6352f9e},
  keywords      = {deep-implicit-learning from:adulny generative-models neural-ode},
  note          = {arXiv:1810.01367. 8 pages, 6 figures},
  timestamp     = {2021-03-26T12:04:18.000+0100},
  title         = {{FFJORD}: Free-form Continuous Dynamics for Scalable Reversible Generative Models},
  url           = {http://arxiv.org/abs/1810.01367},
  year          = {2018},
}