Autoregressive models (ARMs) currently hold state-of-the-art performance in
likelihood-based modeling of image and audio data. Generally, neural network
based ARMs are designed to allow fast inference, but sampling from these models
is impractically slow. In this paper, we introduce the predictive sampling
algorithm: a procedure that exploits the fast inference property of ARMs in
order to speed up sampling, while keeping the model intact. We propose two
variations of predictive sampling, namely sampling with ARM fixed-point
iteration and learned forecasting modules. Their effectiveness is demonstrated
in two settings: i) explicit likelihood modeling on binary MNIST, SVHN and
CIFAR10, and ii) discrete latent modeling in an autoencoder trained on SVHN,
CIFAR10 and Imagenet32. Empirically, we show considerable improvements over
baselines in number of ARM inference calls and sampling speed.
Description
[2002.09928] Predictive Sampling with Forecasting Autoregressive Models
%0 Journal Article
%1 wiggers2020predictive
%A Wiggers, Auke J.
%A Hoogeboom, Emiel
%D 2020
%K autoregressive generative-models sampling
%T Predictive Sampling with Forecasting Autoregressive Models
%U http://arxiv.org/abs/2002.09928
%X Autoregressive models (ARMs) currently hold state-of-the-art performance in
likelihood-based modeling of image and audio data. Generally, neural network
based ARMs are designed to allow fast inference, but sampling from these models
is impractically slow. In this paper, we introduce the predictive sampling
algorithm: a procedure that exploits the fast inference property of ARMs in
order to speed up sampling, while keeping the model intact. We propose two
variations of predictive sampling, namely sampling with ARM fixed-point
iteration and learned forecasting modules. Their effectiveness is demonstrated
in two settings: i) explicit likelihood modeling on binary MNIST, SVHN and
CIFAR10, and ii) discrete latent modeling in an autoencoder trained on SVHN,
CIFAR10 and Imagenet32. Empirically, we show considerable improvements over
baselines in number of ARM inference calls and sampling speed.
@misc{wiggers2020predictive,
  abstract      = {Autoregressive models (ARMs) currently hold state-of-the-art performance in
likelihood-based modeling of image and audio data. Generally, neural network
based ARMs are designed to allow fast inference, but sampling from these models
is impractically slow. In this paper, we introduce the predictive sampling
algorithm: a procedure that exploits the fast inference property of ARMs in
order to speed up sampling, while keeping the model intact. We propose two
variations of predictive sampling, namely sampling with ARM fixed-point
iteration and learned forecasting modules. Their effectiveness is demonstrated
in two settings: i) explicit likelihood modeling on binary MNIST, SVHN and
CIFAR10, and ii) discrete latent modeling in an autoencoder trained on SVHN,
CIFAR10 and Imagenet32. Empirically, we show considerable improvements over
baselines in number of ARM inference calls and sampling speed.},
  added-at      = {2020-02-26T16:44:53.000+0100},
  author        = {Wiggers, Auke J. and Hoogeboom, Emiel},
  biburl        = {https://www.bibsonomy.org/bibtex/2e8f887e4bf006f68c9f73077cb486d3d/kirk86},
  description   = {[2002.09928] Predictive Sampling with Forecasting Autoregressive Models},
  eprint        = {2002.09928},
  archiveprefix = {arXiv},
  interhash     = {51be7020bd2a1a3e4733ba7d14a3ba8b},
  intrahash     = {e8f887e4bf006f68c9f73077cb486d3d},
  keywords      = {autoregressive generative-models sampling},
  note          = {13 pages, 16 figures},
  timestamp     = {2020-02-26T16:45:23.000+0100},
  title         = {Predictive Sampling with Forecasting Autoregressive Models},
  url           = {http://arxiv.org/abs/2002.09928},
  year          = {2020}
}