Neural Tangents is a library designed to enable research into infinite-width
neural networks. It provides a high-level API for specifying complex and
hierarchical neural network architectures. These networks can then be trained
and evaluated either at finite-width as usual or in their infinite-width limit.
Infinite-width networks can be trained analytically using exact Bayesian
inference or using gradient descent via the Neural Tangent Kernel.
Additionally, Neural Tangents provides tools to study gradient descent training
dynamics of wide but finite networks in either function space or weight space.
The entire library runs out-of-the-box on CPU, GPU, or TPU. All computations
can be automatically distributed over multiple accelerators with near-linear
scaling in the number of devices. Neural Tangents is available at
www.github.com/google/neural-tangents. We also provide an accompanying
interactive Colab notebook.
Description
[1912.02803] Neural Tangents: Fast and Easy Infinite Neural Networks in Python
%0 Journal Article
%1 novak2019neural
%A Novak, Roman
%A Xiao, Lechao
%A Hron, Jiri
%A Lee, Jaehoon
%A Alemi, Alexander A.
%A Sohl-Dickstein, Jascha
%A Schoenholz, Samuel S.
%D 2019
%K bayesian gaussian-processes neural-processes probability
%T Neural Tangents: Fast and Easy Infinite Neural Networks in Python
%U http://arxiv.org/abs/1912.02803
%X Neural Tangents is a library designed to enable research into infinite-width
neural networks. It provides a high-level API for specifying complex and
hierarchical neural network architectures. These networks can then be trained
and evaluated either at finite-width as usual or in their infinite-width limit.
Infinite-width networks can be trained analytically using exact Bayesian
inference or using gradient descent via the Neural Tangent Kernel.
Additionally, Neural Tangents provides tools to study gradient descent training
dynamics of wide but finite networks in either function space or weight space.
The entire library runs out-of-the-box on CPU, GPU, or TPU. All computations
can be automatically distributed over multiple accelerators with near-linear
scaling in the number of devices. Neural Tangents is available at
www.github.com/google/neural-tangents. We also provide an accompanying
interactive Colab notebook.
@article{novak2019neural,
  abstract      = {Neural Tangents is a library designed to enable research into infinite-width
neural networks. It provides a high-level API for specifying complex and
hierarchical neural network architectures. These networks can then be trained
and evaluated either at finite-width as usual or in their infinite-width limit.
Infinite-width networks can be trained analytically using exact Bayesian
inference or using gradient descent via the Neural Tangent Kernel.
Additionally, Neural Tangents provides tools to study gradient descent training
dynamics of wide but finite networks in either function space or weight space.
The entire library runs out-of-the-box on CPU, GPU, or TPU. All computations
can be automatically distributed over multiple accelerators with near-linear
scaling in the number of devices. Neural Tangents is available at
www.github.com/google/neural-tangents. We also provide an accompanying
interactive Colab notebook.},
  added-at      = {2019-12-06T21:41:50.000+0100},
  author        = {Novak, Roman and Xiao, Lechao and Hron, Jiri and Lee, Jaehoon and Alemi, Alexander A. and Sohl-Dickstein, Jascha and Schoenholz, Samuel S.},
  biburl        = {https://www.bibsonomy.org/bibtex/2195d97386f9fc83c79434f89c6367a0e/kirk86},
  description   = {[1912.02803] Neural Tangents: Fast and Easy Infinite Neural Networks in Python},
  eprint        = {1912.02803},
  archiveprefix = {arXiv},
  interhash     = {fa488e2f6b496863b0d3ee745e97a996},
  intrahash     = {195d97386f9fc83c79434f89c6367a0e},
  keywords      = {bayesian gaussian-processes neural-processes probability},
  timestamp     = {2019-12-06T21:41:50.000+0100},
  title         = {{Neural Tangents}: Fast and Easy Infinite Neural Networks in {Python}},
  url           = {http://arxiv.org/abs/1912.02803},
  year          = {2019},
}