Differential equations parameterized by neural networks become expensive to
solve numerically as training progresses. We propose a remedy that encourages
learned dynamics to be easier to solve. Specifically, we introduce a
differentiable surrogate for the time cost of standard numerical solvers, using
higher-order derivatives of solution trajectories. These derivatives are
efficient to compute with Taylor-mode automatic differentiation. Optimizing
this additional objective trades model performance against the time cost of
solving the learned dynamics. We demonstrate our approach by training
substantially faster, while nearly as accurate, models in supervised
classification, density estimation, and time-series modelling tasks.
Description
[2007.04504] Learning Differential Equations that are Easy to Solve
%0 Generic
%1 kelly2020learning
%A Kelly, Jacob
%A Bettencourt, Jesse
%A Johnson, Matthew James
%A Duvenaud, David
%D 2020
%K deep-implicit-learning from:adulny neural-ode
%T Learning Differential Equations that are Easy to Solve
%U http://arxiv.org/abs/2007.04504
%X Differential equations parameterized by neural networks become expensive to
solve numerically as training progresses. We propose a remedy that encourages
learned dynamics to be easier to solve. Specifically, we introduce a
differentiable surrogate for the time cost of standard numerical solvers, using
higher-order derivatives of solution trajectories. These derivatives are
efficient to compute with Taylor-mode automatic differentiation. Optimizing
this additional objective trades model performance against the time cost of
solving the learned dynamics. We demonstrate our approach by training
substantially faster, while nearly as accurate, models in supervised
classification, density estimation, and time-series modelling tasks.
@misc{kelly2020learning,
  abstract      = {Differential equations parameterized by neural networks become expensive to
solve numerically as training progresses. We propose a remedy that encourages
learned dynamics to be easier to solve. Specifically, we introduce a
differentiable surrogate for the time cost of standard numerical solvers, using
higher-order derivatives of solution trajectories. These derivatives are
efficient to compute with Taylor-mode automatic differentiation. Optimizing
this additional objective trades model performance against the time cost of
solving the learned dynamics. We demonstrate our approach by training
substantially faster, while nearly as accurate, models in supervised
classification, density estimation, and time-series modelling tasks.},
  added-at      = {2021-03-26T12:02:28.000+0100},
  archiveprefix = {arXiv},
  author        = {Kelly, Jacob and Bettencourt, Jesse and Johnson, Matthew James and Duvenaud, David},
  biburl        = {https://www.bibsonomy.org/bibtex/29e2a50f9d2b4e897018292299587be5a/adulny},
  description   = {[2007.04504] Learning Differential Equations that are Easy to Solve},
  eprint        = {2007.04504},
  interhash     = {e8fd802e0240e7969e0e57ab7eecb682},
  intrahash     = {9e2a50f9d2b4e897018292299587be5a},
  keywords      = {deep-implicit-learning from:adulny neural-ode},
  timestamp     = {2021-03-26T12:02:28.000+0100},
  title         = {Learning Differential Equations that are Easy to Solve},
  url           = {http://arxiv.org/abs/2007.04504},
  year          = {2020},
}