This paper proposes a novel neural network architecture inspired by the
nonstandard form proposed by Beylkin, Coifman, and Rokhlin in [Communications
on Pure and Applied Mathematics, 44(2), 141-183]. The nonstandard form is a
highly effective wavelet-based compression scheme for linear integral
operators. In this work, we first represent the matrix-vector product algorithm
of the nonstandard form as a linear neural network where every scale of the
multiresolution computation is carried out by a locally connected linear
sub-network. In order to address nonlinear problems, we propose an extension,
called BCR-Net, by replacing each linear sub-network with a deeper and more
powerful nonlinear one. Numerical results demonstrate the efficiency of the new
architecture by approximating nonlinear maps that arise in homogenization
theory and stochastic computation.
%0 Generic
%1 fan2018bcrnet
%A Fan, Yuwei
%A Bohorquez, Cindy Orozco
%A Ying, Lexing
%D 2018
%K audio todo:read wavelet
%R 10.1016/j.jcp.2019.02.002
%T BCR-Net: a neural network based on the nonstandard wavelet form
%U http://arxiv.org/abs/1810.08754
%X This paper proposes a novel neural network architecture inspired by the
nonstandard form proposed by Beylkin, Coifman, and Rokhlin in [Communications
on Pure and Applied Mathematics, 44(2), 141-183]. The nonstandard form is a
highly effective wavelet-based compression scheme for linear integral
operators. In this work, we first represent the matrix-vector product algorithm
of the nonstandard form as a linear neural network where every scale of the
multiresolution computation is carried out by a locally connected linear
sub-network. In order to address nonlinear problems, we propose an extension,
called BCR-Net, by replacing each linear sub-network with a deeper and more
powerful nonlinear one. Numerical results demonstrate the efficiency of the new
architecture by approximating nonlinear maps that arise in homogenization
theory and stochastic computation.
@misc{fan2018bcrnet,
  abstract      = {This paper proposes a novel neural network architecture inspired by the
nonstandard form proposed by Beylkin, Coifman, and Rokhlin in [Communications
on Pure and Applied Mathematics, 44(2), 141-183]. The nonstandard form is a
highly effective wavelet-based compression scheme for linear integral
operators. In this work, we first represent the matrix-vector product algorithm
of the nonstandard form as a linear neural network where every scale of the
multiresolution computation is carried out by a locally connected linear
sub-network. In order to address nonlinear problems, we propose an extension,
called BCR-Net, by replacing each linear sub-network with a deeper and more
powerful nonlinear one. Numerical results demonstrate the efficiency of the new
architecture by approximating nonlinear maps that arise in homogenization
theory and stochastic computation.},
  added-at      = {2022-06-20T11:10:30.000+0200},
  archiveprefix = {arXiv},
  author        = {Fan, Yuwei and Bohorquez, Cindy Orozco and Ying, Lexing},
  biburl        = {https://www.bibsonomy.org/bibtex/2d77500f3a8204daa3ca0d8e8cb3bbae9/annakrause},
  description   = {1810.08754.pdf},
  doi           = {10.1016/j.jcp.2019.02.002},
  eprint        = {1810.08754},
  interhash     = {583a27b68f9a5cc5328223b576597928},
  intrahash     = {d77500f3a8204daa3ca0d8e8cb3bbae9},
  keywords      = {audio todo:read wavelet},
  note          = {17 pages and 9 figures},
  timestamp     = {2022-06-20T11:12:12.000+0200},
  title         = {{BCR-Net}: a neural network based on the nonstandard wavelet form},
  url           = {http://arxiv.org/abs/1810.08754},
  year          = {2018}
}