We present MorphNet, an approach to automate the design of neural network structures. MorphNet iteratively shrinks and expands a network, shrinking via a resource-weighted sparsifying regularizer on activations and expanding via a uniform multiplicative factor on all layers. In contrast to previous approaches, our method is scalable to large networks, adaptable to specific resource constraints (e.g. the number of floating-point operations per inference), and capable of increasing the network's performance. When applied to standard network architectures on a wide variety of datasets, our approach discovers novel structures in each domain, obtaining higher performance while respecting the resource constraint.
%0 Journal Article
%1 Gordon2017
%A Gordon, Ariel
%A Eban, Elad
%A Nachum, Ofir
%A Chen, Bo
%A Wu, Hao
%A Yang, Tien-Ju
%A Choi, Edward
%D 2017
%K cs.LG, stat.ML
%T MorphNet: Fast & Simple Resource-Constrained Structure Learning of Deep Networks
%X We present MorphNet, an approach to automate the design of neural network structures. MorphNet iteratively shrinks and expands a network, shrinking via a resource-weighted sparsifying regularizer on activations and expanding via a uniform multiplicative factor on all layers. In contrast to previous approaches, our method is scalable to large networks, adaptable to specific resource constraints (e.g. the number of floating-point operations per inference), and capable of increasing the network's performance. When applied to standard network architectures on a wide variety of datasets, our approach discovers novel structures in each domain, obtaining higher performance while respecting the resource constraint.
@article{Gordon2017,
  abstract    = {We present MorphNet, an approach to automate the design of neural network structures. MorphNet iteratively shrinks and expands a network, shrinking via a resource-weighted sparsifying regularizer on activations and expanding via a uniform multiplicative factor on all layers. In contrast to previous approaches, our method is scalable to large networks, adaptable to specific resource constraints (e.g. the number of floating-point operations per inference), and capable of increasing the network's performance. When applied to standard network architectures on a wide variety of datasets, our approach discovers novel structures in each domain, obtaining higher performance while respecting the resource constraint.},
  added-at    = {2020-10-15T14:36:56.000+0200},
  author      = {Gordon, Ariel and Eban, Elad and Nachum, Ofir and Chen, Bo and Wu, Hao and Yang, Tien-Ju and Choi, Edward},
  biburl      = {https://www.bibsonomy.org/bibtex/2cf787220cc3dcc6c7a9171fbfb8b0406/annakrause},
  eprint      = {1711.06798v3},
  eprintclass = {cs.LG},
  eprinttype  = {arXiv},
  file        = {:http\://arxiv.org/pdf/1711.06798v3:PDF},
  interhash   = {84e1eacfc0d7669d790049ec3ddc94e6},
  intrahash   = {cf787220cc3dcc6c7a9171fbfb8b0406},
  keywords    = {cs.LG, stat.ML},
  timestamp   = {2020-10-15T14:44:42.000+0200},
  title       = {{MorphNet}: Fast \& Simple Resource-Constrained Structure Learning of Deep Networks},
  year        = {2017},
}