While neural architecture search (NAS) has drawn increasing attention for
automatically tuning deep neural networks, existing search algorithms usually
suffer from expensive computational cost. Network morphism, which keeps the
functionality of a neural network while changing its neural architecture, could
be helpful for NAS by enabling a more efficient training during the search.
However, network morphism based NAS is still computationally expensive due to
the inefficient process of selecting the proper morph operation for existing
architectures. As we know, Bayesian optimization has been widely used to
optimize functions based on a limited number of observations, motivating us to
explore the possibility of making use of Bayesian optimization to accelerate
the morph operation selection process. In this paper, we propose a novel
framework enabling Bayesian optimization to guide the network morphism for
efficient neural architecture search by introducing a neural network kernel and
a tree-structured acquisition function optimization algorithm. With Bayesian
optimization to select the network morphism operations, the exploration of the
search space is more efficient. Moreover, we carefully wrapped our method into
an open-source software, namely Auto-Keras for people without rich machine
learning background to use. Intensive experiments on real-world datasets have
been done to demonstrate the superior performance of the developed framework
over the state-of-the-art baseline methods.
Description
[1806.10282] Efficient Neural Architecture Search with Network Morphism
%0 Generic
%1 jin2018efficient
%A Jin, Haifeng
%A Song, Qingquan
%A Hu, Xia
%D 2018
%K 2018 arxiv deep-learning paper
%T Efficient Neural Architecture Search with Network Morphism
%U http://arxiv.org/abs/1806.10282
%X While neural architecture search (NAS) has drawn increasing attention for
automatically tuning deep neural networks, existing search algorithms usually
suffer from expensive computational cost. Network morphism, which keeps the
functionality of a neural network while changing its neural architecture, could
be helpful for NAS by enabling a more efficient training during the search.
However, network morphism based NAS is still computationally expensive due to
the inefficient process of selecting the proper morph operation for existing
architectures. As we know, Bayesian optimization has been widely used to
optimize functions based on a limited number of observations, motivating us to
explore the possibility of making use of Bayesian optimization to accelerate
the morph operation selection process. In this paper, we propose a novel
framework enabling Bayesian optimization to guide the network morphism for
efficient neural architecture search by introducing a neural network kernel and
a tree-structured acquisition function optimization algorithm. With Bayesian
optimization to select the network morphism operations, the exploration of the
search space is more efficient. Moreover, we carefully wrapped our method into
an open-source software, namely Auto-Keras for people without rich machine
learning background to use. Intensive experiments on real-world datasets have
been done to demonstrate the superior performance of the developed framework
over the state-of-the-art baseline methods.
@misc{jin2018efficient,
  abstract      = {While neural architecture search (NAS) has drawn increasing attention for
automatically tuning deep neural networks, existing search algorithms usually
suffer from expensive computational cost. Network morphism, which keeps the
functionality of a neural network while changing its neural architecture, could
be helpful for NAS by enabling a more efficient training during the search.
However, network morphism based NAS is still computationally expensive due to
the inefficient process of selecting the proper morph operation for existing
architectures. As we know, Bayesian optimization has been widely used to
optimize functions based on a limited number of observations, motivating us to
explore the possibility of making use of Bayesian optimization to accelerate
the morph operation selection process. In this paper, we propose a novel
framework enabling Bayesian optimization to guide the network morphism for
efficient neural architecture search by introducing a neural network kernel and
a tree-structured acquisition function optimization algorithm. With Bayesian
optimization to select the network morphism operations, the exploration of the
search space is more efficient. Moreover, we carefully wrapped our method into
an open-source software, namely Auto-Keras for people without rich machine
learning background to use. Intensive experiments on real-world datasets have
been done to demonstrate the superior performance of the developed framework
over the state-of-the-art baseline methods.},
  added-at      = {2018-08-02T20:23:50.000+0200},
  archiveprefix = {arXiv},
  author        = {Jin, Haifeng and Song, Qingquan and Hu, Xia},
  biburl        = {https://www.bibsonomy.org/bibtex/2179b182e88f2549e919c419f0e61db1f/analyst},
  description   = {[1806.10282] Efficient Neural Architecture Search with Network Morphism},
  eprint        = {1806.10282},
  interhash     = {c685472e9aa2f1a3313a142e2d1413da},
  intrahash     = {179b182e88f2549e919c419f0e61db1f},
  keywords      = {2018 arxiv deep-learning paper},
  note          = {cite arxiv:1806.10282},
  timestamp     = {2018-08-02T20:23:50.000+0200},
  title         = {Efficient Neural Architecture Search with Network Morphism},
  url           = {http://arxiv.org/abs/1806.10282},
  year          = {2018},
}