Recent work has shown that convolutional networks can be substantially
deeper, more accurate, and efficient to train if they contain shorter
connections between layers close to the input and those close to the output. In
this paper, we embrace this observation and introduce the Dense Convolutional
Network (DenseNet), which connects each layer to every other layer in a
feed-forward fashion. Whereas traditional convolutional networks with L layers
have L connections - one between each layer and its subsequent layer - our
network has L(L+1)/2 direct connections. For each layer, the feature-maps of
all preceding layers are used as inputs, and its own feature-maps are used as
inputs into all subsequent layers. DenseNets have several compelling
advantages: they alleviate the vanishing-gradient problem, encourage feature
reuse, and substantially improve parameter efficiency. We evaluate our proposed
architecture on four highly competitive object recognition benchmark tasks
(CIFAR-10, CIFAR-100, SVHN, and ImageNet). DenseNets obtain significant
improvements over the state of the art on most of them, whilst requiring fewer
parameters and less computation to achieve high performance.
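
To make the connectivity pattern concrete: layer l in a dense block receives l inputs (the block's input plus the outputs of the l-1 earlier layers), so summing 1 + 2 + ... + L gives the L(L+1)/2 direct connections mentioned above. Below is a minimal PyTorch sketch of such a block. It is not the authors' released implementation; the BN-ReLU-Conv(3x3) composite follows the paper's description, while the growth rate of 12 and the toy tensor sizes are illustrative assumptions.

import torch
import torch.nn as nn

class DenseBlock(nn.Module):
    """Minimal dense block sketch: each layer consumes the concatenation of
    all earlier feature-maps and contributes growth_rate new channels."""

    def __init__(self, in_channels: int, num_layers: int, growth_rate: int):
        super().__init__()
        self.layers = nn.ModuleList()
        for i in range(num_layers):
            # Layer i sees the block input plus the outputs of layers 0..i-1,
            # i.e. in_channels + i * growth_rate channels in total.
            self.layers.append(nn.Sequential(
                nn.BatchNorm2d(in_channels + i * growth_rate),
                nn.ReLU(inplace=True),
                nn.Conv2d(in_channels + i * growth_rate, growth_rate,
                          kernel_size=3, padding=1, bias=False),
            ))

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        features = [x]
        for layer in self.layers:
            # Concatenate every preceding feature-map along the channel axis.
            out = layer(torch.cat(features, dim=1))
            features.append(out)
        return torch.cat(features, dim=1)

# Toy usage (sizes are assumptions): 4 layers, growth rate 12, 24-channel input.
block = DenseBlock(in_channels=24, num_layers=4, growth_rate=12)
y = block(torch.randn(1, 24, 32, 32))
print(y.shape)  # torch.Size([1, 72, 32, 32]); 24 + 4 * 12 output channels

Concatenation (rather than the summation used in residual networks) is what lets every later layer reuse earlier feature-maps directly, which is the source of the parameter efficiency claimed in the abstract.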
%0 Generic
%1 huang2020convolutional
%A Huang, Gao
%A Liu, Zhuang
%A Pleiss, Geoff
%A van der Maaten, Laurens
%A Weinberger, Kilian Q.
%D 2020
%K 2020 cnn deep-learning
%T Convolutional Networks with Dense Connectivity
%U http://arxiv.org/abs/2001.02394
@misc{huang2020convolutional,
added-at = {2020-01-09T08:53:04.000+0100},
author = {Huang, Gao and Liu, Zhuang and Pleiss, Geoff and van der Maaten, Laurens and Weinberger, Kilian Q.},
biburl = {https://www.bibsonomy.org/bibtex/2fb9d315a79f74ceb28e8508098403037/analyst},
description = {[2001.02394] Convolutional Networks with Dense Connectivity},
keywords = {2020 cnn deep-learning},
note = {cite arxiv:2001.02394. Comment: Journal (PAMI) version of DenseNet (CVPR'17)},
timestamp = {2020-01-09T08:53:04.000+0100},
title = {Convolutional Networks with Dense Connectivity},
url = {http://arxiv.org/abs/2001.02394},
year = 2020
}