We propose learning deep models that are monotonic with respect to a
user-specified set of inputs by alternating layers of linear embeddings,
ensembles of lattices, and calibrators (piecewise linear functions), with
appropriate constraints for monotonicity, and jointly training the resulting
network. We implement the layers and projections with new computational graph
nodes in TensorFlow and use the ADAM optimizer and batched stochastic
gradients. Experiments on benchmark and real-world datasets show that six-layer
monotonic deep lattice networks achieve state-of-the-art performance for
classification and regression with monotonicity guarantees.
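The calibrators in this architecture are one-dimensional piecewise-linear (PWL) functions constrained to be non-decreasing. Below is a minimal TensorFlow sketch of one way to obtain such a calibrator: instead of the paper's projection step, it bakes monotonicity into the parameterization by expressing the function as a cumulative sum of non-negative per-segment rises (via softplus). The class name, the fixed-keypoint scheme, and the softplus reparameterization are illustrative assumptions, not the authors' implementation.

```python
import tensorflow as tf

class MonotonicPWLCalibrator(tf.keras.layers.Layer):
    """Non-decreasing piecewise-linear calibrator (illustrative sketch).

    Monotonicity holds by construction: each segment's rise is
    softplus(raw) >= 0, so the output can only grow with the input.
    (The paper instead trains free parameters and projects them onto
    the monotonicity constraints after each gradient step.)
    """

    def __init__(self, keypoints, **kwargs):
        super().__init__(**kwargs)
        # Sorted input keypoints delimiting the linear segments.
        self.keypoints = tf.constant(keypoints, dtype=tf.float32)

    def build(self, input_shape):
        num_segments = int(self.keypoints.shape[0]) - 1
        # Output value at the leftmost keypoint (unconstrained).
        self.bias = self.add_weight(
            name="bias", shape=(), initializer="zeros")
        # One raw parameter per segment; softplus makes each rise >= 0.
        self.raw_rises = self.add_weight(
            name="raw_rises", shape=(num_segments,), initializer="zeros")

    def call(self, x):
        # x: [batch] tensor of scalar inputs.
        rises = tf.nn.softplus(self.raw_rises)            # >= 0
        left, right = self.keypoints[:-1], self.keypoints[1:]
        # Fraction of each segment that x has traversed, clipped to [0, 1].
        frac = tf.clip_by_value(
            (x[:, None] - left) / (right - left), 0.0, 1.0)
        # Sum the completed (and partially completed) rises.
        return self.bias + tf.reduce_sum(frac * rises, axis=-1)
```

Inputs outside the keypoint range saturate the segment fractions at 0 or 1, so this sketch extrapolates flat at both ends; only the slopes are constrained, so the value at the leftmost keypoint (`bias`) remains free.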
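A lattice layer stores function values on a regular grid of parameters and evaluates them by multilinear interpolation; monotonicity in a given input reduces to pairwise inequalities between adjacent grid parameters along that axis, which the "projections" mentioned in the abstract restore after each unconstrained gradient step. The sketch below is a hypothetical single 2x2 lattice on inputs in [0, 1]^2 that is monotone in its first input; it uses pair-averaging as the projection (the Euclidean projection for a single violated pair), which may differ in detail from the paper's projection.

```python
import tensorflow as tf

class Lattice2x2(tf.keras.layers.Layer):
    """2x2 lattice: multilinear interpolation of 4 corner parameters.

    Monotone in x1 iff theta[1, j] >= theta[0, j] for j in {0, 1}.
    A projection step restores these inequalities after each
    unconstrained gradient update (sketch only).
    """

    def build(self, input_shape):
        self.theta = self.add_weight(
            name="theta", shape=(2, 2), initializer="zeros")

    def call(self, x):
        # x: [batch, 2] with coordinates in [0, 1].
        x1, x2 = x[:, 0], x[:, 1]
        w1 = tf.stack([1.0 - x1, x1], axis=-1)   # [batch, 2]
        w2 = tf.stack([1.0 - x2, x2], axis=-1)   # [batch, 2]
        # Multilinear interpolation: sum_ij theta[i, j] * w1[:, i] * w2[:, j].
        return tf.einsum("bi,ij,bj->b", w1, self.theta, w2)

    def project_monotonic_in_x1(self):
        """Project theta so that theta[1, :] >= theta[0, :].

        Averaging a violated pair is the Euclidean projection of that
        pair onto the half-space theta[1, j] >= theta[0, j].
        """
        lo, hi = self.theta[0], self.theta[1]
        mean = 0.5 * (lo + hi)
        viol = lo > hi
        new_lo = tf.where(viol, mean, lo)
        new_hi = tf.where(viol, mean, hi)
        self.theta.assign(tf.stack([new_lo, new_hi], axis=0))
```

In training this mirrors the abstract's recipe: take a batched stochastic gradient step with the Adam optimizer on the unconstrained parameters, then call `project_monotonic_in_x1()` so the constraint holds before the next step.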
@misc{you2017lattice,
  author = {You, Seungil and Ding, David and Canini, Kevin and Pfeifer, Jan and Gupta, Maya},
  title  = {Deep Lattice Networks and Partial Monotonic Functions},
  year   = {2017},
  note   = {arXiv:1709.06680. 9 pages, NIPS 2017},
  url    = {http://arxiv.org/abs/1709.06680}
}