Asynchronous parallel optimization received substantial successes and
extensive attention recently. One of core theoretical questions is how much
speedup (or benefit) the asynchronous parallelization can bring us. This paper
provides a comprehensive and generic analysis to study the speedup property for
a broad range of asynchronous parallel stochastic algorithms from the zeroth
order to the first order methods. Our result recovers or improves existing
analysis on special cases, provides more insights for understanding the
asynchronous parallel behaviors, and suggests a novel asynchronous parallel
zeroth order method for the first time. Our experiments provide novel
applications including model blending problems using the proposed asynchronous
parallel zeroth order method.
%0 Generic
%1 lian2016comprehensive
%A Lian, Xiangru
%A Zhang, Huan
%A Hsieh, Cho-Jui
%A Huang, Yijun
%A Liu, Ji
%D 2016
%K ASGD optimization speedup
%T A Comprehensive Linear Speedup Analysis for Asynchronous Stochastic
Parallel Optimization from Zeroth-Order to First-Order
%U http://arxiv.org/abs/1606.00498
%X Asynchronous parallel optimization received substantial successes and
extensive attention recently. One of core theoretical questions is how much
speedup (or benefit) the asynchronous parallelization can bring us. This paper
provides a comprehensive and generic analysis to study the speedup property for
a broad range of asynchronous parallel stochastic algorithms from the zeroth
order to the first order methods. Our result recovers or improves existing
analysis on special cases, provides more insights for understanding the
asynchronous parallel behaviors, and suggests a novel asynchronous parallel
zeroth order method for the first time. Our experiments provide novel
applications including model blending problems using the proposed asynchronous
parallel zeroth order method.
@misc{lian2016comprehensive,
  abstract      = {Asynchronous parallel optimization received substantial successes and
extensive attention recently. One of core theoretical questions is how much
speedup (or benefit) the asynchronous parallelization can bring us. This paper
provides a comprehensive and generic analysis to study the speedup property for
a broad range of asynchronous parallel stochastic algorithms from the zeroth
order to the first order methods. Our result recovers or improves existing
analysis on special cases, provides more insights for understanding the
asynchronous parallel behaviors, and suggests a novel asynchronous parallel
zeroth order method for the first time. Our experiments provide novel
applications including model blending problems using the proposed asynchronous
parallel zeroth order method.},
  added-at      = {2018-02-10T15:34:37.000+0100},
  archiveprefix = {arXiv},
  author        = {Lian, Xiangru and Zhang, Huan and Hsieh, Cho-Jui and Huang, Yijun and Liu, Ji},
  biburl        = {https://www.bibsonomy.org/bibtex/27c0400f4bcc1cb5586f988ea6a0013f3/jk_itwm},
  description   = {1606.00498.pdf},
  eprint        = {1606.00498},
  interhash     = {0de45dc6cc2797098075a9cc5f3c3998},
  intrahash     = {7c0400f4bcc1cb5586f988ea6a0013f3},
  keywords      = {ASGD optimization speedup},
  note          = {cite arxiv:1606.00498},
  timestamp     = {2018-02-10T15:34:37.000+0100},
  title         = {A Comprehensive Linear Speedup Analysis for Asynchronous Stochastic
Parallel Optimization from Zeroth-Order to First-Order},
  url           = {http://arxiv.org/abs/1606.00498},
  year          = {2016},
}