We study differentially private (DP) algorithms for stochastic convex
optimization (SCO). In this problem the goal is to approximately minimize the
population loss given i.i.d. samples from a distribution over convex and
Lipschitz loss functions. A long line of existing work on private convex
optimization focuses on the empirical loss and derives asymptotically tight
bounds on the excess empirical loss. However, a significant gap exists in the
known bounds for the population loss. We show that, up to logarithmic factors,
the optimal excess population loss for DP algorithms is equal to the larger of
the optimal non-private excess population loss, and the optimal excess
empirical loss of DP algorithms. This implies that, contrary to intuition based
on private ERM, private SCO has asymptotically the same rate of $1/\sqrt{n}$ as
non-private SCO in the parameter regime most common in practice. The best
previous result in this setting gives a rate of $1/n^{1/4}$. Our approach builds
on existing differentially private algorithms and relies on the analysis of
algorithmic stability to ensure generalization.
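
To make the claimed bound concrete, here is a sketch of its form under standard assumptions (convex, $L$-Lipschitz losses over a domain of diameter $R$, $(\epsilon, \delta)$-DP, with constants and logarithmic factors suppressed; the precise statement is in the paper):

$$ \mathbb{E}\big[F(\hat{w})\big] - \min_{w} F(w) \;=\; \tilde{\Theta}\!\left( L R \left( \frac{1}{\sqrt{n}} + \frac{\sqrt{d \log(1/\delta)}}{\epsilon n} \right) \right), $$

where $F$ is the population loss, $n$ the sample size, and $d$ the dimension. Whenever $d \lesssim \epsilon^2 n$, which covers the parameter regime most common in practice, the privacy term is dominated by the statistical term and the non-private rate $1/\sqrt{n}$ is recovered.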
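
The algorithmic template underlying such guarantees is a differentially private first-order method. Below is a minimal sketch of noisy projected SGD, not the paper's exact algorithm (which tunes step sizes and the number of passes to obtain both privacy and the algorithmic stability used for generalization); the function names and parameters are illustrative, and calibrating noise_mult to a target $(\epsilon, \delta)$ budget (e.g., via a privacy accountant) is omitted.

import numpy as np

def dp_sgd(data, grad, project, dim, n_epochs=1, lr=0.1,
           clip=1.0, noise_mult=1.0, seed=0):
    # Noisy projected SGD sketch. grad(w, x) is a per-example gradient;
    # project(w) maps back onto the convex constraint set.
    rng = np.random.default_rng(seed)
    w = np.zeros(dim)
    for _ in range(n_epochs):
        for x in data:
            g = grad(w, x)
            # Clip to bound the per-example sensitivity by `clip`
            # (for L-Lipschitz losses, clip = L suffices without distortion).
            g = g * min(1.0, clip / (np.linalg.norm(g) + 1e-12))
            # Gaussian noise scaled to the sensitivity; the overall
            # (epsilon, delta) guarantee follows from composition over steps.
            g = g + rng.normal(0.0, noise_mult * clip, size=g.shape)
            w = project(w - lr * g)
    return w

# Usage sketch (hypothetical toy problem): mean estimation with
# f(w; x) = ||w - x||^2 / 2 over the unit Euclidean ball.
rng = np.random.default_rng(1)
data = 0.1 * rng.normal(size=(1000, 10))
w_hat = dp_sgd(data, grad=lambda w, x: w - x,
               project=lambda w: w / max(1.0, np.linalg.norm(w)),
               dim=10)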
%0 Journal Article
%1 bassily2019private
%A Bassily, Raef
%A Feldman, Vitaly
%A Talwar, Kunal
%A Thakurta, Abhradeep
%D 2019
%K convex differential-privacy optimization privacy stochastic
%T Private Stochastic Convex Optimization with Optimal Rates
%U http://arxiv.org/abs/1908.09970
%X We study differentially private (DP) algorithms for stochastic convex
optimization (SCO). In this problem the goal is to approximately minimize the
population loss given i.i.d. samples from a distribution over convex and
Lipschitz loss functions. A long line of existing work on private convex
optimization focuses on the empirical loss and derives asymptotically tight
bounds on the excess empirical loss. However, a significant gap exists in the
known bounds for the population loss. We show that, up to logarithmic factors,
the optimal excess population loss for DP algorithms is equal to the larger of
the optimal non-private excess population loss, and the optimal excess
empirical loss of DP algorithms. This implies that, contrary to intuition based
on private ERM, private SCO has asymptotically the same rate of $1/\sqrt{n}$ as
non-private SCO in the parameter regime most common in practice. The best
previous result in this setting gives a rate of $1/n^{1/4}$. Our approach builds
on existing differentially private algorithms and relies on the analysis of
algorithmic stability to ensure generalization.
@article{bassily2019private,
abstract = {We study differentially private (DP) algorithms for stochastic convex
optimization (SCO). In this problem the goal is to approximately minimize the
population loss given i.i.d. samples from a distribution over convex and
Lipschitz loss functions. A long line of existing work on private convex
optimization focuses on the empirical loss and derives asymptotically tight
bounds on the excess empirical loss. However, a significant gap exists in the
known bounds for the population loss. We show that, up to logarithmic factors,
the optimal excess population loss for DP algorithms is equal to the larger of
the optimal non-private excess population loss, and the optimal excess
empirical loss of DP algorithms. This implies that, contrary to intuition based
on private ERM, private SCO has asymptotically the same rate of $1/\sqrt{n}$ as
non-private SCO in the parameter regime most common in practice. The best
previous result in this setting gives a rate of $1/n^{1/4}$. Our approach builds
on existing differentially private algorithms and relies on the analysis of
algorithmic stability to ensure generalization.},
added-at = {2020-05-03T03:48:14.000+0200},
author = {Bassily, Raef and Feldman, Vitaly and Talwar, Kunal and Thakurta, Abhradeep},
biburl = {https://www.bibsonomy.org/bibtex/225df19c616aa18ffd96e88999d1fcc3c/kirk86},
description = {[1908.09970] Private Stochastic Convex Optimization with Optimal Rates},
interhash = {3aaf4420e32dd832fd26273a01624958},
intrahash = {25df19c616aa18ffd96e88999d1fcc3c},
keywords = {convex differential-privacy optimization privacy stochastic},
note = {cite arxiv:1908.09970},
timestamp = {2020-05-03T03:49:40.000+0200},
title = {Private Stochastic Convex Optimization with Optimal Rates},
url = {http://arxiv.org/abs/1908.09970},
year = 2019
}