I. Sason and S. Verdú (2015). arXiv:1508.00335. Comment: IEEE Trans. on Information Theory, vol. 62, no. 11, pp. 5973--6006, November 2016. This manuscript is identical to the journal paper, apart from some additional material, which includes Sections III-C and IV-F and three technical proofs.
DOI: 10.1109/TIT.2016.2603151
Abstract
This paper develops systematic approaches to obtain $f$-divergence
inequalities, dealing with pairs of probability measures defined on arbitrary
alphabets. Functional domination is one such approach, where special emphasis
is placed on finding the best possible constant upper bounding a ratio of
$f$-divergences. Another approach used for the derivation of bounds among
$f$-divergences relies on moment inequalities and the logarithmic-convexity
property, which results in tight bounds on the relative entropy and
Bhattacharyya distance in terms of $\chi^2$ divergences. A rich variety of
bounds are shown to hold under boundedness assumptions on the relative
information. Special attention is devoted to the total variation distance and
its relation to the relative information and relative entropy, including
"reverse Pinsker inequalities," as well as on the $E_\gamma$ divergence, which
generalizes the total variation distance. Pinsker's inequality is extended for
this type of $f$-divergence, a result which leads to an inequality linking the
relative entropy and relative information spectrum. Integral expressions of the
Rényi divergence in terms of the relative information spectrum are derived,
leading to bounds on the Rényi divergence in terms of either the variational
distance or relative entropy.
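
As a concrete illustration (not taken from the paper), the following Python sketch computes, for a toy pair of discrete distributions $P$ and $Q$, the total variation distance, relative entropy, $\chi^2$ divergence and $E_\gamma$ divergence, and numerically checks two classical inequalities of the kind surveyed here: Pinsker's inequality $\delta(P,Q) \le \sqrt{D(P\|Q)/2}$ (with $D$ in nats) and the bound $D(P\|Q) \le \log(1+\chi^2(P\|Q))$. The distributions p and q below are arbitrary illustrative choices.

import numpy as np

def total_variation(p, q):
    # delta(P, Q) = (1/2) * sum_x |p(x) - q(x)|, with values in [0, 1]
    return 0.5 * float(np.abs(p - q).sum())

def relative_entropy(p, q):
    # D(P||Q) in nats; assumes q(x) > 0 wherever p(x) > 0
    mask = p > 0
    return float(np.sum(p[mask] * np.log(p[mask] / q[mask])))

def chi_squared(p, q):
    # chi^2(P||Q) = sum_x (p(x) - q(x))^2 / q(x)
    return float(np.sum((p - q) ** 2 / q))

def e_gamma(p, q, gamma):
    # E_gamma(P||Q) = sum_x max(p(x) - gamma * q(x), 0);
    # E_1 coincides with the total variation distance
    return float(np.maximum(p - gamma * q, 0.0).sum())

p = np.array([0.5, 0.3, 0.2])   # arbitrary toy distributions
q = np.array([0.2, 0.4, 0.4])

tv, d, chi2 = total_variation(p, q), relative_entropy(p, q), chi_squared(p, q)
print("TV   =", tv)
print("D    =", d)
print("chi2 =", chi2)
print("Pinsker, TV <= sqrt(D/2):", tv <= np.sqrt(d / 2))               # True
print("D <= log(1 + chi2):      ", d <= np.log1p(chi2))                # True
print("E_1 equals TV:           ", np.isclose(e_gamma(p, q, 1.0), tv)) # True
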
@article{sason2015fdivergence,
author = {Sason, Igal and Verdú, Sergio},
doi = {10.1109/TIT.2016.2603151},
keywords = {divergences information readings theory},
note = {arXiv:1508.00335. Comment: IEEE Trans. on Information Theory, vol. 62, no. 11, pp. 5973--6006, November 2016. This manuscript is identical to the journal paper, apart from some additional material, which includes Sections III-C and IV-F and three technical proofs},
title = {$f$-divergence Inequalities},
url = {http://arxiv.org/abs/1508.00335},
year = 2015
}