We propose a simple method of construction of new families of $\phi$-divergences. This method, called convex standardization, is applicable to convex and concave functions $\psi(t)$ twice continuously differentiable in a neighborhood of $t=1$ with nonzero second derivative at the point $t=1$. Using this method we introduce several extensions of the LeCam, power, $\chi^a$ and Matusita divergences. The extended families are shown to connect smoothly these divergences with the Kullback divergence, or they connect various pairs of these particular divergences themselves. We also investigate the metric properties of divergences from these extended families.
Description
DML-CZ - Czech Digital Mathematics Library: Extensions of the parametric families of divergences used in statistical inference
%0 Journal Article
%1 kusextensions
%A Kůs, Václav
%A Morales, Domingo
%A Vajda, Igor
%D 2008
%I Institute of Information Theory and Automation AS CR
%J Kybernetika
%K divergences entropy information theory
%N 1
%P 95-112
%T Extensions of the parametric families of divergences used in statistical inference
%U https://dml.cz/handle/10338.dmlcz/135836
%V 44
%X We propose a simple method of construction of new families of $\phi$-divergences. This method, called convex standardization, is applicable to convex and concave functions $\psi(t)$ twice continuously differentiable in a neighborhood of $t=1$ with nonzero second derivative at the point $t=1$. Using this method we introduce several extensions of the LeCam, power, $\chi^a$ and Matusita divergences. The extended families are shown to connect smoothly these divergences with the Kullback divergence, or they connect various pairs of these particular divergences themselves. We also investigate the metric properties of divergences from these extended families.
@article{kusextensions,
  abstract    = {We propose a simple method of construction of new families of $\phi$-divergences. This method, called convex standardization, is applicable to convex and concave functions $\psi(t)$ twice continuously differentiable in a neighborhood of $t=1$ with nonzero second derivative at the point $t=1$. Using this method we introduce several extensions of the LeCam, power, $\chi^a$ and Matusita divergences. The extended families are shown to connect smoothly these divergences with the Kullback divergence, or they connect various pairs of these particular divergences themselves. We also investigate the metric properties of divergences from these extended families.},
  added-at    = {2019-12-11T13:01:22.000+0100},
  author      = {K{\r{u}}s, V{\'a}clav and Morales, Domingo and Vajda, Igor},
  biburl      = {https://www.bibsonomy.org/bibtex/25392ea498f5c327a504073a9dcc8009f/kirk86},
  description = {DML-CZ - Czech Digital Mathematics Library: Extensions of the parametric families of divergences used in statistical inference},
  id          = {135836},
  interhash   = {1ae2af1d68e48df19bd8ea2df803c65f},
  intrahash   = {5392ea498f5c327a504073a9dcc8009f},
  issn        = {0023-5954},
  journal     = {Kybernetika},
  keywords    = {divergences entropy information theory},
  number      = {1},
  pages       = {95--112},
  publisher   = {Institute of Information Theory and Automation AS CR},
  timestamp   = {2019-12-11T13:01:22.000+0100},
  title       = {Extensions of the parametric families of divergences used in statistical inference},
  url         = {https://dml.cz/handle/10338.dmlcz/135836},
  volume      = {44},
  year        = {2008}
}