This paper addresses the issue of model selection for hidden Markov models (HMMs). We generalize factorized asymptotic Bayesian inference (FAB), which has recently been developed for model selection on independent hidden variables (i.e., mixture models), to time-dependent hidden variables. As with FAB in mixture models, FAB for HMMs is derived as an iterative lower-bound maximization algorithm for a factorized information criterion (FIC). It inherits from FAB for mixture models several desirable properties for learning HMMs, such as asymptotic consistency of FIC with the marginal log-likelihood, a shrinkage effect for hidden state selection, and a monotonic increase of the FIC lower bound through the iterative optimization. Further, it has no tunable hyper-parameters, and thus its model selection process can be fully automated. Experimental results show that FAB outperforms state-of-the-art variational Bayesian and non-parametric Bayesian HMMs in terms of both model selection accuracy and computational efficiency.
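For context, FIC objectives in the FAB literature typically take the following form; this is a hedged sketch of the generic criterion for latent-variable models, not the exact HMM derivation in the paper, and the symbols ($D_k$, $z_n^k$, $D_\eta$, $H(q)$) are illustrative:

\[
\mathrm{FIC}(M) \approx \max_{q}\, \mathbb{E}_{q}\!\left[ \log p(x^N, z^N \mid \hat{\theta}, M) - \sum_{k=1}^{K} \frac{D_k}{2} \log\!\left( \sum_{n=1}^{N} z_n^k \right) \right] - \frac{D_\eta}{2} \log N + H(q)
\]

Here $q$ is a variational distribution over the hidden states, $\hat{\theta}$ is the maximum-likelihood estimate, $D_k$ is the dimensionality of the parameters tied to hidden state $k$, $D_\eta$ that of the remaining parameters, and $H(q)$ is the entropy of $q$. The state-wise penalty $\frac{D_k}{2}\log(\sum_n z_n^k)$ is the source of the shrinkage effect mentioned in the abstract: a state responsible for few observations pays a relatively heavy penalty, so its responsibilities are driven toward zero and redundant states are pruned automatically.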
@article{fujimaki_factorized_2012,
abstract = {This paper addresses the issue of model selection for hidden Markov models (HMMs). We generalize factorized asymptotic Bayesian inference (FAB), which has recently been developed for model selection on independent hidden variables (i.e., mixture models), to time-dependent hidden variables. As with FAB in mixture models, FAB for HMMs is derived as an iterative lower-bound maximization algorithm for a factorized information criterion (FIC). It inherits from FAB for mixture models several desirable properties for learning HMMs, such as asymptotic consistency of FIC with the marginal log-likelihood, a shrinkage effect for hidden state selection, and a monotonic increase of the FIC lower bound through the iterative optimization. Further, it has no tunable hyper-parameters, and thus its model selection process can be fully automated. Experimental results show that FAB outperforms state-of-the-art variational Bayesian and non-parametric Bayesian HMMs in terms of both model selection accuracy and computational efficiency.},
author = {Fujimaki, Ryohei and Hayashi, Kohei},
biburl = {https://www.bibsonomy.org/bibtex/2106279291789de8d33e5562bb3460af5/yourwelcome},
journal = {arXiv:1206.4679},
keywords = {Bayesian inference, machine learning, Markov chain Monte Carlo, model selection},
month = jun,
title = {Factorized {Asymptotic} {Bayesian} {Hidden} {Markov} {Models}},
url = {http://arxiv.org/abs/1206.4679},
urldate = {2012-07-01},
year = 2012
}