Multimodal Language Processing for Mobile Information Access
M. Johnston, S. Bangalore, A. Stent, G. Vasireddy, and P. Ehlen. Proceedings of ICSLP---Interspeech 2002: 7th International Conference on Spoken Language Processing, Denver, CO, USA, pages 2237–2240. (2002)
Abstract
Interfaces for mobile information access need to allow users flexibility in their choice of modes and interaction style in accordance with their preferences, the task at hand, and their physical and social environment. This paper describes the approach to multimodal language processing in MATCH (Multimodal Access To City Help), a mobile multimodal speech-pen interface to restaurant and subway information for New York City. Finite-state methods for multimodal integration and understanding enable users to interact using pen, speech, or dynamic combinations of the two, and a speech-act based multimodal dialogue manager enables mixed-initiative multimodal dialogue.
%0 Conference Paper
%1 JohnstonBangaloreEtAl02ICSLP
%A Johnston, Michael
%A Bangalore, Srinivas
%A Stent, Amanda
%A Vasireddy, Gunaranjan
%A Ehlen, Patrick
%B Proceedings of ICSLP---Interspeech 2002: 7th International Conference on Spoken Language Processing, Denver, CO, USA
%D 2002
%K v1205 isca paper ai language processing speech multimodal dialog mobile user interface interaction
%P 2237-2240
%T Multimodal Language Processing for Mobile Information Access
%U http://www.isca-speech.org/archive/icslp_2002/i02_2237.html
%X Interfaces for mobile information access need to allow users flexibility in their choice of modes and interaction style in accordance with their preferences, the task at hand, and their physical and social environment. This paper describes the approach to multimodal language processing in MATCH (Multimodal Access To City Help), a mobile multimodal speech-pen interface to restaurant and subway information for New York City. Finite-state methods for multimodal integration and understanding enable users to interact using pen, speech, or dynamic combinations of the two, and a speech-act based multimodal dialogue manager enables mixed-initiative multimodal dialogue.
@inproceedings{JohnstonBangaloreEtAl02ICSLP,
  abstract  = {Interfaces for mobile information access need to allow users flexibility in their choice of modes and interaction style in accordance with their preferences, the task at hand, and their physical and social environment. This paper describes the approach to multimodal language processing in MATCH (Multimodal Access To City Help), a mobile multimodal speech-pen interface to restaurant and subway information for New York City. Finite-state methods for multimodal integration and understanding enable users to interact using pen, speech, or dynamic combinations of the two, and a speech-act based multimodal dialogue manager enables mixed-initiative multimodal dialogue.},
  added-at  = {2012-05-30T10:48:41.000+0200},
  author    = {Johnston, Michael and Bangalore, Srinivas and Stent, Amanda and Vasireddy, Gunaranjan and Ehlen, Patrick},
  biburl    = {https://www.bibsonomy.org/bibtex/2aa52b9c0cfb8333cc5842f4247977808/flint63},
  booktitle = {Proceedings of {ICSLP}---{Interspeech} 2002: 7th International Conference on Spoken Language Processing, Denver, CO, USA},
  file      = {ISCA Archive:2000-04/JohnstonBangaloreEtAl02ICSLP.pdf:PDF},
  groups    = {public},
  interhash = {e29740f9056067bd3a14f30461f9e2dc},
  intrahash = {aa52b9c0cfb8333cc5842f4247977808},
  keywords  = {v1205 isca paper ai language processing speech multimodal dialog mobile user interface interaction},
  pages     = {2237--2240},
  timestamp = {2018-04-16T11:48:45.000+0200},
  title     = {Multimodal Language Processing for Mobile Information Access},
  url       = {http://www.isca-speech.org/archive/icslp_2002/i02_2237.html},
  username  = {flint63},
  year      = {2002},
}