The development of interfaces has been a technology-driven process. However, the newly developed multimodal interfaces are using recognition-based technologies that must interpret human speech, gesture, gaze, movement patterns, and other behavioral cues. As a result, the interface design requires a human-centered approach. In this paper we review the major approaches to multimodal Human Computer Interaction, giving an overview of the user and task modeling, and of the multimodal fusion. We highlight the challenges, open issues, and the future trends in multimodal interfaces research.
%0 Journal Article
%1 Sebe09jaise
%A Sebe, Nicu
%D 2009
%J Journal of Ambient Intelligence and Smart Environments
%K 01801 paper embedded ai user interface multimodal interaction analysis zzz.mmi
%N 1
%P 23--30
%R 10.3233/AIS-2009-0003
%T Multimodal Interfaces: Challenges and Perspectives
%V 1
%X The development of interfaces has been a technology-driven process. However, the newly developed multimodal interfaces are using recognition-based technologies that must interpret human speech, gesture, gaze, movement patterns, and other behavioral cues. As a result, the interface design requires a human-centered approach. In this paper we review the major approaches to multimodal Human Computer Interaction, giving an overview of the user and task modeling, and of the multimodal fusion. We highlight the challenges, open issues, and the future trends in multimodal interfaces research.
@article{Sebe09jaise,
  abstract  = {The development of interfaces has been a technology-driven process. However, the newly developed multimodal interfaces are using recognition-based technologies that must interpret human-speech, gesture, gaze, movement patterns, and other behavioral cues. As a result, the interface design requires a human-centered approach. In this paper we review the major approaches to multimodal Human Computer Interaction, giving an overview of the user and task modeling, and of the multimodal fusion. We highlight the challenges, open issues, and the future trends in multimodal interfaces research.},
  added-at  = {2018-02-15T15:34:42.000+0100},
  author    = {Sebe, Nicu},
  biburl    = {https://www.bibsonomy.org/bibtex/277dc1999b7f9eec6b8288d4f11d94a53/flint63},
  doi       = {10.3233/AIS-2009-0003},
  file      = {IOS MetaPress:2009/Sebe09jaise.pdf:PDF},
  groups    = {public},
  interhash = {251b657e180b5dbe0de93e000a9eaae7},
  intrahash = {77dc1999b7f9eec6b8288d4f11d94a53},
  issn      = {1876-1364},
  journal   = {Journal of Ambient Intelligence and Smart Environments},
  keywords  = {01801 paper embedded ai user interface multimodal interaction analysis zzz.mmi},
  number    = {1},
  pages     = {23--30},
  timestamp = {2018-04-16T12:05:21.000+0200},
  title     = {Multimodal Interfaces: Challenges and Perspectives},
  username  = {flint63},
  volume    = {1},
  year      = {2009},
}