Proper usability evaluations of spoken dialogue systems are costly and cumbersome to carry out. In this paper, we present a new approach for facilitating usability evaluations which is based on user error simulations. The idea is to replace real users with simulations derived from empirical observations of users' erroneous behavior. The simulated errors must cover both system-driven errors (e.g., due to poor speech recognition) and conceptual errors and slips of the user, because neither alone is predictive of perceived usability. The simulation is integrated into a workbench which produces reports of typical and rare errors, and which allows usability ratings to be predicted. If successful, this workbench will help designers in making choices between system versions and lower testing costs at early phases of development. Challenges to the approach are discussed and solutions proposed.
%0 Conference Paper
%1 MoellerEnglertEtAl06ICSLP
%A Möller, Sebastian
%A Englert, Roman
%A Engelbrecht, Klaus
%A Hafner, Verena
%A Jameson, Anthony
%A Oulasvirta, Antti
%A Raake, Alexander
%A Reithinger, Norbert
%B Proceedings of Interspeech 2006---ICSLP: 9th International Conference on Spoken Language Processing, Pittsburgh, PA, USA
%D 2006
%K v1205 isca paper ai language processing speech dialog user interface interaction requirements engineering test
%P 1786-1789
%T MeMo: Towards Automatic Usability Evaluation of Spoken Dialogue Services by User Error Simulations
%U http://www.isca-speech.org/archive/interspeech_2006/i06_1131.html
%X Proper usability evaluations of spoken dialogue systems are costly and cumbersome to carry out. In this paper, we present a new approach for facilitating usability evaluations which is based on user error simulations. The idea is to replace real users with simulations derived from empirical observations of users' erroneous behavior. The simulated errors must cover both system-driven errors (e.g., due to poor speech recognition) as well as conceptual errors and slips of the user, because neither alone is predictive of perceived usability. The simulation is integrated into a workbench which produces reports of typical and rare errors, and which allows usability ratings to be predicted. If successful, this workbench will help designers in making choices between system versions and lower testing costs at early phases of development. Challenges to the approach are discussed and solutions proposed.
@inproceedings{MoellerEnglertEtAl06ICSLP,
  abstract  = {Proper usability evaluations of spoken dialogue systems are costly and cumbersome to carry out. In this paper, we present a new approach for facilitating usability evaluations which is based on user error simulations. The idea is to replace real users with simulations derived from empirical observations of users' erroneous behavior. The simulated errors must cover both system-driven errors (e.g., due to poor speech recognition) as well as conceptual errors and slips of the user, because neither alone is predictive of perceived usability. The simulation is integrated into a workbench which produces reports of typical and rare errors, and which allows usability ratings to be predicted. If successful, this workbench will help designers in making choices between system versions and lower testing costs at early phases of development. Challenges to the approach are discussed and solutions proposed.},
  added-at  = {2012-05-30T10:51:06.000+0200},
  author    = {M{\"o}ller, Sebastian and Englert, Roman and Engelbrecht, Klaus and Hafner, Verena and Jameson, Anthony and Oulasvirta, Antti and Raake, Alexander and Reithinger, Norbert},
  biburl    = {https://www.bibsonomy.org/bibtex/221641ee29d093b26de4fe52bdd3e35f9/flint63},
  booktitle = {Proceedings of Interspeech 2006---ICSLP: 9th International Conference on Spoken Language Processing, Pittsburgh, PA, USA},
  file      = {ISCA Archive:2006/MoellerEnglertEtAl06ICSLP.pdf:PDF},
  groups    = {public},
  interhash = {0ab584fc76a42d724f9caffba978b520},
  intrahash = {21641ee29d093b26de4fe52bdd3e35f9},
  keywords  = {v1205 isca paper ai language processing speech dialog user interface interaction requirements engineering test},
  pages     = {1786--1789},
  timestamp = {2018-04-16T12:20:19.000+0200},
  title     = {{MeMo}: Towards Automatic Usability Evaluation of Spoken Dialogue Services by User Error Simulations},
  url       = {http://www.isca-speech.org/archive/interspeech_2006/i06_1131.html},
  username  = {flint63},
  year      = {2006}
}