In this paper we describe ongoing research that aims at the development of a generic demonstration platform for virtual prototype modeling by utilizing multimodal speech and gesture interactions in Virtual Reality. Particularly, we concentrate on two aspects. First, a knowledge-based approach for assembling CAD-based parts in VR is introduced. This includes a system to generate meta-information from geometric models
as well as accompanying task-level algorithms for virtual assembly. Second, a framework for modeling multimodal interaction using gesture and speech is presented that facilitates its generic adaptation to
scene-graph-based applications. The chosen decomposition of the required core modules is exemplified by an example of a typical object rotation interaction.
%0 Conference Paper
%1 biermann_etal:2002:VWerk
%A Biermann, Peter
%A Jung, Bernhard
%A Latoschik, Marc Erich
%A Wachsmuth, Ipke
%B Proceedings Fourth Virtual Reality International Conference (VRIC 2002), Laval, France
%D 2002
%K myown
%P 53-62
%T Virtuelle Werkstatt: A Platform for Multimodal Assembly in VR
%U http://www.techfak.uni-bielefeld.de/~ipke/download/VWerkDruck.pdf
%X In this paper we describe ongoing research that aims at the development of a generic demonstration platform for virtual prototype modeling by utilizing multimodal speech and gesture interactions in Virtual Reality. Particularly, we concentrate on two aspects. First, a knowledge-based approach for assembling CAD-based parts in VR is introduced. This includes a system to generate meta-information from geometric models
as well as accompanying task-level algorithms for virtual assembly. Second, a framework for modeling multimodal interaction using gesture and speech is presented that facilitates its generic adaptation to
scene-graph-based applications. The chosen decomposition of the required core modules is exemplified by an example of a typical object rotation interaction.
@inproceedings{biermann_etal:2002:VWerk,
  abstract  = {In this paper we describe ongoing research that aims at the development of a generic demonstration platform for virtual prototype modeling by utilizing multimodal speech and gesture interactions in Virtual Reality. Particularly, we concentrate on two aspects. First, a knowledge-based approach for assembling CAD-based parts in VR is introduced. This includes a system to generate meta-information from geometric models as well as accompanying task-level algorithms for virtual assembly. Second, a framework for modeling multimodal interaction using gesture and speech is presented that facilitates its generic adaptation to scene-graph-based applications. The chosen decomposition of the required core modules is exemplified by an example of a typical object rotation interaction.},
  added-at  = {2012-05-02T17:19:27.000+0200},
  author    = {Biermann, Peter and Jung, Bernhard and Latoschik, Marc Erich and Wachsmuth, Ipke},
  biburl    = {https://www.bibsonomy.org/bibtex/2cb9d2bf1202814194217e0193add6b19/hci-uwb},
  booktitle = {Proceedings Fourth Virtual Reality International Conference (VRIC 2002), Laval, France},
  interhash = {ce838931d445c891544915259528b93e},
  intrahash = {cb9d2bf1202814194217e0193add6b19},
  keywords  = {myown},
  pages     = {53--62},
  timestamp = {2024-05-06T17:22:37.000+0200},
  title     = {{Virtuelle Werkstatt}: A Platform for Multimodal Assembly in {VR}},
  url       = {http://www.techfak.uni-bielefeld.de/~ipke/download/VWerkDruck.pdf},
  year      = {2002},
}