In this work, a mobile learning tool for the Lorm-alphabet is developed. A person who is deaf-blind is lorming by finger spelling on another person's palm and fingers. We aim to provide an easy and anywhere to use Lorm trainer for caregivers, companions, and the general public. A robust gesture recognition utilizing zoning techniques and matching of symbol sequences has been developed for touch sensitive mobile devices. Tests with three users of the target group were conducted and qualitative evaluation of three experts was obtained. Overall, our development got positive feedback and a broad demand for the application was communicated. It is promising not only to support students of Lorm in their training process, but to widen the application of Lorm, therefore, diminishing social isolation of deaf-blind.
Description
Zoning-Based Gesture Recognition to Enable a Mobile Lorm Trainer | SpringerLink
%0 Conference Paper
%1 schmidt2016zoningbased
%A Schmidt, Michael
%A Bank, Cathleen
%A Weber, Gerhard
%B Computers Helping People with Special Needs
%C Cham
%D 2016
%E Miesenberger, Klaus
%E Bühler, Christian
%E Penaz, Petr
%I Springer
%K deaf-blind gestures lorm-alphabet multi-touch real recognition zoning
%P 479-486
%R 10.1007/978-3-319-41267-2_67
%T Zoning-Based Gesture Recognition to Enable a Mobile Lorm Trainer
%U https://link.springer.com/chapter/10.1007/978-3-319-41267-2_67
%X In this work, a mobile learning tool for the Lorm-alphabet is developed. A person who is deaf-blind is lorming by finger spelling on another person's palm and fingers. We aim to provide an easy and anywhere to use Lorm trainer for caregivers, companions, and the general public. A robust gesture recognition utilizing zoning techniques and matching of symbol sequences has been developed for touch sensitive mobile devices. Tests with three users of the target group were conducted and qualitative evaluation of three experts was obtained. Overall, our development got positive feedback and a broad demand for the application was communicated. It is promising not only to support students of Lorm in their training process, but to widen the application of Lorm, therefore, diminishing social isolation of deaf-blind.
%@ 978-3-319-41267-2
@inproceedings{schmidt2016zoningbased,
  abstract    = {In this work, a mobile learning tool for the Lorm-alphabet is developed. A person who is deaf-blind is lorming by finger spelling on another person's palm and fingers. We aim to provide an easy and anywhere to use Lorm trainer for caregivers, companions, and the general public. A robust gesture recognition utilizing zoning techniques and matching of symbol sequences has been developed for touch sensitive mobile devices. Tests with three users of the target group were conducted and qualitative evaluation of three experts was obtained. Overall, our development got positive feedback and a broad demand for the application was communicated. It is promising not only to support students of Lorm in their training process, but to widen the application of Lorm, therefore, diminishing social isolation of deaf-blind.},
  added-at    = {2019-11-14T07:22:53.000+0100},
  address     = {Cham},
  author      = {Schmidt, Michael and Bank, Cathleen and Weber, Gerhard},
  biburl      = {https://www.bibsonomy.org/bibtex/2e24804cda722ab2b306450bf5f1cc2d3/jpmor},
  booktitle   = {Computers Helping People with Special Needs},
  description = {Zoning-Based Gesture Recognition to Enable a Mobile Lorm Trainer | SpringerLink},
  doi         = {10.1007/978-3-319-41267-2_67},
  editor      = {Miesenberger, Klaus and B{\"u}hler, Christian and Penaz, Petr},
  interhash   = {cad6f1c780a3005e3baac17471f24554},
  intrahash   = {e24804cda722ab2b306450bf5f1cc2d3},
  isbn        = {978-3-319-41267-2},
  keywords    = {deaf-blind gestures lorm-alphabet multi-touch real recognition zoning},
  language    = {English},
  pages       = {479--486},
  publisher   = {Springer},
  school      = {Technische Universit{\"a}t Dresden (TUD)},
  timestamp   = {2020-10-07T13:36:50.000+0200},
  title       = {Zoning-Based Gesture Recognition to Enable a Mobile Lorm Trainer},
  url         = {https://link.springer.com/chapter/10.1007/978-3-319-41267-2_67},
  year        = 2016
}