With the growing importance of information technology in our everyday life, new types
of applications are appearing that require the understanding of information in a broad sense.
Information that includes affective and subjective content plays a major role not only in an
individual's cognitive processes but also in an individual's interaction with others. We identify
three key points to be considered when developing systems that capture affective information:
embodiment (experiencing physical reality), dynamics (mapping experience and emotional state
with its label) and adaptive interaction (conveying emotive response, responding to a recognized
emotional state). We present two computational systems that implement those principles: MOUE
(Model Of User Emotions) is an emotion recognition system that recognizes the user's emotion
from his/her facial expressions, and from it, adaptively builds semantic definitions of emotion
concepts using the user's feedback; MIKE (Multimedia Interactive Environment for Kansei
communication) is an interactive adaptive system that, along with the user, co-evolves a language
for communicating over subjective impressions.
%0 Journal Article
%1 loepucl3365
%A Bianchi-Berthouze, N.
%A Lisetti, C.L.
%D 2002
%J User Modeling and User-Adapted Interaction
%K affect embodiment emotion interaction perception subjective-experience
%N 1
%P 49--84
%T Modeling Multimodal Expression of User's Affective
Subjective Experience
%U http://eprints.ucl.ac.uk/3365/
%V 12
%X With the growing importance of information technology in our everyday life, new types
of applications are appearing that require the understanding of information in a broad sense.
Information that includes affective and subjective content plays a major role not only in an
individual's cognitive processes but also in an individual's interaction with others. We identify
three key points to be considered when developing systems that capture affective information:
embodiment (experiencing physical reality), dynamics (mapping experience and emotional state
with its label) and adaptive interaction (conveying emotive response, responding to a recognized
emotional state). We present two computational systems that implement those principles: MOUE
(Model Of User Emotions) is an emotion recognition system that recognizes the user's emotion
from his/her facial expressions, and from it, adaptively builds semantic definitions of emotion
concepts using the user's feedback; MIKE (Multimedia Interactive Environment for Kansei
communication) is an interactive adaptive system that, along with the user, co-evolves a language
for communicating over subjective impressions.
@article{loepucl3365,
  abstract    = {With the growing importance of information technology in our everyday life,
                 new types of applications are appearing that require the understanding of
                 information in a broad sense. Information that includes affective and subjective
                 content plays a major role not only in an individual's cognitive processes but
                 also in an individual's interaction with others. We identify three key points to
                 be considered when developing systems that capture affective information:
                 embodiment (experiencing physical reality), dynamics (mapping experience and
                 emotional state with its label) and adaptive interaction (conveying emotive
                 response, responding to a recognized emotional state). We present two
                 computational systems that implement those principles: MOUE (Model Of User
                 Emotions) is an emotion recognition system that recognizes the user's emotion
                 from his/her facial expressions, and from it, adaptively builds semantic
                 definitions of emotion concepts using the user's feedback; MIKE (Multimedia
                 Interactive Environment for Kansei communication) is an interactive adaptive
                 system that, along with the user, co-evolves a language for communicating over
                 subjective impressions.},
  added-at    = {2008-10-24T14:29:07.000+0200},
  author      = {Bianchi-Berthouze, N. and Lisetti, C. L.},
  biburl      = {https://www.bibsonomy.org/bibtex/20fdd80b4f4d80e96a516679a31919517/uclic},
  description = {UCLIC eprints as of October 2008},
  interhash   = {93172d0fe4e9c2a668c56e550d1ec863},
  intrahash   = {0fdd80b4f4d80e96a516679a31919517},
  journal     = {User Modeling and User-Adapted Interaction},
  keywords    = {affect embodiment emotion interaction perception subjective-experience},
  month       = feb,
  number      = 1,
  pages       = {49--84},
  timestamp   = {2008-10-24T14:39:25.000+0200},
  title       = {Modeling Multimodal Expression of User's Affective Subjective Experience},
  url         = {http://eprints.ucl.ac.uk/3365/},
  volume      = 12,
  year        = 2002
}