Subjective laboratory tests represent a proven, reliable approach
towards multimedia quality assessment. Nonetheless,
in certain cases novel progressive quality of experience (QoE)
assessment methods can lead to better results or enable test
execution in more cost-effective ways. In this respect, crowdsourcing
can be considered an emerging method enabling researchers
to better explore end-user quality perception when
requiring a large panel of subjects, particularly for Web application
usage scenarios. However, the crowdsourcing platform
chosen for recruiting participants can have an impact on
the experimental results. In this paper, we examine the platform’s
influence on QoE results by comparing MOS scores
of two otherwise identical subjective HD video quality experiments
executed on one paid and one non-paid crowdsourcing
platform.
%0 Conference Paper
%1 info3-inproceedings-2012-448
%A Gardlo, Bruno
%A Ries, Michal
%A Hoßfeld, Tobias
%A Schatz, Raimund
%B QoMEX 2012
%C Yarra Valley, Australia
%D 2012
%K myown
%T Microworkers vs. Facebook: The Impact of Crowdsourcing Platform Choice on Experimental Results
%X Subjective laboratory tests represent a proven, reliable approach
towards multimedia quality assessment. Nonetheless,
in certain cases novel progressive quality of experience (QoE)
assessment methods can lead to better results or enable test
execution in more cost-effective ways. In this respect, crowdsourcing
can be considered an emerging method enabling researchers
to better explore end-user quality perception when
requiring a large panel of subjects, particularly for Web application
usage scenarios. However, the crowdsourcing platform
chosen for recruiting participants can have an impact on
the experimental results. In this paper, we examine the platform’s
influence on QoE results by comparing MOS scores
of two otherwise identical subjective HD video quality experiments
executed on one paid and one non-paid crowdsourcing
platform.
@comment{Cleaned: month macro instead of numeric literal, BibTeX special
  character for the sharp-s in "Hoßfeld" (safe sorting under classic BibTeX),
  ASCII apostrophe in the abstract, and brace protection for the proper nouns
  Microworkers/Facebook and the acronym QoMEX so sentence-casing styles keep
  their capitalisation.}
@inproceedings{info3-inproceedings-2012-448,
  abstract  = {Subjective laboratory tests represent a proven, reliable approach
towards multimedia quality assessment. Nonetheless,
in certain cases novel progressive quality of experience (QoE)
assessment methods can lead to better results or enable test
execution in more cost-effective ways. In this respect, crowdsourcing
can be considered as emerging method enabling researchers
to better explore end-user quality perception when
requiring a large panel of subjects, particularly for Web application
usage scenarios. However, the crowdsourcing platform
chosen for recruiting participants can have an impact on
the experimental results. In this paper, we examine the platform's
influence on QoE results by comparing MOS scores
of two otherwise identical subjective HD video quality experiments
executed on one paid and one non-paid crowdsourcing
platform.},
  added-at  = {2016-03-10T17:38:01.000+0100},
  address   = {Yarra Valley, Australia},
  author    = {Gardlo, Bruno and Ries, Michal and Ho{\ss}feld, Tobias and Schatz, Raimund},
  biburl    = {https://www.bibsonomy.org/bibtex/2da60236cf82076ba435e15d33bfa9cd1/uniwue_info3},
  booktitle = {{QoMEX} 2012},
  interhash = {c9e00eccb7b0e2236e79b2e3ad11e20e},
  intrahash = {da60236cf82076ba435e15d33bfa9cd1},
  keywords  = {myown},
  month     = jul,
  timestamp = {2022-03-14T00:10:54.000+0100},
  title     = {{Microworkers} vs. {Facebook}: The Impact of Crowdsourcing Platform Choice on Experimental Results},
  year      = {2012}
}