Despite various studies on examining and predicting answer quality on generic social Q&A sites such as Yahoo! Answers, little is known about why answers on academic Q&A sites are voted on by scholars who follow the discussion threads to be high quality answers. Using 1021 answers obtained from the Q&A part of an academic social network site ResearchGate (RG), we firstly explored whether various web-captured features and human-coded features can be the critical factors that influence the peer-judged answer quality. Then using the identified critical features, we constructed three classification models to predict the peer-judged rating. Our results identify four main findings. Firstly, responders' authority, shorter response time and greater answer length are the critical features that positively associate with the peer-judged answer quality. Secondly, answers containing social elements are very likely to harm the peer-judged answer quality. Thirdly, an optimized SVM algorithm has an overwhelming advantage over other models in terms of accuracy. Finally, the prediction based on web-captured features had better performance when comparing to prediction on human-coded features. We hope that these interesting insights on ResearchGate's answer quality can help the further design of academic Q&A sites.
Beschreibung
Answer Quality Characteristics and Prediction on an Academic Q&A Site
%0 Conference Paper
%1 Li:2015:AQC:2740908.2742129
%A Li, Lei
%A He, Daqing
%A Jeng, Wei
%A Goodwin, Spencer
%A Zhang, Chengzhi
%B Proceedings of the 24th International Conference on World Wide Web
%C New York, NY, USA
%D 2015
%I ACM
%K social-information-access
%P 1453--1458
%R 10.1145/2740908.2742129
%T Answer Quality Characteristics and Prediction on an Academic Q&A Site: A Case Study on ResearchGate
%U http://doi.acm.org/10.1145/2740908.2742129
%X Despite various studies on examining and predicting answer quality on generic social Q&A sites such as Yahoo! Answers, little is known about why answers on academic Q&A sites are voted on by scholars who follow the discussion threads to be high quality answers. Using 1021 answers obtained from the Q&A part of an academic social network site ResearchGate (RG), we firstly explored whether various web-captured features and human-coded features can be the critical factors that influence the peer-judged answer quality. Then using the identified critical features, we constructed three classification models to predict the peer-judged rating. Our results identify four main findings. Firstly, responders' authority, shorter response time and greater answer length are the critical features that positively associate with the peer-judged answer quality. Secondly, answers containing social elements are very likely to harm the peer-judged answer quality. Thirdly, an optimized SVM algorithm has an overwhelming advantage over other models in terms of accuracy. Finally, the prediction based on web-captured features had better performance when comparing to prediction on human-coded features. We hope that these interesting insights on ResearchGate's answer quality can help the further design of academic Q&A sites.
%@ 978-1-4503-3473-0
@inproceedings{Li:2015:AQC:2740908.2742129,
  abstract    = {Despite various studies on examining and predicting answer quality on generic social Q\&A sites such as Yahoo! Answers, little is known about why answers on academic Q\&A sites are voted on by scholars who follow the discussion threads to be high quality answers. Using 1021 answers obtained from the Q\&A part of an academic social network site ResearchGate (RG), we firstly explored whether various web-captured features and human-coded features can be the critical factors that influence the peer-judged answer quality. Then using the identified critical features, we constructed three classification models to predict the peer-judged rating. Our results identify four main findings. Firstly, responders' authority, shorter response time and greater answer length are the critical features that positively associate with the peer-judged answer quality. Secondly, answers containing social elements are very likely to harm the peer-judged answer quality. Thirdly, an optimized SVM algorithm has an overwhelming advantage over other models in terms of accuracy. Finally, the prediction based on web-captured features had better performance when comparing to prediction on human-coded features. We hope that these interesting insights on ResearchGate's answer quality can help the further design of academic Q\&A sites.},
  acmid       = {2742129},
  added-at    = {2017-03-22T15:27:07.000+0100},
  address     = {New York, NY, USA},
  author      = {Li, Lei and He, Daqing and Jeng, Wei and Goodwin, Spencer and Zhang, Chengzhi},
  biburl      = {https://www.bibsonomy.org/bibtex/2e91051fa1f030f44a2c2dd6c4bf7a547/sdjavadi},
  booktitle   = {Proceedings of the 24th International Conference on World Wide Web},
  description = {Answer Quality Characteristics and Prediction on an Academic Q\&A Site},
  doi         = {10.1145/2740908.2742129},
  interhash   = {3ba1491da4000d1ce33ae577bae6de50},
  intrahash   = {e91051fa1f030f44a2c2dd6c4bf7a547},
  isbn        = {978-1-4503-3473-0},
  keywords    = {social-information-access},
  location    = {Florence, Italy},
  numpages    = {6},
  pages       = {1453--1458},
  publisher   = {ACM},
  series      = {WWW '15 Companion},
  timestamp   = {2017-03-22T15:27:07.000+0100},
  title       = {Answer Quality Characteristics and Prediction on an Academic {Q\&A} Site: A Case Study on {ResearchGate}},
  url         = {https://doi.org/10.1145/2740908.2742129},
  year        = 2015
}