C. Stadler, M. Martin, J. Lehmann, and S. Hellmann. Update Strategies for DBpedia Live. 6th Workshop on Scripting and Development for the Semantic Web Colocated with ESWC, Crete, Greece, (May 2010)
Abstract
Wikipedia is one of the largest public information spaces with a huge user community, which collaboratively works on the largest online encyclopedia. Their users add or edit up to 150 thousand wiki pages per day. The DBpedia project extracts RDF from Wikipedia and interlinks it with other knowledge bases. In the DBpedia live extraction mode, Wikipedia edits are instantly processed to update information in DBpedia. Due to the high number of edits and the growth of Wikipedia, the update process has to be very efficient and scalable. In this paper, we present different strategies to tackle this challenging problem and describe how we modified the DBpedia live extraction algorithm to work more efficiently.
%0 Conference Paper
%1 stadler-c-2010
%A Stadler, Claus
%A Martin, Michael
%A Lehmann, Jens
%A Hellmann, Sebastian
%B 6th Workshop on Scripting and Development for the Semantic Web Colocated with ESWC
%C Crete, Greece
%D 2010
%K event_eswc dbpedia hellmann 2010 group_aksw stadler Martin Lehmann
%T Update Strategies for DBpedia Live
%X Wikipedia is one of the largest public information spaces with a huge user community, which collaboratively works on the largest online encyclopedia. Their users add or edit up to 150 thousand wiki pages per day. The DBpedia project extracts RDF from Wikipedia and interlinks it with other knowledge bases. In the DBpedia live extraction mode, Wikipedia edits are instantly processed to update information in DBpedia. Due to the high number of edits and the growth of Wikipedia, the update process has to be very efficient and scalable. In this paper, we present different strategies to tackle this challenging problem and describe how we modified the DBpedia live extraction algorithm to work more efficiently.
@inproceedings{stadler-c-2010,
  abstract  = {Wikipedia is one of the largest public information spaces with a huge user community, which collaboratively works on the largest online encyclopedia. Their users add or edit up to 150 thousand wiki pages per day. The DBpedia project extracts RDF from Wikipedia and interlinks it with other knowledge bases. In the DBpedia live extraction mode, Wikipedia edits are instantly processed to update information in DBpedia. Due to the high number of edits and the growth of Wikipedia, the update process has to be very efficient and scalable. In this paper, we present different strategies to tackle this challenging problem and describe how we modified the DBpedia live extraction algorithm to work more efficiently.},
  added-at  = {2012-04-24T14:48:07.000+0200},
  address   = {Crete, Greece},
  author    = {Stadler, Claus and Martin, Michael and Lehmann, Jens and Hellmann, Sebastian},
  biburl    = {https://www.bibsonomy.org/bibtex/22db43004d40084386665c9aa1ca39e88/mgns},
  booktitle = {6th Workshop on Scripting and Development for the Semantic Web Colocated with {ESWC}},
  interhash = {c041143d07de2040fb8484eab1f72ee3},
  intrahash = {2db43004d40084386665c9aa1ca39e88},
  keywords  = {event_eswc dbpedia hellmann 2010 group_aksw stadler Martin Lehmann},
  month     = may,
  timestamp = {2012-04-24T14:58:08.000+0200},
  title     = {Update Strategies for {DBpedia Live}},
  year      = {2010}
}