C. Stadler, M. Martin, J. Lehmann, and S. Hellmann. Update Strategies for DBpedia Live. 6th Workshop on Scripting and Development for the Semantic Web, Colocated with ESWC 2010, 30th or 31st May 2010, Crete, Greece, (2010)
Abstract
Wikipedia is one of the largest public information spaces with a huge user community, which collaboratively works on the largest online encyclopedia. Their users add or edit up to 150 thousand wiki pages per day. The DBpedia project extracts RDF from Wikipedia and interlinks it with other knowledge bases. In the DBpedia live extraction mode, Wikipedia edits are instantly processed to update information in DBpedia. Due to the high number of edits and the growth of Wikipedia, the update process has to be very efficient and scalable. In this paper, we present different strategies to tackle this challenging problem and describe how we modified the DBpedia live extraction algorithm to work more efficiently.
%0 Conference Paper
%1 stadler-c-2010--a
%A Stadler, Claus
%A Martin, Michael
%A Lehmann, Jens
%A Hellmann, Sebastian
%B 6th Workshop on Scripting and Development for the Semantic Web Colocated with ESWC 2010 30th or 31st May, 2010 Crete, Greece
%D 2010
%K 2010 MOLE dbpedia event_sfsw group_aksw hellmann kilt lehmann martin ontowiki_eu peer-reviewed stadler
%T Update Strategies for DBpedia Live
%U http://jens-lehmann.org/files/2010/dbpedia_live_eswc.pdf
%X Wikipedia is one of the largest public information spaces with a huge user community, which collaboratively works on the largest online encyclopedia. Their users add or edit up to 150 thousand wiki pages per day. The DBpedia project extracts RDF from Wikipedia and interlinks it with other knowledge bases. In the DBpedia live extraction mode, Wikipedia edits are instantly processed to update information in DBpedia. Due to the high number of edits and the growth of Wikipedia, the update process has to be very efficient and scalable. In this paper, we present different strategies to tackle this challenging problem and describe how we modified the DBpedia live extraction algorithm to work more efficiently.
@inproceedings{stadler-c-2010--a,
  abstract      = {Wikipedia is one of the largest public information spaces with a huge user community, which collaboratively works on the largest online encyclopedia. Their users add or edit up to 150 thousand wiki pages per day. The DBpedia project extracts RDF from Wikipedia and interlinks it with other knowledge bases. In the DBpedia live extraction mode, Wikipedia edits are instantly processed to update information in DBpedia. Due to the high number of edits and the growth of Wikipedia, the update process has to be very efficient and scalable. In this paper, we present different strategies to tackle this challenging problem and describe how we modified the DBpedia live extraction algorithm to work more efficiently.},
  added-at      = {2024-06-18T09:45:57.000+0200},
  author        = {Stadler, Claus and Martin, Michael and Lehmann, Jens and Hellmann, Sebastian},
  bdsk-url-1    = {http://www.semanticscripting.org/SFSW2010/papers/sfsw2010_submission_5.pdf},
  biburl        = {https://www.bibsonomy.org/bibtex/210e6a257e4a1bb39aa8742a9a1c28039/aksw},
  booktitle     = {6th Workshop on Scripting and Development for the Semantic Web Colocated with {ESWC} 2010 30th or 31st May, 2010 Crete, Greece},
  date-modified = {2012-12-02 12:30:10 +0000},
  interhash     = {c041143d07de2040fb8484eab1f72ee3},
  intrahash     = {10e6a257e4a1bb39aa8742a9a1c28039},
  keywords      = {2010 MOLE dbpedia event_sfsw group_aksw hellmann kilt lehmann martin ontowiki_eu peer-reviewed stadler},
  owner         = {michael},
  timestamp     = {2024-06-18T09:45:57.000+0200},
  title         = {Update Strategies for {DBpedia} Live},
  url           = {http://jens-lehmann.org/files/2010/dbpedia_live_eswc.pdf},
  year          = {2010},
}