diff --git a/pyproject.toml b/pyproject.toml
index 05ed1d34..962ce3d3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta"
 [project]
 name = "qlever"
 description = "Script for using the QLever SPARQL engine."
-version = "0.5.9"
+version = "0.5.11"
 authors = [
     { name = "Hannah Bast", email = "bast@cs.uni-freiburg.de" }
 ]
diff --git a/src/qlever/Qleverfiles/Qleverfile.wikidata b/src/qlever/Qleverfiles/Qleverfile.wikidata
index 57f4338e..b7d3d0c8 100644
--- a/src/qlever/Qleverfiles/Qleverfile.wikidata
+++ b/src/qlever/Qleverfiles/Qleverfile.wikidata
@@ -20,10 +20,9 @@ DESCRIPTION      = Full Wikidata dump from ${GET_DATA_URL} (latest-all.ttl.bz2
 TEXT_DESCRIPTION = All English and German literals + all sentences from the English Wikipedia (version ${DATE_WIKIPEDIA}), use with FILTER KEYWORDS(...)
 
 [index]
-INPUT_FILES      = latest-all.ttl.bz2 latest-lexemes.ttl.bz2 wikipedia-abstracts.nt dcatap.nt
+INPUT_FILES      = latest-all.ttl.bz2 latest-lexemes.ttl.bz2 dcatap.nt
 MULTI_INPUT_JSON = [{ "cmd": "lbzcat -n 4 latest-all.ttl.bz2", "format": "ttl", "parallel": "true" },
                     { "cmd": "lbzcat -n 1 latest-lexemes.ttl.bz2", "format": "ttl", "parallel": "false" },
-                    { "cmd": "cat wikipedia-abstracts.nt", "format": "nt", "parallel": "false" },
                     { "cmd": "cat dcatap.nt", "format": "nt", "parallel": "false" }]
 SETTINGS_JSON    = { "languages-internal": [], "prefixes-external": [""], "locale": { "language": "en", "country": "US", "ignore-punctuation": true }, "ascii-prefixes-only": true, "num-triples-per-batch": 5000000 }
 STXXL_MEMORY     = 10G