diff --git a/README.md b/README.md index 1859efdd7..13856fcb0 100644 --- a/README.md +++ b/README.md @@ -67,3 +67,14 @@ should return no test failures. RDF4j and Jena are both a lot stricter than virt # Labeling queries If you want to add a label to a query please use [schema.org keyword](https://schema.org/keywords) + +# Testing the queries actually work + +The queries can be executed automatically on all endpoints they apply to using + +``` +mvn test -PallTests +``` + +This changes each query by adding a LIMIT 1 if no limit was set in the query. Then, +if there is a result, it is fetched. diff --git a/pom.xml b/pom.xml index fc8fbf80a..be97aaddf 100644 --- a/pom.xml +++ b/pom.xml @@ -26,7 +26,16 @@ 4.3.11 5.0.0 5.9.3 + SlowTest + + + allTests + + + + + @@ -119,6 +128,9 @@ maven-surefire-plugin 3.0.0-M6 + + ${project.tests.exclude} + diff --git a/src/test/java/swiss/sib/rdf/sparql/examples/CreateTestWithRDF4jMethods.java b/src/test/java/swiss/sib/rdf/sparql/examples/CreateTestWithRDF4jMethods.java index dd97b6356..ab255da72 100644 --- a/src/test/java/swiss/sib/rdf/sparql/examples/CreateTestWithRDF4jMethods.java +++ b/src/test/java/swiss/sib/rdf/sparql/examples/CreateTestWithRDF4jMethods.java @@ -9,9 +9,11 @@ import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Arrays; import java.util.HashSet; import java.util.Iterator; import java.util.Set; +import java.util.function.BiFunction; import java.util.stream.Stream; import org.eclipse.rdf4j.model.IRI; @@ -21,12 +23,22 @@ import org.eclipse.rdf4j.model.impl.LinkedHashModel; import org.eclipse.rdf4j.model.impl.SimpleValueFactory; import org.eclipse.rdf4j.model.vocabulary.SHACL; +import org.eclipse.rdf4j.query.BooleanQuery; +import org.eclipse.rdf4j.query.GraphQuery; +import org.eclipse.rdf4j.query.GraphQueryResult; import org.eclipse.rdf4j.query.MalformedQueryException; +import org.eclipse.rdf4j.query.Query; +import org.eclipse.rdf4j.query.QueryEvaluationException; +import 
org.eclipse.rdf4j.query.TupleQuery; +import org.eclipse.rdf4j.query.TupleQueryResult; import org.eclipse.rdf4j.query.algebra.Service; +import org.eclipse.rdf4j.query.algebra.Slice; import org.eclipse.rdf4j.query.algebra.helpers.AbstractQueryModelVisitor; import org.eclipse.rdf4j.query.parser.ParsedQuery; import org.eclipse.rdf4j.query.parser.QueryParser; import org.eclipse.rdf4j.query.parser.sparql.SPARQLParserFactory; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.sparql.SPARQLRepository; import org.eclipse.rdf4j.rio.RDFFormat; import org.eclipse.rdf4j.rio.RDFHandlerException; import org.eclipse.rdf4j.rio.RDFParseException; @@ -35,8 +47,25 @@ import org.eclipse.rdf4j.rio.helpers.StatementCollector; public class CreateTestWithRDF4jMethods { - private static final IRI DESCRIBE = SimpleValueFactory.getInstance().createIRI(SHACL.NAMESPACE, "describe"); + private static final IRI SHACL_DESCRIBE = SimpleValueFactory.getInstance().createIRI(SHACL.NAMESPACE, "describe"); + private static final IRI SCHEMA_TARGET = SimpleValueFactory.getInstance().createIRI("https://schema.org/","target"); + private enum QueryTypes { + ASK(SHACL.ASK, (rc, q) -> rc.prepareBooleanQuery(q)), + SELECT(SHACL.SELECT, (rc, q) -> rc.prepareTupleQuery(q)), + DESCRIBE(SHACL_DESCRIBE, (rc, q) -> rc.prepareGraphQuery(q)), + CONSTRUCT(SHACL.CONSTRUCT, (rc, q) -> rc.prepareGraphQuery(q)); + + + private final IRI iri; + private final BiFunction pq; + + QueryTypes(IRI iri, BiFunction pq) { + this.iri = iri; + this.pq = pq; + } + } + static void testQueryValid(Path p, String projectPrefixes) { assertTrue(Files.exists(p)); RDFParser rdfParser = Rio.createParser(RDFFormat.TURTLE); @@ -49,7 +78,7 @@ static void testQueryValid(Path p, String projectPrefixes) { } assertFalse(model.isEmpty()); QueryParser parser = new SPARQLParserFactory().getParser(); - Stream.of(SHACL.ASK, SHACL.SELECT, SHACL.CONSTRUCT, DESCRIBE) + Stream.of(SHACL.ASK, SHACL.SELECT, 
SHACL.CONSTRUCT, SHACL_DESCRIBE) .map(s -> model.getStatements(null, s, null)) .map(Iterable::iterator) .forEach(i -> testAllQueryStringsInModel(projectPrefixes, parser, i)); @@ -69,7 +98,7 @@ static Stream extractServiceEndpoints(Path p, String projectPrefixes) { assertFalse(model.isEmpty()); QueryParser parser = new SPARQLParserFactory().getParser(); - return Stream.of(SHACL.ASK, SHACL.SELECT, SHACL.CONSTRUCT, DESCRIBE).map( + return Stream.of(SHACL.ASK, SHACL.SELECT, SHACL.CONSTRUCT, SHACL_DESCRIBE).map( s -> model.getStatements(null, s, null)) .map(Iterable::iterator).map(i -> { return collectServiceIrisInFromOneExample(projectPrefixes, parser, i); @@ -121,4 +150,104 @@ private static void testQueryStringInValue(String projectPrefixes, QueryParser p } } + /** + * Generate a test case to make sure the query runs. + * @param p of file containing the query + * @param projectPrefixes all the prefixes that need to be added before the query + */ + public static void testQueryRuns(Path p, String projectPrefixes) { + RDFParser rdfParser = Rio.createParser(RDFFormat.TURTLE); + Model model = new LinkedHashModel(); + rdfParser.setRDFHandler(new StatementCollector(model)); + try (InputStream newInputStream = Files.newInputStream(p)) { + rdfParser.parse(newInputStream); + } catch (RDFParseException | RDFHandlerException | IOException e) { + fail(e); + } + assertFalse(model.isEmpty()); + QueryParser parser = new SPARQLParserFactory().getParser(); + Arrays.stream(QueryTypes.values()) + .forEach(s -> executeAllQueryStringsInModel(projectPrefixes, parser, model, s)); + } + + private static void executeAllQueryStringsInModel(String projectPrefixes, QueryParser parser, Model m, QueryTypes qt) { + Iterator i = m.getStatements(null, qt.iri, null).iterator(); + while (i.hasNext()) { + Statement next = i.next(); + Iterator targets = m.getStatements(next.getSubject(), SCHEMA_TARGET, null).iterator(); + while(targets.hasNext()) { + Statement targetStatement = targets.next(); + 
executeQueryStringInValue(projectPrefixes, parser, next.getObject(), targetStatement.getObject(), qt); + } + } + } + + + private static void executeQueryStringInValue(String projectPrefixes, QueryParser parser, Value obj, Value target, QueryTypes qt) { + assertNotNull(obj); + assertTrue(obj.isLiteral()); + String queryStr = projectPrefixes + obj.stringValue(); + + SPARQLRepository r = new SPARQLRepository(target.stringValue()); + try { + r.init(); + try (RepositoryConnection connection = r.getConnection()){ + queryStr = addLimitToQuery(projectPrefixes, parser, obj, qt, queryStr); + Query query = qt.pq.apply(connection, queryStr); + query.setMaxExecutionTime(45 * 60); + tryEvaluating(query); + } + } catch (MalformedQueryException qe) { + fail(qe.getMessage() + "\n" + queryStr, qe); + } catch (QueryEvaluationException qe) { + fail(qe.getMessage() + "\n" + queryStr, qe); + } + } + + private static void tryEvaluating(Query query) throws QueryEvaluationException { + if (query instanceof BooleanQuery bq) { + bq.evaluate(); + } + if (query instanceof TupleQuery tq) { + try (TupleQueryResult evaluate = tq.evaluate()){ + if (evaluate.hasNext()) { + evaluate.next(); + } + } + } + if (query instanceof GraphQuery gq) { + try (GraphQueryResult evaluate = gq.evaluate()){ + if (evaluate.hasNext()) { + evaluate.next(); + } + } + } + } + + private static String addLimitToQuery(String projectPrefixes, QueryParser parser, Value obj, QueryTypes qt, + String queryStr) { + //If it is not an ask we better insert a limit into the query. + if (qt != QueryTypes.ASK) { + HasLimit visitor = new HasLimit(); + ParsedQuery pq = parser.parseQuery(queryStr, "https://example.org/"); + pq.getTupleExpr().visit(visitor); + if (!visitor.hasLimit) { + //We can add the limit at the end. 
+ queryStr = projectPrefixes + obj.stringValue() + " LIMIT 1"; + } + } + return queryStr; + } + + private static class HasLimit extends AbstractQueryModelVisitor { + private boolean hasLimit = false; + + @Override + public void meet(Slice node) throws RuntimeException { + if (node.hasLimit()) { + hasLimit = true; + } + } + + } } diff --git a/src/test/java/swiss/sib/rdf/sparql/examples/ValidateSparqlExamplesTest.java b/src/test/java/swiss/sib/rdf/sparql/examples/ValidateSparqlExamplesTest.java index 82c202ca4..523f190b3 100644 --- a/src/test/java/swiss/sib/rdf/sparql/examples/ValidateSparqlExamplesTest.java +++ b/src/test/java/swiss/sib/rdf/sparql/examples/ValidateSparqlExamplesTest.java @@ -30,8 +30,8 @@ import org.apache.jena.rdf.model.Model; import org.apache.jena.riot.RDFDataMgr; import org.apache.jena.riot.RiotException; -import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.DynamicTest; +import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.TestFactory; import org.junit.jupiter.api.function.Executable; @@ -61,7 +61,7 @@ public Stream testAllWithRDF4j() throws URISyntaxException, IOExcep return testAll(tester); } - @Disabled + @Tag("SlowTest") @TestFactory public Stream testAllService() throws URISyntaxException, IOException { BiFunction> tester = (p, projectPrefixes) -> CreateTestWithRDF4jMethods @@ -92,6 +92,14 @@ public Stream testAllService() throws URISyntaxException, IOExcepti Function, Stream> test = iris -> iris.distinct().map(s -> DynamicTest.dynamicTest(s, () -> consumer.accept(s))); return testAllAsOne(tester, test); } + + @TestFactory + @Tag("SlowTest") + public Stream testAllQueriesRun() throws URISyntaxException, IOException { + BiFunction tester = (p, projectPrefixes) -> () -> CreateTestWithRDF4jMethods + .testQueryRuns(p, projectPrefixes); + return testAll(tester); + } @TestFactory public Stream testPrefixDeclarations() throws URISyntaxException, IOException {