
Commit

Run the queries with a new test mode: mvn test -PallTests
This executes the queries and takes a long time.
JervenBolleman committed Jun 5, 2024
1 parent eaeed70 commit 5e17645
Showing 4 changed files with 165 additions and 5 deletions.
11 changes: 11 additions & 0 deletions README.md
@@ -67,3 +67,14 @@ should return no test failures. RDF4j and Jena are both a lot stricter than virt
# Labeling queries

If you want to add a label to a query, please use a [schema.org keyword](https://schema.org/keywords).

# Testing that the queries actually work

The queries can be executed automatically on all endpoints they apply to using:

```
mvn test -PallTests
```

This rewrites each query to add a `LIMIT 1` if no limit was set in the query. If the query then yields a result, a single row is fetched. These query-execution tests are tagged `SlowTest` and are skipped by a plain `mvn test`; the `allTests` profile clears that exclusion so they run.
12 changes: 12 additions & 0 deletions pom.xml
@@ -26,7 +26,16 @@
<rdf4j.version>4.3.11</rdf4j.version>
<jena.version>5.0.0</jena.version>
<junit.version>5.9.3</junit.version>
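<!-- JUnit 5 tag excluded from the default test run; the allTests profile clears this so the slow query-execution tests also run. -->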
<project.tests.exclude>SlowTest</project.tests.exclude>
</properties>
<profiles>
<profile>
<id>allTests</id>
<properties>
<project.tests.exclude></project.tests.exclude>
</properties>
</profile>
</profiles>

<dependencyManagement>
<dependencies>
@@ -119,6 +128,9 @@
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<version>3.0.0-M6</version>
<configuration>
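<!-- Surefire skips tests tagged with the excluded group(s); by default this is SlowTest. -->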
<excludedGroups>${project.tests.exclude}</excludedGroups>
</configuration>
</plugin>
</plugins>
<testResources>
CreateTestWithRDF4jMethods.java
@@ -9,9 +9,11 @@
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.stream.Stream;

import org.eclipse.rdf4j.model.IRI;
@@ -21,12 +23,22 @@
import org.eclipse.rdf4j.model.impl.LinkedHashModel;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.model.vocabulary.SHACL;
import org.eclipse.rdf4j.query.BooleanQuery;
import org.eclipse.rdf4j.query.GraphQuery;
import org.eclipse.rdf4j.query.GraphQueryResult;
import org.eclipse.rdf4j.query.MalformedQueryException;
import org.eclipse.rdf4j.query.Query;
import org.eclipse.rdf4j.query.QueryEvaluationException;
import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.query.algebra.Service;
import org.eclipse.rdf4j.query.algebra.Slice;
import org.eclipse.rdf4j.query.algebra.helpers.AbstractQueryModelVisitor;
import org.eclipse.rdf4j.query.parser.ParsedQuery;
import org.eclipse.rdf4j.query.parser.QueryParser;
import org.eclipse.rdf4j.query.parser.sparql.SPARQLParserFactory;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.sparql.SPARQLRepository;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.rio.RDFHandlerException;
import org.eclipse.rdf4j.rio.RDFParseException;
@@ -35,8 +47,25 @@
import org.eclipse.rdf4j.rio.helpers.StatementCollector;

public class CreateTestWithRDF4jMethods {
private static final IRI DESCRIBE = SimpleValueFactory.getInstance().createIRI(SHACL.NAMESPACE, "describe");
private static final IRI SHACL_DESCRIBE = SimpleValueFactory.getInstance().createIRI(SHACL.NAMESPACE, "describe");
private static final IRI SCHEMA_TARGET = SimpleValueFactory.getInstance().createIRI("https://schema.org/","target");

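// Maps each query form's IRI in the example files to the RDF4j call that prepares that kind of query on a repository connection.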
private enum QueryTypes {
ASK(SHACL.ASK, (rc, q) -> rc.prepareBooleanQuery(q)),
SELECT(SHACL.SELECT, (rc, q) -> rc.prepareTupleQuery(q)),
DESCRIBE(SHACL_DESCRIBE, (rc, q) -> rc.prepareGraphQuery(q)),
CONSTRUCT(SHACL.CONSTRUCT, (rc, q) -> rc.prepareGraphQuery(q));


private final IRI iri;
private final BiFunction<RepositoryConnection, String, ? extends Query> pq;

QueryTypes(IRI iri, BiFunction<RepositoryConnection, String, ? extends Query> pq) {
this.iri = iri;
this.pq = pq;
}
}

static void testQueryValid(Path p, String projectPrefixes) {
assertTrue(Files.exists(p));
RDFParser rdfParser = Rio.createParser(RDFFormat.TURTLE);
@@ -49,7 +78,7 @@ static void testQueryValid(Path p, String projectPrefixes) {
}
assertFalse(model.isEmpty());
QueryParser parser = new SPARQLParserFactory().getParser();
Stream.of(SHACL.ASK, SHACL.SELECT, SHACL.CONSTRUCT, DESCRIBE)
Stream.of(SHACL.ASK, SHACL.SELECT, SHACL.CONSTRUCT, SHACL_DESCRIBE)
.map(s -> model.getStatements(null, s, null))
.map(Iterable::iterator)
.forEach(i -> testAllQueryStringsInModel(projectPrefixes, parser, i));
@@ -69,7 +98,7 @@ static Stream<String> extractServiceEndpoints(Path p, String projectPrefixes) {
assertFalse(model.isEmpty());
QueryParser parser = new SPARQLParserFactory().getParser();

return Stream.of(SHACL.ASK, SHACL.SELECT, SHACL.CONSTRUCT, DESCRIBE).map(
return Stream.of(SHACL.ASK, SHACL.SELECT, SHACL.CONSTRUCT, SHACL_DESCRIBE).map(
s -> model.getStatements(null, s, null))
.map(Iterable::iterator).map(i -> {
return collectServiceIrisInFromOneExample(projectPrefixes, parser, i);
@@ -121,4 +150,104 @@ private static void testQueryStringInValue(String projectPrefixes, QueryParser p
}
}

/**
* Generate a test case to make sure the query runs.
* @param p path of the file containing the query
* @param projectPrefixes all the prefixes that need to be added before the query
*/
public static void testQueryRuns(Path p, String projectPrefixes) {
RDFParser rdfParser = Rio.createParser(RDFFormat.TURTLE);
Model model = new LinkedHashModel();
rdfParser.setRDFHandler(new StatementCollector(model));
try (InputStream newInputStream = Files.newInputStream(p)) {
rdfParser.parse(newInputStream);
} catch (RDFParseException | RDFHandlerException | IOException e) {
fail(e);
}
assertFalse(model.isEmpty());
QueryParser parser = new SPARQLParserFactory().getParser();
Arrays.stream(QueryTypes.values())
.forEach(s -> executeAllQueryStringsInModel(projectPrefixes, parser, model, s));
}

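// For every query of the given type in the example file, look up its schema:target endpoint(s) and run the query against each of them.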
private static void executeAllQueryStringsInModel(String projectPrefixes, QueryParser parser, Model m, QueryTypes qt) {
Iterator<Statement> i = m.getStatements(null, qt.iri, null).iterator();
while (i.hasNext()) {
Statement next = i.next();
Iterator<Statement> targets = m.getStatements(next.getSubject(), SCHEMA_TARGET, null).iterator();
while(targets.hasNext()) {
Statement targetStatement = targets.next();
executeQueryStringInValue(projectPrefixes, parser, next.getObject(), targetStatement.getObject(), qt);
}
}
}


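// Prepares a single query string against one target SPARQL endpoint and fails the test if the query is malformed or its evaluation throws.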
private static void executeQueryStringInValue(String projectPrefixes, QueryParser parser, Value obj, Value target, QueryTypes qt) {
assertNotNull(obj);
assertTrue(obj.isLiteral());
String queryStr = projectPrefixes + obj.stringValue();

SPARQLRepository r = new SPARQLRepository(target.stringValue());
try {
r.init();
try (RepositoryConnection connection = r.getConnection()){
queryStr = addLimitToQuery(projectPrefixes, parser, obj, qt, queryStr);
Query query = qt.pq.apply(connection, queryStr);
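// Allow up to 45 minutes (the value is in seconds) before the query is aborted.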
query.setMaxExecutionTime(45 * 60);
tryEvaluating(query);
}
} catch (MalformedQueryException qe) {
fail(qe.getMessage() + "\n" + queryStr, qe);
} catch (QueryEvaluationException qe) {
fail(qe.getMessage() + "\n" + queryStr, qe);
}
}

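// Evaluates the prepared query; for SELECT and graph queries at most one result is fetched, just to prove the query runs.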
private static void tryEvaluating(Query query) throws QueryEvaluationException {
if (query instanceof BooleanQuery bq) {
bq.evaluate();
}
if (query instanceof TupleQuery tq) {
try (TupleQueryResult evaluate = tq.evaluate()){
if (evaluate.hasNext()) {
evaluate.next();
}
}
}
if (query instanceof GraphQuery gq) {
try (GraphQueryResult evaluate = gq.evaluate()){
if (evaluate.hasNext()) {
evaluate.next();
}
}
}
}

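// Parses the query and, if it does not already contain a LIMIT/OFFSET, appends LIMIT 1 so only a single result is requested.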
private static String addLimitToQuery(String projectPrefixes, QueryParser parser, Value obj, QueryTypes qt,
String queryStr) {
// If it is not an ASK query, insert a limit so that at most one result is fetched.
if (qt != QueryTypes.ASK) {
HasLimit visitor = new HasLimit();
ParsedQuery pq = parser.parseQuery(queryStr, "https://example.org/");
pq.getTupleExpr().visit(visitor);
if (!visitor.hasLimit) {
//We can add the limit at the end.
queryStr = projectPrefixes + obj.stringValue() + " LIMIT 1";
}
}
return queryStr;
}

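// Query-model visitor that records whether the parsed query already contains a LIMIT.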
private static class HasLimit extends AbstractQueryModelVisitor<RuntimeException> {
private boolean hasLimit = false;

@Override
public void meet(Slice node) throws RuntimeException {
if (node.hasLimit()) {
hasLimit = true;
}
}

}
}
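
As a minimal sketch of how the new check could be driven by hand, assuming an example file and a prefix block are available (the path, prefix string, and class below are hypothetical placeholders, not part of the repository), one might call the method directly:

```java
import java.nio.file.Path;

// Assumes this class lives in the same package as CreateTestWithRDF4jMethods.
public class RunOneExample {
	public static void main(String[] args) {
		// Hypothetical example file and prefix block; adjust to the repository layout.
		Path example = Path.of("examples", "uniprot", "001.ttl");
		String projectPrefixes = "PREFIX up: <http://purl.uniprot.org/core/>\n";
		// Runs the query in the file against every schema:target endpoint it declares,
		// adding LIMIT 1 when the query has no limit of its own.
		CreateTestWithRDF4jMethods.testQueryRuns(example, projectPrefixes);
	}
}
```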
@@ -30,8 +30,8 @@
import org.apache.jena.rdf.model.Model;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.jena.riot.RiotException;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.DynamicTest;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.TestFactory;
import org.junit.jupiter.api.function.Executable;

@@ -61,7 +61,7 @@ public Stream<DynamicTest> testAllWithRDF4j() throws URISyntaxException, IOExcep
return testAll(tester);
}

@Disabled
@Tag("SlowTest")
@TestFactory
public Stream<DynamicTest> testAllService() throws URISyntaxException, IOException {
BiFunction<Path, String, Stream<String>> tester = (p, projectPrefixes) -> CreateTestWithRDF4jMethods
@@ -92,6 +92,14 @@ public Stream<DynamicTest> testAllService() throws URISyntaxException, IOExcepti
Function<Stream<String>, Stream<DynamicTest>> test = iris -> iris.distinct().map(s -> DynamicTest.dynamicTest(s, () -> consumer.accept(s)));
return testAllAsOne(tester, test);
}

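// Runs every query against the endpoints it targets; tagged SlowTest so it only executes when the allTests profile is active.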
@TestFactory
@Tag("SlowTest")
public Stream<DynamicTest> testAllQueriesRun() throws URISyntaxException, IOException {
BiFunction<Path, String, Executable> tester = (p, projectPrefixes) -> () -> CreateTestWithRDF4jMethods
.testQueryRuns(p, projectPrefixes);
return testAll(tester);
}

@TestFactory
public Stream<DynamicTest> testPrefixDeclarations() throws URISyntaxException, IOException {
