Merge main (#4836)
hmottestad authored Nov 7, 2023
2 parents be1d764 + ffdfcb2 commit c747cf8
Showing 20 changed files with 1,047 additions and 57 deletions.
31 changes: 31 additions & 0 deletions .github/workflows/pr-verify.yml
@@ -124,6 +124,37 @@ jobs:
          repo: eclipse/rdf4j
          workflow_id: ${{ github.run_id }}
          access_token: ${{ github.token }}
  e2e:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up JDK
        uses: actions/setup-java@v1
        with:
          java-version: 11
      - name: Cache local Maven repository
        uses: actions/cache@v2
        with:
          path: ~/.m2/repository
          key: ${{ runner.os }}-jdk11-maven-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-jdk11-maven-
      - name: Install dependencies
        run: sudo apt-get update && sudo apt-get install -y libxml2-utils
      - name: Install Node.js
        uses: actions/setup-node@v3
        with:
          node-version: 18
      - name: Run end-to-end tests of RDF4J Server and Workbench
        working-directory: ./e2e
        run: ./run.sh
      - name: Cancel workflow on failure
        uses: vishnudxb/[email protected]
        if: failure()
        with:
          repo: eclipse/rdf4j
          workflow_id: ${{ github.run_id }}
          access_token: ${{ github.token }}
  copyright-check:
    runs-on: ubuntu-latest
    steps:
1 change: 1 addition & 0 deletions .gitignore
@@ -48,6 +48,7 @@ compliance/*/overlays
docker/ignore
/core/queryparser/sparql/JavaCC/javacc/
/scripts/temp/
org.eclipse.dash.licenses-1.0.2.jar
e2e/node_modules
e2e/playwright-report
e2e/test-results
@@ -103,6 +103,7 @@ public void addRequiredTE(TupleExpr te) {
*/
void clearRequiredTEs() {
requiredTEs.clear();
optionalTEs.clear();
}

public void addRequiredSP(Var subjVar, Var predVar, Var objVar) {
@@ -173,6 +174,18 @@ public void clear() {
public TupleExpr buildTupleExpr() {
TupleExpr result = buildJoinFromRequiredTEs();

result = buildOptionalTE(result);

for (ValueExpr constraint : constraints) {
result = new Filter(result, constraint);
}
return result;
}

/**
 * Applies the collected optional patterns, as LeftJoins, to the supplied tuple expression.
 */
public TupleExpr buildOptionalTE(TupleExpr result) {
for (Map.Entry<TupleExpr, List<ValueExpr>> entry : optionalTEs) {
List<ValueExpr> constraints = entry.getValue();
if (constraints != null && !constraints.isEmpty()) {
@@ -186,11 +199,6 @@ public TupleExpr buildTupleExpr() {
result = new LeftJoin(result, entry.getKey());
}
}

for (ValueExpr constraint : constraints) {
result = new Filter(result, constraint);
}

return result;
}
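
For context, buildTupleExpr() now assembles the pattern in three stages: the required patterns are joined, buildOptionalTE(...) layers a LeftJoin over the result for each OPTIONAL block, and the remaining FILTER constraints are applied last. The sketch below shows the algebra shape this ordering produces; it is a minimal illustration assuming the org.eclipse.rdf4j.query.algebra classes are on the classpath, and the variable names are illustrative rather than taken from this commit.

import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.query.algebra.Filter;
import org.eclipse.rdf4j.query.algebra.Join;
import org.eclipse.rdf4j.query.algebra.LeftJoin;
import org.eclipse.rdf4j.query.algebra.StatementPattern;
import org.eclipse.rdf4j.query.algebra.TupleExpr;
import org.eclipse.rdf4j.query.algebra.ValueConstant;
import org.eclipse.rdf4j.query.algebra.Var;

public class BuildOrderSketch {
    public static void main(String[] args) {
        // stage 1: required patterns are joined
        TupleExpr required = new Join(
                new StatementPattern(new Var("s"), new Var("p"), new Var("o")),
                new StatementPattern(new Var("s"), new Var("q"), new Var("v")));

        // stage 2: each OPTIONAL block becomes a LeftJoin around the result so far
        TupleExpr withOptional = new LeftJoin(required,
                new StatementPattern(new Var("s"), new Var("r"), new Var("w")));

        // stage 3: remaining FILTER constraints wrap the result last
        TupleExpr result = new Filter(withOptional,
                new ValueConstant(SimpleValueFactory.getInstance().createLiteral(true)));

        System.out.println(result);
    }
}

Building optionals before filters is also what the ASTBind change further down relies on, so that a BIND expression can see bindings introduced inside a preceding OPTIONAL.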

@@ -2376,6 +2376,8 @@ public Object visit(ASTBind node, Object data) throws VisitorException {

// get a tupleExpr that represents the basic graph pattern, so far.
TupleExpr arg = graphPattern.buildJoinFromRequiredTEs();
// apply optionals, if any
arg = graphPattern.buildOptionalTE(arg);

// check if alias is not previously used in the BGP
if (arg.getBindingNames().contains(alias)) {
@@ -266,6 +266,41 @@ public void testServiceGraphPatternChopping() {

}

@Test
public void testOtionalBindCoalesce() throws Exception {
StringBuilder qb = new StringBuilder();
qb.append("SELECT ?result \n");
qb.append("WHERE { \n");
qb.append("OPTIONAL {\n" +
" OPTIONAL {\n" +
" BIND(\"value\" AS ?foo)\n" +
" }\n" +
" BIND(COALESCE(?foo, \"no value\") AS ?result)\n" +
" }");
qb.append(" } ");

ASTQueryContainer qc = SyntaxTreeBuilder.parseQuery(qb.toString());
TupleExpr result = builder.visit(qc, null);
String expected = "Projection\n" +
" ProjectionElemList\n" +
" ProjectionElem \"result\"\n" +
" LeftJoin\n" +
" SingletonSet\n" +
" Extension\n" +
" LeftJoin\n" +
" SingletonSet\n" +
" Extension\n" +
" SingletonSet\n" +
" ExtensionElem (foo)\n" +
" ValueConstant (value=\"value\")\n" +
" ExtensionElem (result)\n" +
" Coalesce\n" +
" Var (name=foo)\n" +
" ValueConstant (value=\"no value\")\n";
assertEquals(expected.replace("\r\n", "\n"), result.toString().replace("\r\n", "\n"));
// System.out.println(result);
}

private class ServiceNodeFinder extends AbstractASTVisitor {

private final List<String> graphPatterns = new ArrayList<>();
@@ -54,7 +54,7 @@ class LmdbRecordIterator implements RecordIterator {

private final int dbi;

private boolean closed = false;
private volatile boolean closed = false;

private final MDBVal keyData;

@@ -72,6 +72,8 @@ class LmdbRecordIterator implements RecordIterator {

private final StampedLock txnLock;

private final Thread ownerThread = Thread.currentThread();

LmdbRecordIterator(Pool pool, TripleIndex index, boolean rangeSearch, long subj, long pred, long obj,
long context, boolean explicit, Txn txnRef) throws IOException {
this.pool = pool;
@@ -140,7 +142,7 @@ public long[] next() {
lastResult = mdb_cursor_get(cursor, keyData, valueData, MDB_SET_RANGE);
}
if (lastResult != 0) {
close();
closeInternal(false);
return null;
}
}
@@ -177,30 +179,45 @@ public long[] next() {
return quad;
}
}
close();
closeInternal(false);
return null;
} finally {
txnLock.unlockRead(stamp);
}
}

@Override
public void close() {
private void closeInternal(boolean maybeCalledAsync) {
if (!closed) {
long stamp;
if (maybeCalledAsync && ownerThread != Thread.currentThread()) {
stamp = txnLock.writeLock();
} else {
stamp = 0;
}
try {
mdb_cursor_close(cursor);
pool.free(keyData);
pool.free(valueData);
if (minKeyBuf != null) {
pool.free(minKeyBuf);
}
if (maxKey != null) {
pool.free(maxKeyBuf);
pool.free(maxKey);
if (!closed) {
mdb_cursor_close(cursor);
pool.free(keyData);
pool.free(valueData);
if (minKeyBuf != null) {
pool.free(minKeyBuf);
}
if (maxKey != null) {
pool.free(maxKeyBuf);
pool.free(maxKey);
}
}
} finally {
closed = true;
if (stamp != 0) {
txnLock.unlockWrite(stamp);
}
}
}
}

@Override
public void close() {
closeInternal(true);
}
}
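
The close-path change above makes closing the iterator safe from a thread other than the one iterating: closeInternal(true) takes the transaction write lock only when close() arrives from a non-owner thread, so the LMDB cursor and pooled buffers are not freed while next() still holds a read lock. Below is a stripped-down sketch of that guard pattern, assuming only java.util.concurrent; the class and field names are illustrative, not part of this commit.

import java.util.concurrent.locks.StampedLock;

class AsyncCloseGuard {
    private final StampedLock txnLock = new StampedLock();
    private final Thread ownerThread = Thread.currentThread();
    private volatile boolean closed = false;

    // maybeCalledAsync is true when invoked via the public close() method
    void closeInternal(boolean maybeCalledAsync) {
        if (closed) {
            return;
        }
        // only pay for the write lock when another thread may still be iterating
        long stamp = (maybeCalledAsync && ownerThread != Thread.currentThread())
                ? txnLock.writeLock()
                : 0;
        try {
            if (!closed) {
                // release native resources here (cursor, pooled key/value buffers, ...)
            }
        } finally {
            closed = true;
            if (stamp != 0) {
                txnLock.unlockWrite(stamp);
            }
        }
    }

    public void close() {
        closeInternal(true);
    }
}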
@@ -18,6 +18,8 @@
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.eclipse.rdf4j.common.exception.RDF4JException;
import org.eclipse.rdf4j.common.iteration.CloseableIteration;
import org.eclipse.rdf4j.common.transaction.IsolationLevels;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Model;
@@ -82,48 +84,34 @@ private Rdf4jShaclShapeGraphShapeSource(Repository repository, RepositoryConnect

private SailRepository forwardChain(RepositoryConnection shapesRepoConnection) {
try (var statements = shapesRepoConnection.getStatements(null, null, null, false, RDF4J.SHACL_SHAPE_GRAPH)) {
if (!statements.hasNext()) {
return new SailRepository(new MemoryStore());
}

SailRepository shapesRepoWithReasoning = new SailRepository(
SchemaCachingRDFSInferencer.fastInstantiateFrom(shaclVocabulary, new MemoryStore(), false));

try (var shapesRepoWithReasoningConnection = shapesRepoWithReasoning.getConnection()) {
shapesRepoWithReasoningConnection.begin(IsolationLevels.NONE);

shapesRepoWithReasoningConnection.add(statements);
enrichShapes(shapesRepoWithReasoningConnection);

shapesRepoWithReasoningConnection.commit();
}

return shapesRepoWithReasoning;

return forwardChain(statements);
}
}

private SailRepository forwardChain(SailConnection shapesSailConnection) {
try (var statements = shapesSailConnection.getStatements(null, null, null, false, RDF4J.SHACL_SHAPE_GRAPH)) {
if (!statements.hasNext()) {
return new SailRepository(new MemoryStore());
}

SailRepository shapesRepoWithReasoning = new SailRepository(
SchemaCachingRDFSInferencer.fastInstantiateFrom(shaclVocabulary, new MemoryStore(), false));
return forwardChain(statements);
}
}

try (var shapesRepoWithReasoningConnection = shapesRepoWithReasoning.getConnection()) {
shapesRepoWithReasoningConnection.begin(IsolationLevels.NONE);
private SailRepository forwardChain(CloseableIteration<? extends Statement> statements) {
if (!statements.hasNext()) {
return new SailRepository(new MemoryStore());
}

shapesRepoWithReasoningConnection.add(statements);
enrichShapes(shapesRepoWithReasoningConnection);
SailRepository shapesRepoWithReasoning = new SailRepository(
SchemaCachingRDFSInferencer.fastInstantiateFrom(shaclVocabulary, new MemoryStore(), false));

shapesRepoWithReasoningConnection.commit();
}
try (var shapesRepoWithReasoningConnection = shapesRepoWithReasoning.getConnection()) {
shapesRepoWithReasoningConnection.begin(IsolationLevels.NONE);

return shapesRepoWithReasoning;
shapesRepoWithReasoningConnection.add(statements);
enrichShapes(shapesRepoWithReasoningConnection);

shapesRepoWithReasoningConnection.commit();
}

return shapesRepoWithReasoning;
}

private static SchemaCachingRDFSInferencer createShaclVocabulary() {
@@ -47,10 +47,6 @@ private static Model resourceAsModel(String filename) {
static Stream<ShapesGraph> getRsxDataAndShapesGraphLink(SailConnection connection, Resource[] context) {
Stream<ShapesGraph> rsxDataAndShapesGraphLink;

List<? extends Statement> collect1 = connection.getStatements(null, null, null, false)
.stream()
.collect(Collectors.toList());

try (var stream = connection.getStatements(null, RDF.TYPE, RSX.DataAndShapesGraphLink, false, context)
.stream()) {

@@ -108,9 +104,6 @@ static Stream<ShapesGraph> getRsxDataAndShapesGraphLink(SailConnectio
}

static Stream<ShapesGraph> getRsxDataAndShapesGraphLink(RepositoryConnection connection, Resource[] context) {
List<? extends Statement> collect1 = connection.getStatements(null, null, null, false)
.stream()
.collect(Collectors.toList());

Stream<ShapesGraph> rsxDataAndShapesGraphLink;
try (var stream = connection.getStatements(null, RDF.TYPE, RSX.DataAndShapesGraphLink, false, context)
54 changes: 54 additions & 0 deletions docker/waitForDocker.sh
@@ -0,0 +1,54 @@
#!/usr/bin/env bash
# Initial sleep to make sure docker has started
sleep 5


while : ; do
STARTING=`docker ps -f "health=starting" --format "{{.Names}}"`
if [ -z "$STARTING" ] #if STARTING is empty
then
break
fi
echo "Waiting for containers to finish starting"
sleep 1
done


while : ; do

# Get cpu % from docker stats, remove '%' and then sum all the values into one number
CPU=`docker stats --no-stream --format "{{.CPUPerc}}" | awk '{gsub ( "[%]","" ) ; print $0 }' | awk '{s+=$1} END {print s}'`
echo "CPU: $CPU%"

# Do a floating point comparison; if $CPU is bigger than 15, WAIT will be 1
WAIT=`echo $CPU'>'15 | bc -l`
echo "WAIT (0/1): $WAIT"

sleep 1

# Get cpu % from docker stats, remove '%' and then sum all the values into one number
CPU2=`docker stats --no-stream --format "{{.CPUPerc}}" | awk '{gsub ( "[%]","" ) ; print $0 }' | awk '{s+=$1} END {print s}'`
echo "CPU2: $CPU2%"

# Do a floating point comparison; if $CPU2 is bigger than 15, WAIT2 will be 1
WAIT2=`echo $CPU2'>'15 | bc -l`
echo "WAIT2 (0/1): $WAIT2"

# Break from loop if both WAIT and WAIT2 are 0, i.e. the summed cpu usage stayed below 15% for two consecutive samples
[[ "$WAIT" -eq 0 ]] && [[ "$WAIT2" -eq 0 ]] && break

# Else sleep and loop
echo "Waiting for docker"
sleep 1

done

while : ; do
STARTING=`docker ps -f "health=starting" --format "{{.Names}}"`
if [ -z "$STARTING" ] #if STARTING is empty
then
break
fi
echo "Waiting for containers to finish starting"
sleep 1
done
27 changes: 27 additions & 0 deletions e2e/.github/workflows/playwright.yml
@@ -0,0 +1,27 @@
name: Playwright Tests
on:
  push:
    branches: [ main, master ]
  pull_request:
    branches: [ main, master ]
jobs:
  test:
    timeout-minutes: 60
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-node@v3
        with:
          node-version: 18
      - name: Install dependencies
        run: npm ci
      - name: Install Playwright Browsers
        run: npx playwright install --with-deps
      - name: Run Playwright tests
        run: npx playwright test
      - uses: actions/upload-artifact@v3
        if: always()
        with:
          name: playwright-report
          path: playwright-report/
          retention-days: 30