diff --git a/src/main/java/graphql/Directives.java b/src/main/java/graphql/Directives.java
index 50e570e14a..8e0c81661e 100644
--- a/src/main/java/graphql/Directives.java
+++ b/src/main/java/graphql/Directives.java
@@ -1,6 +1,7 @@
package graphql;
+import graphql.language.BooleanValue;
import graphql.language.Description;
import graphql.language.DirectiveDefinition;
import graphql.language.StringValue;
@@ -33,6 +34,7 @@ public class Directives {
private static final String SPECIFIED_BY = "specifiedBy";
private static final String DEPRECATED = "deprecated";
private static final String ONE_OF = "oneOf";
+ private static final String DEFER = "defer";
public static final String NO_LONGER_SUPPORTED = "No longer supported";
public static final DirectiveDefinition DEPRECATED_DIRECTIVE_DEFINITION;
@@ -40,7 +42,6 @@ public class Directives {
@ExperimentalApi
public static final DirectiveDefinition ONE_OF_DIRECTIVE_DEFINITION;
-
static {
DEPRECATED_DIRECTIVE_DEFINITION = DirectiveDefinition.newDirectiveDefinition()
.name(DEPRECATED)
@@ -77,6 +78,34 @@ public class Directives {
.build();
}
+ /**
+ * The @defer directive can be used to defer sending data for a fragment until later in the query.
+ * This is an opt-in directive that is not available unless it is explicitly put into the schema.
+ *
+ * This implementation is based on the state of the Defer/Stream proposal to the GraphQL specification,
+ * more specifically at the state of a particular commit of that proposal.
+ * (NOTE(review): the hyperlinks to the PR and commit were lost from this comment — restore them.)
+ *
+ * The execution behaviour should match what we get from running Apollo Server 4.9.5 with graphql-js v17.0.0-alpha.2
+ */
+ @ExperimentalApi
+ public static final GraphQLDirective DeferDirective = GraphQLDirective.newDirective()
+ .name(DEFER)
+ .description("This directive allows results to be deferred during execution")
+ .validLocations(FRAGMENT_SPREAD, INLINE_FRAGMENT)
+ .argument(newArgument()
+ .name("if")
+ .type(nonNull(GraphQLBoolean))
+ .description("Deferred behaviour is controlled by this argument")
+ .defaultValueLiteral(BooleanValue.newBooleanValue(true).build())
+ )
+ .argument(newArgument()
+ .name("label")
+ .type(GraphQLString)
+ .description("A unique label that represents the fragment being deferred")
+ )
+ .build();
+
public static final GraphQLDirective IncludeDirective = GraphQLDirective.newDirective()
.name("include")
.description("Directs the executor to include this field or fragment only when the `if` argument is true")
diff --git a/src/main/java/graphql/ExperimentalApi.java b/src/main/java/graphql/ExperimentalApi.java
index c405ec10cf..991932b2d4 100644
--- a/src/main/java/graphql/ExperimentalApi.java
+++ b/src/main/java/graphql/ExperimentalApi.java
@@ -12,9 +12,9 @@
/**
* This represents code that the graphql-java project considers experimental API and while our intention is that it will
- * progress to be {@link PublicApi}, its existence, signature of behavior may change between releases.
- *
- * In general unnecessary changes will be avoided but you should not depend on experimental classes being stable
+ * progress to be {@link PublicApi}, its existence, signature or behavior may change between releases.
+ *
+ * In general unnecessary changes will be avoided, but you should not depend on experimental classes being stable.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {CONSTRUCTOR, METHOD, TYPE, FIELD})
diff --git a/src/main/java/graphql/execution/FieldCollector.java b/src/main/java/graphql/execution/FieldCollector.java
index a6f1310a8c..8fae8a3afb 100644
--- a/src/main/java/graphql/execution/FieldCollector.java
+++ b/src/main/java/graphql/execution/FieldCollector.java
@@ -25,7 +25,7 @@
/**
* A field collector can iterate over field selection sets and build out the sub fields that have been selected,
- * expanding named and inline fragments as it goes.s
+ * expanding named and inline fragments as it goes.
*/
@Internal
public class FieldCollector {
diff --git a/src/main/java/graphql/normalized/ENFMerger.java b/src/main/java/graphql/normalized/ENFMerger.java
index 97d182a5f4..f1fbc37b95 100644
--- a/src/main/java/graphql/normalized/ENFMerger.java
+++ b/src/main/java/graphql/normalized/ENFMerger.java
@@ -19,7 +19,12 @@
@Internal
public class ENFMerger {
- public static void merge(ExecutableNormalizedField parent, List childrenWithSameResultKey, GraphQLSchema schema) {
+ public static void merge(
+ ExecutableNormalizedField parent,
+ List childrenWithSameResultKey,
+ GraphQLSchema schema,
+ boolean deferSupport
+ ) {
// they have all the same result key
// we can only merge the fields if they have the same field name + arguments + all children are the same
List> possibleGroupsToMerge = new ArrayList<>();
@@ -28,7 +33,7 @@ public static void merge(ExecutableNormalizedField parent, List group : possibleGroupsToMerge) {
for (ExecutableNormalizedField fieldInGroup : group) {
- if(field.getFieldName().equals(Introspection.TypeNameMetaFieldDef.getName())) {
+ if (field.getFieldName().equals(Introspection.TypeNameMetaFieldDef.getName())) {
addToGroup = true;
group.add(field);
continue overPossibleGroups;
@@ -63,8 +68,15 @@ && isFieldInSharedInterface(field, fieldInGroup, schema)
// patching the first one to contain more objects, remove all others
Iterator iterator = groupOfFields.iterator();
ExecutableNormalizedField first = iterator.next();
+
while (iterator.hasNext()) {
- parent.getChildren().remove(iterator.next());
+ ExecutableNormalizedField next = iterator.next();
+ parent.getChildren().remove(next);
+
+ if (deferSupport) {
+ // Move defer executions from removed field into the merged field's entry
+ first.addDeferExecutions(next.getDeferExecutions());
+ }
}
first.setObjectTypeNames(mergedObjects);
}
diff --git a/src/main/java/graphql/normalized/ExecutableNormalizedField.java b/src/main/java/graphql/normalized/ExecutableNormalizedField.java
index 41ddd594b3..3a8d36af08 100644
--- a/src/main/java/graphql/normalized/ExecutableNormalizedField.java
+++ b/src/main/java/graphql/normalized/ExecutableNormalizedField.java
@@ -3,12 +3,14 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import graphql.Assert;
+import graphql.ExperimentalApi;
import graphql.Internal;
import graphql.Mutable;
import graphql.PublicApi;
import graphql.collect.ImmutableKit;
import graphql.introspection.Introspection;
import graphql.language.Argument;
+import graphql.normalized.incremental.DeferExecution;
import graphql.schema.GraphQLFieldDefinition;
import graphql.schema.GraphQLInterfaceType;
import graphql.schema.GraphQLNamedOutputType;
@@ -63,6 +65,8 @@ public class ExecutableNormalizedField {
private final String fieldName;
private final int level;
+ // Mutable set on purpose: it is modified after creation
+ private final LinkedHashSet deferExecutions;
private ExecutableNormalizedField(Builder builder) {
this.alias = builder.alias;
@@ -74,6 +78,7 @@ private ExecutableNormalizedField(Builder builder) {
this.children = builder.children;
this.level = builder.level;
this.parent = builder.parent;
+ this.deferExecutions = builder.deferExecutions;
}
/**
@@ -129,6 +134,7 @@ private ExecutableNormalizedField(Builder builder) {
* NOT {@code Cat} or {@code Dog} as their respective implementations would say.
*
* @param schema - the graphql schema in play
+ *
* @return true if the field is conditional
*/
public boolean isConditional(@NotNull GraphQLSchema schema) {
@@ -255,6 +261,16 @@ public void clearChildren() {
this.children.clear();
}
+ @Internal
+ public void setDeferExecutions(Collection deferExecutions) {
+ this.deferExecutions.clear();
+ this.deferExecutions.addAll(deferExecutions);
+ }
+
+ public void addDeferExecutions(Collection deferExecutions) {
+ this.deferExecutions.addAll(deferExecutions);
+ }
+
/**
* All merged fields have the same name so this is the name of the {@link ExecutableNormalizedField}.
*
@@ -364,7 +380,6 @@ public String getSingleObjectTypeName() {
return objectTypeNames.iterator().next();
}
-
/**
* @return a helper method show field details
*/
@@ -461,6 +476,15 @@ public ExecutableNormalizedField getParent() {
return parent;
}
+ /**
+ * @return the {@link DeferExecution}s associated with this {@link ExecutableNormalizedField}.
+ * @see DeferExecution
+ */
+ @ExperimentalApi
+ public LinkedHashSet getDeferExecutions() {
+ return deferExecutions;
+ }
+
@Internal
public void replaceParent(ExecutableNormalizedField newParent) {
this.parent = newParent;
@@ -588,6 +612,8 @@ public static class Builder {
private LinkedHashMap resolvedArguments = new LinkedHashMap<>();
private ImmutableList astArguments = ImmutableKit.emptyList();
+ private LinkedHashSet deferExecutions = new LinkedHashSet<>();
+
private Builder() {
}
@@ -601,6 +627,7 @@ private Builder(ExecutableNormalizedField existing) {
this.children = new ArrayList<>(existing.children);
this.level = existing.getLevel();
this.parent = existing.getParent();
+ this.deferExecutions = existing.getDeferExecutions();
}
public Builder clearObjectTypesNames() {
@@ -656,6 +683,11 @@ public Builder parent(ExecutableNormalizedField parent) {
return this;
}
+ public Builder deferExecutions(LinkedHashSet deferExecutions) {
+ this.deferExecutions = deferExecutions;
+ return this;
+ }
+
public ExecutableNormalizedField build() {
return new ExecutableNormalizedField(this);
}
diff --git a/src/main/java/graphql/normalized/ExecutableNormalizedOperationFactory.java b/src/main/java/graphql/normalized/ExecutableNormalizedOperationFactory.java
index 36a07f4866..9367c1d58d 100644
--- a/src/main/java/graphql/normalized/ExecutableNormalizedOperationFactory.java
+++ b/src/main/java/graphql/normalized/ExecutableNormalizedOperationFactory.java
@@ -4,6 +4,8 @@
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
+import graphql.Assert;
+import graphql.ExperimentalApi;
import graphql.GraphQLContext;
import graphql.PublicApi;
import graphql.collect.ImmutableKit;
@@ -16,6 +18,7 @@
import graphql.execution.directives.QueryDirectives;
import graphql.execution.directives.QueryDirectivesImpl;
import graphql.introspection.Introspection;
+import graphql.language.Directive;
import graphql.language.Document;
import graphql.language.Field;
import graphql.language.FragmentDefinition;
@@ -25,7 +28,10 @@
import graphql.language.OperationDefinition;
import graphql.language.Selection;
import graphql.language.SelectionSet;
+import graphql.language.TypeName;
import graphql.language.VariableDefinition;
+import graphql.normalized.incremental.DeferExecution;
+import graphql.normalized.incremental.IncrementalNodes;
import graphql.schema.FieldCoordinates;
import graphql.schema.GraphQLCompositeType;
import graphql.schema.GraphQLFieldDefinition;
@@ -42,10 +48,15 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
+import java.util.Objects;
import java.util.Set;
+import java.util.function.Function;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
import static graphql.Assert.assertNotNull;
import static graphql.Assert.assertShouldNeverHappen;
@@ -56,6 +67,8 @@
import static graphql.util.FpKit.intersection;
import static java.util.Collections.singleton;
import static java.util.Collections.singletonList;
+import static java.util.stream.Collectors.toCollection;
+import static java.util.stream.Collectors.toSet;
/**
* This factory can create a {@link ExecutableNormalizedOperation} which represents what would be executed
@@ -69,19 +82,24 @@ public static class Options {
private final Locale locale;
private final int maxChildrenDepth;
+ private final boolean deferSupport;
+
private Options(GraphQLContext graphQLContext,
Locale locale,
- int maxChildrenDepth) {
+ int maxChildrenDepth,
+ boolean deferSupport) {
this.graphQLContext = graphQLContext;
this.locale = locale;
this.maxChildrenDepth = maxChildrenDepth;
+ this.deferSupport = deferSupport;
}
public static Options defaultOptions() {
return new Options(
GraphQLContext.getDefault(),
Locale.getDefault(),
- Integer.MAX_VALUE);
+ Integer.MAX_VALUE,
+ false);
}
/**
@@ -94,7 +112,7 @@ public static Options defaultOptions() {
* @return new options object to use
*/
public Options locale(Locale locale) {
- return new Options(this.graphQLContext, locale, this.maxChildrenDepth);
+ return new Options(this.graphQLContext, locale, this.maxChildrenDepth, this.deferSupport);
}
/**
@@ -107,7 +125,7 @@ public Options locale(Locale locale) {
* @return new options object to use
*/
public Options graphQLContext(GraphQLContext graphQLContext) {
- return new Options(graphQLContext, this.locale, this.maxChildrenDepth);
+ return new Options(graphQLContext, this.locale, this.maxChildrenDepth, this.deferSupport);
}
/**
@@ -119,7 +137,19 @@ public Options graphQLContext(GraphQLContext graphQLContext) {
* @return new options object to use
*/
public Options maxChildrenDepth(int maxChildrenDepth) {
- return new Options(this.graphQLContext, this.locale, maxChildrenDepth);
+ return new Options(this.graphQLContext, this.locale, maxChildrenDepth, this.deferSupport);
+ }
+
+ /**
+ * Controls whether defer execution is supported when creating instances of {@link ExecutableNormalizedOperation}.
+ *
+ * @param deferSupport true to enable support for defer
+ *
+ * @return new options object to use
+ */
+ @ExperimentalApi
+ public Options deferSupport(boolean deferSupport) {
+ return new Options(this.graphQLContext, this.locale, this.maxChildrenDepth, deferSupport);
}
/**
@@ -148,14 +178,25 @@ public Locale getLocale() {
public int getMaxChildrenDepth() {
return maxChildrenDepth;
}
+
+ /**
+ * @return whether support for defer is enabled
+ *
+ * @see #deferSupport(boolean)
+ */
+ @ExperimentalApi
+ public boolean getDeferSupport() {
+ return deferSupport;
+ }
}
+ private static final ConditionalNodes conditionalNodes = new ConditionalNodes();
+ private static final IncrementalNodes incrementalNodes = new IncrementalNodes();
+
private ExecutableNormalizedOperationFactory() {
}
- private static final ConditionalNodes conditionalNodes = new ConditionalNodes();
-
/**
* This will create a runtime representation of the graphql operation that would be executed
* in a runtime sense.
@@ -172,15 +213,44 @@ public static ExecutableNormalizedOperation createExecutableNormalizedOperation(
Document document,
String operationName,
CoercedVariables coercedVariableValues
+ ) {
+ return createExecutableNormalizedOperation(
+ graphQLSchema,
+ document,
+ operationName,
+ coercedVariableValues,
+ Options.defaultOptions());
+ }
+
+ /**
+ * This will create a runtime representation of the graphql operation that would be executed
+ * in a runtime sense.
+ *
+ * @param graphQLSchema the schema to be used
+ * @param document the {@link Document} holding the operation text
+ * @param operationName the operation name to use
+ * @param coercedVariableValues the coerced variables to use
+ * @param options the {@link Options} to use for parsing
+ *
+ * @return a runtime representation of the graphql operation.
+ */
+ public static ExecutableNormalizedOperation createExecutableNormalizedOperation(
+ GraphQLSchema graphQLSchema,
+ Document document,
+ String operationName,
+ CoercedVariables coercedVariableValues,
+ Options options
) {
NodeUtil.GetOperationResult getOperationResult = NodeUtil.getOperation(document, operationName);
- return createExecutableNormalizedOperation(
+ return new ExecutableNormalizedOperationFactoryImpl(
graphQLSchema,
getOperationResult.operationDefinition,
getOperationResult.fragmentsByName,
- coercedVariableValues
- );
+ coercedVariableValues,
+ null,
+ options
+ ).createNormalizedQueryImpl();
}
/**
@@ -355,13 +425,12 @@ private ExecutableNormalizedOperation createNormalizedQueryImpl() {
buildFieldWithChildren(
topLevel,
fieldAndAstParents,
- 1,
- options.getMaxChildrenDepth());
+ 1);
}
// getPossibleMergerList
for (PossibleMerger possibleMerger : possibleMergerList) {
List childrenWithSameResultKey = possibleMerger.parent.getChildrenWithSameResultKey(possibleMerger.resultKey);
- ENFMerger.merge(possibleMerger.parent, childrenWithSameResultKey, graphQLSchema);
+ ENFMerger.merge(possibleMerger.parent, childrenWithSameResultKey, graphQLSchema, options.deferSupport);
}
return new ExecutableNormalizedOperation(
operationDefinition.getOperation(),
@@ -374,7 +443,7 @@ private ExecutableNormalizedOperation createNormalizedQueryImpl() {
);
}
- private void captureMergedField(ExecutableNormalizedField enf, MergedField mergedFld) {
+ private void captureMergedField(ExecutableNormalizedField enf, MergedField mergedFld) {
// QueryDirectivesImpl is a lazy object and only computes itself when asked for
QueryDirectives queryDirectives = new QueryDirectivesImpl(mergedFld, graphQLSchema, coercedVariableValues.toMap(), options.getGraphQLContext(), options.getLocale());
normalizedFieldToQueryDirectives.put(enf, queryDirectives);
@@ -383,10 +452,9 @@ private void captureMergedField(ExecutableNormalizedField enf, MergedField merge
private void buildFieldWithChildren(ExecutableNormalizedField executableNormalizedField,
ImmutableList fieldAndAstParents,
- int curLevel,
- int maxLevel) {
- if (curLevel > maxLevel) {
- throw new AbortExecutionException("Maximum query depth exceeded " + curLevel + " > " + maxLevel);
+ int curLevel) {
+ if (curLevel > this.options.getMaxChildrenDepth()) {
+ throw new AbortExecutionException("Maximum query depth exceeded " + curLevel + " > " + this.options.getMaxChildrenDepth());
}
CollectNFResult nextLevel = collectFromMergedField(executableNormalizedField, fieldAndAstParents, curLevel + 1);
@@ -403,8 +471,7 @@ private void buildFieldWithChildren(ExecutableNormalizedField executableNormaliz
buildFieldWithChildren(childENF,
childFieldAndAstParents,
- curLevel + 1,
- maxLevel);
+ curLevel + 1);
}
}
@@ -445,7 +512,8 @@ public CollectNFResult collectFromMergedField(ExecutableNormalizedField executab
this.collectFromSelectionSet(fieldAndAstParent.field.getSelectionSet(),
collectedFields,
(GraphQLCompositeType) astParentType,
- possibleObjects
+ possibleObjects,
+ null
);
}
Map> fieldsByName = fieldsByResultKey(collectedFields);
@@ -470,7 +538,7 @@ public CollectNFResult collectFromOperation(GraphQLObjectType rootType) {
Set possibleObjects = ImmutableSet.of(rootType);
List collectedFields = new ArrayList<>();
- collectFromSelectionSet(operationDefinition.getSelectionSet(), collectedFields, rootType, possibleObjects);
+ collectFromSelectionSet(operationDefinition.getSelectionSet(), collectedFields, rootType, possibleObjects, null);
// group by result key
Map> fieldsByName = fieldsByResultKey(collectedFields);
ImmutableList.Builder resultNFs = ImmutableList.builder();
@@ -498,6 +566,10 @@ private void createNFs(ImmutableList.Builder nfListBu
normalizedFieldToAstFields.put(nf, new FieldAndAstParent(collectedField.field, collectedField.astTypeCondition));
}
nfListBuilder.add(nf);
+
+ if (this.options.deferSupport) {
+ nf.addDeferExecutions(fieldGroup.deferExecutions);
+ }
}
if (commonParentsGroups.size() > 1) {
possibleMergerList.add(new PossibleMerger(parent, resultKey));
@@ -533,42 +605,103 @@ private ExecutableNormalizedField createNF(CollectedFieldGroup collectedFieldGro
.build();
}
- private static class CollectedFieldGroup {
- Set objectTypes;
- Set fields;
+ private List groupByCommonParents(Collection fields) {
+ if (this.options.deferSupport) {
+ return groupByCommonParentsWithDeferSupport(fields);
+ } else {
+ return groupByCommonParentsNoDeferSupport(fields);
+ }
+ }
- public CollectedFieldGroup(Set fields, Set objectTypes) {
- this.fields = fields;
- this.objectTypes = objectTypes;
+ private List groupByCommonParentsNoDeferSupport(Collection fields) {
+ ImmutableSet.Builder objectTypes = ImmutableSet.builder();
+ for (CollectedField collectedField : fields) {
+ objectTypes.addAll(collectedField.objectTypes);
}
+ Set allRelevantObjects = objectTypes.build();
+ Map> groupByAstParent = groupingBy(fields, fieldAndType -> fieldAndType.astTypeCondition);
+ if (groupByAstParent.size() == 1) {
+ return singletonList(new CollectedFieldGroup(ImmutableSet.copyOf(fields), allRelevantObjects, null));
+ }
+ ImmutableList.Builder result = ImmutableList.builder();
+ for (GraphQLObjectType objectType : allRelevantObjects) {
+ Set relevantFields = filterSet(fields, field -> field.objectTypes.contains(objectType));
+ result.add(new CollectedFieldGroup(relevantFields, singleton(objectType), null));
+ }
+ return result.build();
}
- private List groupByCommonParents(Collection fields) {
+ private List groupByCommonParentsWithDeferSupport(Collection fields) {
ImmutableSet.Builder objectTypes = ImmutableSet.builder();
+ ImmutableSet.Builder deferExecutionsBuilder = ImmutableSet.builder();
+
for (CollectedField collectedField : fields) {
objectTypes.addAll(collectedField.objectTypes);
+
+ DeferExecution collectedDeferExecution = collectedField.deferExecution;
+
+ if (collectedDeferExecution != null) {
+ deferExecutionsBuilder.add(collectedDeferExecution);
+ }
}
+
Set allRelevantObjects = objectTypes.build();
+ Set deferExecutions = deferExecutionsBuilder.build();
+
+ Set duplicatedLabels = listDuplicatedLabels(deferExecutions);
+
+ if (!duplicatedLabels.isEmpty()) {
+ // Query validation should pick this up
+ Assert.assertShouldNeverHappen("Duplicated @defer labels are not allowed: [%s]", String.join(",", duplicatedLabels));
+ }
+
Map> groupByAstParent = groupingBy(fields, fieldAndType -> fieldAndType.astTypeCondition);
if (groupByAstParent.size() == 1) {
- return singletonList(new CollectedFieldGroup(ImmutableSet.copyOf(fields), allRelevantObjects));
+ return singletonList(new CollectedFieldGroup(ImmutableSet.copyOf(fields), allRelevantObjects, deferExecutions));
}
+
ImmutableList.Builder result = ImmutableList.builder();
for (GraphQLObjectType objectType : allRelevantObjects) {
Set relevantFields = filterSet(fields, field -> field.objectTypes.contains(objectType));
- result.add(new CollectedFieldGroup(relevantFields, singleton(objectType)));
+
+ Set filteredDeferExecutions = deferExecutions.stream()
+ .filter(filterExecutionsFromType(objectType))
+ .collect(toCollection(LinkedHashSet::new));
+
+ result.add(new CollectedFieldGroup(relevantFields, singleton(objectType), filteredDeferExecutions));
}
return result.build();
}
+ private static Predicate filterExecutionsFromType(GraphQLObjectType objectType) {
+ String objectTypeName = objectType.getName();
+ return deferExecution -> deferExecution.getPossibleTypes()
+ .stream()
+ .map(GraphQLObjectType::getName)
+ .anyMatch(objectTypeName::equals);
+ }
+
+ private Set listDuplicatedLabels(Collection deferExecutions) {
+ return deferExecutions.stream()
+ .map(DeferExecution::getLabel)
+ .filter(Objects::nonNull)
+ .collect(Collectors.groupingBy(Function.identity(), Collectors.counting()))
+ .entrySet()
+ .stream()
+ .filter(entry -> entry.getValue() > 1)
+ .map(Map.Entry::getKey)
+ .collect(toSet());
+ }
+
private void collectFromSelectionSet(SelectionSet selectionSet,
List result,
GraphQLCompositeType astTypeCondition,
- Set possibleObjects
+ Set possibleObjects,
+ DeferExecution deferExecution
) {
for (Selection> selection : selectionSet.getSelections()) {
if (selection instanceof Field) {
- collectField(result, (Field) selection, possibleObjects, astTypeCondition);
+ collectField(result, (Field) selection, possibleObjects, astTypeCondition, deferExecution);
} else if (selection instanceof InlineFragment) {
collectInlineFragment(result, (InlineFragment) selection, possibleObjects, astTypeCondition);
} else if (selection instanceof FragmentSpread) {
@@ -597,7 +730,13 @@ private void collectFragmentSpread(List result,
}
GraphQLCompositeType newAstTypeCondition = (GraphQLCompositeType) assertNotNull(this.graphQLSchema.getType(fragmentDefinition.getTypeCondition().getName()));
Set newPossibleObjects = narrowDownPossibleObjects(possibleObjects, newAstTypeCondition);
- collectFromSelectionSet(fragmentDefinition.getSelectionSet(), result, newAstTypeCondition, newPossibleObjects);
+
+ DeferExecution newDeferExecution = buildDeferExecution(
+ fragmentSpread.getDirectives(),
+ fragmentDefinition.getTypeCondition(),
+ newPossibleObjects);
+
+ collectFromSelectionSet(fragmentDefinition.getSelectionSet(), result, newAstTypeCondition, newPossibleObjects, newDeferExecution);
}
private void collectInlineFragment(List result,
@@ -616,13 +755,37 @@ private void collectInlineFragment(List result,
newPossibleObjects = narrowDownPossibleObjects(possibleObjects, newAstTypeCondition);
}
- collectFromSelectionSet(inlineFragment.getSelectionSet(), result, newAstTypeCondition, newPossibleObjects);
+
+ DeferExecution newDeferExecution = buildDeferExecution(
+ inlineFragment.getDirectives(),
+ inlineFragment.getTypeCondition(),
+ newPossibleObjects
+ );
+
+ collectFromSelectionSet(inlineFragment.getSelectionSet(), result, newAstTypeCondition, newPossibleObjects, newDeferExecution);
+ }
+
+ private DeferExecution buildDeferExecution(
+ List directives,
+ TypeName typeCondition,
+ Set newPossibleObjects) {
+ if(!options.deferSupport) {
+ return null;
+ }
+
+ return incrementalNodes.createDeferExecution(
+ this.coercedVariableValues.toMap(),
+ directives,
+ typeCondition,
+ newPossibleObjects
+ );
}
private void collectField(List result,
Field field,
Set possibleObjectTypes,
- GraphQLCompositeType astTypeCondition
+ GraphQLCompositeType astTypeCondition,
+ DeferExecution deferExecution
) {
if (!conditionalNodes.shouldInclude(field,
this.coercedVariableValues.toMap(),
@@ -634,7 +797,7 @@ private void collectField(List result,
if (possibleObjectTypes.isEmpty()) {
return;
}
- result.add(new CollectedField(field, possibleObjectTypes, astTypeCondition));
+ result.add(new CollectedField(field, possibleObjectTypes, astTypeCondition, deferExecution));
}
private Set narrowDownPossibleObjects(Set currentOnes,
@@ -689,11 +852,13 @@ private static class CollectedField {
Field field;
Set objectTypes;
GraphQLCompositeType astTypeCondition;
+ DeferExecution deferExecution;
- public CollectedField(Field field, Set objectTypes, GraphQLCompositeType astTypeCondition) {
+ public CollectedField(Field field, Set objectTypes, GraphQLCompositeType astTypeCondition, DeferExecution deferExecution) {
this.field = field;
this.objectTypes = objectTypes;
this.astTypeCondition = astTypeCondition;
+ this.deferExecution = deferExecution;
}
}
@@ -716,6 +881,18 @@ private FieldAndAstParent(Field field, GraphQLCompositeType astParentType) {
this.astParentType = astParentType;
}
}
+
+ private static class CollectedFieldGroup {
+ Set objectTypes;
+ Set fields;
+ Set deferExecutions;
+
+ public CollectedFieldGroup(Set fields, Set objectTypes, Set deferExecutions) {
+ this.fields = fields;
+ this.objectTypes = objectTypes;
+ this.deferExecutions = deferExecutions;
+ }
+ }
}
}
diff --git a/src/main/java/graphql/normalized/ExecutableNormalizedOperationToAstCompiler.java b/src/main/java/graphql/normalized/ExecutableNormalizedOperationToAstCompiler.java
index 7dc3d11f32..051d4655de 100644
--- a/src/main/java/graphql/normalized/ExecutableNormalizedOperationToAstCompiler.java
+++ b/src/main/java/graphql/normalized/ExecutableNormalizedOperationToAstCompiler.java
@@ -3,6 +3,8 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import graphql.Assert;
+import graphql.Directives;
+import graphql.ExperimentalApi;
import graphql.PublicApi;
import graphql.execution.directives.QueryDirectives;
import graphql.introspection.Introspection;
@@ -18,8 +20,10 @@
import graphql.language.OperationDefinition;
import graphql.language.Selection;
import graphql.language.SelectionSet;
+import graphql.language.StringValue;
import graphql.language.TypeName;
import graphql.language.Value;
+import graphql.normalized.incremental.DeferExecution;
import graphql.schema.GraphQLCompositeType;
import graphql.schema.GraphQLFieldDefinition;
import graphql.schema.GraphQLObjectType;
@@ -30,8 +34,10 @@
import java.util.ArrayList;
import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.stream.Collectors;
import static graphql.collect.ImmutableKit.emptyList;
@@ -46,7 +52,7 @@
/**
* This class can take a list of {@link ExecutableNormalizedField}s and compiling out a
* normalised operation {@link Document} that would represent how those fields
- * maybe executed.
+ * may be executed.
*
* This is essentially the reverse of {@link ExecutableNormalizedOperationFactory} which takes
* operation text and makes {@link ExecutableNormalizedField}s from it, this takes {@link ExecutableNormalizedField}s
@@ -82,7 +88,7 @@ public Map getVariables() {
/**
* This will compile an operation text {@link Document} with possibly variables from the given {@link ExecutableNormalizedField}s
- *
+ *
* The {@link VariablePredicate} is used called to decide if the given argument values should be made into a variable
* OR inlined into the operation text as a graphql literal.
*
@@ -99,21 +105,21 @@ public static CompilerResult compileToDocument(@NotNull GraphQLSchema schema,
@Nullable String operationName,
@NotNull List topLevelFields,
@Nullable VariablePredicate variablePredicate) {
- return compileToDocument(schema,operationKind,operationName,topLevelFields,Map.of(),variablePredicate);
+ return compileToDocument(schema, operationKind, operationName, topLevelFields, Map.of(), variablePredicate);
}
/**
* This will compile an operation text {@link Document} with possibly variables from the given {@link ExecutableNormalizedField}s
- *
+ *
* The {@link VariablePredicate} is used called to decide if the given argument values should be made into a variable
* OR inlined into the operation text as a graphql literal.
*
- * @param schema the graphql schema to use
- * @param operationKind the kind of operation
- * @param operationName the name of the operation to use
- * @param topLevelFields the top level {@link ExecutableNormalizedField}s to start from
- * @param normalizedFieldToQueryDirectives the map of normalized field to query directives
- * @param variablePredicate the variable predicate that decides if arguments turn into variables or not during compilation
+ * @param schema the graphql schema to use
+ * @param operationKind the kind of operation
+ * @param operationName the name of the operation to use
+ * @param topLevelFields the top level {@link ExecutableNormalizedField}s to start from
+ * @param normalizedFieldToQueryDirectives the map of normalized field to query directives
+ * @param variablePredicate the variable predicate that decides if arguments turn into variables or not during compilation
*
* @return a {@link CompilerResult} object
*/
@@ -123,10 +129,75 @@ public static CompilerResult compileToDocument(@NotNull GraphQLSchema schema,
@NotNull List topLevelFields,
@NotNull Map normalizedFieldToQueryDirectives,
@Nullable VariablePredicate variablePredicate) {
+ return compileToDocument(schema, operationKind, operationName, topLevelFields, normalizedFieldToQueryDirectives, variablePredicate, false);
+ }
+
+
+ /**
+ * This will compile an operation text {@link Document} with possibly variables from the given {@link ExecutableNormalizedField}s, with support for the experimental @defer directive.
+ *
+ * The {@link VariablePredicate} is called to decide if the given argument values should be made into a variable
+ * OR inlined into the operation text as a graphql literal.
+ *
+ * @param schema the graphql schema to use
+ * @param operationKind the kind of operation
+ * @param operationName the name of the operation to use
+ * @param topLevelFields the top level {@link ExecutableNormalizedField}s to start from
+ * @param variablePredicate the variable predicate that decides if arguments turn into variables or not during compilation
+ *
+ * @return a {@link CompilerResult} object
+ *
+ * @see ExecutableNormalizedOperationToAstCompiler#compileToDocument(GraphQLSchema, OperationDefinition.Operation, String, List, VariablePredicate)
+ */
+ @ExperimentalApi
+ public static CompilerResult compileToDocumentWithDeferSupport(@NotNull GraphQLSchema schema,
+ @NotNull OperationDefinition.Operation operationKind,
+ @Nullable String operationName,
+ @NotNull List topLevelFields,
+ @Nullable VariablePredicate variablePredicate
+ ) {
+ return compileToDocumentWithDeferSupport(schema, operationKind, operationName, topLevelFields, Map.of(), variablePredicate);
+ }
+
+ /**
+ * This will compile an operation text {@link Document} with possibly variables from the given {@link ExecutableNormalizedField}s, with support for the experimental @defer directive.
+ *
+ * The {@link VariablePredicate} is called to decide if the given argument values should be made into a variable
+ * OR inlined into the operation text as a graphql literal.
+ *
+ * @param schema the graphql schema to use
+ * @param operationKind the kind of operation
+ * @param operationName the name of the operation to use
+ * @param topLevelFields the top level {@link ExecutableNormalizedField}s to start from
+ * @param normalizedFieldToQueryDirectives the map of normalized field to query directives
+ * @param variablePredicate the variable predicate that decides if arguments turn into variables or not during compilation
+ *
+ * @return a {@link CompilerResult} object
+ *
+ * @see ExecutableNormalizedOperationToAstCompiler#compileToDocument(GraphQLSchema, OperationDefinition.Operation, String, List, Map, VariablePredicate)
+ */
+ @ExperimentalApi
+ public static CompilerResult compileToDocumentWithDeferSupport(@NotNull GraphQLSchema schema,
+ @NotNull OperationDefinition.Operation operationKind,
+ @Nullable String operationName,
+ @NotNull List topLevelFields,
+ @NotNull Map normalizedFieldToQueryDirectives,
+ @Nullable VariablePredicate variablePredicate
+ ) {
+ return compileToDocument(schema, operationKind, operationName, topLevelFields, normalizedFieldToQueryDirectives, variablePredicate, true);
+ }
+
+ private static CompilerResult compileToDocument(@NotNull GraphQLSchema schema,
+ @NotNull OperationDefinition.Operation operationKind,
+ @Nullable String operationName,
+ @NotNull List topLevelFields,
+ @NotNull Map normalizedFieldToQueryDirectives,
+ @Nullable VariablePredicate variablePredicate,
+ boolean deferSupport) {
GraphQLObjectType operationType = getOperationType(schema, operationKind);
VariableAccumulator variableAccumulator = new VariableAccumulator(variablePredicate);
- List> selections = subselectionsForNormalizedField(schema, operationType.getName(), topLevelFields, normalizedFieldToQueryDirectives, variableAccumulator);
+ List> selections = subselectionsForNormalizedField(schema, operationType.getName(), topLevelFields, normalizedFieldToQueryDirectives, variableAccumulator, deferSupport);
SelectionSet selectionSet = new SelectionSet(selections);
OperationDefinition.Builder definitionBuilder = OperationDefinition.newOperationDefinition()
@@ -148,7 +219,20 @@ private static List> subselectionsForNormalizedField(GraphQLSchema
@NotNull String parentOutputType,
List executableNormalizedFields,
@NotNull Map normalizedFieldToQueryDirectives,
- VariableAccumulator variableAccumulator) {
+ VariableAccumulator variableAccumulator,
+ boolean deferSupport) {
+ if (deferSupport) {
+ return subselectionsForNormalizedFieldWithDeferSupport(schema, parentOutputType, executableNormalizedFields, normalizedFieldToQueryDirectives, variableAccumulator);
+ } else {
+ return subselectionsForNormalizedFieldNoDeferSupport(schema, parentOutputType, executableNormalizedFields, normalizedFieldToQueryDirectives, variableAccumulator);
+ }
+ }
+
+ private static List> subselectionsForNormalizedFieldNoDeferSupport(GraphQLSchema schema,
+ @NotNull String parentOutputType,
+ List executableNormalizedFields,
+ @NotNull Map normalizedFieldToQueryDirectives,
+ VariableAccumulator variableAccumulator) {
ImmutableList.Builder> selections = ImmutableList.builder();
// All conditional fields go here instead of directly to selections, so they can be grouped together
@@ -157,13 +241,13 @@ private static List> subselectionsForNormalizedField(GraphQLSchema
for (ExecutableNormalizedField nf : executableNormalizedFields) {
if (nf.isConditional(schema)) {
- selectionForNormalizedField(schema, nf, normalizedFieldToQueryDirectives, variableAccumulator)
+ selectionForNormalizedField(schema, nf, normalizedFieldToQueryDirectives, variableAccumulator, false)
.forEach((objectTypeName, field) ->
fieldsByTypeCondition
.computeIfAbsent(objectTypeName, ignored -> new ArrayList<>())
.add(field));
} else {
- selections.add(selectionForNormalizedField(schema, parentOutputType, nf, normalizedFieldToQueryDirectives,variableAccumulator));
+ selections.add(selectionForNormalizedField(schema, parentOutputType, nf, normalizedFieldToQueryDirectives, variableAccumulator, false));
}
}
@@ -179,17 +263,89 @@ private static List> subselectionsForNormalizedField(GraphQLSchema
return selections.build();
}
+
+ private static List> subselectionsForNormalizedFieldWithDeferSupport(GraphQLSchema schema,
+ @NotNull String parentOutputType,
+ List executableNormalizedFields,
+ @NotNull Map normalizedFieldToQueryDirectives,
+ VariableAccumulator variableAccumulator) {
+ ImmutableList.Builder> selections = ImmutableList.builder();
+
+ // All conditional and deferred fields go here instead of directly to selections, so they can be grouped together
+ // in the same inline fragment in the output
+ //
+ Map> fieldsByFragmentDetails = new LinkedHashMap<>();
+
+ for (ExecutableNormalizedField nf : executableNormalizedFields) {
+ LinkedHashSet deferExecutions = nf.getDeferExecutions();
+
+ if (nf.isConditional(schema)) {
+ selectionForNormalizedField(schema, nf, normalizedFieldToQueryDirectives, variableAccumulator, true)
+ .forEach((objectTypeName, field) -> {
+ if (deferExecutions == null || deferExecutions.isEmpty()) {
+ fieldsByFragmentDetails
+ .computeIfAbsent(new ExecutionFragmentDetails(objectTypeName, null), ignored -> new ArrayList<>())
+ .add(field);
+ } else {
+ deferExecutions.forEach(deferExecution -> {
+ fieldsByFragmentDetails
+ .computeIfAbsent(new ExecutionFragmentDetails(objectTypeName, deferExecution), ignored -> new ArrayList<>())
+ .add(field);
+ });
+ }
+ });
+
+ } else if (deferExecutions != null && !deferExecutions.isEmpty()) {
+ Field field = selectionForNormalizedField(schema, parentOutputType, nf, normalizedFieldToQueryDirectives, variableAccumulator, true);
+
+ deferExecutions.forEach(deferExecution -> {
+ fieldsByFragmentDetails
+ .computeIfAbsent(new ExecutionFragmentDetails(null, deferExecution), ignored -> new ArrayList<>())
+ .add(field);
+ });
+ } else {
+ selections.add(selectionForNormalizedField(schema, parentOutputType, nf, normalizedFieldToQueryDirectives, variableAccumulator, true));
+ }
+ }
+
+ fieldsByFragmentDetails.forEach((typeAndDeferPair, fields) -> {
+ InlineFragment.Builder fragmentBuilder = newInlineFragment()
+ .selectionSet(selectionSet(fields));
+
+ if (typeAndDeferPair.typeName != null) {
+ TypeName typeName = newTypeName(typeAndDeferPair.typeName).build();
+ fragmentBuilder.typeCondition(typeName);
+ }
+
+ if (typeAndDeferPair.deferExecution != null) {
+ Directive.Builder deferBuilder = Directive.newDirective().name(Directives.DeferDirective.getName());
+
+ if (typeAndDeferPair.deferExecution.getLabel() != null) {
+ deferBuilder.argument(newArgument().name("label").value(StringValue.of(typeAndDeferPair.deferExecution.getLabel())).build());
+ }
+
+ fragmentBuilder.directive(deferBuilder.build());
+ }
+
+
+ selections.add(fragmentBuilder.build());
+ });
+
+ return selections.build();
+ }
+
/**
* @return Map of object type names to list of fields
*/
private static Map selectionForNormalizedField(GraphQLSchema schema,
ExecutableNormalizedField executableNormalizedField,
@NotNull Map normalizedFieldToQueryDirectives,
- VariableAccumulator variableAccumulator) {
+ VariableAccumulator variableAccumulator,
+ boolean deferSupport) {
Map groupedFields = new LinkedHashMap<>();
for (String objectTypeName : executableNormalizedField.getObjectTypeNames()) {
- groupedFields.put(objectTypeName, selectionForNormalizedField(schema, objectTypeName, executableNormalizedField,normalizedFieldToQueryDirectives, variableAccumulator));
+ groupedFields.put(objectTypeName, selectionForNormalizedField(schema, objectTypeName, executableNormalizedField, normalizedFieldToQueryDirectives, variableAccumulator, deferSupport));
}
return groupedFields;
@@ -202,7 +358,8 @@ private static Field selectionForNormalizedField(GraphQLSchema schema,
String objectTypeName,
ExecutableNormalizedField executableNormalizedField,
@NotNull Map normalizedFieldToQueryDirectives,
- VariableAccumulator variableAccumulator) {
+ VariableAccumulator variableAccumulator,
+ boolean deferSupport) {
final List> subSelections;
if (executableNormalizedField.getChildren().isEmpty()) {
subSelections = emptyList();
@@ -215,7 +372,8 @@ private static Field selectionForNormalizedField(GraphQLSchema schema,
fieldOutputType.getName(),
executableNormalizedField.getChildren(),
normalizedFieldToQueryDirectives,
- variableAccumulator
+ variableAccumulator,
+ deferSupport
);
}
@@ -230,9 +388,9 @@ private static Field selectionForNormalizedField(GraphQLSchema schema,
.alias(executableNormalizedField.getAlias())
.selectionSet(selectionSet)
.arguments(arguments);
- if(queryDirectives == null || queryDirectives.getImmediateAppliedDirectivesByField().isEmpty() ){
+ if (queryDirectives == null || queryDirectives.getImmediateAppliedDirectivesByField().isEmpty()) {
return builder.build();
- }else {
+ } else {
List directives = queryDirectives.getImmediateAppliedDirectivesByField().keySet().stream().flatMap(field -> field.getDirectives().stream()).collect(Collectors.toList());
return builder
.directives(directives)
@@ -326,4 +484,33 @@ private static GraphQLObjectType getOperationType(@NotNull GraphQLSchema schema,
return Assert.assertShouldNeverHappen("Unknown operation kind " + operationKind);
}
+ /**
+ * Represents important execution details that can be associated with a fragment.
+ */
+ private static class ExecutionFragmentDetails {
+ private final String typeName;
+ private final DeferExecution deferExecution;
+
+ public ExecutionFragmentDetails(String typeName, DeferExecution deferExecution) {
+ this.typeName = typeName;
+ this.deferExecution = deferExecution;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ ExecutionFragmentDetails that = (ExecutionFragmentDetails) o;
+ return Objects.equals(typeName, that.typeName) && Objects.equals(deferExecution, that.deferExecution);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(typeName, deferExecution);
+ }
+ }
}
diff --git a/src/main/java/graphql/normalized/incremental/DeferExecution.java b/src/main/java/graphql/normalized/incremental/DeferExecution.java
new file mode 100644
index 0000000000..71488b03ca
--- /dev/null
+++ b/src/main/java/graphql/normalized/incremental/DeferExecution.java
@@ -0,0 +1,129 @@
+package graphql.normalized.incremental;
+
+import graphql.ExperimentalApi;
+import graphql.schema.GraphQLObjectType;
+
+import javax.annotation.Nullable;
+import java.util.Set;
+
+/**
+ * Represents details about the defer execution that can be associated with a {@link graphql.normalized.ExecutableNormalizedField}.
+ *
+ * Taking this schema as an example:
+ *
+ * type Query { animal: Animal }
+ * interface Animal { name: String, age: Int }
+ * type Cat implements Animal { name: String, age: Int }
+ * type Dog implements Animal { name: String, age: Int }
+ *
+ *
+ * An ENF can be associated with multiple `DeferExecution`s
+ *
+ * For example, this query:
+ *
+ * query MyQuery {
+ * animal {
+ * ... @defer {
+ * name
+ * }
+ * ... @defer {
+ * name
+ * }
+ * }
+ * }
+ *
+ *
+ * Would result in one ENF (name) associated with 2 `DeferExecution` instances. This is relevant for the execution
+ * since the field would have to be included in 2 incremental payloads. (I know, there's some duplication here, but
+ * this is the current state of the spec. There are some discussions happening around de-duplicating data in scenarios
+ * like this, so this behaviour might change in the future).
+ *
+ * A `DeferExecution` may be associated with a list of possible types
+ *
+ * For example, this query:
+ *
+ * query MyQuery {
+ * animal {
+ * ... @defer {
+ * name
+ * }
+ * }
+ * }
+ *
+ * results in a `DeferExecution` with no label and possible types [Dog, Cat]
+ *
+ * A `DeferExecution` may be associated with specific types
+ * For example, this query:
+ *
+ * query MyQuery {
+ * animal {
+ * ... on Cat @defer {
+ * name
+ * }
+ * ... on Dog {
+ * name
+ * }
+ * }
+ * }
+ *
+ * results in a single ENF (name) associated with a `DeferExecution` with only "Cat" as a possible type. This means
+ * that, at execution time, `name` should be deferred only if the return object is a "Cat" (but not if it is a "Dog").
+ *
+ * ENFs associated with the same instance of `DeferExecution` will be resolved in the same incremental response payload
+ * For example, take these queries:
+ *
+ *
+ * query Query1 {
+ * animal {
+ * ... @defer {
+ * name
+ * }
+ * ... @defer {
+ * age
+ * }
+ * }
+ * }
+ *
+ * query Query2 {
+ * animal {
+ * ... @defer {
+ * name
+ * age
+ * }
+ * }
+ * }
+ *
+ *
+ * In `Query1`, the ENFs name and age are associated with different instances of `DeferExecution`. This means that,
+ * during execution, `name` and `age` can be delivered at different times (if name is resolved faster, it will be
+ * delivered first, and vice-versa).
+ * In `Query2` the fields will share the same instance of `DeferExecution`. This ensures that, at execution time, the
+ * fields are guaranteed to be delivered together. In other words, execution should wait until the slowest field resolves
+ * and deliver both fields at the same time.
+ *
+ */
+@ExperimentalApi
+public class DeferExecution {
+ private final String label;
+ private final Set possibleTypes;
+
+ public DeferExecution(@Nullable String label, Set possibleTypes) {
+ this.label = label;
+ this.possibleTypes = possibleTypes;
+ }
+
+ /**
+ * @return the label associated with this defer declaration
+ */
+ @Nullable
+ public String getLabel() {
+ return label;
+ }
+
+ /**
+ * @return the concrete object types that are associated with this defer execution
+ */
+ public Set getPossibleTypes() {
+ return possibleTypes;
+ }
+}
diff --git a/src/main/java/graphql/normalized/incremental/IncrementalNodes.java b/src/main/java/graphql/normalized/incremental/IncrementalNodes.java
new file mode 100644
index 0000000000..025b9333a0
--- /dev/null
+++ b/src/main/java/graphql/normalized/incremental/IncrementalNodes.java
@@ -0,0 +1,55 @@
+package graphql.normalized.incremental;
+
+import graphql.Assert;
+import graphql.GraphQLContext;
+import graphql.Internal;
+import graphql.execution.CoercedVariables;
+import graphql.execution.ValuesResolver;
+import graphql.language.Directive;
+import graphql.language.NodeUtil;
+import graphql.language.TypeName;
+import graphql.schema.GraphQLObjectType;
+
+import javax.annotation.Nullable;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+
+import static graphql.Directives.DeferDirective;
+
+@Internal
+public class IncrementalNodes {
+
+ public DeferExecution createDeferExecution(
+ Map variables,
+ List directives,
+ @Nullable TypeName targetType,
+ Set possibleTypes
+ ) {
+ Directive deferDirective = NodeUtil.findNodeByName(directives, DeferDirective.getName());
+
+ if (deferDirective != null) {
+ Map argumentValues = ValuesResolver.getArgumentValues(DeferDirective.getArguments(), deferDirective.getArguments(), CoercedVariables.of(variables), GraphQLContext.getDefault(), Locale.getDefault());
+
+ Object flag = argumentValues.get("if");
+ Assert.assertTrue(flag instanceof Boolean, () -> String.format("The '%s' directive MUST have a value for the 'if' argument", DeferDirective.getName()));
+
+ if (!((Boolean) flag)) {
+ return null;
+ }
+
+ Object label = argumentValues.get("label");
+
+ if (label == null) {
+ return new DeferExecution(null, possibleTypes);
+ }
+
+ Assert.assertTrue(label instanceof String, () -> String.format("The 'label' argument from the '%s' directive MUST contain a String value", DeferDirective.getName()));
+
+ return new DeferExecution((String) label, possibleTypes);
+ }
+
+ return null;
+ }
+}
diff --git a/src/test/groovy/graphql/execution/FieldCollectorTest.groovy b/src/test/groovy/graphql/execution/FieldCollectorTest.groovy
index 1fa8f360e8..6460512543 100644
--- a/src/test/groovy/graphql/execution/FieldCollectorTest.groovy
+++ b/src/test/groovy/graphql/execution/FieldCollectorTest.groovy
@@ -21,7 +21,7 @@ class FieldCollectorTest extends Specification {
type Query {
bar1: String
bar2: String
- }
+ }
""")
def objectType = schema.getType("Query") as GraphQLObjectType
FieldCollector fieldCollector = new FieldCollector()
@@ -48,12 +48,12 @@ class FieldCollectorTest extends Specification {
type Query{
bar1: String
bar2: Test
- }
+ }
interface Test {
- fieldOnInterface: String
- }
+ fieldOnInterface: String
+ }
type TestImpl implements Test {
- fieldOnInterface: String
+ fieldOnInterface: String
}
""")
def object = schema.getType("TestImpl") as GraphQLObjectType
@@ -73,6 +73,136 @@ class FieldCollectorTest extends Specification {
then:
result.getSubField('fieldOnInterface').getFields() == [interfaceField]
+ }
+
+ def "collect fields that are merged together - one of the fields is on an inline fragment "() {
+ def schema = TestUtil.schema("""
+ type Query {
+ echo: String
+ }
+""")
+
+ Document document = new Parser().parseDocument("""
+ {
+ echo
+ ... on Query {
+ echo
+ }
+ }
+
+""")
+
+ def object = schema.getType("TestImpl") as GraphQLObjectType
+ FieldCollector fieldCollector = new FieldCollector()
+ FieldCollectorParameters fieldCollectorParameters = newParameters()
+ .schema(schema)
+ .objectType(object)
+ .build()
+
+ def selectionSet = ((OperationDefinition) document.children[0]).selectionSet
+
+ when:
+ def result = fieldCollector.collectFields(fieldCollectorParameters, selectionSet)
+
+ then:
+ result.size() == 1
+ result.getSubField('echo').fields.size() == 1
+ }
+
+ def "collect fields that are merged together - fields have different selection sets "() {
+ def schema = TestUtil.schema("""
+ type Query {
+ me: Me
+ }
+
+ type Me {
+ firstname: String
+ lastname: String
+ }
+""")
+
+ Document document = new Parser().parseDocument("""
+ {
+ me {
+ firstname
+ }
+ me {
+ lastname
+ }
+ }
+
+""")
+
+ def object = schema.getType("TestImpl") as GraphQLObjectType
+ FieldCollector fieldCollector = new FieldCollector()
+ FieldCollectorParameters fieldCollectorParameters = newParameters()
+ .schema(schema)
+ .objectType(object)
+ .build()
+
+ def selectionSet = ((OperationDefinition) document.children[0]).selectionSet
+
+ when:
+ def result = fieldCollector.collectFields(fieldCollectorParameters, selectionSet)
+
+ then:
+ result.size() == 1
+
+ def meField = result.getSubField('me')
+
+ meField.fields.size() == 2
+
+ meField.fields[0].selectionSet.selections.size() == 1
+ meField.fields[0].selectionSet.selections[0].name == "firstname"
+
+ meField.fields[1].selectionSet.selections.size() == 1
+ meField.fields[1].selectionSet.selections[0].name == "lastname"
+ }
+
+ def "collect fields that are merged together - fields have different directives"() {
+ def schema = TestUtil.schema("""
+ directive @one on FIELD
+ directive @two on FIELD
+
+ type Query {
+ echo: String
+ }
+""")
+
+ Document document = new Parser().parseDocument("""
+ {
+ echo @one
+ echo @two
+ }
+
+""")
+
+ def object = schema.getType("TestImpl") as GraphQLObjectType
+ FieldCollector fieldCollector = new FieldCollector()
+ FieldCollectorParameters fieldCollectorParameters = newParameters()
+ .schema(schema)
+ .objectType(object)
+ .build()
+
+ def selectionSet = ((OperationDefinition) document.children[0]).selectionSet
+
+ when:
+ def result = fieldCollector.collectFields(fieldCollectorParameters, selectionSet)
+
+ then:
+ result.size() == 1
+
+ def echoField = result.getSubField('echo')
+
+ echoField.fields.size() == 2
+
+ echoField.fields[0].name == "echo"
+ echoField.fields[0].directives.size() == 1
+ echoField.fields[0].directives[0].name == "one"
+
+ echoField.fields[1].name == "echo"
+ echoField.fields[1].directives.size() == 1
+ echoField.fields[1].directives[0].name == "two"
}
}
diff --git a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryDeferTest.groovy b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryDeferTest.groovy
new file mode 100644
index 0000000000..08ead8f5e2
--- /dev/null
+++ b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryDeferTest.groovy
@@ -0,0 +1,965 @@
+package graphql.normalized
+
+import graphql.AssertException
+import graphql.ExecutionInput
+import graphql.GraphQL
+import graphql.TestUtil
+import graphql.execution.RawVariables
+import graphql.language.Document
+import graphql.schema.GraphQLSchema
+import graphql.util.TraversalControl
+import graphql.util.Traverser
+import graphql.util.TraverserContext
+import graphql.util.TraverserVisitorStub
+import spock.lang.Specification
+
+class ExecutableNormalizedOperationFactoryDeferTest extends Specification {
+ String schema = """
+ directive @defer(if: Boolean, label: String) on FRAGMENT_SPREAD | INLINE_FRAGMENT
+
+ type Query {
+ dog: Dog
+ animal: Animal
+ mammal: Mammal
+ }
+
+ interface LivingThing {
+ age: Int
+ }
+
+ interface Animal implements LivingThing {
+ name: String
+ age: Int
+ }
+
+ type Dog implements Animal & LivingThing {
+ name: String
+ age: Int
+ breed: String
+ owner: Person
+ }
+
+ type Cat implements Animal & LivingThing {
+ name: String
+ age: Int
+ breed: String
+ color: String
+ siblings: [Cat]
+ }
+
+ type Fish implements Animal & LivingThing {
+ name: String
+ age: Int
+ }
+
+ type Person {
+ firstname: String
+ lastname: String
+ bestFriend: Person
+ }
+
+ union Mammal = Dog | Cat
+ """
+
+ GraphQLSchema graphQLSchema = TestUtil.schema(schema)
+
+ def "defer on a single field via inline fragment without type"() {
+ given:
+
+ String query = '''
+ query q {
+ dog {
+ name
+ ... @defer(label: "breed-defer") {
+ breed
+ }
+ }
+ }
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.dog',
+ 'Dog.name',
+ 'Dog.breed defer{[label=breed-defer;types=[Dog]]}',
+ ]
+ }
+
+ def "fragment on interface field with no type"() {
+ given:
+
+ String query = '''
+ query q {
+ animal {
+ ... @defer {
+ name
+ }
+ }
+ }
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.animal',
+ "[Cat, Dog, Fish].name defer{[label=null;types=[Cat, Dog, Fish]]}",
+ ]
+ }
+
+ def "fragments on non-conditional fields"() {
+ given:
+
+ String query = '''
+ query q {
+ animal {
+ ... on Cat @defer {
+ name
+ }
+ ... on Dog @defer {
+ name
+ }
+ ... on Animal @defer {
+ name
+ }
+ }
+ }
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.animal',
+ "[Cat, Dog, Fish].name defer{[label=null;types=[Cat]],[label=null;types=[Dog]],[label=null;types=[Cat, Dog, Fish]]}",
+ ]
+ }
+
+ def "fragments on subset of non-conditional fields"() {
+ given:
+
+ String query = '''
+ query q {
+ animal {
+ ... on Cat @defer {
+ name
+ }
+ ... on Dog @defer {
+ name
+ }
+ ... on Fish {
+ name
+ }
+ }
+ }
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.animal',
+ "[Cat, Dog, Fish].name defer{[label=null;types=[Cat]],[label=null;types=[Dog]]}",
+ ]
+ }
+
+ def "field on multiple defer declarations is associated with "() {
+ given:
+ String query = '''
+ query q {
+ dog {
+ ... @defer {
+ name
+ age
+ }
+ ... @defer {
+ age
+ }
+ }
+ }
+ '''
+ Map variables = [:]
+
+ when:
+ def executableNormalizedOperation = createExecutableNormalizedOperations(query, variables);
+
+ List printedTree = printTreeWithIncrementalExecutionDetails(executableNormalizedOperation)
+
+ then:
+
+ def nameField = findField(executableNormalizedOperation, "Dog", "name")
+ def ageField = findField(executableNormalizedOperation, "Dog", "age")
+
+ nameField.deferExecutions.size() == 1
+ ageField.deferExecutions.size() == 2
+
+ // age field is associated with 2 defer executions, one of them is shared with "name", the other isn't
+ ageField.deferExecutions.any {
+ it == nameField.deferExecutions[0]
+ }
+
+ ageField.deferExecutions.any {
+ it != nameField.deferExecutions[0]
+ }
+
+ printedTree == ['Query.dog',
+ "Dog.name defer{[label=null;types=[Dog]]}",
+ "Dog.age defer{[label=null;types=[Dog]],[label=null;types=[Dog]]}",
+ ]
+ }
+
+ def "fragment on interface"() {
+ given:
+
+ String query = '''
+ query q {
+ animal {
+ ... on Animal @defer {
+ name
+ }
+ }
+ }
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.animal',
+ '[Cat, Dog, Fish].name defer{[label=null;types=[Cat, Dog, Fish]]}',
+ ]
+ }
+
+ def "fragment on distant interface"() {
+ given:
+
+ String query = '''
+ query q {
+ animal {
+ ... on LivingThing @defer {
+ age
+ }
+ }
+ }
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.animal',
+ '[Cat, Dog, Fish].age defer{[label=null;types=[Cat, Dog, Fish]]}',
+ ]
+ }
+
+ def "fragment on union"() {
+ given:
+
+ String query = '''
+ query q {
+ mammal {
+ ... on Dog @defer {
+ name
+ breed
+ }
+ ... on Cat @defer {
+ name
+ breed
+ }
+ }
+ }
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.mammal',
+ '[Dog, Cat].name defer{[label=null;types=[Cat]],[label=null;types=[Dog]]}',
+ 'Dog.breed defer{[label=null;types=[Dog]]}',
+ 'Cat.breed defer{[label=null;types=[Cat]]}',
+ ]
+ }
+
+ def "fragments on interface"() {
+ given:
+
+ String query = '''
+ query q {
+ animal {
+ ... on Animal @defer {
+ name
+ }
+ ... on Animal @defer {
+ age
+ }
+ }
+ }
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.animal',
+ '[Cat, Dog, Fish].name defer{[label=null;types=[Cat, Dog, Fish]]}',
+ '[Cat, Dog, Fish].age defer{[label=null;types=[Cat, Dog, Fish]]}',
+ ]
+ }
+
+ def "defer on a subselection of non-conditional fields"() {
+ given:
+
+ String query = '''
+ query q {
+ animal {
+ ... on Cat @defer {
+ name
+ }
+ ... on Dog {
+ name
+ }
+ }
+ }
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.animal',
+ '[Cat, Dog].name defer{[label=null;types=[Cat]]}',
+ ]
+ }
+
+ def "fragments on conditional fields"() {
+ given:
+
+ String query = '''
+ query q {
+ animal {
+ ... on Cat @defer {
+ breed
+ }
+ ... on Dog @defer {
+ breed
+ }
+ }
+ }
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.animal',
+ 'Cat.breed defer{[label=null;types=[Cat]]}',
+ 'Dog.breed defer{[label=null;types=[Dog]]}'
+ ]
+ }
+
+ def "defer on a single field via inline fragment with type"() {
+ given:
+
+ String query = '''
+ query q {
+ dog {
+ name
+ ... on Dog @defer(label: "breed-defer") {
+ breed
+ }
+ }
+ }
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.dog',
+ 'Dog.name',
+ 'Dog.breed defer{[label=breed-defer;types=[Dog]]}',
+ ]
+ }
+
+ def "1 defer on 2 fields"() {
+ given:
+ String query = '''
+ query q {
+ animal {
+ ... @defer {
+ name
+ }
+
+ ... on Dog @defer {
+ name
+ breed
+ }
+
+ ... on Cat @defer {
+ name
+ breed
+ }
+ }
+ }
+ '''
+
+ Map variables = [:]
+
+ when:
+ def executableNormalizedOperation = createExecutableNormalizedOperations(query, variables);
+
+ List printedTree = printTreeWithIncrementalExecutionDetails(executableNormalizedOperation)
+
+ then: "should result in the same instance of defer block"
+ def nameField = findField(executableNormalizedOperation,"[Cat, Dog, Fish]","name")
+ def dogBreedField = findField(executableNormalizedOperation, "Dog", "breed")
+ def catBreedField = findField(executableNormalizedOperation, "Cat", "breed")
+
+ nameField.deferExecutions.size() == 3
+ dogBreedField.deferExecutions.size() == 1
+ catBreedField.deferExecutions.size() == 1
+
+ // nameField should share a defer block with each of the other fields
+ nameField.deferExecutions.any {
+ it == dogBreedField.deferExecutions[0]
+ }
+ nameField.deferExecutions.any {
+ it == catBreedField.deferExecutions[0]
+ }
+ // also, nameField should have a defer block that is not shared with any other field
+ nameField.deferExecutions.any {
+ it != dogBreedField.deferExecutions[0] &&
+ it != catBreedField.deferExecutions[0]
+ }
+
+ printedTree == ['Query.animal',
+ '[Cat, Dog, Fish].name defer{[label=null;types=[Cat]],[label=null;types=[Dog]],[label=null;types=[Cat, Dog, Fish]]}',
+ 'Dog.breed defer{[label=null;types=[Dog]]}',
+ 'Cat.breed defer{[label=null;types=[Cat]]}',
+ ]
+ }
+
+ def "2 defers on 2 fields"() {
+ given:
+
+ String query = '''
+ query q {
+ dog {
+ ... @defer{
+ name
+ }
+ ... @defer{
+ breed
+ }
+ }
+ }
+ '''
+
+ Map variables = [:]
+
+ when:
+ def executableNormalizedOperation = createExecutableNormalizedOperations(query, variables);
+
+ List printedTree = printTreeWithIncrementalExecutionDetails(executableNormalizedOperation)
+
+ then: "should result in 2 different instances of defer"
+ def nameField = findField(executableNormalizedOperation, "Dog", "name")
+ def breedField = findField(executableNormalizedOperation, "Dog", "breed")
+
+ nameField.deferExecutions.size() == 1
+ breedField.deferExecutions.size() == 1
+
+ // different label instances
+ nameField.deferExecutions[0] != breedField.deferExecutions[0]
+
+ printedTree == ['Query.dog',
+ 'Dog.name defer{[label=null;types=[Dog]]}',
+ 'Dog.breed defer{[label=null;types=[Dog]]}',
+ ]
+ }
+
+ def "defer on a fragment definition"() {
+ given:
+
+ String query = '''
+ query q {
+ dog {
+ ... DogFrag @defer(label: "breed-defer")
+ }
+ }
+
+ fragment DogFrag on Dog {
+ name
+ breed
+ }
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.dog',
+ 'Dog.name defer{[label=breed-defer;types=[Dog]]}',
+ 'Dog.breed defer{[label=breed-defer;types=[Dog]]}',
+ ]
+ }
+
+ def "multiple defer on same field with different labels"() {
+ given:
+
+ String query = '''
+ query q {
+ dog {
+ ... @defer(label: "name-defer") {
+ name
+ }
+
+ ... @defer(label: "another-name-defer") {
+ name
+ }
+ }
+ }
+
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.dog',
+ 'Dog.name defer{[label=another-name-defer;types=[Dog]],[label=name-defer;types=[Dog]]}'
+ ]
+ }
+
+ def "multiple fields and a single defer"() {
+ given:
+
+ String query = '''
+ query q {
+ dog {
+ ... @defer(label: "name-defer") {
+ name
+ }
+
+ ... {
+ name
+ }
+ }
+ }
+
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.dog',
+ 'Dog.name defer{[label=name-defer;types=[Dog]]}',
+ ]
+ }
+
+ def "multiple fields and a single defer - no label"() {
+ given:
+
+ String query = '''
+ query q {
+ dog {
+ ... @defer {
+ name
+ }
+
+ ... {
+ name
+ }
+ }
+ }
+
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.dog',
+ 'Dog.name defer{[label=null;types=[Dog]]}',
+ ]
+ }
+
+ def "multiple fields and multiple defers - no label"() {
+ given:
+
+ String query = '''
+ query q {
+ dog {
+ ... @defer {
+ name
+ }
+
+ ... @defer {
+ name
+ }
+ }
+ }
+
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.dog',
+ 'Dog.name defer{[label=null;types=[Dog]],[label=null;types=[Dog]]}',
+ ]
+ }
+
+    def "multiple fields and multiple defers with same label are not allowed"() {
+ given:
+
+ String query = '''
+ query q {
+ dog {
+ ... @defer(label:"name-defer") {
+ name
+ }
+
+ ... @defer(label:"name-defer") {
+ name
+ }
+ }
+ }
+
+ '''
+
+ Map variables = [:]
+
+ when:
+ executeQueryAndPrintTree(query, variables)
+
+ then:
+ def exception = thrown(AssertException)
+ exception.message == "Internal error: should never happen: Duplicated @defer labels are not allowed: [name-defer]"
+ }
+
+ def "nested defers - no label"() {
+ given:
+
+ String query = '''
+ query q {
+ dog {
+ ... @defer {
+ name
+ owner {
+ firstname
+ ... @defer {
+ lastname
+ }
+ }
+ }
+ }
+ }
+
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.dog',
+ 'Dog.name defer{[label=null;types=[Dog]]}',
+ 'Dog.owner defer{[label=null;types=[Dog]]}',
+ 'Person.firstname',
+ 'Person.lastname defer{[label=null;types=[Person]]}',
+ ]
+ }
+
+ def "nested defers - with labels"() {
+ given:
+
+ String query = '''
+ query q {
+ dog {
+ ... @defer(label:"dog-defer") {
+ name
+ owner {
+ firstname
+ ... @defer(label: "lastname-defer") {
+ lastname
+ }
+ }
+ }
+ }
+ }
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.dog',
+ 'Dog.name defer{[label=dog-defer;types=[Dog]]}',
+ 'Dog.owner defer{[label=dog-defer;types=[Dog]]}',
+ 'Person.firstname',
+ 'Person.lastname defer{[label=lastname-defer;types=[Person]]}',
+ ]
+ }
+
+ def "nested defers - with named spreads"() {
+ given:
+
+ String query = '''
+ query q {
+ animal {
+ name
+ ... on Dog @defer(label:"dog-defer") {
+ owner {
+ firstname
+ ... @defer(label: "lastname-defer") {
+ lastname
+ }
+ }
+ }
+ }
+ }
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.animal',
+ '[Cat, Dog, Fish].name',
+ 'Dog.owner defer{[label=dog-defer;types=[Dog]]}',
+ 'Person.firstname',
+ 'Person.lastname defer{[label=lastname-defer;types=[Person]]}',
+ ]
+ }
+
+ def "nesting defer blocks that would always result in no data are ignored"() {
+ given:
+
+ String query = '''
+ query q {
+ dog {
+ ... @defer(label: "one") {
+ ... @defer(label: "two") {
+ ... @defer(label: "three") {
+ name
+ }
+ }
+ }
+ }
+ }
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.dog',
+ 'Dog.name defer{[label=three;types=[Dog]]}',
+ ]
+ }
+
+ def "'if' argument is respected"() {
+ given:
+
+ String query = '''
+ query q {
+ dog {
+ ... @defer(if: false, label: "name-defer") {
+ name
+ }
+
+ ... @defer(if: true, label: "another-name-defer") {
+ name
+ }
+ }
+ }
+
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.dog',
+ 'Dog.name defer{[label=another-name-defer;types=[Dog]]}',
+ ]
+ }
+
+ def "'if' argument is respected when value is passed through variable"() {
+ given:
+
+ String query = '''
+ query q($if1: Boolean, $if2: Boolean) {
+ dog {
+ ... @defer(if: $if1, label: "name-defer") {
+ name
+ }
+
+ ... @defer(if: $if2, label: "another-name-defer") {
+ name
+ }
+ }
+ }
+
+ '''
+
+ Map variables = [if1: false, if2: true]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.dog',
+ 'Dog.name defer{[label=another-name-defer;types=[Dog]]}',
+ ]
+ }
+
+ def "'if' argument with different values on same field and same label"() {
+ given:
+
+ String query = '''
+ query q {
+ dog {
+ ... @defer(if: false, label: "name-defer") {
+ name
+ }
+
+ ... @defer(if: true, label: "name-defer") {
+ name
+ }
+ }
+ }
+
+ '''
+
+ Map variables = [:]
+
+ when:
+ List printedTree = executeQueryAndPrintTree(query, variables)
+
+ then:
+ printedTree == ['Query.dog',
+ 'Dog.name defer{[label=name-defer;types=[Dog]]}',
+ ]
+ }
+
+ private ExecutableNormalizedOperation createExecutableNormalizedOperations(String query, Map variables) {
+ assertValidQuery(graphQLSchema, query, variables)
+ Document document = TestUtil.parseQuery(query)
+ ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory()
+
+ return dependencyGraph.createExecutableNormalizedOperationWithRawVariables(
+ graphQLSchema,
+ document,
+ null,
+ RawVariables.of(variables),
+ ExecutableNormalizedOperationFactory.Options.defaultOptions().deferSupport(true),
+ )
+ }
+
+ private List executeQueryAndPrintTree(String query, Map variables) {
+ assertValidQuery(graphQLSchema, query, variables)
+ Document document = TestUtil.parseQuery(query)
+ ExecutableNormalizedOperationFactory dependencyGraph = new ExecutableNormalizedOperationFactory()
+
+ def tree = dependencyGraph.createExecutableNormalizedOperationWithRawVariables(
+ graphQLSchema,
+ document,
+ null,
+ RawVariables.of(variables),
+ ExecutableNormalizedOperationFactory.Options.defaultOptions().deferSupport(true),
+ )
+ return printTreeWithIncrementalExecutionDetails(tree)
+ }
+
+ private List printTreeWithIncrementalExecutionDetails(ExecutableNormalizedOperation queryExecutionTree) {
+ def result = []
+ Traverser traverser = Traverser.depthFirst({ it.getChildren() })
+
+ traverser.traverse(queryExecutionTree.getTopLevelFields(), new TraverserVisitorStub() {
+ @Override
+ TraversalControl enter(TraverserContext context) {
+ ExecutableNormalizedField queryExecutionField = context.thisNode()
+ result << queryExecutionField.printDetails() + printDeferExecutionDetails(queryExecutionField)
+ return TraversalControl.CONTINUE
+ }
+
+ String printDeferExecutionDetails(ExecutableNormalizedField field) {
+ def deferExecutions = field.deferExecutions
+ if (deferExecutions == null || deferExecutions.isEmpty()) {
+ return ""
+ }
+
+ def deferLabels = new ArrayList<>(deferExecutions)
+ .sort { it.label }
+ .sort { it.possibleTypes.collect {it.name} }
+ .collect { "[label=${it.label};types=${it.possibleTypes.collect{it.name}.sort()}]" }
+ .join(",")
+
+ return " defer{${deferLabels}}"
+ }
+ })
+
+ result
+ }
+
+ private static void assertValidQuery(GraphQLSchema graphQLSchema, String query, Map variables = [:]) {
+ GraphQL graphQL = GraphQL.newGraphQL(graphQLSchema).build()
+ def ei = ExecutionInput.newExecutionInput(query).variables(variables).build()
+ assert graphQL.execute(ei).errors.size() == 0
+ }
+
+ private static ExecutableNormalizedField findField(ExecutableNormalizedOperation operation, String objectTypeNames, String fieldName) {
+ return operation.normalizedFieldToMergedField
+ .collect { it.key }
+ .find { it.fieldName == fieldName
+ && it.objectTypeNamesToString() == objectTypeNames}
+ }
+}
diff --git a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy
index 7a67201bd9..6063e7e448 100644
--- a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy
+++ b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationFactoryTest.groovy
@@ -27,7 +27,10 @@ import static graphql.language.AstPrinter.printAst
import static graphql.parser.Parser.parseValue
import static graphql.schema.FieldCoordinates.coordinates
-class ExecutableNormalizedOperationFactoryTest extends Specification {
+abstract class ExecutableNormalizedOperationFactoryTest extends Specification {
+ static boolean deferSupport
+
+
def "test"() {
String schema = """
type Query{
@@ -112,8 +115,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
-
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, graphQLSchema)
expect:
@@ -198,8 +200,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
-
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, graphQLSchema)
expect:
@@ -278,8 +279,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
-
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, graphQLSchema)
expect:
@@ -330,7 +330,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTree(tree)
expect:
@@ -373,7 +373,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTree(tree)
expect:
@@ -423,7 +423,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTree(tree)
expect:
@@ -486,7 +486,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, graphQLSchema)
expect:
@@ -532,7 +532,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTree(tree)
expect:
@@ -576,7 +576,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTree(tree)
expect:
@@ -620,7 +620,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTree(tree)
expect:
@@ -652,7 +652,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTree(tree)
expect:
@@ -703,7 +703,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, graphQLSchema)
expect:
@@ -753,7 +753,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, graphQLSchema)
expect:
@@ -792,7 +792,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTree(tree)
expect:
@@ -836,7 +836,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, graphQLSchema)
expect:
@@ -876,7 +876,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, graphQLSchema)
expect:
@@ -924,7 +924,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, graphQLSchema)
expect:
@@ -1027,7 +1027,7 @@ type Dog implements Animal{
def subFooField = (document.getDefinitions()[1] as FragmentDefinition).getSelectionSet().getSelections()[0] as Field
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def fieldToNormalizedField = tree.getFieldToNormalizedField()
expect:
@@ -1070,7 +1070,7 @@ type Dog implements Animal{
def idField = petsField.getSelectionSet().getSelections()[0] as Field
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def fieldToNormalizedField = tree.getFieldToNormalizedField()
@@ -1119,7 +1119,7 @@ type Dog implements Animal{
def typeField = selections[3] as Field
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def fieldToNormalizedField = tree.getFieldToNormalizedField()
expect:
@@ -1176,7 +1176,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, graphQLSchema)
expect:
@@ -1219,7 +1219,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTree(tree)
expect:
@@ -1247,7 +1247,7 @@ type Dog implements Animal{
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def normalizedFieldToMergedField = tree.getNormalizedFieldToMergedField()
Traverser traverser = Traverser.depthFirst({ it.getChildren() })
List result = new ArrayList<>()
@@ -1287,7 +1287,7 @@ type Dog implements Animal{
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def coordinatesToNormalizedFields = tree.coordinatesToNormalizedFields
then:
@@ -1386,7 +1386,7 @@ schema {
Document document = TestUtil.parseQuery(mutation)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, graphQLSchema)
expect:
@@ -1443,7 +1443,7 @@ schema {
// the normalized arg value should be the same regardless of how the value was provided
def expectedNormalizedArgValue = [foo: new NormalizedInputValue("String", parseValue('"foo"')), input2: new NormalizedInputValue("Input2", [bar: new NormalizedInputValue("Int", parseValue("123"))])]
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables))
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables))
def topLevelField = tree.getTopLevelFields().get(0)
def secondField = topLevelField.getChildren().get(0)
def arg1 = secondField.getNormalizedArgument("arg1")
@@ -1484,7 +1484,7 @@ schema {
def document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables())
then:
def topLevelField = tree.getTopLevelFields().get(0)
@@ -1523,7 +1523,7 @@ schema {
otherVar: null,
]
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables))
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables))
then:
def topLevelField = tree.getTopLevelFields().get(0)
@@ -1575,7 +1575,7 @@ schema {
Document document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables))
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables))
def topLevelField = tree.getTopLevelFields().get(0)
def arg1 = topLevelField.getNormalizedArgument("arg1")
def arg2 = topLevelField.getNormalizedArgument("arg2")
@@ -1628,7 +1628,7 @@ schema {
Document document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables))
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables))
def topLevelField = tree.getTopLevelFields().get(0)
def arg1 = topLevelField.getNormalizedArgument("arg1")
def arg2 = topLevelField.getNormalizedArgument("arg2")
@@ -1683,7 +1683,7 @@ schema {
Document document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables())
then:
tree.normalizedFieldToMergedField.size() == 3
@@ -1741,7 +1741,7 @@ schema {
Document document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, graphQLSchema)
then:
@@ -1789,7 +1789,7 @@ schema {
Document document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, graphQLSchema)
then:
@@ -1865,7 +1865,7 @@ schema {
Document document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, schema)
then:
@@ -1929,7 +1929,7 @@ schema {
Document document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, schema)
then:
@@ -1986,7 +1986,7 @@ schema {
Document document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, schema)
then:
@@ -2061,7 +2061,7 @@ schema {
Document document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, schema)
then:
@@ -2123,7 +2123,7 @@ schema {
Document document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, schema)
then:
@@ -2165,7 +2165,7 @@ schema {
Document document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, schema)
then:
@@ -2208,7 +2208,7 @@ schema {
Document document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, schema)
then:
@@ -2251,7 +2251,7 @@ schema {
Document document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, schema)
then:
@@ -2326,7 +2326,7 @@ schema {
Document document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, schema)
then:
@@ -2402,7 +2402,7 @@ schema {
Document document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, schema)
then:
@@ -2464,7 +2464,7 @@ schema {
Document document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables))
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables))
println String.join("\n", printTree(tree))
def printedTree = printTree(tree)
@@ -2521,7 +2521,7 @@ schema {
Document document = TestUtil.parseQuery(query)
when:
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables))
+ def tree = localCreateExecutableNormalizedOperationWithRawVariables(graphQLSchema, document, null, RawVariables.of(variables))
def printedTree = printTreeAndDirectives(tree)
then:
@@ -2585,7 +2585,7 @@ fragment personName on Person {
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, graphQLSchema)
expect:
@@ -2638,7 +2638,7 @@ fragment personName on Person {
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, graphQLSchema)
expect:
@@ -2685,7 +2685,7 @@ fragment personName on Person {
Document document = TestUtil.parseQuery(query)
- def tree = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
+ def tree = localCreateExecutableNormalizedOperation(graphQLSchema, document, null, CoercedVariables.emptyVariables())
def printedTree = printTreeWithLevelInfo(tree, graphQLSchema)
expect:
@@ -2875,4 +2875,46 @@ fragment personName on Person {
then:
noExceptionThrown()
}
+
+ private static ExecutableNormalizedOperation localCreateExecutableNormalizedOperation(
+ GraphQLSchema graphQLSchema,
+ Document document,
+ String operationName,
+ CoercedVariables coercedVariableValues
+ ) {
+
+ def options = ExecutableNormalizedOperationFactory.Options.defaultOptions().deferSupport(deferSupport)
+
+ return ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, document, operationName, coercedVariableValues, options)
+ }
+
+ private static ExecutableNormalizedOperation localCreateExecutableNormalizedOperationWithRawVariables(
+ GraphQLSchema graphQLSchema,
+ Document document,
+ String operationName,
+ RawVariables rawVariables
+ ) {
+
+ def options = ExecutableNormalizedOperationFactory.Options.defaultOptions().deferSupport(deferSupport)
+
+ return ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(
+ graphQLSchema,
+ document,
+ operationName,
+ rawVariables,
+ options
+ )
+ }
+}
+
+class ExecutableNormalizedOperationFactoryTestWithDeferSupport extends ExecutableNormalizedOperationFactoryTest {
+ static {
+ deferSupport = true
+ }
+}
+
+class ExecutableNormalizedOperationFactoryTestNoDeferSupport extends ExecutableNormalizedOperationFactoryTest {
+ static {
+ deferSupport = false
+ }
}
diff --git a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationToAstCompilerDeferTest.groovy b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationToAstCompilerDeferTest.groovy
new file mode 100644
index 0000000000..13928fd991
--- /dev/null
+++ b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationToAstCompilerDeferTest.groovy
@@ -0,0 +1,525 @@
+package graphql.normalized
+
+
+import graphql.GraphQL
+import graphql.TestUtil
+import graphql.execution.RawVariables
+import graphql.language.AstPrinter
+import graphql.language.AstSorter
+import graphql.language.Document
+import graphql.schema.GraphQLSchema
+import graphql.schema.idl.RuntimeWiring
+import graphql.schema.idl.TestLiveMockedWiringFactory
+import graphql.schema.scalars.JsonScalar
+import spock.lang.Specification
+
+import static graphql.ExecutionInput.newExecutionInput
+import static graphql.language.OperationDefinition.Operation.QUERY
+import static graphql.normalized.ExecutableNormalizedOperationToAstCompiler.compileToDocumentWithDeferSupport
+
+class ExecutableNormalizedOperationToAstCompilerDeferTest extends Specification { // verifies compileToDocumentWithDeferSupport round-trips @defer fragments from a normalized operation back to an AST document
+ VariablePredicate noVariables = new VariablePredicate() { // predicate that keeps every argument inline instead of extracting it into a variable
+ @Override
+ boolean shouldMakeVariable(ExecutableNormalizedField executableNormalizedField, String argName, NormalizedInputValue normalizedInputValue) {
+ return false
+ }
+ }
+
+ String sdl = """
+ directive @defer(if: Boolean, label: String) on FRAGMENT_SPREAD | INLINE_FRAGMENT
+
+ type Query {
+ dog: Dog
+ animal: Animal
+ }
+
+ interface Animal {
+ name: String
+ }
+
+ type Dog implements Animal {
+ name: String
+ breed: String
+ owner: Person
+ }
+
+ type Cat implements Animal {
+ name: String
+ breed: String
+ color: String
+ siblings: [Cat]
+ }
+
+ type Fish implements Animal {
+ name: String
+ }
+
+ type Person {
+ firstname: String
+ lastname: String
+ bestFriend: Person
+ }
+ """
+
+ def "simple defer"() { // an untyped inline fragment with @defer(label) is preserved verbatim by the compiler
+ String query = """
+ query q {
+ dog {
+ name
+ ... @defer(label: "breed-defer") {
+ breed
+ }
+ }
+ }
+ """
+ GraphQLSchema schema = mkSchema(sdl)
+ def tree = createNormalizedTree(schema, query) // normalization runs with deferSupport(true)
+ when:
+ def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def printed = AstPrinter.printAst(new AstSorter().sort(result.document)) // sorted so the fixture comparison is order-stable
+ then:
+ printed == '''{
+ dog {
+ name
+ ... @defer(label: "breed-defer") {
+ breed
+ }
+ }
+}
+'''
+ }
+
+ def "simple defer with named spread"() { // fixture shows the "on Dog" type condition is dropped in the output when it matches the parent field's type
+ String query = """
+ query q {
+ dog {
+ name
+ ... on Dog @defer(label: "breed-defer") {
+ breed
+ }
+ }
+ }
+ """
+ GraphQLSchema schema = mkSchema(sdl)
+ def tree = createNormalizedTree(schema, query)
+ when:
+ def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
+ then:
+ printed == '''{
+ dog {
+ name
+ ... @defer(label: "breed-defer") {
+ breed
+ }
+ }
+}
+'''
+ }
+
+ def "multiple labels on the same field"() {
+ String query = """
+ query q {
+ dog {
+ name
+ ... @defer(label: "breed-defer") {
+ breed
+ }
+ ... @defer(label: "breed-defer-2") {
+ breed
+ }
+ }
+ }
+ """
+ GraphQLSchema schema = mkSchema(sdl)
+ def tree = createNormalizedTree(schema, query)
+ when:
+ def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
+ then:
+ printed == '''{
+ dog {
+ name
+ ... @defer(label: "breed-defer") {
+ breed
+ }
+ ... @defer(label: "breed-defer-2") {
+ breed
+ }
+ }
+}
+'''
+ }
+
+ def "multiple defers without label on the same field"() {
+ String query = """
+ query q {
+ dog {
+ name
+ ... @defer {
+ breed
+ }
+ ... @defer {
+ breed
+ }
+ }
+ }
+ """
+ GraphQLSchema schema = mkSchema(sdl)
+ def tree = createNormalizedTree(schema, query)
+ when:
+ def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
+ then:
+ printed == '''{
+ dog {
+ name
+ ... @defer {
+ breed
+ }
+ ... @defer {
+ breed
+ }
+ }
+}
+'''
+ }
+
+ def "field with and without defer"() { // fixture records that a field selected both deferred and non-deferred compiles to only the deferred fragment — NOTE(review): confirm absorbing the plain selection is the intended defer semantics
+ String query = """
+ query q {
+ dog {
+ ... @defer {
+ breed
+ }
+ ... {
+ breed
+ }
+ }
+ }
+ """
+ GraphQLSchema schema = mkSchema(sdl)
+ def tree = createNormalizedTree(schema, query)
+ when:
+ def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
+ then:
+ printed == '''{
+ dog {
+ ... @defer {
+ breed
+ }
+ }
+}
+'''
+ }
+
+ def "defer on type spread"() {
+ String query = """
+ query q {
+ animal {
+ ... on Dog @defer {
+ breed
+ }
+ ... on Dog {
+ name
+ }
+ ... on Dog @defer(label: "owner-defer") {
+ owner {
+ firstname
+ }
+ }
+ }
+ }
+ """
+ GraphQLSchema schema = mkSchema(sdl)
+ def tree = createNormalizedTree(schema, query)
+ when:
+ def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
+ then:
+ printed == '''{
+ animal {
+ ... on Dog @defer {
+ breed
+ }
+ ... on Dog {
+ name
+ }
+ ... on Dog @defer(label: "owner-defer") {
+ owner {
+ firstname
+ }
+ }
+ }
+}
+'''
+ }
+
+ def "2 fragments on non-conditional fields"() {
+ String query = """
+ query q {
+ animal {
+ ... on Cat @defer {
+ name
+ }
+ ... on Animal @defer {
+ name
+ }
+ }
+ }
+ """
+ GraphQLSchema schema = mkSchema(sdl)
+ def tree = createNormalizedTree(schema, query)
+ when:
+ def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
+ then:
+ printed == '''{
+ animal {
+ ... @defer {
+ name
+ }
+ ... @defer {
+ name
+ }
+ }
+}
+'''
+ }
+
+ def "2 fragments on conditional fields"() {
+ String query = """
+ query q {
+ animal {
+ ... on Cat @defer {
+ breed
+ }
+ ... on Dog @defer {
+ breed
+ }
+ }
+ }
+ """
+ GraphQLSchema schema = mkSchema(sdl)
+ def tree = createNormalizedTree(schema, query)
+ when:
+ def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
+ then:
+ printed == '''{
+ animal {
+ ... on Cat @defer {
+ breed
+ }
+ ... on Dog @defer {
+ breed
+ }
+ }
+}
+'''
+ }
+
+ def "2 fragments on conditional fields with different labels"() {
+ String query = """
+ query q {
+ animal {
+ ... on Cat @defer(label: "cat-defer") {
+ breed
+ }
+ ... on Dog @defer(label: "dog-defer") {
+ breed
+ }
+ }
+ }
+ """
+ GraphQLSchema schema = mkSchema(sdl)
+ def tree = createNormalizedTree(schema, query)
+ when:
+ def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
+ then:
+ printed == '''{
+ animal {
+ ... on Cat @defer(label: "cat-defer") {
+ breed
+ }
+ ... on Dog @defer(label: "dog-defer") {
+ breed
+ }
+ }
+}
+'''
+ }
+
+ def "fragments on conditional fields with different labels and repeating types"() {
+ String query = """
+ query q {
+ animal {
+ ... on Cat @defer(label: "cat-defer-1") {
+ breed
+ }
+ ... on Cat @defer(label: "cat-defer-2") {
+ breed
+ }
+ ... on Dog @defer(label: "dog-defer") {
+ breed
+ }
+ }
+ }
+ """
+ GraphQLSchema schema = mkSchema(sdl)
+ def tree = createNormalizedTree(schema, query)
+ when:
+ def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
+ then:
+ printed == '''{
+ animal {
+ ... on Cat @defer(label: "cat-defer-1") {
+ breed
+ }
+ ... on Cat @defer(label: "cat-defer-2") {
+ breed
+ }
+ ... on Dog @defer(label: "dog-defer") {
+ breed
+ }
+ }
+}
+'''
+ }
+
+ def "nested defer"() {
+ String query = """
+ query q {
+ animal {
+ ... on Cat @defer {
+ name
+ }
+ ... on Animal @defer {
+ name
+ ... on Dog @defer {
+ owner {
+ firstname
+ ... @defer {
+ lastname
+ }
+ ... @defer {
+ bestFriend {
+ firstname
+ ... @defer {
+ lastname
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ """
+ GraphQLSchema schema = mkSchema(sdl)
+ def tree = createNormalizedTree(schema, query)
+ when:
+ def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
+ then:
+ printed == '''{
+ animal {
+ ... @defer {
+ name
+ }
+ ... @defer {
+ name
+ }
+ ... on Dog @defer {
+ owner {
+ firstname
+ ... @defer {
+ lastname
+ }
+ ... @defer {
+ bestFriend {
+ firstname
+ ... @defer {
+ lastname
+ }
+ }
+ }
+ }
+ }
+ }
+}
+'''
+ }
+
+ def "multiple defers at the same level are preserved"() {
+ String query = """
+ query q {
+ dog {
+ ... @defer {
+ name
+ }
+ ... @defer {
+ breed
+ }
+ ... @defer {
+ owner {
+ firstname
+ }
+ }
+ }
+ }
+ """
+ GraphQLSchema schema = mkSchema(sdl)
+ def tree = createNormalizedTree(schema, query)
+ when:
+ def result = compileToDocumentWithDeferSupport(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
+ then:
+ printed == '''{
+ dog {
+ ... @defer {
+ name
+ }
+ ... @defer {
+ breed
+ }
+ ... @defer {
+ owner {
+ firstname
+ }
+ }
+ }
+}
+'''
+ }
+
+ private ExecutableNormalizedOperation createNormalizedTree(GraphQLSchema schema, String query, Map variables = [:]) { // parses + normalizes a query with defer support always enabled
+ assertValidQuery(schema, query, variables) // fail fast if the query itself is invalid against the schema
+ Document originalDocument = TestUtil.parseQuery(query)
+
+
+ def options = ExecutableNormalizedOperationFactory.Options.defaultOptions().deferSupport(true) // this suite always exercises the defer-aware path
+ return ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables( // static call, consistent with the rest of the patch; the factory instance was unused state
+ schema,
+ originalDocument,
+ null,
+ RawVariables.of(variables),
+ options
+ )
+ }
+
+ private void assertValidQuery(GraphQLSchema graphQLSchema, String query, Map variables = [:]) { // executes the query for real and asserts it produced no GraphQL errors
+ GraphQL graphQL = GraphQL.newGraphQL(graphQLSchema).build()
+ assert graphQL.execute(newExecutionInput().query(query).variables(variables)).errors.isEmpty()
+ }
+
+ GraphQLSchema mkSchema(String sdl) { // builds a schema whose data fetchers are supplied by the live mocked wiring factory (plus the JSON scalar)
+ def wiringFactory = new TestLiveMockedWiringFactory([JsonScalar.JSON_SCALAR])
+ def runtimeWiring = RuntimeWiring.newRuntimeWiring()
+ .wiringFactory(wiringFactory).build()
+ TestUtil.schema(sdl, runtimeWiring) // Groovy: last expression is the implicit return value
+ }
+}
diff --git a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationToAstCompilerTest.groovy b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationToAstCompilerTest.groovy
index e990d981a5..27c4c89a6d 100644
--- a/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationToAstCompilerTest.groovy
+++ b/src/test/groovy/graphql/normalized/ExecutableNormalizedOperationToAstCompilerTest.groovy
@@ -3,12 +3,13 @@ package graphql.normalized
import graphql.GraphQL
import graphql.TestUtil
import graphql.execution.RawVariables
+import graphql.execution.directives.QueryDirectives
import graphql.language.AstPrinter
-import graphql.language.Field
-import graphql.language.OperationDefinition
import graphql.language.AstSorter
import graphql.language.Document
+import graphql.language.Field
import graphql.language.IntValue
+import graphql.language.OperationDefinition
import graphql.language.StringValue
import graphql.parser.Parser
import graphql.schema.GraphQLSchema
@@ -22,8 +23,12 @@ import static graphql.language.OperationDefinition.Operation.MUTATION
import static graphql.language.OperationDefinition.Operation.QUERY
import static graphql.language.OperationDefinition.Operation.SUBSCRIPTION
import static graphql.normalized.ExecutableNormalizedOperationToAstCompiler.compileToDocument
+import static graphql.normalized.ExecutableNormalizedOperationToAstCompiler.compileToDocumentWithDeferSupport
+
+abstract class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
+ static boolean deferSupport
+
-class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
VariablePredicate noVariables = new VariablePredicate() {
@Override
boolean shouldMakeVariable(ExecutableNormalizedField executableNormalizedField, String argName, NormalizedInputValue normalizedInputValue) {
@@ -128,7 +133,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
GraphQLSchema schema = mkSchema(sdl)
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, QUERY, null, fields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, fields, noVariables)
def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
printed == '''{
@@ -196,10 +201,9 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
"""
def tree = createNormalizedTree(schema, query)
- // printTreeWithLevelInfo(tree, schema).forEach { println it }
when:
- def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -250,7 +254,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
// printTreeWithLevelInfo(tree, schema).forEach { println it }
when:
- def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -331,7 +335,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
// printTreeWithLevelInfo(tree, schema).forEach { println it }
when:
- def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -356,6 +360,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
}
"""
}
+
def "test interface fields with different output types on the implementations 4"() {
// Tests we don't consider File as a possible option for parent on animals
def schema = TestUtil.schema("""
@@ -422,7 +427,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
// printTreeWithLevelInfo(tree, schema).forEach { println it }
when:
- def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -517,7 +522,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
// printTreeWithLevelInfo(tree, schema).forEach { println it }
when:
- def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -584,7 +589,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
// printTreeWithLevelInfo(tree, schema).forEach { println it }
when:
- def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -641,7 +646,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
// printTreeWithLevelInfo(tree, schema).forEach { println it }
when:
- def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -699,7 +704,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
// printTreeWithLevelInfo(tree, schema).forEach { println it }
when:
- def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -766,7 +771,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
// printTreeWithLevelInfo(tree, schema).forEach { println it }
when:
- def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -864,7 +869,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
// printTreeWithLevelInfo(tree, schema).forEach { println it }
when:
- def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -962,7 +967,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
// printTreeWithLevelInfo(tree, schema).forEach { println it }
when:
- def result = compileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, tree.topLevelFields, noVariables)
def printed = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1029,7 +1034,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
GraphQLSchema schema = mkSchema(sdl)
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, QUERY, null, fields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1063,7 +1068,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
GraphQLSchema schema = mkSchema(sdl)
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, QUERY, null, fields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1089,7 +1094,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
GraphQLSchema schema = mkSchema(sdl)
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, QUERY, "My_Op23", fields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, "My_Op23", fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1134,7 +1139,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
GraphQLSchema schema = mkSchema(sdl)
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, QUERY, null, fields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1166,9 +1171,9 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
}
'''
GraphQLSchema schema = mkSchema(sdl)
- def fields = createNormalizedFields(schema, query,["v":123])
+ def fields = createNormalizedFields(schema, query, ["v": 123])
when:
- def result = compileToDocument(schema, QUERY, null, fields, allVariables)
+ def result = localCompileToDocument(schema, QUERY, null, fields, allVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1200,7 +1205,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
GraphQLSchema schema = mkSchema(sdl)
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, MUTATION, null, fields, noVariables)
+ def result = localCompileToDocument(schema, MUTATION, null, fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1231,7 +1236,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
GraphQLSchema schema = mkSchema(sdl)
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, SUBSCRIPTION, null, fields, noVariables)
+ def result = localCompileToDocument(schema, SUBSCRIPTION, null, fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1242,7 +1247,6 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
}
-
def "test query directive"() {
def sdl = '''
type Query {
@@ -1275,14 +1279,14 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
'''
GraphQLSchema schema = mkSchema(sdl)
Document document = new Parser().parse(query)
- ExecutableNormalizedOperation eno = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(schema,document, null,RawVariables.emptyVariables())
+ ExecutableNormalizedOperation eno = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(schema, document, null, RawVariables.emptyVariables())
when:
- def result = compileToDocument(schema, SUBSCRIPTION, null, eno.topLevelFields, eno.normalizedFieldToQueryDirectives, noVariables)
+ def result = localCompileToDocument(schema, SUBSCRIPTION, null, eno.topLevelFields, eno.normalizedFieldToQueryDirectives, noVariables)
OperationDefinition operationDefinition = result.document.getDefinitionsOfType(OperationDefinition.class)[0]
- def fooField = (Field)operationDefinition.selectionSet.children[0]
- def nameField = (Field)fooField.selectionSet.children[0]
+ def fooField = (Field) operationDefinition.selectionSet.children[0]
+ def nameField = (Field) fooField.selectionSet.children[0]
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1327,7 +1331,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
GraphQLSchema schema = mkSchema(sdl)
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, MUTATION, null, fields, noVariables)
+ def result = localCompileToDocument(schema, MUTATION, null, fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1372,7 +1376,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
GraphQLSchema schema = mkSchema(sdl)
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, QUERY, null, fields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
documentPrinted == '''{
@@ -1418,7 +1422,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
GraphQLSchema schema = mkSchema(sdl)
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, QUERY, null, fields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
documentPrinted == '''{
@@ -1438,6 +1442,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
}
'''
}
+
def "test is conditional when there is only one interface implementation"() {
def sdl = '''
type Query {
@@ -1468,7 +1473,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
GraphQLSchema schema = mkSchema(sdl)
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, QUERY, null, fields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
documentPrinted == '''{
@@ -1507,7 +1512,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
GraphQLSchema schema = mkSchema(sdl)
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, QUERY, null, fields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
documentPrinted == '''{
@@ -1558,7 +1563,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
GraphQLSchema schema = mkSchema(sdl)
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, QUERY, null, fields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
// Note: the typename field moves out of a fragment because AFoo is the only impl
@@ -1609,7 +1614,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
GraphQLSchema schema = mkSchema(sdl)
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, QUERY, null, fields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
// Note: the typename field moves out of a fragment because AFoo is the only impl
@@ -1659,7 +1664,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
GraphQLSchema schema = TestUtil.schema(sdl)
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, QUERY, null, fields, noVariables)
+ def result = localCompileToDocument(schema, QUERY, null, fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
// Note: the typename field moves out of a fragment because AFoo is the only impl
@@ -1695,7 +1700,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
def fields = createNormalizedFields(schema, query, vars)
when:
- def result = compileToDocument(schema, MUTATION, null, fields, jsonVariables)
+ def result = localCompileToDocument(schema, MUTATION, null, fields, jsonVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1727,7 +1732,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
def fields = createNormalizedFields(schema, query, vars)
when:
- def result = compileToDocument(schema, MUTATION, null, fields, jsonVariables)
+ def result = localCompileToDocument(schema, MUTATION, null, fields, jsonVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1759,7 +1764,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
def fields = createNormalizedFields(schema, query, vars)
when:
- def result = compileToDocument(schema, MUTATION, null, fields, jsonVariables)
+ def result = localCompileToDocument(schema, MUTATION, null, fields, jsonVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1789,7 +1794,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, MUTATION, null, fields, noVariables)
+ def result = localCompileToDocument(schema, MUTATION, null, fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1819,7 +1824,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, MUTATION, null, fields, noVariables)
+ def result = localCompileToDocument(schema, MUTATION, null, fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1849,7 +1854,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, MUTATION, null, fields, jsonVariables)
+ def result = localCompileToDocument(schema, MUTATION, null, fields, jsonVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1879,7 +1884,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, MUTATION, null, fields, jsonVariables)
+ def result = localCompileToDocument(schema, MUTATION, null, fields, jsonVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1916,7 +1921,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, MUTATION, null, fields, jsonVariables)
+ def result = localCompileToDocument(schema, MUTATION, null, fields, jsonVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
def vars = result.variables
@@ -1953,7 +1958,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, MUTATION, null, fields, noVariables)
+ def result = localCompileToDocument(schema, MUTATION, null, fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -1988,7 +1993,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, MUTATION, null, fields, noVariables)
+ def result = localCompileToDocument(schema, MUTATION, null, fields, noVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -2031,7 +2036,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
def fields = createNormalizedFields(schema, query, variables)
when:
- def result = compileToDocument(schema, MUTATION, null, fields, jsonVariables)
+ def result = localCompileToDocument(schema, MUTATION, null, fields, jsonVariables)
def documentPrinted = AstPrinter.printAst(new AstSorter().sort(result.document))
then:
@@ -2104,7 +2109,7 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
def fields = createNormalizedFields(schema, query)
when:
- def result = compileToDocument(schema, QUERY, "named", fields, allVariables)
+ def result = localCompileToDocument(schema, QUERY, "named", fields, allVariables)
def document = result.document
def vars = result.variables
def ast = AstPrinter.printAst(new AstSorter().sort(document))
@@ -2140,7 +2145,8 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
assertValidQuery(schema, query, variables)
Document originalDocument = TestUtil.parseQuery(query)
- return ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(schema, originalDocument, null, RawVariables.of(variables))
+ def options = ExecutableNormalizedOperationFactory.Options.defaultOptions().deferSupport(deferSupport)
+ return ExecutableNormalizedOperationFactory.createExecutableNormalizedOperationWithRawVariables(schema, originalDocument, null, RawVariables.of(variables), options)
}
private List createNormalizedFields(GraphQLSchema schema, String query, Map variables = [:]) {
@@ -2158,4 +2164,40 @@ class ExecutableNormalizedOperationToAstCompilerTest extends Specification {
.wiringFactory(wiringFactory).build()
TestUtil.schema(sdl, runtimeWiring)
}
+
+ private static ExecutableNormalizedOperationToAstCompiler.CompilerResult localCompileToDocument( // convenience overload: no per-field query directives
+ GraphQLSchema schema,
+ OperationDefinition.Operation operationKind,
+ String operationName,
+ List topLevelFields,
+ VariablePredicate variablePredicate
+ ) {
+ return localCompileToDocument(schema, operationKind, operationName, topLevelFields, Map.of(), variablePredicate) // fixed: missing space after comma, dropped stray semicolon (file style is semicolon-free Groovy)
+ }
+
+ private static ExecutableNormalizedOperationToAstCompiler.CompilerResult localCompileToDocument( // dispatches on the class-wide deferSupport flag so both subclasses reuse one test body
+ GraphQLSchema schema,
+ OperationDefinition.Operation operationKind,
+ String operationName,
+ List topLevelFields,
+ Map normalizedFieldToQueryDirectives,
+ VariablePredicate variablePredicate
+ ) {
+ if (deferSupport) {
+ return compileToDocumentWithDeferSupport(schema, operationKind, operationName, topLevelFields, normalizedFieldToQueryDirectives, variablePredicate)
+ }
+ return compileToDocument(schema, operationKind, operationName, topLevelFields, normalizedFieldToQueryDirectives, variablePredicate) // legacy path, no defer handling
+ }
+}
+
+class ExecutableNormalizedOperationToAstCompilerTestWithDeferSupport extends ExecutableNormalizedOperationToAstCompilerTest { // re-runs every inherited feature through compileToDocumentWithDeferSupport
+ static {
+ deferSupport = true // NOTE(review): mutates a static field shared with the NoDeferSupport sibling; value depends on class-initialization order — TODO confirm the runner initializes each subclass right before its features run
+ }
+}
+
+class ExecutableNormalizedOperationToAstCompilerTestNoDeferSupport extends ExecutableNormalizedOperationToAstCompilerTest { // re-runs every inherited feature through the legacy compileToDocument path
+ static {
+ deferSupport = false // NOTE(review): same shared-static hazard as its WithDeferSupport sibling — TODO confirm initialization order
+ }
}