From 1b8640a02b9be7fc756ec5390f9c22ebe638952a Mon Sep 17 00:00:00 2001 From: Tim Ward Date: Wed, 9 Jul 2025 16:25:08 +0800 Subject: [PATCH 1/3] Add configuration to allow usage of normalized documents --- src/main/java/graphql/ExperimentalApi.java | 5 + .../graphql/GraphQLUnusualConfiguration.java | 29 +++ .../graphql/execution/ExecutionContext.java | 58 +++++- .../graphql/execution/ExecutionStrategy.java | 12 +- .../java/graphql/execution/ResolveType.java | 8 +- .../graphql/language/OperationDefinition.java | 3 +- .../normalized/ExecutableNormalizedField.java | 129 +++----------- .../ExecutableNormalizedOperation.java | 8 +- .../normalized/GraphQlNormalizedField.java | 166 ++++++++++++++++++ .../GraphQlNormalizedOperation.java | 27 +++ .../normalized/nf/NormalizedField.java | 12 +- .../normalized/nf/NormalizedOperation.java | 12 +- .../DataFetchingFieldSelectionSetImpl.java | 28 +-- 13 files changed, 358 insertions(+), 139 deletions(-) create mode 100644 src/main/java/graphql/normalized/GraphQlNormalizedField.java create mode 100644 src/main/java/graphql/normalized/GraphQlNormalizedOperation.java diff --git a/src/main/java/graphql/ExperimentalApi.java b/src/main/java/graphql/ExperimentalApi.java index 80be253cd1..9edf67d2d9 100644 --- a/src/main/java/graphql/ExperimentalApi.java +++ b/src/main/java/graphql/ExperimentalApi.java @@ -24,4 +24,9 @@ * The key that should be associated with a boolean value which indicates whether @defer and @stream behaviour is enabled for this execution. */ String ENABLE_INCREMENTAL_SUPPORT = "ENABLE_INCREMENTAL_SUPPORT"; + + /** + * The key that should be associated with a boolean value which indicates whether normalized document behaviour is enabled for this execution. 
+ */ + String ENABLE_NORMALIZED_DOCUMENT_SUPPORT = "ENABLE_NORMALIZED_DOCUMENT_SUPPORT"; } diff --git a/src/main/java/graphql/GraphQLUnusualConfiguration.java b/src/main/java/graphql/GraphQLUnusualConfiguration.java index 75720d52cd..1ca52169c8 100644 --- a/src/main/java/graphql/GraphQLUnusualConfiguration.java +++ b/src/main/java/graphql/GraphQLUnusualConfiguration.java @@ -265,6 +265,13 @@ public IncrementalSupportConfig incrementalSupport() { return new IncrementalSupportConfig(this); } + /** + * @return an element that allows you to control normalized document behavior + */ + public NormalizedDocumentSupportConfig normalizedDocumentSupport() { + return new NormalizedDocumentSupportConfig(this); + } + /** * @return an element that allows you to precisely control {@link org.dataloader.DataLoader} behavior * in graphql-java. @@ -321,6 +328,28 @@ public GraphQLContextConfiguration then() { } } + public static class NormalizedDocumentSupportConfig extends BaseContextConfig { + private NormalizedDocumentSupportConfig(GraphQLContextConfiguration contextConfig) { + super(contextConfig); + } + + /** + * @return true if normalized document behaviour is enabled for this execution. + */ + public boolean isNormalizedDocumentSupportEnabled() { + return contextConfig.getBoolean(ExperimentalApi.ENABLE_NORMALIZED_DOCUMENT_SUPPORT); + } + + /** + * This controls whether normalized document behaviour is enabled for this execution. 
+ */ + @ExperimentalApi + public NormalizedDocumentSupportConfig enableNormalizedDocumentSupport(boolean enable) { + contextConfig.put(ExperimentalApi.ENABLE_NORMALIZED_DOCUMENT_SUPPORT, enable); + return this; + } + } + public static class IncrementalSupportConfig extends BaseContextConfig { private IncrementalSupportConfig(GraphQLContextConfiguration contextConfig) { super(contextConfig); diff --git a/src/main/java/graphql/execution/ExecutionContext.java b/src/main/java/graphql/execution/ExecutionContext.java index 5bdc076d36..21d3f1b402 100644 --- a/src/main/java/graphql/execution/ExecutionContext.java +++ b/src/main/java/graphql/execution/ExecutionContext.java @@ -19,6 +19,10 @@ import graphql.language.OperationDefinition; import graphql.normalized.ExecutableNormalizedOperation; import graphql.normalized.ExecutableNormalizedOperationFactory; +import graphql.normalized.GraphQlNormalizedOperation; +import graphql.normalized.nf.NormalizedDocument; +import graphql.normalized.nf.NormalizedDocumentFactory; +import graphql.normalized.nf.NormalizedOperation; import graphql.schema.GraphQLSchema; import graphql.util.FpKit; import graphql.util.LockKit; @@ -65,7 +69,7 @@ public class ExecutionContext { private final ResponseMapFactory responseMapFactory; private final ExecutionInput executionInput; - private final Supplier queryTree; + private final Supplier queryTree; private final boolean propagateErrorsOnNonNullContractFailure; private final AtomicInteger isRunning = new AtomicInteger(0); @@ -100,7 +104,7 @@ public class ExecutionContext { this.localContext = builder.localContext; this.executionInput = builder.executionInput; this.dataLoaderDispatcherStrategy = builder.dataLoaderDispatcherStrategy; - this.queryTree = FpKit.interThreadMemoize(() -> ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, operationDefinition, fragmentsByName, coercedVariables)); + this.queryTree = 
FpKit.interThreadMemoize(this::createGraphQLNormalizedOperation); this.propagateErrorsOnNonNullContractFailure = builder.propagateErrorsOnNonNullContractFailure; this.engineRunningState = builder.engineRunningState; } @@ -336,7 +340,7 @@ public ExecutionStrategy getStrategy(OperationDefinition.Operation operation) { } } - public Supplier getNormalizedQueryTree() { + public Supplier getNormalizedQueryTree() { return queryTree; } @@ -368,12 +372,60 @@ public ResultNodesInfo getResultNodesInfo() { return resultNodesInfo; } + private GraphQlNormalizedOperation createGraphQLNormalizedOperation() { + // Check for experimental support for normalized documents + if (hasNormalizedDocumentSupport()) { + return createNormalizedOperation(); + } + + return createExecutableNormalizedOperation(); + } + + @ExperimentalApi + private NormalizedOperation createNormalizedOperation() { + var normalizedDocument = NormalizedDocumentFactory.createNormalizedDocument(graphQLSchema, document); + + // Search the document for the operation that matches the operationDefinition name; + // if there is no match the query may be anonymous, so fall back to the single operation in the document.
+ var normalizedOperations = normalizedDocument.getNormalizedOperations(); + var normalizedOperation = normalizedOperations.stream() + .filter(this::isExecutingOperation) + .findAny() + .map(NormalizedDocument.NormalizedOperationWithAssumedSkipIncludeVariables::getNormalizedOperation) + .orElseGet(normalizedDocument::getSingleNormalizedOperation); + + return normalizedOperation; + } + + private ExecutableNormalizedOperation createExecutableNormalizedOperation() { + var executableNormalizedOperation = ExecutableNormalizedOperationFactory.createExecutableNormalizedOperation(graphQLSchema, operationDefinition, fragmentsByName, coercedVariables); + + return executableNormalizedOperation; + } + + private boolean isExecutingOperation(NormalizedDocument.NormalizedOperationWithAssumedSkipIncludeVariables op) { + var operation = op.getNormalizedOperation(); + var operationName = operation.getOperationName(); + var operationDefinitionName = operationDefinition.getName(); + if (operationName == null || operationDefinitionName == null) { + return false; + } + + return operationName.equals(operationDefinitionName); + } + @Internal public boolean hasIncrementalSupport() { GraphQLContext graphqlContext = getGraphQLContext(); return graphqlContext != null && graphqlContext.getBoolean(ExperimentalApi.ENABLE_INCREMENTAL_SUPPORT); } + @Internal + private boolean hasNormalizedDocumentSupport() { + GraphQLContext graphqlContext = getGraphQLContext(); + return graphqlContext != null && graphqlContext.getBoolean(ExperimentalApi.ENABLE_NORMALIZED_DOCUMENT_SUPPORT); + } + @Internal public EngineRunningState getEngineRunningState() { return engineRunningState; diff --git a/src/main/java/graphql/execution/ExecutionStrategy.java b/src/main/java/graphql/execution/ExecutionStrategy.java index 563d4d5279..21e5787860 100644 --- a/src/main/java/graphql/execution/ExecutionStrategy.java +++ b/src/main/java/graphql/execution/ExecutionStrategy.java @@ -27,8 +27,8 @@ import 
graphql.extensions.ExtensionsBuilder; import graphql.introspection.Introspection; import graphql.language.Field; -import graphql.normalized.ExecutableNormalizedField; -import graphql.normalized.ExecutableNormalizedOperation; +import graphql.normalized.GraphQlNormalizedField; +import graphql.normalized.GraphQlNormalizedOperation; import graphql.schema.CoercingSerializeException; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; @@ -420,7 +420,7 @@ private Object fetchField(GraphQLFieldDefinition fieldDef, ExecutionContext exec Supplier> argumentValues = () -> executionStepInfo.get().getArguments(); - Supplier normalizedFieldSupplier = getNormalizedField(executionContext, parameters, executionStepInfo); + Supplier normalizedFieldSupplier = getNormalizedField(executionContext, parameters, executionStepInfo); // DataFetchingFieldSelectionSet and QueryDirectives is a supplier of sorts - eg a lazy pattern DataFetchingFieldSelectionSet fieldCollector = DataFetchingFieldSelectionSetImpl.newCollector(executionContext.getGraphQLSchema(), fieldDef.getType(), normalizedFieldSupplier); @@ -515,9 +515,9 @@ private Object invokeDataFetcher(ExecutionContext executionContext, ExecutionStr return fetchedValue; } - protected Supplier getNormalizedField(ExecutionContext executionContext, ExecutionStrategyParameters parameters, Supplier executionStepInfo) { - Supplier normalizedQuery = executionContext.getNormalizedQueryTree(); - return () -> normalizedQuery.get().getNormalizedField(parameters.getField(), executionStepInfo.get().getObjectType(), executionStepInfo.get().getPath()); + protected Supplier getNormalizedField(ExecutionContext executionContext, ExecutionStrategyParameters parameters, Supplier executionStepInfo) { + Supplier normalizedQuery = executionContext.getNormalizedQueryTree(); + return () -> normalizedQuery.get().getGraphQlNormalizedField(parameters.getField(), executionStepInfo.get().getObjectType(), executionStepInfo.get().getPath()); 
} protected FetchedValue unboxPossibleDataFetcherResult(ExecutionContext executionContext, diff --git a/src/main/java/graphql/execution/ResolveType.java b/src/main/java/graphql/execution/ResolveType.java index 3ef9c556fb..0f1e68c9a3 100644 --- a/src/main/java/graphql/execution/ResolveType.java +++ b/src/main/java/graphql/execution/ResolveType.java @@ -3,8 +3,8 @@ import graphql.Assert; import graphql.Internal; import graphql.TypeResolutionEnvironment; -import graphql.normalized.ExecutableNormalizedField; -import graphql.normalized.ExecutableNormalizedOperation; +import graphql.normalized.GraphQlNormalizedField; +import graphql.normalized.GraphQlNormalizedOperation; import graphql.schema.DataFetchingFieldSelectionSet; import graphql.schema.DataFetchingFieldSelectionSetImpl; import graphql.schema.GraphQLInterfaceType; @@ -43,8 +43,8 @@ public GraphQLObjectType resolveType(ExecutionContext executionContext, MergedFi } private DataFetchingFieldSelectionSet buildSelectionSet(ExecutionContext executionContext, MergedField field, GraphQLOutputType fieldType, ExecutionStepInfo executionStepInfo) { - Supplier normalizedQuery = executionContext.getNormalizedQueryTree(); - Supplier normalizedFieldSupplier = () -> normalizedQuery.get().getNormalizedField(field, executionStepInfo.getObjectType(), executionStepInfo.getPath()); + Supplier normalizedQuery = executionContext.getNormalizedQueryTree(); + Supplier normalizedFieldSupplier = () -> normalizedQuery.get().getGraphQlNormalizedField(field, executionStepInfo.getObjectType(), executionStepInfo.getPath()); return DataFetchingFieldSelectionSetImpl.newCollector(executionContext.getGraphQLSchema(), fieldType, normalizedFieldSupplier); } diff --git a/src/main/java/graphql/language/OperationDefinition.java b/src/main/java/graphql/language/OperationDefinition.java index 824180bb49..568523a87c 100644 --- a/src/main/java/graphql/language/OperationDefinition.java +++ b/src/main/java/graphql/language/OperationDefinition.java @@ -8,6 +8,7 
@@ import graphql.language.NodeUtil.DirectivesHolder; import graphql.util.TraversalControl; import graphql.util.TraverserContext; +import org.jspecify.annotations.Nullable; import java.util.ArrayList; import java.util.LinkedHashMap; @@ -93,7 +94,7 @@ public OperationDefinition withNewChildren(NodeChildrenContainer newChildren) { ); } - public String getName() { + public @Nullable String getName() { return name; } diff --git a/src/main/java/graphql/normalized/ExecutableNormalizedField.java b/src/main/java/graphql/normalized/ExecutableNormalizedField.java index 963c2ae5a9..e533997726 100644 --- a/src/main/java/graphql/normalized/ExecutableNormalizedField.java +++ b/src/main/java/graphql/normalized/ExecutableNormalizedField.java @@ -50,7 +50,7 @@ */ @PublicApi @Mutable -public class ExecutableNormalizedField { +public class ExecutableNormalizedField implements GraphQlNormalizedField { private final String alias; private final ImmutableMap normalizedArguments; private final LinkedHashMap resolvedArguments; @@ -80,62 +80,7 @@ private ExecutableNormalizedField(Builder builder) { this.deferredExecutions = builder.deferredExecutions; } - /** - * Determines whether this {@link ExecutableNormalizedField} needs a fragment to select the field. However, it considers the parent - * output type when determining whether it needs a fragment. - *

- * Consider the following schema - * - *

-     * interface Animal {
-     *     name: String
-     *     parent: Animal
-     * }
-     * type Cat implements Animal {
-     *     name: String
-     *     parent: Cat
-     * }
-     * type Dog implements Animal {
-     *     name: String
-     *     parent: Dog
-     *     isGoodBoy: Boolean
-     * }
-     * type Query {
-     *     animal: Animal
-     * }
-     * 
- *

- * and the following query - * - *

-     * {
-     *     animal {
-     *         parent {
-     *             name
-     *         }
-     *     }
-     * }
-     * 
- *

- * Then we would get the following {@link ExecutableNormalizedOperation} - * - *

-     * -Query.animal: Animal
-     * --[Cat, Dog].parent: Cat, Dog
-     * ---[Cat, Dog].name: String
-     * 
- *

- * If we simply checked the {@link #parent}'s {@link #getFieldDefinitions(GraphQLSchema)} that would - * point us to {@code Cat.parent} and {@code Dog.parent} whose output types would incorrectly answer - * our question whether this is conditional? - *

- * We MUST consider that the output type of the {@code parent} field is {@code Animal} and - * NOT {@code Cat} or {@code Dog} as their respective implementations would say. - * - * @param schema - the graphql schema in play - * - * @return true if the field is conditional - */ + @Override public boolean isConditional(@NonNull GraphQLSchema schema) { if (parent == null) { return false; @@ -176,10 +121,12 @@ public boolean isConditional(@NonNull GraphQLSchema schema) { return unwrapAll(parentFieldDef.getType()) != oneObjectType; } + @Override public boolean hasChildren() { return children.size() > 0; } + @Override public GraphQLOutputType getType(GraphQLSchema schema) { List fieldDefinitions = getFieldDefinitions(schema); Set fieldTypes = fieldDefinitions.stream().map(fd -> simplePrint(fd.getType())).collect(toSet()); @@ -204,6 +151,7 @@ public void forEachFieldDefinition(GraphQLSchema schema, Consumer getFieldDefinitions(GraphQLSchema schema) { ImmutableList.Builder builder = ImmutableList.builder(); forEachFieldDefinition(schema, builder::add); @@ -270,16 +218,7 @@ public void addDeferredExecutions(Collection deferr this.deferredExecutions.addAll(deferredExecutions); } - /** - * All merged fields have the same name so this is the name of the {@link ExecutableNormalizedField}. - *

- * WARNING: This is not always the key in the execution result, because of possible field aliases. - * - * @return the name of this {@link ExecutableNormalizedField} - * - * @see #getResultKey() - * @see #getAlias() - */ + @Override public String getName() { return getFieldName(); } @@ -294,14 +233,7 @@ public String getFieldName() { return fieldName; } - /** - * Returns the result key of this {@link ExecutableNormalizedField} within the overall result. - * This is either a field alias or the value of {@link #getName()} - * - * @return the result key for this {@link ExecutableNormalizedField}. - * - * @see #getName() - */ + @Override public String getResultKey() { if (alias != null) { return alias; @@ -309,12 +241,7 @@ public String getResultKey() { return getName(); } - /** - * @return the field alias used or null if there is none - * - * @see #getResultKey() - * @see #getName() - */ + @Override public String getAlias() { return alias; } @@ -344,31 +271,16 @@ public ImmutableMap getNormalizedArguments() { return normalizedArguments; } - /** - * @return a map of the resolved argument values - */ + @Override public LinkedHashMap getResolvedArguments() { return resolvedArguments; } - - /** - * A {@link ExecutableNormalizedField} can sometimes (for non-concrete types like interfaces and unions) - * have more than one object type it could be when executed. There is no way to know what it will be until - * the field is executed over data and the type is resolved via a {@link graphql.schema.TypeResolver}. - *

- * This method returns all the possible types a field can be which is one or more {@link GraphQLObjectType} - * names. - *

- * Warning: This returns a Mutable Set. No defensive copy is made for performance reasons. - * - * @return a set of the possible type names this field could be. - */ + @Override public Set getObjectTypeNames() { return objectTypeNames; } - /** * This returns the first entry in {@link #getObjectTypeNames()}. Sometimes you know a field cant be more than one * type and this method is a shortcut one to help you. @@ -390,9 +302,7 @@ public String printDetails() { return result + objectTypeNamesToString() + "." + fieldName; } - /** - * @return a helper method to show the object types names as a string - */ + @Override public String objectTypeNamesToString() { if (objectTypeNames.size() == 1) { return objectTypeNames.iterator().next(); @@ -458,12 +368,7 @@ public List getChildren(String objectTypeName) { .collect(toList()); } - /** - * the level of the {@link ExecutableNormalizedField} in the operation hierarchy with top level fields - * starting at 1 - * - * @return the level of the {@link ExecutableNormalizedField} in the operation hierarchy - */ + @Override public int getLevel() { return level; } @@ -475,6 +380,11 @@ public ExecutableNormalizedField getParent() { return parent; } + @Override + public GraphQlNormalizedField getGraphQlNormalizedParent() { + return getParent(); + } + /** * @return the {@link NormalizedDeferredExecution}s associated with this {@link ExecutableNormalizedField}. 
* @@ -513,6 +423,11 @@ public void traverseSubTree(Consumer consumer) { }); } + @Override + public List getGraphQlNormalizedChildren() { + return ImmutableKit.map(getChildren(), child -> child); + } + private void traverseImpl(ExecutableNormalizedField root, Consumer consumer, int curRelativeLevel, diff --git a/src/main/java/graphql/normalized/ExecutableNormalizedOperation.java b/src/main/java/graphql/normalized/ExecutableNormalizedOperation.java index cfcda2746d..f64f032265 100644 --- a/src/main/java/graphql/normalized/ExecutableNormalizedOperation.java +++ b/src/main/java/graphql/normalized/ExecutableNormalizedOperation.java @@ -8,6 +8,7 @@ import graphql.execution.directives.QueryDirectives; import graphql.language.Field; import graphql.language.OperationDefinition; +import graphql.normalized.nf.NormalizedOperation; import graphql.schema.FieldCoordinates; import graphql.schema.GraphQLFieldsContainer; @@ -23,7 +24,7 @@ * An operation consists of a list of {@link ExecutableNormalizedField}s in a parent child hierarchy */ @PublicApi -public class ExecutableNormalizedOperation { +public class ExecutableNormalizedOperation implements GraphQlNormalizedOperation { private final OperationDefinition.Operation operation; private final String operationName; private final List topLevelFields; @@ -177,4 +178,9 @@ public ExecutableNormalizedField getNormalizedField(MergedField mergedField, Gra } return Assert.assertShouldNeverHappen("normalized field not found"); } + + @Override + public GraphQlNormalizedField getGraphQlNormalizedField(MergedField mergedField, GraphQLFieldsContainer fieldsContainer, ResultPath resultPath) { + return getNormalizedField(mergedField, fieldsContainer, resultPath); + } } diff --git a/src/main/java/graphql/normalized/GraphQlNormalizedField.java b/src/main/java/graphql/normalized/GraphQlNormalizedField.java new file mode 100644 index 0000000000..6e4d1234c2 --- /dev/null +++ b/src/main/java/graphql/normalized/GraphQlNormalizedField.java @@ -0,0 
+1,166 @@ +package graphql.normalized; + +import graphql.Internal; +import graphql.PublicApi; +import graphql.schema.GraphQLFieldDefinition; +import graphql.schema.GraphQLObjectType; +import graphql.schema.GraphQLOutputType; +import graphql.schema.GraphQLSchema; +import org.jspecify.annotations.NonNull; + +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Set; + +/** + * A {@link GraphQlNormalizedField} represents a normalized field in GraphQL. + */ +@Internal +public interface GraphQlNormalizedField { + /** + * @return the children of the {@link GraphQlNormalizedField} + */ + List getGraphQlNormalizedChildren(); + + /** + * @return the parent of this {@link GraphQlNormalizedField} or null if it's a top level field + */ + GraphQlNormalizedField getGraphQlNormalizedParent(); + + /** + * A {@link GraphQlNormalizedField} can sometimes (for non-concrete types like interfaces and unions) + * have more than one object type it could be when executed. There is no way to know what it will be until + * the field is executed over data and the type is resolved via a {@link graphql.schema.TypeResolver}. + *

+ * This method returns all the possible types a field can be which is one or more {@link GraphQLObjectType} + * names. + *

+ * Warning: This returns a Mutable Set. No defensive copy is made for performance reasons. + * + * @return a set of the possible type names this field could be. + */ + Set getObjectTypeNames(); + + /** + * @return the field alias used or null if there is none + * + * @see #getResultKey() + * @see #getName() + */ + String getAlias(); + + /** + * All merged fields have the same name so this is the name of the {@link GraphQlNormalizedField}. + *

+ * WARNING: This is not always the key in the execution result, because of possible field aliases. + * + * @return the name of this {@link GraphQlNormalizedField} + * + * @see #getResultKey() + * @see #getAlias() + */ + String getName(); + + /** + * @return true if this field has children, false otherwise + */ + boolean hasChildren(); + + /** + * @return a helper method to show the object types names as a string + */ + String objectTypeNamesToString(); + + /** + * Returns the field definitions for this field in the given schema. + * @param schema the GraphQL schema to look up the field definitions + * @return a list of field definitions for this field + */ + List getFieldDefinitions(GraphQLSchema schema); + + /** + * @return the GraphQL output type for this field + */ + GraphQLOutputType getType(GraphQLSchema schema); + + /** + * @return a map of the resolved argument values + */ + LinkedHashMap getResolvedArguments(); + + /** + * the level of the {@link GraphQlNormalizedField} in the operation hierarchy with top level fields + * starting at 1 + * + * @return the level of the {@link GraphQlNormalizedField} in the operation hierarchy + */ + int getLevel(); + + /** + * Determines whether this {@link GraphQlNormalizedField} needs a fragment to select the field. However, it considers the parent + * output type when determining whether it needs a fragment. + *

+ * Consider the following schema + * + *

+     * interface Animal {
+     *     name: String
+     *     parent: Animal
+     * }
+     * type Cat implements Animal {
+     *     name: String
+     *     parent: Cat
+     * }
+     * type Dog implements Animal {
+     *     name: String
+     *     parent: Dog
+     *     isGoodBoy: Boolean
+     * }
+     * type Query {
+     *     animal: Animal
+     * }
+     * 
+ *

+ * and the following query + * + *

+     * {
+     *     animal {
+     *         parent {
+     *             name
+     *         }
+     *     }
+     * }
+     * 
+ *

+ * Then we would get the following {@link GraphQlNormalizedOperation} + * + *

+     * -Query.animal: Animal
+     * --[Cat, Dog].parent: Cat, Dog
+     * ---[Cat, Dog].name: String
+     * 
+ *

+ * If we simply checked the parent's {@link #getFieldDefinitions(GraphQLSchema)} that would + * point us to {@code Cat.parent} and {@code Dog.parent} whose output types would incorrectly answer + * our question whether this is conditional? + *

+ * We MUST consider that the output type of the {@code parent} field is {@code Animal} and + * NOT {@code Cat} or {@code Dog} as their respective implementations would say. + * + * @param schema - the graphql schema in play + * + * @return true if the field is conditional + */ + boolean isConditional(@NonNull GraphQLSchema schema); + + /** + * Returns the result key of this {@link GraphQlNormalizedField} within the overall result. + * This is either a field alias or the value of {@link #getName()} + * + * @return the result key for this {@link GraphQlNormalizedField}. + * + * @see #getName() + */ + String getResultKey(); +} diff --git a/src/main/java/graphql/normalized/GraphQlNormalizedOperation.java b/src/main/java/graphql/normalized/GraphQlNormalizedOperation.java new file mode 100644 index 0000000000..eda7943638 --- /dev/null +++ b/src/main/java/graphql/normalized/GraphQlNormalizedOperation.java @@ -0,0 +1,27 @@ +package graphql.normalized; + +import graphql.Internal; +import graphql.PublicApi; +import graphql.execution.MergedField; +import graphql.execution.ResultPath; +import graphql.schema.GraphQLFieldsContainer; + +/** + * A {@link GraphQlNormalizedOperation} represents a normalized operation in GraphQL. + * It is used to handle the execution of GraphQL operations according to the specification, + * including merging duplicate fields and handling type detection for fields that may correspond + * to multiple object types. + */ +@Internal +public interface GraphQlNormalizedOperation { + /** + * This will find a {@link GraphQlNormalizedField} given a merged field and a result path. 
If this does not find a field it will assert with an exception + * + * @param mergedField the merged field + * @param fieldsContainer the containing type of that field + * @param resultPath the result path in play + * + * @return the GraphQlNormalizedField + */ + GraphQlNormalizedField getGraphQlNormalizedField(MergedField mergedField, GraphQLFieldsContainer fieldsContainer, ResultPath resultPath); } diff --git a/src/main/java/graphql/normalized/nf/NormalizedField.java b/src/main/java/graphql/normalized/nf/NormalizedField.java index 3b8aa08bdd..ccf6d4ae1b 100644 --- a/src/main/java/graphql/normalized/nf/NormalizedField.java +++ b/src/main/java/graphql/normalized/nf/NormalizedField.java @@ -11,6 +11,7 @@ import graphql.language.Directive; import graphql.normalized.ExecutableNormalizedOperation; import graphql.normalized.NormalizedInputValue; +import graphql.normalized.GraphQlNormalizedField; import graphql.schema.GraphQLFieldDefinition; import graphql.schema.GraphQLInterfaceType; import graphql.schema.GraphQLNamedOutputType; @@ -51,7 +52,7 @@ */ @ExperimentalApi @Mutable -public class NormalizedField { +public class NormalizedField implements GraphQlNormalizedField { private final String alias; private final ImmutableMap normalizedArguments; private final LinkedHashMap resolvedArguments; @@ -296,6 +297,11 @@ public String getResultKey() { return getName(); } + @Override + public GraphQlNormalizedField getGraphQlNormalizedParent() { + return getParent(); + } + /** * @return the field alias used or null if there is none * @see #getResultKey() @@ -583,6 +589,10 @@ public NormalizedField transform(Consumer builderConsumer) { return builder.build(); } + @Override + public List getGraphQlNormalizedChildren() { + return ImmutableKit.map(getChildren(), child -> child); + } public static class Builder { private LinkedHashSet objectTypeNames = new LinkedHashSet<>(); diff --git a/src/main/java/graphql/normalized/nf/NormalizedOperation.java
b/src/main/java/graphql/normalized/nf/NormalizedOperation.java index 6d3c333d0b..dde55cbd41 100644 --- a/src/main/java/graphql/normalized/nf/NormalizedOperation.java +++ b/src/main/java/graphql/normalized/nf/NormalizedOperation.java @@ -8,8 +8,11 @@ import graphql.execution.directives.QueryDirectives; import graphql.language.Field; import graphql.language.OperationDefinition; +import graphql.normalized.GraphQlNormalizedOperation; +import graphql.normalized.GraphQlNormalizedField; import graphql.schema.FieldCoordinates; import graphql.schema.GraphQLFieldsContainer; +import org.jspecify.annotations.Nullable; import java.util.List; import java.util.Map; @@ -23,7 +26,7 @@ * An operation consists of a list of {@link NormalizedField}s in a parent child hierarchy */ @ExperimentalApi -public class NormalizedOperation { +public class NormalizedOperation implements GraphQlNormalizedOperation { private final OperationDefinition.Operation operation; private final String operationName; private final List rootFields; @@ -65,7 +68,7 @@ public OperationDefinition.Operation getOperation() { /** * @return the operation name, which can be null */ - public String getOperationName() { + public @Nullable String getOperationName() { return operationName; } @@ -177,4 +180,9 @@ public NormalizedField getNormalizedField(MergedField mergedField, GraphQLFields } return Assert.assertShouldNeverHappen("normalized field not found"); } + + @Override + public GraphQlNormalizedField getGraphQlNormalizedField(MergedField mergedField, GraphQLFieldsContainer fieldsContainer, ResultPath resultPath) { + return getNormalizedField(mergedField, fieldsContainer, resultPath); + } } diff --git a/src/main/java/graphql/schema/DataFetchingFieldSelectionSetImpl.java b/src/main/java/graphql/schema/DataFetchingFieldSelectionSetImpl.java index 4800a0dfcf..a0facaf2ee 100644 --- a/src/main/java/graphql/schema/DataFetchingFieldSelectionSetImpl.java +++ 
b/src/main/java/graphql/schema/DataFetchingFieldSelectionSetImpl.java @@ -4,7 +4,7 @@ import com.google.common.collect.ImmutableSet; import graphql.Internal; import graphql.collect.ImmutableKit; -import graphql.normalized.ExecutableNormalizedField; +import graphql.normalized.GraphQlNormalizedField; import graphql.util.LockKit; import java.io.File; @@ -79,7 +79,7 @@ public Map> getFieldsGroupedByResultKey(String field } }; - public static DataFetchingFieldSelectionSet newCollector(GraphQLSchema schema, GraphQLOutputType fieldType, Supplier normalizedFieldSupplier) { + public static DataFetchingFieldSelectionSet newCollector(GraphQLSchema schema, GraphQLOutputType fieldType, Supplier normalizedFieldSupplier) { if (!GraphQLTypeUtil.isLeaf(fieldType)) { return new DataFetchingFieldSelectionSetImpl(normalizedFieldSupplier, schema); } else { @@ -88,7 +88,7 @@ public static DataFetchingFieldSelectionSet newCollector(GraphQLSchema schema, G } } - private final Supplier normalizedFieldSupplier; + private final Supplier normalizedFieldSupplier; private final LockKit.ReentrantLock lock = new LockKit.ReentrantLock(); private volatile boolean computedValues; @@ -102,7 +102,7 @@ public static DataFetchingFieldSelectionSet newCollector(GraphQLSchema schema, G private Set flattenedFieldsForGlobSearching; private final GraphQLSchema schema; - private DataFetchingFieldSelectionSetImpl(Supplier normalizedFieldSupplier, GraphQLSchema schema) { + private DataFetchingFieldSelectionSetImpl(Supplier normalizedFieldSupplier, GraphQLSchema schema) { this.schema = schema; this.normalizedFieldSupplier = normalizedFieldSupplier; } @@ -219,7 +219,7 @@ private void computeValuesLazily(boolean immediate) { // this supplier is a once only thread synced call - so do it outside this lock // if only to have only 1 lock in action at a time - ExecutableNormalizedField currentNormalisedField = normalizedFieldSupplier.get(); + GraphQlNormalizedField currentNormalisedField = normalizedFieldSupplier.get(); 
lock.runLocked(() -> { if (computedValues) { @@ -240,9 +240,9 @@ private void computeValuesLazily(boolean immediate) { }); } - private void traverseSubSelectedFields(ExecutableNormalizedField currentNormalisedField, ImmutableList.Builder immediateFieldsBuilder, String qualifiedFieldPrefix, String simpleFieldPrefix, boolean firstLevel, boolean immediate) { - List children = currentNormalisedField.getChildren(); - for (ExecutableNormalizedField normalizedSubSelectedField : children) { + private void traverseSubSelectedFields(GraphQlNormalizedField currentNormalisedField, ImmutableList.Builder immediateFieldsBuilder, String qualifiedFieldPrefix, String simpleFieldPrefix, boolean firstLevel, boolean immediate) { + List children = currentNormalisedField.getGraphQlNormalizedChildren(); + for (GraphQlNormalizedField normalizedSubSelectedField : children) { String typeQualifiedName = mkTypeQualifiedName(normalizedSubSelectedField); String simpleName = normalizedSubSelectedField.getName(); @@ -274,7 +274,7 @@ private String removeLeadingSlash(String fieldGlobPattern) { return fieldGlobPattern; } - private static String mkTypeQualifiedName(ExecutableNormalizedField executableNormalizedField) { + private static String mkTypeQualifiedName(GraphQlNormalizedField executableNormalizedField) { return executableNormalizedField.objectTypeNamesToString() + "." 
+ executableNormalizedField.getName(); } @@ -306,10 +306,10 @@ private static class SelectedFieldImpl implements SelectedField { private final String qualifiedName; private final String fullyQualifiedName; private final DataFetchingFieldSelectionSet selectionSet; - private final ExecutableNormalizedField executableNormalizedField; + private final GraphQlNormalizedField executableNormalizedField; private final GraphQLSchema schema; - private SelectedFieldImpl(String simpleQualifiedName, String fullyQualifiedName, ExecutableNormalizedField executableNormalizedField, GraphQLSchema schema) { + private SelectedFieldImpl(String simpleQualifiedName, String fullyQualifiedName, GraphQlNormalizedField executableNormalizedField, GraphQLSchema schema) { this.schema = schema; this.qualifiedName = simpleQualifiedName; this.fullyQualifiedName = fullyQualifiedName; @@ -317,11 +317,11 @@ private SelectedFieldImpl(String simpleQualifiedName, String fullyQualifiedName, this.selectionSet = new DataFetchingFieldSelectionSetImpl(() -> executableNormalizedField, schema); } - private SelectedField mkParent(ExecutableNormalizedField executableNormalizedField) { + private SelectedField mkParent(GraphQlNormalizedField executableNormalizedField) { String parentSimpleQualifiedName = beforeLastSlash(qualifiedName); String parentFullyQualifiedName = beforeLastSlash(fullyQualifiedName); - return executableNormalizedField.getParent() == null ? null : - new SelectedFieldImpl(parentSimpleQualifiedName, parentFullyQualifiedName, executableNormalizedField.getParent(), schema); + return executableNormalizedField.getGraphQlNormalizedParent() == null ? 
null : + new SelectedFieldImpl(parentSimpleQualifiedName, parentFullyQualifiedName, executableNormalizedField.getGraphQlNormalizedParent(), schema); } private String beforeLastSlash(String name) { From 8e2e760f6af360809d99d80b4ea680a01fa1911f Mon Sep 17 00:00:00 2001 From: Tim Ward Date: Fri, 11 Jul 2025 11:25:05 +0800 Subject: [PATCH 2/3] Add normalized document provider to allow caching of normalized documents Fix --- src/main/java/graphql/GraphQL.java | 39 +++++++++++--- .../java/graphql/execution/Execution.java | 8 ++- .../graphql/execution/ExecutionContext.java | 51 ++++++++++++------- .../execution/ExecutionContextBuilder.java | 10 ++++ .../nf/provider/CreateNormalizedDocument.java | 8 +++ .../NoOpNormalizedDocumentProvider.java | 16 ++++++ .../nf/provider/NormalizedDocumentEntry.java | 32 ++++++++++++ .../provider/NormalizedDocumentProvider.java | 17 +++++++ .../graphql/execution/ExecutionTest.groovy | 6 ++- .../FieldValidationTest.groovy | 3 +- 10 files changed, 161 insertions(+), 29 deletions(-) create mode 100644 src/main/java/graphql/normalized/nf/provider/CreateNormalizedDocument.java create mode 100644 src/main/java/graphql/normalized/nf/provider/NoOpNormalizedDocumentProvider.java create mode 100644 src/main/java/graphql/normalized/nf/provider/NormalizedDocumentEntry.java create mode 100644 src/main/java/graphql/normalized/nf/provider/NormalizedDocumentProvider.java diff --git a/src/main/java/graphql/GraphQL.java b/src/main/java/graphql/GraphQL.java index bee767ae43..fc1594c00e 100644 --- a/src/main/java/graphql/GraphQL.java +++ b/src/main/java/graphql/GraphQL.java @@ -9,7 +9,6 @@ import graphql.execution.ExecutionId; import graphql.execution.ExecutionIdProvider; import graphql.execution.ExecutionStrategy; -import graphql.execution.ResponseMapFactory; import graphql.execution.SimpleDataFetcherExceptionHandler; import graphql.execution.SubscriptionExecutionStrategy; import graphql.execution.ValueUnboxer; @@ -25,6 +24,8 @@ import 
graphql.execution.preparsed.PreparsedDocumentEntry; import graphql.execution.preparsed.PreparsedDocumentProvider; import graphql.language.Document; +import graphql.normalized.nf.provider.NoOpNormalizedDocumentProvider; +import graphql.normalized.nf.provider.NormalizedDocumentProvider; import graphql.schema.GraphQLSchema; import graphql.validation.ValidationError; @@ -158,6 +159,7 @@ public static GraphQLUnusualConfiguration.GraphQLContextConfiguration unusualCon private final ExecutionIdProvider idProvider; private final Instrumentation instrumentation; private final PreparsedDocumentProvider preparsedDocumentProvider; + private final NormalizedDocumentProvider normalizedDocumentProvider; private final ValueUnboxer valueUnboxer; private final boolean doNotAutomaticallyDispatchDataLoader; @@ -170,6 +172,7 @@ private GraphQL(Builder builder) { this.idProvider = assertNotNull(builder.idProvider, () -> "idProvider must be non null"); this.instrumentation = assertNotNull(builder.instrumentation, () -> "instrumentation must not be null"); this.preparsedDocumentProvider = assertNotNull(builder.preparsedDocumentProvider, () -> "preparsedDocumentProvider must be non null"); + this.normalizedDocumentProvider = assertNotNull(builder.normalizedDocumentProvider, () -> "normalizedDocumentProvider must be non null"); this.valueUnboxer = assertNotNull(builder.valueUnboxer, () -> "valueUnboxer must not be null"); this.doNotAutomaticallyDispatchDataLoader = builder.doNotAutomaticallyDispatchDataLoader; } @@ -227,6 +230,13 @@ public PreparsedDocumentProvider getPreparsedDocumentProvider() { return preparsedDocumentProvider; } + /** + * @return the NormalizedDocumentProvider for this {@link GraphQL} instance + */ + public NormalizedDocumentProvider getNormalizedDocumentProvider() { + return normalizedDocumentProvider; + } + /** * @return the ValueUnboxer for this {@link GraphQL} instance */ @@ -261,7 +271,8 @@ public GraphQL transform(Consumer builderConsumer) { 
.subscriptionExecutionStrategy(this.subscriptionStrategy) .executionIdProvider(Optional.ofNullable(this.idProvider).orElse(builder.idProvider)) .instrumentation(Optional.ofNullable(this.instrumentation).orElse(builder.instrumentation)) - .preparsedDocumentProvider(Optional.ofNullable(this.preparsedDocumentProvider).orElse(builder.preparsedDocumentProvider)); + .preparsedDocumentProvider(Optional.ofNullable(this.preparsedDocumentProvider).orElse(builder.preparsedDocumentProvider)) + .normalizedDocumentProvider(Optional.ofNullable(this.normalizedDocumentProvider).orElse(builder.normalizedDocumentProvider)); builderConsumer.accept(builder); @@ -278,6 +289,7 @@ public static class Builder { private ExecutionIdProvider idProvider = DEFAULT_EXECUTION_ID_PROVIDER; private Instrumentation instrumentation = null; // deliberate default here private PreparsedDocumentProvider preparsedDocumentProvider = NoOpPreparsedDocumentProvider.INSTANCE; + private NormalizedDocumentProvider normalizedDocumentProvider = NoOpNormalizedDocumentProvider.INSTANCE; private boolean doNotAutomaticallyDispatchDataLoader = false; private ValueUnboxer valueUnboxer = ValueUnboxer.DEFAULT; @@ -328,6 +340,18 @@ public Builder preparsedDocumentProvider(PreparsedDocumentProvider preparsedDocu return this; } + /** + * This allows you to set a {@link NormalizedDocumentProvider} that will be used to provide normalized documents + * + * @param normalizedDocumentProvider the provider of normalized documents + * + * @return this builder + */ + public Builder normalizedDocumentProvider(NormalizedDocumentProvider normalizedDocumentProvider) { + this.normalizedDocumentProvider = normalizedDocumentProvider; + return this; + } + public Builder executionIdProvider(ExecutionIdProvider executionIdProvider) { this.idProvider = assertNotNull(executionIdProvider, () -> "ExecutionIdProvider must be non null"); return this; @@ -497,7 +521,7 @@ public CompletableFuture executeAsync(ExecutionInput executionI GraphQLSchema 
graphQLSchema = instrumentation.instrumentSchema(this.graphQLSchema, instrumentationParameters, instrumentationState); - CompletableFuture executionResult = parseValidateAndExecute(instrumentedExecutionInput, graphQLSchema, instrumentationState, engineRunningState); + CompletableFuture executionResult = parseValidateAndExecute(instrumentedExecutionInput, graphQLSchema, instrumentationState, engineRunningState, normalizedDocumentProvider); // // finish up instrumentation executionResult = executionResult.whenComplete(completeInstrumentationCtxCF(executionInstrumentation)); @@ -529,7 +553,7 @@ private ExecutionInput ensureInputHasId(ExecutionInput executionInput) { } - private CompletableFuture parseValidateAndExecute(ExecutionInput executionInput, GraphQLSchema graphQLSchema, InstrumentationState instrumentationState, EngineRunningState engineRunningState) { + private CompletableFuture parseValidateAndExecute(ExecutionInput executionInput, GraphQLSchema graphQLSchema, InstrumentationState instrumentationState, EngineRunningState engineRunningState, NormalizedDocumentProvider normalizedDocumentProvider) { AtomicReference executionInputRef = new AtomicReference<>(executionInput); Function computeFunction = transformedInput -> { // if they change the original query in the pre-parser, then we want to see it downstream from then on @@ -542,7 +566,7 @@ private CompletableFuture parseValidateAndExecute(ExecutionInpu return CompletableFuture.completedFuture(new ExecutionResultImpl(preparsedDocumentEntry.getErrors())); } try { - return execute(executionInputRef.get(), preparsedDocumentEntry.getDocument(), graphQLSchema, instrumentationState, engineRunningState); + return execute(executionInputRef.get(), preparsedDocumentEntry.getDocument(), graphQLSchema, instrumentationState, engineRunningState, normalizedDocumentProvider); } catch (AbortExecutionException e) { return CompletableFuture.completedFuture(e.toExecutionResult()); } @@ -606,10 +630,11 @@ private CompletableFuture 
execute(ExecutionInput executionInput Document document, GraphQLSchema graphQLSchema, InstrumentationState instrumentationState, - EngineRunningState engineRunningState + EngineRunningState engineRunningState, + NormalizedDocumentProvider normalizedDocumentProvider ) { - Execution execution = new Execution(queryStrategy, mutationStrategy, subscriptionStrategy, instrumentation, valueUnboxer, doNotAutomaticallyDispatchDataLoader); + Execution execution = new Execution(queryStrategy, mutationStrategy, subscriptionStrategy, instrumentation, valueUnboxer, doNotAutomaticallyDispatchDataLoader, normalizedDocumentProvider); ExecutionId executionId = executionInput.getExecutionId(); return execution.execute(document, graphQLSchema, executionId, executionInput, instrumentationState, engineRunningState); diff --git a/src/main/java/graphql/execution/Execution.java b/src/main/java/graphql/execution/Execution.java index 4c8cb7da3d..649e31dff1 100644 --- a/src/main/java/graphql/execution/Execution.java +++ b/src/main/java/graphql/execution/Execution.java @@ -25,6 +25,7 @@ import graphql.language.NodeUtil; import graphql.language.OperationDefinition; import graphql.language.VariableDefinition; +import graphql.normalized.nf.provider.NormalizedDocumentProvider; import graphql.schema.GraphQLObjectType; import graphql.schema.GraphQLSchema; import graphql.schema.impl.SchemaUtil; @@ -55,20 +56,22 @@ public class Execution { private final Instrumentation instrumentation; private final ValueUnboxer valueUnboxer; private final boolean doNotAutomaticallyDispatchDataLoader; - + private final NormalizedDocumentProvider normalizedDocumentProvider; public Execution(ExecutionStrategy queryStrategy, ExecutionStrategy mutationStrategy, ExecutionStrategy subscriptionStrategy, Instrumentation instrumentation, ValueUnboxer valueUnboxer, - boolean doNotAutomaticallyDispatchDataLoader) { + boolean doNotAutomaticallyDispatchDataLoader, + NormalizedDocumentProvider normalizedDocumentProvider) { 
this.queryStrategy = queryStrategy != null ? queryStrategy : new AsyncExecutionStrategy(); this.mutationStrategy = mutationStrategy != null ? mutationStrategy : new AsyncSerialExecutionStrategy(); this.subscriptionStrategy = subscriptionStrategy != null ? subscriptionStrategy : new AsyncExecutionStrategy(); this.instrumentation = instrumentation; this.valueUnboxer = valueUnboxer; this.doNotAutomaticallyDispatchDataLoader = doNotAutomaticallyDispatchDataLoader; + this.normalizedDocumentProvider = normalizedDocumentProvider; } public CompletableFuture execute(Document document, GraphQLSchema graphQLSchema, ExecutionId executionId, ExecutionInput executionInput, InstrumentationState instrumentationState, EngineRunningState engineRunningState) { @@ -118,6 +121,7 @@ public CompletableFuture execute(Document document, GraphQLSche .locale(executionInput.getLocale()) .valueUnboxer(valueUnboxer) .responseMapFactory(responseMapFactory) + .normalizedDocumentProvider(normalizedDocumentProvider) .executionInput(executionInput) .propagateErrorsOnNonNullContractFailure(propagateErrorsOnNonNullContractFailure) .engineRunningState(engineRunningState) diff --git a/src/main/java/graphql/execution/ExecutionContext.java b/src/main/java/graphql/execution/ExecutionContext.java index 21d3f1b402..8182eb4a05 100644 --- a/src/main/java/graphql/execution/ExecutionContext.java +++ b/src/main/java/graphql/execution/ExecutionContext.java @@ -23,6 +23,7 @@ import graphql.normalized.nf.NormalizedDocument; import graphql.normalized.nf.NormalizedDocumentFactory; import graphql.normalized.nf.NormalizedOperation; +import graphql.normalized.nf.provider.NormalizedDocumentProvider; import graphql.schema.GraphQLSchema; import graphql.util.FpKit; import graphql.util.LockKit; @@ -34,9 +35,11 @@ import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicInteger; import 
java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; +import java.util.function.Function; import java.util.function.Supplier; @SuppressWarnings("TypeParameterUnusedInFormals") @@ -67,6 +70,7 @@ public class ExecutionContext { private final IncrementalCallState incrementalCallState = new IncrementalCallState(); private final ValueUnboxer valueUnboxer; private final ResponseMapFactory responseMapFactory; + private final NormalizedDocumentProvider normalizedDocumentProvider; private final ExecutionInput executionInput; private final Supplier queryTree; @@ -100,11 +104,12 @@ public class ExecutionContext { this.locale = builder.locale; this.valueUnboxer = builder.valueUnboxer; this.responseMapFactory = builder.responseMapFactory; + this.normalizedDocumentProvider = builder.normalizedDocumentProvider; this.errors.set(builder.errors); this.localContext = builder.localContext; this.executionInput = builder.executionInput; this.dataLoaderDispatcherStrategy = builder.dataLoaderDispatcherStrategy; - this.queryTree = FpKit.interThreadMemoize(this::createGraphQLNormalizedOperation); + this.queryTree = FpKit.interThreadMemoize(() -> this. 
createGraphQLNormalizedOperation().join()); this.propagateErrorsOnNonNullContractFailure = builder.propagateErrorsOnNonNullContractFailure; this.engineRunningState = builder.engineRunningState; } @@ -307,6 +312,11 @@ public ResponseMapFactory getResponseMapFactory() { return responseMapFactory; } + @Internal + public NormalizedDocumentProvider getNormalizedDocumentProvider() { + return normalizedDocumentProvider; + } + /** * @return the total list of errors for this execution context */ @@ -372,29 +382,36 @@ public ResultNodesInfo getResultNodesInfo() { return resultNodesInfo; } - private GraphQlNormalizedOperation createGraphQLNormalizedOperation() { + private NormalizedDocument createNormalizedDocument() { + return NormalizedDocumentFactory.createNormalizedDocument(graphQLSchema, document); + } + + private CompletableFuture createGraphQLNormalizedOperation() { // Check for experimental support for normalized documents if (hasNormalizedDocumentSupport()) { - return createNormalizedOperation(); + return createNormalizedOperation() + .thenApply(Function.identity()); // Cast to interface. } - return createExecutableNormalizedOperation(); + return CompletableFuture.completedFuture(createExecutableNormalizedOperation()); } @ExperimentalApi - private NormalizedOperation createNormalizedOperation() { - var normalizedDocument = NormalizedDocumentFactory.createNormalizedDocument(graphQLSchema, document); - - // Search the document for the operation that matches the operationDefinition name, - // if no match then it could be anonymous query, then fallback to the first operation. 
- var normalizedOperations = normalizedDocument.getNormalizedOperations(); - var normalizedOperation = normalizedOperations.stream() - .filter(this::isExecutingOperation) - .findAny() - .map(NormalizedDocument.NormalizedOperationWithAssumedSkipIncludeVariables::getNormalizedOperation) - .orElseGet(normalizedDocument::getSingleNormalizedOperation); - - return normalizedOperation; + private CompletableFuture createNormalizedOperation() { + return normalizedDocumentProvider.getNormalizedDocument(executionInput, this::createNormalizedDocument).thenApply(normalizedDocumentEntry -> { + var normalizedDocument = normalizedDocumentEntry.getDocument(); + + // Search the document for the operation that matches the operationDefinition name, + // if no match then it could be anonymous query, then fallback to the first operation. + var normalizedOperations = normalizedDocument.getNormalizedOperations(); + var normalizedOperation = normalizedOperations.stream() + .filter(this::isExecutingOperation) + .findAny() + .map(NormalizedDocument.NormalizedOperationWithAssumedSkipIncludeVariables::getNormalizedOperation) + .orElseGet(normalizedDocument::getSingleNormalizedOperation); + + return normalizedOperation; + }); } private ExecutableNormalizedOperation createExecutableNormalizedOperation() { diff --git a/src/main/java/graphql/execution/ExecutionContextBuilder.java b/src/main/java/graphql/execution/ExecutionContextBuilder.java index fc28675f09..926bcc8fcb 100644 --- a/src/main/java/graphql/execution/ExecutionContextBuilder.java +++ b/src/main/java/graphql/execution/ExecutionContextBuilder.java @@ -14,6 +14,8 @@ import graphql.language.Document; import graphql.language.FragmentDefinition; import graphql.language.OperationDefinition; +import graphql.normalized.nf.NormalizedDocumentFactory; +import graphql.normalized.nf.provider.NormalizedDocumentProvider; import graphql.schema.GraphQLSchema; import org.dataloader.DataLoaderRegistry; import org.jspecify.annotations.Nullable; @@ -53,6 
+55,7 @@ public class ExecutionContextBuilder { boolean propagateErrorsOnNonNullContractFailure = true; EngineRunningState engineRunningState; ResponseMapFactory responseMapFactory = ResponseMapFactory.DEFAULT; + NormalizedDocumentProvider normalizedDocumentProvider; /** * @return a new builder of {@link graphql.execution.ExecutionContext}s @@ -102,6 +105,7 @@ public ExecutionContextBuilder() { propagateErrorsOnNonNullContractFailure = other.propagateErrorsOnNonNullContractFailure(); engineRunningState = other.getEngineRunningState(); responseMapFactory = other.getResponseMapFactory(); + normalizedDocumentProvider = other.getNormalizedDocumentProvider(); } public ExecutionContextBuilder instrumentation(Instrumentation instrumentation) { @@ -232,6 +236,12 @@ public ExecutionContextBuilder responseMapFactory(ResponseMapFactory responseMap return this; } + @Internal + public ExecutionContextBuilder normalizedDocumentProvider(NormalizedDocumentProvider normalizedDocumentProvider) { + this.normalizedDocumentProvider = normalizedDocumentProvider; + return this; + } + public ExecutionContextBuilder resetErrors() { this.errors = emptyList(); return this; diff --git a/src/main/java/graphql/normalized/nf/provider/CreateNormalizedDocument.java b/src/main/java/graphql/normalized/nf/provider/CreateNormalizedDocument.java new file mode 100644 index 0000000000..b325802f36 --- /dev/null +++ b/src/main/java/graphql/normalized/nf/provider/CreateNormalizedDocument.java @@ -0,0 +1,8 @@ +package graphql.normalized.nf.provider; + +import graphql.normalized.nf.NormalizedDocument; + +@FunctionalInterface +public interface CreateNormalizedDocument { + NormalizedDocument createNormalizedDocument(); +} diff --git a/src/main/java/graphql/normalized/nf/provider/NoOpNormalizedDocumentProvider.java b/src/main/java/graphql/normalized/nf/provider/NoOpNormalizedDocumentProvider.java new file mode 100644 index 0000000000..ca7e1fcf27 --- /dev/null +++ 
b/src/main/java/graphql/normalized/nf/provider/NoOpNormalizedDocumentProvider.java @@ -0,0 +1,16 @@ +package graphql.normalized.nf.provider; + +import graphql.ExecutionInput; +import graphql.Internal; + +import java.util.concurrent.CompletableFuture; + +@Internal +public class NoOpNormalizedDocumentProvider implements NormalizedDocumentProvider { + public static final NoOpNormalizedDocumentProvider INSTANCE = new NoOpNormalizedDocumentProvider(); + + @Override + public CompletableFuture getNormalizedDocument(ExecutionInput executionInput, CreateNormalizedDocument creator) { + return CompletableFuture.completedFuture(new NormalizedDocumentEntry(creator.createNormalizedDocument())); + } +} diff --git a/src/main/java/graphql/normalized/nf/provider/NormalizedDocumentEntry.java b/src/main/java/graphql/normalized/nf/provider/NormalizedDocumentEntry.java new file mode 100644 index 0000000000..d0b4466306 --- /dev/null +++ b/src/main/java/graphql/normalized/nf/provider/NormalizedDocumentEntry.java @@ -0,0 +1,32 @@ +package graphql.normalized.nf.provider; + +import graphql.GraphQLError; +import graphql.PublicApi; +import graphql.language.Document; +import graphql.normalized.nf.NormalizedDocument; + +import java.io.Serializable; +import java.util.List; + +import static graphql.Assert.assertNotNull; +import static java.util.Collections.singletonList; + +/** + * NOTE: This class implements {@link Serializable} and hence it can be serialised and placed into a distributed cache. However we + * are not aiming to provide long term compatibility and do not intend for you to place this serialised data into permanent storage, + * with time frames that cross graphql-java versions. While we don't change things unnecessarily, we may inadvertently break + * the serialised compatibility across versions. 
+ */ +@PublicApi +public class NormalizedDocumentEntry implements Serializable { + private final NormalizedDocument document; + + public NormalizedDocumentEntry(NormalizedDocument document) { + assertNotNull(document); + this.document = document; + } + + public NormalizedDocument getDocument() { + return document; + } +} diff --git a/src/main/java/graphql/normalized/nf/provider/NormalizedDocumentProvider.java b/src/main/java/graphql/normalized/nf/provider/NormalizedDocumentProvider.java new file mode 100644 index 0000000000..46e67dea32 --- /dev/null +++ b/src/main/java/graphql/normalized/nf/provider/NormalizedDocumentProvider.java @@ -0,0 +1,17 @@ +package graphql.normalized.nf.provider; + +import graphql.ExecutionInput; +import graphql.PublicSpi; + +import java.util.concurrent.CompletableFuture; + +/** + * Interface that allows clients to hook in normalized document caching. + */ +@PublicSpi +public interface NormalizedDocumentProvider { + CompletableFuture getNormalizedDocument(ExecutionInput executionInput, CreateNormalizedDocument creator); +} + + + diff --git a/src/test/groovy/graphql/execution/ExecutionTest.groovy b/src/test/groovy/graphql/execution/ExecutionTest.groovy index 6d207ae1a1..52189c5764 100644 --- a/src/test/groovy/graphql/execution/ExecutionTest.groovy +++ b/src/test/groovy/graphql/execution/ExecutionTest.groovy @@ -8,6 +8,7 @@ import graphql.MutationSchema import graphql.execution.instrumentation.InstrumentationState import graphql.execution.instrumentation.SimplePerformantInstrumentation import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters +import graphql.normalized.nf.provider.NoOpNormalizedDocumentProvider import graphql.parser.Parser import spock.lang.Specification @@ -36,7 +37,8 @@ class ExecutionTest extends Specification { def subscriptionStrategy = new CountingExecutionStrategy() def mutationStrategy = new CountingExecutionStrategy() def queryStrategy = new CountingExecutionStrategy() - def execution = 
new Execution(queryStrategy, mutationStrategy, subscriptionStrategy, SimplePerformantInstrumentation.INSTANCE, ValueUnboxer.DEFAULT, false) + def normalizedDocumentProvider = NoOpNormalizedDocumentProvider.INSTANCE; + def execution = new Execution(queryStrategy, mutationStrategy, subscriptionStrategy, SimplePerformantInstrumentation.INSTANCE, ValueUnboxer.DEFAULT, false, normalizedDocumentProvider) def emptyExecutionInput = ExecutionInput.newExecutionInput().query("query").build() def instrumentationState = new InstrumentationState() {} @@ -125,7 +127,7 @@ class ExecutionTest extends Specification { } } - def execution = new Execution(queryStrategy, mutationStrategy, subscriptionStrategy, instrumentation, ValueUnboxer.DEFAULT, false) + def execution = new Execution(queryStrategy, mutationStrategy, subscriptionStrategy, instrumentation, ValueUnboxer.DEFAULT, false, NoOpNormalizedDocumentProvider.INSTANCE) when: diff --git a/src/test/groovy/graphql/execution/instrumentation/fieldvalidation/FieldValidationTest.groovy b/src/test/groovy/graphql/execution/instrumentation/fieldvalidation/FieldValidationTest.groovy index 2852d262b4..31dc1a8a28 100644 --- a/src/test/groovy/graphql/execution/instrumentation/fieldvalidation/FieldValidationTest.groovy +++ b/src/test/groovy/graphql/execution/instrumentation/fieldvalidation/FieldValidationTest.groovy @@ -14,6 +14,7 @@ import graphql.execution.ResponseMapFactory import graphql.execution.ResultPath import graphql.execution.ValueUnboxer import graphql.execution.instrumentation.ChainedInstrumentation +import graphql.normalized.nf.provider.NoOpNormalizedDocumentProvider import spock.lang.Specification import java.util.concurrent.CompletableFuture @@ -307,7 +308,7 @@ class FieldValidationTest extends Specification { def document = TestUtil.parseQuery(query) def strategy = new AsyncExecutionStrategy() def instrumentation = new FieldValidationInstrumentation(validation) - def execution = new Execution(strategy, strategy, strategy, 
instrumentation, ValueUnboxer.DEFAULT, false) + def execution = new Execution(strategy, strategy, strategy, instrumentation, ValueUnboxer.DEFAULT, false, NoOpNormalizedDocumentProvider.INSTANCE) def executionInput = ExecutionInput.newExecutionInput().query(query).variables(variables).build() execution.execute(document, schema, ExecutionId.generate(), executionInput, null, new EngineRunningState(executionInput)) From d6ca0f75dffd7712aad8d39b8d7faf651b4a257a Mon Sep 17 00:00:00 2001 From: Tim Ward Date: Sun, 20 Jul 2025 08:32:54 -0700 Subject: [PATCH 3/3] Add test coverage --- src/test/groovy/graphql/StarWarsData.groovy | 1 + .../GraphQLUnusualConfigurationTest.groovy | 18 ++ ...pNormalizedDocumentProviderUnitTest.groovy | 20 ++ .../NormalizedDocumentEntryTest.groovy | 26 +++ .../NormalizedDocumentProviderTest.groovy | 174 ++++++++++++++++++ .../TestingNormalizedDocumentProvider.groovy | 19 ++ 6 files changed, 258 insertions(+) create mode 100644 src/test/groovy/graphql/normalized/nf/provider/NoOpNormalizedDocumentProviderUnitTest.groovy create mode 100644 src/test/groovy/graphql/normalized/nf/provider/NormalizedDocumentEntryTest.groovy create mode 100644 src/test/groovy/graphql/normalized/nf/provider/NormalizedDocumentProviderTest.groovy create mode 100644 src/test/groovy/graphql/normalized/nf/provider/TestingNormalizedDocumentProvider.groovy diff --git a/src/test/groovy/graphql/StarWarsData.groovy b/src/test/groovy/graphql/StarWarsData.groovy index d4a406d565..8fcd120b85 100644 --- a/src/test/groovy/graphql/StarWarsData.groovy +++ b/src/test/groovy/graphql/StarWarsData.groovy @@ -105,6 +105,7 @@ class StarWarsData { static TypeResolver characterTypeResolver = new TypeResolver() { @Override GraphQLObjectType getType(TypeResolutionEnvironment env) { + env.getSelectionSet().getFields() // Used to validate selection sets for various tests def id = env.getObject().id if (humanData[id] != null) return StarWarsSchema.humanType diff --git 
a/src/test/groovy/graphql/config/GraphQLUnusualConfigurationTest.groovy b/src/test/groovy/graphql/config/GraphQLUnusualConfigurationTest.groovy index d526b675f8..a264a09e80 100644 --- a/src/test/groovy/graphql/config/GraphQLUnusualConfigurationTest.groovy +++ b/src/test/groovy/graphql/config/GraphQLUnusualConfigurationTest.groovy @@ -127,6 +127,24 @@ class GraphQLUnusualConfigurationTest extends Specification { !GraphQL.unusualConfiguration(graphqlContext).incrementalSupport().isIncrementalSupportEnabled() } + def "can set normalized document support on graphql context objects"() { + when: + def graphqlContextBuilder = GraphQLContext.newContext() + GraphQL.unusualConfiguration(graphqlContextBuilder).normalizedDocumentSupport().enableNormalizedDocumentSupport(true) + + then: + graphqlContextBuilder.build().get(ExperimentalApi.ENABLE_NORMALIZED_DOCUMENT_SUPPORT) == true + GraphQL.unusualConfiguration(graphqlContextBuilder).normalizedDocumentSupport().isNormalizedDocumentSupportEnabled() + + when: + graphqlContextBuilder = GraphQLContext.newContext() + GraphQL.unusualConfiguration(graphqlContextBuilder).normalizedDocumentSupport().enableNormalizedDocumentSupport(false) + + then: + graphqlContextBuilder.build().get(ExperimentalApi.ENABLE_NORMALIZED_DOCUMENT_SUPPORT) == false + !GraphQL.unusualConfiguration(graphqlContextBuilder).normalizedDocumentSupport().isNormalizedDocumentSupportEnabled() + } + def "can set data loader chaining config for enablement"() { when: def graphqlContextBuilder = GraphQLContext.newContext() diff --git a/src/test/groovy/graphql/normalized/nf/provider/NoOpNormalizedDocumentProviderUnitTest.groovy b/src/test/groovy/graphql/normalized/nf/provider/NoOpNormalizedDocumentProviderUnitTest.groovy new file mode 100644 index 0000000000..7fac614e2b --- /dev/null +++ b/src/test/groovy/graphql/normalized/nf/provider/NoOpNormalizedDocumentProviderUnitTest.groovy @@ -0,0 +1,20 @@ +package graphql.normalized.nf.provider + +import 
graphql.normalized.nf.NormalizedDocument +import spock.lang.Specification + +import static graphql.ExecutionInput.newExecutionInput + +class NoOpNormalizedDocumentProviderUnitTest extends Specification { + def "NoOp always returns result of compute function"() { + given: + def provider = NoOpNormalizedDocumentProvider.INSTANCE + def document = new NormalizedDocument(List.of()) + + when: + def actual = provider.getNormalizedDocument(newExecutionInput("{}").build(), { return document }) + + then: + actual.join().document == document + } +} diff --git a/src/test/groovy/graphql/normalized/nf/provider/NormalizedDocumentEntryTest.groovy b/src/test/groovy/graphql/normalized/nf/provider/NormalizedDocumentEntryTest.groovy new file mode 100644 index 0000000000..7d480f1c90 --- /dev/null +++ b/src/test/groovy/graphql/normalized/nf/provider/NormalizedDocumentEntryTest.groovy @@ -0,0 +1,26 @@ +package graphql.normalized.nf.provider + +import graphql.AssertException +import graphql.normalized.nf.NormalizedDocument +import spock.lang.Specification + +class NormalizedDocumentEntryTest extends Specification { + def "Ensure a non-null document returns"() { + given: + def document = new NormalizedDocument(List.of()) + + when: + def entry = new NormalizedDocumentEntry(document) + + then: + entry.document == document + } + + def "Ensure a null document throws Exception"() { + when: + new NormalizedDocumentEntry((NormalizedDocument) null) + + then: + thrown(AssertException) + } +} diff --git a/src/test/groovy/graphql/normalized/nf/provider/NormalizedDocumentProviderTest.groovy b/src/test/groovy/graphql/normalized/nf/provider/NormalizedDocumentProviderTest.groovy new file mode 100644 index 0000000000..4672d30683 --- /dev/null +++ b/src/test/groovy/graphql/normalized/nf/provider/NormalizedDocumentProviderTest.groovy @@ -0,0 +1,174 @@ +package graphql.normalized.nf.provider + +import graphql.ExperimentalApi +import graphql.GraphQL +import graphql.StarWarsSchema +import 
graphql.execution.AsyncExecutionStrategy
import graphql.execution.instrumentation.LegacyTestingInstrumentation
import spock.lang.Specification

import static graphql.ExecutionInput.newExecutionInput

/**
 * Exercises a NormalizedDocumentProvider through two full executions of the
 * same query: the second execution should be served from the provider's cache,
 * and both executions must produce identical data and instrumentation traces.
 */
class NormalizedDocumentProviderTest extends Specification {

    def expected = [
            "start:execution",

            "start:parse",
            "end:parse",

            "start:validation",
            "end:validation",
            "start:execute-operation",

            "start:execution-strategy",

            "start:field-hero",
            "start:fetch-hero",
            "end:fetch-hero",
            "start:complete-hero",

            "start:execute-object",

            "start:field-id",
            "start:fetch-id",
            "end:fetch-id",
            "start:complete-id",
            "end:complete-id",
            "end:field-id",

            "end:execute-object",

            "end:complete-hero",
            "end:field-hero",

            "end:execution-strategy",

            "end:execute-operation",
            "end:execution",
    ]

    // The trace on a normalized-document cache hit is currently identical to a
    // cold run (parse and validation still execute), so this aliases `expected`
    // rather than duplicating the 24-entry list.
    def expectedNormalizedCached = expected

    // Builds a StarWars GraphQL instance wired with the given strategy,
    // instrumentation and normalized document provider.
    private static def newStarWarsGraphQL(strategy, instrumentation, provider) {
        GraphQL.newGraphQL(StarWarsSchema.starWarsSchema)
                .queryExecutionStrategy(strategy)
                .instrumentation(instrumentation)
                .normalizedDocumentProvider(provider)
                .build()
    }

    // Runs the query twice against the same provider and asserts: expected
    // traces on both runs, identical data, and that the provider cached by the
    // raw query string. Uses explicit asserts (Spock helper-method pattern).
    private void verifyCachedExecution(String query) {
        def instrumentation = new LegacyTestingInstrumentation()
        def instrumentationPreparsed = new LegacyTestingInstrumentation()
        def normalizedCache = new TestingNormalizedDocumentProvider()
        def context = Map.of(
                ExperimentalApi.ENABLE_NORMALIZED_DOCUMENT_SUPPORT, true)
        def executionInput = newExecutionInput().query(query).graphQLContext(context).build()
        def strategy = new AsyncExecutionStrategy()

        def data1 = newStarWarsGraphQL(strategy, instrumentation, normalizedCache)
                .execute(executionInput).data
        def data2 = newStarWarsGraphQL(strategy, instrumentationPreparsed, normalizedCache)
                .execute(executionInput).data

        assert instrumentation.executionList == expected
        assert instrumentationPreparsed.executionList == expectedNormalizedCached
        assert data1 == data2
        assert normalizedCache.cache.containsKey(query)
    }

    def 'Normalized document caching of simple serial execution'() {
        expect:
        verifyCachedExecution """
            query HeroNameAndFriendsQuery {
                hero {
                    id
                }
            }
            """
    }

    def 'Normalized document caching of simple anonymous serial execution'() {
        expect:
        verifyCachedExecution """
            query {
                hero {
                    id
                }
            }
            """
    }
}
package graphql.normalized.nf.provider

import graphql.ExecutionInput

import java.util.concurrent.CompletableFuture
import java.util.function.Function

/**
 * Test double for NormalizedDocumentProvider that memoizes entries keyed by
 * the raw query string, exposing the backing map so tests can assert on
 * cache population and hits.
 *
 * Note: the copy-pasted PreparsedDocumentEntry/PreparsedDocumentProvider
 * imports from the preparsed-document equivalent were unused and removed.
 */
class TestingNormalizedDocumentProvider implements NormalizedDocumentProvider {
    // Keyed by ExecutionInput.query; package-visible on purpose so specs can
    // inspect it (e.g. cache.containsKey(query)).
    Map<String, NormalizedDocumentEntry> cache = new HashMap<>()

    @Override
    CompletableFuture getNormalizedDocument(ExecutionInput executionInput, CreateNormalizedDocument creator) {
        // computeIfAbsent only invokes the creator on a cache miss, which is
        // exactly the behavior the caching tests rely on.
        Function<String, NormalizedDocumentEntry> mapCompute = { key -> new NormalizedDocumentEntry(creator.createNormalizedDocument()) }
        return CompletableFuture.completedFuture(cache.computeIfAbsent(executionInput.query, mapCompute))
    }
}