Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@graphql-tools/executor

Package Overview
Dependencies
Maintainers
3
Versions
343
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@graphql-tools/executor - npm Package Compare versions

Comparing version 1.2.6 to 2.0.0-alpha-20240606144658-8963c8b8f661638eaee0e101a55f3b6e46cc03ff

cjs/execution/AccumulatorMap.js

1059

cjs/execution/execute.js

@@ -5,6 +5,8 @@ "use strict";

const graphql_1 = require("graphql");
const value_or_promise_1 = require("value-or-promise");
const utils_1 = require("@graphql-tools/utils");
const BoxedPromiseOrValue_js_1 = require("./BoxedPromiseOrValue.js");
const buildFieldPlan_js_1 = require("./buildFieldPlan.js");
const coerceError_js_1 = require("./coerceError.js");
const flattenAsyncIterable_js_1 = require("./flattenAsyncIterable.js");
const collectFields_js_1 = require("./collectFields.js");
const IncrementalPublisher_js_1 = require("./IncrementalPublisher.js");
const invariant_js_1 = require("./invariant.js");

@@ -18,3 +20,3 @@ const promiseForObject_js_1 = require("./promiseForObject.js");

*/
const collectSubfields = (0, utils_1.memoize3)((exeContext, returnType, fieldNodes) => (0, utils_1.collectSubFields)(exeContext.schema, exeContext.fragments, exeContext.variableValues, returnType, fieldNodes));
const collectSubfields = (0, utils_1.memoize3)((exeContext, returnType, fieldGroup) => (0, collectFields_js_1.collectSubfields)(exeContext.schema, exeContext.fragments, exeContext.variableValues, exeContext.operation, returnType, fieldGroup));
/**

@@ -53,43 +55,5 @@ * Implements the "Executing requests" section of the GraphQL specification,

}
return executeImpl(exeContext);
return executeOperation(exeContext);
}
exports.execute = execute;
function executeImpl(exeContext) {
if (exeContext.signal?.aborted) {
throw exeContext.signal.reason;
}
// Return a Promise that will eventually resolve to the data described by
// The "Response" section of the GraphQL specification.
//
// If errors are encountered while executing a GraphQL field, only that
// field and its descendants will be omitted, and sibling fields will still
// be executed. An execution which encounters errors will still result in a
// resolved Promise.
//
// Errors from sub-fields of a NonNull type may propagate to the top level,
// at which point we still log the error and null the parent field, which
// in this case is the entire response.
const result = new value_or_promise_1.ValueOrPromise(() => executeOperation(exeContext))
.then(data => {
const initialResult = buildResponse(data, exeContext.errors);
if (exeContext.subsequentPayloads.size > 0) {
return {
initialResult: {
...initialResult,
hasNext: true,
},
subsequentResults: yieldSubsequentPayloads(exeContext),
};
}
return initialResult;
}, (error) => {
if (exeContext.signal?.aborted) {
throw exeContext.signal.reason;
}
exeContext.errors.push(error);
return buildResponse(null, exeContext.errors);
})
.resolve();
return result;
}
/**

@@ -113,5 +77,46 @@ * Also implements the "Executing requests" section of the GraphQL specification.

*/
function buildResponse(data, errors) {
return errors.length === 0 ? { data } : { errors, data };
/**
 * Builds the final response for the initial execution result. Produces a
 * plain single result when no incremental data records exist, or when every
 * record was masked by a collected error; otherwise delegates to the
 * incremental publisher to stream subsequent payloads.
 */
function buildDataResponse(exeContext, data) {
    const errors = exeContext.errors;
    const records = exeContext.incrementalDataRecords;
    if (records === undefined) {
        return buildSingleResult(data, errors);
    }
    if (errors === undefined) {
        return (0, IncrementalPublisher_js_1.buildIncrementalResponse)(exeContext, data, undefined, records);
    }
    // Drop records nullified by errors recorded at or above their path.
    const surviving = filterIncrementalDataRecords(undefined, errors, records);
    return surviving.length === 0
        ? buildSingleResult(data, errors)
        : (0, IncrementalPublisher_js_1.buildIncrementalResponse)(exeContext, data, Array.from(errors.values()), surviving);
}
/**
 * Builds a non-incremental execution result. When errors were collected
 * (as a Map of path -> error), their values are materialized into an array
 * alongside the data; otherwise only the data is returned.
 */
function buildSingleResult(data, errors) {
    if (errors === undefined) {
        return { data };
    }
    return { errors: Array.from(errors.values()), data };
}
/**
 * Filters out incremental data records whose path, or any ancestor path up to
 * (but not including) `initialPath`, has a recorded error — records nullified
 * by an error higher in the response tree must not be delivered.
 *
 * Fix: the original collected each visited path into a `paths` array that was
 * never read; the dead accumulator is removed.
 *
 * @param initialPath path at which ascending stops (undefined for the root).
 * @param errors Map keyed by path objects holding the errors recorded there.
 * @param incrementalDataRecords candidate records, each with a `path` linked
 *        list chained via `.prev`.
 * @returns the records not masked by any error.
 */
function filterIncrementalDataRecords(initialPath, errors, incrementalDataRecords) {
    const filteredIncrementalDataRecords = [];
    for (const incrementalDataRecord of incrementalDataRecords) {
        let currentPath = incrementalDataRecord.path;
        if (errors.has(currentPath)) {
            continue;
        }
        let filtered = false;
        while (currentPath !== initialPath) {
            // Because currentPath leads to initialPath or is undefined, and the
            // loop will exit if initialPath is undefined, currentPath must be
            // defined.
            // TODO: Consider, however, adding an invariant.
            currentPath = currentPath.prev;
            if (errors.has(currentPath)) {
                filtered = true;
                break;
            }
        }
        if (!filtered) {
            filteredIncrementalDataRecords.push(incrementalDataRecord);
        }
    }
    return filteredIncrementalDataRecords;
}
/**

@@ -150,3 +155,3 @@ * Essential assertions before executing to provide developer feedback for

function buildExecutionContext(args) {
const { schema, document, rootValue, contextValue, variableValues: rawVariableValues, operationName, fieldResolver, typeResolver, subscribeFieldResolver, signal, } = args;
const { schema, document, rootValue, contextValue, variableValues: rawVariableValues, operationName, fieldResolver, typeResolver, subscribeFieldResolver, enableEarlyExecution, signal, } = args;
// If the schema used for execution is invalid, throw an error.

@@ -200,5 +205,7 @@ (0, graphql_1.assertValidSchema)(schema);

subscribeFieldResolver: subscribeFieldResolver ?? exports.defaultFieldResolver,
subsequentPayloads: new Set(),
errors: [],
enableEarlyExecution: enableEarlyExecution !== false,
signal,
errors: undefined,
cancellableStreams: undefined,
incrementalDataRecords: undefined,
};

@@ -211,4 +218,3 @@ }

rootValue: payload,
subsequentPayloads: new Set(),
errors: [],
errors: undefined,
};

@@ -220,24 +226,72 @@ }

function executeOperation(exeContext) {
const { operation, schema, fragments, variableValues, rootValue } = exeContext;
const rootType = (0, utils_1.getDefinedRootType)(schema, operation.operation, [operation]);
if (rootType == null) {
(0, utils_1.createGraphQLError)(`Schema is not configured to execute ${operation.operation} operation.`, {
nodes: operation,
});
if (exeContext.signal?.aborted) {
throw exeContext.signal.reason;
}
const { fields: rootFields, patches } = (0, utils_1.collectFields)(schema, fragments, variableValues, rootType, operation.selectionSet);
const path = undefined;
try {
const { operation, schema, fragments, variableValues, rootValue } = exeContext;
const rootType = (0, utils_1.getDefinedRootType)(schema, operation.operation, [operation]);
if (rootType == null) {
(0, utils_1.createGraphQLError)(`Schema is not configured to execute ${operation.operation} operation.`, {
nodes: operation,
});
}
const collectedFields = (0, collectFields_js_1.collectFields)(schema, fragments, variableValues, rootType, operation);
let groupedFieldSet = collectedFields.groupedFieldSet;
const newDeferUsages = collectedFields.newDeferUsages;
let data;
if (newDeferUsages.length === 0) {
data = executeRootGroupedFieldSet(exeContext, operation.operation, rootType, rootValue, groupedFieldSet, undefined);
}
else {
const fieldPLan = (0, buildFieldPlan_js_1.buildFieldPlan)(groupedFieldSet);
groupedFieldSet = fieldPLan.groupedFieldSet;
const newGroupedFieldSets = fieldPLan.newGroupedFieldSets;
const newDeferMap = addNewDeferredFragments(newDeferUsages, new Map());
data = executeRootGroupedFieldSet(exeContext, operation.operation, rootType, rootValue, groupedFieldSet, newDeferMap);
if (newGroupedFieldSets.size > 0) {
const newDeferredGroupedFieldSetRecords = executeDeferredGroupedFieldSets(exeContext, rootType, rootValue, undefined, undefined, newGroupedFieldSets, newDeferMap);
addIncrementalDataRecords(exeContext, newDeferredGroupedFieldSetRecords);
}
}
if ((0, utils_1.isPromise)(data)) {
return data.then(resolved => buildDataResponse(exeContext, resolved), error => {
if (exeContext.signal?.aborted) {
throw exeContext.signal.reason;
}
return {
data: null,
errors: withError(exeContext.errors, error),
};
});
}
return buildDataResponse(exeContext, data);
}
catch (error) {
if (exeContext.signal?.aborted) {
throw exeContext.signal.reason;
}
return { data: null, errors: withError(exeContext.errors, error) };
}
}
function executeRootGroupedFieldSet(exeContext, operation, rootType, rootValue, groupedFieldSet, deferMap) {
let result;
if (operation.operation === 'mutation') {
result = executeFieldsSerially(exeContext, rootType, rootValue, path, rootFields);
if (operation === 'mutation') {
result = executeFieldsSerially(exeContext, rootType, rootValue, undefined, groupedFieldSet, undefined, deferMap);
}
else {
result = executeFields(exeContext, rootType, rootValue, path, rootFields);
result = executeFields(exeContext, rootType, rootValue, undefined, groupedFieldSet, undefined, deferMap);
}
for (const patch of patches) {
const { label, fields: patchFields } = patch;
executeDeferredFragment(exeContext, rootType, rootValue, patchFields, label, path);
}
return result;
}
function addIncrementalDataRecords(context, newIncrementalDataRecords) {
const incrementalDataRecords = context.incrementalDataRecords;
if (incrementalDataRecords === undefined) {
context.incrementalDataRecords = [...newIncrementalDataRecords];
return;
}
incrementalDataRecords.push(...newIncrementalDataRecords);
}
/**
 * Appends `error` to any previously collected errors, returning a flat array.
 * `errors` is either undefined or a Map whose values are the collected errors.
 */
function withError(errors, error) {
    if (errors === undefined) {
        return [error];
    }
    const combined = Array.from(errors.values());
    combined.push(error);
    return combined;
}
/**

@@ -247,4 +301,4 @@ * Implements the "Executing selection sets" section of the spec

*/
function executeFieldsSerially(exeContext, parentType, sourceValue, path, fields) {
return (0, utils_1.promiseReduce)(fields, (results, [responseName, fieldNodes]) => {
function executeFieldsSerially(exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap) {
return (0, utils_1.promiseReduce)(groupedFieldSet, (results, [responseName, fieldGroup]) => {
const fieldPath = (0, utils_1.addPath)(path, responseName, parentType.name);

@@ -254,10 +308,15 @@ if (exeContext.signal?.aborted) {

}
return new value_or_promise_1.ValueOrPromise(() => executeField(exeContext, parentType, sourceValue, fieldNodes, fieldPath)).then(result => {
if (result === undefined) {
const result = executeField(exeContext, parentType, sourceValue, fieldGroup, fieldPath, incrementalContext, deferMap);
if (result === undefined) {
return results;
}
if ((0, utils_1.isPromise)(result)) {
return result.then(resolved => {
results[responseName] = resolved;
return results;
}
results[responseName] = result;
return results;
});
}, Object.create(null)).resolve();
});
}
results[responseName] = result;
return results;
}, Object.create(null));
}

@@ -268,7 +327,7 @@ /**

*/
function executeFields(exeContext, parentType, sourceValue, path, fields, asyncPayloadRecord) {
function executeFields(exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap) {
const results = Object.create(null);
let containsPromise = false;
try {
for (const [responseName, fieldNodes] of fields) {
for (const [responseName, fieldGroup] of groupedFieldSet) {
if (exeContext.signal?.aborted) {

@@ -278,3 +337,3 @@ throw exeContext.signal.reason;

const fieldPath = (0, utils_1.addPath)(path, responseName, parentType.name);
const result = executeField(exeContext, parentType, sourceValue, fieldNodes, fieldPath, asyncPayloadRecord);
const result = executeField(exeContext, parentType, sourceValue, fieldGroup, fieldPath, incrementalContext, deferMap);
if (result !== undefined) {

@@ -297,3 +356,3 @@ results[responseName] = result;

}
// If there are no promises, we can just return the object
// If there are no promises, we can just return the object and any incrementalDataRecords
if (!containsPromise) {

@@ -307,2 +366,5 @@ return results;

}
/**
 * Extracts the AST nodes from a field group (an array of field-details
 * objects, each holding a `node` property).
 */
function toNodes(fieldGroup) {
    const nodes = [];
    for (const details of fieldGroup) {
        nodes.push(details.node);
    }
    return nodes;
}
/**

@@ -314,5 +376,4 @@ * Implements the "Executing fields" section of the spec

*/
function executeField(exeContext, parentType, source, fieldNodes, path, asyncPayloadRecord) {
const errors = asyncPayloadRecord?.errors ?? exeContext.errors;
const fieldDef = getFieldDef(exeContext.schema, parentType, fieldNodes[0]);
function executeField(exeContext, parentType, source, fieldGroup, path, incrementalContext, deferMap) {
const fieldDef = getFieldDef(exeContext.schema, parentType, fieldGroup[0].node);
if (!fieldDef) {

@@ -323,3 +384,3 @@ return;

const resolveFn = fieldDef.resolve ?? exeContext.fieldResolver;
const info = buildResolveInfo(exeContext, fieldDef, fieldNodes, parentType, path);
const info = buildResolveInfo(exeContext, fieldDef, toNodes(fieldGroup), parentType, path);
// Get the resolve function, regardless of if its result is normal or abrupt (error).

@@ -330,3 +391,3 @@ try {

// TODO: find a way to memoize, in case this field is within a List type.
const args = (0, utils_1.getArgumentValues)(fieldDef, fieldNodes[0], exeContext.variableValues);
const args = (0, utils_1.getArgumentValues)(fieldDef, fieldGroup[0].node, exeContext.variableValues);
// The resolve function's optional third argument is a context value that

@@ -337,9 +398,6 @@ // is provided to every resolve function within an execution. It is commonly

const result = resolveFn(source, args, contextValue, info);
let completed;
if ((0, utils_1.isPromise)(result)) {
completed = result.then(resolved => completeValue(exeContext, returnType, fieldNodes, info, path, resolved, asyncPayloadRecord));
return completePromisedValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
}
else {
completed = completeValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord);
}
const completed = completeValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
if ((0, utils_1.isPromise)(completed)) {

@@ -349,7 +407,4 @@ // Note: we don't rely on a `catch` method, but we do expect "thenable"

return completed.then(undefined, rawError => {
rawError = (0, coerceError_js_1.coerceError)(rawError);
const error = (0, graphql_1.locatedError)(rawError, fieldNodes, (0, utils_1.pathToArray)(path));
const handledError = handleFieldError(error, returnType, errors);
filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
return handledError;
handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext);
return null;
});

@@ -360,7 +415,4 @@ }

catch (rawError) {
const coercedError = (0, coerceError_js_1.coerceError)(rawError);
const error = (0, graphql_1.locatedError)(coercedError, fieldNodes, (0, utils_1.pathToArray)(path));
const handledError = handleFieldError(error, returnType, errors);
filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
return handledError;
handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext);
return null;
}

@@ -389,3 +441,4 @@ }

exports.buildResolveInfo = buildResolveInfo;
function handleFieldError(error, returnType, errors) {
function handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext) {
const error = (0, graphql_1.locatedError)((0, coerceError_js_1.coerceError)(rawError), toNodes(fieldGroup), (0, utils_1.pathToArray)(path));
// If the field type is non-nullable, then it is resolved without any

@@ -398,4 +451,9 @@ // protection from errors, however it still properly locates the error.

// a null value for this field if one is encountered.
errors.push(error);
return null;
const context = incrementalContext ?? exeContext;
let errors = context.errors;
if (errors === undefined) {
errors = new Map();
context.errors = errors;
}
errors.set(path, error);
}

@@ -423,3 +481,3 @@ /**

*/
function completeValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord) {
function completeValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
// If result is an Error, throw a located error.

@@ -432,4 +490,4 @@ if (result instanceof Error) {

if ((0, graphql_1.isNonNullType)(returnType)) {
const completed = completeValue(exeContext, returnType.ofType, fieldNodes, info, path, result, asyncPayloadRecord);
if (completed === null) {
const completed = completeValue(exeContext, returnType.ofType, fieldGroup, info, path, result, incrementalContext, deferMap);
if (completed == null) {
throw new Error(`Cannot return null for non-nullable field ${info.parentType.name}.${info.fieldName}.`);

@@ -445,3 +503,3 @@ }

if ((0, graphql_1.isListType)(returnType)) {
return completeListValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord);
return completeListValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
}

@@ -456,18 +514,32 @@ // If field type is a leaf type, Scalar or Enum, serialize to a valid value,

if ((0, graphql_1.isAbstractType)(returnType)) {
return completeAbstractValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord);
return completeAbstractValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
}
// If field type is Object, execute and complete all sub-selections.
if ((0, graphql_1.isObjectType)(returnType)) {
return completeObjectValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord);
return completeObjectValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
}
/* c8 ignore next 6 */
// Not reachable, all possible output types have been considered.
console.assert(false, 'Cannot complete value of unexpected output type: ' + (0, utils_1.inspect)(returnType));
(0, invariant_js_1.invariant)(false, 'Cannot complete value of unexpected output type: ' + (0, utils_1.inspect)(returnType));
}
/**
 * Awaits a promised resolver result and completes it, also unwrapping a
 * promised completion. Any error raised along the way is recorded as a field
 * error and the value is nulled out.
 */
async function completePromisedValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
    try {
        const resolved = await result;
        const completed = completeValue(exeContext, returnType, fieldGroup, info, path, resolved, incrementalContext, deferMap);
        return (0, utils_1.isPromise)(completed) ? await completed : completed;
    }
    catch (rawError) {
        handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext);
        return null;
    }
}
/**
* Returns an object containing the `@stream` arguments if a field should be
* Returns an object containing info for streaming if a field should be
* streamed based on the experimental flag, stream directive present and
* not disabled by the "if" argument.
*/
function getStreamValues(exeContext, fieldNodes, path) {
function getStreamUsage(exeContext, fieldGroup, path) {
// do not stream inner lists of multi-dimensional lists

@@ -477,5 +549,10 @@ if (typeof path.key === 'number') {

}
// TODO: add test for this case (a streamed list nested under a list).
/* c8 ignore next 7 */
if (fieldGroup._streamUsage !== undefined) {
return fieldGroup._streamUsage;
}
// validation only allows equivalent streams on multiple fields, so it is
// safe to only check the first fieldNode for the stream directive
const stream = (0, graphql_1.getDirectiveValues)(utils_1.GraphQLStreamDirective, fieldNodes[0], exeContext.variableValues);
const stream = (0, graphql_1.getDirectiveValues)(utils_1.GraphQLStreamDirective, fieldGroup[0].node, exeContext.variableValues);
if (!stream) {

@@ -489,6 +566,14 @@ return;

(0, invariant_js_1.invariant)(stream['initialCount'] >= 0, 'initialCount must be a positive integer');
return {
(0, invariant_js_1.invariant)(exeContext.operation.operation !== 'subscription', '`@stream` directive not supported on subscription operations. Disable `@stream` by setting the `if` argument to `false`.');
const streamedFieldGroup = fieldGroup.map(fieldDetails => ({
node: fieldDetails.node,
deferUsage: undefined,
}));
const streamUsage = {
initialCount: stream['initialCount'],
label: typeof stream['label'] === 'string' ? stream['label'] : undefined,
fieldGroup: streamedFieldGroup,
};
fieldGroup._streamUsage = streamUsage;
return streamUsage;
}

@@ -499,14 +584,36 @@ /**

*/
async function completeAsyncIteratorValue(exeContext, itemType, fieldNodes, info, path, iterator, asyncPayloadRecord) {
async function completeAsyncIteratorValue(exeContext, itemType, fieldGroup, info, path, asyncIterator, incrementalContext, deferMap) {
exeContext.signal?.addEventListener('abort', () => {
iterator.return?.();
asyncIterator.return?.();
});
const errors = asyncPayloadRecord?.errors ?? exeContext.errors;
const stream = getStreamValues(exeContext, fieldNodes, path);
let containsPromise = false;
const completedResults = [];
let index = 0;
const streamUsage = getStreamUsage(exeContext, fieldGroup, path);
while (true) {
if (stream && typeof stream.initialCount === 'number' && index >= stream.initialCount) {
executeStreamIterator(index, iterator, exeContext, fieldNodes, info, itemType, path, stream.label, asyncPayloadRecord);
if (streamUsage && index >= streamUsage.initialCount) {
const streamItemQueue = buildAsyncStreamItemQueue(index, path, asyncIterator, exeContext, streamUsage.fieldGroup, info, itemType);
const returnFn = asyncIterator.return;
let streamRecord;
if (returnFn === undefined) {
streamRecord = {
label: streamUsage.label,
path,
streamItemQueue,
};
}
else {
streamRecord = {
label: streamUsage.label,
path,
streamItemQueue,
earlyReturn: returnFn.bind(asyncIterator),
};
if (exeContext.cancellableStreams === undefined) {
exeContext.cancellableStreams = new Set();
}
exeContext.cancellableStreams.add(streamRecord);
}
const context = incrementalContext ?? exeContext;
addIncrementalDataRecords(context, [streamRecord]);
break;

@@ -517,19 +624,33 @@ }

try {
iteration = await iterator.next();
if (iteration.done) {
break;
}
iteration = await asyncIterator.next();
}
catch (rawError) {
const coercedError = (0, coerceError_js_1.coerceError)(rawError);
const error = (0, graphql_1.locatedError)(coercedError, fieldNodes, (0, utils_1.pathToArray)(itemPath));
completedResults.push(handleFieldError(error, itemType, errors));
throw (0, graphql_1.locatedError)((0, coerceError_js_1.coerceError)(rawError), toNodes(fieldGroup), (0, utils_1.pathToArray)(path));
}
// TODO: add test case for stream returning done before initialCount
/* c8 ignore next 3 */
if (iteration.done) {
break;
}
if (completeListItemValue(iteration.value, completedResults, errors, exeContext, itemType, fieldNodes, info, itemPath, asyncPayloadRecord)) {
const item = iteration.value;
// TODO: add tests for stream backed by asyncIterator that returns a promise
/* c8 ignore start */
if ((0, utils_1.isPromise)(item)) {
completedResults.push(completePromisedListItemValue(item, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap));
containsPromise = true;
}
index += 1;
else if (
/* c8 ignore stop */
completeListItemValue(item, completedResults, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap)
// TODO: add tests for stream backed by asyncIterator that completes to a promise
/* c8 ignore start */
) {
containsPromise = true;
}
/* c8 ignore stop */
index++;
}
return containsPromise ? Promise.all(completedResults) : completedResults;
return containsPromise
? /* c8 ignore start */ Promise.all(completedResults)
: /* c8 ignore stop */ completedResults;
}

@@ -540,8 +661,7 @@ /**

*/
function completeListValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord) {
function completeListValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
const itemType = returnType.ofType;
const errors = asyncPayloadRecord?.errors ?? exeContext.errors;
if ((0, utils_1.isAsyncIterable)(result)) {
const iterator = result[Symbol.asyncIterator]();
return completeAsyncIteratorValue(exeContext, itemType, fieldNodes, info, path, iterator, asyncPayloadRecord);
const asyncIterator = result[Symbol.asyncIterator]();
return completeAsyncIteratorValue(exeContext, itemType, fieldGroup, info, path, asyncIterator, incrementalContext, deferMap);
}

@@ -551,22 +671,37 @@ if (!(0, utils_1.isIterableObject)(result)) {

}
const stream = getStreamValues(exeContext, fieldNodes, path);
return completeIterableValue(exeContext, itemType, fieldGroup, info, path, result, incrementalContext, deferMap);
}
function completeIterableValue(exeContext, itemType, fieldGroup, info, path, items, incrementalContext, deferMap) {
// This is specified as a simple map, however we're optimizing the path
// where the list contains no Promises by avoiding creating another Promise.
let containsPromise = false;
let previousAsyncPayloadRecord = asyncPayloadRecord;
const completedResults = [];
let index = 0;
for (const item of result) {
const streamUsage = getStreamUsage(exeContext, fieldGroup, path);
const iterator = items[Symbol.iterator]();
let iteration = iterator.next();
while (!iteration.done) {
const item = iteration.value;
if (streamUsage && index >= streamUsage.initialCount) {
const streamRecord = {
label: streamUsage.label,
path,
streamItemQueue: buildSyncStreamItemQueue(item, index, path, iterator, exeContext, streamUsage.fieldGroup, info, itemType),
};
const context = incrementalContext ?? exeContext;
addIncrementalDataRecords(context, [streamRecord]);
break;
}
// No need to modify the info object containing the path,
// since from here on it is not ever accessed by resolver functions.
const itemPath = (0, utils_1.addPath)(path, index, undefined);
if (stream && typeof stream.initialCount === 'number' && index >= stream.initialCount) {
previousAsyncPayloadRecord = executeStreamField(path, itemPath, item, exeContext, fieldNodes, info, itemType, stream.label, previousAsyncPayloadRecord);
index++;
continue;
if ((0, utils_1.isPromise)(item)) {
completedResults.push(completePromisedListItemValue(item, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap));
containsPromise = true;
}
if (completeListItemValue(item, completedResults, errors, exeContext, itemType, fieldNodes, info, itemPath, asyncPayloadRecord)) {
else if (completeListItemValue(item, completedResults, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap)) {
containsPromise = true;
}
index++;
iteration = iterator.next();
}

@@ -580,11 +715,5 @@ return containsPromise ? Promise.all(completedResults) : completedResults;

*/
function completeListItemValue(item, completedResults, errors, exeContext, itemType, fieldNodes, info, itemPath, asyncPayloadRecord) {
function completeListItemValue(item, completedResults, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap) {
try {
let completedItem;
if ((0, utils_1.isPromise)(item)) {
completedItem = item.then(resolved => completeValue(exeContext, itemType, fieldNodes, info, itemPath, resolved, asyncPayloadRecord));
}
else {
completedItem = completeValue(exeContext, itemType, fieldNodes, info, itemPath, item, asyncPayloadRecord);
}
const completedItem = completeValue(exeContext, itemType, fieldGroup, info, itemPath, item, incrementalContext, deferMap);
if ((0, utils_1.isPromise)(completedItem)) {

@@ -594,7 +723,4 @@ // Note: we don't rely on a `catch` method, but we do expect "thenable"

completedResults.push(completedItem.then(undefined, rawError => {
rawError = (0, coerceError_js_1.coerceError)(rawError);
const error = (0, graphql_1.locatedError)(rawError, fieldNodes, (0, utils_1.pathToArray)(itemPath));
const handledError = handleFieldError(error, itemType, errors);
filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
return handledError;
handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
return null;
}));

@@ -606,10 +732,21 @@ return true;

catch (rawError) {
const coercedError = (0, coerceError_js_1.coerceError)(rawError);
const error = (0, graphql_1.locatedError)(coercedError, fieldNodes, (0, utils_1.pathToArray)(itemPath));
const handledError = handleFieldError(error, itemType, errors);
filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
completedResults.push(handledError);
handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
completedResults.push(null);
}
return false;
}
/**
 * Awaits a promised list item and completes it, also unwrapping a promised
 * completion. Any error raised along the way is recorded as a field error at
 * the item's path and the item is nulled out.
 */
async function completePromisedListItemValue(item, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap) {
    try {
        const resolved = await item;
        const completed = completeValue(exeContext, itemType, fieldGroup, info, itemPath, resolved, incrementalContext, deferMap);
        return (0, utils_1.isPromise)(completed) ? await completed : completed;
    }
    catch (rawError) {
        handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
        return null;
    }
}
/**

@@ -644,3 +781,3 @@ * Complete a Scalar or Enum by serializing to a valid value, returning

*/
function completeAbstractValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord) {
function completeAbstractValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
const resolveTypeFn = returnType.resolveType ?? exeContext.typeResolver;

@@ -650,9 +787,9 @@ const contextValue = exeContext.contextValue;

if ((0, utils_1.isPromise)(runtimeType)) {
return runtimeType.then(resolvedRuntimeType => completeObjectValue(exeContext, ensureValidRuntimeType(resolvedRuntimeType, exeContext, returnType, fieldNodes, info, result), fieldNodes, info, path, result, asyncPayloadRecord));
return runtimeType.then(resolvedRuntimeType => completeObjectValue(exeContext, ensureValidRuntimeType(resolvedRuntimeType, exeContext, returnType, fieldGroup, info, result), fieldGroup, info, path, result, incrementalContext, deferMap));
}
return completeObjectValue(exeContext, ensureValidRuntimeType(runtimeType, exeContext, returnType, fieldNodes, info, result), fieldNodes, info, path, result, asyncPayloadRecord);
return completeObjectValue(exeContext, ensureValidRuntimeType(runtimeType, exeContext, returnType, fieldGroup, info, result), fieldGroup, info, path, result, incrementalContext, deferMap);
}
function ensureValidRuntimeType(runtimeTypeName, exeContext, returnType, fieldNodes, info, result) {
function ensureValidRuntimeType(runtimeTypeName, exeContext, returnType, fieldGroup, info, result) {
if (runtimeTypeName == null) {
throw (0, utils_1.createGraphQLError)(`Abstract type "${returnType.name}" must resolve to an Object type at runtime for field "${info.parentType.name}.${info.fieldName}". Either the "${returnType.name}" type should provide a "resolveType" function or each possible type should provide an "isTypeOf" function.`, { nodes: fieldNodes });
throw (0, utils_1.createGraphQLError)(`Abstract type "${returnType.name}" must resolve to an Object type at runtime for field "${info.parentType.name}.${info.fieldName}". Either the "${returnType.name}" type should provide a "resolveType" function or each possible type should provide an "isTypeOf" function.`, { nodes: toNodes(fieldGroup) });
}

@@ -670,9 +807,9 @@ // releases before 16.0.0 supported returning `GraphQLObjectType` from `resolveType`

if (runtimeType == null) {
throw (0, utils_1.createGraphQLError)(`Abstract type "${returnType.name}" was resolved to a type "${runtimeTypeName}" that does not exist inside the schema.`, { nodes: fieldNodes });
throw (0, utils_1.createGraphQLError)(`Abstract type "${returnType.name}" was resolved to a type "${runtimeTypeName}" that does not exist inside the schema.`, { nodes: toNodes(fieldGroup) });
}
if (!(0, graphql_1.isObjectType)(runtimeType)) {
throw (0, utils_1.createGraphQLError)(`Abstract type "${returnType.name}" was resolved to a non-object type "${runtimeTypeName}".`, { nodes: fieldNodes });
throw (0, utils_1.createGraphQLError)(`Abstract type "${returnType.name}" was resolved to a non-object type "${runtimeTypeName}".`, { nodes: toNodes(fieldGroup) });
}
if (!exeContext.schema.isSubType(returnType, runtimeType)) {
throw (0, utils_1.createGraphQLError)(`Runtime Object type "${runtimeType.name}" is not a possible type for "${returnType.name}".`, { nodes: fieldNodes });
throw (0, utils_1.createGraphQLError)(`Runtime Object type "${runtimeType.name}" is not a possible type for "${returnType.name}".`, { nodes: toNodes(fieldGroup) });
}

@@ -684,3 +821,3 @@ return runtimeType;

*/
function completeObjectValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord) {
function completeObjectValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
// If there is an isTypeOf predicate function, call it with the

@@ -694,28 +831,66 @@ // current result. If isTypeOf returns false, then raise an error rather

if (!resolvedIsTypeOf) {
throw invalidReturnTypeError(returnType, result, fieldNodes);
throw invalidReturnTypeError(returnType, result, fieldGroup);
}
return collectAndExecuteSubfields(exeContext, returnType, fieldNodes, path, result, asyncPayloadRecord);
return collectAndExecuteSubfields(exeContext, returnType, fieldGroup, path, result, incrementalContext, deferMap);
});
}
if (!isTypeOf) {
throw invalidReturnTypeError(returnType, result, fieldNodes);
throw invalidReturnTypeError(returnType, result, fieldGroup);
}
}
return collectAndExecuteSubfields(exeContext, returnType, fieldNodes, path, result, asyncPayloadRecord);
return collectAndExecuteSubfields(exeContext, returnType, fieldGroup, path, result, incrementalContext, deferMap);
}
function invalidReturnTypeError(returnType, result, fieldNodes) {
return (0, utils_1.createGraphQLError)(`Expected value of type "${returnType.name}" but got: ${(0, utils_1.inspect)(result)}.`, {
nodes: fieldNodes,
});
function invalidReturnTypeError(returnType, result, fieldGroup) {
return (0, utils_1.createGraphQLError)(`Expected value of type "${returnType.name}" but got: ${(0, utils_1.inspect)(result)}.`, { nodes: toNodes(fieldGroup) });
}
function collectAndExecuteSubfields(exeContext, returnType, fieldNodes, path, result, asyncPayloadRecord) {
function addNewDeferredFragments(newDeferUsages, newDeferMap, path) {
// For each new deferUsage object:
for (const newDeferUsage of newDeferUsages) {
const parentDeferUsage = newDeferUsage.parentDeferUsage;
const parent = parentDeferUsage === undefined
? undefined
: deferredFragmentRecordFromDeferUsage(parentDeferUsage, newDeferMap);
// Instantiate the new record.
const deferredFragmentRecord = {
path,
label: newDeferUsage.label,
parent,
};
// Update the map.
newDeferMap.set(newDeferUsage, deferredFragmentRecord);
}
return newDeferMap;
}
function deferredFragmentRecordFromDeferUsage(deferUsage, deferMap) {
return deferMap.get(deferUsage);
}
function collectAndExecuteSubfields(exeContext, returnType, fieldGroup, path, result, incrementalContext, deferMap) {
// Collect sub-fields to execute to complete this value.
const { fields: subFieldNodes, patches: subPatches } = collectSubfields(exeContext, returnType, fieldNodes);
const subFields = executeFields(exeContext, returnType, result, path, subFieldNodes, asyncPayloadRecord);
for (const subPatch of subPatches) {
const { label, fields: subPatchFieldNodes } = subPatch;
executeDeferredFragment(exeContext, returnType, result, subPatchFieldNodes, label, path, asyncPayloadRecord);
const collectedSubfields = collectSubfields(exeContext, returnType, fieldGroup);
let groupedFieldSet = collectedSubfields.groupedFieldSet;
const newDeferUsages = collectedSubfields.newDeferUsages;
if (deferMap === undefined && newDeferUsages.length === 0) {
return executeFields(exeContext, returnType, result, path, groupedFieldSet, incrementalContext, undefined);
}
const subFieldPlan = buildSubFieldPlan(groupedFieldSet, incrementalContext?.deferUsageSet);
groupedFieldSet = subFieldPlan.groupedFieldSet;
const newGroupedFieldSets = subFieldPlan.newGroupedFieldSets;
const newDeferMap = addNewDeferredFragments(newDeferUsages, new Map(deferMap), path);
const subFields = executeFields(exeContext, returnType, result, path, groupedFieldSet, incrementalContext, newDeferMap);
if (newGroupedFieldSets.size > 0) {
const newDeferredGroupedFieldSetRecords = executeDeferredGroupedFieldSets(exeContext, returnType, result, path, incrementalContext?.deferUsageSet, newGroupedFieldSets, newDeferMap);
const context = incrementalContext ?? exeContext;
addIncrementalDataRecords(context, newDeferredGroupedFieldSetRecords);
}
return subFields;
}
function buildSubFieldPlan(originalGroupedFieldSet, deferUsageSet) {
let fieldPlan = originalGroupedFieldSet._fieldPlan;
if (fieldPlan !== undefined) {
return fieldPlan;
}
fieldPlan = (0, buildFieldPlan_js_1.buildFieldPlan)(originalGroupedFieldSet, deferUsageSet);
originalGroupedFieldSet._fieldPlan = fieldPlan;
return fieldPlan;
}
/**

@@ -875,10 +1050,2 @@ * If a resolveType function is not given, then a default resolve behavior is

exports.flattenIncrementalResults = flattenIncrementalResults;
async function* ensureAsyncIterable(someExecutionResult) {
if ('initialResult' in someExecutionResult) {
yield* flattenIncrementalResults(someExecutionResult);
}
else {
yield someExecutionResult;
}
}
function mapSourceToResponse(exeContext, resultOrStream) {

@@ -894,3 +1061,3 @@ if (!(0, utils_1.isAsyncIterable)(resultOrStream)) {

// "ExecuteQuery" algorithm, for which `execute` is also used.
return (0, flattenAsyncIterable_js_1.flattenAsyncIterable)((0, utils_1.mapAsyncIterator)(resultOrStream[Symbol.asyncIterator](), async (payload) => ensureAsyncIterable(await executeImpl(buildPerEventExecutionContext(exeContext, payload))), (error) => {
return (0, utils_1.mapAsyncIterator)(resultOrStream[Symbol.asyncIterator](), (payload) => executeOperation(buildPerEventExecutionContext(exeContext, payload)), (error) => {
const wrappedError = (0, utils_1.createGraphQLError)(error.message, {

@@ -901,3 +1068,3 @@ originalError: error,

throw wrappedError;
}));
});
}

@@ -924,13 +1091,14 @@ function createSourceEventStreamImpl(exeContext) {

}
const { fields: rootFields } = (0, utils_1.collectFields)(schema, fragments, variableValues, rootType, operation.selectionSet);
const [responseName, fieldNodes] = [...rootFields.entries()][0];
const fieldName = fieldNodes[0].name.value;
const fieldDef = getFieldDef(schema, rootType, fieldNodes[0]);
const { groupedFieldSet } = (0, collectFields_js_1.collectFields)(schema, fragments, variableValues, rootType, operation);
const firstRootField = [...groupedFieldSet.entries()][0];
const [responseName, fieldGroup] = firstRootField;
const fieldName = fieldGroup[0].node.name.value;
const fieldDef = getFieldDef(schema, rootType, fieldGroup[0].node);
if (!fieldDef) {
throw (0, utils_1.createGraphQLError)(`The subscription field "${fieldName}" is not defined.`, {
nodes: fieldNodes,
nodes: toNodes(fieldGroup),
});
}
const path = (0, utils_1.addPath)(undefined, responseName, rootType.name);
const info = buildResolveInfo(exeContext, fieldDef, fieldNodes, rootType, path);
const info = buildResolveInfo(exeContext, fieldDef, toNodes(fieldGroup), rootType, path);
try {

@@ -941,3 +1109,3 @@ // Implements the "ResolveFieldEventStream" algorithm from GraphQL specification.

// variables scope to fulfill any variable references.
const args = (0, utils_1.getArgumentValues)(fieldDef, fieldNodes[0], variableValues);
const args = (0, utils_1.getArgumentValues)(fieldDef, fieldGroup[0].node, variableValues);
// The resolve function's optional third argument is a context value that

@@ -953,3 +1121,3 @@ // is provided to every resolve function within an execution. It is commonly

return result.then(assertEventStream).then(undefined, error => {
throw (0, graphql_1.locatedError)(error, fieldNodes, (0, utils_1.pathToArray)(path));
throw (0, graphql_1.locatedError)(error, toNodes(fieldGroup), (0, utils_1.pathToArray)(path));
});

@@ -960,3 +1128,3 @@ }

catch (error) {
throw (0, graphql_1.locatedError)(error, fieldNodes, (0, utils_1.pathToArray)(path));
throw (0, graphql_1.locatedError)(error, toNodes(fieldGroup), (0, utils_1.pathToArray)(path));
}

@@ -982,343 +1150,210 @@ }

}
function executeDeferredFragment(exeContext, parentType, sourceValue, fields, label, path, parentContext) {
const asyncPayloadRecord = new DeferredFragmentRecord({
label,
path,
parentContext,
exeContext,
});
let promiseOrData;
try {
promiseOrData = executeFields(exeContext, parentType, sourceValue, path, fields, asyncPayloadRecord);
if ((0, utils_1.isPromise)(promiseOrData)) {
promiseOrData = promiseOrData.then(null, e => {
asyncPayloadRecord.errors.push(e);
return null;
});
}
function executeDeferredGroupedFieldSets(exeContext, parentType, sourceValue, path, parentDeferUsages, newGroupedFieldSets, deferMap) {
const newDeferredGroupedFieldSetRecords = [];
for (const [deferUsageSet, groupedFieldSet] of newGroupedFieldSets) {
const deferredFragmentRecords = getDeferredFragmentRecords(deferUsageSet, deferMap);
const deferredGroupedFieldSetRecord = {
path,
deferredFragmentRecords,
result: undefined,
};
const executor = () => executeDeferredGroupedFieldSet(deferredGroupedFieldSetRecord, exeContext, parentType, sourceValue, path, groupedFieldSet, {
errors: undefined,
deferUsageSet,
incrementalDataRecords: undefined,
}, deferMap);
const shouldDeferThisDeferUsageSet = shouldDefer(parentDeferUsages, deferUsageSet);
deferredGroupedFieldSetRecord.result = shouldDeferThisDeferUsageSet
? exeContext.enableEarlyExecution
? new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(Promise.resolve().then(executor))
: () => new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(executor())
: new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(executor());
newDeferredGroupedFieldSetRecords.push(deferredGroupedFieldSetRecord);
}
catch (e) {
asyncPayloadRecord.errors.push(e);
promiseOrData = null;
}
asyncPayloadRecord.addData(promiseOrData);
return newDeferredGroupedFieldSetRecords;
}
function executeStreamField(path, itemPath, item, exeContext, fieldNodes, info, itemType, label, parentContext) {
const asyncPayloadRecord = new StreamRecord({
label,
path: itemPath,
parentContext,
exeContext,
});
let completedItem;
function shouldDefer(parentDeferUsages, deferUsages) {
// If we have a new child defer usage, defer.
// Otherwise, this defer usage was already deferred when it was initially
// encountered, and is now in the midst of executing early, so the new
// deferred grouped fields set can be executed immediately.
return (parentDeferUsages === undefined ||
!Array.from(deferUsages).every(deferUsage => parentDeferUsages.has(deferUsage)));
}
function executeDeferredGroupedFieldSet(deferredGroupedFieldSetRecord, exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap) {
let result;
try {
try {
if ((0, utils_1.isPromise)(item)) {
completedItem = item.then(resolved => completeValue(exeContext, itemType, fieldNodes, info, itemPath, resolved, asyncPayloadRecord));
}
else {
completedItem = completeValue(exeContext, itemType, fieldNodes, info, itemPath, item, asyncPayloadRecord);
}
if ((0, utils_1.isPromise)(completedItem)) {
// Note: we don't rely on a `catch` method, but we do expect "thenable"
// to take a second callback for the error case.
completedItem = completedItem.then(undefined, rawError => {
rawError = (0, coerceError_js_1.coerceError)(rawError);
const error = (0, graphql_1.locatedError)(rawError, fieldNodes, (0, utils_1.pathToArray)(itemPath));
const handledError = handleFieldError(error, itemType, asyncPayloadRecord.errors);
filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
return handledError;
});
}
}
catch (rawError) {
const coercedError = (0, coerceError_js_1.coerceError)(rawError);
const error = (0, graphql_1.locatedError)(coercedError, fieldNodes, (0, utils_1.pathToArray)(itemPath));
completedItem = handleFieldError(error, itemType, asyncPayloadRecord.errors);
filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
}
result = executeFields(exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap);
}
catch (error) {
asyncPayloadRecord.errors.push(error);
filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
asyncPayloadRecord.addItems(null);
return asyncPayloadRecord;
return {
deferredGroupedFieldSetRecord,
path: (0, utils_1.pathToArray)(path),
errors: withError(incrementalContext.errors, error),
};
}
let completedItems;
if ((0, utils_1.isPromise)(completedItem)) {
completedItems = completedItem.then(value => [value], error => {
asyncPayloadRecord.errors.push(error);
filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
return null;
});
if ((0, utils_1.isPromise)(result)) {
return result.then(resolved => buildDeferredGroupedFieldSetResult(incrementalContext, deferredGroupedFieldSetRecord, path, resolved), error => ({
deferredGroupedFieldSetRecord,
path: (0, utils_1.pathToArray)(path),
errors: withError(incrementalContext.errors, error),
}));
}
else {
completedItems = [completedItem];
}
asyncPayloadRecord.addItems(completedItems);
return asyncPayloadRecord;
return buildDeferredGroupedFieldSetResult(incrementalContext, deferredGroupedFieldSetRecord, path, result);
}
async function executeStreamIteratorItem(iterator, exeContext, fieldNodes, info, itemType, asyncPayloadRecord, itemPath) {
let item;
try {
const { value, done } = await iterator.next();
if (done) {
asyncPayloadRecord.setIsCompletedIterator();
return { done, value: undefined };
}
item = value;
function buildDeferredGroupedFieldSetResult(incrementalContext, deferredGroupedFieldSetRecord, path, data) {
const { errors, incrementalDataRecords } = incrementalContext;
if (incrementalDataRecords === undefined) {
return {
deferredGroupedFieldSetRecord,
path: (0, utils_1.pathToArray)(path),
result: errors === undefined ? { data } : { data, errors: [...errors.values()] },
incrementalDataRecords,
};
}
catch (rawError) {
const coercedError = (0, coerceError_js_1.coerceError)(rawError);
const error = (0, graphql_1.locatedError)(coercedError, fieldNodes, (0, utils_1.pathToArray)(itemPath));
const value = handleFieldError(error, itemType, asyncPayloadRecord.errors);
// don't continue if iterator throws
return { done: true, value };
if (errors === undefined) {
return {
deferredGroupedFieldSetRecord,
path: (0, utils_1.pathToArray)(path),
result: { data },
incrementalDataRecords,
};
}
let completedItem;
try {
completedItem = completeValue(exeContext, itemType, fieldNodes, info, itemPath, item, asyncPayloadRecord);
if ((0, utils_1.isPromise)(completedItem)) {
completedItem = completedItem.then(undefined, rawError => {
const error = (0, graphql_1.locatedError)(rawError, fieldNodes, (0, utils_1.pathToArray)(itemPath));
const handledError = handleFieldError(error, itemType, asyncPayloadRecord.errors);
filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
return handledError;
});
}
return { done: false, value: completedItem };
}
catch (rawError) {
const error = (0, graphql_1.locatedError)(rawError, fieldNodes, (0, utils_1.pathToArray)(itemPath));
const value = handleFieldError(error, itemType, asyncPayloadRecord.errors);
filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
return { done: false, value };
}
return {
deferredGroupedFieldSetRecord,
path: (0, utils_1.pathToArray)(path),
result: { data, errors: [...errors.values()] },
incrementalDataRecords: filterIncrementalDataRecords(path, errors, incrementalDataRecords),
};
}
async function executeStreamIterator(initialIndex, iterator, exeContext, fieldNodes, info, itemType, path, label, parentContext) {
let index = initialIndex;
let previousAsyncPayloadRecord = parentContext ?? undefined;
while (true) {
const itemPath = (0, utils_1.addPath)(path, index, undefined);
const asyncPayloadRecord = new StreamRecord({
label,
path: itemPath,
parentContext: previousAsyncPayloadRecord,
iterator,
exeContext,
});
let iteration;
try {
iteration = await executeStreamIteratorItem(iterator, exeContext, fieldNodes, info, itemType, asyncPayloadRecord, itemPath);
}
catch (error) {
asyncPayloadRecord.errors.push(error);
filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
asyncPayloadRecord.addItems(null);
// entire stream has errored and bubbled upwards
if (iterator?.return) {
iterator.return().catch(() => {
// ignore errors
});
}
return;
}
const { done, value: completedItem } = iteration;
let completedItems;
if ((0, utils_1.isPromise)(completedItem)) {
completedItems = completedItem.then(value => [value], error => {
asyncPayloadRecord.errors.push(error);
filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
return null;
});
}
else {
completedItems = [completedItem];
}
asyncPayloadRecord.addItems(completedItems);
if (done) {
break;
}
previousAsyncPayloadRecord = asyncPayloadRecord;
index++;
}
function getDeferredFragmentRecords(deferUsages, deferMap) {
return Array.from(deferUsages).map(deferUsage => deferredFragmentRecordFromDeferUsage(deferUsage, deferMap));
}
function filterSubsequentPayloads(exeContext, nullPath, currentAsyncRecord) {
const nullPathArray = (0, utils_1.pathToArray)(nullPath);
exeContext.subsequentPayloads.forEach(asyncRecord => {
if (asyncRecord === currentAsyncRecord) {
// don't remove payload from where error originates
return;
}
for (let i = 0; i < nullPathArray.length; i++) {
if (asyncRecord.path[i] !== nullPathArray[i]) {
// asyncRecord points to a path unaffected by this payload
return;
function buildSyncStreamItemQueue(initialItem, initialIndex, streamPath, iterator, exeContext, fieldGroup, info, itemType) {
const streamItemQueue = [];
const enableEarlyExecution = exeContext.enableEarlyExecution;
const firstExecutor = () => {
const initialPath = (0, utils_1.addPath)(streamPath, initialIndex, undefined);
const firstStreamItem = new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(completeStreamItem(streamPath, initialPath, initialItem, exeContext, { errors: undefined, incrementalDataRecords: undefined }, fieldGroup, info, itemType));
let iteration = iterator.next();
let currentIndex = initialIndex + 1;
let currentStreamItem = firstStreamItem;
while (!iteration.done) {
// TODO: add test case for early sync termination
/* c8 ignore next 6 */
if (currentStreamItem instanceof BoxedPromiseOrValue_js_1.BoxedPromiseOrValue) {
const result = currentStreamItem.value;
if (!(0, utils_1.isPromise)(result) && result.errors !== undefined) {
break;
}
}
const itemPath = (0, utils_1.addPath)(streamPath, currentIndex, undefined);
const value = iteration.value;
const currentExecutor = () => completeStreamItem(streamPath, itemPath, value, exeContext, { errors: undefined, incrementalDataRecords: undefined }, fieldGroup, info, itemType);
currentStreamItem = enableEarlyExecution
? new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(currentExecutor())
: () => new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(currentExecutor());
streamItemQueue.push(currentStreamItem);
iteration = iterator.next();
currentIndex = initialIndex + 1;
}
// asyncRecord path points to nulled error field
if (isStreamPayload(asyncRecord) && asyncRecord.iterator?.return) {
asyncRecord.iterator.return().catch(() => {
// ignore error
});
}
exeContext.subsequentPayloads.delete(asyncRecord);
});
streamItemQueue.push(new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue({ path: streamPath }));
return firstStreamItem.value;
};
streamItemQueue.push(enableEarlyExecution
? new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(Promise.resolve().then(firstExecutor))
: () => new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(firstExecutor()));
return streamItemQueue;
}
function getCompletedIncrementalResults(exeContext) {
const incrementalResults = [];
for (const asyncPayloadRecord of exeContext.subsequentPayloads) {
const incrementalResult = {};
if (!asyncPayloadRecord.isCompleted) {
continue;
}
exeContext.subsequentPayloads.delete(asyncPayloadRecord);
if (isStreamPayload(asyncPayloadRecord)) {
const items = asyncPayloadRecord.items;
if (asyncPayloadRecord.isCompletedIterator) {
// async iterable resolver just finished but there may be pending payloads
continue;
}
incrementalResult.items = items;
}
else {
const data = asyncPayloadRecord.data;
incrementalResult.data = data ?? null;
}
incrementalResult.path = asyncPayloadRecord.path;
if (asyncPayloadRecord.label) {
incrementalResult.label = asyncPayloadRecord.label;
}
if (asyncPayloadRecord.errors.length > 0) {
incrementalResult.errors = asyncPayloadRecord.errors;
}
incrementalResults.push(incrementalResult);
function buildAsyncStreamItemQueue(initialIndex, streamPath, asyncIterator, exeContext, fieldGroup, info, itemType) {
const streamItemQueue = [];
const executor = () => getNextAsyncStreamItemResult(streamItemQueue, streamPath, initialIndex, asyncIterator, exeContext, fieldGroup, info, itemType);
streamItemQueue.push(exeContext.enableEarlyExecution
? new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(executor())
: () => new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(executor()));
return streamItemQueue;
}
async function getNextAsyncStreamItemResult(streamItemQueue, streamPath, index, asyncIterator, exeContext, fieldGroup, info, itemType) {
let iteration;
try {
iteration = await asyncIterator.next();
}
return incrementalResults;
}
function yieldSubsequentPayloads(exeContext) {
let isDone = false;
const abortPromise = new Promise((_, reject) => {
exeContext.signal?.addEventListener('abort', () => {
isDone = true;
reject(exeContext.signal?.reason);
});
});
async function next() {
if (isDone) {
return { value: undefined, done: true };
}
await Promise.race([
abortPromise,
...Array.from(exeContext.subsequentPayloads).map(p => p.promise),
]);
if (isDone) {
// a different call to next has exhausted all payloads
return { value: undefined, done: true };
}
const incremental = getCompletedIncrementalResults(exeContext);
const hasNext = exeContext.subsequentPayloads.size > 0;
if (!incremental.length && hasNext) {
return next();
}
if (!hasNext) {
isDone = true;
}
catch (error) {
return {
value: incremental.length ? { incremental, hasNext } : { hasNext },
done: false,
path: streamPath,
errors: [(0, graphql_1.locatedError)(error, toNodes(fieldGroup), (0, utils_1.pathToArray)(streamPath))],
};
}
function returnStreamIterators() {
const promises = [];
exeContext.subsequentPayloads.forEach(asyncPayloadRecord => {
if (isStreamPayload(asyncPayloadRecord) && asyncPayloadRecord.iterator?.return) {
promises.push(asyncPayloadRecord.iterator.return());
}
});
return Promise.all(promises);
if (iteration.done) {
return { path: streamPath };
}
return {
[Symbol.asyncIterator]() {
return this;
},
next,
async return() {
await returnStreamIterators();
isDone = true;
return { value: undefined, done: true };
},
async throw(error) {
await returnStreamIterators();
isDone = true;
return Promise.reject(error);
},
};
const itemPath = (0, utils_1.addPath)(streamPath, index, undefined);
const result = completeStreamItem(streamPath, itemPath, iteration.value, exeContext, { errors: undefined, incrementalDataRecords: undefined }, fieldGroup, info, itemType);
const executor = () => getNextAsyncStreamItemResult(streamItemQueue, streamPath, index, asyncIterator, exeContext, fieldGroup, info, itemType);
streamItemQueue.push(exeContext.enableEarlyExecution
? new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(executor())
: () => new BoxedPromiseOrValue_js_1.BoxedPromiseOrValue(executor()));
return result;
}
class DeferredFragmentRecord {
constructor(opts) {
this.type = 'defer';
this.label = opts.label;
this.path = (0, utils_1.pathToArray)(opts.path);
this.parentContext = opts.parentContext;
this.errors = [];
this._exeContext = opts.exeContext;
this._exeContext.subsequentPayloads.add(this);
this.isCompleted = false;
this.data = null;
this.promise = new Promise(resolve => {
this._resolve = MaybePromise => {
resolve(MaybePromise);
};
}).then(data => {
this.data = data;
this.isCompleted = true;
});
function completeStreamItem(streamPath, itemPath, item, exeContext, incrementalContext, fieldGroup, info, itemType) {
if ((0, utils_1.isPromise)(item)) {
return completePromisedValue(exeContext, itemType, fieldGroup, info, itemPath, item, incrementalContext, new Map()).then(resolvedItem => buildStreamItemResult(incrementalContext, streamPath, resolvedItem), error => ({
path: streamPath,
errors: withError(incrementalContext.errors, error),
}));
}
addData(data) {
const parentData = this.parentContext?.promise;
if (parentData) {
this._resolve?.(parentData.then(() => data));
return;
let result;
try {
try {
result = completeValue(exeContext, itemType, fieldGroup, info, itemPath, item, incrementalContext, new Map());
}
this._resolve?.(data);
catch (rawError) {
handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
result = null;
}
}
catch (error) {
return {
path: streamPath,
errors: withError(incrementalContext.errors, error),
};
}
if ((0, utils_1.isPromise)(result)) {
return result
.then(undefined, rawError => {
handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
return null;
})
.then(resolvedItem => buildStreamItemResult(incrementalContext, streamPath, resolvedItem), error => ({
path: streamPath,
errors: withError(incrementalContext.errors, error),
}));
}
return buildStreamItemResult(incrementalContext, streamPath, result);
}
class StreamRecord {
constructor(opts) {
this.type = 'stream';
this.items = null;
this.label = opts.label;
this.path = (0, utils_1.pathToArray)(opts.path);
this.parentContext = opts.parentContext;
this.iterator = opts.iterator;
this.errors = [];
this._exeContext = opts.exeContext;
this._exeContext.subsequentPayloads.add(this);
this.isCompleted = false;
this.items = null;
this.promise = new Promise(resolve => {
this._resolve = MaybePromise => {
resolve(MaybePromise);
};
}).then(items => {
this.items = items;
this.isCompleted = true;
});
function buildStreamItemResult(incrementalContext, streamPath, item) {
const { errors, incrementalDataRecords } = incrementalContext;
if (incrementalDataRecords === undefined) {
return {
path: streamPath,
item,
errors: errors === undefined ? undefined : [...errors.values()],
incrementalDataRecords,
};
}
addItems(items) {
const parentData = this.parentContext?.promise;
if (parentData) {
this._resolve?.(parentData.then(() => items));
return;
}
this._resolve?.(items);
if (errors === undefined) {
return {
path: streamPath,
item,
errors,
incrementalDataRecords,
};
}
setIsCompletedIterator() {
this.isCompletedIterator = true;
}
return {
path: streamPath,
item,
errors: [...errors.values()],
incrementalDataRecords: filterIncrementalDataRecords(streamPath, errors, incrementalDataRecords),
};
}
function isStreamPayload(asyncPayload) {
return asyncPayload.type === 'stream';
}
/**

@@ -1325,0 +1360,0 @@ * This method looks up the field on the given type definition.

import { assertValidSchema, getDirectiveValues, GraphQLError, isAbstractType, isLeafType, isListType, isNonNullType, isObjectType, Kind, locatedError, SchemaMetaFieldDef, TypeMetaFieldDef, TypeNameMetaFieldDef, } from 'graphql';
import { ValueOrPromise } from 'value-or-promise';
import { collectSubFields as _collectSubfields, addPath, collectFields, createGraphQLError, getArgumentValues, getDefinedRootType, GraphQLStreamDirective, inspect, isAsyncIterable, isIterableObject, isObjectLike, isPromise, mapAsyncIterator, memoize1, memoize3, pathToArray, promiseReduce, } from '@graphql-tools/utils';
import { addPath, createGraphQLError, getArgumentValues, getDefinedRootType, GraphQLStreamDirective, inspect, isAsyncIterable, isIterableObject, isObjectLike, isPromise, mapAsyncIterator, memoize1, memoize3, pathToArray, promiseReduce, } from '@graphql-tools/utils';
import { BoxedPromiseOrValue } from './BoxedPromiseOrValue.js';
import { buildFieldPlan } from './buildFieldPlan.js';
import { coerceError } from './coerceError.js';
import { flattenAsyncIterable } from './flattenAsyncIterable.js';
import { collectSubfields as _collectSubfields, collectFields, } from './collectFields.js';
import { buildIncrementalResponse } from './IncrementalPublisher.js';
import { invariant } from './invariant.js';

@@ -14,3 +16,3 @@ import { promiseForObject } from './promiseForObject.js';

*/
const collectSubfields = memoize3((exeContext, returnType, fieldNodes) => _collectSubfields(exeContext.schema, exeContext.fragments, exeContext.variableValues, returnType, fieldNodes));
const collectSubfields = memoize3((exeContext, returnType, fieldGroup) => _collectSubfields(exeContext.schema, exeContext.fragments, exeContext.variableValues, exeContext.operation, returnType, fieldGroup));
/**

@@ -49,42 +51,4 @@ * Implements the "Executing requests" section of the GraphQL specification,

}
return executeImpl(exeContext);
return executeOperation(exeContext);
}
function executeImpl(exeContext) {
if (exeContext.signal?.aborted) {
throw exeContext.signal.reason;
}
// Return a Promise that will eventually resolve to the data described by
// The "Response" section of the GraphQL specification.
//
// If errors are encountered while executing a GraphQL field, only that
// field and its descendants will be omitted, and sibling fields will still
// be executed. An execution which encounters errors will still result in a
// resolved Promise.
//
// Errors from sub-fields of a NonNull type may propagate to the top level,
// at which point we still log the error and null the parent field, which
// in this case is the entire response.
const result = new ValueOrPromise(() => executeOperation(exeContext))
.then(data => {
const initialResult = buildResponse(data, exeContext.errors);
if (exeContext.subsequentPayloads.size > 0) {
return {
initialResult: {
...initialResult,
hasNext: true,
},
subsequentResults: yieldSubsequentPayloads(exeContext),
};
}
return initialResult;
}, (error) => {
if (exeContext.signal?.aborted) {
throw exeContext.signal.reason;
}
exeContext.errors.push(error);
return buildResponse(null, exeContext.errors);
})
.resolve();
return result;
}
/**

@@ -107,5 +71,46 @@ * Also implements the "Executing requests" section of the GraphQL specification.

*/
function buildResponse(data, errors) {
return errors.length === 0 ? { data } : { errors, data };
function buildDataResponse(exeContext, data) {
const { errors, incrementalDataRecords } = exeContext;
if (incrementalDataRecords === undefined) {
return buildSingleResult(data, errors);
}
if (errors === undefined) {
return buildIncrementalResponse(exeContext, data, undefined, incrementalDataRecords);
}
const filteredIncrementalDataRecords = filterIncrementalDataRecords(undefined, errors, incrementalDataRecords);
if (filteredIncrementalDataRecords.length === 0) {
return buildSingleResult(data, errors);
}
return buildIncrementalResponse(exeContext, data, Array.from(errors.values()), filteredIncrementalDataRecords);
}
function buildSingleResult(data, errors) {
return errors !== undefined ? { errors: Array.from(errors.values()), data } : { data };
}
function filterIncrementalDataRecords(initialPath, errors, incrementalDataRecords) {
const filteredIncrementalDataRecords = [];
for (const incrementalDataRecord of incrementalDataRecords) {
let currentPath = incrementalDataRecord.path;
if (errors.has(currentPath)) {
continue;
}
const paths = [currentPath];
let filtered = false;
while (currentPath !== initialPath) {
// Because currentPath leads to initialPath or is undefined, and the
// loop will exit if initialPath is undefined, currentPath must be
// defined.
// TODO: Consider, however, adding an invariant.
currentPath = currentPath.prev;
if (errors.has(currentPath)) {
filtered = true;
break;
}
paths.push(currentPath);
}
if (!filtered) {
filteredIncrementalDataRecords.push(incrementalDataRecord);
}
}
return filteredIncrementalDataRecords;
}
/**

@@ -143,3 +148,3 @@ * Essential assertions before executing to provide developer feedback for

export function buildExecutionContext(args) {
const { schema, document, rootValue, contextValue, variableValues: rawVariableValues, operationName, fieldResolver, typeResolver, subscribeFieldResolver, signal, } = args;
const { schema, document, rootValue, contextValue, variableValues: rawVariableValues, operationName, fieldResolver, typeResolver, subscribeFieldResolver, enableEarlyExecution, signal, } = args;
// If the schema used for execution is invalid, throw an error.

@@ -193,5 +198,7 @@ assertValidSchema(schema);

subscribeFieldResolver: subscribeFieldResolver ?? defaultFieldResolver,
subsequentPayloads: new Set(),
errors: [],
enableEarlyExecution: enableEarlyExecution !== false,
signal,
errors: undefined,
cancellableStreams: undefined,
incrementalDataRecords: undefined,
};

@@ -203,4 +210,3 @@ }

rootValue: payload,
subsequentPayloads: new Set(),
errors: [],
errors: undefined,
};

@@ -212,24 +218,72 @@ }

function executeOperation(exeContext) {
const { operation, schema, fragments, variableValues, rootValue } = exeContext;
const rootType = getDefinedRootType(schema, operation.operation, [operation]);
if (rootType == null) {
createGraphQLError(`Schema is not configured to execute ${operation.operation} operation.`, {
nodes: operation,
});
if (exeContext.signal?.aborted) {
throw exeContext.signal.reason;
}
const { fields: rootFields, patches } = collectFields(schema, fragments, variableValues, rootType, operation.selectionSet);
const path = undefined;
try {
const { operation, schema, fragments, variableValues, rootValue } = exeContext;
const rootType = getDefinedRootType(schema, operation.operation, [operation]);
if (rootType == null) {
createGraphQLError(`Schema is not configured to execute ${operation.operation} operation.`, {
nodes: operation,
});
}
const collectedFields = collectFields(schema, fragments, variableValues, rootType, operation);
let groupedFieldSet = collectedFields.groupedFieldSet;
const newDeferUsages = collectedFields.newDeferUsages;
let data;
if (newDeferUsages.length === 0) {
data = executeRootGroupedFieldSet(exeContext, operation.operation, rootType, rootValue, groupedFieldSet, undefined);
}
else {
const fieldPLan = buildFieldPlan(groupedFieldSet);
groupedFieldSet = fieldPLan.groupedFieldSet;
const newGroupedFieldSets = fieldPLan.newGroupedFieldSets;
const newDeferMap = addNewDeferredFragments(newDeferUsages, new Map());
data = executeRootGroupedFieldSet(exeContext, operation.operation, rootType, rootValue, groupedFieldSet, newDeferMap);
if (newGroupedFieldSets.size > 0) {
const newDeferredGroupedFieldSetRecords = executeDeferredGroupedFieldSets(exeContext, rootType, rootValue, undefined, undefined, newGroupedFieldSets, newDeferMap);
addIncrementalDataRecords(exeContext, newDeferredGroupedFieldSetRecords);
}
}
if (isPromise(data)) {
return data.then(resolved => buildDataResponse(exeContext, resolved), error => {
if (exeContext.signal?.aborted) {
throw exeContext.signal.reason;
}
return {
data: null,
errors: withError(exeContext.errors, error),
};
});
}
return buildDataResponse(exeContext, data);
}
catch (error) {
if (exeContext.signal?.aborted) {
throw exeContext.signal.reason;
}
return { data: null, errors: withError(exeContext.errors, error) };
}
}
function executeRootGroupedFieldSet(exeContext, operation, rootType, rootValue, groupedFieldSet, deferMap) {
let result;
if (operation.operation === 'mutation') {
result = executeFieldsSerially(exeContext, rootType, rootValue, path, rootFields);
if (operation === 'mutation') {
result = executeFieldsSerially(exeContext, rootType, rootValue, undefined, groupedFieldSet, undefined, deferMap);
}
else {
result = executeFields(exeContext, rootType, rootValue, path, rootFields);
result = executeFields(exeContext, rootType, rootValue, undefined, groupedFieldSet, undefined, deferMap);
}
for (const patch of patches) {
const { label, fields: patchFields } = patch;
executeDeferredFragment(exeContext, rootType, rootValue, patchFields, label, path);
}
return result;
}
// Append new incremental data records to the context, lazily creating the
// backing array on first use so contexts without deferred/streamed work
// never allocate one.
function addIncrementalDataRecords(context, newIncrementalDataRecords) {
    const existing = context.incrementalDataRecords;
    if (existing === undefined) {
        // Copy rather than alias the input so later pushes stay private.
        context.incrementalDataRecords = [...newIncrementalDataRecords];
    }
    else {
        existing.push(...newIncrementalDataRecords);
    }
}
// Build the final errors array for a failed response: the values of the
// path-keyed `errors` map (if any were collected) followed by the terminal
// `error` that aborted execution.
function withError(errors, error) {
    if (errors === undefined) {
        return [error];
    }
    return [...errors.values(), error];
}
/**

@@ -239,4 +293,4 @@ * Implements the "Executing selection sets" section of the spec

*/
function executeFieldsSerially(exeContext, parentType, sourceValue, path, fields) {
return promiseReduce(fields, (results, [responseName, fieldNodes]) => {
function executeFieldsSerially(exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap) {
return promiseReduce(groupedFieldSet, (results, [responseName, fieldGroup]) => {
const fieldPath = addPath(path, responseName, parentType.name);

@@ -246,10 +300,15 @@ if (exeContext.signal?.aborted) {

}
return new ValueOrPromise(() => executeField(exeContext, parentType, sourceValue, fieldNodes, fieldPath)).then(result => {
if (result === undefined) {
const result = executeField(exeContext, parentType, sourceValue, fieldGroup, fieldPath, incrementalContext, deferMap);
if (result === undefined) {
return results;
}
if (isPromise(result)) {
return result.then(resolved => {
results[responseName] = resolved;
return results;
}
results[responseName] = result;
return results;
});
}, Object.create(null)).resolve();
});
}
results[responseName] = result;
return results;
}, Object.create(null));
}

@@ -260,7 +319,7 @@ /**

*/
function executeFields(exeContext, parentType, sourceValue, path, fields, asyncPayloadRecord) {
function executeFields(exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap) {
const results = Object.create(null);
let containsPromise = false;
try {
for (const [responseName, fieldNodes] of fields) {
for (const [responseName, fieldGroup] of groupedFieldSet) {
if (exeContext.signal?.aborted) {

@@ -270,3 +329,3 @@ throw exeContext.signal.reason;

const fieldPath = addPath(path, responseName, parentType.name);
const result = executeField(exeContext, parentType, sourceValue, fieldNodes, fieldPath, asyncPayloadRecord);
const result = executeField(exeContext, parentType, sourceValue, fieldGroup, fieldPath, incrementalContext, deferMap);
if (result !== undefined) {

@@ -289,3 +348,3 @@ results[responseName] = result;

}
// If there are no promises, we can just return the object
// If there are no promises, we can just return the object and any incrementalDataRecords
if (!containsPromise) {

@@ -299,2 +358,5 @@ return results;

}
// Project a field group (array of { node, deferUsage } details) down to the
// bare AST field nodes, e.g. for error locations and resolve info.
function toNodes(fieldGroup) {
    const nodes = [];
    for (const fieldDetails of fieldGroup) {
        nodes.push(fieldDetails.node);
    }
    return nodes;
}
/**

@@ -306,5 +368,4 @@ * Implements the "Executing fields" section of the spec

*/
function executeField(exeContext, parentType, source, fieldNodes, path, asyncPayloadRecord) {
const errors = asyncPayloadRecord?.errors ?? exeContext.errors;
const fieldDef = getFieldDef(exeContext.schema, parentType, fieldNodes[0]);
function executeField(exeContext, parentType, source, fieldGroup, path, incrementalContext, deferMap) {
const fieldDef = getFieldDef(exeContext.schema, parentType, fieldGroup[0].node);
if (!fieldDef) {

@@ -315,3 +376,3 @@ return;

const resolveFn = fieldDef.resolve ?? exeContext.fieldResolver;
const info = buildResolveInfo(exeContext, fieldDef, fieldNodes, parentType, path);
const info = buildResolveInfo(exeContext, fieldDef, toNodes(fieldGroup), parentType, path);
// Get the resolve function, regardless of if its result is normal or abrupt (error).

@@ -322,3 +383,3 @@ try {

// TODO: find a way to memoize, in case this field is within a List type.
const args = getArgumentValues(fieldDef, fieldNodes[0], exeContext.variableValues);
const args = getArgumentValues(fieldDef, fieldGroup[0].node, exeContext.variableValues);
// The resolve function's optional third argument is a context value that

@@ -329,9 +390,6 @@ // is provided to every resolve function within an execution. It is commonly

const result = resolveFn(source, args, contextValue, info);
let completed;
if (isPromise(result)) {
completed = result.then(resolved => completeValue(exeContext, returnType, fieldNodes, info, path, resolved, asyncPayloadRecord));
return completePromisedValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
}
else {
completed = completeValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord);
}
const completed = completeValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
if (isPromise(completed)) {

@@ -341,7 +399,4 @@ // Note: we don't rely on a `catch` method, but we do expect "thenable"

return completed.then(undefined, rawError => {
rawError = coerceError(rawError);
const error = locatedError(rawError, fieldNodes, pathToArray(path));
const handledError = handleFieldError(error, returnType, errors);
filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
return handledError;
handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext);
return null;
});

@@ -352,7 +407,4 @@ }

catch (rawError) {
const coercedError = coerceError(rawError);
const error = locatedError(coercedError, fieldNodes, pathToArray(path));
const handledError = handleFieldError(error, returnType, errors);
filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
return handledError;
handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext);
return null;
}

@@ -380,3 +432,4 @@ }

}
function handleFieldError(error, returnType, errors) {
function handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext) {
const error = locatedError(coerceError(rawError), toNodes(fieldGroup), pathToArray(path));
// If the field type is non-nullable, then it is resolved without any

@@ -389,4 +442,9 @@ // protection from errors, however it still properly locates the error.

// a null value for this field if one is encountered.
errors.push(error);
return null;
const context = incrementalContext ?? exeContext;
let errors = context.errors;
if (errors === undefined) {
errors = new Map();
context.errors = errors;
}
errors.set(path, error);
}

@@ -414,3 +472,3 @@ /**

*/
function completeValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord) {
function completeValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
// If result is an Error, throw a located error.

@@ -423,4 +481,4 @@ if (result instanceof Error) {

if (isNonNullType(returnType)) {
const completed = completeValue(exeContext, returnType.ofType, fieldNodes, info, path, result, asyncPayloadRecord);
if (completed === null) {
const completed = completeValue(exeContext, returnType.ofType, fieldGroup, info, path, result, incrementalContext, deferMap);
if (completed == null) {
throw new Error(`Cannot return null for non-nullable field ${info.parentType.name}.${info.fieldName}.`);

@@ -436,3 +494,3 @@ }

if (isListType(returnType)) {
return completeListValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord);
return completeListValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
}

@@ -447,18 +505,32 @@ // If field type is a leaf type, Scalar or Enum, serialize to a valid value,

if (isAbstractType(returnType)) {
return completeAbstractValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord);
return completeAbstractValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
}
// If field type is Object, execute and complete all sub-selections.
if (isObjectType(returnType)) {
return completeObjectValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord);
return completeObjectValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap);
}
/* c8 ignore next 6 */
// Not reachable, all possible output types have been considered.
console.assert(false, 'Cannot complete value of unexpected output type: ' + inspect(returnType));
invariant(false, 'Cannot complete value of unexpected output type: ' + inspect(returnType));
}
// Complete a field whose resolver returned a promise: await the source value,
// run normal value completion on it, and await once more if completion is
// itself asynchronous. On any rejection the error is recorded for this path
// and the field is nulled, per the spec's error-handling rules.
async function completePromisedValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
    try {
        const resolved = await result;
        const completed = completeValue(exeContext, returnType, fieldGroup, info, path, resolved, incrementalContext, deferMap);
        return isPromise(completed) ? await completed : completed;
    }
    catch (rawError) {
        handleFieldError(rawError, exeContext, returnType, fieldGroup, path, incrementalContext);
        return null;
    }
}
/**
* Returns an object containing the `@stream` arguments if a field should be
* Returns an object containing info for streaming if a field should be
* streamed based on the experimental flag, stream directive present and
* not disabled by the "if" argument.
*/
function getStreamValues(exeContext, fieldNodes, path) {
function getStreamUsage(exeContext, fieldGroup, path) {
// do not stream inner lists of multi-dimensional lists

@@ -468,5 +540,10 @@ if (typeof path.key === 'number') {

}
// TODO: add test for this case (a streamed list nested under a list).
/* c8 ignore next 7 */
if (fieldGroup._streamUsage !== undefined) {
return fieldGroup._streamUsage;
}
// validation only allows equivalent streams on multiple fields, so it is
// safe to only check the first fieldNode for the stream directive
const stream = getDirectiveValues(GraphQLStreamDirective, fieldNodes[0], exeContext.variableValues);
const stream = getDirectiveValues(GraphQLStreamDirective, fieldGroup[0].node, exeContext.variableValues);
if (!stream) {

@@ -480,6 +557,14 @@ return;

invariant(stream['initialCount'] >= 0, 'initialCount must be a positive integer');
return {
invariant(exeContext.operation.operation !== 'subscription', '`@stream` directive not supported on subscription operations. Disable `@stream` by setting the `if` argument to `false`.');
const streamedFieldGroup = fieldGroup.map(fieldDetails => ({
node: fieldDetails.node,
deferUsage: undefined,
}));
const streamUsage = {
initialCount: stream['initialCount'],
label: typeof stream['label'] === 'string' ? stream['label'] : undefined,
fieldGroup: streamedFieldGroup,
};
fieldGroup._streamUsage = streamUsage;
return streamUsage;
}

@@ -490,14 +575,36 @@ /**

*/
async function completeAsyncIteratorValue(exeContext, itemType, fieldNodes, info, path, iterator, asyncPayloadRecord) {
async function completeAsyncIteratorValue(exeContext, itemType, fieldGroup, info, path, asyncIterator, incrementalContext, deferMap) {
exeContext.signal?.addEventListener('abort', () => {
iterator.return?.();
asyncIterator.return?.();
});
const errors = asyncPayloadRecord?.errors ?? exeContext.errors;
const stream = getStreamValues(exeContext, fieldNodes, path);
let containsPromise = false;
const completedResults = [];
let index = 0;
const streamUsage = getStreamUsage(exeContext, fieldGroup, path);
while (true) {
if (stream && typeof stream.initialCount === 'number' && index >= stream.initialCount) {
executeStreamIterator(index, iterator, exeContext, fieldNodes, info, itemType, path, stream.label, asyncPayloadRecord);
if (streamUsage && index >= streamUsage.initialCount) {
const streamItemQueue = buildAsyncStreamItemQueue(index, path, asyncIterator, exeContext, streamUsage.fieldGroup, info, itemType);
const returnFn = asyncIterator.return;
let streamRecord;
if (returnFn === undefined) {
streamRecord = {
label: streamUsage.label,
path,
streamItemQueue,
};
}
else {
streamRecord = {
label: streamUsage.label,
path,
streamItemQueue,
earlyReturn: returnFn.bind(asyncIterator),
};
if (exeContext.cancellableStreams === undefined) {
exeContext.cancellableStreams = new Set();
}
exeContext.cancellableStreams.add(streamRecord);
}
const context = incrementalContext ?? exeContext;
addIncrementalDataRecords(context, [streamRecord]);
break;

@@ -508,19 +615,33 @@ }

try {
iteration = await iterator.next();
if (iteration.done) {
break;
}
iteration = await asyncIterator.next();
}
catch (rawError) {
const coercedError = coerceError(rawError);
const error = locatedError(coercedError, fieldNodes, pathToArray(itemPath));
completedResults.push(handleFieldError(error, itemType, errors));
throw locatedError(coerceError(rawError), toNodes(fieldGroup), pathToArray(path));
}
// TODO: add test case for stream returning done before initialCount
/* c8 ignore next 3 */
if (iteration.done) {
break;
}
if (completeListItemValue(iteration.value, completedResults, errors, exeContext, itemType, fieldNodes, info, itemPath, asyncPayloadRecord)) {
const item = iteration.value;
// TODO: add tests for stream backed by asyncIterator that returns a promise
/* c8 ignore start */
if (isPromise(item)) {
completedResults.push(completePromisedListItemValue(item, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap));
containsPromise = true;
}
index += 1;
else if (
/* c8 ignore stop */
completeListItemValue(item, completedResults, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap)
// TODO: add tests for stream backed by asyncIterator that completes to a promise
/* c8 ignore start */
) {
containsPromise = true;
}
/* c8 ignore stop */
index++;
}
return containsPromise ? Promise.all(completedResults) : completedResults;
return containsPromise
? /* c8 ignore start */ Promise.all(completedResults)
: /* c8 ignore stop */ completedResults;
}

@@ -531,8 +652,7 @@ /**

*/
function completeListValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord) {
function completeListValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
const itemType = returnType.ofType;
const errors = asyncPayloadRecord?.errors ?? exeContext.errors;
if (isAsyncIterable(result)) {
const iterator = result[Symbol.asyncIterator]();
return completeAsyncIteratorValue(exeContext, itemType, fieldNodes, info, path, iterator, asyncPayloadRecord);
const asyncIterator = result[Symbol.asyncIterator]();
return completeAsyncIteratorValue(exeContext, itemType, fieldGroup, info, path, asyncIterator, incrementalContext, deferMap);
}

@@ -542,22 +662,37 @@ if (!isIterableObject(result)) {

}
const stream = getStreamValues(exeContext, fieldNodes, path);
return completeIterableValue(exeContext, itemType, fieldGroup, info, path, result, incrementalContext, deferMap);
}
function completeIterableValue(exeContext, itemType, fieldGroup, info, path, items, incrementalContext, deferMap) {
// This is specified as a simple map, however we're optimizing the path
// where the list contains no Promises by avoiding creating another Promise.
let containsPromise = false;
let previousAsyncPayloadRecord = asyncPayloadRecord;
const completedResults = [];
let index = 0;
for (const item of result) {
const streamUsage = getStreamUsage(exeContext, fieldGroup, path);
const iterator = items[Symbol.iterator]();
let iteration = iterator.next();
while (!iteration.done) {
const item = iteration.value;
if (streamUsage && index >= streamUsage.initialCount) {
const streamRecord = {
label: streamUsage.label,
path,
streamItemQueue: buildSyncStreamItemQueue(item, index, path, iterator, exeContext, streamUsage.fieldGroup, info, itemType),
};
const context = incrementalContext ?? exeContext;
addIncrementalDataRecords(context, [streamRecord]);
break;
}
// No need to modify the info object containing the path,
// since from here on it is not ever accessed by resolver functions.
const itemPath = addPath(path, index, undefined);
if (stream && typeof stream.initialCount === 'number' && index >= stream.initialCount) {
previousAsyncPayloadRecord = executeStreamField(path, itemPath, item, exeContext, fieldNodes, info, itemType, stream.label, previousAsyncPayloadRecord);
index++;
continue;
if (isPromise(item)) {
completedResults.push(completePromisedListItemValue(item, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap));
containsPromise = true;
}
if (completeListItemValue(item, completedResults, errors, exeContext, itemType, fieldNodes, info, itemPath, asyncPayloadRecord)) {
else if (completeListItemValue(item, completedResults, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap)) {
containsPromise = true;
}
index++;
iteration = iterator.next();
}

@@ -571,11 +706,5 @@ return containsPromise ? Promise.all(completedResults) : completedResults;

*/
function completeListItemValue(item, completedResults, errors, exeContext, itemType, fieldNodes, info, itemPath, asyncPayloadRecord) {
function completeListItemValue(item, completedResults, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap) {
try {
let completedItem;
if (isPromise(item)) {
completedItem = item.then(resolved => completeValue(exeContext, itemType, fieldNodes, info, itemPath, resolved, asyncPayloadRecord));
}
else {
completedItem = completeValue(exeContext, itemType, fieldNodes, info, itemPath, item, asyncPayloadRecord);
}
const completedItem = completeValue(exeContext, itemType, fieldGroup, info, itemPath, item, incrementalContext, deferMap);
if (isPromise(completedItem)) {

@@ -585,7 +714,4 @@ // Note: we don't rely on a `catch` method, but we do expect "thenable"

completedResults.push(completedItem.then(undefined, rawError => {
rawError = coerceError(rawError);
const error = locatedError(rawError, fieldNodes, pathToArray(itemPath));
const handledError = handleFieldError(error, itemType, errors);
filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
return handledError;
handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
return null;
}));

@@ -597,10 +723,21 @@ return true;

catch (rawError) {
const coercedError = coerceError(rawError);
const error = locatedError(coercedError, fieldNodes, pathToArray(itemPath));
const handledError = handleFieldError(error, itemType, errors);
filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
completedResults.push(handledError);
handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
completedResults.push(null);
}
return false;
}
// Complete a single promised list item: await the item, complete it at
// `itemPath`, and await the completion result if it is also a promise.
// Errors are recorded against the item path and the item becomes null,
// so one failed item never rejects the whole list.
async function completePromisedListItemValue(item, exeContext, itemType, fieldGroup, info, itemPath, incrementalContext, deferMap) {
    try {
        const value = await item;
        const completed = completeValue(exeContext, itemType, fieldGroup, info, itemPath, value, incrementalContext, deferMap);
        return isPromise(completed) ? await completed : completed;
    }
    catch (rawError) {
        handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
        return null;
    }
}
/**

@@ -635,3 +772,3 @@ * Complete a Scalar or Enum by serializing to a valid value, returning

*/
function completeAbstractValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord) {
function completeAbstractValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
const resolveTypeFn = returnType.resolveType ?? exeContext.typeResolver;

@@ -641,9 +778,9 @@ const contextValue = exeContext.contextValue;

if (isPromise(runtimeType)) {
return runtimeType.then(resolvedRuntimeType => completeObjectValue(exeContext, ensureValidRuntimeType(resolvedRuntimeType, exeContext, returnType, fieldNodes, info, result), fieldNodes, info, path, result, asyncPayloadRecord));
return runtimeType.then(resolvedRuntimeType => completeObjectValue(exeContext, ensureValidRuntimeType(resolvedRuntimeType, exeContext, returnType, fieldGroup, info, result), fieldGroup, info, path, result, incrementalContext, deferMap));
}
return completeObjectValue(exeContext, ensureValidRuntimeType(runtimeType, exeContext, returnType, fieldNodes, info, result), fieldNodes, info, path, result, asyncPayloadRecord);
return completeObjectValue(exeContext, ensureValidRuntimeType(runtimeType, exeContext, returnType, fieldGroup, info, result), fieldGroup, info, path, result, incrementalContext, deferMap);
}
function ensureValidRuntimeType(runtimeTypeName, exeContext, returnType, fieldNodes, info, result) {
function ensureValidRuntimeType(runtimeTypeName, exeContext, returnType, fieldGroup, info, result) {
if (runtimeTypeName == null) {
throw createGraphQLError(`Abstract type "${returnType.name}" must resolve to an Object type at runtime for field "${info.parentType.name}.${info.fieldName}". Either the "${returnType.name}" type should provide a "resolveType" function or each possible type should provide an "isTypeOf" function.`, { nodes: fieldNodes });
throw createGraphQLError(`Abstract type "${returnType.name}" must resolve to an Object type at runtime for field "${info.parentType.name}.${info.fieldName}". Either the "${returnType.name}" type should provide a "resolveType" function or each possible type should provide an "isTypeOf" function.`, { nodes: toNodes(fieldGroup) });
}

@@ -661,9 +798,9 @@ // releases before 16.0.0 supported returning `GraphQLObjectType` from `resolveType`

if (runtimeType == null) {
throw createGraphQLError(`Abstract type "${returnType.name}" was resolved to a type "${runtimeTypeName}" that does not exist inside the schema.`, { nodes: fieldNodes });
throw createGraphQLError(`Abstract type "${returnType.name}" was resolved to a type "${runtimeTypeName}" that does not exist inside the schema.`, { nodes: toNodes(fieldGroup) });
}
if (!isObjectType(runtimeType)) {
throw createGraphQLError(`Abstract type "${returnType.name}" was resolved to a non-object type "${runtimeTypeName}".`, { nodes: fieldNodes });
throw createGraphQLError(`Abstract type "${returnType.name}" was resolved to a non-object type "${runtimeTypeName}".`, { nodes: toNodes(fieldGroup) });
}
if (!exeContext.schema.isSubType(returnType, runtimeType)) {
throw createGraphQLError(`Runtime Object type "${runtimeType.name}" is not a possible type for "${returnType.name}".`, { nodes: fieldNodes });
throw createGraphQLError(`Runtime Object type "${runtimeType.name}" is not a possible type for "${returnType.name}".`, { nodes: toNodes(fieldGroup) });
}

@@ -675,3 +812,3 @@ return runtimeType;

*/
function completeObjectValue(exeContext, returnType, fieldNodes, info, path, result, asyncPayloadRecord) {
function completeObjectValue(exeContext, returnType, fieldGroup, info, path, result, incrementalContext, deferMap) {
// If there is an isTypeOf predicate function, call it with the

@@ -685,28 +822,66 @@ // current result. If isTypeOf returns false, then raise an error rather

if (!resolvedIsTypeOf) {
throw invalidReturnTypeError(returnType, result, fieldNodes);
throw invalidReturnTypeError(returnType, result, fieldGroup);
}
return collectAndExecuteSubfields(exeContext, returnType, fieldNodes, path, result, asyncPayloadRecord);
return collectAndExecuteSubfields(exeContext, returnType, fieldGroup, path, result, incrementalContext, deferMap);
});
}
if (!isTypeOf) {
throw invalidReturnTypeError(returnType, result, fieldNodes);
throw invalidReturnTypeError(returnType, result, fieldGroup);
}
}
return collectAndExecuteSubfields(exeContext, returnType, fieldNodes, path, result, asyncPayloadRecord);
return collectAndExecuteSubfields(exeContext, returnType, fieldGroup, path, result, incrementalContext, deferMap);
}
function invalidReturnTypeError(returnType, result, fieldNodes) {
return createGraphQLError(`Expected value of type "${returnType.name}" but got: ${inspect(result)}.`, {
nodes: fieldNodes,
});
// Error raised when a resolved value fails the object type's isTypeOf check;
// located at the field nodes of the requesting field group.
function invalidReturnTypeError(returnType, result, fieldGroup) {
    const message = `Expected value of type "${returnType.name}" but got: ${inspect(result)}.`;
    return createGraphQLError(message, { nodes: toNodes(fieldGroup) });
}
function collectAndExecuteSubfields(exeContext, returnType, fieldNodes, path, result, asyncPayloadRecord) {
// Register a DeferredFragmentRecord for every newly encountered defer usage,
// linking each record to its parent's record (when the usage has a parent)
// so payload delivery can follow the fragment hierarchy. Mutates and returns
// `newDeferMap` (DeferUsage -> DeferredFragmentRecord).
function addNewDeferredFragments(newDeferUsages, newDeferMap, path) {
    for (const deferUsage of newDeferUsages) {
        const parentDeferUsage = deferUsage.parentDeferUsage;
        const parent = parentDeferUsage === undefined
            ? undefined
            : deferredFragmentRecordFromDeferUsage(parentDeferUsage, newDeferMap);
        // Create the record and index it by its originating defer usage.
        newDeferMap.set(deferUsage, {
            path,
            label: deferUsage.label,
            parent,
        });
    }
    return newDeferMap;
}
// Look up the DeferredFragmentRecord created for a given DeferUsage.
// The map is keyed by DeferUsage object identity.
function deferredFragmentRecordFromDeferUsage(deferUsage, deferMap) {
    const deferredFragmentRecord = deferMap.get(deferUsage);
    return deferredFragmentRecord;
}
function collectAndExecuteSubfields(exeContext, returnType, fieldGroup, path, result, incrementalContext, deferMap) {
// Collect sub-fields to execute to complete this value.
const { fields: subFieldNodes, patches: subPatches } = collectSubfields(exeContext, returnType, fieldNodes);
const subFields = executeFields(exeContext, returnType, result, path, subFieldNodes, asyncPayloadRecord);
for (const subPatch of subPatches) {
const { label, fields: subPatchFieldNodes } = subPatch;
executeDeferredFragment(exeContext, returnType, result, subPatchFieldNodes, label, path, asyncPayloadRecord);
const collectedSubfields = collectSubfields(exeContext, returnType, fieldGroup);
let groupedFieldSet = collectedSubfields.groupedFieldSet;
const newDeferUsages = collectedSubfields.newDeferUsages;
if (deferMap === undefined && newDeferUsages.length === 0) {
return executeFields(exeContext, returnType, result, path, groupedFieldSet, incrementalContext, undefined);
}
const subFieldPlan = buildSubFieldPlan(groupedFieldSet, incrementalContext?.deferUsageSet);
groupedFieldSet = subFieldPlan.groupedFieldSet;
const newGroupedFieldSets = subFieldPlan.newGroupedFieldSets;
const newDeferMap = addNewDeferredFragments(newDeferUsages, new Map(deferMap), path);
const subFields = executeFields(exeContext, returnType, result, path, groupedFieldSet, incrementalContext, newDeferMap);
if (newGroupedFieldSets.size > 0) {
const newDeferredGroupedFieldSetRecords = executeDeferredGroupedFieldSets(exeContext, returnType, result, path, incrementalContext?.deferUsageSet, newGroupedFieldSets, newDeferMap);
const context = incrementalContext ?? exeContext;
addIncrementalDataRecords(context, newDeferredGroupedFieldSetRecords);
}
return subFields;
}
// Build (or reuse) the field plan for a sub-selection. The plan is memoized
// directly on the grouped field set because the same set may be completed
// many times — once per item of a list, for example.
function buildSubFieldPlan(originalGroupedFieldSet, deferUsageSet) {
    const cached = originalGroupedFieldSet._fieldPlan;
    if (cached !== undefined) {
        return cached;
    }
    const fieldPlan = buildFieldPlan(originalGroupedFieldSet, deferUsageSet);
    originalGroupedFieldSet._fieldPlan = fieldPlan;
    return fieldPlan;
}
/**

@@ -862,10 +1037,2 @@ * If a resolveType function is not given, then a default resolve behavior is

}
// Normalize an execution result into an async stream of payloads. A plain
// result is yielded once as-is; an incremental result (recognized by its
// `initialResult` key) is flattened into its sequence of payloads.
async function* ensureAsyncIterable(someExecutionResult) {
    if (!('initialResult' in someExecutionResult)) {
        yield someExecutionResult;
        return;
    }
    yield* flattenIncrementalResults(someExecutionResult);
}
function mapSourceToResponse(exeContext, resultOrStream) {

@@ -881,3 +1048,3 @@ if (!isAsyncIterable(resultOrStream)) {

// "ExecuteQuery" algorithm, for which `execute` is also used.
return flattenAsyncIterable(mapAsyncIterator(resultOrStream[Symbol.asyncIterator](), async (payload) => ensureAsyncIterable(await executeImpl(buildPerEventExecutionContext(exeContext, payload))), (error) => {
return mapAsyncIterator(resultOrStream[Symbol.asyncIterator](), (payload) => executeOperation(buildPerEventExecutionContext(exeContext, payload)), (error) => {
const wrappedError = createGraphQLError(error.message, {

@@ -888,3 +1055,3 @@ originalError: error,

throw wrappedError;
}));
});
}

@@ -911,13 +1078,14 @@ function createSourceEventStreamImpl(exeContext) {

}
const { fields: rootFields } = collectFields(schema, fragments, variableValues, rootType, operation.selectionSet);
const [responseName, fieldNodes] = [...rootFields.entries()][0];
const fieldName = fieldNodes[0].name.value;
const fieldDef = getFieldDef(schema, rootType, fieldNodes[0]);
const { groupedFieldSet } = collectFields(schema, fragments, variableValues, rootType, operation);
const firstRootField = [...groupedFieldSet.entries()][0];
const [responseName, fieldGroup] = firstRootField;
const fieldName = fieldGroup[0].node.name.value;
const fieldDef = getFieldDef(schema, rootType, fieldGroup[0].node);
if (!fieldDef) {
throw createGraphQLError(`The subscription field "${fieldName}" is not defined.`, {
nodes: fieldNodes,
nodes: toNodes(fieldGroup),
});
}
const path = addPath(undefined, responseName, rootType.name);
const info = buildResolveInfo(exeContext, fieldDef, fieldNodes, rootType, path);
const info = buildResolveInfo(exeContext, fieldDef, toNodes(fieldGroup), rootType, path);
try {

@@ -928,3 +1096,3 @@ // Implements the "ResolveFieldEventStream" algorithm from GraphQL specification.

// variables scope to fulfill any variable references.
const args = getArgumentValues(fieldDef, fieldNodes[0], variableValues);
const args = getArgumentValues(fieldDef, fieldGroup[0].node, variableValues);
// The resolve function's optional third argument is a context value that

@@ -940,3 +1108,3 @@ // is provided to every resolve function within an execution. It is commonly

return result.then(assertEventStream).then(undefined, error => {
throw locatedError(error, fieldNodes, pathToArray(path));
throw locatedError(error, toNodes(fieldGroup), pathToArray(path));
});

@@ -947,3 +1115,3 @@ }

catch (error) {
throw locatedError(error, fieldNodes, pathToArray(path));
throw locatedError(error, toNodes(fieldGroup), pathToArray(path));
}

@@ -969,343 +1137,210 @@ }

}
function executeDeferredFragment(exeContext, parentType, sourceValue, fields, label, path, parentContext) {
const asyncPayloadRecord = new DeferredFragmentRecord({
label,
path,
parentContext,
exeContext,
});
let promiseOrData;
try {
promiseOrData = executeFields(exeContext, parentType, sourceValue, path, fields, asyncPayloadRecord);
if (isPromise(promiseOrData)) {
promiseOrData = promiseOrData.then(null, e => {
asyncPayloadRecord.errors.push(e);
return null;
});
}
function executeDeferredGroupedFieldSets(exeContext, parentType, sourceValue, path, parentDeferUsages, newGroupedFieldSets, deferMap) {
const newDeferredGroupedFieldSetRecords = [];
for (const [deferUsageSet, groupedFieldSet] of newGroupedFieldSets) {
const deferredFragmentRecords = getDeferredFragmentRecords(deferUsageSet, deferMap);
const deferredGroupedFieldSetRecord = {
path,
deferredFragmentRecords,
result: undefined,
};
const executor = () => executeDeferredGroupedFieldSet(deferredGroupedFieldSetRecord, exeContext, parentType, sourceValue, path, groupedFieldSet, {
errors: undefined,
deferUsageSet,
incrementalDataRecords: undefined,
}, deferMap);
const shouldDeferThisDeferUsageSet = shouldDefer(parentDeferUsages, deferUsageSet);
deferredGroupedFieldSetRecord.result = shouldDeferThisDeferUsageSet
? exeContext.enableEarlyExecution
? new BoxedPromiseOrValue(Promise.resolve().then(executor))
: () => new BoxedPromiseOrValue(executor())
: new BoxedPromiseOrValue(executor());
newDeferredGroupedFieldSetRecords.push(deferredGroupedFieldSetRecord);
}
catch (e) {
asyncPayloadRecord.errors.push(e);
promiseOrData = null;
}
asyncPayloadRecord.addData(promiseOrData);
return newDeferredGroupedFieldSetRecords;
}
function executeStreamField(path, itemPath, item, exeContext, fieldNodes, info, itemType, label, parentContext) {
const asyncPayloadRecord = new StreamRecord({
label,
path: itemPath,
parentContext,
exeContext,
});
let completedItem;
// Decides whether a grouped field set must actually be deferred.
// With no parent defer usages, always defer. Otherwise, defer only when
// the set introduces at least one defer usage the parent did not already
// cover — a fully-covered set was deferred when first encountered and is
// mid-early-execution, so it may run immediately.
function shouldDefer(parentDeferUsages, deferUsages) {
    if (parentDeferUsages === undefined) {
        return true;
    }
    for (const deferUsage of deferUsages) {
        if (!parentDeferUsages.has(deferUsage)) {
            return true;
        }
    }
    return false;
}
function executeDeferredGroupedFieldSet(deferredGroupedFieldSetRecord, exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap) {
let result;
try {
try {
if (isPromise(item)) {
completedItem = item.then(resolved => completeValue(exeContext, itemType, fieldNodes, info, itemPath, resolved, asyncPayloadRecord));
}
else {
completedItem = completeValue(exeContext, itemType, fieldNodes, info, itemPath, item, asyncPayloadRecord);
}
if (isPromise(completedItem)) {
// Note: we don't rely on a `catch` method, but we do expect "thenable"
// to take a second callback for the error case.
completedItem = completedItem.then(undefined, rawError => {
rawError = coerceError(rawError);
const error = locatedError(rawError, fieldNodes, pathToArray(itemPath));
const handledError = handleFieldError(error, itemType, asyncPayloadRecord.errors);
filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
return handledError;
});
}
}
catch (rawError) {
const coercedError = coerceError(rawError);
const error = locatedError(coercedError, fieldNodes, pathToArray(itemPath));
completedItem = handleFieldError(error, itemType, asyncPayloadRecord.errors);
filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
}
result = executeFields(exeContext, parentType, sourceValue, path, groupedFieldSet, incrementalContext, deferMap);
}
catch (error) {
asyncPayloadRecord.errors.push(error);
filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
asyncPayloadRecord.addItems(null);
return asyncPayloadRecord;
return {
deferredGroupedFieldSetRecord,
path: pathToArray(path),
errors: withError(incrementalContext.errors, error),
};
}
let completedItems;
if (isPromise(completedItem)) {
completedItems = completedItem.then(value => [value], error => {
asyncPayloadRecord.errors.push(error);
filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
return null;
});
if (isPromise(result)) {
return result.then(resolved => buildDeferredGroupedFieldSetResult(incrementalContext, deferredGroupedFieldSetRecord, path, resolved), error => ({
deferredGroupedFieldSetRecord,
path: pathToArray(path),
errors: withError(incrementalContext.errors, error),
}));
}
else {
completedItems = [completedItem];
}
asyncPayloadRecord.addItems(completedItems);
return asyncPayloadRecord;
return buildDeferredGroupedFieldSetResult(incrementalContext, deferredGroupedFieldSetRecord, path, result);
}
async function executeStreamIteratorItem(iterator, exeContext, fieldNodes, info, itemType, asyncPayloadRecord, itemPath) {
let item;
try {
const { value, done } = await iterator.next();
if (done) {
asyncPayloadRecord.setIsCompletedIterator();
return { done, value: undefined };
}
item = value;
function buildDeferredGroupedFieldSetResult(incrementalContext, deferredGroupedFieldSetRecord, path, data) {
const { errors, incrementalDataRecords } = incrementalContext;
if (incrementalDataRecords === undefined) {
return {
deferredGroupedFieldSetRecord,
path: pathToArray(path),
result: errors === undefined ? { data } : { data, errors: [...errors.values()] },
incrementalDataRecords,
};
}
catch (rawError) {
const coercedError = coerceError(rawError);
const error = locatedError(coercedError, fieldNodes, pathToArray(itemPath));
const value = handleFieldError(error, itemType, asyncPayloadRecord.errors);
// don't continue if iterator throws
return { done: true, value };
if (errors === undefined) {
return {
deferredGroupedFieldSetRecord,
path: pathToArray(path),
result: { data },
incrementalDataRecords,
};
}
let completedItem;
try {
completedItem = completeValue(exeContext, itemType, fieldNodes, info, itemPath, item, asyncPayloadRecord);
if (isPromise(completedItem)) {
completedItem = completedItem.then(undefined, rawError => {
const error = locatedError(rawError, fieldNodes, pathToArray(itemPath));
const handledError = handleFieldError(error, itemType, asyncPayloadRecord.errors);
filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
return handledError;
});
}
return { done: false, value: completedItem };
}
catch (rawError) {
const error = locatedError(rawError, fieldNodes, pathToArray(itemPath));
const value = handleFieldError(error, itemType, asyncPayloadRecord.errors);
filterSubsequentPayloads(exeContext, itemPath, asyncPayloadRecord);
return { done: false, value };
}
return {
deferredGroupedFieldSetRecord,
path: pathToArray(path),
result: { data, errors: [...errors.values()] },
incrementalDataRecords: filterIncrementalDataRecords(path, errors, incrementalDataRecords),
};
}
async function executeStreamIterator(initialIndex, iterator, exeContext, fieldNodes, info, itemType, path, label, parentContext) {
let index = initialIndex;
let previousAsyncPayloadRecord = parentContext ?? undefined;
while (true) {
const itemPath = addPath(path, index, undefined);
const asyncPayloadRecord = new StreamRecord({
label,
path: itemPath,
parentContext: previousAsyncPayloadRecord,
iterator,
exeContext,
});
let iteration;
try {
iteration = await executeStreamIteratorItem(iterator, exeContext, fieldNodes, info, itemType, asyncPayloadRecord, itemPath);
}
catch (error) {
asyncPayloadRecord.errors.push(error);
filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
asyncPayloadRecord.addItems(null);
// entire stream has errored and bubbled upwards
if (iterator?.return) {
iterator.return().catch(() => {
// ignore errors
});
}
return;
}
const { done, value: completedItem } = iteration;
let completedItems;
if (isPromise(completedItem)) {
completedItems = completedItem.then(value => [value], error => {
asyncPayloadRecord.errors.push(error);
filterSubsequentPayloads(exeContext, path, asyncPayloadRecord);
return null;
});
}
else {
completedItems = [completedItem];
}
asyncPayloadRecord.addItems(completedItems);
if (done) {
break;
}
previousAsyncPayloadRecord = asyncPayloadRecord;
index++;
}
// Resolves each defer usage in the set to its DeferredFragmentRecord
// via the supplied defer map, returning the records as an array.
function getDeferredFragmentRecords(deferUsages, deferMap) {
    const records = [];
    for (const deferUsage of deferUsages) {
        records.push(deferredFragmentRecordFromDeferUsage(deferUsage, deferMap));
    }
    return records;
}
function filterSubsequentPayloads(exeContext, nullPath, currentAsyncRecord) {
const nullPathArray = pathToArray(nullPath);
exeContext.subsequentPayloads.forEach(asyncRecord => {
if (asyncRecord === currentAsyncRecord) {
// don't remove payload from where error originates
return;
}
for (let i = 0; i < nullPathArray.length; i++) {
if (asyncRecord.path[i] !== nullPathArray[i]) {
// asyncRecord points to a path unaffected by this payload
return;
function buildSyncStreamItemQueue(initialItem, initialIndex, streamPath, iterator, exeContext, fieldGroup, info, itemType) {
const streamItemQueue = [];
const enableEarlyExecution = exeContext.enableEarlyExecution;
const firstExecutor = () => {
const initialPath = addPath(streamPath, initialIndex, undefined);
const firstStreamItem = new BoxedPromiseOrValue(completeStreamItem(streamPath, initialPath, initialItem, exeContext, { errors: undefined, incrementalDataRecords: undefined }, fieldGroup, info, itemType));
let iteration = iterator.next();
let currentIndex = initialIndex + 1;
let currentStreamItem = firstStreamItem;
while (!iteration.done) {
// TODO: add test case for early sync termination
/* c8 ignore next 6 */
if (currentStreamItem instanceof BoxedPromiseOrValue) {
const result = currentStreamItem.value;
if (!isPromise(result) && result.errors !== undefined) {
break;
}
}
const itemPath = addPath(streamPath, currentIndex, undefined);
const value = iteration.value;
const currentExecutor = () => completeStreamItem(streamPath, itemPath, value, exeContext, { errors: undefined, incrementalDataRecords: undefined }, fieldGroup, info, itemType);
currentStreamItem = enableEarlyExecution
? new BoxedPromiseOrValue(currentExecutor())
: () => new BoxedPromiseOrValue(currentExecutor());
streamItemQueue.push(currentStreamItem);
iteration = iterator.next();
currentIndex = initialIndex + 1;
}
// asyncRecord path points to nulled error field
if (isStreamPayload(asyncRecord) && asyncRecord.iterator?.return) {
asyncRecord.iterator.return().catch(() => {
// ignore error
});
}
exeContext.subsequentPayloads.delete(asyncRecord);
});
streamItemQueue.push(new BoxedPromiseOrValue({ path: streamPath }));
return firstStreamItem.value;
};
streamItemQueue.push(enableEarlyExecution
? new BoxedPromiseOrValue(Promise.resolve().then(firstExecutor))
: () => new BoxedPromiseOrValue(firstExecutor()));
return streamItemQueue;
}
function getCompletedIncrementalResults(exeContext) {
const incrementalResults = [];
for (const asyncPayloadRecord of exeContext.subsequentPayloads) {
const incrementalResult = {};
if (!asyncPayloadRecord.isCompleted) {
continue;
}
exeContext.subsequentPayloads.delete(asyncPayloadRecord);
if (isStreamPayload(asyncPayloadRecord)) {
const items = asyncPayloadRecord.items;
if (asyncPayloadRecord.isCompletedIterator) {
// async iterable resolver just finished but there may be pending payloads
continue;
}
incrementalResult.items = items;
}
else {
const data = asyncPayloadRecord.data;
incrementalResult.data = data ?? null;
}
incrementalResult.path = asyncPayloadRecord.path;
if (asyncPayloadRecord.label) {
incrementalResult.label = asyncPayloadRecord.label;
}
if (asyncPayloadRecord.errors.length > 0) {
incrementalResult.errors = asyncPayloadRecord.errors;
}
incrementalResults.push(incrementalResult);
// Builds the stream-item queue for an async-iterable stream source.
// The queue is seeded with a single entry that, when evaluated, pulls the
// next item from the async iterator (and re-enqueues a successor for the
// item after it). With early execution enabled the pull starts eagerly;
// otherwise it is wrapped in a thunk and deferred until first requested.
function buildAsyncStreamItemQueue(initialIndex, streamPath, asyncIterator, exeContext, fieldGroup, info, itemType) {
    const queue = [];
    const run = () => getNextAsyncStreamItemResult(queue, streamPath, initialIndex, asyncIterator, exeContext, fieldGroup, info, itemType);
    const firstEntry = exeContext.enableEarlyExecution
        ? new BoxedPromiseOrValue(run())
        : () => new BoxedPromiseOrValue(run());
    queue.push(firstEntry);
    return queue;
}
async function getNextAsyncStreamItemResult(streamItemQueue, streamPath, index, asyncIterator, exeContext, fieldGroup, info, itemType) {
let iteration;
try {
iteration = await asyncIterator.next();
}
return incrementalResults;
}
function yieldSubsequentPayloads(exeContext) {
let isDone = false;
const abortPromise = new Promise((_, reject) => {
exeContext.signal?.addEventListener('abort', () => {
isDone = true;
reject(exeContext.signal?.reason);
});
});
async function next() {
if (isDone) {
return { value: undefined, done: true };
}
await Promise.race([
abortPromise,
...Array.from(exeContext.subsequentPayloads).map(p => p.promise),
]);
if (isDone) {
// a different call to next has exhausted all payloads
return { value: undefined, done: true };
}
const incremental = getCompletedIncrementalResults(exeContext);
const hasNext = exeContext.subsequentPayloads.size > 0;
if (!incremental.length && hasNext) {
return next();
}
if (!hasNext) {
isDone = true;
}
catch (error) {
return {
value: incremental.length ? { incremental, hasNext } : { hasNext },
done: false,
path: streamPath,
errors: [locatedError(error, toNodes(fieldGroup), pathToArray(streamPath))],
};
}
function returnStreamIterators() {
const promises = [];
exeContext.subsequentPayloads.forEach(asyncPayloadRecord => {
if (isStreamPayload(asyncPayloadRecord) && asyncPayloadRecord.iterator?.return) {
promises.push(asyncPayloadRecord.iterator.return());
}
});
return Promise.all(promises);
if (iteration.done) {
return { path: streamPath };
}
return {
[Symbol.asyncIterator]() {
return this;
},
next,
async return() {
await returnStreamIterators();
isDone = true;
return { value: undefined, done: true };
},
async throw(error) {
await returnStreamIterators();
isDone = true;
return Promise.reject(error);
},
};
const itemPath = addPath(streamPath, index, undefined);
const result = completeStreamItem(streamPath, itemPath, iteration.value, exeContext, { errors: undefined, incrementalDataRecords: undefined }, fieldGroup, info, itemType);
const executor = () => getNextAsyncStreamItemResult(streamItemQueue, streamPath, index, asyncIterator, exeContext, fieldGroup, info, itemType);
streamItemQueue.push(exeContext.enableEarlyExecution
? new BoxedPromiseOrValue(executor())
: () => new BoxedPromiseOrValue(executor()));
return result;
}
class DeferredFragmentRecord {
constructor(opts) {
this.type = 'defer';
this.label = opts.label;
this.path = pathToArray(opts.path);
this.parentContext = opts.parentContext;
this.errors = [];
this._exeContext = opts.exeContext;
this._exeContext.subsequentPayloads.add(this);
this.isCompleted = false;
this.data = null;
this.promise = new Promise(resolve => {
this._resolve = MaybePromise => {
resolve(MaybePromise);
};
}).then(data => {
this.data = data;
this.isCompleted = true;
});
function completeStreamItem(streamPath, itemPath, item, exeContext, incrementalContext, fieldGroup, info, itemType) {
if (isPromise(item)) {
return completePromisedValue(exeContext, itemType, fieldGroup, info, itemPath, item, incrementalContext, new Map()).then(resolvedItem => buildStreamItemResult(incrementalContext, streamPath, resolvedItem), error => ({
path: streamPath,
errors: withError(incrementalContext.errors, error),
}));
}
addData(data) {
const parentData = this.parentContext?.promise;
if (parentData) {
this._resolve?.(parentData.then(() => data));
return;
let result;
try {
try {
result = completeValue(exeContext, itemType, fieldGroup, info, itemPath, item, incrementalContext, new Map());
}
this._resolve?.(data);
catch (rawError) {
handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
result = null;
}
}
catch (error) {
return {
path: streamPath,
errors: withError(incrementalContext.errors, error),
};
}
if (isPromise(result)) {
return result
.then(undefined, rawError => {
handleFieldError(rawError, exeContext, itemType, fieldGroup, itemPath, incrementalContext);
return null;
})
.then(resolvedItem => buildStreamItemResult(incrementalContext, streamPath, resolvedItem), error => ({
path: streamPath,
errors: withError(incrementalContext.errors, error),
}));
}
return buildStreamItemResult(incrementalContext, streamPath, result);
}
class StreamRecord {
constructor(opts) {
this.type = 'stream';
this.items = null;
this.label = opts.label;
this.path = pathToArray(opts.path);
this.parentContext = opts.parentContext;
this.iterator = opts.iterator;
this.errors = [];
this._exeContext = opts.exeContext;
this._exeContext.subsequentPayloads.add(this);
this.isCompleted = false;
this.items = null;
this.promise = new Promise(resolve => {
this._resolve = MaybePromise => {
resolve(MaybePromise);
};
}).then(items => {
this.items = items;
this.isCompleted = true;
});
function buildStreamItemResult(incrementalContext, streamPath, item) {
const { errors, incrementalDataRecords } = incrementalContext;
if (incrementalDataRecords === undefined) {
return {
path: streamPath,
item,
errors: errors === undefined ? undefined : [...errors.values()],
incrementalDataRecords,
};
}
addItems(items) {
const parentData = this.parentContext?.promise;
if (parentData) {
this._resolve?.(parentData.then(() => items));
return;
}
this._resolve?.(items);
if (errors === undefined) {
return {
path: streamPath,
item,
errors,
incrementalDataRecords,
};
}
setIsCompletedIterator() {
this.isCompletedIterator = true;
}
return {
path: streamPath,
item,
errors: [...errors.values()],
incrementalDataRecords: filterIncrementalDataRecords(streamPath, errors, incrementalDataRecords),
};
}
// Type guard distinguishing stream records from defer records by the
// discriminating 'type' tag.
function isStreamPayload(asyncPayload) {
    const { type } = asyncPayload;
    return type === 'stream';
}
/**

@@ -1312,0 +1347,0 @@ * This method looks up the field on the given type definition.

{
"name": "@graphql-tools/executor",
"version": "1.2.6",
"version": "2.0.0-alpha-20240606144658-8963c8b8f661638eaee0e101a55f3b6e46cc03ff",
"sideEffects": false,

@@ -9,3 +9,3 @@ "peerDependencies": {

"dependencies": {
"@graphql-tools/utils": "^10.1.1",
"@graphql-tools/utils": "10.3.0-alpha-20240606144658-8963c8b8f661638eaee0e101a55f3b6e46cc03ff",
"@graphql-typed-document-node/core": "3.2.0",

@@ -12,0 +12,0 @@ "@repeaterjs/repeater": "^3.0.4",

@@ -1,9 +0,5 @@

import { DocumentNode, FieldNode, FragmentDefinitionNode, GraphQLError, GraphQLField, GraphQLFieldResolver, GraphQLFormattedError, GraphQLObjectType, GraphQLResolveInfo, GraphQLSchema, GraphQLTypeResolver, OperationDefinitionNode } from 'graphql';
import { DocumentNode, FieldNode, FragmentDefinitionNode, GraphQLError, GraphQLField, GraphQLFieldResolver, GraphQLObjectType, GraphQLResolveInfo, GraphQLSchema, GraphQLTypeResolver, OperationDefinitionNode } from 'graphql';
import { Maybe, MaybePromise, Path } from '@graphql-tools/utils';
import { TypedDocumentNode } from '@graphql-typed-document-node/core';
export interface SingularExecutionResult<TData = any, TExtensions = any> {
errors?: ReadonlyArray<GraphQLError>;
data?: TData | null;
extensions?: TExtensions;
}
import { CancellableStreamRecord, IncrementalDataRecord, IncrementalExecutionResults, InitialIncrementalExecutionResult, SingularExecutionResult, SubsequentIncrementalExecutionResult } from './types.js';
/**

@@ -44,59 +40,8 @@ * Terminology

subscribeFieldResolver: GraphQLFieldResolver<any, TContext>;
errors: Array<GraphQLError>;
subsequentPayloads: Set<AsyncPayloadRecord>;
signal?: AbortSignal;
enableEarlyExecution: boolean;
signal: AbortSignal | undefined;
errors: Map<Path | undefined, GraphQLError> | undefined;
cancellableStreams: Set<CancellableStreamRecord> | undefined;
incrementalDataRecords: Array<IncrementalDataRecord> | undefined;
}
export interface FormattedExecutionResult<TData = Record<string, unknown>, TExtensions = Record<string, unknown>> {
errors?: ReadonlyArray<GraphQLFormattedError>;
data?: TData | null;
extensions?: TExtensions;
}
export interface IncrementalExecutionResults<TData = Record<string, unknown>, TExtensions = Record<string, unknown>> {
initialResult: InitialIncrementalExecutionResult<TData, TExtensions>;
subsequentResults: AsyncGenerator<SubsequentIncrementalExecutionResult<TData, TExtensions>, void, void>;
}
export interface InitialIncrementalExecutionResult<TData = Record<string, unknown>, TExtensions = Record<string, unknown>> extends SingularExecutionResult<TData, TExtensions> {
hasNext: boolean;
incremental?: ReadonlyArray<IncrementalResult<TData, TExtensions>>;
extensions?: TExtensions;
}
export interface FormattedInitialIncrementalExecutionResult<TData = Record<string, unknown>, TExtensions = Record<string, unknown>> extends FormattedExecutionResult<TData, TExtensions> {
hasNext: boolean;
incremental?: ReadonlyArray<FormattedIncrementalResult<TData, TExtensions>>;
extensions?: TExtensions;
}
export interface SubsequentIncrementalExecutionResult<TData = Record<string, unknown>, TExtensions = Record<string, unknown>> {
hasNext: boolean;
incremental?: ReadonlyArray<IncrementalResult<TData, TExtensions>>;
extensions?: TExtensions;
}
export interface FormattedSubsequentIncrementalExecutionResult<TData = Record<string, unknown>, TExtensions = Record<string, unknown>> {
hasNext: boolean;
incremental?: ReadonlyArray<FormattedIncrementalResult<TData, TExtensions>>;
extensions?: TExtensions;
}
export interface IncrementalDeferResult<TData = Record<string, unknown>, TExtensions = Record<string, unknown>> extends SingularExecutionResult<TData, TExtensions> {
path?: ReadonlyArray<string | number>;
label?: string;
}
export interface FormattedIncrementalDeferResult<TData = Record<string, unknown>, TExtensions = Record<string, unknown>> extends FormattedExecutionResult<TData, TExtensions> {
path?: ReadonlyArray<string | number>;
label?: string;
}
export interface IncrementalStreamResult<TData = Array<unknown>, TExtensions = Record<string, unknown>> {
errors?: ReadonlyArray<GraphQLError>;
items?: TData | null;
path?: ReadonlyArray<string | number>;
label?: string;
extensions?: TExtensions;
}
export interface FormattedIncrementalStreamResult<TData = Array<unknown>, TExtensions = Record<string, unknown>> {
errors?: ReadonlyArray<GraphQLFormattedError>;
items?: TData | null;
path?: ReadonlyArray<string | number>;
label?: string;
extensions?: TExtensions;
}
export type IncrementalResult<TData = Record<string, unknown>, TExtensions = Record<string, unknown>> = IncrementalDeferResult<TData, TExtensions> | IncrementalStreamResult<TData, TExtensions>;
export type FormattedIncrementalResult<TData = Record<string, unknown>, TExtensions = Record<string, unknown>> = FormattedIncrementalDeferResult<TData, TExtensions> | FormattedIncrementalStreamResult<TData, TExtensions>;
export interface ExecutionArgs<TData = any, TVariables = any, TContext = any> {

@@ -112,2 +57,3 @@ schema: GraphQLSchema;

subscribeFieldResolver?: Maybe<GraphQLFieldResolver<any, TContext>>;
enableEarlyExecution?: Maybe<boolean>;
signal?: AbortSignal;

@@ -210,45 +156,2 @@ }

export declare function flattenIncrementalResults<TData>(incrementalResults: IncrementalExecutionResults<TData>): AsyncGenerator<SubsequentIncrementalExecutionResult<TData, Record<string, unknown>>, void, void>;
declare class DeferredFragmentRecord {
type: 'defer';
errors: Array<GraphQLError>;
label: string | undefined;
path: Array<string | number>;
promise: Promise<void>;
data: Record<string, unknown> | null;
parentContext: AsyncPayloadRecord | undefined;
isCompleted: boolean;
_exeContext: ExecutionContext;
_resolve?: (arg: MaybePromise<Record<string, unknown> | null>) => void;
constructor(opts: {
label: string | undefined;
path: Path | undefined;
parentContext: AsyncPayloadRecord | undefined;
exeContext: ExecutionContext;
});
addData(data: MaybePromise<Record<string, unknown> | null>): void;
}
declare class StreamRecord {
type: 'stream';
errors: Array<GraphQLError>;
label: string | undefined;
path: Array<string | number>;
items: Array<unknown> | null;
promise: Promise<void>;
parentContext: AsyncPayloadRecord | undefined;
iterator: AsyncIterator<unknown> | undefined;
isCompletedIterator?: boolean;
isCompleted: boolean;
_exeContext: ExecutionContext;
_resolve?: (arg: MaybePromise<Array<unknown> | null>) => void;
constructor(opts: {
label: string | undefined;
path: Path | undefined;
iterator?: AsyncIterator<unknown>;
parentContext: AsyncPayloadRecord | undefined;
exeContext: ExecutionContext;
});
addItems(items: MaybePromise<Array<unknown> | null>): void;
setIsCompletedIterator(): void;
}
type AsyncPayloadRecord = DeferredFragmentRecord | StreamRecord;
/**

@@ -267,2 +170,1 @@ * This method looks up the field on the given type definition.

export declare function isIncrementalResult<TData>(result: SingularExecutionResult<TData> | IncrementalExecutionResults<TData>): result is IncrementalExecutionResults<TData>;
export {};

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc