graphql - npm Package Compare versions

Comparing version 17.0.0-alpha.3.canary.pr.4026.5922420b3b235970ee230497190e28c8290c8f16 to 17.0.0-alpha.3.canary.pr.4026.5e657d31b3abdc38acd6bb21c50ed3a41aa33905

jsutils/memoizeByFirst.d.ts


execution/execute.js

@@ -20,2 +20,3 @@ 'use strict';
 const memoize3_js_1 = require('../jsutils/memoize3.js');
+const memoizeByFirst_js_1 = require('../jsutils/memoizeByFirst.js');
 const Path_js_1 = require('../jsutils/Path.js');

@@ -40,10 +41,9 @@ const promiseForObject_js_1 = require('../jsutils/promiseForObject.js');
 /**
- * A memoized function for building subfield plans with regard to the return
- * type. Memoizing ensures the subfield plans are not repeatedly calculated, which
+ * A memoized collection of relevant subfields with regard to the return
+ * type. Memoizing ensures the subfields are not repeatedly calculated, which
  * saves overhead when resolving lists of values.
  */
-const buildSubFieldPlan = (0, memoize3_js_1.memoize3)(
-  (exeContext, returnType, fieldGroup, deferUsages) => {
-    const { groupedFieldSet: subGroupedFieldSet, newDeferUsages } = (0,
-    collectFields_js_1.collectSubfields)(
+const collectSubfields = (0, memoize3_js_1.memoize3)(
+  (exeContext, returnType, fieldGroup) =>
+    (0, collectFields_js_1.collectSubfields)(
       exeContext.schema,

@@ -55,12 +55,11 @@ exeContext.fragments,
       fieldGroup,
-    );
-    return {
-      ...(0, buildFieldPlan_js_1.buildFieldPlan)(
-        subGroupedFieldSet,
-        deferUsages,
-      ),
-      newDeferUsages,
-    };
-  },
+    ),
 );
+const buildSubFieldPlan = (0, memoizeByFirst_js_1.memoizeByFirst)(
+  (originalGroupedFieldSet, deferUsageSet) =>
+    (0, buildFieldPlan_js_1.buildFieldPlan)(
+      originalGroupedFieldSet,
+      deferUsageSet,
+    ),
+);
 const UNEXPECTED_EXPERIMENTAL_DIRECTIVES =

@@ -144,5 +143,64 @@ 'The provided schema unexpectedly contains experimental directives (@defer or @stream). These directives may only be utilized if experimental execution features are explicitly enabled.';
   // in this case is the entire response.
-  const { errors, errorPaths, futures, cancellableStreams } = exeContext;
   try {
-    const data = executeOperation(exeContext);
+    let data;
+    const {
+      operation,
+      schema,
+      fragments,
+      variableValues,
+      rootValue,
+      errors,
+      errorPaths,
+      futures,
+      cancellableStreams,
+    } = exeContext;
+    const rootType = schema.getRootType(operation.operation);
+    if (rootType == null) {
+      throw new GraphQLError_js_1.GraphQLError(
+        `Schema is not configured to execute ${operation.operation} operation.`,
+        { nodes: operation },
+      );
+    }
+    const { groupedFieldSet: nonPartitionedGroupedFieldSet, newDeferUsages } =
+      (0, collectFields_js_1.collectFields)(
+        schema,
+        fragments,
+        variableValues,
+        rootType,
+        operation,
+      );
+    if (newDeferUsages.length === 0) {
+      data = executeRootGroupedFieldSet(
+        exeContext,
+        operation.operation,
+        rootType,
+        rootValue,
+        nonPartitionedGroupedFieldSet,
+        undefined,
+      );
+    } else {
+      const { groupedFieldSet, newGroupedFieldSets } = (0,
+      buildFieldPlan_js_1.buildFieldPlan)(nonPartitionedGroupedFieldSet);
+      const newDeferMap = addNewDeferredFragments(newDeferUsages, new Map());
+      data = executeRootGroupedFieldSet(
+        exeContext,
+        operation.operation,
+        rootType,
+        rootValue,
+        groupedFieldSet,
+        newDeferMap,
+      );
+      if (newGroupedFieldSets.size > 0) {
+        const newDeferredGroupedFieldSetRecords =
+          executeDeferredGroupedFieldSets(
+            exeContext,
+            rootType,
+            rootValue,
+            undefined,
+            newGroupedFieldSets,
+            newDeferMap,
+          );
+        futures.push(...newDeferredGroupedFieldSetRecords);
+      }
+    }
     if ((0, isPromise_js_1.isPromise)(data)) {

@@ -179,3 +237,3 @@ return data.then(
   ) {
-    const filteredFutures = filterFutures(errorPaths, futures);
+    const filteredFutures = filterFutures(undefined, errorPaths, futures);
     if (filteredFutures.length > 0) {

@@ -191,3 +249,3 @@ return (0, IncrementalPublisher_js_1.buildIncrementalResponse)(
 }
-function filterFutures(errorPaths, futures) {
+function filterFutures(initialPath, errorPaths, futures) {
   if (errorPaths.size === 0) {

@@ -202,12 +260,23 @@ return futures;
       : future.streamRecord.path;
-    while (currentPath !== undefined) {
+    if (errorPaths.has(currentPath)) {
+      continue;
+    }
+    const paths = [currentPath];
+    let filtered = false;
+    while (currentPath !== initialPath) {
+      // Because currentPath leads to initialPath or is undefined, and the
+      // loop will exit if initialPath is undefined, currentPath must be
+      // defined.
+      // TODO: Consider, however, adding an invariant.
+      // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+      currentPath = currentPath.prev;
       if (errorPaths.has(currentPath)) {
+        filtered = true;
         break;
       }
-      currentPath = currentPath.prev;
+      paths.push(currentPath);
     }
-    if (errorPaths.has(currentPath)) {
-      continue;
+    if (!filtered) {
+      filteredFutures.push(future);
     }
-    filteredFutures.push(future);
   }

@@ -329,75 +398,43 @@ return filteredFutures;
 }
-/**
- * Implements the "Executing operations" section of the spec.
- */
-function executeOperation(exeContext) {
-  const { operation, schema, fragments, variableValues, rootValue, futures } =
-    exeContext;
-  const rootType = schema.getRootType(operation.operation);
-  if (rootType == null) {
-    throw new GraphQLError_js_1.GraphQLError(
-      `Schema is not configured to execute ${operation.operation} operation.`,
-      { nodes: operation },
-    );
-  }
-  const { groupedFieldSet: originalGroupedFieldSet, newDeferUsages } = (0,
-  collectFields_js_1.collectFields)(
-    schema,
-    fragments,
-    variableValues,
-    rootType,
-    operation,
-  );
-  const { groupedFieldSet, newGroupedFieldSets } = (0,
-  buildFieldPlan_js_1.buildFieldPlan)(originalGroupedFieldSet);
-  const newDeferMap = addNewDeferredFragments(newDeferUsages);
-  const path = undefined;
-  let result;
-  switch (operation.operation) {
+function executeRootGroupedFieldSet(
+  exeContext,
+  operation,
+  rootType,
+  rootValue,
+  groupedFieldSet,
+  deferMap,
+) {
+  switch (operation) {
     case ast_js_1.OperationTypeNode.QUERY:
-      result = executeFields(
+      return executeFields(
         exeContext,
         rootType,
         rootValue,
-        path,
+        undefined,
         groupedFieldSet,
         undefined,
-        newDeferMap,
+        deferMap,
       );
-      break;
     case ast_js_1.OperationTypeNode.MUTATION:
-      result = executeFieldsSerially(
+      return executeFieldsSerially(
         exeContext,
         rootType,
         rootValue,
-        path,
+        undefined,
         groupedFieldSet,
-        newDeferMap,
+        deferMap,
      );
-      break;
     case ast_js_1.OperationTypeNode.SUBSCRIPTION:
       // TODO: deprecate `subscribe` and move all logic here
       // Temporary solution until we finish merging execute and subscribe together
-      result = executeFields(
+      return executeFields(
         exeContext,
         rootType,
         rootValue,
-        path,
+        undefined,
         groupedFieldSet,
         undefined,
-        newDeferMap,
+        deferMap,
       );
   }
-  if (newGroupedFieldSets.size > 0) {
-    const newDeferredGroupedFieldSetRecords = executeDeferredGroupedFieldSets(
-      exeContext,
-      rootType,
-      rootValue,
-      path,
-      newGroupedFieldSets,
-      newDeferMap,
-    );
-    futures.push(...newDeferredGroupedFieldSetRecords);
-  }
-  return result;
 }

@@ -1292,9 +1329,3 @@ /**
  */
-function addNewDeferredFragments(newDeferUsages, deferMap, path) {
-  if (newDeferUsages.length === 0) {
-    // Given no DeferUsages, return the existing map, creating one if necessary.
-    return deferMap ?? new Map();
-  }
-  // Create a copy of the old map.
-  const newDeferMap = deferMap === undefined ? new Map() : new Map(deferMap);
+function addNewDeferredFragments(newDeferUsages, newDeferMap, path) {
   // For each new deferUsage object:

@@ -1333,10 +1364,53 @@ for (const newDeferUsage of newDeferUsages) {
   // Collect sub-fields to execute to complete this value.
-  const { groupedFieldSet, newGroupedFieldSets, newDeferUsages } =
-    buildSubFieldPlan(
+  const { groupedFieldSet: nonPartitionedGroupedFieldSet, newDeferUsages } =
+    collectSubfields(exeContext, returnType, fieldGroup);
+  if (newDeferUsages.length === 0) {
+    if (deferMap === undefined) {
+      return executeFields(
+        exeContext,
+        returnType,
+        result,
+        path,
+        nonPartitionedGroupedFieldSet,
+        incrementalContext,
+        undefined,
+      );
+    }
+    const { groupedFieldSet, newGroupedFieldSets } = buildSubFieldPlan(
+      nonPartitionedGroupedFieldSet,
+      incrementalContext?.deferUsageSet,
+    );
+    const subFields = executeFields(
       exeContext,
       returnType,
-      fieldGroup,
-      incrementalContext?.deferUsageSet,
+      result,
+      path,
+      groupedFieldSet,
+      incrementalContext,
+      deferMap,
     );
-  const newDeferMap = addNewDeferredFragments(newDeferUsages, deferMap, path);
+    if (newGroupedFieldSets.size > 0) {
+      const newDeferredGroupedFieldSetRecords = executeDeferredGroupedFieldSets(
+        exeContext,
+        returnType,
+        result,
+        path,
+        newGroupedFieldSets,
+        deferMap,
+      );
+      (incrementalContext ?? exeContext).futures.push(
+        ...newDeferredGroupedFieldSetRecords,
+      );
+    }
+    return subFields;
+  }
+  const { groupedFieldSet, newGroupedFieldSets } = buildSubFieldPlan(
+    nonPartitionedGroupedFieldSet,
+    incrementalContext?.deferUsageSet,
+  );
+  const newDeferMap = addNewDeferredFragments(
+    newDeferUsages,
+    new Map(deferMap),
+    path,
+  );
   const subFields = executeFields(

@@ -1695,3 +1769,3 @@ exeContext,
       data: resolved,
-      futures: filterFutures(errorPaths, futures),
+      futures: filterFutures(path, errorPaths, futures),
       errors,

@@ -1714,3 +1788,3 @@ }),
       data,
-      futures: filterFutures(errorPaths, futures),
+      futures: filterFutures(path, errorPaths, futures),
       errors,

@@ -1735,2 +1809,3 @@ };
     streamRecord,
+    itemPath: initialPath,
     executor: (incrementalContext) =>

@@ -1757,2 +1832,3 @@ Promise.resolve().then(() => {
     streamRecord,
+    itemPath: currentPath,
     executor: (nextIncrementalContext) =>

@@ -1783,4 +1859,6 @@ executor(currentPath, item, nextIncrementalContext),
 ) {
+  const initialPath = (0, Path_js_1.addPath)(path, initialIndex, undefined);
   const firstStreamItems = new IncrementalPublisher_js_1.StreamItemsRecord({
     streamRecord,
+    itemPath: initialPath,
     executor: (incrementalContext) =>

@@ -1836,3 +1914,4 @@ Promise.resolve().then(() =>
         path,
-        index + 1,
+        itemPath,
+        index,
         nodes,

@@ -1848,2 +1927,3 @@ asyncIterator,
     path,
+    initialPath,
     initialIndex,

@@ -1856,2 +1936,3 @@ nodes,
     streamRecord,
+    itemPath: initialPath,
     executor: (incrementalContext) =>

@@ -1863,3 +1944,3 @@ Promise.resolve().then(() =>
         path,
-        initialIndex,
+        initialIndex + 1,
         incrementalContext,

@@ -1884,3 +1965,3 @@ nodes,
 ) {
-  const { errors, errorPaths, futures } = incrementalContext;
+  const { path, errors, errorPaths, futures } = incrementalContext;
   if ((0, isPromise_js_1.isPromise)(item)) {

@@ -1900,3 +1981,3 @@ return completePromisedValue(
         items: [resolvedItem],
-        futures: filterFutures(errorPaths, futures),
+        futures: filterFutures(path, errorPaths, futures),
         errors,

@@ -1963,3 +2044,3 @@ }),
         items: [resolvedItem],
-        futures: filterFutures(errorPaths, futures),
+        futures: filterFutures(path, errorPaths, futures),
         errors,

@@ -1980,5 +2061,5 @@ }),
       items: [completedItem],
-      futures: filterFutures(errorPaths, futures),
+      futures: filterFutures(path, errorPaths, futures),
      errors,
    };
  }
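The filterFutures change above stops the upward path walk at an initialPath boundary instead of always walking to the root, so a sub-payload only discards futures nested beneath paths that errored within that sub-payload. A minimal TypeScript sketch of the idea, using simplified stand-in Path and Future shapes (assumptions for illustration, not graphql-js's actual internal types):

// Illustrative stand-ins; the real records carry more fields.
interface Path {
  readonly prev: Path | undefined;
  readonly key: string | number;
}

interface Future {
  readonly path: Path | undefined;
}

function filterFutures(
  initialPath: Path | undefined,
  errorPaths: ReadonlySet<Path>,
  futures: ReadonlyArray<Future>,
): ReadonlyArray<Future> {
  if (errorPaths.size === 0) {
    return futures;
  }
  const kept: Array<Future> = [];
  for (const future of futures) {
    let currentPath = future.path;
    let dropped = false;
    // Walk from the future's own path up toward initialPath, dropping the
    // future if any path along the way has already errored.
    while (currentPath !== undefined && currentPath !== initialPath) {
      if (errorPaths.has(currentPath)) {
        dropped = true;
        break;
      }
      currentPath = currentPath.prev;
    }
    if (!dropped) {
      kept.push(future);
    }
  }
  return kept;
}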

@@ -160,2 +160,3 @@ import type { ObjMap } from '../jsutils/ObjMap.js';
   deferUsageSet: DeferUsageSet | undefined;
+  path: Path | undefined;
   errors: Array<GraphQLError>;

@@ -247,2 +248,3 @@ errorPaths: Set<Path>;
   streamRecord: StreamRecord;
+  itemPath?: Path | undefined;
   executor: (

@@ -249,0 +251,0 @@ incrementalContext: IncrementalContext,

@@ -418,2 +418,3 @@ 'use strict';
       deferUsageSet,
+      path,
       errors: [],

@@ -423,6 +424,6 @@ errorPaths: new Set(),
     };
-    for (const deferredFragmentRecord of this.deferredFragmentRecords) {
+    for (const deferredFragmentRecord of deferredFragmentRecords) {
       deferredFragmentRecord.deferredGroupedFieldSetRecords.push(this);
     }
-    this.result = this.deferredFragmentRecords.some(
+    this.result = deferredFragmentRecords.some(
      (deferredFragmentRecord) => deferredFragmentRecord.id !== undefined,

@@ -461,6 +462,7 @@ )
   constructor(opts) {
-    const { streamRecord, executor } = opts;
+    const { streamRecord, itemPath, executor } = opts;
     this.streamRecord = streamRecord;
     const incrementalContext = {
       deferUsageSet: undefined,
+      path: itemPath,
       errors: [],

@@ -467,0 +469,0 @@ errorPaths: new Set(),

@@ -9,5 +9,2 @@ /**
   R,
-  T extends Array<unknown>,
->(
-  fn: (a1: A1, a2: A2, a3: A3, ...rest: T) => R,
-): (a1: A1, a2: A2, a3: A3, ...rest: T) => R;
+>(fn: (a1: A1, a2: A2, a3: A3) => R): (a1: A1, a2: A2, a3: A3) => R;

@@ -9,3 +9,3 @@ 'use strict';
   let cache0;
-  return function memoized(a1, a2, a3, ...rest) {
+  return function memoized(a1, a2, a3) {
     if (cache0 === undefined) {

@@ -26,3 +26,3 @@ cache0 = new WeakMap();
     if (fnResult === undefined) {
-      fnResult = fn(a1, a2, a3, ...rest);
+      fnResult = fn(a1, a2, a3);
       cache2.set(a3, fnResult);

@@ -29,0 +29,0 @@ }
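The memoize3 changes above drop the trailing ...rest parameters, so the cache is keyed on exactly three object arguments. A minimal sketch of that nested-WeakMap pattern under the narrowed signature (an illustrative reconstruction based on the fragments shown above, not necessarily the package's exact source):

// Memoizes fn on three object arguments using nested WeakMaps, so cached
// results are released when any of the key objects is garbage-collected.
export function memoize3<
  A1 extends object,
  A2 extends object,
  A3 extends object,
  R,
>(fn: (a1: A1, a2: A2, a3: A3) => R): (a1: A1, a2: A2, a3: A3) => R {
  let cache0: WeakMap<A1, WeakMap<A2, WeakMap<A3, R>>> | undefined;
  return function memoized(a1, a2, a3) {
    if (cache0 === undefined) {
      cache0 = new WeakMap();
    }
    let cache1 = cache0.get(a1);
    if (cache1 === undefined) {
      cache1 = new WeakMap();
      cache0.set(a1, cache1);
    }
    let cache2 = cache1.get(a2);
    if (cache2 === undefined) {
      cache2 = new WeakMap();
      cache1.set(a2, cache2);
    }
    let fnResult = cache2.get(a3);
    if (fnResult === undefined) {
      fnResult = fn(a1, a2, a3);
      cache2.set(a3, fnResult);
    }
    return fnResult;
  };
}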

 {
   "name": "graphql",
-  "version": "17.0.0-alpha.3.canary.pr.4026.5922420b3b235970ee230497190e28c8290c8f16",
+  "version": "17.0.0-alpha.3.canary.pr.4026.5e657d31b3abdc38acd6bb21c50ed3a41aa33905",
   "description": "A Query Language and Runtime which can target any service.",

@@ -5,0 +5,0 @@ "license": "MIT",

