@graphql-tools/batch-execute
Comparing version 7.1.3-alpha-07a30dd3.0 to 7.1.3-alpha-12ccae3b.0
import DataLoader from 'dataloader'; | ||
import { ExecutionParams, Executor } from '@graphql-tools/utils'; | ||
export declare function createBatchingExecutor(executor: Executor, dataLoaderOptions?: DataLoader.Options<any, any, any>, extensionsReducer?: (mergedExtensions: Record<string, any>, executionParams: ExecutionParams) => Record<string, any>): Executor; | ||
import { Request, Executor } from '@graphql-tools/utils'; | ||
export declare function createBatchingExecutor(executor: Executor, dataLoaderOptions?: DataLoader.Options<any, any, any>, extensionsReducer?: (mergedExtensions: Record<string, any>, request: Request) => Record<string, any>): Executor; |
import DataLoader from 'dataloader'; | ||
import { ExecutionParams, Executor } from '@graphql-tools/utils'; | ||
export declare function createBatchingExecutor(executor: Executor, dataLoaderOptions?: DataLoader.Options<any, any, any>, extensionsReducer?: (mergedExtensions: Record<string, any>, executionParams: ExecutionParams) => Record<string, any>): Executor; | ||
import { Request, Executor } from '@graphql-tools/utils'; | ||
export declare function createBatchingExecutor(executor: Executor, dataLoaderOptions?: DataLoader.Options<any, any, any>, extensionsReducer?: (mergedExtensions: Record<string, any>, request: Request) => Record<string, any>): Executor; |
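The change in this declaration swaps ExecutionParams for the Request type from @graphql-tools/utils, so a custom extensionsReducer now receives the whole request. A minimal usage sketch under that assumption; the placeholder executor and reducer body are illustrative, not part of the package:

import { createBatchingExecutor } from '@graphql-tools/batch-execute';
import type { Executor, Request } from '@graphql-tools/utils';

// Placeholder executor; in practice this forwards operations to a remote or local schema.
const remoteExecutor: Executor = async () => ({ data: {} });

const batchingExecutor = createBatchingExecutor(
  remoteExecutor,
  undefined, // DataLoader options are unchanged by this diff
  // The reducer's second argument is now the full Request rather than ExecutionParams.
  (mergedExtensions: Record<string, any>, request: Request) =>
    Object.assign(mergedExtensions, request.extensions ?? {})
);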
import DataLoader from 'dataloader'; | ||
import { ExecutionParams, Executor } from '@graphql-tools/utils'; | ||
export declare const getBatchingExecutor: (A1: Record<string, any>, A2: Executor<Record<string, any>>, A3: DataLoader.Options<any, any, any>, A4: (mergedExtensions: Record<string, any>, executionParams: ExecutionParams) => Record<string, any>) => Executor<Record<string, any>>; | ||
import { Request, Executor } from '@graphql-tools/utils'; | ||
export declare const getBatchingExecutor: (A1: Record<string, any>, A2: Executor<Record<string, any>, Record<string, any>>, A3: DataLoader.Options<any, any, any> | undefined, A4: ((mergedExtensions: Record<string, any>, request: Request) => Record<string, any>) | undefined) => Executor<Record<string, any>, Record<string, any>>; |
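getBatchingExecutor is the memoized counterpart; the A1..A4 names come from the memoization helper's generated declaration, and in the new typing the DataLoader options and extensionsReducer slots explicitly accept undefined. A rough sketch, assuming the first argument is the per-operation context object used as the memoization key (the placeholder executor is illustrative, and exact generic parameters may differ in these alpha builds):

import { getBatchingExecutor } from '@graphql-tools/batch-execute';
import type { Executor } from '@graphql-tools/utils';

const remoteExecutor: Executor = async () => ({ data: {} }); // placeholder

// Calls that pass the same context and executor should get back the same batching executor.
const context: Record<string, any> = {};
const batchingExecutor = getBatchingExecutor(context, remoteExecutor, undefined, undefined);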
es5/index.js
@@ -22,3 +22,3 @@ 'use strict'; | ||
} | ||
return null; | ||
throw new Error("Key " + prefixedKey + " is not correctly prefixed"); | ||
} | ||
@@ -61,3 +61,4 @@ | ||
*/ | ||
function mergeExecutionParams(execs, extensionsReducer) { | ||
function mergeRequests(requests, extensionsReducer) { | ||
var e_1, _a; | ||
var mergedVariables = Object.create(null); | ||
@@ -69,18 +70,33 @@ var mergedVariableDefinitions = []; | ||
var operation; | ||
execs.forEach(function (executionParams, index) { | ||
var prefixedExecutionParams = prefixExecutionParams(createPrefix(index), executionParams); | ||
prefixedExecutionParams.document.definitions.forEach(function (def) { | ||
var _a; | ||
if (isOperationDefinition(def)) { | ||
operation = def.operation; | ||
mergedSelections.push.apply(mergedSelections, tslib.__spreadArray([], tslib.__read(def.selectionSet.selections))); | ||
mergedVariableDefinitions.push.apply(mergedVariableDefinitions, tslib.__spreadArray([], tslib.__read(((_a = def.variableDefinitions) !== null && _a !== void 0 ? _a : [])))); | ||
for (var index in requests) { | ||
var request = requests[index]; | ||
var prefixedRequests = prefixRequest(createPrefix(index), request); | ||
try { | ||
for (var _b = (e_1 = void 0, tslib.__values(prefixedRequests.document.definitions)), _c = _b.next(); !_c.done; _c = _b.next()) { | ||
var def = _c.value; | ||
if (isOperationDefinition(def)) { | ||
operation = def.operation; | ||
mergedSelections.push.apply(mergedSelections, tslib.__spreadArray([], tslib.__read(def.selectionSet.selections))); | ||
if (def.variableDefinitions) { | ||
mergedVariableDefinitions.push.apply(mergedVariableDefinitions, tslib.__spreadArray([], tslib.__read(def.variableDefinitions))); | ||
} | ||
} | ||
if (isFragmentDefinition(def)) { | ||
mergedFragmentDefinitions.push(def); | ||
} | ||
} | ||
if (isFragmentDefinition(def)) { | ||
mergedFragmentDefinitions.push(def); | ||
} | ||
catch (e_1_1) { e_1 = { error: e_1_1 }; } | ||
finally { | ||
try { | ||
if (_c && !_c.done && (_a = _b.return)) _a.call(_b); | ||
} | ||
}); | ||
Object.assign(mergedVariables, prefixedExecutionParams.variables); | ||
mergedExtensions = extensionsReducer(mergedExtensions, executionParams); | ||
}); | ||
finally { if (e_1) throw e_1.error; } | ||
} | ||
Object.assign(mergedVariables, prefixedRequests.variables); | ||
mergedExtensions = extensionsReducer(mergedExtensions, request); | ||
} | ||
if (operation == null) { | ||
throw new Error('Could not identify operation type. Did the document only include fragment definitions?'); | ||
} | ||
var mergedOperationDefinition = { | ||
@@ -102,12 +118,14 @@ kind: graphql.Kind.OPERATION_DEFINITION, | ||
extensions: mergedExtensions, | ||
context: execs[0].context, | ||
info: execs[0].info, | ||
context: requests[0].context, | ||
info: requests[0].info, | ||
}; | ||
} | ||
function prefixExecutionParams(prefix, executionParams) { | ||
function prefixRequest(prefix, request) { | ||
var _a; | ||
var document = aliasTopLevelFields(prefix, executionParams.document); | ||
var variableNames = Object.keys(executionParams.variables); | ||
var _b; | ||
var document = aliasTopLevelFields(prefix, request.document); | ||
var executionVariables = (_b = request.variables) !== null && _b !== void 0 ? _b : {}; | ||
var variableNames = Object.keys(executionVariables); | ||
if (variableNames.length === 0) { | ||
return tslib.__assign(tslib.__assign({}, executionParams), { document: document }); | ||
return tslib.__assign(tslib.__assign({}, request), { document: document }); | ||
} | ||
@@ -120,3 +138,3 @@ document = graphql.visit(document, (_a = {}, | ||
var prefixedVariables = variableNames.reduce(function (acc, name) { | ||
acc[prefix + name] = executionParams.variables[name]; | ||
acc[prefix + name] = executionVariables[name]; | ||
return acc; | ||
@@ -242,3 +260,5 @@ }, Object.create(null)); | ||
*/ | ||
function splitResult(mergedResult, numResults) { | ||
function splitResult(_a, numResults) { | ||
var _b, e_1, _c; | ||
var data = _a.data, errors = _a.errors; | ||
var splitResults = []; | ||
@@ -248,45 +268,37 @@ for (var i = 0; i < numResults; i++) { | ||
} | ||
var data = mergedResult.data; | ||
if (data) { | ||
Object.keys(data).forEach(function (prefixedKey) { | ||
var _a; | ||
var _b = parseKey(prefixedKey), index = _b.index, originalKey = _b.originalKey; | ||
if (!splitResults[index].data) { | ||
splitResults[index].data = (_a = {}, _a[originalKey] = data[prefixedKey], _a); | ||
for (var prefixedKey in data) { | ||
var _d = parseKey(prefixedKey), index = _d.index, originalKey = _d.originalKey; | ||
var result = splitResults[index]; | ||
if (result == null) { | ||
continue; | ||
} | ||
if (result.data == null) { | ||
result.data = (_b = {}, _b[originalKey] = data[prefixedKey], _b); | ||
} | ||
else { | ||
splitResults[index].data[originalKey] = data[prefixedKey]; | ||
result.data[originalKey] = data[prefixedKey]; | ||
} | ||
}); | ||
} | ||
} | ||
var errors = mergedResult.errors; | ||
if (errors) { | ||
var newErrors_1 = Object.create(null); | ||
errors.forEach(function (error) { | ||
if (error.path) { | ||
var parsedKey = parseKey(error.path[0]); | ||
if (parsedKey) { | ||
try { | ||
for (var errors_1 = tslib.__values(errors), errors_1_1 = errors_1.next(); !errors_1_1.done; errors_1_1 = errors_1.next()) { | ||
var error = errors_1_1.value; | ||
if (error.path) { | ||
var parsedKey = parseKey(error.path[0]); | ||
var index = parsedKey.index, originalKey = parsedKey.originalKey; | ||
var newError = utils.relocatedError(error, tslib.__spreadArray([originalKey], tslib.__read(error.path.slice(1)))); | ||
if (!newErrors_1[index]) { | ||
newErrors_1[index] = [newError]; | ||
} | ||
else { | ||
newErrors_1[index].push(newError); | ||
} | ||
return; | ||
var errors_2 = (splitResults[index].errors = (splitResults[index].errors || [])); | ||
errors_2.push(newError); | ||
} | ||
} | ||
splitResults.forEach(function (_splitResult, index) { | ||
if (!newErrors_1[index]) { | ||
newErrors_1[index] = [error]; | ||
} | ||
else { | ||
newErrors_1[index].push(error); | ||
} | ||
}); | ||
}); | ||
Object.keys(newErrors_1).forEach(function (index) { | ||
splitResults[index].errors = newErrors_1[index]; | ||
}); | ||
} | ||
catch (e_1_1) { e_1 = { error: e_1_1 }; } | ||
finally { | ||
try { | ||
if (errors_1_1 && !errors_1_1.done && (_c = errors_1.return)) _c.call(errors_1); | ||
} | ||
finally { if (e_1) throw e_1.error; } | ||
} | ||
} | ||
@@ -297,43 +309,48 @@ return splitResults; | ||
function createBatchingExecutor(executor, dataLoaderOptions, extensionsReducer) { | ||
var loader = new DataLoader(createLoadFn(executor, extensionsReducer !== null && extensionsReducer !== void 0 ? extensionsReducer : defaultExtensionsReducer), dataLoaderOptions); | ||
return function (executionParams) { return loader.load(executionParams); }; | ||
if (extensionsReducer === void 0) { extensionsReducer = defaultExtensionsReducer; } | ||
var loader = new DataLoader(createLoadFn(executor, extensionsReducer), dataLoaderOptions); | ||
return function (request) { var _a; return ((_a = request.info) === null || _a === void 0 ? void 0 : _a.operation.operation) === 'subscription' ? executor(request) : loader.load(request); }; | ||
} | ||
function createLoadFn(executor, extensionsReducer) { | ||
var _this = this; | ||
return function (execs) { return tslib.__awaiter(_this, void 0, void 0, function () { | ||
var execBatches, index, exec, currentBatch, operationType, currentOperationType, executionResults; | ||
return tslib.__generator(this, function (_a) { | ||
return function (requests) { return tslib.__awaiter(_this, void 0, void 0, function () { | ||
var execBatches, index, request, currentBatch, operationType, currentOperationType, executionResults; | ||
var _a, _b; | ||
return tslib.__generator(this, function (_c) { | ||
execBatches = []; | ||
index = 0; | ||
exec = execs[index]; | ||
currentBatch = [exec]; | ||
request = requests[index]; | ||
currentBatch = [request]; | ||
execBatches.push(currentBatch); | ||
operationType = graphql.getOperationAST(exec.document, undefined).operation; | ||
while (++index < execs.length) { | ||
currentOperationType = graphql.getOperationAST(execs[index].document, undefined).operation; | ||
operationType = (_a = graphql.getOperationAST(request.document, undefined)) === null || _a === void 0 ? void 0 : _a.operation; | ||
if (operationType == null) { | ||
throw new Error('Could not identify operation type of document.'); | ||
} | ||
while (++index < requests.length) { | ||
currentOperationType = (_b = graphql.getOperationAST(requests[index].document, undefined)) === null || _b === void 0 ? void 0 : _b.operation; | ||
if (operationType == null) { | ||
throw new Error('Could not identify operation type of document.'); | ||
} | ||
if (operationType === currentOperationType) { | ||
currentBatch.push(execs[index]); | ||
currentBatch.push(requests[index]); | ||
} | ||
else { | ||
currentBatch = [execs[index]]; | ||
currentBatch = [requests[index]]; | ||
execBatches.push(currentBatch); | ||
} | ||
} | ||
executionResults = []; | ||
execBatches.forEach(function (execBatch) { | ||
var mergedExecutionParams = mergeExecutionParams(execBatch, extensionsReducer); | ||
executionResults.push(new valueOrPromise.ValueOrPromise(function () { return executor(mergedExecutionParams); })); | ||
executionResults = execBatches.map(function (execBatch) { | ||
var mergedRequests = mergeRequests(execBatch, extensionsReducer); | ||
return new valueOrPromise.ValueOrPromise(function () { return executor(mergedRequests); }); | ||
}); | ||
return [2 /*return*/, valueOrPromise.ValueOrPromise.all(executionResults).then(function (resultBatches) { | ||
var results = []; | ||
resultBatches.forEach(function (resultBatch, index) { | ||
results = results.concat(splitResult(resultBatch, execBatches[index].length)); | ||
}); | ||
return results; | ||
}).resolve()]; | ||
return [2 /*return*/, valueOrPromise.ValueOrPromise.all(executionResults) | ||
.then(function (resultBatches) { | ||
return resultBatches.reduce(function (results, resultBatch, index) { return results.concat(splitResult(resultBatch, execBatches[index].length)); }, new Array()); | ||
}) | ||
.resolve()]; | ||
}); | ||
}); }; | ||
} | ||
function defaultExtensionsReducer(mergedExtensions, executionParams) { | ||
var newExtensions = executionParams.extensions; | ||
function defaultExtensionsReducer(mergedExtensions, request) { | ||
var newExtensions = request.extensions; | ||
if (newExtensions != null) { | ||
@@ -381,2 +398,1 @@ Object.assign(mergedExtensions, newExtensions); | ||
exports.getBatchingExecutor = getBatchingExecutor; | ||
//# sourceMappingURL=index.js.map |
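For orientation on the es5 changes above: mergeRequests (renamed from mergeExecutionParams) folds a batch of same-operation requests into one document by aliasing top-level fields and prefixing variable names, and splitResult later reverses the mapping. A hand-written illustration of the shape, using a made-up 'p0_'/'p1_' prefix in place of the library's internal one:

// Two incoming requests batched in the same tick:
//   { document: query ($id: ID!) { user(id: $id) { name } },     variables: { id: 1 } }
//   { document: query ($id: ID!) { product(id: $id) { title } }, variables: { id: 9 } }
//
// Merged upstream request (the prefix shown is illustrative only):
//   query ($p0_id: ID!, $p1_id: ID!) {
//     p0_user: user(id: $p0_id) { name }
//     p1_product: product(id: $p1_id) { title }
//   }
//   variables: { p0_id: 1, p1_id: 9 }
//
// As the diff shows, extensions are folded together by the extensionsReducer, while the
// merged request carries the first request's context and info.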
{ | ||
"name": "@graphql-tools/batch-execute/es5", | ||
"version": "7.1.3-alpha-07a30dd3.0", | ||
"version": "7.1.3-alpha-12ccae3b.0", | ||
"description": "A set of utils for faster development of GraphQL tools", | ||
@@ -10,3 +10,3 @@ "sideEffects": false, | ||
"dependencies": { | ||
"@graphql-tools/utils": "8.0.0-alpha-07a30dd3.0", | ||
"@graphql-tools/utils": "8.0.0-alpha-12ccae3b.0", | ||
"dataloader": "2.0.0", | ||
@@ -13,0 +13,0 @@ "tslib": "~2.3.0", |
@@ -1,2 +0,2 @@ | ||
export declare function createPrefix(index: number): string; | ||
export declare function createPrefix(index: string): string; | ||
export declare function parseKey(prefixedKey: string): { | ||
@@ -3,0 +3,0 @@ index: number; |
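The only change in this declaration is the index parameter: number becomes string. That matches the merge loop shown in the index.js diffs, which now iterates the batched requests with for...in and therefore hands createPrefix a string key. A tiny self-contained check of that detail:

// for...in yields string keys even over an array, which is why createPrefix now accepts a string.
const requests = [{}, {}]; // stand-ins for two batched requests
for (const index in requests) {
  console.log(typeof index); // logs 'string' twice
}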
@@ -5,2 +5,2 @@ import { ExecutionResult } from 'graphql'; | ||
*/ | ||
export declare function splitResult(mergedResult: ExecutionResult, numResults: number): Array<ExecutionResult>; | ||
export declare function splitResult({ data, errors }: ExecutionResult, numResults: number): Array<ExecutionResult>; |
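splitResult now destructures data and errors straight from the merged ExecutionResult. A sketch of the mapping it performs, with placeholder prefixes standing in for the library's real ones:

// merged:  { data: { <p0>hello: 'hi', <p1>bye: 'bye' } }
// split:   [ { data: { hello: 'hi' } }, { data: { bye: 'bye' } } ]
//
// Each prefixed top-level key is routed to the result at its index and un-prefixed, and errors
// whose path begins with a prefixed key are relocated onto the matching result. A top-level key
// that is not correctly prefixed now throws ("Key ... is not correctly prefixed") where parseKey
// previously returned null.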
import DataLoader from 'dataloader'; | ||
import { ExecutionParams, Executor } from '@graphql-tools/utils'; | ||
export declare const getBatchingExecutor: (A1: Record<string, any>, A2: Executor<Record<string, any>>, A3: DataLoader.Options<any, any, any>, A4: (mergedExtensions: Record<string, any>, executionParams: ExecutionParams) => Record<string, any>) => Executor<Record<string, any>>; | ||
import { Request, Executor } from '@graphql-tools/utils'; | ||
export declare const getBatchingExecutor: (A1: Record<string, any>, A2: Executor<Record<string, any>, Record<string, any>>, A3: DataLoader.Options<any, any, any> | undefined, A4: ((mergedExtensions: Record<string, any>, request: Request) => Record<string, any>) | undefined) => Executor<Record<string, any>, Record<string, any>>; |
index.js
@@ -21,3 +21,3 @@ 'use strict'; | ||
} | ||
return null; | ||
throw new Error(`Key ${prefixedKey} is not correctly prefixed`); | ||
} | ||
@@ -60,3 +60,3 @@ | ||
*/ | ||
function mergeExecutionParams(execs, extensionsReducer) { | ||
function mergeRequests(requests, extensionsReducer) { | ||
const mergedVariables = Object.create(null); | ||
@@ -68,10 +68,12 @@ const mergedVariableDefinitions = []; | ||
let operation; | ||
execs.forEach((executionParams, index) => { | ||
const prefixedExecutionParams = prefixExecutionParams(createPrefix(index), executionParams); | ||
prefixedExecutionParams.document.definitions.forEach(def => { | ||
var _a; | ||
for (const index in requests) { | ||
const request = requests[index]; | ||
const prefixedRequests = prefixRequest(createPrefix(index), request); | ||
for (const def of prefixedRequests.document.definitions) { | ||
if (isOperationDefinition(def)) { | ||
operation = def.operation; | ||
mergedSelections.push(...def.selectionSet.selections); | ||
mergedVariableDefinitions.push(...((_a = def.variableDefinitions) !== null && _a !== void 0 ? _a : [])); | ||
if (def.variableDefinitions) { | ||
mergedVariableDefinitions.push(...def.variableDefinitions); | ||
} | ||
} | ||
@@ -81,6 +83,9 @@ if (isFragmentDefinition(def)) { | ||
} | ||
}); | ||
Object.assign(mergedVariables, prefixedExecutionParams.variables); | ||
mergedExtensions = extensionsReducer(mergedExtensions, executionParams); | ||
}); | ||
} | ||
Object.assign(mergedVariables, prefixedRequests.variables); | ||
mergedExtensions = extensionsReducer(mergedExtensions, request); | ||
} | ||
if (operation == null) { | ||
throw new Error('Could not identify operation type. Did the document only include fragment definitions?'); | ||
} | ||
const mergedOperationDefinition = { | ||
@@ -102,11 +107,13 @@ kind: graphql.Kind.OPERATION_DEFINITION, | ||
extensions: mergedExtensions, | ||
context: execs[0].context, | ||
info: execs[0].info, | ||
context: requests[0].context, | ||
info: requests[0].info, | ||
}; | ||
} | ||
function prefixExecutionParams(prefix, executionParams) { | ||
let document = aliasTopLevelFields(prefix, executionParams.document); | ||
const variableNames = Object.keys(executionParams.variables); | ||
function prefixRequest(prefix, request) { | ||
var _a; | ||
let document = aliasTopLevelFields(prefix, request.document); | ||
const executionVariables = (_a = request.variables) !== null && _a !== void 0 ? _a : {}; | ||
const variableNames = Object.keys(executionVariables); | ||
if (variableNames.length === 0) { | ||
return { ...executionParams, document }; | ||
return { ...request, document }; | ||
} | ||
@@ -119,3 +126,3 @@ document = graphql.visit(document, { | ||
const prefixedVariables = variableNames.reduce((acc, name) => { | ||
acc[prefix + name] = executionParams.variables[name]; | ||
acc[prefix + name] = executionVariables[name]; | ||
return acc; | ||
@@ -264,3 +271,3 @@ }, Object.create(null)); | ||
*/ | ||
function splitResult(mergedResult, numResults) { | ||
function splitResult({ data, errors }, numResults) { | ||
const splitResults = []; | ||
@@ -270,44 +277,27 @@ for (let i = 0; i < numResults; i++) { | ||
} | ||
const data = mergedResult.data; | ||
if (data) { | ||
Object.keys(data).forEach(prefixedKey => { | ||
for (const prefixedKey in data) { | ||
const { index, originalKey } = parseKey(prefixedKey); | ||
if (!splitResults[index].data) { | ||
splitResults[index].data = { [originalKey]: data[prefixedKey] }; | ||
const result = splitResults[index]; | ||
if (result == null) { | ||
continue; | ||
} | ||
if (result.data == null) { | ||
result.data = { [originalKey]: data[prefixedKey] }; | ||
} | ||
else { | ||
splitResults[index].data[originalKey] = data[prefixedKey]; | ||
result.data[originalKey] = data[prefixedKey]; | ||
} | ||
}); | ||
} | ||
} | ||
const errors = mergedResult.errors; | ||
if (errors) { | ||
const newErrors = Object.create(null); | ||
errors.forEach(error => { | ||
for (const error of errors) { | ||
if (error.path) { | ||
const parsedKey = parseKey(error.path[0]); | ||
if (parsedKey) { | ||
const { index, originalKey } = parsedKey; | ||
const newError = utils.relocatedError(error, [originalKey, ...error.path.slice(1)]); | ||
if (!newErrors[index]) { | ||
newErrors[index] = [newError]; | ||
} | ||
else { | ||
newErrors[index].push(newError); | ||
} | ||
return; | ||
} | ||
const { index, originalKey } = parsedKey; | ||
const newError = utils.relocatedError(error, [originalKey, ...error.path.slice(1)]); | ||
const errors = (splitResults[index].errors = (splitResults[index].errors || [])); | ||
errors.push(newError); | ||
} | ||
splitResults.forEach((_splitResult, index) => { | ||
if (!newErrors[index]) { | ||
newErrors[index] = [error]; | ||
} | ||
else { | ||
newErrors[index].push(error); | ||
} | ||
}); | ||
}); | ||
Object.keys(newErrors).forEach(index => { | ||
splitResults[index].errors = newErrors[index]; | ||
}); | ||
} | ||
} | ||
@@ -317,40 +307,42 @@ return splitResults; | ||
function createBatchingExecutor(executor, dataLoaderOptions, extensionsReducer) { | ||
const loader = new DataLoader(createLoadFn(executor, extensionsReducer !== null && extensionsReducer !== void 0 ? extensionsReducer : defaultExtensionsReducer), dataLoaderOptions); | ||
return (executionParams) => loader.load(executionParams); | ||
function createBatchingExecutor(executor, dataLoaderOptions, extensionsReducer = defaultExtensionsReducer) { | ||
const loader = new DataLoader(createLoadFn(executor, extensionsReducer), dataLoaderOptions); | ||
return (request) => { var _a; return ((_a = request.info) === null || _a === void 0 ? void 0 : _a.operation.operation) === 'subscription' ? executor(request) : loader.load(request); }; | ||
} | ||
function createLoadFn(executor, extensionsReducer) { | ||
return async (execs) => { | ||
return async (requests) => { | ||
var _a, _b; | ||
const execBatches = []; | ||
let index = 0; | ||
const exec = execs[index]; | ||
let currentBatch = [exec]; | ||
const request = requests[index]; | ||
let currentBatch = [request]; | ||
execBatches.push(currentBatch); | ||
const operationType = graphql.getOperationAST(exec.document, undefined).operation; | ||
while (++index < execs.length) { | ||
const currentOperationType = graphql.getOperationAST(execs[index].document, undefined).operation; | ||
const operationType = (_a = graphql.getOperationAST(request.document, undefined)) === null || _a === void 0 ? void 0 : _a.operation; | ||
if (operationType == null) { | ||
throw new Error('Could not identify operation type of document.'); | ||
} | ||
while (++index < requests.length) { | ||
const currentOperationType = (_b = graphql.getOperationAST(requests[index].document, undefined)) === null || _b === void 0 ? void 0 : _b.operation; | ||
if (operationType == null) { | ||
throw new Error('Could not identify operation type of document.'); | ||
} | ||
if (operationType === currentOperationType) { | ||
currentBatch.push(execs[index]); | ||
currentBatch.push(requests[index]); | ||
} | ||
else { | ||
currentBatch = [execs[index]]; | ||
currentBatch = [requests[index]]; | ||
execBatches.push(currentBatch); | ||
} | ||
} | ||
const executionResults = []; | ||
execBatches.forEach(execBatch => { | ||
const mergedExecutionParams = mergeExecutionParams(execBatch, extensionsReducer); | ||
executionResults.push(new valueOrPromise.ValueOrPromise(() => executor(mergedExecutionParams))); | ||
const executionResults = execBatches.map(execBatch => { | ||
const mergedRequests = mergeRequests(execBatch, extensionsReducer); | ||
return new valueOrPromise.ValueOrPromise(() => executor(mergedRequests)); | ||
}); | ||
return valueOrPromise.ValueOrPromise.all(executionResults).then(resultBatches => { | ||
let results = []; | ||
resultBatches.forEach((resultBatch, index) => { | ||
results = results.concat(splitResult(resultBatch, execBatches[index].length)); | ||
}); | ||
return results; | ||
}).resolve(); | ||
return valueOrPromise.ValueOrPromise.all(executionResults) | ||
.then(resultBatches => resultBatches.reduce((results, resultBatch, index) => results.concat(splitResult(resultBatch, execBatches[index].length)), new Array())) | ||
.resolve(); | ||
}; | ||
} | ||
function defaultExtensionsReducer(mergedExtensions, executionParams) { | ||
const newExtensions = executionParams.extensions; | ||
function defaultExtensionsReducer(mergedExtensions, request) { | ||
const newExtensions = request.extensions; | ||
if (newExtensions != null) { | ||
@@ -398,2 +390,1 @@ Object.assign(mergedExtensions, newExtensions); | ||
exports.getBatchingExecutor = getBatchingExecutor; | ||
//# sourceMappingURL=index.js.map |
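Taken together, the new index.js keeps the same batching flow (group consecutive requests by operation type, merge each group, execute once, split the result) while routing subscription requests straight to the underlying executor instead of through the DataLoader. A self-contained sketch against a toy local schema; the schema, field names, and executor here are illustrative, and types may differ slightly between these alpha builds:

import { buildSchema, execute, parse, print } from 'graphql';
import { createBatchingExecutor } from '@graphql-tools/batch-execute';
import type { Executor } from '@graphql-tools/utils';

const schema = buildSchema('type Query { hello: String bye: String }');

// Upstream executor: logs what it receives so the merged, alias-prefixed document is visible.
const upstreamExecutor: Executor = ({ document, variables }) => {
  console.log(print(document));
  return execute({
    schema,
    document,
    variableValues: variables,
    rootValue: { hello: 'hello', bye: 'bye' },
  });
};

const batchingExecutor = createBatchingExecutor(upstreamExecutor);

async function main() {
  // Both loads land in the same tick, so upstreamExecutor is called once with a merged document.
  const [first, second] = await Promise.all([
    batchingExecutor({ document: parse('{ hello }') }),
    batchingExecutor({ document: parse('{ bye }') }),
  ]);
  console.log(first);  // expected: { data: { hello: 'hello' } }
  console.log(second); // expected: { data: { bye: 'bye' } }
}

main().catch(err => console.error(err));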
{ | ||
"name": "@graphql-tools/batch-execute", | ||
"version": "7.1.3-alpha-07a30dd3.0", | ||
"version": "7.1.3-alpha-12ccae3b.0", | ||
"description": "A set of utils for faster development of GraphQL tools", | ||
@@ -10,3 +10,3 @@ "sideEffects": false, | ||
"dependencies": { | ||
"@graphql-tools/utils": "8.0.0-alpha-07a30dd3.0", | ||
"@graphql-tools/utils": "8.0.0-alpha-12ccae3b.0", | ||
"dataloader": "2.0.0", | ||
@@ -13,0 +13,0 @@ "tslib": "~2.3.0", |
@@ -1,2 +0,2 @@ | ||
export declare function createPrefix(index: number): string; | ||
export declare function createPrefix(index: string): string; | ||
export declare function parseKey(prefixedKey: string): { | ||
@@ -3,0 +3,0 @@ index: number; |
@@ -5,2 +5,2 @@ import { ExecutionResult } from 'graphql'; | ||
*/ | ||
export declare function splitResult(mergedResult: ExecutionResult, numResults: number): Array<ExecutionResult>; | ||
export declare function splitResult({ data, errors }: ExecutionResult, numResults: number): Array<ExecutionResult>; |
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
+ Added @graphql-tools/utils@8.0.0-alpha-12ccae3b.0 (transitive)
- Removed @graphql-tools/utils@8.0.0-alpha-07a30dd3.0 (transitive)