@graphql-tools/utils
Comparing version 10.2.2-alpha-20240605114440-2a4d046c7b35a8992b375e57ba51ef83065a94be to 10.2.2-alpha-20240605143758-8e5f865856baf8ed233b7171d26e288c7c70f013
@@ -6,9 +6,26 @@ "use strict";
 function mergeDeep(sources, respectPrototype = false, respectArrays = false, respectArrayLength = false) {
-const target = sources[0] || {};
+if (respectArrays && respectArrayLength) {
+let expectedLength;
+const areArraysInTheSameLength = sources.every(source => {
+if (Array.isArray(source)) {
+if (expectedLength === undefined) {
+expectedLength = source.length;
+return true;
+}
+else if (expectedLength === source.length) {
+return true;
+}
+}
+return false;
+});
+if (areArraysInTheSameLength) {
+return new Array(expectedLength).fill(null).map((_, index) => mergeDeep(sources.map(source => source[index]), respectPrototype, respectArrays, respectArrayLength));
+}
+}
 const output = {};
 if (respectPrototype) {
-Object.setPrototypeOf(output, Object.create(Object.getPrototypeOf(target)));
+Object.setPrototypeOf(output, Object.create(Object.getPrototypeOf(sources[0])));
 }
 for (const source of sources) {
-if (isObject(target) && isObject(source)) {
+if (isObject(source)) {
 if (respectPrototype) {
@@ -32,3 +49,3 @@ const outputPrototype = Object.getPrototypeOf(output);
 else {
-output[key] = mergeDeep([output[key], source[key]], respectPrototype, respectArrays);
+output[key] = mergeDeep([output[key], source[key]], respectPrototype, respectArrays, respectArrayLength);
 }
@@ -38,3 +55,8 @@ }
 if (Array.isArray(source[key])) {
-output[key].push(...source[key]);
+if (respectArrayLength && output[key].length === source[key].length) {
+output[key] = mergeDeep([output[key], source[key]], respectPrototype, respectArrays, respectArrayLength);
+}
+else {
+output[key].push(...source[key]);
+}
 }
@@ -50,16 +72,2 @@ else {
 }
-else if (respectArrays && Array.isArray(target)) {
-if (Array.isArray(source)) {
-if (respectArrayLength && source.length === target.length) {
-return target.map((targetElem, i) => mergeDeep([targetElem, source[i]], respectPrototype, respectArrays, respectArrayLength));
-}
-target.push(...source);
-}
-else {
-target.push(source);
-}
-}
-else if (respectArrays && Array.isArray(source)) {
-return [target, ...source];
-}
 }
@@ -66,0 +74,0 @@ return output;
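To make the intent of the `respectArrayLength` branches above concrete, here is a rough usage sketch. The inputs and the expected results in the comments are illustrative assumptions, not output captured from either package version.

```js
// Sketch based on the hunks above: mergeDeep is a public export of @graphql-tools/utils.
import { mergeDeep } from '@graphql-tools/utils';

const a = { tags: [{ id: 1 }] };
const b = { tags: [{ label: 'x' }] };

// respectArrays only: array values from the sources are concatenated
// (the `output[key].push(...source[key])` branch).
mergeDeep([a, b], false, true);
// expected: { tags: [{ id: 1 }, { label: 'x' }] }

// respectArrays + respectArrayLength: arrays of equal length are merged index by index
// (the new `output[key].length === source[key].length` branch).
mergeDeep([a, b], false, true, true);
// expected: { tags: [{ id: 1, label: 'x' }] }
```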
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.mergeIncrementalResult = void 0; | ||
const tslib_1 = require("tslib"); | ||
const dlv_1 = tslib_1.__importDefault(require("dlv")); | ||
const merge_1 = require("dset/merge"); | ||
const pathsMap = new WeakMap(); | ||
function mergeIncrementalResult({ incrementalResult, executionResult, }) { | ||
let path = [ | ||
'data', | ||
...(incrementalResult.path ?? []), | ||
]; | ||
if (incrementalResult.pending) { | ||
let paths = pathsMap.get(executionResult); | ||
if (paths === undefined) { | ||
paths = new Map(); | ||
pathsMap.set(executionResult, paths); | ||
} | ||
for (const { id, path } of incrementalResult.pending) { | ||
paths.set(id, ['data', ...path]); | ||
} | ||
} | ||
if (incrementalResult.pending) { | ||
const paths = pathsMap.get(executionResult); | ||
for (const { id, path } of incrementalResult.pending) { | ||
if (id !== undefined) { | ||
if (paths === undefined) { | ||
throw new Error('Invalid incremental delivery format.'); | ||
} | ||
paths.set(id, ['data', ...path]); | ||
} | ||
} | ||
} | ||
const path = ['data', ...(incrementalResult.path ?? [])]; | ||
if (incrementalResult.items) { | ||
if (incrementalResult.id) { | ||
const id = incrementalResult.id; | ||
path = pathsMap.get(executionResult)?.get(id); | ||
if (path === undefined) { | ||
throw new Error('Invalid incremental delivery format.'); | ||
} | ||
const list = (0, dlv_1.default)(executionResult, path); | ||
list.push(...incrementalResult.items); | ||
for (const item of incrementalResult.items) { | ||
(0, merge_1.dset)(executionResult, path, item); | ||
// Increment the last path segment (the array index) to merge the next item at the next index | ||
path[path.length - 1]++; | ||
} | ||
else { | ||
const path = ['data', ...(incrementalResult.path ?? [])]; | ||
for (const item of incrementalResult.items) { | ||
(0, merge_1.dset)(executionResult, path, item); | ||
// Increment the last path segment (the array index) to merge the next item at the next index | ||
path[path.length - 1]++; | ||
} | ||
} | ||
} | ||
if (incrementalResult.data) { | ||
if (incrementalResult.id) { | ||
const id = incrementalResult.id; | ||
if (id !== undefined) { | ||
path = pathsMap.get(executionResult)?.get(id); | ||
if (path === undefined) { | ||
throw new Error('Invalid incremental delivery format.'); | ||
} | ||
} | ||
} | ||
(0, merge_1.dset)(executionResult, path, incrementalResult.data); | ||
@@ -80,13 +32,3 @@ } | ||
} | ||
if (incrementalResult.completed) { | ||
// Remove tracking and add additional errors | ||
for (const { id, errors } of incrementalResult.completed) { | ||
pathsMap.get(executionResult)?.delete(id); | ||
if (errors) { | ||
executionResult.errors = executionResult.errors || []; | ||
executionResult.errors.push(...errors); | ||
} | ||
} | ||
} | ||
} | ||
exports.mergeIncrementalResult = mergeIncrementalResult; |
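The compiled output above is easier to follow with a usage sketch. The payload shapes below use only the `path`/`data`/`items` fields handled by the code; the concrete values are invented.

```js
// Sketch only: mergeIncrementalResult is exported from @graphql-tools/utils.
import { mergeIncrementalResult } from '@graphql-tools/utils';

// A @defer-style payload: `data` is deep-merged at ['data', ...path] via dset.
const executionResult = { data: { user: { id: '1' } } };
mergeIncrementalResult({
  executionResult,
  incrementalResult: { path: ['user'], data: { name: 'Ada' } },
});
// expected: executionResult.data.user becomes { id: '1', name: 'Ada' }

// A @stream-style payload: each entry of `items` is written at the path,
// and the trailing array index is incremented after every item (see the loop above).
const streamResult = { data: { friends: [] } };
mergeIncrementalResult({
  executionResult: streamResult,
  incrementalResult: { path: ['friends', 0], items: [{ id: 'a' }, { id: 'b' }] },
});
// expected: streamResult.data.friends becomes [{ id: 'a' }, { id: 'b' }]
```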
 import { isSome } from './helpers.js';
 export function mergeDeep(sources, respectPrototype = false, respectArrays = false, respectArrayLength = false) {
-const target = sources[0] || {};
+if (respectArrays && respectArrayLength) {
+let expectedLength;
+const areArraysInTheSameLength = sources.every(source => {
+if (Array.isArray(source)) {
+if (expectedLength === undefined) {
+expectedLength = source.length;
+return true;
+}
+else if (expectedLength === source.length) {
+return true;
+}
+}
+return false;
+});
+if (areArraysInTheSameLength) {
+return new Array(expectedLength).fill(null).map((_, index) => mergeDeep(sources.map(source => source[index]), respectPrototype, respectArrays, respectArrayLength));
+}
+}
 const output = {};
 if (respectPrototype) {
-Object.setPrototypeOf(output, Object.create(Object.getPrototypeOf(target)));
+Object.setPrototypeOf(output, Object.create(Object.getPrototypeOf(sources[0])));
 }
 for (const source of sources) {
-if (isObject(target) && isObject(source)) {
+if (isObject(source)) {
 if (respectPrototype) {
@@ -28,3 +45,3 @@ const outputPrototype = Object.getPrototypeOf(output);
 else {
-output[key] = mergeDeep([output[key], source[key]], respectPrototype, respectArrays);
+output[key] = mergeDeep([output[key], source[key]], respectPrototype, respectArrays, respectArrayLength);
 }
@@ -34,3 +51,8 @@ }
 if (Array.isArray(source[key])) {
-output[key].push(...source[key]);
+if (respectArrayLength && output[key].length === source[key].length) {
+output[key] = mergeDeep([output[key], source[key]], respectPrototype, respectArrays, respectArrayLength);
+}
+else {
+output[key].push(...source[key]);
+}
 }
@@ -46,16 +68,2 @@ else {
 }
-else if (respectArrays && Array.isArray(target)) {
-if (Array.isArray(source)) {
-if (respectArrayLength && source.length === target.length) {
-return target.map((targetElem, i) => mergeDeep([targetElem, source[i]], respectPrototype, respectArrays, respectArrayLength));
-}
-target.push(...source);
-}
-else {
-target.push(source);
-}
-}
-else if (respectArrays && Array.isArray(source)) {
-return [target, ...source];
-}
 }
@@ -62,0 +70,0 @@ return output;
@@ -1,59 +0,12 @@
-import dlv from 'dlv';
 import { dset } from 'dset/merge';
-const pathsMap = new WeakMap();
 export function mergeIncrementalResult({ incrementalResult, executionResult, }) {
-let path = [
-'data',
-...(incrementalResult.path ?? []),
-];
-if (incrementalResult.pending) {
-let paths = pathsMap.get(executionResult);
-if (paths === undefined) {
-paths = new Map();
-pathsMap.set(executionResult, paths);
-}
-for (const { id, path } of incrementalResult.pending) {
-paths.set(id, ['data', ...path]);
-}
-}
-if (incrementalResult.pending) {
-const paths = pathsMap.get(executionResult);
-for (const { id, path } of incrementalResult.pending) {
-if (id !== undefined) {
-if (paths === undefined) {
-throw new Error('Invalid incremental delivery format.');
-}
-paths.set(id, ['data', ...path]);
-}
-}
-}
+const path = ['data', ...(incrementalResult.path ?? [])];
 if (incrementalResult.items) {
-if (incrementalResult.id) {
-const id = incrementalResult.id;
-path = pathsMap.get(executionResult)?.get(id);
-if (path === undefined) {
-throw new Error('Invalid incremental delivery format.');
-}
-const list = dlv(executionResult, path);
-list.push(...incrementalResult.items);
+for (const item of incrementalResult.items) {
+dset(executionResult, path, item);
+// Increment the last path segment (the array index) to merge the next item at the next index
+path[path.length - 1]++;
 }
-else {
-const path = ['data', ...(incrementalResult.path ?? [])];
-for (const item of incrementalResult.items) {
-dset(executionResult, path, item);
-// Increment the last path segment (the array index) to merge the next item at the next index
-path[path.length - 1]++;
-}
-}
 }
 if (incrementalResult.data) {
-if (incrementalResult.id) {
-const id = incrementalResult.id;
-if (id !== undefined) {
-path = pathsMap.get(executionResult)?.get(id);
-if (path === undefined) {
-throw new Error('Invalid incremental delivery format.');
-}
-}
-}
 dset(executionResult, path, incrementalResult.data);
@@ -76,12 +29,2 @@ }
 }
-if (incrementalResult.completed) {
-// Remove tracking and add additional errors
-for (const { id, errors } of incrementalResult.completed) {
-pathsMap.get(executionResult)?.delete(id);
-if (errors) {
-executionResult.errors = executionResult.errors || [];
-executionResult.errors.push(...errors);
-}
-}
-}
 }
 {
 "name": "@graphql-tools/utils",
-"version": "10.2.2-alpha-20240605114440-2a4d046c7b35a8992b375e57ba51ef83065a94be",
+"version": "10.2.2-alpha-20240605143758-8e5f865856baf8ed233b7171d26e288c7c70f013",
 "description": "Common package containing utils and types for GraphQL tools",
@@ -12,3 +12,2 @@ "sideEffects": false,
 "cross-inspect": "1.0.0",
-"dlv": "^1.1.3",
 "dset": "^3.1.2",
@@ -15,0 +14,0 @@ "tslib": "^2.4.0"
@@ -20,11 +20,2 @@ import { DefinitionNode, DocumentNode, EnumTypeDefinitionNode, EnumTypeExtensionNode, FieldDefinitionNode, FieldNode, FragmentDefinitionNode, GraphQLArgument, GraphQLArgumentConfig, GraphQLDirective, GraphQLEnumType, GraphQLEnumValue, GraphQLEnumValueConfig, GraphQLError, GraphQLField, GraphQLFieldConfig, GraphQLInputField, GraphQLInputFieldConfig, GraphQLInputObjectType, GraphQLInputType, GraphQLInterfaceType, GraphQLIsTypeOfFn, GraphQLNamedType, GraphQLObjectType, GraphQLOutputType, GraphQLResolveInfo, GraphQLScalarLiteralParser, GraphQLScalarSerializer, GraphQLScalarType, GraphQLScalarValueParser, GraphQLSchema, GraphQLType, GraphQLTypeResolver, GraphQLUnionType, InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode, InterfaceTypeDefinitionNode, InterfaceTypeExtensionNode, ObjectTypeDefinitionNode, ObjectTypeExtensionNode, OperationTypeNode, ScalarTypeDefinitionNode, ScalarTypeExtensionNode, SelectionNode, Source, UnionTypeDefinitionNode, UnionTypeExtensionNode } from 'graphql'; | ||
items?: TData | null; | ||
id?: string; | ||
pending?: ReadonlyArray<{ | ||
id: string; | ||
path: ReadonlyArray<string | number>; | ||
}>; | ||
completed?: ReadonlyArray<{ | ||
id: string; | ||
errors?: ReadonlyArray<GraphQLError>; | ||
}>; | ||
} | ||
@@ -31,0 +22,0 @@ export interface ExecutionRequest<TVariables extends Record<string, any> = any, TContext = any, TRootValue = any, TExtensions = Record<string, any>, TReturn = any> { |
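For orientation, these are the two incremental-delivery payload styles involved in this change, using only the fields visible in the code and typings above; the concrete values are made up.

```js
// Older style, still handled after this change: the payload carries its own `path`
// together with `data` (defer) or `items` (stream).
const pathStylePayload = { path: ['user'], data: { name: 'Ada' } };

// Newer style, whose fields are removed from the typings above: `pending` announces
// an id and path up front, later payloads refer back via `id`, and `completed`
// closes the id out, optionally attaching `errors` to the execution result.
const pendingNotice = { pending: [{ id: '0', path: ['user'] }] };
const followUp = { id: '0', data: { name: 'Ada' } };
const completion = { completed: [{ id: '0' }] };
```

Dropping the id-based bookkeeping is what lets the `pathsMap` WeakMap, the `pending`/`completed` handling, and the `dlv` read disappear in the diffs above.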
Sorry, the diff of this file is not supported yet
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
- Removed dlv@^1.1.3
- Removed dlv@1.1.3 (transitive)