@graphql-tools/load

Comparing version 5.0.1-alpha-3cbf9d2.0 to 5.0.1-alpha-3cd13f3.0

index.cjs.js

@@ -7,27 +7,12 @@ 'use strict';

function _interopNamespace(e) {
if (e && e.__esModule) { return e; } else {
var n = {};
if (e) {
Object.keys(e).forEach(function (k) {
var d = Object.getOwnPropertyDescriptor(e, k);
Object.defineProperty(n, k, d.get ? d : {
enumerable: true,
get: function () {
return e[k];
}
});
});
}
n['default'] = e;
return n;
}
}
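
For illustration, a minimal sketch of what _interopNamespace produces: it copies a CommonJS module's properties onto a fresh namespace object via getters and exposes the module itself under 'default', which is what the rewritten dynamic require calls below rely on. The 'legacy' object here is hypothetical, not from this package:

// const legacy = { parse: () => 'parsed' };  // hypothetical CJS module
// const ns = _interopNamespace(legacy);
// ns.parse();    // 'parsed' - re-exposed through a getter
// ns['default']; // the original module object
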
const utils = require('@graphql-tools/utils');
const process = require('process');
const graphql = require('graphql');
const isGlob = _interopDefault(require('is-glob'));
const pLimit = _interopDefault(require('p-limit'));
const schemaMerging = require('@graphql-tools/schema-merging');
const lodash = require('lodash');
const importFrom = _interopDefault(require('import-from'));
const unixify = _interopDefault(require('unixify'));
const globby = require('globby');
const globby__default = _interopDefault(globby);
const merge = require('@graphql-tools/merge');

@@ -53,18 +38,3 @@ function normalizePointers(unnormalizedPointerOrPointers) {

options.sort = 'sort' in options ? options.sort : true;
options.processedFiles = options.processedFiles || new Map();
}
async function prepareOptions(options) {
applyDefaultOptions(options);
options.fs = await utils.resolveBuiltinModule('fs', options.fs);
options.path = await utils.resolveBuiltinModule('path', options.path);
options.os = await utils.resolveBuiltinModule('os', options.os);
return options;
}
async function prepareOptionsSync(options) {
applyDefaultOptions(options);
options.fs = utils.resolveBuiltinModuleSync('fs', options.fs);
options.path = utils.resolveBuiltinModuleSync('path', options.path);
options.os = utils.resolveBuiltinModuleSync('os', options.os);
return options;
}
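
In this version prepareOptions and prepareOptionsSync are removed, and callers (see the loadTypedefs hunks below) invoke applyDefaultOptions directly. A sketch of the defaults it applies, based on the fragments visible in this diff:

const options = {};
applyDefaultOptions(options);
options.sort;           // true - sorting defaults on
options.processedFiles; // new Map() - circular-dependency tracking
// Per the index.esm.js hunk further down, cache and cwd default too:
// options.cache => {}, options.cwd => process.cwd()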

@@ -150,5 +120,4 @@ async function loadFile(pointer, options) {

async function getCustomLoaderByPath(path, cwd) {
function getCustomLoaderByPath(path, cwd) {
try {
const { default: importFrom } = await new Promise(function (resolve) { resolve(_interopNamespace(require('import-from'))); });
const requiredModule = importFrom(cwd, path);

@@ -167,19 +136,2 @@ if (requiredModule) {

}
function getCustomLoaderByPathSync(path, cwd) {
try {
let importFrom = require('import-from');
importFrom = importFrom.default || importFrom;
const requiredModule = importFrom(cwd, path);
if (requiredModule) {
if (requiredModule.default && typeof requiredModule.default === 'function') {
return requiredModule.default;
}
if (typeof requiredModule === 'function') {
return requiredModule;
}
}
}
catch (e) { }
return null;
}
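
Both getCustomLoaderByPath variants accept a module that is itself a function or that default-exports one, resolved through import-from relative to cwd. A hypothetical loader module that satisfies that contract:

// my-loader.js (hypothetical file, not part of this package)
module.exports = function myCustomLoader(...args) {
  // the caller (useCustomLoader below) decides what arguments arrive here
};
// getCustomLoaderByPathSync('./my-loader.js', process.cwd()) returns the function
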
async function useCustomLoader(loaderPointer, cwd) {

@@ -201,3 +153,3 @@ let loader;

if (typeof loaderPointer === 'string') {
loader = getCustomLoaderByPathSync(loaderPointer, cwd);
loader = getCustomLoaderByPath(loaderPointer, cwd);
}

@@ -244,3 +196,2 @@ else if (typeof loaderPointer === 'function') {

const queue = useQueue({ concurrency: CONCURRENCY_LIMIT });
const unixify = await new Promise(function (resolve) { resolve(_interopNamespace(require('unixify'))); }).then(m => m.default || m);
const { addSource, addGlob, collect } = createHelpers({

@@ -271,7 +222,5 @@ sources,

options,
unixify,
globs,
});
const { default: globby } = await new Promise(function (resolve) { resolve(_interopNamespace(require('globby'))); });
const paths = await globby(globs, createGlobbyOptions(options));
const paths = await globby__default(globs, createGlobbyOptions(options));
collectSourcesFromGlobals({

@@ -295,4 +244,2 @@ filepaths: paths,

const queue = useSyncQueue();
let unixify = require('unixify');
unixify = unixify.default || unixify;
const { addSource, addGlob, collect } = createHelpers({

@@ -323,6 +270,4 @@ sources,

options,
unixify,
globs,
});
const globby = require('globby');
const paths = globby.sync(globs, createGlobbyOptions(options));

@@ -360,3 +305,3 @@ collectSourcesFromGlobalsSync({

}
function includeIgnored({ options, unixify, globs }) {
function includeIgnored({ options, globs }) {
if (options.ignore) {

@@ -511,464 +456,2 @@ const ignoreList = utils.asArray(options.ignore)

const builtinTypes = ['String', 'Float', 'Int', 'Boolean', 'ID', 'Upload'];
const builtinDirectives = [
'deprecated',
'skip',
'include',
'cacheControl',
'key',
'external',
'requires',
'provides',
'connection',
'client',
];
/**
* Post processing of all imported type definitions. Loops over each of the
* imported type definitions, and processes it using collectNewTypeDefinitions.
*
* @param allDefinitions All definitions from all schemas
* @param definitionPool Current definitions (from first schema)
* @param newTypeDefinitions All imported definitions
* @returns Final collection of type definitions for the resulting schema
*/
function completeDefinitionPool(allDefinitions, definitionPool, newTypeDefinitions) {
const visitedDefinitions = {};
while (newTypeDefinitions.length > 0) {
const schemaMap = lodash.keyBy(lodash.reverse(allDefinitions), d => ('name' in d ? d.name.value : 'schema'));
const newDefinition = newTypeDefinitions.shift();
const defName = 'name' in newDefinition ? newDefinition.name.value : 'schema';
if (visitedDefinitions[defName]) {
continue;
}
const collectedTypedDefinitions = collectNewTypeDefinitions(allDefinitions, definitionPool, newDefinition, schemaMap);
newTypeDefinitions.push(...collectedTypedDefinitions);
definitionPool.push(...collectedTypedDefinitions);
visitedDefinitions[defName] = true;
}
return lodash.uniqBy(definitionPool, 'name.value');
}
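
completeDefinitionPool drains newTypeDefinitions as a work queue: each visited definition can enqueue the definitions it references, so definitionPool grows until every referenced name has been seen. A sketch of the call shape, with arguments as described in the docblock above:

const finalDefinitions = completeDefinitionPool(
  allDefinitions,     // every DefinitionNode from every schema file
  definitionPool,     // definitions selected so far (mutated in place)
  newTypeDefinitions  // queue of imported definitions left to resolve
);
// finalDefinitions: definitionPool plus everything it transitively
// references, deduplicated by 'name.value'
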
/**
* Processes a single type definition, and performs a number of checks:
* - Add missing interface implementations
* - Add missing referenced types
* - Remove unused type definitions
*
* @param allDefinitions All definitions from all schemas
* (only used to find missing interface implementations)
* @param definitionPool Resulting definitions
* @param newDefinition All imported definitions
* @param schemaMap Map of all definitions for easy lookup
* @returns All relevant type definitions to add to the final schema
*/
function collectNewTypeDefinitions(allDefinitions, definitionPool, newDefinition, schemaMap) {
const newTypeDefinitions = [];
if (newDefinition.kind !== graphql.Kind.DIRECTIVE_DEFINITION) {
newDefinition.directives.forEach(collectDirective);
}
if (newDefinition.kind === graphql.Kind.ENUM_TYPE_DEFINITION) {
newDefinition.values.forEach(value => value.directives.forEach(collectDirective));
}
if (newDefinition.kind === graphql.Kind.INPUT_OBJECT_TYPE_DEFINITION) {
newDefinition.fields.forEach(collectNode);
}
if (newDefinition.kind === graphql.Kind.INTERFACE_TYPE_DEFINITION) {
const interfaceName = newDefinition.name.value;
newDefinition.fields.forEach(collectNode);
const interfaceImplementations = allDefinitions.filter(d => d.kind === graphql.Kind.OBJECT_TYPE_DEFINITION && d.interfaces.some(i => i.name.value === interfaceName));
newTypeDefinitions.push(...interfaceImplementations);
}
if (newDefinition.kind === graphql.Kind.UNION_TYPE_DEFINITION) {
newDefinition.types.forEach(type => {
if (!definitionPool.some(d => 'name' in d && d.name.value === type.name.value)) {
const typeName = type.name.value;
const typeMatch = schemaMap[typeName];
if (!typeMatch) {
throw new Error(`Couldn't find type ${typeName} in any of the schemas.`);
}
newTypeDefinitions.push(schemaMap[type.name.value]);
}
});
}
if (newDefinition.kind === graphql.Kind.OBJECT_TYPE_DEFINITION) {
// collect missing interfaces
newDefinition.interfaces.forEach(int => {
if (!definitionPool.some(d => 'name' in d && d.name.value === int.name.value)) {
const interfaceName = int.name.value;
const interfaceMatch = schemaMap[interfaceName];
if (!interfaceMatch) {
throw new Error(`Couldn't find interface ${interfaceName} in any of the schemas.`);
}
newTypeDefinitions.push(schemaMap[int.name.value]);
}
});
// iterate over all fields
newDefinition.fields.forEach(field => {
collectNode(field);
// collect missing argument input types
field.arguments.forEach(collectNode);
});
}
if (newDefinition.kind === graphql.Kind.SCHEMA_DEFINITION) {
newDefinition.operationTypes.forEach(operationType => {
if (!definitionPool.some(d => 'name' in d && d.name.value === operationType.type.name.value)) {
const typeName = operationType.type.name.value;
const typeMatch = schemaMap[typeName];
if (!typeMatch) {
throw new Error(`Couldn't find type ${typeName} in any of the schemas.`);
}
newTypeDefinitions.push(schemaMap[operationType.type.name.value]);
}
});
}
if (newDefinition.kind === graphql.Kind.OPERATION_DEFINITION || newDefinition.kind === graphql.Kind.FRAGMENT_DEFINITION) {
if (newDefinition.selectionSet) {
for (const selection of newDefinition.selectionSet.selections) {
collectFragments(selection);
}
}
}
return newTypeDefinitions;
function collectFragments(node) {
if (node.kind === graphql.Kind.FRAGMENT_SPREAD) {
const fragmentName = node.name.value;
if (!definitionPool.some(d => 'name' in d && d.name.value === fragmentName)) {
const fragmentMatch = schemaMap[fragmentName];
if (!fragmentMatch) {
throw new Error(`Fragment ${fragmentName}: Couldn't find fragment ${fragmentName} in any of the documents.`);
}
newTypeDefinitions.push(fragmentMatch);
}
}
else if (node.selectionSet) {
for (const selection of node.selectionSet.selections) {
for (const directive of node.directives) {
collectDirective(directive);
}
collectFragments(selection);
}
}
}
function collectNode(node) {
const nodeType = getNamedType(node.type);
const nodeTypeName = nodeType.name.value;
// collect missing argument input types
if (!definitionPool.some(d => 'name' in d && d.name.value === nodeTypeName) &&
!lodash.includes(builtinTypes, nodeTypeName)) {
const argTypeMatch = schemaMap[nodeTypeName];
if (!argTypeMatch) {
throw new Error(`Field ${node.name.value}: Couldn't find type ${nodeTypeName} in any of the schemas.`);
}
newTypeDefinitions.push(argTypeMatch);
}
node.directives.forEach(collectDirective);
}
function collectDirective(directive) {
const directiveName = directive.name.value;
if (!definitionPool.some(d => 'name' in d && d.name.value === directiveName) &&
!lodash.includes(builtinDirectives, directiveName)) {
const directive = schemaMap[directiveName];
if (!directive) {
throw new Error(`Directive ${directiveName}: Couldn't find type ${directiveName} in any of the schemas.`);
}
directive.arguments.forEach(collectNode);
newTypeDefinitions.push(directive);
}
}
}
/**
* Nested visitor for a type node to get to the final NamedType
*
* @param {TypeNode} type Type node to get NamedTypeNode for
* @returns {NamedTypeNode} The found NamedTypeNode
*/
function getNamedType(type) {
if (type.kind === graphql.Kind.NAMED_TYPE) {
return type;
}
return getNamedType(type.type);
}
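
For example, getNamedType peels NonNull and List wrappers until it reaches the underlying NamedTypeNode (using the graphql module already required above):

const doc = graphql.parse('type T { f: [String!]! }');
const fieldType = doc.definitions[0].fields[0].type;
fieldType.kind;                     // 'NonNullType'
getNamedType(fieldType).name.value; // 'String'
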
const gqlExt = /\.g(raph)?ql(s)?$/;
function isGraphQLFile(f) {
return gqlExt.test(f);
}
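
gqlExt matches the usual GraphQL file extensions, for instance:

isGraphQLFile('schema.graphql');  // true
isGraphQLFile('schema.gql');      // true
isGraphQLFile('schema.graphqls'); // true - the optional trailing (s)
isGraphQLFile('schema.ts');       // false
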
const IMPORT_FROM_REGEX = /^import\s+(\*|(.*))\s+from\s+('|")(.*)('|");?$/;
const IMPORT_DEFAULT_REGEX = /^import\s+('|")(.*)('|");?$/;
/**
* Parse a single import line and extract imported types and schema filename
*
* @param importLine Import line
* @returns Processed import line
*/
function parseImportLine(importLine) {
if (IMPORT_FROM_REGEX.test(importLine)) {
// Apply regex to import line
const matches = importLine.match(IMPORT_FROM_REGEX);
if (matches && matches.length === 6 && matches[4]) {
// Extract matches into named variables
const [, wildcard, importsString, , from] = matches;
// Extract imported types
const imports = wildcard === '*' ? ['*'] : importsString.split(',').map(d => d.trim());
// Return information about the import line
return { imports, from };
}
}
else if (IMPORT_DEFAULT_REGEX.test(importLine)) {
const [, , from] = importLine.match(IMPORT_DEFAULT_REGEX);
return { imports: ['*'], from };
}
throw new Error(`
Import statement is not valid: ${importLine}
If you want to have comments starting with '# import', please use ''' instead!
You can only have 'import' statements in the following pattern;
# import [Type].[Field] from [File]
`);
}
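
A few example inputs and what parseImportLine returns for them:

parseImportLine('import A, B from "other.graphql"');
// => { imports: ['A', 'B'], from: 'other.graphql' }
parseImportLine('import * from "common.graphql"');
// => { imports: ['*'], from: 'common.graphql' }
parseImportLine('import "bare.graphql"');
// => { imports: ['*'], from: 'bare.graphql' } (IMPORT_DEFAULT_REGEX branch)
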
/**
* Parse a schema and analyze all import lines
*
* @param sdl Schema to parse
* @returns Array with collection of imports per import line (file)
*/
function parseSDL(sdl) {
return sdl
.split('\n')
.map(l => l.trim())
.filter(l => l.startsWith('# import ') || l.startsWith('#import '))
.map(l => l.replace('#', '').trim())
.map(parseImportLine);
}
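
parseSDL only inspects comment lines that start with '# import' or '#import'; the SDL body itself is ignored at this stage. For example:

parseSDL([
  '# import Query.user from "user.graphql"',
  '#import * from "common.graphql"',
  'type Query { ok: Boolean }',
].join('\n'));
// => [ { imports: ['Query.user'], from: 'user.graphql' },
//      { imports: ['*'], from: 'common.graphql' } ]
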
/**
* Main entry point. Recursively process all import statement in a schema
*
* @param filePath File path to the initial schema file
* @returns Single bundled schema with all imported types
*/
async function processImportSyntax(documentSource, options, allDefinitions) {
const typeDefinitions = [];
// Recursively process the imports, starting by importing all types from the initial schema
await collectDefinitions(['*'], documentSource, options, typeDefinitions, allDefinitions);
return process$1({
typeDefinitions,
options,
allDefinitions,
});
}
/**
* Main entry point. Recursively process all import statement in a schema
*
* @param documentSource File path to the initial schema file
* @returns Single bundled schema with all imported types
*/
function processImportSyntaxSync(documentSource, options, allDefinitions) {
const typeDefinitions = [];
// Recursively process the imports, starting by importing all types from the initial schema
collectDefinitionsSync(['*'], documentSource, options, typeDefinitions, allDefinitions);
return process$1({
typeDefinitions,
options,
allDefinitions,
});
}
function process$1({ typeDefinitions, options, allDefinitions, }) {
// Post processing of the final schema (missing types, unused types, etc.)
// Query, Mutation and Subscription should be merged
// And should always be in the first set, to make sure they
// are not filtered out.
const firstTypes = lodash.flatten(typeDefinitions);
const secondFirstTypes = typeDefinitions[0];
const otherFirstTypes = lodash.flatten(typeDefinitions.slice(1));
const firstSet = firstTypes.concat(secondFirstTypes, otherFirstTypes);
const processedTypeNames = [];
const mergedFirstTypes = [];
for (const type of firstSet) {
if ('name' in type) {
if (!processedTypeNames.includes(type.name.value)) {
processedTypeNames.push(type.name.value);
mergedFirstTypes.push(type);
}
else {
const existingType = mergedFirstTypes.find(t => t.name.value === type.name.value);
if ('fields' in existingType) {
existingType.fields = lodash.uniqBy(existingType.fields.concat(type.fields), 'name.value');
if (options.sort) {
existingType.fields = existingType.fields.sort(utils.compareNodes);
}
}
}
}
}
return completeDefinitionPool(lodash.flatten(allDefinitions), firstSet, lodash.flatten(typeDefinitions));
}
/**
* Parses a schema into a graphql DocumentNode.
* If the schema is empty a DocumentNode with empty definitions will be created.
*
* @param sdl Schema to parse
* @returns A graphql DocumentNode with definitions of the parsed sdl.
*/
function getDocumentFromSDL(sdl) {
if (isEmptySDL(sdl)) {
return {
kind: graphql.Kind.DOCUMENT,
definitions: [],
};
}
return graphql.parse(sdl, { noLocation: true });
}
/**
* Check if a schema contains any type definitions at all.
*
* @param sdl Schema to parse
* @returns True if SDL only contains comments and/or whitespaces
*/
function isEmptySDL(sdl) {
return (sdl
.split('\n')
.map(l => l.trim())
.filter(l => !(l.length === 0 || l.startsWith('#'))).length === 0);
}
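
Together these two helpers let a comment-only file become a valid empty DocumentNode instead of a graphql.parse syntax error:

isEmptySDL('# nothing but a comment\n\n');                 // true
getDocumentFromSDL('# nothing but a comment').definitions; // []
getDocumentFromSDL('type Query { ok: Boolean }').definitions.length; // 1
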
/**
* Resolve the path of an import.
* First it will try to find a file relative from the file the import is in, if that fails it will try to resolve it as a module so imports from packages work correctly.
*
* @param filePath Path the import was made from
* @param importFrom Path given for the import
* @returns Full resolved path to a file
*/
function resolveModuleFilePath(filePath, importFrom, options) {
const { fs, path } = options;
if (fs && path) {
const fullPath = path.resolve(options.cwd, filePath);
const dirName = path.dirname(fullPath);
if (isGraphQLFile(fullPath) && isGraphQLFile(importFrom)) {
try {
return fs.realpathSync(path.join(dirName, importFrom));
}
catch (e) {
if (e.code === 'ENOENT') {
let resolveFrom = require('resolve-from');
resolveFrom = resolveFrom.default || resolveFrom;
return resolveFrom(dirName, importFrom);
}
}
}
}
return importFrom;
}
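
resolveModuleFilePath only touches the filesystem when both the importing file and the import target look like GraphQL files; anything else (a package name, for example) is returned untouched for the loaders to handle. A sketch with hypothetical paths:

const options = { fs: require('fs'), path: require('path'), cwd: process.cwd() };
// Both sides are .graphql: resolve relative to the importing file
// (falls back to resolve-from when realpathSync throws ENOENT).
resolveModuleFilePath('schemas/main.graphql', './user.graphql', options);
// Target is not a .graphql file: returned as-is.
resolveModuleFilePath('schemas/main.graphql', 'some-package', options); // 'some-package'
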
/**
* Recursively process all schema files. Keeps track of both the filtered
* type definitions, and all type definitions, because they might be needed
* in post-processing (to add missing types)
*
* @param imports Types specified in the import statement
* @param sdl Current schema
* @param filePath File location for current schema
* @param Tracking of processed schemas (for circular dependencies)
* @param Tracking of imported type definitions per schema
* @param Tracking of all type definitions per schema
* @returns Both the collection of all type definitions, and the collection of imported type definitions
*/
async function collectDefinitions(imports, source, options, typeDefinitions, allDefinitions) {
const rawModules = preapreRawModules({ allDefinitions, source, imports, options, typeDefinitions });
// Process each file (recursively)
await Promise.all(rawModules.map(async (module) => {
// If it was not yet processed (in case of circular dependencies)
const filepath = resolveModuleFilePath(source.location, module.from, options);
if (canProcess({
options,
module,
filepath,
})) {
const result = await loadFile(filepath, options);
await collectDefinitions(module.imports, result, options, typeDefinitions, allDefinitions);
}
}));
}
/**
* Recursively process all schema files. Keeps track of both the filtered
* type definitions, and all type definitions, because they might be needed
* in post-processing (to add missing types)
*
* @param imports Types specified in the import statement
* @param sdl Current schema
* @param filePath File location for current schema
* @param Tracking of processed schemas (for circular dependencies)
* @param Tracking of imported type definitions per schema
* @param Tracking of all type definitions per schema
* @returns Both the collection of all type definitions, and the collection of imported type definitions
*/
function collectDefinitionsSync(imports, source, options, typeDefinitions, allDefinitions) {
const rawModules = preapreRawModules({ allDefinitions, source, imports, options, typeDefinitions });
// Process each file (recursively)
rawModules.forEach(module => {
// If it was not yet processed (in case of circular dependencies)
const filepath = resolveModuleFilePath(source.location, module.from, options);
if (canProcess({
options,
module,
filepath,
})) {
const result = loadFileSync(filepath, options);
collectDefinitionsSync(module.imports, result, options, typeDefinitions, allDefinitions);
}
});
}
//
function preapreRawModules({ allDefinitions, imports, options, typeDefinitions, source, }) {
// Add all definitions to running total
allDefinitions.push(source.document.definitions);
// Filter TypeDefinitionNodes by type and defined imports
const currentTypeDefinitions = filterImportedDefinitions(imports, source.document.definitions, allDefinitions, options.sort);
// Add typedefinitions to running total
typeDefinitions.push(currentTypeDefinitions);
// Read imports from current file
return parseSDL(source.rawSDL);
}
function canProcess({ options, module, filepath, }) {
const processedFile = options.processedFiles.get(filepath);
if (!processedFile || !processedFile.find(rModule => lodash.isEqual(rModule, module))) {
// Mark this specific import line as processed for this file (for circular dependency cases)
options.processedFiles.set(filepath, processedFile ? processedFile.concat(module) : [module]);
return true;
}
return false;
}
/**
* Filter the types loaded from a schema, first by relevant types,
* then by the types specified in the import statement.
*
* @param imports Types specified in the import statement
* @param typeDefinitions All definitions from a schema
* @returns Filtered collection of type definitions
*/
function filterImportedDefinitions(imports, typeDefinitions, allDefinitions, sort) {
// This should do something smart with fields
const filteredDefinitions = typeDefinitions;
if (imports.includes('*')) {
if (imports.length === 1 && imports[0] === '*' && allDefinitions.length > 1) {
const previousTypeDefinitions = lodash.keyBy(lodash.flatten(allDefinitions.slice(0, allDefinitions.length - 1)).filter(def => 'name' in def), def => 'name' in def && def.name.value);
return typeDefinitions.filter(typeDef => typeDef.kind === 'ObjectTypeDefinition' && previousTypeDefinitions[typeDef.name.value]);
}
return filteredDefinitions;
}
else {
const importedTypes = imports.map(i => i.split('.')[0]);
const result = filteredDefinitions.filter(d => 'name' in d && importedTypes.includes(d.name.value));
const fieldImports = imports.filter(i => i.split('.').length > 1);
const groupedFieldImports = lodash.groupBy(fieldImports, x => x.split('.')[0]);
for (const rootType in groupedFieldImports) {
const fields = groupedFieldImports[rootType].map(x => x.split('.')[1]);
const objectTypeDefinition = filteredDefinitions.find(def => 'name' in def && def.name.value === rootType);
if (objectTypeDefinition && 'fields' in objectTypeDefinition && !fields.includes('*')) {
objectTypeDefinition.fields = objectTypeDefinition.fields.filter((f) => fields.includes(f.name.value) || fields.includes('*'));
if (sort) {
objectTypeDefinition.fields.sort(utils.compareNodes);
}
}
}
return result;
}
}
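
Field-level imports such as 'Query.user' keep only the named type and prune its fields down to the listed ones. A sketch of the contract (the function is internal to this bundle):

const defs = graphql.parse(
  'type Query { user: String post: String } type Other { x: Int }'
).definitions;
filterImportedDefinitions(['Query.user'], defs, [defs], false);
// => only the Query ObjectTypeDefinition, its fields filtered
//    down to the single 'user' field
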
const filterKind = (content, filterKinds) => {

@@ -999,3 +482,3 @@ if (content && content.definitions && content.definitions.length && filterKinds && filterKinds.length > 0) {

async function parseSource({ partialSource, options, globOptions, pointerOptionMap, addValidSource, cache, }) {
function parseSource({ partialSource, options, globOptions, pointerOptionMap, addValidSource }) {
if (partialSource) {

@@ -1013,3 +496,2 @@ const input = prepareInput({

useComments(input);
await useGraphQLImport(input, () => processImportSyntax(input.source, input.options, cache));
collectValidSources(input, addValidSource);

@@ -1019,20 +501,2 @@ }

}
function parseSourceSync({ partialSource, options, globOptions, pointerOptionMap, addValidSource, cache, }) {
if (partialSource) {
const input = prepareInput({
source: partialSource,
options,
globOptions,
pointerOptionMap,
});
parseSchema(input);
parseRawSDL(input);
if (input.source.document) {
useKindsFilter(input);
useComments(input);
useGraphQLImport(input, () => processImportSyntaxSync(input.source, input.options, cache));
collectValidSources(input, addValidSource);
}
}
}
//

@@ -1054,8 +518,3 @@ function prepareInput({ source, options, globOptions, pointerOptionMap, }) {

if (input.source.rawSDL) {
input.source.document = isEmptySDL(input.source.rawSDL)
? {
kind: graphql.Kind.DOCUMENT,
definitions: [],
}
: graphql.parse(new graphql.Source(input.source.rawSDL, input.source.location), input.options);
input.source.document = graphql.parse(new graphql.Source(input.source.rawSDL, input.source.location), input.options);
}

@@ -1070,25 +529,6 @@ }

if (!input.source.rawSDL) {
input.source.rawSDL = schemaMerging.printWithComments(input.source.document);
schemaMerging.resetComments();
input.source.rawSDL = merge.printWithComments(input.source.document);
merge.resetComments();
}
}
function useGraphQLImport(input, definitionsGetter) {
if (input.options.forceGraphQLImport ||
(!input.options.skipGraphQLImport && /^\#.*import /i.test(input.source.rawSDL.trimLeft()))) {
const rewriteDoc = (definitions) => {
input.source.document = {
kind: graphql.Kind.DOCUMENT,
definitions,
};
};
const result = definitionsGetter();
if (isPromise(result)) {
return result.then(rewriteDoc);
}
rewriteDoc(result);
}
}
function isPromise(val) {
return val instanceof Promise;
}
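
useGraphQLImport runs when forceGraphQLImport is set, or when the SDL (after trimLeft) starts with an import comment; the trigger is just the regex test shown above:

/^\#.*import /i.test('# import User from "user.graphql"'); // true
/^\#.*import /i.test('type Query { ok: Boolean }');        // false
/^\#.*import /i.test('  # import A from "a.graphql"'.trimLeft()); // true
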
function collectValidSources(input, addValidSource) {

@@ -1104,3 +544,3 @@ if (input.source.document.definitions && input.source.document.definitions.length > 0) {

const globOptions = {};
await prepareOptions(options);
applyDefaultOptions(options);
const sources = await collectSources({

@@ -1111,3 +551,2 @@ pointerOptionMap,

const validSources = [];
const definitionsCacheForImport = [];
// If we have a few thousand files this may be an issue

@@ -1123,3 +562,2 @@ const limit = useLimit(CONCURRENCY_LIMIT$1);

},
cache: definitionsCacheForImport,
}))));

@@ -1131,3 +569,3 @@ return prepareResult({ options, pointerOptionMap, validSources });

const globOptions = {};
prepareOptionsSync(options);
applyDefaultOptions(options);
const sources = collectSourcesSync({

@@ -1138,5 +576,4 @@ pointerOptionMap,

const validSources = [];
const definitionsCacheForImport = [];
sources.forEach(partialSource => {
parseSourceSync({
parseSource({
partialSource,

@@ -1149,3 +586,2 @@ options,

},
cache: definitionsCacheForImport,
});

@@ -1192,3 +628,3 @@ });

};
const schema = await schemaMerging.mergeSchemasAsync(mergeSchemasOptions);
const schema = await merge.mergeSchemasAsync(mergeSchemasOptions);
if (options.includeSources) {

@@ -1210,3 +646,3 @@ includeSources(schema, sources);

};
const schema = schemaMerging.mergeSchemas(mergeSchemasOptions);
const schema = merge.mergeSchemas(mergeSchemasOptions);
if (options.includeSources) {

@@ -1244,7 +680,3 @@ includeSources(schema, sources);

exports.OPERATION_KINDS = OPERATION_KINDS;
exports.collectDefinitions = collectDefinitions;
exports.collectDefinitionsSync = collectDefinitionsSync;
exports.filterKind = filterKind;
exports.getDocumentFromSDL = getDocumentFromSDL;
exports.isEmptySDL = isEmptySDL;
exports.loadDocuments = loadDocuments;

@@ -1256,7 +688,2 @@ exports.loadDocumentsSync = loadDocumentsSync;

exports.loadTypedefsSync = loadTypedefsSync;
exports.parseImportLine = parseImportLine;
exports.parseSDL = parseSDL;
exports.processImportSyntax = processImportSyntax;
exports.processImportSyntaxSync = processImportSyntaxSync;
exports.resolveModuleFilePath = resolveModuleFilePath;
//# sourceMappingURL=index.cjs.js.map

index.d.ts

@@ -5,2 +5,1 @@ export * from './load-typedefs';

export * from './filter-document-kind';
export * from './import-parser';

index.esm.js

@@ -1,7 +0,10 @@

import { asArray, resolveBuiltinModule, resolveBuiltinModuleSync, debugLog, printSchemaWithDirectives, isDocumentString, parseGraphQLSDL, compareNodes, fixSchemaAst, compareStrings } from '@graphql-tools/utils';
import { asArray, debugLog, printSchemaWithDirectives, isDocumentString, parseGraphQLSDL, fixSchemaAst, compareStrings } from '@graphql-tools/utils';
import { cwd } from 'process';
import { isSchema, parse, Kind, Source, print } from 'graphql';
import isGlob from 'is-glob';
import pLimit from 'p-limit';
import { printWithComments, resetComments, mergeSchemasAsync, mergeSchemas } from '@graphql-tools/schema-merging';
import { uniqBy, keyBy, reverse, includes, flatten, isEqual, groupBy } from 'lodash';
import importFrom from 'import-from';
import unixify from 'unixify';
import globby, { sync } from 'globby';
import { printWithComments, resetComments, mergeSchemasAsync, mergeSchemas } from '@graphql-tools/merge';

@@ -25,20 +28,5 @@ function normalizePointers(unnormalizedPointerOrPointers) {

options.cache = options.cache || {};
options.cwd = options.cwd || process.cwd();
options.cwd = options.cwd || cwd();
options.sort = 'sort' in options ? options.sort : true;
options.processedFiles = options.processedFiles || new Map();
}
async function prepareOptions(options) {
applyDefaultOptions(options);
options.fs = await resolveBuiltinModule('fs', options.fs);
options.path = await resolveBuiltinModule('path', options.path);
options.os = await resolveBuiltinModule('os', options.os);
return options;
}
async function prepareOptionsSync(options) {
applyDefaultOptions(options);
options.fs = resolveBuiltinModuleSync('fs', options.fs);
options.path = resolveBuiltinModuleSync('path', options.path);
options.os = resolveBuiltinModuleSync('os', options.os);
return options;
}

@@ -124,5 +112,4 @@ async function loadFile(pointer, options) {

async function getCustomLoaderByPath(path, cwd) {
function getCustomLoaderByPath(path, cwd) {
try {
const { default: importFrom } = await import('import-from');
const requiredModule = importFrom(cwd, path);

@@ -141,19 +128,2 @@ if (requiredModule) {

}
function getCustomLoaderByPathSync(path, cwd) {
try {
let importFrom = require('import-from');
importFrom = importFrom.default || importFrom;
const requiredModule = importFrom(cwd, path);
if (requiredModule) {
if (requiredModule.default && typeof requiredModule.default === 'function') {
return requiredModule.default;
}
if (typeof requiredModule === 'function') {
return requiredModule;
}
}
}
catch (e) { }
return null;
}
async function useCustomLoader(loaderPointer, cwd) {

@@ -175,3 +145,3 @@ let loader;

if (typeof loaderPointer === 'string') {
loader = getCustomLoaderByPathSync(loaderPointer, cwd);
loader = getCustomLoaderByPath(loaderPointer, cwd);
}

@@ -218,3 +188,2 @@ else if (typeof loaderPointer === 'function') {

const queue = useQueue({ concurrency: CONCURRENCY_LIMIT });
const unixify = await import('unixify').then(m => m.default || m);
const { addSource, addGlob, collect } = createHelpers({

@@ -245,6 +214,4 @@ sources,

options,
unixify,
globs,
});
const { default: globby } = await import('globby');
const paths = await globby(globs, createGlobbyOptions(options));

@@ -269,4 +236,2 @@ collectSourcesFromGlobals({

const queue = useSyncQueue();
let unixify = require('unixify');
unixify = unixify.default || unixify;
const { addSource, addGlob, collect } = createHelpers({

@@ -297,7 +262,5 @@ sources,

options,
unixify,
globs,
});
const globby = require('globby');
const paths = globby.sync(globs, createGlobbyOptions(options));
const paths = sync(globs, createGlobbyOptions(options));
collectSourcesFromGlobalsSync({

@@ -334,3 +297,3 @@ filepaths: paths,

}
function includeIgnored({ options, unixify, globs }) {
function includeIgnored({ options, globs }) {
if (options.ignore) {

@@ -485,464 +448,2 @@ const ignoreList = asArray(options.ignore)

const builtinTypes = ['String', 'Float', 'Int', 'Boolean', 'ID', 'Upload'];
const builtinDirectives = [
'deprecated',
'skip',
'include',
'cacheControl',
'key',
'external',
'requires',
'provides',
'connection',
'client',
];
/**
* Post processing of all imported type definitions. Loops over each of the
* imported type definitions, and processes it using collectNewTypeDefinitions.
*
* @param allDefinitions All definitions from all schemas
* @param definitionPool Current definitions (from first schema)
* @param newTypeDefinitions All imported definitions
* @returns Final collection of type definitions for the resulting schema
*/
function completeDefinitionPool(allDefinitions, definitionPool, newTypeDefinitions) {
const visitedDefinitions = {};
while (newTypeDefinitions.length > 0) {
const schemaMap = keyBy(reverse(allDefinitions), d => ('name' in d ? d.name.value : 'schema'));
const newDefinition = newTypeDefinitions.shift();
const defName = 'name' in newDefinition ? newDefinition.name.value : 'schema';
if (visitedDefinitions[defName]) {
continue;
}
const collectedTypedDefinitions = collectNewTypeDefinitions(allDefinitions, definitionPool, newDefinition, schemaMap);
newTypeDefinitions.push(...collectedTypedDefinitions);
definitionPool.push(...collectedTypedDefinitions);
visitedDefinitions[defName] = true;
}
return uniqBy(definitionPool, 'name.value');
}
/**
* Processes a single type definition, and performs a number of checks:
* - Add missing interface implementations
* - Add missing referenced types
* - Remove unused type definitions
*
* @param allDefinitions All definitions from all schemas
* (only used to find missing interface implementations)
* @param definitionPool Resulting definitions
* @param newDefinition All imported definitions
* @param schemaMap Map of all definitions for easy lookup
* @returns All relevant type definitions to add to the final schema
*/
function collectNewTypeDefinitions(allDefinitions, definitionPool, newDefinition, schemaMap) {
const newTypeDefinitions = [];
if (newDefinition.kind !== Kind.DIRECTIVE_DEFINITION) {
newDefinition.directives.forEach(collectDirective);
}
if (newDefinition.kind === Kind.ENUM_TYPE_DEFINITION) {
newDefinition.values.forEach(value => value.directives.forEach(collectDirective));
}
if (newDefinition.kind === Kind.INPUT_OBJECT_TYPE_DEFINITION) {
newDefinition.fields.forEach(collectNode);
}
if (newDefinition.kind === Kind.INTERFACE_TYPE_DEFINITION) {
const interfaceName = newDefinition.name.value;
newDefinition.fields.forEach(collectNode);
const interfaceImplementations = allDefinitions.filter(d => d.kind === Kind.OBJECT_TYPE_DEFINITION && d.interfaces.some(i => i.name.value === interfaceName));
newTypeDefinitions.push(...interfaceImplementations);
}
if (newDefinition.kind === Kind.UNION_TYPE_DEFINITION) {
newDefinition.types.forEach(type => {
if (!definitionPool.some(d => 'name' in d && d.name.value === type.name.value)) {
const typeName = type.name.value;
const typeMatch = schemaMap[typeName];
if (!typeMatch) {
throw new Error(`Couldn't find type ${typeName} in any of the schemas.`);
}
newTypeDefinitions.push(schemaMap[type.name.value]);
}
});
}
if (newDefinition.kind === Kind.OBJECT_TYPE_DEFINITION) {
// collect missing interfaces
newDefinition.interfaces.forEach(int => {
if (!definitionPool.some(d => 'name' in d && d.name.value === int.name.value)) {
const interfaceName = int.name.value;
const interfaceMatch = schemaMap[interfaceName];
if (!interfaceMatch) {
throw new Error(`Couldn't find interface ${interfaceName} in any of the schemas.`);
}
newTypeDefinitions.push(schemaMap[int.name.value]);
}
});
// iterate over all fields
newDefinition.fields.forEach(field => {
collectNode(field);
// collect missing argument input types
field.arguments.forEach(collectNode);
});
}
if (newDefinition.kind === Kind.SCHEMA_DEFINITION) {
newDefinition.operationTypes.forEach(operationType => {
if (!definitionPool.some(d => 'name' in d && d.name.value === operationType.type.name.value)) {
const typeName = operationType.type.name.value;
const typeMatch = schemaMap[typeName];
if (!typeMatch) {
throw new Error(`Couldn't find type ${typeName} in any of the schemas.`);
}
newTypeDefinitions.push(schemaMap[operationType.type.name.value]);
}
});
}
if (newDefinition.kind === Kind.OPERATION_DEFINITION || newDefinition.kind === Kind.FRAGMENT_DEFINITION) {
if (newDefinition.selectionSet) {
for (const selection of newDefinition.selectionSet.selections) {
collectFragments(selection);
}
}
}
return newTypeDefinitions;
function collectFragments(node) {
if (node.kind === Kind.FRAGMENT_SPREAD) {
const fragmentName = node.name.value;
if (!definitionPool.some(d => 'name' in d && d.name.value === fragmentName)) {
const fragmentMatch = schemaMap[fragmentName];
if (!fragmentMatch) {
throw new Error(`Fragment ${fragmentName}: Couldn't find fragment ${fragmentName} in any of the documents.`);
}
newTypeDefinitions.push(fragmentMatch);
}
}
else if (node.selectionSet) {
for (const selection of node.selectionSet.selections) {
for (const directive of node.directives) {
collectDirective(directive);
}
collectFragments(selection);
}
}
}
function collectNode(node) {
const nodeType = getNamedType(node.type);
const nodeTypeName = nodeType.name.value;
// collect missing argument input types
if (!definitionPool.some(d => 'name' in d && d.name.value === nodeTypeName) &&
!includes(builtinTypes, nodeTypeName)) {
const argTypeMatch = schemaMap[nodeTypeName];
if (!argTypeMatch) {
throw new Error(`Field ${node.name.value}: Couldn't find type ${nodeTypeName} in any of the schemas.`);
}
newTypeDefinitions.push(argTypeMatch);
}
node.directives.forEach(collectDirective);
}
function collectDirective(directive) {
const directiveName = directive.name.value;
if (!definitionPool.some(d => 'name' in d && d.name.value === directiveName) &&
!includes(builtinDirectives, directiveName)) {
const directive = schemaMap[directiveName];
if (!directive) {
throw new Error(`Directive ${directiveName}: Couldn't find type ${directiveName} in any of the schemas.`);
}
directive.arguments.forEach(collectNode);
newTypeDefinitions.push(directive);
}
}
}
/**
* Nested visitor for a type node to get to the final NamedType
*
* @param {TypeNode} type Type node to get NamedTypeNode for
* @returns {NamedTypeNode} The found NamedTypeNode
*/
function getNamedType(type) {
if (type.kind === Kind.NAMED_TYPE) {
return type;
}
return getNamedType(type.type);
}
const gqlExt = /\.g(raph)?ql(s)?$/;
function isGraphQLFile(f) {
return gqlExt.test(f);
}
const IMPORT_FROM_REGEX = /^import\s+(\*|(.*))\s+from\s+('|")(.*)('|");?$/;
const IMPORT_DEFAULT_REGEX = /^import\s+('|")(.*)('|");?$/;
/**
* Parse a single import line and extract imported types and schema filename
*
* @param importLine Import line
* @returns Processed import line
*/
function parseImportLine(importLine) {
if (IMPORT_FROM_REGEX.test(importLine)) {
// Apply regex to import line
const matches = importLine.match(IMPORT_FROM_REGEX);
if (matches && matches.length === 6 && matches[4]) {
// Extract matches into named variables
const [, wildcard, importsString, , from] = matches;
// Extract imported types
const imports = wildcard === '*' ? ['*'] : importsString.split(',').map(d => d.trim());
// Return information about the import line
return { imports, from };
}
}
else if (IMPORT_DEFAULT_REGEX.test(importLine)) {
const [, , from] = importLine.match(IMPORT_DEFAULT_REGEX);
return { imports: ['*'], from };
}
throw new Error(`
Import statement is not valid: ${importLine}
If you want to have comments starting with '# import', please use ''' instead!
You can only have 'import' statements in the following pattern;
# import [Type].[Field] from [File]
`);
}
/**
* Parse a schema and analyze all import lines
*
* @param sdl Schema to parse
* @returns Array with collection of imports per import line (file)
*/
function parseSDL(sdl) {
return sdl
.split('\n')
.map(l => l.trim())
.filter(l => l.startsWith('# import ') || l.startsWith('#import '))
.map(l => l.replace('#', '').trim())
.map(parseImportLine);
}
/**
* Main entry point. Recursively process all import statement in a schema
*
* @param filePath File path to the initial schema file
* @returns Single bundled schema with all imported types
*/
async function processImportSyntax(documentSource, options, allDefinitions) {
const typeDefinitions = [];
// Recursively process the imports, starting by importing all types from the initial schema
await collectDefinitions(['*'], documentSource, options, typeDefinitions, allDefinitions);
return process$1({
typeDefinitions,
options,
allDefinitions,
});
}
/**
* Main entry point. Recursively process all import statement in a schema
*
* @param documentSource File path to the initial schema file
* @returns Single bundled schema with all imported types
*/
function processImportSyntaxSync(documentSource, options, allDefinitions) {
const typeDefinitions = [];
// Recursively process the imports, starting by importing all types from the initial schema
collectDefinitionsSync(['*'], documentSource, options, typeDefinitions, allDefinitions);
return process$1({
typeDefinitions,
options,
allDefinitions,
});
}
function process$1({ typeDefinitions, options, allDefinitions, }) {
// Post processing of the final schema (missing types, unused types, etc.)
// Query, Mutation and Subscription should be merged
// And should always be in the first set, to make sure they
// are not filtered out.
const firstTypes = flatten(typeDefinitions);
const secondFirstTypes = typeDefinitions[0];
const otherFirstTypes = flatten(typeDefinitions.slice(1));
const firstSet = firstTypes.concat(secondFirstTypes, otherFirstTypes);
const processedTypeNames = [];
const mergedFirstTypes = [];
for (const type of firstSet) {
if ('name' in type) {
if (!processedTypeNames.includes(type.name.value)) {
processedTypeNames.push(type.name.value);
mergedFirstTypes.push(type);
}
else {
const existingType = mergedFirstTypes.find(t => t.name.value === type.name.value);
if ('fields' in existingType) {
existingType.fields = uniqBy(existingType.fields.concat(type.fields), 'name.value');
if (options.sort) {
existingType.fields = existingType.fields.sort(compareNodes);
}
}
}
}
}
return completeDefinitionPool(flatten(allDefinitions), firstSet, flatten(typeDefinitions));
}
/**
* Parses a schema into a graphql DocumentNode.
* If the schema is empty a DocumentNode with empty definitions will be created.
*
* @param sdl Schema to parse
* @returns A graphql DocumentNode with definitions of the parsed sdl.
*/
function getDocumentFromSDL(sdl) {
if (isEmptySDL(sdl)) {
return {
kind: Kind.DOCUMENT,
definitions: [],
};
}
return parse(sdl, { noLocation: true });
}
/**
* Check if a schema contains any type definitions at all.
*
* @param sdl Schema to parse
* @returns True if SDL only contains comments and/or whitespaces
*/
function isEmptySDL(sdl) {
return (sdl
.split('\n')
.map(l => l.trim())
.filter(l => !(l.length === 0 || l.startsWith('#'))).length === 0);
}
/**
* Resolve the path of an import.
* First it will try to find a file relative from the file the import is in, if that fails it will try to resolve it as a module so imports from packages work correctly.
*
* @param filePath Path the import was made from
* @param importFrom Path given for the import
* @returns Full resolved path to a file
*/
function resolveModuleFilePath(filePath, importFrom, options) {
const { fs, path } = options;
if (fs && path) {
const fullPath = path.resolve(options.cwd, filePath);
const dirName = path.dirname(fullPath);
if (isGraphQLFile(fullPath) && isGraphQLFile(importFrom)) {
try {
return fs.realpathSync(path.join(dirName, importFrom));
}
catch (e) {
if (e.code === 'ENOENT') {
let resolveFrom = require('resolve-from');
resolveFrom = resolveFrom.default || resolveFrom;
return resolveFrom(dirName, importFrom);
}
}
}
}
return importFrom;
}
/**
* Recursively process all schema files. Keeps track of both the filtered
* type definitions, and all type definitions, because they might be needed
* in post-processing (to add missing types)
*
* @param imports Types specified in the import statement
* @param sdl Current schema
* @param filePath File location for current schema
* @param Tracking of processed schemas (for circular dependencies)
* @param Tracking of imported type definitions per schema
* @param Tracking of all type definitions per schema
* @returns Both the collection of all type definitions, and the collection of imported type definitions
*/
async function collectDefinitions(imports, source, options, typeDefinitions, allDefinitions) {
const rawModules = preapreRawModules({ allDefinitions, source, imports, options, typeDefinitions });
// Process each file (recursively)
await Promise.all(rawModules.map(async (module) => {
// If it was not yet processed (in case of circular dependencies)
const filepath = resolveModuleFilePath(source.location, module.from, options);
if (canProcess({
options,
module,
filepath,
})) {
const result = await loadFile(filepath, options);
await collectDefinitions(module.imports, result, options, typeDefinitions, allDefinitions);
}
}));
}
/**
* Recursively process all schema files. Keeps track of both the filtered
* type definitions, and all type definitions, because they might be needed
* in post-processing (to add missing types)
*
* @param imports Types specified in the import statement
* @param sdl Current schema
* @param filePath File location for current schema
* @param Tracking of processed schemas (for circular dependencies)
* @param Tracking of imported type definitions per schema
* @param Tracking of all type definitions per schema
* @returns Both the collection of all type definitions, and the collection of imported type definitions
*/
function collectDefinitionsSync(imports, source, options, typeDefinitions, allDefinitions) {
const rawModules = preapreRawModules({ allDefinitions, source, imports, options, typeDefinitions });
// Process each file (recursively)
rawModules.forEach(module => {
// If it was not yet processed (in case of circular dependencies)
const filepath = resolveModuleFilePath(source.location, module.from, options);
if (canProcess({
options,
module,
filepath,
})) {
const result = loadFileSync(filepath, options);
collectDefinitionsSync(module.imports, result, options, typeDefinitions, allDefinitions);
}
});
}
//
function preapreRawModules({ allDefinitions, imports, options, typeDefinitions, source, }) {
// Add all definitions to running total
allDefinitions.push(source.document.definitions);
// Filter TypeDefinitionNodes by type and defined imports
const currentTypeDefinitions = filterImportedDefinitions(imports, source.document.definitions, allDefinitions, options.sort);
// Add typedefinitions to running total
typeDefinitions.push(currentTypeDefinitions);
// Read imports from current file
return parseSDL(source.rawSDL);
}
function canProcess({ options, module, filepath, }) {
const processedFile = options.processedFiles.get(filepath);
if (!processedFile || !processedFile.find(rModule => isEqual(rModule, module))) {
// Mark this specific import line as processed for this file (for circular dependency cases)
options.processedFiles.set(filepath, processedFile ? processedFile.concat(module) : [module]);
return true;
}
return false;
}
/**
* Filter the types loaded from a schema, first by relevant types,
* then by the types specified in the import statement.
*
* @param imports Types specified in the import statement
* @param typeDefinitions All definitions from a schema
* @returns Filtered collection of type definitions
*/
function filterImportedDefinitions(imports, typeDefinitions, allDefinitions, sort) {
// This should do something smart with fields
const filteredDefinitions = typeDefinitions;
if (imports.includes('*')) {
if (imports.length === 1 && imports[0] === '*' && allDefinitions.length > 1) {
const previousTypeDefinitions = keyBy(flatten(allDefinitions.slice(0, allDefinitions.length - 1)).filter(def => 'name' in def), def => 'name' in def && def.name.value);
return typeDefinitions.filter(typeDef => typeDef.kind === 'ObjectTypeDefinition' && previousTypeDefinitions[typeDef.name.value]);
}
return filteredDefinitions;
}
else {
const importedTypes = imports.map(i => i.split('.')[0]);
const result = filteredDefinitions.filter(d => 'name' in d && importedTypes.includes(d.name.value));
const fieldImports = imports.filter(i => i.split('.').length > 1);
const groupedFieldImports = groupBy(fieldImports, x => x.split('.')[0]);
for (const rootType in groupedFieldImports) {
const fields = groupedFieldImports[rootType].map(x => x.split('.')[1]);
const objectTypeDefinition = filteredDefinitions.find(def => 'name' in def && def.name.value === rootType);
if (objectTypeDefinition && 'fields' in objectTypeDefinition && !fields.includes('*')) {
objectTypeDefinition.fields = objectTypeDefinition.fields.filter((f) => fields.includes(f.name.value) || fields.includes('*'));
if (sort) {
objectTypeDefinition.fields.sort(compareNodes);
}
}
}
return result;
}
}
const filterKind = (content, filterKinds) => {

@@ -973,3 +474,3 @@ if (content && content.definitions && content.definitions.length && filterKinds && filterKinds.length > 0) {

async function parseSource({ partialSource, options, globOptions, pointerOptionMap, addValidSource, cache, }) {
function parseSource({ partialSource, options, globOptions, pointerOptionMap, addValidSource }) {
if (partialSource) {

@@ -987,3 +488,2 @@ const input = prepareInput({

useComments(input);
await useGraphQLImport(input, () => processImportSyntax(input.source, input.options, cache));
collectValidSources(input, addValidSource);

@@ -993,20 +493,2 @@ }

}
function parseSourceSync({ partialSource, options, globOptions, pointerOptionMap, addValidSource, cache, }) {
if (partialSource) {
const input = prepareInput({
source: partialSource,
options,
globOptions,
pointerOptionMap,
});
parseSchema(input);
parseRawSDL(input);
if (input.source.document) {
useKindsFilter(input);
useComments(input);
useGraphQLImport(input, () => processImportSyntaxSync(input.source, input.options, cache));
collectValidSources(input, addValidSource);
}
}
}
//

@@ -1028,8 +510,3 @@ function prepareInput({ source, options, globOptions, pointerOptionMap, }) {

if (input.source.rawSDL) {
input.source.document = isEmptySDL(input.source.rawSDL)
? {
kind: Kind.DOCUMENT,
definitions: [],
}
: parse(new Source(input.source.rawSDL, input.source.location), input.options);
input.source.document = parse(new Source(input.source.rawSDL, input.source.location), input.options);
}

@@ -1048,21 +525,2 @@ }

}
function useGraphQLImport(input, definitionsGetter) {
if (input.options.forceGraphQLImport ||
(!input.options.skipGraphQLImport && /^\#.*import /i.test(input.source.rawSDL.trimLeft()))) {
const rewriteDoc = (definitions) => {
input.source.document = {
kind: Kind.DOCUMENT,
definitions,
};
};
const result = definitionsGetter();
if (isPromise(result)) {
return result.then(rewriteDoc);
}
rewriteDoc(result);
}
}
function isPromise(val) {
return val instanceof Promise;
}
function collectValidSources(input, addValidSource) {

@@ -1078,3 +536,3 @@ if (input.source.document.definitions && input.source.document.definitions.length > 0) {

const globOptions = {};
await prepareOptions(options);
applyDefaultOptions(options);
const sources = await collectSources({

@@ -1085,3 +543,2 @@ pointerOptionMap,

const validSources = [];
const definitionsCacheForImport = [];
// If we have a few thousand files this may be an issue

@@ -1097,3 +554,2 @@ const limit = useLimit(CONCURRENCY_LIMIT$1);

},
cache: definitionsCacheForImport,
}))));

@@ -1105,3 +561,3 @@ return prepareResult({ options, pointerOptionMap, validSources });

const globOptions = {};
prepareOptionsSync(options);
applyDefaultOptions(options);
const sources = collectSourcesSync({

@@ -1112,5 +568,4 @@ pointerOptionMap,

const validSources = [];
const definitionsCacheForImport = [];
sources.forEach(partialSource => {
parseSourceSync({
parseSource({
partialSource,

@@ -1123,3 +578,2 @@ options,

},
cache: definitionsCacheForImport,
});

@@ -1214,3 +668,3 @@ });

export { NON_OPERATION_KINDS, OPERATION_KINDS, collectDefinitions, collectDefinitionsSync, filterKind, getDocumentFromSDL, isEmptySDL, loadDocuments, loadDocumentsSync, loadSchema, loadSchemaSync, loadTypedefs, loadTypedefsSync, parseImportLine, parseSDL, processImportSyntax, processImportSyntaxSync, resolveModuleFilePath };
export { NON_OPERATION_KINDS, OPERATION_KINDS, filterKind, loadDocuments, loadDocumentsSync, loadSchema, loadSchemaSync, loadTypedefs, loadTypedefsSync };
//# sourceMappingURL=index.esm.js.map
load-typedefs.d.ts

import { Source, SingleFileOptions, Loader } from '@graphql-tools/utils';
import { RawModule } from './import-parser';
export declare type LoadTypedefsOptions<ExtraConfig = {
[key: string]: any;
}> = SingleFileOptions & ExtraConfig & {
processedFiles?: Map<string, RawModule[]>;
cache?: {

@@ -16,5 +14,2 @@ [key: string]: Source;

forceGraphQLImport?: boolean;
fs?: typeof import('fs');
path?: typeof import('path');
os?: typeof import('os');
};

@@ -21,0 +16,0 @@ export declare type UnnormalizedTypeDefPointer = {


load-typedefs/options.d.ts
import { LoadTypedefsOptions } from './../load-typedefs';
export declare function prepareOptions<T>(options: LoadTypedefsOptions<Partial<T>>): Promise<LoadTypedefsOptions<Partial<T>>>;
export declare function prepareOptionsSync<T>(options: LoadTypedefsOptions<Partial<T>>): Promise<LoadTypedefsOptions<Partial<T>>>;
export declare function applyDefaultOptions<T>(options: LoadTypedefsOptions<Partial<T>>): void;
import { Source } from '@graphql-tools/utils';
import { DefinitionNode } from 'graphql';
declare type AddValidSource = (source: Source) => void;

@@ -10,6 +9,4 @@ declare type ParseOptions = {

addValidSource: AddValidSource;
cache: DefinitionNode[][];
};
export declare function parseSource({ partialSource, options, globOptions, pointerOptionMap, addValidSource, cache, }: ParseOptions): Promise<void>;
export declare function parseSourceSync({ partialSource, options, globOptions, pointerOptionMap, addValidSource, cache, }: ParseOptions): void;
export declare function parseSource({ partialSource, options, globOptions, pointerOptionMap, addValidSource }: ParseOptions): void;
export {};
package.json

{
"name": "@graphql-tools/load",
"version": "5.0.1-alpha-3cbf9d2.0",
"version": "5.0.1-alpha-3cd13f3.0",
"description": "A set of utils for faster development of GraphQL tools",

@@ -9,11 +9,8 @@ "peerDependencies": {

"dependencies": {
"@graphql-tools/utils": "5.0.1-alpha-3cbf9d2.0",
"@graphql-tools/schema-merging": "5.0.1-alpha-3cbf9d2.0",
"aggregate-error": "3.0.1",
"@graphql-tools/utils": "5.0.1-alpha-3cd13f3.0",
"@graphql-tools/merge": "5.0.1-alpha-3cd13f3.0",
"globby": "11.0.0",
"import-from": "^3.0.0",
"import-from": "3.0.0",
"is-glob": "4.0.1",
"lodash": "4.17.15",
"p-limit": "2.3.0",
"resolve-from": "5.0.0",
"tslib": "1.11.1",

@@ -34,3 +31,2 @@ "unixify": "1.0.0",

"@types/is-glob": "4.0.1",
"@types/lodash": "4.14.150",
"@types/valid-url": "1.0.3",

@@ -37,0 +33,0 @@ "graphql-tag": "2.10.3",

import { LoadTypedefsOptions, UnnormalizedTypeDefPointer } from './load-typedefs';
import { GraphQLSchema, BuildSchemaOptions } from 'graphql';
import { MergeSchemasConfig } from '@graphql-tools/schema-merging';
import { MergeSchemasConfig } from '@graphql-tools/merge';
export declare type LoadSchemaOptions = BuildSchemaOptions & LoadTypedefsOptions & Partial<MergeSchemasConfig> & {

@@ -5,0 +5,0 @@ /**

@@ -1,4 +0,3 @@

export declare function getCustomLoaderByPath(path: string, cwd: string): Promise<any>;
export declare function getCustomLoaderByPathSync(path: string, cwd: string): any;
export declare function getCustomLoaderByPath(path: string, cwd: string): any;
export declare function useCustomLoader(loaderPointer: any, cwd: string): Promise<any>;
export declare function useCustomLoaderSync(loaderPointer: any, cwd: string): any;

