apollo-codegen
Comparing version 0.5.1 to 0.6.0
@@ -149,2 +149,3 @@ #!/usr/bin/env node | ||
handleError(error ? error : new Error(message)); | ||
}).help().strict().argv; | ||
}).help().strict().argv; | ||
//# sourceMappingURL=cli.js.map |
@@ -6,3 +6,3 @@ 'use strict'; | ||
}); | ||
exports.CompilationContext = undefined; | ||
exports.Compiler = undefined; | ||
@@ -13,2 +13,6 @@ var _stringify = require('babel-runtime/core-js/json/stringify'); | ||
var _values = require('babel-runtime/core-js/object/values'); | ||
var _values2 = _interopRequireDefault(_values); | ||
var _from = require('babel-runtime/core-js/array/from'); | ||
@@ -18,5 +22,5 @@ | ||
var _values = require('babel-runtime/core-js/object/values'); | ||
var _entries = require('babel-runtime/core-js/object/entries'); | ||
var _values2 = _interopRequireDefault(_values); | ||
var _entries2 = _interopRequireDefault(_entries); | ||
@@ -31,6 +35,2 @@ var _assign = require('babel-runtime/core-js/object/assign'); | ||
var _entries = require('babel-runtime/core-js/object/entries'); | ||
var _entries2 = _interopRequireDefault(_entries); | ||
var _slicedToArray2 = require('babel-runtime/helpers/slicedToArray'); | ||
@@ -48,2 +48,6 @@ | ||
var _map = require('babel-runtime/core-js/map'); | ||
var _map2 = _interopRequireDefault(_map); | ||
var _getIterator2 = require('babel-runtime/core-js/get-iterator'); | ||
@@ -53,6 +57,2 @@ | ||
var _create = require('babel-runtime/core-js/object/create'); | ||
var _create2 = _interopRequireDefault(_create); | ||
var _set = require('babel-runtime/core-js/set'); | ||
@@ -70,2 +70,7 @@ | ||
var _create = require('babel-runtime/core-js/object/create'); | ||
var _create2 = _interopRequireDefault(_create); | ||
exports.compileToIR = compileToIR; | ||
exports.stringifyIR = stringifyIR; | ||
@@ -84,6 +89,26 @@ exports.printIR = printIR; | ||
var CompilationContext = exports.CompilationContext = function () { | ||
function CompilationContext(schema, document) { | ||
(0, _classCallCheck3.default)(this, CompilationContext); | ||
function compileToIR(schema, document) { | ||
var compiler = new Compiler(schema, document); | ||
var operations = (0, _create2.default)(null); | ||
compiler.operations.forEach(function (operation) { | ||
operations[operation.name.value] = compiler.compileOperation(operation); | ||
}); | ||
var fragments = (0, _create2.default)(null); | ||
compiler.fragments.forEach(function (fragment) { | ||
fragments[fragment.name.value] = compiler.compileFragment(fragment); | ||
}); | ||
var typesUsed = compiler.typesUsed; | ||
return { schema: schema, operations: operations, fragments: fragments, typesUsed: typesUsed }; | ||
} | ||
var Compiler = exports.Compiler = function () { | ||
function Compiler(schema, document) { | ||
(0, _classCallCheck3.default)(this, Compiler); | ||
this.schema = schema; | ||
@@ -127,5 +152,7 @@ | ||
} | ||
this.compiledFragmentMap = (0, _create2.default)(null); | ||
} | ||
(0, _createClass3.default)(CompilationContext, [{ | ||
(0, _createClass3.default)(Compiler, [{ | ||
key: 'fragmentNamed', | ||
@@ -136,7 +163,2 @@ value: function fragmentNamed(fragmentName) { | ||
}, { | ||
key: 'compileOperations', | ||
value: function compileOperations() { | ||
return this.operations.map(this.compileOperation, this); | ||
} | ||
}, { | ||
key: 'compileOperation', | ||
@@ -158,14 +180,17 @@ value: function compileOperation(operationDefinition) { | ||
var rootType = (0, _graphql2.getOperationRootType)(this.schema, operationDefinition); | ||
var groupedFieldSet = this.collectFields(rootType, operationDefinition.selectionSet); | ||
var groupedVisitedFragmentSet = new _map2.default(); | ||
var groupedFieldSet = this.collectFields(rootType, operationDefinition.selectionSet, undefined, groupedVisitedFragmentSet); | ||
var fragmentsReferencedSet = (0, _create2.default)(null); | ||
var fields = this.resolveFields(rootType, groupedFieldSet, fragmentsReferencedSet); | ||
return { operationName: operationName, variables: variables, source: source, fields: fields, fragmentsReferenced: (0, _keys2.default)(fragmentsReferencedSet) }; | ||
var _resolveFields = this.resolveFields(rootType, groupedFieldSet, groupedVisitedFragmentSet, fragmentsReferencedSet); | ||
var fields = _resolveFields.fields; | ||
var fragmentsReferenced = (0, _keys2.default)(fragmentsReferencedSet); | ||
return { operationName: operationName, variables: variables, source: source, fields: fields, fragmentsReferenced: fragmentsReferenced }; | ||
} | ||
}, { | ||
key: 'compileFragments', | ||
value: function compileFragments() { | ||
return this.fragments.map(this.compileFragment, this); | ||
} | ||
}, { | ||
key: 'compileFragment', | ||
@@ -177,8 +202,15 @@ value: function compileFragment(fragmentDefinition) { | ||
var fragmentType = (0, _graphql.typeFromAST)(this.schema, fragmentDefinition.typeCondition); | ||
var groupedFieldSet = this.collectFields(fragmentType, fragmentDefinition.selectionSet); | ||
var fields = this.resolveFields(fragmentType, groupedFieldSet); | ||
var typeConditions = this.resolveSubTypes(fragmentType, groupedFieldSet); | ||
var typeCondition = (0, _graphql.typeFromAST)(this.schema, fragmentDefinition.typeCondition); | ||
return { fragmentName: fragmentName, source: source, fields: fields, typeConditions: typeConditions }; | ||
var groupedVisitedFragmentSet = new _map2.default(); | ||
var groupedFieldSet = this.collectFields(typeCondition, fragmentDefinition.selectionSet, undefined, groupedVisitedFragmentSet); | ||
var _resolveFields2 = this.resolveFields(typeCondition, groupedFieldSet, groupedVisitedFragmentSet); | ||
var fields = _resolveFields2.fields; | ||
var fragmentSpreads = _resolveFields2.fragmentSpreads; | ||
var inlineFragments = _resolveFields2.inlineFragments; | ||
return { fragmentName: fragmentName, source: source, typeCondition: typeCondition, fields: fields, fragmentSpreads: fragmentSpreads, inlineFragments: inlineFragments }; | ||
} | ||
@@ -189,3 +221,3 @@ }, { | ||
var groupedFieldSet = arguments.length <= 2 || arguments[2] === undefined ? (0, _create2.default)(null) : arguments[2]; | ||
var visitedFragmentSet = arguments.length <= 3 || arguments[3] === undefined ? (0, _create2.default)(null) : arguments[3]; | ||
var groupedVisitedFragmentSet = arguments.length <= 3 || arguments[3] === undefined ? new _map2.default() : arguments[3]; | ||
@@ -216,7 +248,9 @@ if (!(0, _graphql.isCompositeType)(parentType)) { | ||
if (!groupedFieldSet[responseName]) { | ||
groupedFieldSet[responseName] = []; | ||
if (groupedFieldSet) { | ||
if (!groupedFieldSet[responseName]) { | ||
groupedFieldSet[responseName] = []; | ||
} | ||
groupedFieldSet[responseName].push([parentType, (0, _extends3.default)({}, selection, { type: fieldType })]); | ||
} | ||
groupedFieldSet[responseName].push([parentType, (0, _extends3.default)({}, selection, { type: fieldType })]); | ||
break; | ||
@@ -229,3 +263,3 @@ } | ||
this.collectFields((0, _graphql.isTypeSubTypeOf)(this.schema, inlineFragmentType, parentType) ? inlineFragmentType : parentType, selection.selectionSet, groupedFieldSet, visitedFragmentSet); | ||
this.collectFields((0, _graphql.isTypeSubTypeOf)(this.schema, inlineFragmentType, parentType) ? inlineFragmentType : parentType, selection.selectionSet, groupedFieldSet, groupedVisitedFragmentSet); | ||
break; | ||
@@ -238,3 +272,3 @@ } | ||
var fragment = this.fragmentNamed(fragmentName); | ||
if (!fragment) continue; | ||
if (!fragment) throw new _graphql.GraphQLError('Cannot find fragment "' + fragmentName + '"'); | ||
@@ -244,12 +278,16 @@ var _typeCondition = fragment.typeCondition; | ||
var effectiveType = (0, _graphql.isTypeSubTypeOf)(this.schema, fragmentType, parentType) ? fragmentType : parentType; | ||
if (groupedVisitedFragmentSet) { | ||
var visitedFragmentSet = groupedVisitedFragmentSet.get(parentType); | ||
if (!visitedFragmentSet) { | ||
visitedFragmentSet = {}; | ||
groupedVisitedFragmentSet.set(parentType, visitedFragmentSet); | ||
} | ||
if (!visitedFragmentSet[effectiveType]) { | ||
visitedFragmentSet[effectiveType] = {}; | ||
if (visitedFragmentSet[fragmentName]) continue; | ||
visitedFragmentSet[fragmentName] = true; | ||
} | ||
if (visitedFragmentSet[effectiveType][fragmentName]) continue; | ||
visitedFragmentSet[effectiveType][fragmentName] = true; | ||
var effectiveType = (0, _graphql.isTypeSubTypeOf)(this.schema, fragmentType, parentType) ? fragmentType : parentType; | ||
this.collectFields(effectiveType, fragment.selectionSet, groupedFieldSet, visitedFragmentSet); | ||
this.collectFields(effectiveType, fragment.selectionSet, null, groupedVisitedFragmentSet); | ||
break; | ||
@@ -278,3 +316,3 @@ } | ||
key: 'mergeSelectionSets', | ||
value: function mergeSelectionSets(parentType, fieldSet, visitedFragmentSet) { | ||
value: function mergeSelectionSets(parentType, fieldSet, groupedVisitedFragmentSet) { | ||
var groupedFieldSet = (0, _create2.default)(null); | ||
@@ -295,3 +333,3 @@ | ||
if (selectionSet) { | ||
this.collectFields(parentType, selectionSet, groupedFieldSet, visitedFragmentSet); | ||
this.collectFields(parentType, selectionSet, groupedFieldSet, groupedVisitedFragmentSet); | ||
} | ||
@@ -318,3 +356,3 @@ } | ||
key: 'resolveFields', | ||
value: function resolveFields(parentType, groupedFieldSet, fragmentsReferencedSet) { | ||
value: function resolveFields(parentType, groupedFieldSet, groupedVisitedFragmentSet, fragmentsReferencedSet) { | ||
var _this2 = this; | ||
@@ -351,16 +389,19 @@ | ||
var unmodifiedFieldType = (0, _graphql.getNamedType)(fieldType); | ||
var bareFieldType = (0, _graphql.getNamedType)(fieldType); | ||
if ((0, _graphql.isCompositeType)(unmodifiedFieldType)) { | ||
var visitedFragmentSet = (0, _create2.default)(null); | ||
var subSelectionSet = this.mergeSelectionSets(unmodifiedFieldType, fieldSet, visitedFragmentSet); | ||
if (!(0, _graphql2.isBuiltInType)(bareFieldType)) { | ||
this.typesUsedSet.add(bareFieldType); | ||
} | ||
field.fragmentSpreads = fragmentSpreadsForType(unmodifiedFieldType, visitedFragmentSet); | ||
if ((0, _graphql.isCompositeType)(bareFieldType)) { | ||
var subSelectionGroupedVisitedFragmentSet = new _map2.default(); | ||
var subSelectionGroupedFieldSet = this.mergeSelectionSets(bareFieldType, fieldSet, subSelectionGroupedVisitedFragmentSet); | ||
if (fragmentsReferencedSet) { | ||
_assign2.default.apply(Object, [fragmentsReferencedSet].concat((0, _toConsumableArray3.default)((0, _values2.default)(visitedFragmentSet)))); | ||
} | ||
var _resolveFields3 = this.resolveFields(bareFieldType, subSelectionGroupedFieldSet, subSelectionGroupedVisitedFragmentSet, fragmentsReferencedSet); | ||
field.fields = this.resolveFields(unmodifiedFieldType, subSelectionSet, fragmentsReferencedSet); | ||
field.typeConditions = this.resolveSubTypes(unmodifiedFieldType, subSelectionSet, visitedFragmentSet, fragmentsReferencedSet); | ||
var _fields = _resolveFields3.fields; | ||
var _fragmentSpreads = _resolveFields3.fragmentSpreads; | ||
var _inlineFragments = _resolveFields3.inlineFragments; | ||
(0, _assign2.default)(field, { fields: _fields, fragmentSpreads: _fragmentSpreads, inlineFragments: _inlineFragments }); | ||
} | ||
@@ -385,22 +426,32 @@ | ||
return fields; | ||
var fragmentSpreads = this.fragmentSpreadsForParentType(parentType, groupedVisitedFragmentSet); | ||
var inlineFragments = this.resolveInlineFragments(parentType, groupedFieldSet, groupedVisitedFragmentSet, fragmentsReferencedSet); | ||
if (fragmentsReferencedSet) { | ||
_assign2.default.apply(Object, [fragmentsReferencedSet].concat((0, _toConsumableArray3.default)(groupedVisitedFragmentSet.values()))); | ||
} | ||
return { fields: fields, fragmentSpreads: fragmentSpreads, inlineFragments: inlineFragments }; | ||
} | ||
}, { | ||
key: 'resolveSubTypes', | ||
value: function resolveSubTypes(parentType, groupedFieldSet) { | ||
key: 'resolveInlineFragments', | ||
value: function resolveInlineFragments(parentType, groupedFieldSet, groupedVisitedFragmentSet, fragmentsReferencedSet) { | ||
var _this3 = this; | ||
var visitedFragmentSet = arguments.length <= 2 || arguments[2] === undefined ? (0, _create2.default)(null) : arguments[2]; | ||
var fragmentsReferencedSet = arguments[3]; | ||
return this.collectPossibleTypes(parentType, groupedFieldSet, groupedVisitedFragmentSet).map(function (typeCondition) { | ||
var _resolveFields4 = _this3.resolveFields(typeCondition, groupedFieldSet, groupedVisitedFragmentSet, fragmentsReferencedSet); | ||
return this.collectSubTypes(parentType, groupedFieldSet).map(function (type) { | ||
var fields = _this3.resolveFields(type, groupedFieldSet, fragmentsReferencedSet); | ||
var fragmentSpreads = fragmentSpreadsForType(type, visitedFragmentSet); | ||
return { type: type, fields: fields, fragmentSpreads: fragmentSpreads }; | ||
var fields = _resolveFields4.fields; | ||
var fragmentSpreads = _resolveFields4.fragmentSpreads; | ||
return { typeCondition: typeCondition, fields: fields, fragmentSpreads: fragmentSpreads }; | ||
}); | ||
} | ||
}, { | ||
key: 'collectSubTypes', | ||
value: function collectSubTypes(parentType, groupedFieldSet) { | ||
var typeConditions = new _set2.default(); | ||
key: 'collectPossibleTypes', | ||
value: function collectPossibleTypes(parentType, groupedFieldSet, groupedVisitedFragmentSet) { | ||
if (!(0, _graphql.isAbstractType)(parentType)) return []; | ||
var possibleTypes = new _set2.default(); | ||
var _iteratorNormalCompletion5 = true; | ||
@@ -413,27 +464,27 @@ var _didIteratorError5 = false; | ||
var fieldSet = _step5.value; | ||
var _iteratorNormalCompletion6 = true; | ||
var _didIteratorError6 = false; | ||
var _iteratorError6 = undefined; | ||
var _iteratorNormalCompletion7 = true; | ||
var _didIteratorError7 = false; | ||
var _iteratorError7 = undefined; | ||
try { | ||
for (var _iterator6 = (0, _getIterator3.default)(fieldSet), _step6; !(_iteratorNormalCompletion6 = (_step6 = _iterator6.next()).done); _iteratorNormalCompletion6 = true) { | ||
var _step6$value = (0, _slicedToArray3.default)(_step6.value, 1); | ||
for (var _iterator7 = (0, _getIterator3.default)(fieldSet), _step7; !(_iteratorNormalCompletion7 = (_step7 = _iterator7.next()).done); _iteratorNormalCompletion7 = true) { | ||
var _step7$value = (0, _slicedToArray3.default)(_step7.value, 1); | ||
var typeCondition = _step6$value[0]; | ||
var typeCondition = _step7$value[0]; | ||
if (!(0, _graphql.isTypeSubTypeOf)(this.schema, parentType, typeCondition)) { | ||
typeConditions.add(typeCondition); | ||
if (this.schema.isPossibleType(parentType, typeCondition)) { | ||
possibleTypes.add(typeCondition); | ||
} | ||
} | ||
} catch (err) { | ||
_didIteratorError6 = true; | ||
_iteratorError6 = err; | ||
_didIteratorError7 = true; | ||
_iteratorError7 = err; | ||
} finally { | ||
try { | ||
if (!_iteratorNormalCompletion6 && _iterator6.return) { | ||
_iterator6.return(); | ||
if (!_iteratorNormalCompletion7 && _iterator7.return) { | ||
_iterator7.return(); | ||
} | ||
} finally { | ||
if (_didIteratorError6) { | ||
throw _iteratorError6; | ||
if (_didIteratorError7) { | ||
throw _iteratorError7; | ||
} | ||
@@ -443,2 +494,4 @@ } | ||
} | ||
// Also include type conditions for fragment spreads | ||
} catch (err) { | ||
@@ -459,5 +512,96 @@ _didIteratorError5 = true; | ||
return (0, _from2.default)(typeConditions); | ||
if (groupedVisitedFragmentSet) { | ||
var _iteratorNormalCompletion6 = true; | ||
var _didIteratorError6 = false; | ||
var _iteratorError6 = undefined; | ||
try { | ||
for (var _iterator6 = (0, _getIterator3.default)(groupedVisitedFragmentSet.keys()), _step6; !(_iteratorNormalCompletion6 = (_step6 = _iterator6.next()).done); _iteratorNormalCompletion6 = true) { | ||
var effectiveType = _step6.value; | ||
if (this.schema.isPossibleType(parentType, effectiveType)) { | ||
possibleTypes.add(effectiveType); | ||
} | ||
} | ||
} catch (err) { | ||
_didIteratorError6 = true; | ||
_iteratorError6 = err; | ||
} finally { | ||
try { | ||
if (!_iteratorNormalCompletion6 && _iterator6.return) { | ||
_iterator6.return(); | ||
} | ||
} finally { | ||
if (_didIteratorError6) { | ||
throw _iteratorError6; | ||
} | ||
} | ||
} | ||
} | ||
return (0, _from2.default)(possibleTypes); | ||
} | ||
}, { | ||
key: 'fragmentSpreadsForParentType', | ||
value: function fragmentSpreadsForParentType(parentType, groupedVisitedFragmentSet) { | ||
if (!groupedVisitedFragmentSet) return []; | ||
var fragmentSpreads = new _set2.default(); | ||
var _iteratorNormalCompletion8 = true; | ||
var _didIteratorError8 = false; | ||
var _iteratorError8 = undefined; | ||
try { | ||
for (var _iterator8 = (0, _getIterator3.default)(groupedVisitedFragmentSet), _step8; !(_iteratorNormalCompletion8 = (_step8 = _iterator8.next()).done); _iteratorNormalCompletion8 = true) { | ||
var _step8$value = (0, _slicedToArray3.default)(_step8.value, 2); | ||
var effectiveType = _step8$value[0]; | ||
var visitedFragmentSet = _step8$value[1]; | ||
if (!(0, _graphql2.isTypeProperSuperTypeOf)(this.schema, effectiveType, parentType)) continue; | ||
var _iteratorNormalCompletion9 = true; | ||
var _didIteratorError9 = false; | ||
var _iteratorError9 = undefined; | ||
try { | ||
for (var _iterator9 = (0, _getIterator3.default)((0, _keys2.default)(visitedFragmentSet)), _step9; !(_iteratorNormalCompletion9 = (_step9 = _iterator9.next()).done); _iteratorNormalCompletion9 = true) { | ||
var fragmentName = _step9.value; | ||
fragmentSpreads.add(fragmentName); | ||
} | ||
} catch (err) { | ||
_didIteratorError9 = true; | ||
_iteratorError9 = err; | ||
} finally { | ||
try { | ||
if (!_iteratorNormalCompletion9 && _iterator9.return) { | ||
_iterator9.return(); | ||
} | ||
} finally { | ||
if (_didIteratorError9) { | ||
throw _iteratorError9; | ||
} | ||
} | ||
} | ||
} | ||
} catch (err) { | ||
_didIteratorError8 = true; | ||
_iteratorError8 = err; | ||
} finally { | ||
try { | ||
if (!_iteratorNormalCompletion8 && _iterator8.return) { | ||
_iterator8.return(); | ||
} | ||
} finally { | ||
if (_didIteratorError8) { | ||
throw _iteratorError8; | ||
} | ||
} | ||
} | ||
return (0, _from2.default)(fragmentSpreads); | ||
} | ||
}, { | ||
key: 'typesUsed', | ||
@@ -473,9 +617,5 @@ get: function get() { | ||
}]); | ||
return CompilationContext; | ||
return Compiler; | ||
}(); | ||
function fragmentSpreadsForType(type, visitedFragmentSet) { | ||
return visitedFragmentSet[type] ? (0, _keys2.default)(visitedFragmentSet[type]) : []; | ||
} | ||
var typenameField = { kind: _graphql.Kind.FIELD, name: { kind: _graphql.Kind.NAME, value: '__typename' } }; | ||
@@ -514,3 +654,3 @@ | ||
var fields = _ref3.fields; | ||
var typeConditions = _ref3.typeConditions; | ||
var inlineFragments = _ref3.inlineFragments; | ||
var fragmentSpreads = _ref3.fragmentSpreads; | ||
@@ -520,5 +660,6 @@ | ||
return field.name + ': ' + String(field.type) + (0, _printing.wrap)(' ', printIR(field)); | ||
}).concat(typeConditions && typeConditions.map(function (typeCondition) { | ||
return '' + String(typeCondition.type) + (0, _printing.wrap)(' ', printIR(typeCondition)); | ||
}).concat(inlineFragments && inlineFragments.map(function (inlineFragment) { | ||
return '' + String(inlineFragment.typeCondition) + (0, _printing.wrap)(' ', printIR(inlineFragment)); | ||
}))); | ||
} | ||
} | ||
//# sourceMappingURL=compilation.js.map |
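The compilation hunks above replace the CompilationContext class with a Compiler class plus a standalone compileToIR entry point, and swap the old typeConditions arrays for fragmentSpreads and inlineFragments. A minimal sketch of the new call pattern, pieced together from these hunks and the updated tests further down (the query text and schema fixture path are taken from the test file and are only illustrative):

import { parse } from 'graphql';
import { loadSchema } from '../src/loading';
import { compileToIR } from '../src/compilation';

// loadSchema moved out of src/generate into src/loading in this release.
const schema = loadSchema(require.resolve('./starwars/schema.json'));

const document = parse(`
  query HeroName {
    hero {
      name
    }
  }
`);

// compileToIR replaces `new CompilationContext(schema, document)` followed by
// manual compileOperation/compileFragment calls: it returns the finished IR.
const { operations, fragments, typesUsed } = compileToIR(schema, document);

// Operations and fragments are plain objects keyed by name; each compiled
// selection now carries fragmentSpreads and inlineFragments where the old IR
// had typeConditions.
const queryIR = operations['HeroName'];
console.log(queryIR.fields, queryIR.fragmentsReferenced, typesUsed, fragments);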
@@ -113,2 +113,3 @@ 'use strict'; | ||
return downloadSchema; | ||
}(); | ||
}(); | ||
//# sourceMappingURL=downloadSchema.js.map |
@@ -88,2 +88,3 @@ 'use strict'; | ||
} | ||
} | ||
} | ||
//# sourceMappingURL=errors.js.map |
@@ -6,10 +6,9 @@ 'use strict'; | ||
}); | ||
exports.default = generate; | ||
exports.loadSchema = loadSchema; | ||
exports.loadAndMergeQueryDocuments = loadAndMergeQueryDocuments; | ||
var _path = require('path'); | ||
var _values = require('babel-runtime/core-js/object/values'); | ||
var _path2 = _interopRequireDefault(_path); | ||
var _values2 = _interopRequireDefault(_values); | ||
exports.default = generate; | ||
var _fs = require('fs'); | ||
@@ -19,10 +18,6 @@ | ||
var _mkdirp = require('mkdirp'); | ||
var _errors = require('./errors'); | ||
var _mkdirp2 = _interopRequireDefault(_mkdirp); | ||
var _loading = require('./loading'); | ||
var _graphql = require('graphql'); | ||
var _errors = require('./errors'); | ||
var _validation = require('./validation'); | ||
@@ -37,9 +32,9 @@ | ||
function generate(inputPaths, schemaPath, outputPath, target) { | ||
var schema = loadSchema(schemaPath); | ||
var schema = (0, _loading.loadSchema)(schemaPath); | ||
var document = loadAndMergeQueryDocuments(inputPaths); | ||
var document = (0, _loading.loadAndMergeQueryDocuments)(inputPaths); | ||
(0, _validation.validateQueryDocument)(schema, document); | ||
var context = new _compilation.CompilationContext(schema, document); | ||
var context = (0, _compilation.compileToIR)(schema, document); | ||
@@ -53,33 +48,7 @@ var output = target && target.toLowerCase() === 'json' ? generateIR(context) : (0, _swift.generateSource)(context); | ||
return (0, _compilation.stringifyIR)({ | ||
operations: context.operations.map(function (operation) { | ||
return context.compileOperation(operation); | ||
}), | ||
fragments: context.fragments.map(function (fragment) { | ||
return context.compileFragment(fragment); | ||
}) | ||
operations: (0, _values2.default)(context.operations), | ||
fragments: (0, _values2.default)(context.fragments), | ||
typesUsed: context.typesUsed | ||
}, '\t'); | ||
} | ||
function loadSchema(schemaPath) { | ||
if (!_fs2.default.existsSync(schemaPath)) { | ||
throw new _errors.ToolError('Cannot find GraphQL schema file: ' + schemaPath); | ||
} | ||
var schemaData = require(schemaPath); | ||
if (!schemaData.__schema) { | ||
throw new _errors.ToolError('GraphQL schema file should contain a valid GraphQL introspection query result'); | ||
} | ||
return (0, _graphql.buildClientSchema)(schemaData); | ||
} | ||
function loadAndMergeQueryDocuments(inputPaths) { | ||
var sources = inputPaths.map(function (inputPath) { | ||
var body = _fs2.default.readFileSync(inputPath, 'utf8'); | ||
return new _graphql.Source(body, inputPath); | ||
}); | ||
return (0, _graphql.concatAST)(sources.map(function (source) { | ||
return (0, _graphql.parse)(source); | ||
})); | ||
} | ||
//# sourceMappingURL=generate.js.map |
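The generate.js hunks keep the public generate(inputPaths, schemaPath, outputPath, target) signature but now delegate to the loading and compilation modules. A hedged sketch of calling it through the package's top-level export; the file paths below are placeholders, only the parameter order comes from the hunk above:

const { generate } = require('apollo-codegen');

// A target of 'json' routes through generateIR/stringifyIR; any other value
// goes through the Swift code generator, per the branch in the hunk above.
generate(
  ['./queries/HeroName.graphql'], // inputPaths: query documents to merge
  './schema.json',                // schemaPath: introspection query result
  './HeroNameAPI.swift',          // outputPath for the generated source
  'swift'                         // target: 'json' emits the IR instead of Swift
);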
@@ -19,2 +19,3 @@ 'use strict'; | ||
exports.downloadSchema = _downloadSchema3.default; | ||
exports.generate = _generate3.default; | ||
exports.generate = _generate3.default; | ||
//# sourceMappingURL=index.js.map |
@@ -6,29 +6,11 @@ 'use strict'; | ||
}); | ||
exports.generateSource = generateSource; | ||
var _changeCase = require('change-case'); | ||
var _codeGeneration = require('./codeGeneration'); | ||
var _printing = require('../utilities/printing'); | ||
var _strings = require('./strings'); | ||
var _types = require('./types'); | ||
var _operations = require('./operations'); | ||
var _fragments = require('./fragments'); | ||
function generateSource(context) { | ||
var operations = context.compileOperations(); | ||
var fragments = context.compileFragments(); | ||
var typeDeclarations = context.typesUsed.map(_types.typeDeclarationForGraphQLType); | ||
var operationClassDeclarations = operations.map(_operations.classDeclarationForOperation); | ||
var fragmentClassDeclarations = fragments.map(_fragments.classDeclarationForFragment); | ||
return (0, _printing.join)(['// This file was automatically generated and should not be edited.\n\n', importDeclarations() + '\n', (0, _printing.wrap)('\n', (0, _printing.join)(typeDeclarations, '\n\n'), '\n'), (0, _printing.wrap)('\n', (0, _printing.join)(operationClassDeclarations, '\n\n'), '\n'), (0, _printing.wrap)('\n', (0, _printing.join)(fragmentClassDeclarations, '\n\n'), '\n')]); | ||
} | ||
function importDeclarations() { | ||
return 'import Apollo'; | ||
} | ||
Object.defineProperty(exports, 'generateSource', { | ||
enumerable: true, | ||
get: function get() { | ||
return _codeGeneration.generateSource; | ||
} | ||
}); | ||
//# sourceMappingURL=index.js.map |
@@ -12,8 +12,9 @@ 'use strict'; | ||
function multilineString(string) { | ||
function multilineString(context, string) { | ||
var lines = string.split('\n'); | ||
return lines.map(function (line, index) { | ||
lines.forEach(function (line, index) { | ||
var isLastLine = index != lines.length - 1; | ||
return '"' + escapedString(line) + '"' + (isLastLine ? ' +' : ''); | ||
}).join('\n'); | ||
} | ||
context.printOnNewline('"' + escapedString(line) + '"' + (isLastLine ? ' +' : '')); | ||
}); | ||
} | ||
//# sourceMappingURL=strings.js.map |
@@ -6,2 +6,9 @@ 'use strict'; | ||
}); | ||
var _defineProperty2 = require('babel-runtime/helpers/defineProperty'); | ||
var _defineProperty3 = _interopRequireDefault(_defineProperty2); | ||
var _builtInScalarMap; | ||
exports.typeNameFromGraphQLType = typeNameFromGraphQLType; | ||
@@ -16,7 +23,11 @@ exports.typeDeclarationForGraphQLType = typeDeclarationForGraphQLType; | ||
function typeNameFromGraphQLType(type, unmodifiedTypeName) { | ||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } | ||
var builtInScalarMap = (_builtInScalarMap = {}, (0, _defineProperty3.default)(_builtInScalarMap, _graphql.GraphQLString.name, 'String'), (0, _defineProperty3.default)(_builtInScalarMap, _graphql.GraphQLInt.name, 'Int'), (0, _defineProperty3.default)(_builtInScalarMap, _graphql.GraphQLFloat.name, 'Float'), (0, _defineProperty3.default)(_builtInScalarMap, _graphql.GraphQLBoolean.name, 'Bool'), (0, _defineProperty3.default)(_builtInScalarMap, _graphql.GraphQLID.name, 'GraphQLID'), _builtInScalarMap); | ||
function typeNameFromGraphQLType(type, bareTypeName) { | ||
var nullable = arguments.length <= 2 || arguments[2] === undefined ? true : arguments[2]; | ||
if (type instanceof _graphql.GraphQLNonNull) { | ||
return typeNameFromGraphQLType(type.ofType, unmodifiedTypeName, false); | ||
return typeNameFromGraphQLType(type.ofType, bareTypeName, false); | ||
} | ||
@@ -26,7 +37,7 @@ | ||
if (type instanceof _graphql.GraphQLList) { | ||
typeName = '[' + typeNameFromGraphQLType(type.ofType, unmodifiedTypeName, true) + ']'; | ||
} else if (type === _graphql.GraphQLID) { | ||
typeName = 'GraphQLID'; | ||
typeName = '[' + typeNameFromGraphQLType(type.ofType, bareTypeName, true) + ']'; | ||
} else if (type instanceof _graphql.GraphQLScalarType) { | ||
typeName = builtInScalarMap[type.name] || _graphql.GraphQLString; | ||
} else { | ||
typeName = unmodifiedTypeName || type.name; | ||
typeName = bareTypeName || type.name; | ||
} | ||
@@ -37,9 +48,9 @@ | ||
function typeDeclarationForGraphQLType(type) { | ||
function typeDeclarationForGraphQLType(generator, type) { | ||
if (type instanceof _graphql.GraphQLEnumType) { | ||
return enumerationDeclaration(type); | ||
return enumerationDeclaration(generator, type); | ||
} | ||
} | ||
function enumerationDeclaration(type) { | ||
function enumerationDeclaration(generator, type) { | ||
var name = type.name; | ||
@@ -50,7 +61,13 @@ var description = type.description; | ||
var caseDeclarations = values.map(function (value) { | ||
return 'case ' + (0, _changeCase.camelCase)(value.name) + ' = "' + value.value + '"' + (0, _printing.wrap)(' /// ', value.description); | ||
generator.printNewlineIfNeeded(); | ||
generator.printOnNewline(description && '/// ' + description); | ||
generator.printOnNewline('public enum ' + name + ': String'); | ||
generator.withinBlock(function () { | ||
values.forEach(function (value) { | ||
return generator.printOnNewline('case ' + (0, _changeCase.camelCase)(value.name) + ' = "' + value.value + '"' + (0, _printing.wrap)(' /// ', value.description)); | ||
}); | ||
}); | ||
return (0, _printing.join)([description && '/// ' + description + '\n', 'public enum ' + name + ': String ', (0, _printing.block)(caseDeclarations), '\n\n', 'extension ' + name + ': JSONDecodable, JSONEncodable {}']); | ||
} | ||
generator.printNewline(); | ||
generator.printOnNewline('extension ' + name + ': JSONDecodable, JSONEncodable {}'); | ||
} | ||
//# sourceMappingURL=types.js.map |
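In the swift/types.js hunks, typeNameFromGraphQLType gains a built-in scalar map and typeDeclarationForGraphQLType now prints into a generator instead of returning a string. A minimal sketch of the new calling convention, taken from the updated Swift type tests near the end of this diff (the fixture path comes from those tests):

import { loadSchema } from '../../src/loading';
import CodeGenerator from '../../src/utilities/CodeGenerator';
import { typeDeclarationForGraphQLType } from '../../src/swift/types';

const schema = loadSchema(require.resolve('../starwars/schema.json'));

// The enum declaration is printed into the generator rather than returned,
// so the rendered Swift source is read back from generator.output.
const generator = new CodeGenerator();
typeDeclarationForGraphQLType(generator, schema.getType('Episode'));
console.log(generator.output);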
@@ -6,2 +6,4 @@ 'use strict'; | ||
}); | ||
exports.isBuiltInType = isBuiltInType; | ||
exports.isTypeProperSuperTypeOf = isTypeProperSuperTypeOf; | ||
exports.getOperationRootType = getOperationRootType; | ||
@@ -12,2 +14,13 @@ exports.getFieldDef = getFieldDef; | ||
function isBuiltInType(type) { | ||
if (type instanceof _graphql.GraphQLEnumType) return false; | ||
return true; | ||
} | ||
function isTypeProperSuperTypeOf(schema, maybeSuperType, subType) { | ||
return (0, _graphql.isEqualType)(maybeSuperType, subType) || (0, _graphql.isAbstractType)(maybeSuperType) && schema.isPossibleType(maybeSuperType, subType); | ||
} | ||
// Utility functions extracted from graphql-js | ||
/** | ||
@@ -42,4 +55,2 @@ * Extracts the root type of the operation from the schema. | ||
*/ | ||
// Utility functions extracted from graphql-js | ||
function getFieldDef(schema, parentType, fieldAST) { | ||
@@ -59,2 +70,3 @@ var name = fieldAST.name.value; | ||
} | ||
} | ||
} | ||
//# sourceMappingURL=graphql.js.map |
@@ -40,2 +40,3 @@ 'use strict'; | ||
return maybeString && maybeString.replace(/\n/g, '\n '); | ||
} | ||
} | ||
//# sourceMappingURL=printing.js.map |
@@ -71,2 +71,3 @@ 'use strict'; | ||
}; | ||
} | ||
} | ||
//# sourceMappingURL=validation.js.map |
{ | ||
"name": "apollo-codegen", | ||
"version": "0.5.1", | ||
"version": "0.6.0", | ||
"description": "Generate client code based on a GraphQL schema and query documents", | ||
@@ -8,7 +8,6 @@ "main": "./lib/index.js", | ||
"scripts": { | ||
"compile": "babel src --out-dir lib", | ||
"watch": "babel src --out-dir lib --watch", | ||
"compile": "babel src --out-dir lib --source-maps", | ||
"watch": "babel src --out-dir lib --watch --source-maps", | ||
"prepublish": "rm -rf lib && npm run compile", | ||
"test": "mocha --recursive --compilers js:babel-register", | ||
"regenerate-expected-output": "apollo-codegen generate test/starwars/HeroAndFriendsNames.graphql --schema test/starwars/schema.json --output test/swift/expectedOutput/HeroAndFriendsNamesAPI.swift" | ||
"test": "mocha --recursive --compilers js:babel-register" | ||
}, | ||
@@ -30,7 +29,7 @@ "repository": { | ||
"glob": "^7.0.5", | ||
"mocha": "^2.5.3", | ||
"source-map-support": "^0.4.2" | ||
"mocha": "^2.5.3" | ||
}, | ||
"dependencies": { | ||
"babel-runtime": "^6.11.6", | ||
"source-map-support": "^0.4.2", | ||
"change-case": "^3.0.0", | ||
@@ -37,0 +36,0 @@ "graphql": "^0.7.0", |
@@ -1,2 +0,2 @@ | ||
import chai, { assert, expect } from 'chai' | ||
import chai, { expect } from 'chai' | ||
import chaiSubset from 'chai-subset' | ||
@@ -16,8 +16,9 @@ chai.use(chaiSubset); | ||
import { loadSchema } from '../src/generate' | ||
import { CompilationContext, stringifyIR, printIR } from '../src/compilation' | ||
import { loadSchema } from '../src/loading' | ||
import { compileToIR, stringifyIR, printIR } from '../src/compilation' | ||
const schema = loadSchema(require.resolve('./starwars/schema.json')); | ||
describe('compilation', () => { | ||
describe('Compiling query documents', () => { | ||
it(`should include defined variables`, () => { | ||
@@ -32,7 +33,5 @@ const document = parse(` | ||
const context = new CompilationContext(schema, document); | ||
const { operations } = compileToIR(schema, document); | ||
const queryIR = context.compileOperation(context.operations[0]); | ||
expect(stringifyAndParseIR(queryIR)).to.containSubset({ | ||
expect(filteredIR(operations['HeroName'])).to.containSubset({ | ||
operationName: 'HeroName', | ||
@@ -54,10 +53,23 @@ variables: [ | ||
const context = new CompilationContext(schema, document); | ||
const { typesUsed } = compileToIR(schema, document); | ||
context.compileOperation(context.operations[0]); | ||
expect(filteredIR(typesUsed)).to.deep.equal(['Episode']); | ||
}); | ||
assert.deepEqual(stringifyAndParseIR(context.typesUsed), ['Episode']); | ||
it(`should keep track of types used in fields`, () => { | ||
const document = parse(` | ||
query Hero { | ||
hero { | ||
name | ||
appearsIn | ||
} | ||
} | ||
`); | ||
const { typesUsed } = compileToIR(schema, document); | ||
expect(filteredIR(typesUsed)).to.deep.equal(['Episode']); | ||
}); | ||
it(`should flatten inline fragments with the same parent type`, () => { | ||
it(`should recursively flatten inline fragments with type conditions that match the parent type`, () => { | ||
const document = parse(` | ||
@@ -79,8 +91,8 @@ query Hero { | ||
const context = new CompilationContext(schema, document); | ||
const { operations } = compileToIR(schema, document); | ||
const queryIR = context.compileOperation(context.operations[0]); | ||
expect(stringifyAndParseIR(queryIR)).to.containSubset({ | ||
expect(filteredIR(operations['Hero'])).to.deep.equal({ | ||
operationName: 'Hero', | ||
variables: [], | ||
fragmentsReferenced: [], | ||
fields: [ | ||
@@ -104,3 +116,4 @@ { | ||
], | ||
typeConditions: [] | ||
fragmentSpreads: [], | ||
inlineFragments: [] | ||
} | ||
@@ -111,3 +124,3 @@ ] | ||
it(`should expand fragment spreads with the same parent type recursively`, () => { | ||
it(`should recursively include fragment spreads with type conditions that match the parent type`, () => { | ||
const document = parse(` | ||
@@ -133,8 +146,7 @@ query Hero { | ||
const context = new CompilationContext(schema, document); | ||
const { operations, fragments } = compileToIR(schema, document); | ||
const queryIR = context.compileOperation(context.operations[0]); | ||
expect(stringifyAndParseIR(queryIR)).to.containSubset({ | ||
expect(filteredIR(operations['Hero'])).to.deep.equal({ | ||
operationName: 'Hero', | ||
variables: [], | ||
fragmentsReferenced: ['HeroDetails', 'MoreHeroDetails'], | ||
@@ -145,3 +157,2 @@ fields: [ | ||
type: 'Character', | ||
fragmentSpreads: ['HeroDetails', 'MoreHeroDetails'], | ||
fields: [ | ||
@@ -151,19 +162,13 @@ { | ||
type: 'ID!' | ||
}, | ||
{ name: 'appearsIn', | ||
type: '[Episode]!' | ||
}, | ||
{ | ||
name: 'name', | ||
type: 'String!' | ||
} | ||
], | ||
fragmentSpreads: ['HeroDetails', 'MoreHeroDetails'], | ||
inlineFragments: [], | ||
} | ||
] | ||
], | ||
}); | ||
const heroDetailsIR = context.compileFragment(context.fragmentNamed('HeroDetails')); | ||
expect(stringifyAndParseIR(heroDetailsIR)).to.containSubset({ | ||
expect(filteredIR(fragments['HeroDetails'])).to.deep.equal({ | ||
fragmentName: 'HeroDetails', | ||
typeCondition: 'Character', | ||
fields: [ | ||
@@ -174,5 +179,2 @@ { | ||
}, | ||
{ name: 'appearsIn', | ||
type: '[Episode]!' | ||
}, | ||
{ | ||
@@ -182,9 +184,10 @@ name: 'name', | ||
} | ||
] | ||
], | ||
fragmentSpreads: ['MoreHeroDetails'], | ||
inlineFragments: [] | ||
}); | ||
const moreHeroDetailsIR = context.compileFragment(context.fragmentNamed('MoreHeroDetails')); | ||
expect(stringifyAndParseIR(moreHeroDetailsIR)).to.containSubset({ | ||
expect(filteredIR(fragments['MoreHeroDetails'])).to.deep.equal({ | ||
fragmentName: 'MoreHeroDetails', | ||
typeCondition: 'Character', | ||
fields: [ | ||
@@ -198,7 +201,9 @@ { name: 'appearsIn', | ||
} | ||
] | ||
], | ||
fragmentSpreads: [], | ||
inlineFragments: [] | ||
}); | ||
}); | ||
it(`should expand fragment spreads with the same parent type at each nested level`, () => { | ||
it(`should include fragment spreads from subselections`, () => { | ||
const document = parse(` | ||
@@ -223,7 +228,5 @@ query HeroAndFriends { | ||
const context = new CompilationContext(schema, document); | ||
const { operations, fragments } = compileToIR(schema, document); | ||
const queryIR = context.compileOperation(context.operations[0]); | ||
expect(stringifyAndParseIR(queryIR)).to.containSubset({ | ||
expect(filteredIR(operations['HeroAndFriends'])).to.deep.equal({ | ||
operationName: 'HeroAndFriends', | ||
@@ -236,7 +239,5 @@ variables: [], | ||
type: 'Character', | ||
fragmentSpreads: ['HeroDetails'], | ||
fields: [ | ||
{ | ||
name: 'name', | ||
type: 'String!' | ||
{ name: 'appearsIn', | ||
type: '[Episode]!' | ||
}, | ||
@@ -247,9 +248,5 @@ { | ||
}, | ||
{ name: 'appearsIn', | ||
type: '[Episode]!' | ||
}, | ||
{ | ||
name: 'friends', | ||
type: '[Character]', | ||
fragmentSpreads: ['HeroDetails'], | ||
fields: [ | ||
@@ -259,10 +256,10 @@ { | ||
type: 'ID!' | ||
}, | ||
{ | ||
name: 'name', | ||
type: 'String!' | ||
} | ||
], | ||
fragmentSpreads: ['HeroDetails'], | ||
inlineFragments: [] | ||
} | ||
], | ||
fragmentSpreads: ['HeroDetails'], | ||
inlineFragments: [] | ||
} | ||
@@ -272,6 +269,5 @@ ] | ||
const heroDetailsIR = context.compileFragment(context.fragmentNamed('HeroDetails')); | ||
expect(stringifyAndParseIR(heroDetailsIR)).to.containSubset({ | ||
expect(filteredIR(fragments['HeroDetails'])).to.deep.equal({ | ||
fragmentName: 'HeroDetails', | ||
typeCondition: 'Character', | ||
fields: [ | ||
@@ -286,7 +282,9 @@ { | ||
} | ||
] | ||
], | ||
fragmentSpreads: [], | ||
inlineFragments: [] | ||
}); | ||
}); | ||
it(`should expand inline fragments with type conditions`, () => { | ||
it(`should include type conditions with merged fields for inline fragments`, () => { | ||
const document = parse(` | ||
@@ -306,8 +304,8 @@ query Hero { | ||
const context = new CompilationContext(schema, document); | ||
const { operations } = compileToIR(schema, document); | ||
const queryIR = context.compileOperation(context.operations[0]); | ||
expect(stringifyAndParseIR(queryIR)).to.containSubset({ | ||
expect(filteredIR(operations['Hero'])).to.deep.equal({ | ||
operationName: 'Hero', | ||
variables: [], | ||
fragmentsReferenced: [], | ||
fields: [ | ||
@@ -323,5 +321,6 @@ { | ||
], | ||
typeConditions: [ | ||
fragmentSpreads: [], | ||
inlineFragments: [ | ||
{ | ||
type: 'Droid', | ||
typeCondition: 'Droid', | ||
fields: [ | ||
@@ -337,5 +336,6 @@ { | ||
], | ||
fragmentSpreads: [] | ||
}, | ||
{ | ||
type: 'Human', | ||
typeCondition: 'Human', | ||
fields: [ | ||
@@ -351,2 +351,3 @@ { | ||
], | ||
fragmentSpreads: [] | ||
} | ||
@@ -359,3 +360,3 @@ ] | ||
it(`should expand fragment spreads with type conditions`, () => { | ||
it(`should include fragment spreads with type conditions`, () => { | ||
const document = parse(` | ||
@@ -379,8 +380,7 @@ query Hero { | ||
const context = new CompilationContext(schema, document); | ||
const { operations, fragments } = compileToIR(schema, document); | ||
const queryIR = context.compileOperation(context.operations[0]); | ||
expect(stringifyAndParseIR(queryIR)).to.containSubset({ | ||
expect(filteredIR(operations['Hero'])).to.deep.equal({ | ||
operationName: 'Hero', | ||
variables: [], | ||
fragmentsReferenced: ['DroidDetails', 'HumanDetails'], | ||
@@ -391,3 +391,3 @@ fields: [ | ||
type: 'Character', | ||
fragmentSpreads: [], | ||
fragmentSpreads: ['DroidDetails', 'HumanDetails'], | ||
fields: [ | ||
@@ -399,32 +399,3 @@ { | ||
], | ||
typeConditions: [ | ||
{ | ||
type: 'Droid', | ||
fragmentSpreads: ['DroidDetails'], | ||
fields: [ | ||
{ | ||
name: 'name', | ||
type: 'String!' | ||
}, | ||
{ | ||
name: 'primaryFunction', | ||
type: 'String' | ||
}, | ||
], | ||
}, | ||
{ | ||
type: 'Human', | ||
fragmentSpreads: ['HumanDetails'], | ||
fields: [ | ||
{ | ||
name: 'name', | ||
type: 'String!' | ||
}, | ||
{ | ||
name: 'height', | ||
type: 'Float' | ||
}, | ||
], | ||
} | ||
] | ||
inlineFragments: [] | ||
} | ||
@@ -434,6 +405,5 @@ ] | ||
const droidDetailsIR = context.compileFragment(context.fragmentNamed('DroidDetails')); | ||
expect(stringifyAndParseIR(droidDetailsIR)).to.containSubset({ | ||
expect(filteredIR(fragments['DroidDetails'])).to.deep.equal({ | ||
fragmentName: 'DroidDetails', | ||
typeCondition: 'Droid', | ||
fields: [ | ||
@@ -444,9 +414,10 @@ { | ||
} | ||
] | ||
], | ||
fragmentSpreads: [], | ||
inlineFragments: [] | ||
}); | ||
const humanDetailsIR = context.compileFragment(context.fragmentNamed('HumanDetails')); | ||
expect(stringifyAndParseIR(humanDetailsIR)).to.containSubset({ | ||
expect(filteredIR(fragments['HumanDetails'])).to.deep.equal({ | ||
fragmentName: 'HumanDetails', | ||
typeCondition: 'Human', | ||
fields: [ | ||
@@ -457,10 +428,13 @@ { | ||
} | ||
] | ||
], | ||
fragmentSpreads: [], | ||
inlineFragments: [] | ||
}); | ||
}); | ||
it(`should expand inline fragments with type conditions in fragments`, () => { | ||
it(`should not include type conditions for fragment spreads with type conditions that match the parent type`, () => { | ||
const document = parse(` | ||
query Hero { | ||
hero { | ||
name | ||
...HeroDetails | ||
@@ -472,17 +446,10 @@ } | ||
name | ||
... on Droid { | ||
primaryFunction | ||
} | ||
... on Human { | ||
height | ||
} | ||
} | ||
`); | ||
const context = new CompilationContext(schema, document); | ||
const { operations } = compileToIR(schema, document); | ||
const queryIR = context.compileOperation(context.operations[0]); | ||
expect(stringifyAndParseIR(queryIR)).to.containSubset({ | ||
expect(filteredIR(operations['Hero'])).to.deep.equal({ | ||
operationName: 'Hero', | ||
variables: [], | ||
fragmentsReferenced: ['HeroDetails'], | ||
@@ -500,40 +467,47 @@ fields: [ | ||
], | ||
typeConditions: [ | ||
{ | ||
type: 'Droid', | ||
fragmentSpreads: [], | ||
fields: [ | ||
{ | ||
name: 'name', | ||
type: 'String!' | ||
}, | ||
{ | ||
name: 'primaryFunction', | ||
type: 'String' | ||
}, | ||
], | ||
}, | ||
{ | ||
type: 'Human', | ||
fragmentSpreads: [], | ||
fields: [ | ||
{ | ||
name: 'name', | ||
type: 'String!' | ||
}, | ||
{ | ||
name: 'height', | ||
type: 'Float' | ||
}, | ||
], | ||
} | ||
] | ||
inlineFragments: [] | ||
} | ||
], | ||
}); | ||
}); | ||
it(`should include type conditions for inline fragments in fragments`, () => { | ||
const document = parse(` | ||
query Hero { | ||
hero { | ||
...HeroDetails | ||
} | ||
} | ||
fragment HeroDetails on Character { | ||
name | ||
... on Droid { | ||
primaryFunction | ||
} | ||
... on Human { | ||
height | ||
} | ||
} | ||
`); | ||
const { operations, fragments } = compileToIR(schema, document); | ||
expect(filteredIR(operations['Hero'])).to.deep.equal({ | ||
operationName: 'Hero', | ||
variables: [], | ||
fragmentsReferenced: ['HeroDetails'], | ||
fields: [ | ||
{ | ||
name: 'hero', | ||
type: 'Character', | ||
fields: [], | ||
fragmentSpreads: ['HeroDetails'], | ||
inlineFragments: [] | ||
} | ||
] | ||
}); | ||
const heroDetailsIR = context.compileFragment(context.fragmentNamed('HeroDetails')); | ||
expect(stringifyAndParseIR(heroDetailsIR)).to.containSubset({ | ||
expect(filteredIR(fragments['HeroDetails'])).to.deep.equal({ | ||
fragmentName: 'HeroDetails', | ||
typeCondition: 'Character', | ||
fields: [ | ||
@@ -545,6 +519,6 @@ { | ||
], | ||
typeConditions: [ | ||
fragmentSpreads: [], | ||
inlineFragments: [ | ||
{ | ||
type: 'Droid', | ||
fragmentSpreads: [], | ||
typeCondition: 'Droid', | ||
fields: [ | ||
@@ -560,6 +534,6 @@ { | ||
], | ||
fragmentSpreads: [] | ||
}, | ||
{ | ||
type: 'Human', | ||
fragmentSpreads: [], | ||
typeCondition: 'Human', | ||
fields: [ | ||
@@ -575,2 +549,3 @@ { | ||
], | ||
fragmentSpreads: [] | ||
} | ||
@@ -581,3 +556,3 @@ ] | ||
it(`should expand a nested inline fragment with a super type as a type condition`, () => { | ||
it(`should inherit type condition when nesting an inline fragment in an inline fragment with a more specific type condition`, () => { | ||
const document = parse(` | ||
@@ -595,8 +570,8 @@ query HeroName { | ||
const context = new CompilationContext(schema, document); | ||
const { operations } = compileToIR(schema, document); | ||
const queryIR = context.compileOperation(context.operations[0]); | ||
expect(stringifyAndParseIR(queryIR)).to.containSubset({ | ||
expect(filteredIR(operations['HeroName'])).to.deep.equal({ | ||
operationName: 'HeroName', | ||
variables: [], | ||
fragmentsReferenced: [], | ||
fields: [ | ||
@@ -607,5 +582,6 @@ { | ||
fields: [], | ||
typeConditions: [ | ||
fragmentSpreads: [], | ||
inlineFragments: [ | ||
{ | ||
type: 'Droid', | ||
typeCondition: 'Droid', | ||
fields: [ | ||
@@ -617,2 +593,3 @@ { | ||
], | ||
fragmentSpreads: [] | ||
} | ||
@@ -625,3 +602,3 @@ ] | ||
it(`should expand a nested inline fragment with a subtype as a type condition`, () => { | ||
it(`should not inherit type condition when nesting an inline fragment in an inline fragment with a less specific type condition`, () => { | ||
const document = parse(` | ||
@@ -639,8 +616,8 @@ query HeroName { | ||
const context = new CompilationContext(schema, document); | ||
const { operations } = compileToIR(schema, document); | ||
const queryIR = context.compileOperation(context.operations[0]); | ||
expect(stringifyAndParseIR(queryIR)).to.containSubset({ | ||
expect(filteredIR(operations['HeroName'])).to.deep.equal({ | ||
operationName: 'HeroName', | ||
variables: [], | ||
fragmentsReferenced: [], | ||
fields: [ | ||
@@ -651,5 +628,6 @@ { | ||
fields: [], | ||
typeConditions: [ | ||
fragmentSpreads: [], | ||
inlineFragments: [ | ||
{ | ||
type: 'Droid', | ||
typeCondition: 'Droid', | ||
fields: [ | ||
@@ -661,2 +639,3 @@ { | ||
], | ||
fragmentSpreads: [], | ||
} | ||
@@ -669,3 +648,3 @@ ] | ||
it(`should expand a nested fragment spread with a supertype as a type condition`, () => { | ||
it(`should inherit type condition when nesting a fragment spread in an inline fragment with a more specific type condition`, () => { | ||
const document = parse(` | ||
@@ -685,8 +664,7 @@ query HeroName { | ||
const context = new CompilationContext(schema, document); | ||
const { operations } = compileToIR(schema, document); | ||
const queryIR = context.compileOperation(context.operations[0]); | ||
expect(stringifyAndParseIR(queryIR)).to.containSubset({ | ||
expect(filteredIR(operations['HeroName'])).to.deep.equal({ | ||
operationName: 'HeroName', | ||
variables: [], | ||
fragmentsReferenced: ['HeroName'], | ||
@@ -697,14 +675,9 @@ fields: [ | ||
type: 'Character', | ||
fields: [], | ||
fragmentSpreads: [], | ||
fields: [], | ||
typeConditions: [ | ||
inlineFragments: [ | ||
{ | ||
type: 'Droid', | ||
typeCondition: 'Droid', | ||
fragmentSpreads: ['HeroName'], | ||
fields: [ | ||
{ | ||
name: 'name', | ||
type: 'String!' | ||
} | ||
], | ||
fields: [], | ||
} | ||
@@ -717,3 +690,3 @@ ] | ||
it(`should expand a nested fragment spread with a subtype as a type condition`, () => { | ||
it(`should not inherit type condition when nesting a fragment spread in an inline fragment with a less specific type condition`, () => { | ||
const document = parse(` | ||
@@ -733,8 +706,7 @@ query HeroName { | ||
const context = new CompilationContext(schema, document); | ||
const { operations } = compileToIR(schema, document); | ||
const queryIR = context.compileOperation(context.operations[0]); | ||
expect(stringifyAndParseIR(queryIR)).to.containSubset({ | ||
expect(filteredIR(operations['HeroName'])).to.deep.equal({ | ||
operationName: 'HeroName', | ||
variables: [], | ||
fragmentsReferenced: ['DroidName'], | ||
@@ -745,16 +717,5 @@ fields: [ | ||
type: 'Character', | ||
fragmentSpreads: [], | ||
fields: [], | ||
typeConditions: [ | ||
{ | ||
type: 'Droid', | ||
fragmentSpreads: ['DroidName'], | ||
fields: [ | ||
{ | ||
name: 'name', | ||
type: 'String!' | ||
} | ||
], | ||
} | ||
] | ||
fragmentSpreads: ['DroidName'], | ||
inlineFragments: [] | ||
} | ||
@@ -765,3 +726,3 @@ ] | ||
it(`should expand inline fragments on a union type`, () => { | ||
it(`should include type conditions for inline fragments on a union type`, () => { | ||
const document = parse(` | ||
@@ -782,8 +743,8 @@ query Search { | ||
const context = new CompilationContext(schema, document); | ||
const { operations } = compileToIR(schema, document); | ||
const queryIR = context.compileOperation(context.operations[0]); | ||
expect(stringifyAndParseIR(queryIR)).to.containSubset({ | ||
expect(filteredIR(operations['Search'])).to.deep.equal({ | ||
operationName: 'Search', | ||
variables: [], | ||
fragmentsReferenced: [], | ||
fields: [ | ||
@@ -794,5 +755,6 @@ { | ||
fields: [], | ||
typeConditions: [ | ||
fragmentSpreads: [], | ||
inlineFragments: [ | ||
{ | ||
type: 'Droid', | ||
typeCondition: 'Droid', | ||
fields: [ | ||
@@ -808,5 +770,6 @@ { | ||
], | ||
fragmentSpreads: [], | ||
}, | ||
{ | ||
type: 'Human', | ||
typeCondition: 'Human', | ||
fields: [ | ||
@@ -822,2 +785,3 @@ { | ||
], | ||
fragmentSpreads: [], | ||
} | ||
@@ -830,3 +794,3 @@ ] | ||
it(`should keep track of fragments referenced at a nested level`, () => { | ||
it(`should keep track of fragments referenced in a subselection`, () => { | ||
const document = parse(` | ||
@@ -847,10 +811,8 @@ query HeroAndFriends { | ||
const context = new CompilationContext(schema, document); | ||
const { operations } = compileToIR(schema, document); | ||
const queryIR = context.compileOperation(context.operations[0]); | ||
assert.deepEqual(queryIR.fragmentsReferenced, ['HeroDetails']); | ||
expect(operations['HeroAndFriends'].fragmentsReferenced).to.deep.equal(['HeroDetails']); | ||
}); | ||
it(`should keep track of fragments with a type condition referenced at a nested level`, () => { | ||
it(`should keep track of fragments referenced in a subselection nested in an inline fragment`, () => { | ||
const document = parse(` | ||
@@ -873,7 +835,5 @@ query HeroAndFriends { | ||
const context = new CompilationContext(schema, document); | ||
const { operations } = compileToIR(schema, document); | ||
const queryIR = context.compileOperation(context.operations[0]); | ||
assert.deepEqual(queryIR.fragmentsReferenced, ['HeroDetails']); | ||
expect(operations['HeroAndFriends'].fragmentsReferenced).to.deep.equal(['HeroDetails']); | ||
}); | ||
@@ -890,7 +850,6 @@ | ||
const document = parse(source); | ||
const context = new CompilationContext(schema, document); | ||
const queryIR = context.compileOperation(context.operations[0]); | ||
const { operations } = compileToIR(schema, document); | ||
assert.equal(queryIR.source, stripIndent` | ||
expect(operations['HeroName'].source).to.equal(stripIndent` | ||
query HeroName { | ||
@@ -912,7 +871,6 @@ hero { | ||
const document = parse(source); | ||
const context = new CompilationContext(schema, document); | ||
const fragmentIR = context.compileFragment(context.fragments[0]); | ||
const { fragments } = compileToIR(schema, document); | ||
assert.equal(fragmentIR.source, stripIndent` | ||
expect(fragments['HeroDetails'].source).to.equal(stripIndent` | ||
fragment HeroDetails on Character { | ||
@@ -926,4 +884,9 @@ __typename | ||
function stringifyAndParseIR(ir) { | ||
return JSON.parse(stringifyIR(ir)); | ||
function filteredIR(ir) { | ||
return JSON.parse(stringifyIR(ir), function(key, value) { | ||
if (key === 'source') { | ||
return undefined; | ||
} | ||
return value; | ||
}); | ||
} |
@@ -1,2 +0,2 @@ | ||
import { assert } from 'chai' | ||
import { expect } from 'chai'; | ||
@@ -6,57 +6,84 @@ import { stripIndent } from 'common-tags' | ||
import { | ||
GraphQLString, | ||
GraphQLInt, | ||
GraphQLFloat, | ||
GraphQLBoolean, | ||
GraphQLID, | ||
GraphQLString, | ||
GraphQLList, | ||
GraphQLNonNull | ||
GraphQLNonNull, | ||
GraphQLScalarType, | ||
} from 'graphql'; | ||
import { loadSchema } from '../../src/loading' | ||
const schema = loadSchema(require.resolve('../starwars/schema.json')); | ||
import CodeGenerator from '../../src/utilities/CodeGenerator'; | ||
import { typeNameFromGraphQLType, typeDeclarationForGraphQLType } from '../../src/swift/types' | ||
import { loadSchema } from '../../src/generate' | ||
describe('Swift code generation: Types', function() { | ||
describe('#typeNameFromGraphQLType()', function() { | ||
it('should return String? for GraphQLString', function() { | ||
expect(typeNameFromGraphQLType(GraphQLString)).to.equal('String?'); | ||
}); | ||
const schema = loadSchema(require.resolve('../starwars/schema.json')); | ||
it('should return String for GraphQLNonNull(GraphQLString)', function() { | ||
expect(typeNameFromGraphQLType(new GraphQLNonNull(GraphQLString))).to.equal('String'); | ||
}); | ||
describe('#typeNameFromGraphQLType()', () => { | ||
it('should return GraphQLID? for GraphQLID', () => { | ||
assert.equal(typeNameFromGraphQLType(GraphQLID), 'GraphQLID?'); | ||
}); | ||
it('should return [String?]? for GraphQLList(GraphQLString)', function() { | ||
expect(typeNameFromGraphQLType(new GraphQLList(GraphQLString))).to.equal('[String?]?'); | ||
}); | ||
it('should return String? for GraphQLString', () => { | ||
assert.equal(typeNameFromGraphQLType(GraphQLString), 'String?'); | ||
}); | ||
it('should return [String?] for GraphQLNonNull(GraphQLList(GraphQLString))', function() { | ||
expect(typeNameFromGraphQLType(new GraphQLNonNull(new GraphQLList(GraphQLString)))).to.equal('[String?]'); | ||
}); | ||
it('should return String for GraphQLNonNull(GraphQLString)', () => { | ||
assert.equal(typeNameFromGraphQLType(new GraphQLNonNull(GraphQLString)), 'String'); | ||
}); | ||
it('should return [String]? for GraphQLList(GraphQLNonNull(GraphQLString))', function() { | ||
expect(typeNameFromGraphQLType(new GraphQLList(new GraphQLNonNull(GraphQLString)))).to.equal('[String]?'); | ||
}); | ||
it('should return [String?]? for GraphQLList(GraphQLString)', () => { | ||
assert.equal(typeNameFromGraphQLType(new GraphQLList(GraphQLString)), '[String?]?'); | ||
}); | ||
it('should return [String] for GraphQLNonNull(GraphQLList(GraphQLNonNull(GraphQLString)))', function() { | ||
expect(typeNameFromGraphQLType(new GraphQLNonNull(new GraphQLList(new GraphQLNonNull(GraphQLString))))).to.equal('[String]'); | ||
}); | ||
it('should return [String?] for GraphQLNonNull(GraphQLList(GraphQLString))', () => { | ||
assert.equal(typeNameFromGraphQLType(new GraphQLNonNull(new GraphQLList(GraphQLString))), '[String?]'); | ||
}); | ||
it('should return Int? for GraphQLInt', function() { | ||
expect(typeNameFromGraphQLType(GraphQLInt)).to.equal('Int?'); | ||
}); | ||
it('should return [String]? for GraphQLList(GraphQLNonNull(GraphQLString))', () => { | ||
assert.equal(typeNameFromGraphQLType(new GraphQLList(new GraphQLNonNull(GraphQLString))), '[String]?'); | ||
}); | ||
it('should return Float? for GraphQLFloat', function() { | ||
expect(typeNameFromGraphQLType(GraphQLFloat)).to.equal('Float?'); | ||
}); | ||
it('should return [String] for GraphQLNonNull(GraphQLList(GraphQLNonNull(GraphQLString)))', () => { | ||
assert.equal(typeNameFromGraphQLType(new GraphQLNonNull(new GraphQLList(new GraphQLNonNull(GraphQLString)))), '[String]'); | ||
it('should return Bool? for GraphQLBoolean', function() { | ||
expect(typeNameFromGraphQLType(GraphQLBoolean)).to.equal('Bool?'); | ||
}); | ||
it('should return GraphQLID? for GraphQLID', function() { | ||
expect(typeNameFromGraphQLType(GraphQLID)).to.equal('GraphQLID?'); | ||
}); | ||
it('should return String? for a custom scalar type', function() { | ||
expect(typeNameFromGraphQLType(new GraphQLScalarType({ name: 'CustomScalarType', serialize: String }))).to.equal('String?'); | ||
}); | ||
}); | ||
}); | ||
describe('#typeNameFromGraphQLType()', () => { | ||
it('should return an enum declaration for a GraphQLEnumType', () => { | ||
assert.equal(typeDeclarationForGraphQLType(schema.getType('Episode')), stripIndent` | ||
/// The episodes in the Star Wars trilogy | ||
public enum Episode: String { | ||
case newhope = "NEWHOPE" /// Star Wars Episode IV: A New Hope, released in 1977. | ||
case empire = "EMPIRE" /// Star Wars Episode V: The Empire Strikes Back, released in 1980. | ||
case jedi = "JEDI" /// Star Wars Episode VI: Return of the Jedi, released in 1983. | ||
} | ||
describe('#typeDeclarationForGraphQLType()', function() { | ||
it('should generate an enum declaration for a GraphQLEnumType', function() { | ||
const generator = new CodeGenerator(); | ||
extension Episode: JSONDecodable, JSONEncodable {}` | ||
); | ||
typeDeclarationForGraphQLType(generator, schema.getType('Episode')); | ||
expect(generator.output).to.equal(stripIndent` | ||
/// The episodes in the Star Wars trilogy | ||
public enum Episode: String { | ||
case newhope = "NEWHOPE" /// Star Wars Episode IV: A New Hope, released in 1977. | ||
case empire = "EMPIRE" /// Star Wars Episode V: The Empire Strikes Back, released in 1980. | ||
case jedi = "JEDI" /// Star Wars Episode VI: Return of the Jedi, released in 1983. | ||
} | ||
extension Episode: JSONDecodable, JSONEncodable {} | ||
`); | ||
}); | ||
}); | ||
}); |
@@ -9,3 +9,3 @@ import { assert } from 'chai' | ||
loadAndMergeQueryDocuments, | ||
} from '../src/generate' | ||
} from '../src/loading' | ||
@@ -16,3 +16,3 @@ import { validateQueryDocument } from '../src/validation' | ||
describe('#validateQueryDocument()', () => { | ||
describe('Validation', () => { | ||
it(`should throw an error for AnonymousQuery.graphql`, () => { | ||
@@ -19,0 +19,0 @@ const inputPaths = [path.join(__dirname, './starwars/AnonymousQuery.graphql')]; |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
+ Added source-map-support@^0.4.2
+ Added source-map@0.5.7 (transitive)
+ Added source-map-support@0.4.18 (transitive)