apollo-codegen
Comparing version 0.5.0 to 0.5.1
@@ -169,4 +169,5 @@ 'use strict';
var fields = this.resolveFields(fragmentType, groupedFieldSet);
var typeConditions = this.resolveSubTypes(fragmentType, groupedFieldSet);
return { fragmentName: fragmentName, source: source, fields: fields };
return { fragmentName: fragmentName, source: source, fields: fields, typeConditions: typeConditions };
}
@@ -334,20 +335,13 @@ }, {
if ((0, _graphql.isCompositeType)(unmodifiedFieldType)) {
(function () {
var visitedFragmentSet = (0, _create2.default)(null);
var subSelectionSet = _this2.mergeSelectionSets(unmodifiedFieldType, fieldSet, visitedFragmentSet);
var visitedFragmentSet = (0, _create2.default)(null);
var subSelectionSet = this.mergeSelectionSets(unmodifiedFieldType, fieldSet, visitedFragmentSet);
field.fragmentSpreads = fragmentSpreadsForType(unmodifiedFieldType, visitedFragmentSet);
field.fragmentSpreads = fragmentSpreadsForType(unmodifiedFieldType, visitedFragmentSet);
if (fragmentsReferencedSet) {
_assign2.default.apply(Object, [fragmentsReferencedSet].concat((0, _toConsumableArray3.default)((0, _values2.default)(visitedFragmentSet))));
}
if (fragmentsReferencedSet) {
_assign2.default.apply(Object, [fragmentsReferencedSet].concat((0, _toConsumableArray3.default)((0, _values2.default)(visitedFragmentSet))));
}
field.fields = _this2.resolveFields(unmodifiedFieldType, subSelectionSet, fragmentsReferencedSet);
field.typeConditions = _this2.collectSubTypes(unmodifiedFieldType, subSelectionSet).map(function (subType) {
var fields = _this2.resolveFields(subType, subSelectionSet, fragmentsReferencedSet);
var fragmentSpreads = fragmentSpreadsForType(subType, visitedFragmentSet);
return { type: subType, fields: fields, fragmentSpreads: fragmentSpreads };
});
})();
field.fields = this.resolveFields(unmodifiedFieldType, subSelectionSet, fragmentsReferencedSet);
field.typeConditions = this.resolveSubTypes(unmodifiedFieldType, subSelectionSet, visitedFragmentSet, fragmentsReferencedSet);
}
@@ -375,2 +369,16 @@
}, {
key: 'resolveSubTypes',
value: function resolveSubTypes(parentType, groupedFieldSet) {
var _this3 = this;
var visitedFragmentSet = arguments.length <= 2 || arguments[2] === undefined ? (0, _create2.default)(null) : arguments[2];
var fragmentsReferencedSet = arguments[3];
return this.collectSubTypes(parentType, groupedFieldSet).map(function (type) {
var fields = _this3.resolveFields(type, groupedFieldSet, fragmentsReferencedSet);
var fragmentSpreads = fragmentSpreadsForType(type, visitedFragmentSet);
return { type: type, fields: fields, fragmentSpreads: fragmentSpreads };
});
}
}, {
key: 'collectSubTypes',
@@ -377,0 +385,0 @@ value: function collectSubTypes(parentType, groupedFieldSet) {
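Read back from the transpiled output above, the 0.5.1 change is twofold: compileFragment now attaches typeConditions to the fragment IR, and the inline mapping over collectSubTypes when resolving a composite field's sub-selection has been factored out into a new resolveSubTypes helper, which is also what attaches field.typeConditions. A rough source-level sketch follows; the names are taken from the transpiled code, so treat it as an approximation rather than the actual ES2015 source:

// Approximate de-transpilation of the 0.5.1 changes shown above; the actual
// source is not part of this diff and may differ in detail.
compileFragment(fragmentDefinition) {
  // ...fragmentName, fragmentType, source and groupedFieldSet are resolved as before...
  const fields = this.resolveFields(fragmentType, groupedFieldSet);
  // New: fragments now also carry per-subtype field sets.
  const typeConditions = this.resolveSubTypes(fragmentType, groupedFieldSet);
  return { fragmentName, source, fields, typeConditions };
}

resolveSubTypes(parentType, groupedFieldSet,
                visitedFragmentSet = Object.create(null),
                fragmentsReferencedSet) {
  // For every possible subtype of parentType, resolve the fields and the
  // fragment spreads that apply to that subtype.
  return this.collectSubTypes(parentType, groupedFieldSet).map(type => ({
    type,
    fields: this.resolveFields(type, groupedFieldSet, fragmentsReferencedSet),
    fragmentSpreads: fragmentSpreadsForType(type, visitedFragmentSet),
  }));
}

The remaining hunks cover the package.json version bump and the updated test expectations.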
{
"name": "apollo-codegen",
"version": "0.5.0",
"version": "0.5.1",
"description": "Generate client code based on a GraphQL schema and query documents",
@@ -5,0 +5,0 @@ "main": "./lib/index.js",
@@ -32,2 +32,3 @@ import chai, { assert, expect } from 'chai'
const context = new CompilationContext(schema, document);
const queryIR = context.compileOperation(context.operations[0]);
@@ -39,17 +40,2 @@
{ name: 'episode', type: 'Episode' }
],
fragmentsReferenced: [],
fields: [
{
name: 'hero',
type: 'Character',
fragmentSpreads: [],
fields: [
{
name: 'name',
type: 'String!'
}
],
typeConditions: [],
}
]
@@ -69,2 +55,3 @@ });
const context = new CompilationContext(schema, document);
context.compileOperation(context.operations[0]);
@@ -75,3 +62,3 @@
it(`should compile inline fragments recursively`, () => {
it(`should flatten inline fragments with the same parent type`, () => {
const document = parse(`
@@ -94,2 +81,3 @@ query Hero {
const context = new CompilationContext(schema, document);
const queryIR = context.compileOperation(context.operations[0]);
@@ -99,4 +87,2 @@
operationName: 'Hero',
variables: [],
fragmentsReferenced: [],
fields: [
@@ -106,3 +92,2 @@ {
type: 'Character',
fragmentSpreads: [],
fields: [
@@ -128,3 +113,3 @@ {
it(`should compile fragment spreads recursively`, () => {
it(`should expand fragment spreads with the same parent type recursively`, () => {
const document = parse(`
@@ -151,2 +136,3 @@ query Hero {
const context = new CompilationContext(schema, document);
const queryIR = context.compileOperation(context.operations[0]);
@@ -156,3 +142,2 @@
operationName: 'Hero',
variables: [],
fragmentsReferenced: ['HeroDetails', 'MoreHeroDetails'],
@@ -177,3 +162,2 @@ fields: [
],
typeConditions: []
}
@@ -218,3 +202,3 @@ ]
it(`should compile fragment spreads at each nested level`, () => {
it(`should expand fragment spreads with the same parent type at each nested level`, () => {
const document = parse(`
@@ -240,5 +224,5 @@ query HeroAndFriends {
const context = new CompilationContext(schema, document);
const queryIR = context.compileOperation(context.operations[0]);
expect(stringifyAndParseIR(queryIR)).to.containSubset({
@@ -279,6 +263,4 @@ operationName: 'HeroAndFriends',
],
typeConditions: []
}
],
typeConditions: []
}
@@ -305,3 +287,3 @@ ]
it(`should compile inline fragments with type conditions`, () => {
it(`should expand inline fragments with type conditions`, () => {
const document = parse(`
@@ -322,2 +304,3 @@ query Hero {
const context = new CompilationContext(schema, document);
const queryIR = context.compileOperation(context.operations[0]);
@@ -327,4 +310,2 @@
operationName: 'Hero',
variables: [],
fragmentsReferenced: [],
fields: [
@@ -334,3 +315,2 @@ {
type: 'Character',
fragmentSpreads: [],
fields: [
@@ -345,3 +325,2 @@ {
type: 'Droid',
fragmentSpreads: [],
fields: [
@@ -360,3 +339,2 @@ {
type: 'Human',
fragmentSpreads: [],
fields: [
@@ -379,3 +357,3 @@ {
it(`should compile fragment spreads with type conditions`, () => {
it(`should expand fragment spreads with type conditions`, () => {
const document = parse(`
@@ -400,2 +378,3 @@ query Hero {
const context = new CompilationContext(schema, document);
const queryIR = context.compileOperation(context.operations[0]);
@@ -405,3 +384,2 @@
operationName: 'Hero',
variables: [],
fragmentsReferenced: ['DroidDetails', 'HumanDetails'],
@@ -478,4 +456,118 @@ fields: [
it(`should compile a nested inline fragment with a super type as a type condition`, () => {
it(`should expand inline fragments with type conditions in fragments`, () => {
const document = parse(`
query Hero {
hero {
...HeroDetails
}
}
fragment HeroDetails on Character {
name
... on Droid {
primaryFunction
}
... on Human {
height
}
}
`);
const context = new CompilationContext(schema, document);
const queryIR = context.compileOperation(context.operations[0]);
expect(stringifyAndParseIR(queryIR)).to.containSubset({
operationName: 'Hero',
fragmentsReferenced: ['HeroDetails'],
fields: [
{
name: 'hero',
type: 'Character',
fragmentSpreads: ['HeroDetails'],
fields: [
{
name: 'name',
type: 'String!'
}
],
typeConditions: [
{
type: 'Droid',
fragmentSpreads: [],
fields: [
{
name: 'name',
type: 'String!'
},
{
name: 'primaryFunction',
type: 'String'
},
],
},
{
type: 'Human',
fragmentSpreads: [],
fields: [
{
name: 'name',
type: 'String!'
},
{
name: 'height',
type: 'Float'
},
],
}
]
}
]
});
const heroDetailsIR = context.compileFragment(context.fragmentNamed('HeroDetails'));
expect(stringifyAndParseIR(heroDetailsIR)).to.containSubset({
fragmentName: 'HeroDetails',
fields: [
{
name: 'name',
type: 'String!'
}
],
typeConditions: [
{
type: 'Droid',
fragmentSpreads: [],
fields: [
{
name: 'name',
type: 'String!'
},
{
name: 'primaryFunction',
type: 'String'
},
],
},
{
type: 'Human',
fragmentSpreads: [],
fields: [
{
name: 'name',
type: 'String!'
},
{
name: 'height',
type: 'Float'
},
],
}
]
});
});
it(`should expand a nested inline fragment with a super type as a type condition`, () => {
const document = parse(`
query HeroName {
@@ -493,2 +585,3 @@ hero {
const context = new CompilationContext(schema, document);
const queryIR = context.compileOperation(context.operations[0]);
@@ -498,4 +591,2 @@
operationName: 'HeroName',
variables: [],
fragmentsReferenced: [],
fields: [
@@ -505,3 +596,2 @@ {
type: 'Character',
fragmentSpreads: [],
fields: [],
@@ -511,3 +601,2 @@ typeConditions: [
type: 'Droid',
fragmentSpreads: [],
fields: [
@@ -526,3 +615,3 @@ {
it(`should compile a nested inline fragment with a subtype as a type condition`, () => {
it(`should expand a nested inline fragment with a subtype as a type condition`, () => {
const document = parse(`
@@ -541,2 +630,3 @@ query HeroName {
const context = new CompilationContext(schema, document);
const queryIR = context.compileOperation(context.operations[0]);
@@ -546,4 +636,2 @@
operationName: 'HeroName',
variables: [],
fragmentsReferenced: [],
fields: [
@@ -553,3 +641,2 @@ {
type: 'Character',
fragmentSpreads: [],
fields: [],
@@ -559,3 +646,2 @@ typeConditions: [
type: 'Droid',
fragmentSpreads: [],
fields: [
@@ -574,3 +660,3 @@ {
it(`should compile a nested fragment spread with a supertype as a type condition`, () => {
it(`should expand a nested fragment spread with a supertype as a type condition`, () => {
const document = parse(`
@@ -591,2 +677,3 @@ query HeroName {
const context = new CompilationContext(schema, document);
const queryIR = context.compileOperation(context.operations[0]);
@@ -596,3 +683,2 @@
operationName: 'HeroName',
variables: [],
fragmentsReferenced: ['HeroName'],
@@ -622,3 +708,3 @@ fields: [
it(`should compile a nested fragment spread with a subtype as a type condition`, () => {
it(`should expand a nested fragment spread with a subtype as a type condition`, () => {
const document = parse(`
@@ -639,2 +725,3 @@ query HeroName {
const context = new CompilationContext(schema, document);
const queryIR = context.compileOperation(context.operations[0]);
@@ -644,3 +731,2 @@
operationName: 'HeroName',
variables: [],
fragmentsReferenced: ['DroidName'],
@@ -670,3 +756,3 @@ fields: [
it(`should compile inline fragments on a union type`, () => {
it(`should expand inline fragments on a union type`, () => {
const document = parse(`
@@ -688,2 +774,3 @@ query Search {
const context = new CompilationContext(schema, document);
const queryIR = context.compileOperation(context.operations[0]);
@@ -693,4 +780,2 @@
operationName: 'Search',
variables: [],
fragmentsReferenced: [],
fields: [
@@ -700,3 +785,2 @@ {
type: '[SearchResult]',
fragmentSpreads: [],
fields: [],
@@ -706,3 +790,2 @@ typeConditions: [
type: 'Droid',
fragmentSpreads: [],
fields: [
@@ -721,3 +804,2 @@ {
type: 'Human',
fragmentSpreads: [],
fields: [
@@ -757,2 +839,3 @@ {
const context = new CompilationContext(schema, document);
const queryIR = context.compileOperation(context.operations[0]);
@@ -782,2 +865,3 @@
const context = new CompilationContext(schema, document);
const queryIR = context.compileOperation(context.operations[0]);
@@ -797,4 +881,4 @@
const document = parse(source);
const context = new CompilationContext(schema, document);
const context = new CompilationContext(schema, document);
const queryIR = context.compileOperation(context.operations[0]);
@@ -819,4 +903,4 @@
const document = parse(source);
const context = new CompilationContext(schema, document);
const context = new CompilationContext(schema, document);
const fragmentIR = context.compileFragment(context.fragments[0]);
@@ -823,0 +907,0 @@
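The updated tests compare the compiled IR with chai-subset's containSubset after passing it through stringifyAndParseIR. That helper is defined elsewhere in the test suite and is not part of this diff; presumably it round-trips the IR through JSON so that only plain, structurally comparable data remains and type references appear as the string names ('Character', 'String!') used in the expectations. A hypothetical stand-in under that assumption:

// Hypothetical stand-in for the helper used in the tests above; the real
// implementation is not shown in this diff and may differ.
function stringifyAndParseIR(ir) {
  // A JSON round-trip strips functions and prototypes from the IR, leaving
  // plain objects and arrays that containSubset can match structurally. The
  // expectations assume GraphQL type references serialize to their names.
  return JSON.parse(JSON.stringify(ir));
}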