@graphitation/supermassive
Advanced tools
Comparing version 3.0.0-alpha.7 to 3.0.0-alpha.8
# Change Log - @graphitation/supermassive | ||
This log was last generated on Tue, 12 Sep 2023 23:58:24 GMT and should not be manually modified. | ||
This log was last generated on Thu, 14 Sep 2023 19:05:21 GMT and should not be manually modified. | ||
<!-- Start content --> | ||
## 3.0.0-alpha.8 | ||
Thu, 14 Sep 2023 19:05:21 GMT | ||
### Changes | ||
- change signature of executeWithSchema and subscribeWithSchema functions (vladimir.razuvaev@gmail.com) | ||
## 3.0.0-alpha.7 | ||
Tue, 12 Sep 2023 23:58:24 GMT | ||
Tue, 12 Sep 2023 23:58:31 GMT | ||
@@ -11,0 +19,0 @@ ### Changes |
@@ -87,7 +87,4 @@ "use strict"; | ||
document, | ||
schema: { | ||
schemaId: "test", | ||
definitions: import_swapi_schema.typeDefs, | ||
resolvers: import_resolvers.default | ||
}, | ||
definitions: import_swapi_schema.typeDefs, | ||
resolvers: import_resolvers.default, | ||
contextValue: { | ||
@@ -94,0 +91,0 @@ models: import_models.default |
@@ -386,2 +386,3 @@ "use strict"; | ||
} | ||
const currentSchemaId = exeContext.schemaFragment.schemaId; | ||
return exeContext.schemaFragmentLoader( | ||
@@ -392,5 +393,5 @@ exeContext.schemaFragment, | ||
).then(({ mergedFragment, mergedContextValue }) => { | ||
if (exeContext.schemaFragment.schemaId !== mergedFragment.schemaId) { | ||
if (currentSchemaId !== mergedFragment.schemaId) { | ||
throw new Error( | ||
`Cannot use new schema fragment: old and new fragments describe different schemas: ${exeContext.schemaFragment.schemaId} vs. ${mergedFragment.schemaId}` | ||
`Cannot use new schema fragment: old and new fragments describe different schemas: ${currentSchemaId} vs. ${mergedFragment.schemaId}` | ||
); | ||
@@ -397,0 +398,0 @@ } |
import { PromiseOrValue } from "./jsutils/PromiseOrValue"; | ||
import { ExecutionResult, ExecutionWithSchemaArgs } from "./types"; | ||
export declare function executeWithSchema({ schema, document, rootValue, contextValue, variableValues, operationName, fieldResolver, typeResolver, fieldExecutionHooks, }: ExecutionWithSchemaArgs): PromiseOrValue<ExecutionResult>; | ||
export declare function executeWithSchema({ document, definitions, resolvers, rootValue, contextValue, variableValues, operationName, fieldResolver, typeResolver, fieldExecutionHooks, }: ExecutionWithSchemaArgs): PromiseOrValue<ExecutionResult>; | ||
//# sourceMappingURL=executeWithSchema.d.ts.map |
"use strict"; | ||
var __defProp = Object.defineProperty; | ||
var __defProps = Object.defineProperties; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropDescs = Object.getOwnPropertyDescriptors; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __getOwnPropSymbols = Object.getOwnPropertySymbols; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __propIsEnum = Object.prototype.propertyIsEnumerable; | ||
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; | ||
var __spreadValues = (a, b) => { | ||
for (var prop in b || (b = {})) | ||
if (__hasOwnProp.call(b, prop)) | ||
__defNormalProp(a, prop, b[prop]); | ||
if (__getOwnPropSymbols) | ||
for (var prop of __getOwnPropSymbols(b)) { | ||
if (__propIsEnum.call(b, prop)) | ||
__defNormalProp(a, prop, b[prop]); | ||
} | ||
return a; | ||
}; | ||
var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b)); | ||
var __export = (target, all) => { | ||
@@ -45,4 +28,5 @@ for (var name in all) | ||
function executeWithSchema({ | ||
schema, | ||
document, | ||
definitions, | ||
resolvers, | ||
rootValue, | ||
@@ -56,4 +40,4 @@ contextValue, | ||
}) { | ||
const { definitions } = (0, import_extractMinimalViableSchemaForRequestDocument.extractMinimalViableSchemaForRequestDocument)( | ||
(0, import_graphql.buildASTSchema)(schema.definitions), | ||
const extracted = (0, import_extractMinimalViableSchemaForRequestDocument.extractMinimalViableSchemaForRequestDocument)( | ||
(0, import_graphql.buildASTSchema)(definitions), | ||
document | ||
@@ -63,5 +47,7 @@ ); | ||
document, | ||
schemaFragment: __spreadProps(__spreadValues({}, schema), { | ||
definitions | ||
}), | ||
schemaFragment: { | ||
schemaId: "executeWithSchema", | ||
definitions: extracted.definitions, | ||
resolvers | ||
}, | ||
rootValue, | ||
@@ -68,0 +54,0 @@ contextValue, |
import { PromiseOrValue } from "./jsutils/PromiseOrValue"; | ||
import { ExecutionWithSchemaArgs, ExecutionResult } from "./types"; | ||
export declare function subscribeWithSchema({ schema, document, rootValue, contextValue, variableValues, operationName, fieldResolver, typeResolver, }: ExecutionWithSchemaArgs): PromiseOrValue<ExecutionResult>; | ||
export declare function subscribeWithSchema({ document, definitions, resolvers, rootValue, contextValue, variableValues, operationName, fieldResolver, typeResolver, }: ExecutionWithSchemaArgs): PromiseOrValue<ExecutionResult>; | ||
//# sourceMappingURL=subscribeWithSchema.d.ts.map |
"use strict"; | ||
var __defProp = Object.defineProperty; | ||
var __defProps = Object.defineProperties; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropDescs = Object.getOwnPropertyDescriptors; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __getOwnPropSymbols = Object.getOwnPropertySymbols; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __propIsEnum = Object.prototype.propertyIsEnumerable; | ||
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; | ||
var __spreadValues = (a, b) => { | ||
for (var prop in b || (b = {})) | ||
if (__hasOwnProp.call(b, prop)) | ||
__defNormalProp(a, prop, b[prop]); | ||
if (__getOwnPropSymbols) | ||
for (var prop of __getOwnPropSymbols(b)) { | ||
if (__propIsEnum.call(b, prop)) | ||
__defNormalProp(a, prop, b[prop]); | ||
} | ||
return a; | ||
}; | ||
var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b)); | ||
var __export = (target, all) => { | ||
@@ -45,4 +28,5 @@ for (var name in all) | ||
function subscribeWithSchema({ | ||
schema, | ||
document, | ||
definitions, | ||
resolvers, | ||
rootValue, | ||
@@ -55,4 +39,4 @@ contextValue, | ||
}) { | ||
const { definitions } = (0, import_extractMinimalViableSchemaForRequestDocument.extractMinimalViableSchemaForRequestDocument)( | ||
(0, import_graphql.buildASTSchema)(schema.definitions), | ||
const extracted = (0, import_extractMinimalViableSchemaForRequestDocument.extractMinimalViableSchemaForRequestDocument)( | ||
(0, import_graphql.buildASTSchema)(definitions), | ||
document | ||
@@ -62,5 +46,7 @@ ); | ||
document, | ||
schemaFragment: __spreadProps(__spreadValues({}, schema), { | ||
definitions | ||
}), | ||
schemaFragment: { | ||
schemaId: "subscribeWithSchema", | ||
definitions: extracted.definitions, | ||
resolvers | ||
}, | ||
rootValue, | ||
@@ -67,0 +53,0 @@ contextValue, |
@@ -115,2 +115,3 @@ import { GraphQLError, GraphQLFormattedError, GraphQLScalarType, DocumentNode, FragmentDefinitionNode, OperationDefinitionNode } from "graphql"; | ||
export interface CommonExecutionArgs { | ||
document: DocumentNode; | ||
rootValue?: unknown; | ||
@@ -129,3 +130,2 @@ contextValue?: unknown; | ||
export type ExecutionWithoutSchemaArgs = CommonExecutionArgs & { | ||
document: DocumentNode; | ||
schemaFragment: SchemaFragment; | ||
@@ -135,8 +135,4 @@ schemaFragmentLoader?: SchemaFragmentLoader; | ||
export type ExecutionWithSchemaArgs = CommonExecutionArgs & { | ||
document: DocumentNode; | ||
schema: { | ||
schemaId: string; | ||
definitions: DocumentNode; | ||
resolvers: UserResolvers; | ||
}; | ||
definitions: DocumentNode; | ||
resolvers: UserResolvers; | ||
}; | ||
@@ -143,0 +139,0 @@ export type SchemaId = string; |
{ | ||
"name": "@graphitation/supermassive", | ||
"license": "MIT", | ||
"version": "3.0.0-alpha.7", | ||
"version": "3.0.0-alpha.8", | ||
"main": "./lib/index", | ||
@@ -6,0 +6,0 @@ "repository": { |
163
README.md
@@ -30,18 +30,34 @@ # @graphitation/supermassive | ||
Currently, supermassive expects inlined types in normal GraphQL documents that are sent to it. It doesn't require having a schema, only the query document and resolver functions. We achieve this by running a pre-processing step on queries using the schema, in a same stage where `graphql` tags would normally be extracted and pre-parsed. Schema is often a very big part of the bundle and memory volume, so this drastically improves that and removes the need of creating a schema in runtime, which can also be very costly. | ||
Starting with version 3.0, supermassive expects a compact fragment of the schema alongside documents that are sent to it. | ||
It doesn't require having a full schema, only a small fragment necessary for a specific operation. | ||
In this initial phase, we will achieve the goal of tree-shaking the schema definitions. We do this by inlining required metadata into the documents that describe the operations, after which they can be executed with the need of the entire schema. This means overall bundle size will be decreased when only a subset of the schema is actually used, which pays off significantly when a host application introduces its first component(s) leveraging GraphQL. | ||
Schema fragment format is much more compact than the graphql-js AST. It is optimized for fast merging and serializing, | ||
so you can incrementally load and merge schema fragments as different operations are requested. | ||
### Possible future 1 - Relay IR | ||
Different strategies of "fragmentizing" the schema are possible, it is up to your specific case which strategy to choose. | ||
Below you can find some strategies that we found useful. | ||
Current implementation has some bundle size / memory cost because GraphQL AST format isn't super optimized and adding types to it makes it even worse. However there is an already type annotated AST format that is more compact - Relay IR. As Relay anyway needs IR to do its store operations, this wouldn't incur additional bundle cost to include them. Relay IR Printer would need to be modified to include more type information, but Relay IR is otherwise already typed and has all type information we | ||
need. | ||
#### 1. Schema fragment by operation | ||
Relay IR is more efficient because it allows aliasing common elements of the document, like types or selections, thus reducing the total document size. | ||
We can extract schema fragments by running a pre-processing step on queries, in the same stage where `graphql` tags would normally be extracted and pre-parsed. | ||
Supermassive package contains several handy utilities for this strategy. | ||
### Possible future 2 - pre-normalized executor | ||
#### 2. Schema modules | ||
If the schema is big enough, it is a common practice to split it into [multiple modules](https://the-guild.dev/graphql/modules/docs). | ||
In this case the natural strategy is to generate a single fragment per module. Supermassive supports custom fragment loader, | ||
so when an operation contains an unknown field or type, supermassive will request a fragment for it. | ||
This strategy implies "schema map" to properly map operations or individual types to appropriate schema modules | ||
(schema map generation is out of scope for supermassive itself). | ||
#### 3. Variation of the two | ||
Other strategies usually represent a variation of those two. E.g. a fragment per specific group of operations, group of modules, | ||
or mix of modules and operations. Supermassive is agnostic to your "fragmentizing" strategy. | ||
### Possible future - pre-normalized executor | ||
In a scenario where executor is running close to the client (sometimes even in same process or at least in same browser), it might be worth exploring removing some of the requirements imposed by the usual GraphQL transport - for example serialization. Not only GraphQL executors do the JSON serialization, but also they return the data that is optimized for transport and that matches the query tree. This means clients need to perform often expensive normalization. As traffic and message size might be less important in same process / same browser scenarios, it might be worthwhile exploring returning pre-normalized data from supermassive. This offers massive speedups for some clients like Apollo ([see benchmarks](https://github.com/vladar/graphql-normalized)). | ||
### Possible future 3 - Tree-shaking based on documents | ||
### Possible future - Tree-shaking based on documents | ||
@@ -77,3 +93,3 @@ Current implementation is more efficient in terms of bundles than one requiring full schema, but resolvers are also not always needed. By going through fields being selected in the documents, resolvers can be split or tree-shook to only load ones that are required for certain frontend bundle. | ||
### Possible future 4 - GraphQL-to-JS | ||
### Possible future - GraphQL-to-JS | ||
@@ -147,3 +163,3 @@ We can expand on the previous phase by ahead-of-time compiling the resolution of the operations, their field-resolvers, and invocation thereof into JavaScript code. This essentially does away with any need for AST of the operation during execution. This means execution will be faster as no more generic lookups and checks need to be performed. | ||
### Possible future 5 - persisted queries | ||
### Possible future - persisted queries | ||
@@ -182,11 +198,24 @@ we can make it possible to replace the operations at runtime using a simple identifier, thus allowing GraphQL clients to execute their operations using these identifiers that they obtain through a concept known as ["persisted queries"](https://relay.dev/docs/guides/persisted-queries/). This means that GraphQL clients that do not require graphql-js AST _themselves_ to operate, such as Relay, will be able to greatly reduce the size of the User-Experience bundles by entirely eliminating the document in favour of a short identifier. | ||
There are 3 main parts of Supermassive - the executor, query annotator and implicit resolver extractor. Executor is the part that actually runs the queries. It takes resolvers object instead of schema and annotated documents instead of normal documents. Query annotator processes query to include type information inside them. It can be run as part of query extraction stage in Relay Compiler or eg in `@graphitation/graphql-js-tag`. Implicit resolver extractor writes out resolvers for types that are only implicitly defined in GraphQL SDL, like Unions or Input Objects. It generates typescript file with extracted object that can be merged with the rest of the resolvers. | ||
There are 3 main parts of Supermassive: | ||
1. The executor | ||
2. Minimal viable schema fragment extractor and query annotator (for [schema fragment by operation](#1-schema-fragment-by-operation) strategy) | ||
3. Schema fragment encoder from/to graphql-js AST (for [schema modules](#2-schema-modules) strategy) + utilities for fragment merging | ||
Executor is the part that actually runs the queries. It takes operation document and schema fragment (composed of resolvers and compact type definitions) and optionally, schema fragment loader. | ||
The schema fragment extractor processes a query to extract the type definitions necessary to execute that query. | ||
Query annotator could be used to inline those type definitions into query itself (e.g. as a directive). | ||
Annotator can be run as part of query extraction stage in Relay Compiler or eg in `@graphitation/graphql-js-tag`. | ||
Schema fragment encoder is necessary to produce schema fragments based on [schema modules](#2-schema-modules). | ||
It takes graphql-js AST of the individual schema module and converts it to the compact schema fragment format. | ||
### Executor | ||
Two functions are provided - `executeWithSchema` and `executeWithoutSchema`. They attempt to match `graphql-js`'s `execute` function parameters. `executeWithSchema` fully matches it and is meant for development or testing. It does the transform and resolver extraction in runtime. `executeWithoutSchema` relies on those being done during compile/bundling time. | ||
Two functions are provided - `executeWithSchema` and `executeWithoutSchema`. They attempt to match `graphql-js`'s `execute` function parameters. `executeWithSchema` fully matches it and is meant for development or testing. It does the schema fragment extraction for operation in runtime. `executeWithoutSchema` expects schema fragment extracted at build/compile-time, as a required argument. | ||
```graphql | ||
```ts | ||
interface CommonExecutionArgs { | ||
resolvers: Resolvers; | ||
document: DocumentNode; | ||
rootValue?: unknown; | ||
@@ -200,27 +229,109 @@ contextValue?: unknown; | ||
type ExecutionWithoutSchemaArgs = CommonExecutionArgs & { | ||
document: DocumentNode; | ||
schemaFragment: SchemaFragment; | ||
schemaFragmentLoader?: SchemaFragmentLoader; | ||
}; | ||
type ExecutionWithSchemaArgs = CommonExecutionArgs & { | ||
document: UntypedDocumentNode; | ||
typeDefs: UntypedDocumentNode; | ||
definitions: DocumentNode; | ||
resolvers: UserResolvers; | ||
}; | ||
function executeWithoutSchema(args: ExecutionWithoutSchemaArgs): PromiseOrValue<ExecutionResult> | ||
function executeWithoutSchema( | ||
args: ExecutionWithoutSchemaArgs, | ||
): PromiseOrValue<ExecutionResult>; | ||
function executeWithSchema(args: ExecutionWithSchemaArgs): PromiseOrValue<ExecutionResult> | ||
function executeWithSchema( | ||
args: ExecutionWithSchemaArgs, | ||
): PromiseOrValue<ExecutionResult>; | ||
``` | ||
### Transform | ||
### Minimal viable schema extractor | ||
Supermassive requires annotated GraphQL documents. See [`@graphitation/supermassive-ast`](../supermassive-ast/). | ||
Extracts minimal schema fragment necessary for operation execution with supermassive. | ||
### Resolver extractor | ||
```ts | ||
export function extractMinimalViableSchemaForRequestDocument( | ||
schema: GraphQLSchema, | ||
requestDocument: DocumentNode, | ||
): { definitions: SchemaDefinitions; unknownDirectives: DirectiveNode[] }; | ||
``` | ||
Supermassive provides a bin command to extract implicit resolvers. See [`@graphitation/supermassive-extractors`](../supermassive-extractors/) and [`@graphitation/cli`](../cli). | ||
### Webpack Transform | ||
```sh | ||
supermassive extract-schema PATH_TO_TYPEDEFS.graphql | ||
Inlines schema definitions extracted with `extractMinimalViableSchemaForRequestDocument` into the `@schema` directive of each operation / fragment node. | ||
With `@graphitation/graphql-js-tag` and `@graphitation/ts-transform-graphql-js-tag` (in webpack config) | ||
```js | ||
import { buildASTSchema } from 'graphql' | ||
import { getTransformer } from "@graphitation/ts-transform-graphql-js-tag"; | ||
import { annotateDocumentGraphQLTransform } from "@graphitation/supermassive"; | ||
// ... | ||
{ | ||
test: /\.tsx?$/, | ||
loader: "ts-loader", | ||
options: { | ||
getCustomTransformers: () => ({ | ||
before: [ | ||
getTransformer({ | ||
graphqlTagModuleExport: "graphql", | ||
transformer: annotateDocumentGraphQLTransform( | ||
buildASTSchema({ | ||
fs.readFileSync( | ||
"PATH_TO_SCHEMA_TYPEDEFS.graphql", | ||
{ encoding: "utf-8" } | ||
), | ||
) | ||
), | ||
}), | ||
], | ||
}), | ||
}, | ||
}, | ||
} | ||
``` | ||
It generates `__generated__/NAME_OF_TYPEDEFS.ts` file, on top of which user provided resolvers can be merged when executing. | ||
### Schema definitions encoder | ||
Encodes SDL type definitions represented by graphql-js AST to compact format, necessary for execution with supermassive. | ||
```ts | ||
export function encodeASTSchema( | ||
schemaFragment: DocumentNode, | ||
): SchemaDefinitions[]; | ||
export function decodeASTSchema( | ||
encodedSchemaFragments: SchemaDefinitions[], | ||
): DocumentNode; | ||
``` | ||
Usage example: | ||
```js | ||
import { parse } from "graphql"; | ||
import { encodeASTSchema, decodeASTSchema } from "@graphitation/supermassive"; | ||
const typeDefs = parse( | ||
fs.readFileSync("PATH_TO_SCHEMA_TYPEDEFS.graphql", { encoding: "utf-8" }), | ||
); | ||
const encodedTypeDefs = encodeASTSchema(typeDefs); | ||
const decodedTypeDefs = decodeASTSchema(encodedTypeDefs); // decodedTypeDefs are same as typeDefs | ||
``` | ||
### Utilities for definitions and resolvers merging | ||
Utilities are useful when it is necessary to produce a single schema fragment from multiple schema fragments. | ||
```ts | ||
export function mergeSchemaDefinitions( | ||
accumulator: SchemaDefinitions, | ||
definitions: SchemaDefinitions[], | ||
): SchemaDefinitions; | ||
export function mergeResolvers( | ||
accumulator: Resolvers, | ||
resolvers: (Resolvers | Resolvers[])[], | ||
): Resolvers; | ||
``` |
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
333
1462737
14482