apollo-cache-inmemory - npm Package Compare versions

Comparing version 1.3.0-verify.4 to 1.3.0

lib/depTrackingCache.d.ts

CHANGELOG.md

@@ -10,2 +10,9 @@ # CHANGELOG

### 1.3.0
- Optimize repeated `apollo-cache-inmemory` reads by caching partial query
results. As a consequence, watched queries will not be rebroadcast unless the
data have changed.
[PR #3394](https://github.com/apollographql/apollo-client/pull/3394)
### 1.2.5

@@ -41,2 +48,6 @@

### vNEXT
- Optimize repeated `apollo-cache-inmemory` reads by caching partial query results
[#3394](https://github.com/apollographql/apollo-client/pull/3394)
### 1.2.0

@@ -43,0 +54,0 @@
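The 1.3.0 changelog entry above describes the new result caching: repeated reads against an unchanged store are served from a memoized result, and watched queries are only rebroadcast when the data actually change. The following is a minimal sketch of the observable effect using the public `writeQuery`/`readQuery` API; it assumes `graphql-tag` is available, and the query, field names, and the identity check are illustrative rather than taken from the package's documentation:

```js
const { InMemoryCache } = require('apollo-cache-inmemory');
const gql = require('graphql-tag');

const cache = new InMemoryCache();

const query = gql`
  {
    hero {
      __typename
      id
      name
    }
  }
`;

cache.writeQuery({
  query,
  data: { hero: { __typename: 'Hero', id: '1', name: 'Luke' } },
});

// With the 1.3.0 result caching, a second identical read against an
// unchanged store should be answered from the memoized result, so the two
// reads are expected to return the very same object. This is also what lets
// watch callbacks skip rebroadcasts when nothing has changed.
const first = cache.readQuery({ query });
const second = cache.readQuery({ query });
console.log(first === second); // expected: true (assumption based on the changelog)
```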

lib/bundle.umd.js
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('apollo-utilities'), require('graphql/language/printer'), require('graphql-anywhere'), require('apollo-cache')) :
typeof define === 'function' && define.amd ? define(['exports', 'apollo-utilities', 'graphql/language/printer', 'graphql-anywhere', 'apollo-cache'], factory) :
(factory((global.apollo = global.apollo || {}, global.apollo.cache = global.apollo.cache || {}, global.apollo.cache.inmemory = {}),global.apollo.utilities,global.print,global.graphqlAnywhere,global.apolloCache.core));
}(this, (function (exports,apolloUtilities,printer,graphqlAnywhere,apolloCache) { 'use strict';
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('apollo-utilities'), require('graphql/language/printer'), require('apollo-cache')) :
typeof define === 'function' && define.amd ? define(['exports', 'apollo-utilities', 'graphql/language/printer', 'apollo-cache'], factory) :
(factory((global.apollo = global.apollo || {}, global.apollo.cache = global.apollo.cache || {}, global.apollo.cache.inmemory = {}),global.apollo.utilities,global.print,global.apolloCache.core));
}(this, (function (exports,apolloUtilities,printer,apolloCache) { 'use strict';
graphqlAnywhere = graphqlAnywhere && graphqlAnywhere.hasOwnProperty('default') ? graphqlAnywhere['default'] : graphqlAnywhere;
var haveWarned = false;

@@ -37,4 +35,3 @@ var HeuristicFragmentMatcher = (function () {

}
context.returnPartialData = true;
return true;
return 'heuristic';
}

@@ -49,4 +46,3 @@ if (obj.__typename === typeCondition) {

'https://www.apollographql.com/docs/react/recipes/fragment-matching.html', 'error');
context.returnPartialData = true;
return true;
return 'heuristic';
};

@@ -98,2 +94,512 @@ return HeuristicFragmentMatcher;

var wrap = require('optimism').wrap;
var CacheKeyNode = (function () {
function CacheKeyNode() {
this.children = null;
this.key = null;
}
CacheKeyNode.prototype.lookup = function () {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i] = arguments[_i];
}
return this.lookupArray(args);
};
CacheKeyNode.prototype.lookupArray = function (array) {
var node = this;
array.forEach(function (value) {
node = node.getOrCreate(value);
});
return node.key || (node.key = Object.create(null));
};
CacheKeyNode.prototype.getOrCreate = function (value) {
var map = this.children || (this.children = new Map);
return map.get(value) || map.set(value, new CacheKeyNode()).get(value);
};
return CacheKeyNode;
}());
var hasOwn = Object.prototype.hasOwnProperty;
var DepTrackingCache = (function () {
function DepTrackingCache(data) {
if (data === void 0) { data = Object.create(null); }
var _this = this;
this.data = data;
this.depend = wrap(function (dataId) { return _this.data[dataId]; }, {
disposable: true,
makeCacheKey: function (dataId) {
return dataId;
}
});
}
DepTrackingCache.prototype.toObject = function () {
return this.data;
};
DepTrackingCache.prototype.get = function (dataId) {
this.depend(dataId);
return this.data[dataId];
};
DepTrackingCache.prototype.set = function (dataId, value) {
var oldValue = this.data[dataId];
if (value !== oldValue) {
this.data[dataId] = value;
this.depend.dirty(dataId);
}
};
DepTrackingCache.prototype.delete = function (dataId) {
if (hasOwn.call(this.data, dataId)) {
delete this.data[dataId];
this.depend.dirty(dataId);
}
};
DepTrackingCache.prototype.clear = function () {
this.replace(null);
};
DepTrackingCache.prototype.replace = function (newData) {
var _this = this;
if (newData) {
Object.keys(newData).forEach(function (dataId) {
_this.set(dataId, newData[dataId]);
});
Object.keys(this.data).forEach(function (dataId) {
if (!hasOwn.call(newData, dataId)) {
_this.delete(dataId);
}
});
}
else {
Object.keys(this.data).forEach(function (dataId) {
_this.delete(dataId);
});
}
};
return DepTrackingCache;
}());
function defaultNormalizedCacheFactory(seed) {
return new DepTrackingCache(seed);
}
var __assign = (undefined && undefined.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
var CIRCULAR = Object.create(null);
var QueryKeyMaker = (function () {
function QueryKeyMaker(cacheKeyRoot) {
this.cacheKeyRoot = cacheKeyRoot;
this.perQueryKeyMakers = new Map();
}
QueryKeyMaker.prototype.forQuery = function (document) {
if (!this.perQueryKeyMakers.has(document)) {
this.perQueryKeyMakers.set(document, new PerQueryKeyMaker(this.cacheKeyRoot, document));
}
return this.perQueryKeyMakers.get(document);
};
return QueryKeyMaker;
}());
var PerQueryKeyMaker = (function () {
function PerQueryKeyMaker(cacheKeyRoot, query) {
this.cacheKeyRoot = cacheKeyRoot;
this.query = query;
this.cache = new Map;
this.lookupArray = this.cacheMethod(this.lookupArray);
this.lookupObject = this.cacheMethod(this.lookupObject);
this.lookupFragmentSpread = this.cacheMethod(this.lookupFragmentSpread);
}
PerQueryKeyMaker.prototype.cacheMethod = function (method) {
var _this = this;
return function (value) {
if (_this.cache.has(value)) {
var cached = _this.cache.get(value);
if (cached === CIRCULAR) {
throw new Error("QueryKeyMaker cannot handle circular query structures");
}
return cached;
}
_this.cache.set(value, CIRCULAR);
try {
var result = method.call(_this, value);
_this.cache.set(value, result);
return result;
}
catch (e) {
_this.cache.delete(value);
throw e;
}
};
};
PerQueryKeyMaker.prototype.lookupQuery = function (document) {
return this.lookupObject(document);
};
PerQueryKeyMaker.prototype.lookupSelectionSet = function (selectionSet) {
return this.lookupObject(selectionSet);
};
PerQueryKeyMaker.prototype.lookupFragmentSpread = function (fragmentSpread) {
var name = fragmentSpread.name.value;
var fragment = null;
this.query.definitions.some(function (definition) {
if (definition.kind === "FragmentDefinition" &&
definition.name.value === name) {
fragment = definition;
return true;
}
});
return this.lookupObject(__assign({}, fragmentSpread, { fragment: fragment }));
};
PerQueryKeyMaker.prototype.lookupAny = function (value) {
if (Array.isArray(value)) {
return this.lookupArray(value);
}
if (typeof value === "object" && value !== null) {
if (value.kind === "FragmentSpread") {
return this.lookupFragmentSpread(value);
}
return this.lookupObject(value);
}
return value;
};
PerQueryKeyMaker.prototype.lookupArray = function (array) {
var elements = array.map(this.lookupAny, this);
return this.cacheKeyRoot.lookup(Array.prototype, this.cacheKeyRoot.lookupArray(elements));
};
PerQueryKeyMaker.prototype.lookupObject = function (object) {
var _this = this;
var keys = Object.keys(object);
var locIndex = keys.indexOf("loc");
if (locIndex >= 0)
keys.splice(locIndex, 1);
keys.sort();
var values = keys.map(function (key) { return _this.lookupAny(object[key]); });
return this.cacheKeyRoot.lookup(Object.getPrototypeOf(object), this.cacheKeyRoot.lookupArray(keys), this.cacheKeyRoot.lookupArray(values));
};
return PerQueryKeyMaker;
}());
var __assign$1 = (undefined && undefined.__assign) || function () {
__assign$1 = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign$1.apply(this, arguments);
};
var StoreReader = (function () {
function StoreReader(_a) {
var _b = _a === void 0 ? {} : _a, _c = _b.addTypename, addTypename = _c === void 0 ? false : _c, _d = _b.cacheKeyRoot, cacheKeyRoot = _d === void 0 ? new CacheKeyNode : _d;
var reader = this;
var executeStoreQuery = reader.executeStoreQuery, executeSelectionSet = reader.executeSelectionSet;
reader.addTypename = addTypename;
reader.cacheKeyRoot = cacheKeyRoot;
reader.keyMaker = new QueryKeyMaker(cacheKeyRoot);
this.executeStoreQuery = wrap(function (options) {
return executeStoreQuery.call(reader, options);
}, {
makeCacheKey: function (_a) {
var query = _a.query, rootValue = _a.rootValue, contextValue = _a.contextValue, variableValues = _a.variableValues;
if (contextValue.store instanceof DepTrackingCache) {
return reader.cacheKeyRoot.lookup(reader.keyMaker.forQuery(query).lookupQuery(query), contextValue.store, JSON.stringify(variableValues));
}
}
});
this.executeSelectionSet = wrap(function (options) {
return executeSelectionSet.call(reader, options);
}, {
makeCacheKey: function (_a) {
var selectionSet = _a.selectionSet, rootValue = _a.rootValue, execContext = _a.execContext;
if (execContext.contextValue.store instanceof DepTrackingCache) {
return reader.cacheKeyRoot.lookup(reader.keyMaker.forQuery(execContext.query).lookupSelectionSet(selectionSet), execContext.contextValue.store, JSON.stringify(execContext.variableValues), rootValue.id);
}
}
});
}
StoreReader.prototype.readQueryFromStore = function (options) {
var optsPatch = { returnPartialData: false };
return this.diffQueryAgainstStore(__assign$1({}, options, optsPatch)).result;
};
StoreReader.prototype.diffQueryAgainstStore = function (_a) {
var store = _a.store, query = _a.query, variables = _a.variables, previousResult = _a.previousResult, _b = _a.returnPartialData, returnPartialData = _b === void 0 ? true : _b, _c = _a.rootId, rootId = _c === void 0 ? 'ROOT_QUERY' : _c, fragmentMatcherFunction = _a.fragmentMatcherFunction, config = _a.config;
var queryDefinition = apolloUtilities.getQueryDefinition(query);
variables = apolloUtilities.assign({}, apolloUtilities.getDefaultValues(queryDefinition), variables);
var context = {
store: store,
dataIdFromObject: (config && config.dataIdFromObject) || null,
cacheRedirects: (config && config.cacheRedirects) || {},
};
var execResult = this.executeStoreQuery({
query: query,
rootValue: {
type: 'id',
id: rootId,
generated: true,
typename: 'Query',
},
contextValue: context,
variableValues: variables,
fragmentMatcher: fragmentMatcherFunction,
});
var hasMissingFields = execResult.missing && execResult.missing.length > 0;
if (hasMissingFields && !returnPartialData) {
execResult.missing.forEach(function (info) {
if (info.tolerable)
return;
throw new Error("Can't find field " + info.fieldName + " on object (" + info.objectId + ") " + JSON.stringify(store.get(info.objectId), null, 2) + ".");
});
}
if (previousResult) {
if (apolloUtilities.isEqual(previousResult, execResult.result)) {
execResult.result = previousResult;
}
}
return {
result: execResult.result,
complete: !hasMissingFields,
};
};
StoreReader.prototype.executeStoreQuery = function (_a) {
var query = _a.query, rootValue = _a.rootValue, contextValue = _a.contextValue, variableValues = _a.variableValues, _b = _a.fragmentMatcher, fragmentMatcher = _b === void 0 ? function () { return true; } : _b;
var mainDefinition = apolloUtilities.getMainDefinition(query);
var fragments = apolloUtilities.getFragmentDefinitions(query);
var fragmentMap = apolloUtilities.createFragmentMap(fragments);
var execContext = {
query: query,
fragmentMap: fragmentMap,
contextValue: contextValue,
variableValues: variableValues,
fragmentMatcher: fragmentMatcher,
};
return this.executeSelectionSet({
selectionSet: mainDefinition.selectionSet,
rootValue: rootValue,
execContext: execContext,
parentKind: mainDefinition.kind,
});
};
StoreReader.prototype.executeSelectionSet = function (_a) {
var _this = this;
var selectionSet = _a.selectionSet, rootValue = _a.rootValue, execContext = _a.execContext, parentKind = _a.parentKind;
var fragmentMap = execContext.fragmentMap, contextValue = execContext.contextValue, variables = execContext.variableValues;
var finalResult = {
result: {},
};
var didReadTypename = false;
function handleMissing(result) {
var _a;
if (result.missing) {
finalResult.missing = finalResult.missing || [];
(_a = finalResult.missing).push.apply(_a, result.missing);
}
return result.result;
}
var handleSelection = function (selection) {
var _a;
if (!apolloUtilities.shouldInclude(selection, variables)) {
return;
}
if (apolloUtilities.isField(selection)) {
var fieldResult = handleMissing(_this.executeField(selection, rootValue, execContext));
var keyName = apolloUtilities.resultKeyNameFromField(selection);
if (keyName === "__typename") {
didReadTypename = true;
}
if (typeof fieldResult !== 'undefined') {
merge(finalResult.result, (_a = {},
_a[keyName] = fieldResult,
_a));
}
}
else {
var fragment = void 0;
if (apolloUtilities.isInlineFragment(selection)) {
fragment = selection;
}
else {
fragment = fragmentMap[selection.name.value];
if (!fragment) {
throw new Error("No fragment named " + selection.name.value);
}
}
var typeCondition = fragment.typeCondition.name.value;
var match = execContext.fragmentMatcher(rootValue, typeCondition, contextValue);
if (match) {
var fragmentExecResult = _this.executeSelectionSet({
selectionSet: fragment.selectionSet,
rootValue: rootValue,
execContext: execContext,
parentKind: fragment.kind,
});
if (match === 'heuristic' && fragmentExecResult.missing) {
fragmentExecResult = __assign$1({}, fragmentExecResult, { missing: fragmentExecResult.missing.map(function (info) {
return __assign$1({}, info, { tolerable: true });
}) });
}
merge(finalResult.result, handleMissing(fragmentExecResult));
}
}
};
selectionSet.selections.forEach(handleSelection);
if (!didReadTypename &&
this.addTypename &&
parentKind !== "OperationDefinition") {
handleSelection({
kind: "Field",
name: {
kind: "Name",
value: "__typename",
},
});
}
return finalResult;
};
StoreReader.prototype.executeField = function (field, rootValue, execContext) {
var variables = execContext.variableValues, contextValue = execContext.contextValue;
var fieldName = field.name.value;
var args = apolloUtilities.argumentsObjectFromField(field, variables);
var info = {
resultKey: apolloUtilities.resultKeyNameFromField(field),
directives: apolloUtilities.getDirectiveInfoFromField(field, variables),
};
var readStoreResult = readStoreResolver(fieldName, rootValue, args, contextValue, info);
if (!field.selectionSet) {
return readStoreResult;
}
if (readStoreResult.result == null) {
return readStoreResult;
}
function handleMissing(res) {
var missing = null;
if (readStoreResult.missing) {
missing = missing || [];
missing.push.apply(missing, readStoreResult.missing);
}
if (res.missing) {
missing = missing || [];
missing.push.apply(missing, res.missing);
}
return {
result: res.result,
missing: missing,
};
}
if (Array.isArray(readStoreResult.result)) {
return handleMissing(this.executeSubSelectedArray(field, readStoreResult.result, execContext));
}
return handleMissing(this.executeSelectionSet({
selectionSet: field.selectionSet,
rootValue: readStoreResult.result,
execContext: execContext,
parentKind: field.kind,
}));
};
StoreReader.prototype.executeSubSelectedArray = function (field, result, execContext) {
var _this = this;
var missing = null;
function handleMissing(childResult) {
if (childResult.missing) {
missing = missing || [];
missing.push.apply(missing, childResult.missing);
}
return childResult.result;
}
result = result.map(function (item) {
if (item === null) {
return null;
}
if (Array.isArray(item)) {
return handleMissing(_this.executeSubSelectedArray(field, item, execContext));
}
return handleMissing(_this.executeSelectionSet({
selectionSet: field.selectionSet,
rootValue: item,
execContext: execContext,
parentKind: field.kind,
}));
});
return { result: result, missing: missing };
};
return StoreReader;
}());
function assertIdValue(idValue) {
if (!apolloUtilities.isIdValue(idValue)) {
throw new Error("Encountered a sub-selection on the query, but the store doesn't have an object reference. This should never happen during normal use unless you have custom code that is directly manipulating the store; please file an issue.");
}
}
function readStoreResolver(fieldName, idValue, args, context, _a) {
var resultKey = _a.resultKey, directives = _a.directives;
assertIdValue(idValue);
var objId = idValue.id;
var obj = context.store.get(objId);
var storeKeyName = fieldName;
if (args || directives) {
storeKeyName = apolloUtilities.getStoreKeyName(storeKeyName, args, directives);
}
var fieldValue = void 0;
if (obj) {
fieldValue = obj[storeKeyName];
if (typeof fieldValue === 'undefined' &&
context.cacheRedirects &&
(obj.__typename || objId === 'ROOT_QUERY')) {
var typename = obj.__typename || 'Query';
var type = context.cacheRedirects[typename];
if (type) {
var resolver = type[fieldName];
if (resolver) {
fieldValue = resolver(obj, args, {
getCacheKey: function (storeObj) {
return apolloUtilities.toIdValue({
id: context.dataIdFromObject(storeObj),
typename: storeObj.__typename,
});
},
});
}
}
}
}
if (typeof fieldValue === 'undefined') {
return {
result: fieldValue,
missing: [{
objectId: objId,
fieldName: storeKeyName,
tolerable: false,
}],
};
}
if (apolloUtilities.isJsonValue(fieldValue)) {
fieldValue = fieldValue.json;
}
return {
result: fieldValue,
};
}
var hasOwn$1 = Object.prototype.hasOwnProperty;
function merge(target, source) {
if (source !== null && typeof source === 'object' &&
source !== target) {
if (Object.isExtensible && !Object.isExtensible(target)) {
target = __assign$1({}, target);
}
Object.keys(source).forEach(function (sourceKey) {
var sourceVal = source[sourceKey];
if (!hasOwn$1.call(target, sourceKey)) {
target[sourceKey] = sourceVal;
}
else {
target[sourceKey] = merge(target[sourceKey], sourceVal);
}
});
}
return target;
}
var ObjectCache = (function () {

@@ -124,3 +630,3 @@ function ObjectCache(data) {

}());
function defaultNormalizedCacheFactory(seed) {
function defaultNormalizedCacheFactory$1(seed) {
return new ObjectCache(seed);

@@ -142,4 +648,4 @@ }

})();
var __assign = (undefined && undefined.__assign) || function () {
__assign = Object.assign || function(t) {
var __assign$2 = (undefined && undefined.__assign) || function () {
__assign$2 = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {

@@ -152,3 +658,3 @@ s = arguments[i];

};
return __assign.apply(this, arguments);
return __assign$2.apply(this, arguments);
};

@@ -170,63 +676,66 @@ var WriteError = (function (_super) {

}
function writeQueryToStore(_a) {
var result = _a.result, query = _a.query, _b = _a.storeFactory, storeFactory = _b === void 0 ? defaultNormalizedCacheFactory : _b, _c = _a.store, store = _c === void 0 ? storeFactory() : _c, variables = _a.variables, dataIdFromObject = _a.dataIdFromObject, _d = _a.fragmentMap, fragmentMap = _d === void 0 ? {} : _d, fragmentMatcherFunction = _a.fragmentMatcherFunction;
var queryDefinition = apolloUtilities.getQueryDefinition(query);
variables = apolloUtilities.assign({}, apolloUtilities.getDefaultValues(queryDefinition), variables);
try {
return writeSelectionSetToStore({
var TYPENAME_FIELD = {
kind: 'Field',
name: {
kind: 'Name',
value: '__typename',
},
};
var StoreWriter = (function () {
function StoreWriter(_a) {
var _b = (_a === void 0 ? {} : _a).addTypename, addTypename = _b === void 0 ? false : _b;
this.addTypename = addTypename;
}
StoreWriter.prototype.writeQueryToStore = function (_a) {
var query = _a.query, result = _a.result, _b = _a.store, store = _b === void 0 ? defaultNormalizedCacheFactory() : _b, variables = _a.variables, dataIdFromObject = _a.dataIdFromObject, fragmentMatcherFunction = _a.fragmentMatcherFunction;
return this.writeResultToStore({
dataId: 'ROOT_QUERY',
result: result,
selectionSet: queryDefinition.selectionSet,
context: {
store: store,
storeFactory: storeFactory,
processedData: {},
variables: variables,
dataIdFromObject: dataIdFromObject,
fragmentMap: fragmentMap,
fragmentMatcherFunction: fragmentMatcherFunction,
},
document: query,
store: store,
variables: variables,
dataIdFromObject: dataIdFromObject,
fragmentMatcherFunction: fragmentMatcherFunction,
});
}
catch (e) {
throw enhanceErrorWithDocument(e, query);
}
}
function writeResultToStore(_a) {
var dataId = _a.dataId, result = _a.result, document = _a.document, _b = _a.storeFactory, storeFactory = _b === void 0 ? defaultNormalizedCacheFactory : _b, _c = _a.store, store = _c === void 0 ? storeFactory() : _c, variables = _a.variables, dataIdFromObject = _a.dataIdFromObject, fragmentMatcherFunction = _a.fragmentMatcherFunction;
var operationDefinition = apolloUtilities.getOperationDefinition(document);
var selectionSet = operationDefinition.selectionSet;
var fragmentMap = apolloUtilities.createFragmentMap(apolloUtilities.getFragmentDefinitions(document));
variables = apolloUtilities.assign({}, apolloUtilities.getDefaultValues(operationDefinition), variables);
try {
return writeSelectionSetToStore({
result: result,
dataId: dataId,
selectionSet: selectionSet,
context: {
store: store,
storeFactory: storeFactory,
processedData: {},
variables: variables,
dataIdFromObject: dataIdFromObject,
fragmentMap: fragmentMap,
fragmentMatcherFunction: fragmentMatcherFunction,
},
});
}
catch (e) {
throw enhanceErrorWithDocument(e, document);
}
}
function writeSelectionSetToStore(_a) {
var result = _a.result, dataId = _a.dataId, selectionSet = _a.selectionSet, context = _a.context;
var variables = context.variables, store = context.store, fragmentMap = context.fragmentMap;
selectionSet.selections.forEach(function (selection) {
var included = apolloUtilities.shouldInclude(selection, variables);
if (apolloUtilities.isField(selection)) {
var resultFieldKey = apolloUtilities.resultKeyNameFromField(selection);
var value = result[resultFieldKey];
if (included) {
};
StoreWriter.prototype.writeResultToStore = function (_a) {
var dataId = _a.dataId, result = _a.result, document = _a.document, _b = _a.store, store = _b === void 0 ? defaultNormalizedCacheFactory() : _b, variables = _a.variables, dataIdFromObject = _a.dataIdFromObject, fragmentMatcherFunction = _a.fragmentMatcherFunction;
var operationDefinition = apolloUtilities.getOperationDefinition(document);
try {
return this.writeSelectionSetToStore({
result: result,
dataId: dataId,
selectionSet: operationDefinition.selectionSet,
context: {
store: store,
processedData: {},
variables: apolloUtilities.assign({}, apolloUtilities.getDefaultValues(operationDefinition), variables),
dataIdFromObject: dataIdFromObject,
fragmentMap: apolloUtilities.createFragmentMap(apolloUtilities.getFragmentDefinitions(document)),
fragmentMatcherFunction: fragmentMatcherFunction,
},
parentKind: operationDefinition.kind,
});
}
catch (e) {
throw enhanceErrorWithDocument(e, document);
}
};
StoreWriter.prototype.writeSelectionSetToStore = function (_a) {
var _this = this;
var result = _a.result, dataId = _a.dataId, selectionSet = _a.selectionSet, context = _a.context, parentKind = _a.parentKind;
var variables = context.variables, store = context.store, fragmentMap = context.fragmentMap;
var didWriteTypename = false;
selectionSet.selections.forEach(function (selection) {
if (!apolloUtilities.shouldInclude(selection, variables)) {
return;
}
if (apolloUtilities.isField(selection)) {
var resultFieldKey = apolloUtilities.resultKeyNameFromField(selection);
var value = result[resultFieldKey];
if (resultFieldKey === "__typename") {
didWriteTypename = true;
}
if (typeof value !== 'undefined') {
writeFieldToStore({
_this.writeFieldToStore({
dataId: dataId,

@@ -249,40 +758,164 @@ value: value,

}
else {
var fragment = void 0;
if (apolloUtilities.isInlineFragment(selection)) {
fragment = selection;
}
else {
fragment = (fragmentMap || {})[selection.name.value];
if (!fragment) {
throw new Error("No fragment named " + selection.name.value + ".");
}
}
var matches = true;
if (context.fragmentMatcherFunction && fragment.typeCondition) {
var idValue = apolloUtilities.toIdValue({ id: 'self', typename: undefined });
var fakeContext = {
store: new ObjectCache({ self: result }),
cacheRedirects: {},
};
var match = context.fragmentMatcherFunction(idValue, fragment.typeCondition.name.value, fakeContext);
if (!apolloUtilities.isProduction() && match === 'heuristic') {
console.error('WARNING: heuristic fragment matching going on!');
}
matches = !!match;
}
if (matches) {
_this.writeSelectionSetToStore({
result: result,
selectionSet: fragment.selectionSet,
dataId: dataId,
context: context,
parentKind: fragment.kind,
});
}
}
});
if (!didWriteTypename &&
this.addTypename &&
parentKind !== "OperationDefinition" &&
typeof result.__typename === "string") {
this.writeFieldToStore({
dataId: dataId,
value: result.__typename,
field: TYPENAME_FIELD,
context: context,
});
}
return store;
};
StoreWriter.prototype.writeFieldToStore = function (_a) {
var field = _a.field, value = _a.value, dataId = _a.dataId, context = _a.context;
var _b;
var variables = context.variables, dataIdFromObject = context.dataIdFromObject, store = context.store;
var storeValue;
var storeObject;
var storeFieldName = apolloUtilities.storeKeyNameFromField(field, variables);
if (!field.selectionSet || value === null) {
storeValue =
value != null && typeof value === 'object'
?
{ type: 'json', json: value }
:
value;
}
else if (Array.isArray(value)) {
var generatedId = dataId + "." + storeFieldName;
storeValue = this.processArrayValue(value, generatedId, field.selectionSet, context, field.kind);
}
else {
var fragment = void 0;
if (apolloUtilities.isInlineFragment(selection)) {
fragment = selection;
var valueDataId = dataId + "." + storeFieldName;
var generated = true;
if (!isGeneratedId(valueDataId)) {
valueDataId = '$' + valueDataId;
}
else {
fragment = (fragmentMap || {})[selection.name.value];
if (!fragment) {
throw new Error("No fragment named " + selection.name.value + ".");
if (dataIdFromObject) {
var semanticId = dataIdFromObject(value);
if (semanticId && isGeneratedId(semanticId)) {
throw new Error('IDs returned by dataIdFromObject cannot begin with the "$" character.');
}
if (semanticId ||
(typeof semanticId === 'number' && semanticId === 0)) {
valueDataId = semanticId;
generated = false;
}
}
var matches = true;
if (context.fragmentMatcherFunction && fragment.typeCondition) {
var idValue = apolloUtilities.toIdValue({ id: 'self', typename: undefined });
var fakeContext = {
store: new ObjectCache({ self: result }),
returnPartialData: false,
hasMissingField: false,
cacheRedirects: {},
};
matches = context.fragmentMatcherFunction(idValue, fragment.typeCondition.name.value, fakeContext);
if (!apolloUtilities.isProduction() && fakeContext.returnPartialData) {
console.error('WARNING: heuristic fragment matching going on!');
if (!isDataProcessed(valueDataId, field, context.processedData)) {
this.writeSelectionSetToStore({
dataId: valueDataId,
result: value,
selectionSet: field.selectionSet,
context: context,
parentKind: field.kind,
});
}
var typename = value.__typename;
storeValue = apolloUtilities.toIdValue({ id: valueDataId, typename: typename }, generated);
storeObject = store.get(dataId);
var escapedId = storeObject && storeObject[storeFieldName];
if (escapedId !== storeValue && apolloUtilities.isIdValue(escapedId)) {
var hadTypename = escapedId.typename !== undefined;
var hasTypename = typename !== undefined;
var typenameChanged = hadTypename && hasTypename && escapedId.typename !== typename;
if (generated && !escapedId.generated && !typenameChanged) {
throw new Error("Store error: the application attempted to write an object with no provided id" +
(" but the store already contains an id of " + escapedId.id + " for this object. The selectionSet") +
" that was trying to be written is:\n" +
printer.print(field));
}
if (hadTypename && !hasTypename) {
throw new Error("Store error: the application attempted to write an object with no provided typename" +
(" but the store already contains an object with typename of " + escapedId.typename + " for the object of id " + escapedId.id + ". The selectionSet") +
" that was trying to be written is:\n" +
printer.print(field));
}
if (escapedId.generated) {
if (typenameChanged) {
if (!generated) {
store.delete(escapedId.id);
}
}
else {
mergeWithGenerated(escapedId.id, storeValue.id, store);
}
}
}
if (included && matches) {
writeSelectionSetToStore({
result: result,
selectionSet: fragment.selectionSet,
dataId: dataId,
}
storeObject = store.get(dataId);
if (!storeObject || !apolloUtilities.isEqual(storeValue, storeObject[storeFieldName])) {
store.set(dataId, __assign$2({}, storeObject, (_b = {}, _b[storeFieldName] = storeValue, _b)));
}
};
StoreWriter.prototype.processArrayValue = function (value, generatedId, selectionSet, context, parentKind) {
var _this = this;
return value.map(function (item, index) {
if (item === null) {
return null;
}
var itemDataId = generatedId + "." + index;
if (Array.isArray(item)) {
return _this.processArrayValue(item, itemDataId, selectionSet, context, parentKind);
}
var generated = true;
if (context.dataIdFromObject) {
var semanticId = context.dataIdFromObject(item);
if (semanticId) {
itemDataId = semanticId;
generated = false;
}
}
if (!isDataProcessed(itemDataId, selectionSet, context.processedData)) {
_this.writeSelectionSetToStore({
dataId: itemDataId,
result: item,
selectionSet: selectionSet,
context: context,
parentKind: parentKind,
});
}
}
});
return store;
}
return apolloUtilities.toIdValue({ id: itemDataId, typename: item.__typename }, generated);
});
};
return StoreWriter;
}());
function isGeneratedId(id) {

@@ -292,13 +925,26 @@ return id[0] === '$';

function mergeWithGenerated(generatedKey, realKey, cache) {
if (generatedKey === realKey) {
return false;
}
var generated = cache.get(generatedKey);
var real = cache.get(realKey);
var madeChanges = false;
Object.keys(generated).forEach(function (key) {
var value = generated[key];
var realValue = real[key];
if (apolloUtilities.isIdValue(value) && isGeneratedId(value.id) && apolloUtilities.isIdValue(realValue)) {
mergeWithGenerated(value.id, realValue.id, cache);
if (apolloUtilities.isIdValue(value) &&
isGeneratedId(value.id) &&
apolloUtilities.isIdValue(realValue) &&
!apolloUtilities.isEqual(value, realValue) &&
mergeWithGenerated(value.id, realValue.id, cache)) {
madeChanges = true;
}
cache.delete(generatedKey);
cache.set(realKey, __assign({}, generated, real));
});
cache.delete(generatedKey);
var newRealValue = __assign$2({}, generated, real);
if (apolloUtilities.isEqual(newRealValue, real)) {
return madeChanges;
}
cache.set(realKey, newRealValue);
return true;
}

@@ -322,120 +968,5 @@ function isDataProcessed(dataId, field, processedData) {

}
function writeFieldToStore(_a) {
var field = _a.field, value = _a.value, dataId = _a.dataId, context = _a.context;
var _b;
var variables = context.variables, dataIdFromObject = context.dataIdFromObject, store = context.store;
var storeValue;
var storeObject;
var storeFieldName = apolloUtilities.storeKeyNameFromField(field, variables);
var shouldMerge = false;
var generatedKey = '';
if (!field.selectionSet || value === null) {
storeValue =
value != null && typeof value === 'object'
?
{ type: 'json', json: value }
:
value;
}
else if (Array.isArray(value)) {
var generatedId = dataId + "." + storeFieldName;
storeValue = processArrayValue(value, generatedId, field.selectionSet, context);
}
else {
var valueDataId = dataId + "." + storeFieldName;
var generated = true;
if (!isGeneratedId(valueDataId)) {
valueDataId = '$' + valueDataId;
}
if (dataIdFromObject) {
var semanticId = dataIdFromObject(value);
if (semanticId && isGeneratedId(semanticId)) {
throw new Error('IDs returned by dataIdFromObject cannot begin with the "$" character.');
}
if (semanticId || (typeof semanticId === 'number' && semanticId === 0)) {
valueDataId = semanticId;
generated = false;
}
}
if (!isDataProcessed(valueDataId, field, context.processedData)) {
writeSelectionSetToStore({
dataId: valueDataId,
result: value,
selectionSet: field.selectionSet,
context: context,
});
}
var typename = value.__typename;
storeValue = apolloUtilities.toIdValue({ id: valueDataId, typename: typename }, generated);
storeObject = store.get(dataId);
var escapedId = storeObject && storeObject[storeFieldName];
if (escapedId !== storeValue && apolloUtilities.isIdValue(escapedId)) {
var hadTypename = escapedId.typename !== undefined;
var hasTypename = typename !== undefined;
var typenameChanged = hadTypename && hasTypename && escapedId.typename !== typename;
if (generated && !escapedId.generated && !typenameChanged) {
throw new Error("Store error: the application attempted to write an object with no provided id" +
(" but the store already contains an id of " + escapedId.id + " for this object. The selectionSet") +
" that was trying to be written is:\n" +
printer.print(field));
}
if (hadTypename && !hasTypename) {
throw new Error("Store error: the application attempted to write an object with no provided typename" +
(" but the store already contains an object with typename of " + escapedId.typename + " for the object of id " + escapedId.id + ". The selectionSet") +
" that was trying to be written is:\n" +
printer.print(field));
}
if (escapedId.generated) {
generatedKey = escapedId.id;
if (typenameChanged) {
if (!generated) {
store.delete(generatedKey);
}
}
else {
shouldMerge = true;
}
}
}
}
var newStoreObj = __assign({}, store.get(dataId), (_b = {}, _b[storeFieldName] = storeValue, _b));
if (shouldMerge) {
mergeWithGenerated(generatedKey, storeValue.id, store);
}
storeObject = store.get(dataId);
if (!storeObject || storeValue !== storeObject[storeFieldName]) {
store.set(dataId, newStoreObj);
}
}
function processArrayValue(value, generatedId, selectionSet, context) {
return value.map(function (item, index) {
if (item === null) {
return null;
}
var itemDataId = generatedId + "." + index;
if (Array.isArray(item)) {
return processArrayValue(item, itemDataId, selectionSet, context);
}
var generated = true;
if (context.dataIdFromObject) {
var semanticId = context.dataIdFromObject(item);
if (semanticId) {
itemDataId = semanticId;
generated = false;
}
}
if (!isDataProcessed(itemDataId, selectionSet, context.processedData)) {
writeSelectionSetToStore({
dataId: itemDataId,
result: item,
selectionSet: selectionSet,
context: context,
});
}
return apolloUtilities.toIdValue({ id: itemDataId, typename: item.__typename }, generated);
});
}
var __assign$1 = (undefined && undefined.__assign) || function () {
__assign$1 = Object.assign || function(t) {
var __assign$3 = (undefined && undefined.__assign) || function () {
__assign$3 = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {

@@ -448,154 +979,4 @@ s = arguments[i];

};
return __assign$1.apply(this, arguments);
return __assign$3.apply(this, arguments);
};
var ID_KEY = typeof Symbol !== 'undefined' ? Symbol('id') : '@@id';
function readQueryFromStore(options) {
var optsPatch = { returnPartialData: false };
return diffQueryAgainstStore(__assign$1({}, options, optsPatch)).result;
}
var readStoreResolver = function (fieldName, idValue, args, context, _a) {
var resultKey = _a.resultKey, directives = _a.directives;
assertIdValue(idValue);
var objId = idValue.id;
var obj = context.store.get(objId);
var storeKeyName = fieldName;
if (args || directives) {
storeKeyName = apolloUtilities.getStoreKeyName(storeKeyName, args, directives);
}
var fieldValue = void 0;
if (obj) {
fieldValue = obj[storeKeyName];
if (typeof fieldValue === 'undefined' &&
context.cacheRedirects &&
(obj.__typename || objId === 'ROOT_QUERY')) {
var typename = obj.__typename || 'Query';
var type = context.cacheRedirects[typename];
if (type) {
var resolver = type[fieldName];
if (resolver) {
fieldValue = resolver(obj, args, {
getCacheKey: function (storeObj) {
return apolloUtilities.toIdValue({
id: context.dataIdFromObject(storeObj),
typename: storeObj.__typename,
});
},
});
}
}
}
}
if (typeof fieldValue === 'undefined') {
if (!context.returnPartialData) {
throw new Error("Can't find field " + storeKeyName + " on object (" + objId + ") " + JSON.stringify(obj, null, 2) + ".");
}
context.hasMissingField = true;
return fieldValue;
}
if (apolloUtilities.isJsonValue(fieldValue)) {
if (idValue.previousResult &&
apolloUtilities.isEqual(idValue.previousResult[resultKey], fieldValue.json)) {
return idValue.previousResult[resultKey];
}
return fieldValue.json;
}
if (idValue.previousResult) {
fieldValue = addPreviousResultToIdValues(fieldValue, idValue.previousResult[resultKey]);
}
return fieldValue;
};
function diffQueryAgainstStore(_a) {
var store = _a.store, query = _a.query, variables = _a.variables, previousResult = _a.previousResult, _b = _a.returnPartialData, returnPartialData = _b === void 0 ? true : _b, _c = _a.rootId, rootId = _c === void 0 ? 'ROOT_QUERY' : _c, fragmentMatcherFunction = _a.fragmentMatcherFunction, config = _a.config;
var queryDefinition = apolloUtilities.getQueryDefinition(query);
variables = apolloUtilities.assign({}, apolloUtilities.getDefaultValues(queryDefinition), variables);
var context = {
store: store,
returnPartialData: returnPartialData,
dataIdFromObject: (config && config.dataIdFromObject) || null,
cacheRedirects: (config && config.cacheRedirects) || {},
hasMissingField: false,
};
var rootIdValue = {
type: 'id',
id: rootId,
previousResult: previousResult,
};
var result = graphqlAnywhere(readStoreResolver, query, rootIdValue, context, variables, {
fragmentMatcher: fragmentMatcherFunction,
resultMapper: resultMapper,
});
return {
result: result,
complete: !context.hasMissingField,
};
}
function assertIdValue(idValue) {
if (!apolloUtilities.isIdValue(idValue)) {
throw new Error("Encountered a sub-selection on the query, but the store doesn't have an object reference. This should never happen during normal use unless you have custom code that is directly manipulating the store; please file an issue.");
}
}
function addPreviousResultToIdValues(value, previousResult) {
if (apolloUtilities.isIdValue(value)) {
return __assign$1({}, value, { previousResult: previousResult });
}
else if (Array.isArray(value)) {
var idToPreviousResult_1 = new Map();
if (Array.isArray(previousResult)) {
previousResult.forEach(function (item) {
if (item && item[ID_KEY]) {
idToPreviousResult_1.set(item[ID_KEY], item);
}
});
}
return value.map(function (item, i) {
var itemPreviousResult = previousResult && previousResult[i];
if (apolloUtilities.isIdValue(item)) {
itemPreviousResult =
idToPreviousResult_1.get(item.id) || itemPreviousResult;
}
return addPreviousResultToIdValues(item, itemPreviousResult);
});
}
return value;
}
function resultMapper(resultFields, idValue) {
if (idValue.previousResult) {
var currentResultKeys_1 = Object.keys(resultFields);
var sameAsPreviousResult = Object.keys(idValue.previousResult).every(function (key) { return currentResultKeys_1.indexOf(key) > -1; }) &&
currentResultKeys_1.every(function (key) {
return areNestedArrayItemsStrictlyEqual(resultFields[key], idValue.previousResult[key]);
});
if (sameAsPreviousResult) {
return idValue.previousResult;
}
}
Object.defineProperty(resultFields, ID_KEY, {
enumerable: false,
configurable: true,
writable: false,
value: idValue.id,
});
return resultFields;
}
function areNestedArrayItemsStrictlyEqual(a, b) {
if (a === b) {
return true;
}
if (!Array.isArray(a) || !Array.isArray(b) || a.length !== b.length) {
return false;
}
return a.every(function (item, i) { return areNestedArrayItemsStrictlyEqual(item, b[i]); });
}
var __assign$2 = (undefined && undefined.__assign) || function () {
__assign$2 = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign$2.apply(this, arguments);
};
var RecordingCache = (function () {

@@ -614,3 +995,3 @@ function RecordingCache(data) {

RecordingCache.prototype.toObject = function () {
return __assign$2({}, this.data, this.recordedData);
return __assign$3({}, this.data, this.recordedData);
};

@@ -638,3 +1019,3 @@ RecordingCache.prototype.get = function (dataId) {

this.clear();
this.recordedData = __assign$2({}, newData);
this.recordedData = __assign$3({}, newData);
};

@@ -661,4 +1042,4 @@ return RecordingCache;

})();
var __assign$3 = (undefined && undefined.__assign) || function () {
__assign$3 = Object.assign || function(t) {
var __assign$4 = (undefined && undefined.__assign) || function () {
__assign$4 = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {

@@ -671,3 +1052,3 @@ s = arguments[i];

};
return __assign$3.apply(this, arguments);
return __assign$4.apply(this, arguments);
};

@@ -678,3 +1059,2 @@ var defaultConfig = {

addTypename: true,
storeFactory: defaultNormalizedCacheFactory,
};

@@ -698,6 +1078,7 @@ function defaultDataIdFromObject(result) {

_this.optimistic = [];
_this.watches = [];
_this.typenameDocumentCache = new WeakMap();
_this.watches = new Set();
_this.typenameDocumentCache = new Map();
_this.cacheKeyRoot = new CacheKeyNode();
_this.silenceBroadcast = false;
_this.config = __assign$3({}, defaultConfig, config);
_this.config = __assign$4({}, defaultConfig, config);
if (_this.config.customResolvers) {

@@ -712,3 +1093,27 @@ console.warn('customResolvers have been renamed to cacheRedirects. Please update your config as we will be deprecating customResolvers in the next major version.');

_this.addTypename = _this.config.addTypename;
_this.data = _this.config.storeFactory();
_this.data = defaultNormalizedCacheFactory();
_this.storeReader = new StoreReader({
addTypename: _this.config.addTypename,
cacheKeyRoot: _this.cacheKeyRoot,
});
_this.storeWriter = new StoreWriter({
addTypename: _this.config.addTypename,
});
var cache = _this;
var maybeBroadcastWatch = cache.maybeBroadcastWatch;
_this.maybeBroadcastWatch = wrap(function (c) {
return maybeBroadcastWatch.call(_this, c);
}, {
makeCacheKey: function (c) {
if (c.optimistic && cache.optimistic.length > 0) {
return;
}
if (c.previousResult) {
return;
}
if (cache.data instanceof DepTrackingCache) {
return cache.cacheKeyRoot.lookup(c.query, JSON.stringify(c.variables));
}
}
});
return _this;

@@ -733,5 +1138,8 @@ }

}
return readQueryFromStore({
store: this.config.storeFactory(this.extract(query.optimistic)),
query: this.transformDocument(query.query),
var store = (query.optimistic && this.optimistic.length)
? defaultNormalizedCacheFactory(this.extract(true))
: this.data;
return this.storeReader.readQueryFromStore({
store: store,
query: query.query,
variables: query.variables,

@@ -745,7 +1153,7 @@ rootId: query.rootId,

InMemoryCache.prototype.write = function (write) {
writeResultToStore({
this.storeWriter.writeResultToStore({
dataId: write.dataId,
result: write.result,
variables: write.variables,
document: this.transformDocument(write.query),
document: write.query,
store: this.data,

@@ -758,5 +1166,8 @@ dataIdFromObject: this.config.dataIdFromObject,

InMemoryCache.prototype.diff = function (query) {
return diffQueryAgainstStore({
store: this.config.storeFactory(this.extract(query.optimistic)),
query: this.transformDocument(query.query),
var store = (query.optimistic && this.optimistic.length)
? defaultNormalizedCacheFactory(this.extract(true))
: this.data;
return this.storeReader.diffQueryAgainstStore({
store: store,
query: query.query,
variables: query.variables,

@@ -771,5 +1182,5 @@ returnPartialData: query.returnPartialData,

var _this = this;
this.watches.push(watch);
this.watches.add(watch);
return function () {
_this.watches = _this.watches.filter(function (c) { return c !== watch; });
_this.watches.delete(watch);
};

@@ -841,3 +1252,3 @@ };

return this.read({
query: this.transformDocument(apolloUtilities.getFragmentQueryDocument(options.fragment, options.fragmentName)),
query: apolloUtilities.getFragmentQueryDocument(options.fragment, options.fragmentName),
variables: options.variables,

@@ -852,3 +1263,3 @@ rootId: options.id,

result: options.data,
query: this.transformDocument(options.query),
query: options.query,
variables: options.variables,

@@ -861,3 +1272,3 @@ });

result: options.data,
query: this.transformDocument(apolloUtilities.getFragmentQueryDocument(options.fragment, options.fragmentName)),
query: apolloUtilities.getFragmentQueryDocument(options.fragment, options.fragmentName),
variables: options.variables,

@@ -868,13 +1279,25 @@ });

var _this = this;
if (this.silenceBroadcast)
return;
this.watches.forEach(function (c) {
var newData = _this.diff({
query: c.query,
variables: c.variables,
previousResult: c.previousResult && c.previousResult(),
optimistic: c.optimistic,
if (!this.silenceBroadcast) {
var optimistic_1 = this.optimistic.length > 0;
this.watches.forEach(function (c) {
_this.maybeBroadcastWatch(c);
if (optimistic_1) {
_this.maybeBroadcastWatch.dirty(c);
}
});
c.callback(newData);
}
};
InMemoryCache.prototype.maybeBroadcastWatch = function (c) {
var previousResult = c.previousResult && c.previousResult();
var newData = this.diff({
query: c.query,
variables: c.variables,
previousResult: previousResult,
optimistic: c.optimistic,
});
if (previousResult &&
previousResult === newData.result) {
return;
}
c.callback(newData);
};

@@ -886,15 +1309,11 @@ return InMemoryCache;

exports.defaultDataIdFromObject = defaultDataIdFromObject;
exports.ID_KEY = ID_KEY;
exports.readQueryFromStore = readQueryFromStore;
exports.diffQueryAgainstStore = diffQueryAgainstStore;
exports.StoreReader = StoreReader;
exports.assertIdValue = assertIdValue;
exports.WriteError = WriteError;
exports.enhanceErrorWithDocument = enhanceErrorWithDocument;
exports.writeQueryToStore = writeQueryToStore;
exports.writeResultToStore = writeResultToStore;
exports.writeSelectionSetToStore = writeSelectionSetToStore;
exports.StoreWriter = StoreWriter;
exports.HeuristicFragmentMatcher = HeuristicFragmentMatcher;
exports.IntrospectionFragmentMatcher = IntrospectionFragmentMatcher;
exports.ObjectCache = ObjectCache;
exports.defaultNormalizedCacheFactory = defaultNormalizedCacheFactory;
exports.defaultNormalizedCacheFactory = defaultNormalizedCacheFactory$1;
exports.RecordingCache = RecordingCache;

@@ -901,0 +1320,0 @@ exports.record = record;
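The export list above shows the module-level `readQueryFromStore`/`diffQueryAgainstStore` and `writeQueryToStore`/`writeResultToStore` helpers being replaced by `StoreReader` and `StoreWriter` classes. Below is a rough usage sketch of that new low-level surface, inferred from the compiled code in this diff; it assumes the classes remain re-exported from the package entry point, that `graphql-tag` is available, and that the default options (no `dataIdFromObject`, no fragment matcher) suffice for a simple query:

```js
const { StoreReader, StoreWriter } = require('apollo-cache-inmemory');
const gql = require('graphql-tag');

const query = gql`
  {
    hero {
      id
      name
    }
  }
`;

const writer = new StoreWriter();
const reader = new StoreReader();

// writeQueryToStore normalizes the result into a store (a dependency-tracking
// cache by default in this version) and returns that store.
const store = writer.writeQueryToStore({
  query,
  result: { hero: { id: '1', name: 'Luke' } },
});

// readQueryFromStore executes the same selection set against the store;
// repeated reads are memoized per store/query/variables.
const data = reader.readQueryFromStore({ store, query });
console.log(data.hero.name); // 'Luke'
```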

@@ -7,3 +7,3 @@ import { IdValue } from 'apollo-utilities';

canBypassInit(): boolean;
match(idValue: IdValue, typeCondition: string, context: ReadStoreContext): boolean;
match(idValue: IdValue, typeCondition: string, context: ReadStoreContext): boolean | 'heuristic';
}

@@ -10,0 +10,0 @@ export declare class IntrospectionFragmentMatcher implements FragmentMatcherInterface {

@@ -30,4 +30,3 @@ import { isTest, warnOnceInDevelopment } from 'apollo-utilities';

}
context.returnPartialData = true;
return true;
return 'heuristic';
}

@@ -42,4 +41,3 @@ if (obj.__typename === typeCondition) {

'https://www.apollographql.com/docs/react/recipes/fragment-matching.html', 'error');
context.returnPartialData = true;
return true;
return 'heuristic';
};

@@ -46,0 +44,0 @@ return HeuristicFragmentMatcher;

@@ -12,2 +12,5 @@ import { DocumentNode } from 'graphql';

private typenameDocumentCache;
private storeReader;
private storeWriter;
private cacheKeyRoot;
private silenceBroadcast;

@@ -32,3 +35,4 @@ constructor(config?: ApolloReducerConfig);

protected broadcastWatches(): void;
private maybeBroadcastWatch;
}
//# sourceMappingURL=inMemoryCache.d.ts.map

@@ -28,5 +28,6 @@ var __extends = (this && this.__extends) || (function () {

import { HeuristicFragmentMatcher } from './fragmentMatcher';
import { writeResultToStore } from './writeToStore';
import { readQueryFromStore, diffQueryAgainstStore } from './readFromStore';
import { defaultNormalizedCacheFactory } from './objectCache';
import { StoreReader } from './readFromStore';
import { StoreWriter } from './writeToStore';
import { defaultNormalizedCacheFactory, DepTrackingCache } from './depTrackingCache';
import { wrap, CacheKeyNode } from './optimism';
import { record } from './recordingCache';

@@ -37,3 +38,2 @@ var defaultConfig = {

addTypename: true,
storeFactory: defaultNormalizedCacheFactory,
};

@@ -57,4 +57,5 @@ export function defaultDataIdFromObject(result) {

_this.optimistic = [];
_this.watches = [];
_this.typenameDocumentCache = new WeakMap();
_this.watches = new Set();
_this.typenameDocumentCache = new Map();
_this.cacheKeyRoot = new CacheKeyNode();
_this.silenceBroadcast = false;

@@ -71,3 +72,27 @@ _this.config = __assign({}, defaultConfig, config);

_this.addTypename = _this.config.addTypename;
_this.data = _this.config.storeFactory();
_this.data = defaultNormalizedCacheFactory();
_this.storeReader = new StoreReader({
addTypename: _this.config.addTypename,
cacheKeyRoot: _this.cacheKeyRoot,
});
_this.storeWriter = new StoreWriter({
addTypename: _this.config.addTypename,
});
var cache = _this;
var maybeBroadcastWatch = cache.maybeBroadcastWatch;
_this.maybeBroadcastWatch = wrap(function (c) {
return maybeBroadcastWatch.call(_this, c);
}, {
makeCacheKey: function (c) {
if (c.optimistic && cache.optimistic.length > 0) {
return;
}
if (c.previousResult) {
return;
}
if (cache.data instanceof DepTrackingCache) {
return cache.cacheKeyRoot.lookup(c.query, JSON.stringify(c.variables));
}
}
});
return _this;

@@ -92,5 +117,8 @@ }

}
return readQueryFromStore({
store: this.config.storeFactory(this.extract(query.optimistic)),
query: this.transformDocument(query.query),
var store = (query.optimistic && this.optimistic.length)
? defaultNormalizedCacheFactory(this.extract(true))
: this.data;
return this.storeReader.readQueryFromStore({
store: store,
query: query.query,
variables: query.variables,

@@ -104,7 +132,7 @@ rootId: query.rootId,

InMemoryCache.prototype.write = function (write) {
writeResultToStore({
this.storeWriter.writeResultToStore({
dataId: write.dataId,
result: write.result,
variables: write.variables,
document: this.transformDocument(write.query),
document: write.query,
store: this.data,

@@ -117,5 +145,8 @@ dataIdFromObject: this.config.dataIdFromObject,

InMemoryCache.prototype.diff = function (query) {
return diffQueryAgainstStore({
store: this.config.storeFactory(this.extract(query.optimistic)),
query: this.transformDocument(query.query),
var store = (query.optimistic && this.optimistic.length)
? defaultNormalizedCacheFactory(this.extract(true))
: this.data;
return this.storeReader.diffQueryAgainstStore({
store: store,
query: query.query,
variables: query.variables,

@@ -130,5 +161,5 @@ returnPartialData: query.returnPartialData,

var _this = this;
this.watches.push(watch);
this.watches.add(watch);
return function () {
_this.watches = _this.watches.filter(function (c) { return c !== watch; });
_this.watches.delete(watch);
};

@@ -200,3 +231,3 @@ };

return this.read({
query: this.transformDocument(getFragmentQueryDocument(options.fragment, options.fragmentName)),
query: getFragmentQueryDocument(options.fragment, options.fragmentName),
variables: options.variables,

@@ -211,3 +242,3 @@ rootId: options.id,

result: options.data,
query: this.transformDocument(options.query),
query: options.query,
variables: options.variables,

@@ -220,3 +251,3 @@ });

result: options.data,
query: this.transformDocument(getFragmentQueryDocument(options.fragment, options.fragmentName)),
query: getFragmentQueryDocument(options.fragment, options.fragmentName),
variables: options.variables,

@@ -227,13 +258,25 @@ });

var _this = this;
if (this.silenceBroadcast)
return;
this.watches.forEach(function (c) {
var newData = _this.diff({
query: c.query,
variables: c.variables,
previousResult: c.previousResult && c.previousResult(),
optimistic: c.optimistic,
if (!this.silenceBroadcast) {
var optimistic_1 = this.optimistic.length > 0;
this.watches.forEach(function (c) {
_this.maybeBroadcastWatch(c);
if (optimistic_1) {
_this.maybeBroadcastWatch.dirty(c);
}
});
c.callback(newData);
}
};
InMemoryCache.prototype.maybeBroadcastWatch = function (c) {
var previousResult = c.previousResult && c.previousResult();
var newData = this.diff({
query: c.query,
variables: c.variables,
previousResult: previousResult,
optimistic: c.optimistic,
});
if (previousResult &&
previousResult === newData.result) {
return;
}
c.callback(newData);
};

@@ -240,0 +283,0 @@ return InMemoryCache;

import { IdValue } from 'apollo-utilities';
import { Cache } from 'apollo-cache';
import { ReadQueryOptions, DiffQueryAgainstStoreOptions } from './types';
export declare const ID_KEY: string | symbol;
export declare function readQueryFromStore<QueryType>(options: ReadQueryOptions): QueryType;
export declare function diffQueryAgainstStore<T>({ store, query, variables, previousResult, returnPartialData, rootId, fragmentMatcherFunction, config, }: DiffQueryAgainstStoreOptions): Cache.DiffResult<T>;
import { ReadStoreContext, DiffQueryAgainstStoreOptions, ReadQueryOptions } from './types';
import { CacheKeyNode } from './optimism';
export { OptimisticWrapperFunction } from './optimism';
export declare type VariableMap = {
[name: string]: any;
};
export declare type FragmentMatcher = (rootValue: any, typeCondition: string, context: ReadStoreContext) => boolean | 'heuristic';
export declare type ExecResultMissingField = {
objectId: string;
fieldName: string;
tolerable: boolean;
};
export declare type ExecResult<R = any> = {
result: R;
missing?: ExecResultMissingField[];
};
declare type StoreReaderOptions = {
addTypename?: boolean;
cacheKeyRoot?: CacheKeyNode;
};
export declare class StoreReader {
private addTypename;
private cacheKeyRoot;
private keyMaker;
constructor({ addTypename, cacheKeyRoot, }?: StoreReaderOptions);
readQueryFromStore<QueryType>(options: ReadQueryOptions): QueryType;
diffQueryAgainstStore<T>({ store, query, variables, previousResult, returnPartialData, rootId, fragmentMatcherFunction, config, }: DiffQueryAgainstStoreOptions): Cache.DiffResult<T>;
private executeStoreQuery;
private executeSelectionSet;
private executeField;
private executeSubSelectedArray;
}
export declare function assertIdValue(idValue: IdValue): void;
//# sourceMappingURL=readFromStore.d.ts.map

@@ -12,10 +12,248 @@ var __assign = (this && this.__assign) || function () {

};
import graphqlAnywhere from 'graphql-anywhere';
import { assign, isEqual, getDefaultValues, getQueryDefinition, isJsonValue, isIdValue, toIdValue, getStoreKeyName, } from 'apollo-utilities';
export var ID_KEY = typeof Symbol !== 'undefined' ? Symbol('id') : '@@id';
export function readQueryFromStore(options) {
var optsPatch = { returnPartialData: false };
return diffQueryAgainstStore(__assign({}, options, optsPatch)).result;
import { assign, getDefaultValues, getQueryDefinition, isEqual, argumentsObjectFromField, createFragmentMap, getDirectiveInfoFromField, getFragmentDefinitions, getMainDefinition, getStoreKeyName, isField, isIdValue, isInlineFragment, isJsonValue, resultKeyNameFromField, shouldInclude, toIdValue, } from 'apollo-utilities';
import { wrap, CacheKeyNode } from './optimism';
import { DepTrackingCache } from './depTrackingCache';
import { QueryKeyMaker } from './queryKeyMaker';
var StoreReader = (function () {
function StoreReader(_a) {
var _b = _a === void 0 ? {} : _a, _c = _b.addTypename, addTypename = _c === void 0 ? false : _c, _d = _b.cacheKeyRoot, cacheKeyRoot = _d === void 0 ? new CacheKeyNode : _d;
var reader = this;
var executeStoreQuery = reader.executeStoreQuery, executeSelectionSet = reader.executeSelectionSet;
reader.addTypename = addTypename;
reader.cacheKeyRoot = cacheKeyRoot;
reader.keyMaker = new QueryKeyMaker(cacheKeyRoot);
this.executeStoreQuery = wrap(function (options) {
return executeStoreQuery.call(reader, options);
}, {
makeCacheKey: function (_a) {
var query = _a.query, rootValue = _a.rootValue, contextValue = _a.contextValue, variableValues = _a.variableValues;
if (contextValue.store instanceof DepTrackingCache) {
return reader.cacheKeyRoot.lookup(reader.keyMaker.forQuery(query).lookupQuery(query), contextValue.store, JSON.stringify(variableValues));
}
}
});
this.executeSelectionSet = wrap(function (options) {
return executeSelectionSet.call(reader, options);
}, {
makeCacheKey: function (_a) {
var selectionSet = _a.selectionSet, rootValue = _a.rootValue, execContext = _a.execContext;
if (execContext.contextValue.store instanceof DepTrackingCache) {
return reader.cacheKeyRoot.lookup(reader.keyMaker.forQuery(execContext.query).lookupSelectionSet(selectionSet), execContext.contextValue.store, JSON.stringify(execContext.variableValues), rootValue.id);
}
}
});
}
StoreReader.prototype.readQueryFromStore = function (options) {
var optsPatch = { returnPartialData: false };
return this.diffQueryAgainstStore(__assign({}, options, optsPatch)).result;
};
StoreReader.prototype.diffQueryAgainstStore = function (_a) {
var store = _a.store, query = _a.query, variables = _a.variables, previousResult = _a.previousResult, _b = _a.returnPartialData, returnPartialData = _b === void 0 ? true : _b, _c = _a.rootId, rootId = _c === void 0 ? 'ROOT_QUERY' : _c, fragmentMatcherFunction = _a.fragmentMatcherFunction, config = _a.config;
var queryDefinition = getQueryDefinition(query);
variables = assign({}, getDefaultValues(queryDefinition), variables);
var context = {
store: store,
dataIdFromObject: (config && config.dataIdFromObject) || null,
cacheRedirects: (config && config.cacheRedirects) || {},
};
var execResult = this.executeStoreQuery({
query: query,
rootValue: {
type: 'id',
id: rootId,
generated: true,
typename: 'Query',
},
contextValue: context,
variableValues: variables,
fragmentMatcher: fragmentMatcherFunction,
});
var hasMissingFields = execResult.missing && execResult.missing.length > 0;
if (hasMissingFields && !returnPartialData) {
execResult.missing.forEach(function (info) {
if (info.tolerable)
return;
throw new Error("Can't find field " + info.fieldName + " on object (" + info.objectId + ") " + JSON.stringify(store.get(info.objectId), null, 2) + ".");
});
}
if (previousResult) {
if (isEqual(previousResult, execResult.result)) {
execResult.result = previousResult;
}
}
return {
result: execResult.result,
complete: !hasMissingFields,
};
};
StoreReader.prototype.executeStoreQuery = function (_a) {
var query = _a.query, rootValue = _a.rootValue, contextValue = _a.contextValue, variableValues = _a.variableValues, _b = _a.fragmentMatcher, fragmentMatcher = _b === void 0 ? function () { return true; } : _b;
var mainDefinition = getMainDefinition(query);
var fragments = getFragmentDefinitions(query);
var fragmentMap = createFragmentMap(fragments);
var execContext = {
query: query,
fragmentMap: fragmentMap,
contextValue: contextValue,
variableValues: variableValues,
fragmentMatcher: fragmentMatcher,
};
return this.executeSelectionSet({
selectionSet: mainDefinition.selectionSet,
rootValue: rootValue,
execContext: execContext,
parentKind: mainDefinition.kind,
});
};
StoreReader.prototype.executeSelectionSet = function (_a) {
var _this = this;
var selectionSet = _a.selectionSet, rootValue = _a.rootValue, execContext = _a.execContext, parentKind = _a.parentKind;
var fragmentMap = execContext.fragmentMap, contextValue = execContext.contextValue, variables = execContext.variableValues;
var finalResult = {
result: {},
};
var didReadTypename = false;
function handleMissing(result) {
var _a;
if (result.missing) {
finalResult.missing = finalResult.missing || [];
(_a = finalResult.missing).push.apply(_a, result.missing);
}
return result.result;
}
var handleSelection = function (selection) {
var _a;
if (!shouldInclude(selection, variables)) {
return;
}
if (isField(selection)) {
var fieldResult = handleMissing(_this.executeField(selection, rootValue, execContext));
var keyName = resultKeyNameFromField(selection);
if (keyName === "__typename") {
didReadTypename = true;
}
if (typeof fieldResult !== 'undefined') {
merge(finalResult.result, (_a = {},
_a[keyName] = fieldResult,
_a));
}
}
else {
var fragment = void 0;
if (isInlineFragment(selection)) {
fragment = selection;
}
else {
fragment = fragmentMap[selection.name.value];
if (!fragment) {
throw new Error("No fragment named " + selection.name.value);
}
}
var typeCondition = fragment.typeCondition.name.value;
var match = execContext.fragmentMatcher(rootValue, typeCondition, contextValue);
if (match) {
var fragmentExecResult = _this.executeSelectionSet({
selectionSet: fragment.selectionSet,
rootValue: rootValue,
execContext: execContext,
parentKind: fragment.kind,
});
if (match === 'heuristic' && fragmentExecResult.missing) {
fragmentExecResult = __assign({}, fragmentExecResult, { missing: fragmentExecResult.missing.map(function (info) {
return __assign({}, info, { tolerable: true });
}) });
}
merge(finalResult.result, handleMissing(fragmentExecResult));
}
}
};
selectionSet.selections.forEach(handleSelection);
if (!didReadTypename &&
this.addTypename &&
parentKind !== "OperationDefinition") {
handleSelection({
kind: "Field",
name: {
kind: "Name",
value: "__typename",
},
});
}
return finalResult;
};
StoreReader.prototype.executeField = function (field, rootValue, execContext) {
var variables = execContext.variableValues, contextValue = execContext.contextValue;
var fieldName = field.name.value;
var args = argumentsObjectFromField(field, variables);
var info = {
resultKey: resultKeyNameFromField(field),
directives: getDirectiveInfoFromField(field, variables),
};
var readStoreResult = readStoreResolver(fieldName, rootValue, args, contextValue, info);
if (!field.selectionSet) {
return readStoreResult;
}
if (readStoreResult.result == null) {
return readStoreResult;
}
function handleMissing(res) {
var missing = null;
if (readStoreResult.missing) {
missing = missing || [];
missing.push.apply(missing, readStoreResult.missing);
}
if (res.missing) {
missing = missing || [];
missing.push.apply(missing, res.missing);
}
return {
result: res.result,
missing: missing,
};
}
if (Array.isArray(readStoreResult.result)) {
return handleMissing(this.executeSubSelectedArray(field, readStoreResult.result, execContext));
}
return handleMissing(this.executeSelectionSet({
selectionSet: field.selectionSet,
rootValue: readStoreResult.result,
execContext: execContext,
parentKind: field.kind,
}));
};
StoreReader.prototype.executeSubSelectedArray = function (field, result, execContext) {
var _this = this;
var missing = null;
function handleMissing(childResult) {
if (childResult.missing) {
missing = missing || [];
missing.push.apply(missing, childResult.missing);
}
return childResult.result;
}
result = result.map(function (item) {
if (item === null) {
return null;
}
if (Array.isArray(item)) {
return handleMissing(_this.executeSubSelectedArray(field, item, execContext));
}
return handleMissing(_this.executeSelectionSet({
selectionSet: field.selectionSet,
rootValue: item,
execContext: execContext,
parentKind: field.kind,
}));
});
return { result: result, missing: missing };
};
return StoreReader;
}());
export { StoreReader };
export function assertIdValue(idValue) {
if (!isIdValue(idValue)) {
throw new Error("Encountered a sub-selection on the query, but the store doesn't have an object reference. This should never happen during normal use unless you have custom code that is directly manipulating the store; please file an issue.");
}
}
var readStoreResolver = function (fieldName, idValue, args, context, _a) {
function readStoreResolver(fieldName, idValue, args, context, _a) {
var resultKey = _a.resultKey, directives = _a.directives;

@@ -53,102 +291,37 @@ assertIdValue(idValue);

if (typeof fieldValue === 'undefined') {
if (!context.returnPartialData) {
throw new Error("Can't find field " + storeKeyName + " on object (" + objId + ") " + JSON.stringify(obj, null, 2) + ".");
}
context.hasMissingField = true;
return fieldValue;
return {
result: fieldValue,
missing: [{
objectId: objId,
fieldName: storeKeyName,
tolerable: false,
}],
};
}
if (isJsonValue(fieldValue)) {
if (idValue.previousResult &&
isEqual(idValue.previousResult[resultKey], fieldValue.json)) {
return idValue.previousResult[resultKey];
}
return fieldValue.json;
fieldValue = fieldValue.json;
}
if (idValue.previousResult) {
fieldValue = addPreviousResultToIdValues(fieldValue, idValue.previousResult[resultKey]);
}
return fieldValue;
};
export function diffQueryAgainstStore(_a) {
var store = _a.store, query = _a.query, variables = _a.variables, previousResult = _a.previousResult, _b = _a.returnPartialData, returnPartialData = _b === void 0 ? true : _b, _c = _a.rootId, rootId = _c === void 0 ? 'ROOT_QUERY' : _c, fragmentMatcherFunction = _a.fragmentMatcherFunction, config = _a.config;
var queryDefinition = getQueryDefinition(query);
variables = assign({}, getDefaultValues(queryDefinition), variables);
var context = {
store: store,
returnPartialData: returnPartialData,
dataIdFromObject: (config && config.dataIdFromObject) || null,
cacheRedirects: (config && config.cacheRedirects) || {},
hasMissingField: false,
};
var rootIdValue = {
type: 'id',
id: rootId,
previousResult: previousResult,
};
var result = graphqlAnywhere(readStoreResolver, query, rootIdValue, context, variables, {
fragmentMatcher: fragmentMatcherFunction,
resultMapper: resultMapper,
});
return {
result: result,
complete: !context.hasMissingField,
result: fieldValue,
};
}
export function assertIdValue(idValue) {
if (!isIdValue(idValue)) {
throw new Error("Encountered a sub-selection on the query, but the store doesn't have an object reference. This should never happen during normal use unless you have custom code that is directly manipulating the store; please file an issue.");
}
}
function addPreviousResultToIdValues(value, previousResult) {
if (isIdValue(value)) {
return __assign({}, value, { previousResult: previousResult });
}
else if (Array.isArray(value)) {
var idToPreviousResult_1 = new Map();
if (Array.isArray(previousResult)) {
previousResult.forEach(function (item) {
if (item && item[ID_KEY]) {
idToPreviousResult_1.set(item[ID_KEY], item);
}
});
var hasOwn = Object.prototype.hasOwnProperty;
function merge(target, source) {
if (source !== null && typeof source === 'object' &&
source !== target) {
if (Object.isExtensible && !Object.isExtensible(target)) {
target = __assign({}, target);
}
return value.map(function (item, i) {
var itemPreviousResult = previousResult && previousResult[i];
if (isIdValue(item)) {
itemPreviousResult =
idToPreviousResult_1.get(item.id) || itemPreviousResult;
Object.keys(source).forEach(function (sourceKey) {
var sourceVal = source[sourceKey];
if (!hasOwn.call(target, sourceKey)) {
target[sourceKey] = sourceVal;
}
return addPreviousResultToIdValues(item, itemPreviousResult);
else {
target[sourceKey] = merge(target[sourceKey], sourceVal);
}
});
}
return value;
return target;
}
function resultMapper(resultFields, idValue) {
if (idValue.previousResult) {
var currentResultKeys_1 = Object.keys(resultFields);
var sameAsPreviousResult = Object.keys(idValue.previousResult).every(function (key) { return currentResultKeys_1.indexOf(key) > -1; }) &&
currentResultKeys_1.every(function (key) {
return areNestedArrayItemsStrictlyEqual(resultFields[key], idValue.previousResult[key]);
});
if (sameAsPreviousResult) {
return idValue.previousResult;
}
}
Object.defineProperty(resultFields, ID_KEY, {
enumerable: false,
configurable: true,
writable: false,
value: idValue.id,
});
return resultFields;
}
function areNestedArrayItemsStrictlyEqual(a, b) {
if (a === b) {
return true;
}
if (!Array.isArray(a) || !Array.isArray(b) || a.length !== b.length) {
return false;
}
return a.every(function (item, i) { return areNestedArrayItemsStrictlyEqual(item, b[i]); });
}
//# sourceMappingURL=readFromStore.js.map
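The memoization plumbing above leans on optimism's wrap. A standalone sketch of the pattern, under the assumption that wrap(fn, { makeCacheKey }) caches results per key object and skips caching when makeCacheKey returns undefined, which is how the code above uses it:

import { wrap, CacheKeyNode } from './optimism';

const keys = new CacheKeyNode();
let computations = 0;

const readName = wrap(
  (entity: { name: string }) => {
    computations++;
    return entity.name;
  },
  {
    makeCacheKey(entity: { name: string }) {
      // CacheKeyNode turns one or more values into a stable key object, the
      // same trick StoreReader uses to key on (query, store, variables).
      return keys.lookup(entity);
    },
  },
);

const luke = { name: 'Luke Skywalker' };
readName(luke); // computes; computations === 1
readName(luke); // served from the memoization cache; computations is still 1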
import { DocumentNode } from 'graphql';
import { FragmentMatcher } from 'graphql-anywhere';
import { FragmentMatcher } from './readFromStore';
import { Transaction } from 'apollo-cache';

@@ -25,3 +25,2 @@ import { IdValue, StoreValue } from 'apollo-utilities';

}
export declare type NormalizedCacheFactory = (seed?: NormalizedCacheObject) => NormalizedCache;
export declare type OptimisticStoreItem = {

@@ -49,13 +48,10 @@ id: string;

cacheRedirects?: CacheResolverMap;
storeFactory?: NormalizedCacheFactory;
};
export declare type ReadStoreContext = {
store: NormalizedCache;
returnPartialData: boolean;
hasMissingField: boolean;
cacheRedirects: CacheResolverMap;
dataIdFromObject?: IdGetter;
readonly store: NormalizedCache;
readonly cacheRedirects: CacheResolverMap;
readonly dataIdFromObject?: IdGetter;
};
export interface FragmentMatcherInterface {
match(idValue: IdValue, typeCondition: string, context: ReadStoreContext): boolean;
match(idValue: IdValue, typeCondition: string, context: ReadStoreContext): boolean | 'heuristic';
}
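A minimal matcher conforming to the widened match signature above. This is a sketch, not one of the matchers shipped with the package, and the class name and import paths are hypothetical:

import { IdValue } from 'apollo-utilities';
import { FragmentMatcherInterface, ReadStoreContext } from './types';

class ExactTypenameMatcher implements FragmentMatcherInterface {
  match(
    idValue: IdValue,
    typeCondition: string,
    context: ReadStoreContext,
  ): boolean | 'heuristic' {
    const obj = context.store.get(idValue.id);
    if (!obj || !obj.__typename) {
      // No typename in the store: fall back to 'heuristic', which tells the
      // reader to treat missing fields under this fragment as tolerable.
      return 'heuristic';
    }
    return obj.__typename === typeCondition;
  }
}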

@@ -65,5 +61,2 @@ export declare type PossibleTypesMap = {

};
export interface IdValueWithPreviousResult extends IdValue {
previousResult?: any;
}
export declare type IntrospectionResultData = {

@@ -70,0 +63,0 @@ __schema: {

import { SelectionSetNode, FieldNode, DocumentNode } from 'graphql';
import { FragmentMatcher } from 'graphql-anywhere';
import { FragmentMatcher } from './readFromStore';
import { FragmentMap } from 'apollo-utilities';
import { IdGetter, NormalizedCache, NormalizedCacheFactory } from './types';
import { IdGetter, NormalizedCache } from './types';
export declare class WriteError extends Error {

@@ -9,39 +9,46 @@ type: string;

export declare function enhanceErrorWithDocument(error: Error, document: DocumentNode): WriteError;
export declare function writeQueryToStore({ result, query, storeFactory, store, variables, dataIdFromObject, fragmentMap, fragmentMatcherFunction, }: {
result: Object;
query: DocumentNode;
store?: NormalizedCache;
storeFactory?: NormalizedCacheFactory;
variables?: Object;
dataIdFromObject?: IdGetter;
fragmentMap?: FragmentMap;
fragmentMatcherFunction?: FragmentMatcher;
}): NormalizedCache;
export declare type WriteContext = {
store: NormalizedCache;
storeFactory: NormalizedCacheFactory;
processedData?: {
readonly store: NormalizedCache;
readonly processedData?: {
[x: string]: FieldNode[];
};
variables?: any;
dataIdFromObject?: IdGetter;
fragmentMap?: FragmentMap;
fragmentMatcherFunction?: FragmentMatcher;
readonly variables?: any;
readonly dataIdFromObject?: IdGetter;
readonly fragmentMap?: FragmentMap;
readonly fragmentMatcherFunction?: FragmentMatcher;
};
export declare function writeResultToStore({ dataId, result, document, storeFactory, store, variables, dataIdFromObject, fragmentMatcherFunction, }: {
dataId: string;
result: any;
document: DocumentNode;
store?: NormalizedCache;
storeFactory?: NormalizedCacheFactory;
variables?: Object;
dataIdFromObject?: IdGetter;
fragmentMatcherFunction?: FragmentMatcher;
}): NormalizedCache;
export declare function writeSelectionSetToStore({ result, dataId, selectionSet, context, }: {
dataId: string;
result: any;
selectionSet: SelectionSetNode;
context: WriteContext;
}): NormalizedCache;
declare type StoreWriterOptions = {
addTypename?: boolean;
};
export declare class StoreWriter {
private addTypename;
constructor({ addTypename, }?: StoreWriterOptions);
writeQueryToStore({ query, result, store, variables, dataIdFromObject, fragmentMatcherFunction, }: {
query: DocumentNode;
result: Object;
store?: NormalizedCache;
variables?: Object;
dataIdFromObject?: IdGetter;
fragmentMatcherFunction?: FragmentMatcher;
}): NormalizedCache;
writeResultToStore({ dataId, result, document, store, variables, dataIdFromObject, fragmentMatcherFunction, }: {
dataId: string;
result: any;
document: DocumentNode;
store?: NormalizedCache;
variables?: Object;
dataIdFromObject?: IdGetter;
fragmentMatcherFunction?: FragmentMatcher;
}): NormalizedCache;
writeSelectionSetToStore({ result, dataId, selectionSet, context, parentKind, }: {
dataId: string;
result: any;
selectionSet: SelectionSetNode;
context: WriteContext;
parentKind: string;
}): NormalizedCache;
private writeFieldToStore;
private processArrayValue;
}
export {};
//# sourceMappingURL=writeToStore.d.ts.map
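A short sketch of the new StoreWriter class in use, not taken from the package's docs; the query, data, and the `Typename:id` convention below are assumptions modeled on defaultDataIdFromObject and the tests.

import gql from 'graphql-tag';
import { StoreWriter } from './writeToStore';

const writer = new StoreWriter();

const document = gql`
  {
    author {
      __typename
      id
      name
    }
  }
`;

const store = writer.writeResultToStore({
  dataId: 'ROOT_QUERY',
  document,
  result: {
    author: { __typename: 'Author', id: '7', name: 'Ada Lovelace' },
  },
  // Mimics the defaultDataIdFromObject convention of `${__typename}:${id}`.
  dataIdFromObject: (obj: any) => (obj.id ? `${obj.__typename}:${obj.id}` : null),
});

// The author is normalized once under 'Author:7'; ROOT_QUERY.author holds an
// id value pointing at that entry.
console.log(store.toObject());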

@@ -26,4 +26,5 @@ var __extends = (this && this.__extends) || (function () {

import { print } from 'graphql/language/printer';
import { assign, createFragmentMap, getDefaultValues, getFragmentDefinitions, getOperationDefinition, isField, isIdValue, isInlineFragment, isProduction, resultKeyNameFromField, shouldInclude, storeKeyNameFromField, getQueryDefinition, toIdValue, } from 'apollo-utilities';
import { defaultNormalizedCacheFactory, ObjectCache } from './objectCache';
import { assign, createFragmentMap, getDefaultValues, getFragmentDefinitions, getOperationDefinition, isField, isIdValue, isInlineFragment, isProduction, resultKeyNameFromField, shouldInclude, storeKeyNameFromField, toIdValue, isEqual, } from 'apollo-utilities';
import { ObjectCache } from './objectCache';
import { defaultNormalizedCacheFactory } from './depTrackingCache';
var WriteError = (function (_super) {

@@ -45,63 +46,66 @@ __extends(WriteError, _super);

}
export function writeQueryToStore(_a) {
var result = _a.result, query = _a.query, _b = _a.storeFactory, storeFactory = _b === void 0 ? defaultNormalizedCacheFactory : _b, _c = _a.store, store = _c === void 0 ? storeFactory() : _c, variables = _a.variables, dataIdFromObject = _a.dataIdFromObject, _d = _a.fragmentMap, fragmentMap = _d === void 0 ? {} : _d, fragmentMatcherFunction = _a.fragmentMatcherFunction;
var queryDefinition = getQueryDefinition(query);
variables = assign({}, getDefaultValues(queryDefinition), variables);
try {
return writeSelectionSetToStore({
var TYPENAME_FIELD = {
kind: 'Field',
name: {
kind: 'Name',
value: '__typename',
},
};
var StoreWriter = (function () {
function StoreWriter(_a) {
var _b = (_a === void 0 ? {} : _a).addTypename, addTypename = _b === void 0 ? false : _b;
this.addTypename = addTypename;
}
StoreWriter.prototype.writeQueryToStore = function (_a) {
var query = _a.query, result = _a.result, _b = _a.store, store = _b === void 0 ? defaultNormalizedCacheFactory() : _b, variables = _a.variables, dataIdFromObject = _a.dataIdFromObject, fragmentMatcherFunction = _a.fragmentMatcherFunction;
return this.writeResultToStore({
dataId: 'ROOT_QUERY',
result: result,
selectionSet: queryDefinition.selectionSet,
context: {
store: store,
storeFactory: storeFactory,
processedData: {},
variables: variables,
dataIdFromObject: dataIdFromObject,
fragmentMap: fragmentMap,
fragmentMatcherFunction: fragmentMatcherFunction,
},
document: query,
store: store,
variables: variables,
dataIdFromObject: dataIdFromObject,
fragmentMatcherFunction: fragmentMatcherFunction,
});
}
catch (e) {
throw enhanceErrorWithDocument(e, query);
}
}
export function writeResultToStore(_a) {
var dataId = _a.dataId, result = _a.result, document = _a.document, _b = _a.storeFactory, storeFactory = _b === void 0 ? defaultNormalizedCacheFactory : _b, _c = _a.store, store = _c === void 0 ? storeFactory() : _c, variables = _a.variables, dataIdFromObject = _a.dataIdFromObject, fragmentMatcherFunction = _a.fragmentMatcherFunction;
var operationDefinition = getOperationDefinition(document);
var selectionSet = operationDefinition.selectionSet;
var fragmentMap = createFragmentMap(getFragmentDefinitions(document));
variables = assign({}, getDefaultValues(operationDefinition), variables);
try {
return writeSelectionSetToStore({
result: result,
dataId: dataId,
selectionSet: selectionSet,
context: {
store: store,
storeFactory: storeFactory,
processedData: {},
variables: variables,
dataIdFromObject: dataIdFromObject,
fragmentMap: fragmentMap,
fragmentMatcherFunction: fragmentMatcherFunction,
},
});
}
catch (e) {
throw enhanceErrorWithDocument(e, document);
}
}
export function writeSelectionSetToStore(_a) {
var result = _a.result, dataId = _a.dataId, selectionSet = _a.selectionSet, context = _a.context;
var variables = context.variables, store = context.store, fragmentMap = context.fragmentMap;
selectionSet.selections.forEach(function (selection) {
var included = shouldInclude(selection, variables);
if (isField(selection)) {
var resultFieldKey = resultKeyNameFromField(selection);
var value = result[resultFieldKey];
if (included) {
};
StoreWriter.prototype.writeResultToStore = function (_a) {
var dataId = _a.dataId, result = _a.result, document = _a.document, _b = _a.store, store = _b === void 0 ? defaultNormalizedCacheFactory() : _b, variables = _a.variables, dataIdFromObject = _a.dataIdFromObject, fragmentMatcherFunction = _a.fragmentMatcherFunction;
var operationDefinition = getOperationDefinition(document);
try {
return this.writeSelectionSetToStore({
result: result,
dataId: dataId,
selectionSet: operationDefinition.selectionSet,
context: {
store: store,
processedData: {},
variables: assign({}, getDefaultValues(operationDefinition), variables),
dataIdFromObject: dataIdFromObject,
fragmentMap: createFragmentMap(getFragmentDefinitions(document)),
fragmentMatcherFunction: fragmentMatcherFunction,
},
parentKind: operationDefinition.kind,
});
}
catch (e) {
throw enhanceErrorWithDocument(e, document);
}
};
StoreWriter.prototype.writeSelectionSetToStore = function (_a) {
var _this = this;
var result = _a.result, dataId = _a.dataId, selectionSet = _a.selectionSet, context = _a.context, parentKind = _a.parentKind;
var variables = context.variables, store = context.store, fragmentMap = context.fragmentMap;
var didWriteTypename = false;
selectionSet.selections.forEach(function (selection) {
if (!shouldInclude(selection, variables)) {
return;
}
if (isField(selection)) {
var resultFieldKey = resultKeyNameFromField(selection);
var value = result[resultFieldKey];
if (resultFieldKey === "__typename") {
didWriteTypename = true;
}
if (typeof value !== 'undefined') {
writeFieldToStore({
_this.writeFieldToStore({
dataId: dataId,

@@ -124,40 +128,165 @@ value: value,

}
else {
var fragment = void 0;
if (isInlineFragment(selection)) {
fragment = selection;
}
else {
fragment = (fragmentMap || {})[selection.name.value];
if (!fragment) {
throw new Error("No fragment named " + selection.name.value + ".");
}
}
var matches = true;
if (context.fragmentMatcherFunction && fragment.typeCondition) {
var idValue = toIdValue({ id: 'self', typename: undefined });
var fakeContext = {
store: new ObjectCache({ self: result }),
cacheRedirects: {},
};
var match = context.fragmentMatcherFunction(idValue, fragment.typeCondition.name.value, fakeContext);
if (!isProduction() && match === 'heuristic') {
console.error('WARNING: heuristic fragment matching going on!');
}
matches = !!match;
}
if (matches) {
_this.writeSelectionSetToStore({
result: result,
selectionSet: fragment.selectionSet,
dataId: dataId,
context: context,
parentKind: fragment.kind,
});
}
}
});
if (!didWriteTypename &&
this.addTypename &&
parentKind !== "OperationDefinition" &&
typeof result.__typename === "string") {
this.writeFieldToStore({
dataId: dataId,
value: result.__typename,
field: TYPENAME_FIELD,
context: context,
});
}
return store;
};
StoreWriter.prototype.writeFieldToStore = function (_a) {
var field = _a.field, value = _a.value, dataId = _a.dataId, context = _a.context;
var _b;
var variables = context.variables, dataIdFromObject = context.dataIdFromObject, store = context.store;
var storeValue;
var storeObject;
var storeFieldName = storeKeyNameFromField(field, variables);
if (!field.selectionSet || value === null) {
storeValue =
value != null && typeof value === 'object'
?
{ type: 'json', json: value }
:
value;
}
else if (Array.isArray(value)) {
var generatedId = dataId + "." + storeFieldName;
storeValue = this.processArrayValue(value, generatedId, field.selectionSet, context, field.kind);
}
else {
var fragment = void 0;
if (isInlineFragment(selection)) {
fragment = selection;
var valueDataId = dataId + "." + storeFieldName;
var generated = true;
if (!isGeneratedId(valueDataId)) {
valueDataId = '$' + valueDataId;
}
else {
fragment = (fragmentMap || {})[selection.name.value];
if (!fragment) {
throw new Error("No fragment named " + selection.name.value + ".");
if (dataIdFromObject) {
var semanticId = dataIdFromObject(value);
if (semanticId && isGeneratedId(semanticId)) {
throw new Error('IDs returned by dataIdFromObject cannot begin with the "$" character.');
}
if (semanticId ||
(typeof semanticId === 'number' && semanticId === 0)) {
valueDataId = semanticId;
generated = false;
}
}
var matches = true;
if (context.fragmentMatcherFunction && fragment.typeCondition) {
var idValue = toIdValue({ id: 'self', typename: undefined });
var fakeContext = {
store: new ObjectCache({ self: result }),
returnPartialData: false,
hasMissingField: false,
cacheRedirects: {},
};
matches = context.fragmentMatcherFunction(idValue, fragment.typeCondition.name.value, fakeContext);
if (!isProduction() && fakeContext.returnPartialData) {
console.error('WARNING: heuristic fragment matching going on!');
if (!isDataProcessed(valueDataId, field, context.processedData)) {
this.writeSelectionSetToStore({
dataId: valueDataId,
result: value,
selectionSet: field.selectionSet,
context: context,
parentKind: field.kind,
});
}
var typename = value.__typename;
storeValue = toIdValue({ id: valueDataId, typename: typename }, generated);
storeObject = store.get(dataId);
var escapedId = storeObject && storeObject[storeFieldName];
if (escapedId !== storeValue && isIdValue(escapedId)) {
var hadTypename = escapedId.typename !== undefined;
var hasTypename = typename !== undefined;
var typenameChanged = hadTypename && hasTypename && escapedId.typename !== typename;
if (generated && !escapedId.generated && !typenameChanged) {
throw new Error("Store error: the application attempted to write an object with no provided id" +
(" but the store already contains an id of " + escapedId.id + " for this object. The selectionSet") +
" that was trying to be written is:\n" +
print(field));
}
if (hadTypename && !hasTypename) {
throw new Error("Store error: the application attempted to write an object with no provided typename" +
(" but the store already contains an object with typename of " + escapedId.typename + " for the object of id " + escapedId.id + ". The selectionSet") +
" that was trying to be written is:\n" +
print(field));
}
if (escapedId.generated) {
if (typenameChanged) {
if (!generated) {
store.delete(escapedId.id);
}
}
else {
mergeWithGenerated(escapedId.id, storeValue.id, store);
}
}
}
if (included && matches) {
writeSelectionSetToStore({
result: result,
selectionSet: fragment.selectionSet,
dataId: dataId,
}
storeObject = store.get(dataId);
if (!storeObject || !isEqual(storeValue, storeObject[storeFieldName])) {
store.set(dataId, __assign({}, storeObject, (_b = {}, _b[storeFieldName] = storeValue, _b)));
}
};
StoreWriter.prototype.processArrayValue = function (value, generatedId, selectionSet, context, parentKind) {
var _this = this;
return value.map(function (item, index) {
if (item === null) {
return null;
}
var itemDataId = generatedId + "." + index;
if (Array.isArray(item)) {
return _this.processArrayValue(item, itemDataId, selectionSet, context, parentKind);
}
var generated = true;
if (context.dataIdFromObject) {
var semanticId = context.dataIdFromObject(item);
if (semanticId) {
itemDataId = semanticId;
generated = false;
}
}
if (!isDataProcessed(itemDataId, selectionSet, context.processedData)) {
_this.writeSelectionSetToStore({
dataId: itemDataId,
result: item,
selectionSet: selectionSet,
context: context,
parentKind: parentKind,
});
}
}
});
return store;
}
return toIdValue({ id: itemDataId, typename: item.__typename }, generated);
});
};
return StoreWriter;
}());
export { StoreWriter };
function isGeneratedId(id) {

@@ -167,13 +296,26 @@ return id[0] === '$';

function mergeWithGenerated(generatedKey, realKey, cache) {
if (generatedKey === realKey) {
return false;
}
var generated = cache.get(generatedKey);
var real = cache.get(realKey);
var madeChanges = false;
Object.keys(generated).forEach(function (key) {
var value = generated[key];
var realValue = real[key];
if (isIdValue(value) && isGeneratedId(value.id) && isIdValue(realValue)) {
mergeWithGenerated(value.id, realValue.id, cache);
if (isIdValue(value) &&
isGeneratedId(value.id) &&
isIdValue(realValue) &&
!isEqual(value, realValue) &&
mergeWithGenerated(value.id, realValue.id, cache)) {
madeChanges = true;
}
cache.delete(generatedKey);
cache.set(realKey, __assign({}, generated, real));
});
cache.delete(generatedKey);
var newRealValue = __assign({}, generated, real);
if (isEqual(newRealValue, real)) {
return madeChanges;
}
cache.set(realKey, newRealValue);
return true;
}
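To make the generated-id handling above concrete, a sketch with made-up identifiers and data: a write without dataIdFromObject stores a nested object under a generated key, and a later write that supplies a real id lets mergeWithGenerated fold the generated entry into the real one and remove the generated key.

import gql from 'graphql-tag';
import { StoreWriter } from './writeToStore';

const writer = new StoreWriter();
const query = gql`
  {
    author {
      __typename
      id
      name
    }
  }
`;

// First write: no dataIdFromObject, so the author is stored under the
// generated key '$ROOT_QUERY.author'.
const store = writer.writeQueryToStore({
  query,
  result: { author: { __typename: 'Author', id: '1', name: 'Ada' } },
});

// Second write with a real id: writeFieldToStore sees that the existing
// reference is generated, merges '$ROOT_QUERY.author' into 'Author:1', and
// deletes the generated entry from the store.
writer.writeQueryToStore({
  store,
  query,
  result: { author: { __typename: 'Author', id: '1', name: 'Ada Lovelace' } },
  dataIdFromObject: (obj: any) => `${obj.__typename}:${obj.id}`,
});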

@@ -197,117 +339,2 @@ function isDataProcessed(dataId, field, processedData) {

}
function writeFieldToStore(_a) {
var field = _a.field, value = _a.value, dataId = _a.dataId, context = _a.context;
var _b;
var variables = context.variables, dataIdFromObject = context.dataIdFromObject, store = context.store;
var storeValue;
var storeObject;
var storeFieldName = storeKeyNameFromField(field, variables);
var shouldMerge = false;
var generatedKey = '';
if (!field.selectionSet || value === null) {
storeValue =
value != null && typeof value === 'object'
?
{ type: 'json', json: value }
:
value;
}
else if (Array.isArray(value)) {
var generatedId = dataId + "." + storeFieldName;
storeValue = processArrayValue(value, generatedId, field.selectionSet, context);
}
else {
var valueDataId = dataId + "." + storeFieldName;
var generated = true;
if (!isGeneratedId(valueDataId)) {
valueDataId = '$' + valueDataId;
}
if (dataIdFromObject) {
var semanticId = dataIdFromObject(value);
if (semanticId && isGeneratedId(semanticId)) {
throw new Error('IDs returned by dataIdFromObject cannot begin with the "$" character.');
}
if (semanticId || (typeof semanticId === 'number' && semanticId === 0)) {
valueDataId = semanticId;
generated = false;
}
}
if (!isDataProcessed(valueDataId, field, context.processedData)) {
writeSelectionSetToStore({
dataId: valueDataId,
result: value,
selectionSet: field.selectionSet,
context: context,
});
}
var typename = value.__typename;
storeValue = toIdValue({ id: valueDataId, typename: typename }, generated);
storeObject = store.get(dataId);
var escapedId = storeObject && storeObject[storeFieldName];
if (escapedId !== storeValue && isIdValue(escapedId)) {
var hadTypename = escapedId.typename !== undefined;
var hasTypename = typename !== undefined;
var typenameChanged = hadTypename && hasTypename && escapedId.typename !== typename;
if (generated && !escapedId.generated && !typenameChanged) {
throw new Error("Store error: the application attempted to write an object with no provided id" +
(" but the store already contains an id of " + escapedId.id + " for this object. The selectionSet") +
" that was trying to be written is:\n" +
print(field));
}
if (hadTypename && !hasTypename) {
throw new Error("Store error: the application attempted to write an object with no provided typename" +
(" but the store already contains an object with typename of " + escapedId.typename + " for the object of id " + escapedId.id + ". The selectionSet") +
" that was trying to be written is:\n" +
print(field));
}
if (escapedId.generated) {
generatedKey = escapedId.id;
if (typenameChanged) {
if (!generated) {
store.delete(generatedKey);
}
}
else {
shouldMerge = true;
}
}
}
}
var newStoreObj = __assign({}, store.get(dataId), (_b = {}, _b[storeFieldName] = storeValue, _b));
if (shouldMerge) {
mergeWithGenerated(generatedKey, storeValue.id, store);
}
storeObject = store.get(dataId);
if (!storeObject || storeValue !== storeObject[storeFieldName]) {
store.set(dataId, newStoreObj);
}
}
function processArrayValue(value, generatedId, selectionSet, context) {
return value.map(function (item, index) {
if (item === null) {
return null;
}
var itemDataId = generatedId + "." + index;
if (Array.isArray(item)) {
return processArrayValue(item, itemDataId, selectionSet, context);
}
var generated = true;
if (context.dataIdFromObject) {
var semanticId = context.dataIdFromObject(item);
if (semanticId) {
itemDataId = semanticId;
generated = false;
}
}
if (!isDataProcessed(itemDataId, selectionSet, context.processedData)) {
writeSelectionSetToStore({
dataId: itemDataId,
result: item,
selectionSet: selectionSet,
context: context,
});
}
return toIdValue({ id: itemDataId, typename: item.__typename }, generated);
});
}
//# sourceMappingURL=writeToStore.js.map
{
"name": "apollo-cache-inmemory",
"version": "1.3.0-verify.4",
"version": "1.3.0",
"description": "Core abstract of Caching layer for Apollo Client",

@@ -28,3 +28,3 @@ "author": "James Baxley <james@meteor.com>",

"scripts": {
"prepare": "npm run lint && npm run build",
"prepare": "npm run build",
"coverage": "jest --coverage",

@@ -44,5 +44,5 @@ "test": "jest",

"dependencies": {
"apollo-cache": "^1.2.0-verify.4",
"apollo-utilities": "^1.1.0-verify.4",
"graphql-anywhere": "^4.2.0-verify.4"
"apollo-cache": "^1.1.17",
"apollo-utilities": "^1.0.21",
"optimism": "^0.6.6"
},

@@ -64,4 +64,3 @@ "peerDependencies": {

"testURL": "http://localhost"
},
"gitHead": "aa5d083bdc328149e506b275c07d978b3b7fefd9"
}
}
import gql, { disableFragmentWarnings } from 'graphql-tag';
import { toIdValue, stripSymbols } from 'apollo-utilities';
import { toIdValue } from 'apollo-utilities';
import { defaultNormalizedCacheFactory } from '../objectCache';
import { diffQueryAgainstStore, ID_KEY } from '../readFromStore';
import { writeQueryToStore } from '../writeToStore';
import { StoreReader } from '../readFromStore';
import { StoreWriter } from '../writeToStore';
import { HeuristicFragmentMatcher } from '../fragmentMatcher';

@@ -29,2 +29,5 @@ import { defaultDataIdFromObject } from '../inMemoryCache';

describe('diffing queries against the store', () => {
const reader = new StoreReader();
const writer = new StoreWriter();
it(

@@ -36,3 +39,3 @@ 'expects named fragments to return complete as true when diffd against ' +

const queryResult = diffQueryAgainstStore({
const queryResult = reader.diffQueryAgainstStore({
store,

@@ -69,3 +72,3 @@ query: gql`

const queryResult = diffQueryAgainstStore({
const queryResult = reader.diffQueryAgainstStore({
store,

@@ -125,3 +128,3 @@ query: gql`

const store = writeQueryToStore({
const store = writer.writeQueryToStore({
result,

@@ -132,3 +135,3 @@ query,

expect(
diffQueryAgainstStore({
reader.diffQueryAgainstStore({
store,

@@ -161,3 +164,3 @@ query,

const store = writeQueryToStore({
const store = writer.writeQueryToStore({
result,

@@ -178,3 +181,3 @@ query: firstQuery,

const { complete } = diffQueryAgainstStore({
const { complete } = reader.diffQueryAgainstStore({
store,

@@ -201,3 +204,3 @@ query: secondQuery,

};
const store = writeQueryToStore({
const store = writer.writeQueryToStore({
result: firstResult,

@@ -212,3 +215,3 @@ query: firstQuery,

return expect(() => {
diffQueryAgainstStore({
reader.diffQueryAgainstStore({
store,

@@ -238,3 +241,3 @@ query: unionQuery,

};
const store = writeQueryToStore({
const store = writer.writeQueryToStore({
result: firstResult,

@@ -257,3 +260,3 @@ query: firstQuery,

`;
const { complete } = diffQueryAgainstStore({
const { complete } = reader.diffQueryAgainstStore({
store,

@@ -286,3 +289,3 @@ query: unionQuery,

};
const store = writeQueryToStore({
const store = writer.writeQueryToStore({
result: firstResult,

@@ -309,3 +312,3 @@ query: firstQuery,

const { complete } = diffQueryAgainstStore({
const { complete } = reader.diffQueryAgainstStore({
store,

@@ -335,3 +338,3 @@ query: unionQuery,

};
const store = writeQueryToStore({
const store = writer.writeQueryToStore({
result: firstResult,

@@ -359,3 +362,3 @@ query: firstQuery,

expect(() => {
diffQueryAgainstStore({
reader.diffQueryAgainstStore({
store,

@@ -387,3 +390,3 @@ query: unionQuery,

const store = writeQueryToStore({
const store = writer.writeQueryToStore({
result: firstResult,

@@ -428,3 +431,3 @@ query: firstQuery,

const simpleDiff = diffQueryAgainstStore({
const simpleDiff = reader.diffQueryAgainstStore({
store,

@@ -434,3 +437,3 @@ query: simpleQuery,

expect(stripSymbols(simpleDiff.result)).toEqual({
expect(simpleDiff.result).toEqual({
people_one: {

@@ -441,3 +444,3 @@ name: 'Luke Skywalker',

const inlineDiff = diffQueryAgainstStore({
const inlineDiff = reader.diffQueryAgainstStore({
store,

@@ -447,3 +450,3 @@ query: inlineFragmentQuery,

expect(stripSymbols(inlineDiff.result)).toEqual({
expect(inlineDiff.result).toEqual({
people_one: {

@@ -454,3 +457,3 @@ name: 'Luke Skywalker',

const namedDiff = diffQueryAgainstStore({
const namedDiff = reader.diffQueryAgainstStore({
store,

@@ -460,3 +463,3 @@ query: namedFragmentQuery,

expect(stripSymbols(namedDiff.result)).toEqual({
expect(namedDiff.result).toEqual({
people_one: {

@@ -468,3 +471,3 @@ name: 'Luke Skywalker',

expect(function() {
diffQueryAgainstStore({
reader.diffQueryAgainstStore({
store,

@@ -512,9 +515,13 @@ query: simpleQuery,

const store = writeQueryToStore({
function dataIdFromObject({ id }: { id: string}) {
return id;
}
const store = writer.writeQueryToStore({
query,
result: queryResult,
dataIdFromObject: ({ id }: { id: string }) => id,
dataIdFromObject,
});
const { result } = diffQueryAgainstStore({
const { result } = reader.diffQueryAgainstStore({
store,

@@ -524,14 +531,11 @@ query,

expect(stripSymbols(result)).toEqual(queryResult);
expect(result[ID_KEY]).toBe('ROOT_QUERY');
expect(result.a[0][ID_KEY]).toBe('a:1');
expect(result.a[1][ID_KEY]).toBe('a:2');
expect(result.a[2][ID_KEY]).toBe('a:3');
expect(result.c[ID_KEY]).toBe('$ROOT_QUERY.c');
expect(result.c.e[0][ID_KEY]).toBe('e:1');
expect(result.c.e[1][ID_KEY]).toBe('e:2');
expect(result.c.e[2][ID_KEY]).toBe('e:3');
expect(result.c.e[3][ID_KEY]).toBe('e:4');
expect(result.c.e[4][ID_KEY]).toBe('e:5');
expect(result.c.g[ID_KEY]).toBe('$ROOT_QUERY.c.g');
expect(result).toEqual(queryResult);
expect(dataIdFromObject(result.a[0])).toBe('a:1');
expect(dataIdFromObject(result.a[1])).toBe('a:2');
expect(dataIdFromObject(result.a[2])).toBe('a:3');
expect(dataIdFromObject(result.c.e[0])).toBe('e:1');
expect(dataIdFromObject(result.c.e[1])).toBe('e:2');
expect(dataIdFromObject(result.c.e[2])).toBe('e:3');
expect(dataIdFromObject(result.c.e[3])).toBe('e:4');
expect(dataIdFromObject(result.c.e[4])).toBe('e:5');
});

@@ -560,3 +564,3 @@

const store = writeQueryToStore({
const store = writer.writeQueryToStore({
query,

@@ -571,3 +575,3 @@ result: queryResult,

const { result } = diffQueryAgainstStore({
const { result } = reader.diffQueryAgainstStore({
store,

@@ -602,3 +606,3 @@ query,

const store = writeQueryToStore({
const store = writer.writeQueryToStore({
query,

@@ -613,3 +617,3 @@ result: queryResult,

const { result } = diffQueryAgainstStore({
const { result } = reader.diffQueryAgainstStore({
store,

@@ -620,3 +624,3 @@ query,

expect(stripSymbols(result)).toEqual(queryResult);
expect(result).toEqual(queryResult);
expect(result).not.toEqual(previousResult);

@@ -651,3 +655,3 @@ expect(result.a).toEqual(previousResult.a);

const store = writeQueryToStore({
const store = writer.writeQueryToStore({
query,

@@ -665,3 +669,3 @@ result: queryResult,

const { result } = diffQueryAgainstStore({
const { result } = reader.diffQueryAgainstStore({
store,

@@ -689,3 +693,3 @@ query,

const store = writeQueryToStore({
const store = writer.writeQueryToStore({
query,

@@ -699,3 +703,3 @@ result: queryResult,

const { result } = diffQueryAgainstStore({
const { result } = reader.diffQueryAgainstStore({
store,

@@ -706,3 +710,3 @@ query,

expect(stripSymbols(result)).toEqual(queryResult);
expect(result).toEqual(queryResult);
expect(result.a[0]).toEqual(previousResult.a[0]);

@@ -735,3 +739,3 @@ expect(result.a[1]).toEqual(previousResult.a[1]);

const store = writeQueryToStore({
const store = writer.writeQueryToStore({
query,

@@ -749,3 +753,3 @@ result: queryResult,

const { result } = diffQueryAgainstStore({
const { result } = reader.diffQueryAgainstStore({
store,

@@ -783,3 +787,3 @@ query,

const store = writeQueryToStore({
const store = writer.writeQueryToStore({
query,

@@ -797,3 +801,3 @@ result: queryResult,

const { result } = diffQueryAgainstStore({
const { result } = reader.diffQueryAgainstStore({
store,

@@ -804,3 +808,3 @@ query,

expect(stripSymbols(result)).toEqual(queryResult);
expect(result).toEqual(queryResult);
expect(result).not.toEqual(previousResult);

@@ -859,3 +863,3 @@ expect(result.a).not.toEqual(previousResult.a);

const store = writeQueryToStore({
const store = writer.writeQueryToStore({
query,

@@ -868,5 +872,5 @@ result: queryResult,

a: [
{ id: 'a:3', b: 1.3, [ID_KEY]: 'a:3' },
{ id: 'a:2', b: 1.2, [ID_KEY]: 'a:2' },
{ id: 'a:1', b: 1.1, [ID_KEY]: 'a:1' },
{ id: 'a:3', b: 1.3 },
{ id: 'a:2', b: 1.2 },
{ id: 'a:1', b: 1.1 },
],

@@ -876,7 +880,7 @@ c: {

e: [
{ id: 'e:4', f: 3.4, [ID_KEY]: 'e:4' },
{ id: 'e:2', f: 3.2, [ID_KEY]: 'e:2' },
{ id: 'e:5', f: 3.5, [ID_KEY]: 'e:5' },
{ id: 'e:3', f: 3.3, [ID_KEY]: 'e:3' },
{ id: 'e:1', f: 3.1, [ID_KEY]: 'e:1' },
{ id: 'e:4', f: 3.4 },
{ id: 'e:2', f: 3.2 },
{ id: 'e:5', f: 3.5 },
{ id: 'e:3', f: 3.3 },
{ id: 'e:1', f: 3.1 },
],

@@ -887,3 +891,3 @@ g: { h: 4 },

const { result } = diffQueryAgainstStore({
const { result } = reader.diffQueryAgainstStore({
store,

@@ -894,3 +898,3 @@ query,

expect(stripSymbols(result)).toEqual(queryResult);
expect(result).toEqual(queryResult);
expect(result).not.toEqual(previousResult);

@@ -930,3 +934,3 @@ expect(result.a).not.toEqual(previousResult.a);

const store = writeQueryToStore({
const store = writer.writeQueryToStore({
query,

@@ -941,3 +945,3 @@ result: queryResult,

const { result } = diffQueryAgainstStore({
const { result } = reader.diffQueryAgainstStore({
store,

@@ -948,3 +952,3 @@ query,

expect(stripSymbols(result)).toEqual(queryResult);
expect(result).toEqual(queryResult);
expect(result).not.toEqual(previousResult);

@@ -988,3 +992,3 @@ expect(result.a).toEqual(previousResult.a);

const store = writeQueryToStore({
const store = writer.writeQueryToStore({
query: listQuery,

@@ -1008,3 +1012,3 @@ result: listResult,

const { result } = diffQueryAgainstStore({
const { result } = reader.diffQueryAgainstStore({
store,

@@ -1011,0 +1015,0 @@ query: itemQuery,

@@ -7,3 +7,3 @@ import { assign, omit } from 'lodash';

import { NormalizedCache, StoreObject, HeuristicFragmentMatcher } from '../';
import { readQueryFromStore } from '../readFromStore';
import { StoreReader } from '../readFromStore';
import { defaultNormalizedCacheFactory } from '../objectCache';

@@ -15,2 +15,4 @@

describe('reading from the store', () => {
const reader = new StoreReader();
it('runs a nested query with proper fragment fields in arrays', () => {

@@ -35,3 +37,3 @@ withError(() => {

const queryResult = readQueryFromStore({
const queryResult = reader.readQueryFromStore({
store,

@@ -76,5 +78,6 @@ query: gql`

});
it('rejects malformed queries', () => {
expect(() => {
readQueryFromStore({
reader.readQueryFromStore({
store: defaultNormalizedCacheFactory(),

@@ -94,3 +97,3 @@ query: gql`

expect(() => {
readQueryFromStore({
reader.readQueryFromStore({
store: defaultNormalizedCacheFactory(),

@@ -118,3 +121,3 @@ query: gql`

const queryResult = readQueryFromStore({
const queryResult = reader.readQueryFromStore({
store,

@@ -161,3 +164,3 @@ query: gql`

const result = readQueryFromStore({
const result = reader.readQueryFromStore({
store,

@@ -195,3 +198,3 @@ query,

const result = readQueryFromStore({
const result = reader.readQueryFromStore({
store,

@@ -236,3 +239,3 @@ query,

const result = readQueryFromStore({
const result = reader.readQueryFromStore({
store,

@@ -276,3 +279,3 @@ query,

const queryResult = readQueryFromStore({
const queryResult = reader.readQueryFromStore({
store,

@@ -346,3 +349,3 @@ query: gql`

const queryResult = readQueryFromStore({
const queryResult = reader.readQueryFromStore({
store,

@@ -431,3 +434,3 @@ query: gql`

const queryResult = readQueryFromStore({
const queryResult = reader.readQueryFromStore({
store,

@@ -489,3 +492,3 @@ query: gql`

const queryResult = readQueryFromStore({
const queryResult = reader.readQueryFromStore({
store,

@@ -542,3 +545,3 @@ query: gql`

const queryResult = readQueryFromStore({
const queryResult = reader.readQueryFromStore({
store,

@@ -584,3 +587,3 @@ query: gql`

expect(() => {
readQueryFromStore({
reader.readQueryFromStore({
store,

@@ -612,3 +615,3 @@ query: gql`

const queryResult = readQueryFromStore({
const queryResult = reader.readQueryFromStore({
store,

@@ -653,3 +656,3 @@ query: gql`

const queryResult = readQueryFromStore({
const queryResult = reader.readQueryFromStore({
store,

@@ -691,3 +694,3 @@ query: gql`

const queryResult = readQueryFromStore({
const queryResult = reader.readQueryFromStore({
store,

@@ -755,3 +758,3 @@ query: gql`

const queryResult1 = readQueryFromStore({
const queryResult1 = reader.readQueryFromStore({
store,

@@ -784,3 +787,3 @@ rootId: 'abcde',

const queryResult2 = readQueryFromStore({
const queryResult2 = reader.readQueryFromStore({
store,

@@ -820,3 +823,3 @@ rootId: 'abcdef',

const queryResult = readQueryFromStore({
const queryResult = reader.readQueryFromStore({
store,

@@ -823,0 +826,0 @@ query: gql`

@@ -9,6 +9,8 @@ import { getFragmentDefinitions, createFragmentMap } from 'apollo-utilities';

import { DepTrackingCache } from '../depTrackingCache';
import {
HeuristicFragmentMatcher,
writeQueryToStore,
readQueryFromStore,
StoreReader,
StoreWriter,
} from '../';

@@ -19,11 +21,12 @@

function storeRoundtrip(query: DocumentNode, result: any, variables = {}) {
const fragmentMap = createFragmentMap(getFragmentDefinitions(query));
const store = writeQueryToStore({
const reader = new StoreReader();
const writer = new StoreWriter();
const store = writer.writeQueryToStore({
result,
query,
variables,
fragmentMap,
});
const reconstructedResult = readQueryFromStore({
const readOptions = {
store,

@@ -33,5 +36,56 @@ query,

fragmentMatcherFunction,
};
const reconstructedResult = reader.readQueryFromStore(readOptions);
expect(reconstructedResult).toEqual(result);
// Make sure the result is identical if we haven't written anything new
// to the store. https://github.com/apollographql/apollo-client/pull/3394
expect(store).toBeInstanceOf(DepTrackingCache);
expect(reader.readQueryFromStore(readOptions)).toBe(reconstructedResult);
// Now make sure subtrees of the result are identical even after we write
// an additional bogus field to the store.
writer.writeQueryToStore({
store,
result: { oyez: 1234 },
query: gql`
{
oyez
}
`,
});
expect(stripSymbols(reconstructedResult)).toEqual(result);
const deletedRootResult = reader.readQueryFromStore(readOptions);
expect(deletedRootResult).toEqual(result);
if (deletedRootResult === reconstructedResult) {
// We don't expect the new result to be identical to the previous result,
// but there are some rare cases where that can happen, and it's a good
// thing, because it means the caching system is working slightly better
// than expected... and we don't need to continue with the rest of the
// comparison logic below.
return;
}
function expectStrictEqualExceptArrays(a, b) {
if (Array.isArray(a)) {
// The caching system caches result objects but not result arrays, so we
// recursively compare array elements using expectStrictEqualExceptArrays.
expect(Array.isArray(b)).toBe(true);
expect(a.length).toBe(b.length);
a.forEach((aItem, index) => {
expectStrictEqualExceptArrays(aItem, b[index]);
});
} else {
expect(a).toBe(b);
}
}
Object.keys(result).forEach(key => {
expectStrictEqualExceptArrays(
deletedRootResult[key],
reconstructedResult[key],
);
});
}

@@ -38,0 +92,0 @@

@@ -18,7 +18,3 @@ import { cloneDeep, assign, omit } from 'lodash';

import {
writeQueryToStore,
writeResultToStore,
writeSelectionSetToStore,
} from '../writeToStore';
import { StoreWriter } from '../writeToStore';

@@ -50,2 +46,4 @@ import { defaultNormalizedCacheFactory } from '../objectCache';

describe('writing to the store', () => {
const writer = new StoreWriter();
it('properly normalizes a trivial item', () => {

@@ -69,3 +67,3 @@ const query = gql`

expect(
writeQueryToStore({
writer.writeQueryToStore({
query,

@@ -96,3 +94,3 @@ result: cloneDeep(result),

const normalized = writeQueryToStore({
const normalized = writer.writeQueryToStore({
result,

@@ -131,3 +129,3 @@ query,

const normalized = writeQueryToStore({
const normalized = writer.writeQueryToStore({
result,

@@ -171,3 +169,3 @@ query,

const normalized = writeQueryToStore({
const normalized = writer.writeQueryToStore({
result,

@@ -214,3 +212,3 @@ query,

const normalized = writeQueryToStore({
const normalized = writer.writeQueryToStore({
result,

@@ -248,3 +246,3 @@ query,

const normalized = writeQueryToStore({
const normalized = writer.writeQueryToStore({
result,

@@ -294,3 +292,3 @@ query,

expect(
writeQueryToStore({
writer.writeQueryToStore({
query,

@@ -340,3 +338,3 @@ result: cloneDeep(result),

expect(
writeQueryToStore({
writer.writeQueryToStore({
query,

@@ -385,3 +383,3 @@ result: cloneDeep(result),

expect(
writeQueryToStore({
writer.writeQueryToStore({
query,

@@ -440,3 +438,3 @@ result: cloneDeep(result),

expect(
writeQueryToStore({
writer.writeQueryToStore({
query,

@@ -492,3 +490,3 @@ result: cloneDeep(result),

expect(
writeQueryToStore({
writer.writeQueryToStore({
query,

@@ -543,3 +541,3 @@ result: cloneDeep(result),

const normalized = writeQueryToStore({
const normalized = writer.writeQueryToStore({
query,

@@ -591,3 +589,3 @@ result: cloneDeep(result),

const normalized = writeQueryToStore({
const normalized = writer.writeQueryToStore({
query,

@@ -627,3 +625,3 @@ result: cloneDeep(result),

const normalized = writeQueryToStore({
const normalized = writer.writeQueryToStore({
query,

@@ -667,3 +665,3 @@ result: cloneDeep(result),

const normalized = writeQueryToStore({
const normalized = writer.writeQueryToStore({
query,

@@ -714,3 +712,3 @@ result: cloneDeep(result),

const normalized = writeQueryToStore({
const normalized = writer.writeQueryToStore({
query,

@@ -790,3 +788,3 @@ result: cloneDeep(result),

const normalized = writeQueryToStore({
const normalized = writer.writeQueryToStore({
query,

@@ -881,3 +879,3 @@ result: cloneDeep(result),

const normalized = writeQueryToStore({
const normalized = writer.writeQueryToStore({
query,

@@ -945,3 +943,3 @@ result: cloneDeep(result),

const store = writeQueryToStore({
const store = writer.writeQueryToStore({
query,

@@ -966,3 +964,3 @@ result: cloneDeep(result),

const store2 = writeQueryToStore({
const store2 = writer.writeQueryToStore({
store,

@@ -1004,3 +1002,3 @@ query: query2,

expect(
writeQueryToStore({
writer.writeQueryToStore({
query,

@@ -1034,3 +1032,3 @@ result: cloneDeep(result),

expect(
writeQueryToStore({
writer.writeQueryToStore({
query,

@@ -1202,3 +1200,3 @@ result: cloneDeep(result),

expect(
writeSelectionSetToStore({
writer.writeSelectionSetToStore({
dataId: '5',

@@ -1208,3 +1206,2 @@ selectionSet: def.selectionSet,

context: {
storeFactory: defaultNormalizedCacheFactory,
store: defaultNormalizedCacheFactory(),

@@ -1270,3 +1267,3 @@ variables,

expect(
writeQueryToStore({
writer.writeQueryToStore({
result: data,

@@ -1313,3 +1310,3 @@ query,

expect(
writeQueryToStore({
writer.writeQueryToStore({
result: data,

@@ -1354,3 +1351,3 @@ query,

expect(
writeQueryToStore({
writer.writeQueryToStore({
result: data,

@@ -1401,3 +1398,3 @@ query,

expect(
writeQueryToStore({
writer.writeQueryToStore({
result: data,

@@ -1482,3 +1479,3 @@ query,

});
const storeWithoutId = writeQueryToStore({
const storeWithoutId = writer.writeQueryToStore({
result: dataWithoutId,

@@ -1489,3 +1486,3 @@ query: queryWithoutId,

expect(storeWithoutId.toObject()).toEqual(expStoreWithoutId.toObject());
const storeWithId = writeQueryToStore({
const storeWithId = writer.writeQueryToStore({
result: dataWithId,

@@ -1568,3 +1565,3 @@ query: queryWithId,

// write the first object, without an ID, placeholder
const store = writeQueryToStore({
const store = writer.writeQueryToStore({
result: dataWithPlaceholder,

@@ -1577,3 +1574,3 @@ query,

// replace with another one of different type with ID
writeQueryToStore({
writer.writeQueryToStore({
result: dataWithAuthor,

@@ -1587,3 +1584,3 @@ query,

// and go back to the original:
writeQueryToStore({
writer.writeQueryToStore({
result: dataWithPlaceholder,

@@ -1614,3 +1611,3 @@ query,

expect(() => {
writeQueryToStore({
writer.writeQueryToStore({
result,

@@ -1638,3 +1635,3 @@ query,

};
const store = writeQueryToStore({
const store = writer.writeQueryToStore({
query,

@@ -1644,3 +1641,3 @@ result: cloneDeep(result),

const newStore = writeQueryToStore({
const newStore = writer.writeQueryToStore({
query,

@@ -1677,3 +1674,3 @@ result: cloneDeep(result),

const newStore = writeResultToStore({
const newStore = writer.writeResultToStore({
dataId: 'ROOT_QUERY',

@@ -1701,3 +1698,3 @@ result,

return withWarning(() => {
const newStore = writeResultToStore({
const newStore = writer.writeResultToStore({
dataId: 'ROOT_QUERY',

@@ -1767,3 +1764,3 @@ result,

return withWarning(() => {
const newStore = writeResultToStore({
const newStore = writer.writeResultToStore({
dataId: 'ROOT_QUERY',

@@ -1794,3 +1791,3 @@ result,

return withWarning(() => {
const newStore = writeResultToStore({
const newStore = writer.writeResultToStore({
dataId: 'ROOT_QUERY',

@@ -1812,3 +1809,3 @@ result,

const newStore = writeResultToStore({
const newStore = writer.writeResultToStore({
dataId: 'ROOT_QUERY',

@@ -1836,3 +1833,3 @@ result,

const fragmentMatcherFunction = new HeuristicFragmentMatcher().match;
const newStore = writeResultToStore({
const newStore = writer.writeResultToStore({
dataId: 'ROOT_QUERY',

@@ -1875,3 +1872,3 @@ result,

expect(() => {
writeQueryToStore({
writer.writeQueryToStore({
store,

@@ -1896,3 +1893,3 @@ result: {

expect(() => {
writeResultToStore({
writer.writeResultToStore({
store,

@@ -1921,3 +1918,3 @@ result: {

writeQueryToStore({
writer.writeQueryToStore({
query: gql`

@@ -1940,3 +1937,3 @@ {

writeQueryToStore({
writer.writeQueryToStore({
query: gql`

@@ -1988,3 +1985,3 @@ {

writeQueryToStore({
writer.writeQueryToStore({
query,

@@ -2027,3 +2024,3 @@ result: {

writeQueryToStore({
writer.writeQueryToStore({
query,

@@ -2088,3 +2085,3 @@ result: {

writeQueryToStore({
writer.writeQueryToStore({
query,

@@ -2128,3 +2125,3 @@ result: {

writeQueryToStore({
writer.writeQueryToStore({
query,

@@ -2131,0 +2128,0 @@ result: {

@@ -32,3 +32,3 @@ import { isTest, warnOnceInDevelopment, IdValue } from 'apollo-utilities';

context: ReadStoreContext,
): boolean {
): boolean | 'heuristic' {
const obj = context.store.get(idValue.id);

@@ -67,4 +67,3 @@

context.returnPartialData = true;
return true;
return 'heuristic';
}

@@ -90,4 +89,3 @@

context.returnPartialData = true;
return true;
return 'heuristic';
}

@@ -94,0 +92,0 @@ }

@@ -17,5 +17,9 @@ import { DocumentNode } from 'graphql';

} from './types';
import { writeResultToStore } from './writeToStore';
import { readQueryFromStore, diffQueryAgainstStore } from './readFromStore';
import { defaultNormalizedCacheFactory } from './objectCache';
import { StoreReader } from './readFromStore';
import { StoreWriter } from './writeToStore';
import { defaultNormalizedCacheFactory, DepTrackingCache } from './depTrackingCache';
import { wrap, CacheKeyNode, OptimisticWrapperFunction } from './optimism';
import { record } from './recordingCache';

@@ -26,3 +30,2 @@ const defaultConfig: ApolloReducerConfig = {

addTypename: true,
storeFactory: defaultNormalizedCacheFactory,
};

@@ -46,5 +49,8 @@

protected optimistic: OptimisticStoreItem[] = [];
private watches: Cache.WatchOptions[] = [];
private watches = new Set<Cache.WatchOptions>();
private addTypename: boolean;
private typenameDocumentCache = new WeakMap<DocumentNode, DocumentNode>();
private typenameDocumentCache = new Map<DocumentNode, DocumentNode>();
private storeReader: StoreReader;
private storeWriter: StoreWriter;
private cacheKeyRoot = new CacheKeyNode();

@@ -75,3 +81,43 @@ // Set this while in a transaction to prevent broadcasts...

this.addTypename = this.config.addTypename;
this.data = this.config.storeFactory();
this.data = defaultNormalizedCacheFactory();
this.storeReader = new StoreReader({
addTypename: this.config.addTypename,
cacheKeyRoot: this.cacheKeyRoot,
});
this.storeWriter = new StoreWriter({
addTypename: this.config.addTypename,
});
const cache = this;
const { maybeBroadcastWatch } = cache;
this.maybeBroadcastWatch = wrap((c: Cache.WatchOptions) => {
return maybeBroadcastWatch.call(this, c);
}, {
makeCacheKey(c: Cache.WatchOptions) {
if (c.optimistic && cache.optimistic.length > 0) {
// If we're reading optimistic data, it doesn't matter if this.data
// is a DepTrackingCache, since it will be ignored.
return;
}
if (c.previousResult) {
// If a previousResult was provided, assume the caller would prefer
// to compare the previous data to the new data to determine whether
// to broadcast, so we should disable caching by returning here, to
// give maybeBroadcastWatch a chance to do that comparison.
return;
}
if (cache.data instanceof DepTrackingCache) {
// Return a cache key (thus enabling caching) only if we're currently
// using a data store that can track cache dependencies.
return cache.cacheKeyRoot.lookup(
c.query,
JSON.stringify(c.variables),
);
}
}
});
}

@@ -98,5 +144,9 @@

return readQueryFromStore({
store: this.config.storeFactory(this.extract(query.optimistic)),
query: this.transformDocument(query.query),
const store = (query.optimistic && this.optimistic.length)
? defaultNormalizedCacheFactory(this.extract(true))
: this.data;
return this.storeReader.readQueryFromStore({
store,
query: query.query,
variables: query.variables,

@@ -111,7 +161,7 @@ rootId: query.rootId,

public write(write: Cache.WriteOptions): void {
writeResultToStore({
this.storeWriter.writeResultToStore({
dataId: write.dataId,
result: write.result,
variables: write.variables,
document: this.transformDocument(write.query),
document: write.query,
store: this.data,

@@ -126,5 +176,9 @@ dataIdFromObject: this.config.dataIdFromObject,

public diff<T>(query: Cache.DiffOptions): Cache.DiffResult<T> {
return diffQueryAgainstStore({
store: this.config.storeFactory(this.extract(query.optimistic)),
query: this.transformDocument(query.query),
const store = (query.optimistic && this.optimistic.length)
? defaultNormalizedCacheFactory(this.extract(true))
: this.data;
return this.storeReader.diffQueryAgainstStore({
store: store,
query: query.query,
variables: query.variables,

@@ -139,6 +193,6 @@ returnPartialData: query.returnPartialData,

public watch(watch: Cache.WatchOptions): () => void {
this.watches.push(watch);
this.watches.add(watch);
return () => {
this.watches = this.watches.filter(c => c !== watch);
this.watches.delete(watch);
};

@@ -245,4 +299,5 @@ }

return this.read({
query: this.transformDocument(
getFragmentQueryDocument(options.fragment, options.fragmentName),
query: getFragmentQueryDocument(
options.fragment,
options.fragmentName,
),

@@ -261,3 +316,3 @@ variables: options.variables,

result: options.data,
query: this.transformDocument(options.query),
query: options.query,
variables: options.variables,

@@ -273,4 +328,5 @@ });

result: options.data,
query: this.transformDocument(
getFragmentQueryDocument(options.fragment, options.fragmentName),
query: getFragmentQueryDocument(
options.fragment,
options.fragmentName,
),

@@ -282,20 +338,36 @@ variables: options.variables,

protected broadcastWatches() {
// Skip this when silenced (like inside a transaction)
if (this.silenceBroadcast) return;
if (!this.silenceBroadcast) {
const optimistic = this.optimistic.length > 0;
this.watches.forEach((c: Cache.WatchOptions) => {
this.maybeBroadcastWatch(c);
if (optimistic) {
// If we're broadcasting optimistic data, make sure we rebroadcast
// the real data once we're no longer in an optimistic state.
(this.maybeBroadcastWatch as OptimisticWrapperFunction<
(c: Cache.WatchOptions) => void
>).dirty(c);
}
});
}
}
// right now, we invalidate all queries whenever anything changes
this.watches.forEach((c: Cache.WatchOptions) => {
const newData = this.diff({
query: c.query,
variables: c.variables,
// This method is wrapped in the constructor so that it will be called only
// if the data that would be broadcast has changed.
private maybeBroadcastWatch(c: Cache.WatchOptions) {
const previousResult = c.previousResult && c.previousResult();
// TODO: previousResult isn't in the types - this will only work
// with ObservableQuery which is in a different package
previousResult: (c as any).previousResult && c.previousResult(),
optimistic: c.optimistic,
});
const newData = this.diff({
query: c.query,
variables: c.variables,
previousResult,
optimistic: c.optimistic,
});
c.callback(newData);
});
if (previousResult &&
previousResult === newData.result) {
return;
}
c.callback(newData);
}
}
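
The early return above is what suppresses redundant watcher broadcasts: because cached reads reuse previous result objects when nothing relevant changed in the store, a plain identity check is sufficient. The same pattern in isolation (hypothetical names):

function notifyIfChanged<T>(
  previousResult: T | undefined,
  newData: { result: T; complete: boolean },
  callback: (d: { result: T; complete: boolean }) => void,
) {
  // Cached reads return the exact same object when the underlying data are
  // unchanged, so === is enough to detect "nothing to broadcast".
  if (previousResult !== undefined && previousResult === newData.result) return;
  callback(newData);
}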

@@ -1,14 +0,23 @@

import graphqlAnywhere, { Resolver, ExecInfo } from 'graphql-anywhere';
import {
IdValue,
assign,
isEqual,
getDefaultValues,
getQueryDefinition,
isEqual,
DirectiveInfo,
FragmentMap,
IdValue,
StoreValue,
argumentsObjectFromField,
createFragmentMap,
getDirectiveInfoFromField,
getFragmentDefinitions,
getMainDefinition,
getStoreKeyName,
isField,
isIdValue,
isInlineFragment,
isJsonValue,
isIdValue,
resultKeyNameFromField,
shouldInclude,
toIdValue,
getStoreKeyName,
StoreValue,
} from 'apollo-utilities';

@@ -19,52 +28,508 @@

import {
ReadQueryOptions,
IdValueWithPreviousResult,
ReadStoreContext,
DiffQueryAgainstStoreOptions,
ReadQueryOptions,
StoreObject,
} from './types';
/**
* The key which the cache id for a given value is stored in the result object. This key is private
* and should not be used by Apollo client users.
*
* Uses a symbol if available in the environment.
*
* @private
*/
export const ID_KEY = typeof Symbol !== 'undefined' ? Symbol('id') : '@@id';
import {
DocumentNode,
FieldNode,
FragmentDefinitionNode,
InlineFragmentNode,
SelectionSetNode,
SelectionNode,
} from 'graphql';
/**
* Resolves the result of a query solely from the store (i.e. never hits the server).
*
* @param {Store} store The {@link NormalizedCache} used by Apollo for the `data` portion of the
* store.
*
* @param {DocumentNode} query The query document to resolve from the data available in the store.
*
* @param {Object} [variables] A map from the name of a variable to its value. These variables can
* be referenced by the query document.
*
* @param {any} previousResult The previous result returned by this function for the same query.
* If nothing in the store changed since that previous result then values from the previous result
* will be returned to preserve referential equality.
*/
export function readQueryFromStore<QueryType>(
options: ReadQueryOptions,
): QueryType {
const optsPatch = { returnPartialData: false };
import { wrap, CacheKeyNode } from './optimism';
export { OptimisticWrapperFunction } from './optimism';
return diffQueryAgainstStore<QueryType>({
...options,
...optsPatch,
}).result;
import { DepTrackingCache } from './depTrackingCache';
import { QueryKeyMaker } from './queryKeyMaker';
export type VariableMap = { [name: string]: any };
export type FragmentMatcher = (
rootValue: any,
typeCondition: string,
context: ReadStoreContext,
) => boolean | 'heuristic';
type ExecContext = {
query: DocumentNode;
fragmentMap: FragmentMap;
contextValue: ReadStoreContext;
variableValues: VariableMap;
fragmentMatcher: FragmentMatcher;
};
type ExecInfo = {
resultKey: string;
directives: DirectiveInfo;
};
export type ExecResultMissingField = {
objectId: string;
fieldName: string;
tolerable: boolean;
};
export type ExecResult<R = any> = {
result: R;
// Empty array if no missing fields encountered while computing result.
missing?: ExecResultMissingField[];
};
type ExecStoreQueryOptions = {
query: DocumentNode;
rootValue: IdValue;
contextValue: ReadStoreContext;
variableValues: VariableMap;
// Default matcher always matches all fragments
fragmentMatcher: FragmentMatcher;
};
type ExecSelectionSetOptions = {
selectionSet: SelectionSetNode;
rootValue: any;
execContext: ExecContext;
parentKind: string;
};
type StoreReaderOptions = {
addTypename?: boolean;
cacheKeyRoot?: CacheKeyNode;
};
export class StoreReader {
private addTypename: boolean;
private cacheKeyRoot: CacheKeyNode;
private keyMaker: QueryKeyMaker;
constructor({
addTypename = false,
cacheKeyRoot = new CacheKeyNode,
}: StoreReaderOptions = {}) {
const reader = this;
const {
executeStoreQuery,
executeSelectionSet,
} = reader;
reader.addTypename = addTypename;
reader.cacheKeyRoot = cacheKeyRoot;
reader.keyMaker = new QueryKeyMaker(cacheKeyRoot);
this.executeStoreQuery = wrap((options: ExecStoreQueryOptions) => {
return executeStoreQuery.call(reader, options);
}, {
makeCacheKey({
query,
rootValue,
contextValue,
variableValues,
}: ExecStoreQueryOptions) {
// The result of executeStoreQuery can be safely cached only if the
// underlying store is capable of tracking dependencies and invalidating
// the cache when relevant data have changed.
if (contextValue.store instanceof DepTrackingCache) {
return reader.cacheKeyRoot.lookup(
reader.keyMaker.forQuery(query).lookupQuery(query),
contextValue.store,
JSON.stringify(variableValues),
);
}
}
});
this.executeSelectionSet = wrap((options: ExecSelectionSetOptions) => {
return executeSelectionSet.call(reader, options);
}, {
makeCacheKey({
selectionSet,
rootValue,
execContext,
}: ExecSelectionSetOptions) {
if (execContext.contextValue.store instanceof DepTrackingCache) {
return reader.cacheKeyRoot.lookup(
reader.keyMaker.forQuery(execContext.query).lookupSelectionSet(selectionSet),
execContext.contextValue.store,
JSON.stringify(execContext.variableValues),
// Unlike executeStoreQuery, executeSelectionSet can be called
// recursively on nested objects, so it's important to include the
// ID of the current parent object in the cache key.
rootValue.id,
);
}
}
});
}
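
Both makeCacheKey callbacks above return a key only when the store is a DepTrackingCache, i.e. a store that records which data IDs each memoized read touched and invalidates those reads when one of the IDs is later written. A rough, hypothetical sketch of that idea (not the actual DepTrackingCache code):

// Hypothetical sketch of the dependency-tracking idea: get() records which
// cached computation read an id, and set() invalidates every computation
// that read it.
class TrackingStore {
  private data: Record<string, any> = Object.create(null);
  private readers = new Map<string, Set<() => void>>();
  private active: (() => void) | null = null;

  // Run a memoized computation, remembering how to invalidate it later.
  run<R>(compute: () => R, invalidate: () => void): R {
    const previous = this.active;
    this.active = invalidate;
    try {
      return compute();
    } finally {
      this.active = previous;
    }
  }

  get(id: string): any {
    const active = this.active;
    if (active) {
      let deps = this.readers.get(id);
      if (!deps) {
        deps = new Set<() => void>();
        this.readers.set(id, deps);
      }
      deps.add(active); // remember that this computation depends on `id`
    }
    return this.data[id];
  }

  set(id: string, value: any): void {
    this.data[id] = value;
    const deps = this.readers.get(id);
    if (deps) {
      this.readers.delete(id);
      deps.forEach(invalidate => invalidate()); // drop stale cached results
    }
  }
}
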
/**
* Resolves the result of a query solely from the store (i.e. never hits the server).
*
* @param {Store} store The {@link NormalizedCache} used by Apollo for the `data` portion of the
* store.
*
* @param {DocumentNode} query The query document to resolve from the data available in the store.
*
* @param {Object} [variables] A map from the name of a variable to its value. These variables can
* be referenced by the query document.
*
* @param {any} previousResult The previous result returned by this function for the same query.
* If nothing in the store changed since that previous result then values from the previous result
* will be returned to preserve referential equality.
*/
public readQueryFromStore<QueryType>(
options: ReadQueryOptions,
): QueryType {
const optsPatch = { returnPartialData: false };
return this.diffQueryAgainstStore<QueryType>({
...options,
...optsPatch,
}).result;
}
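
For reference, a usage sketch of the new class-based reader API; the seed data are made up, graphql-tag is assumed for parsing, and the imports assume the package's own source layout:

import gql from 'graphql-tag';
import { StoreReader } from './readFromStore';
import { defaultNormalizedCacheFactory } from './depTrackingCache';

// Scalar fields are stored directly on the normalized object for their id.
const store = defaultNormalizedCacheFactory({
  ROOT_QUERY: { name: 'Ada' },
});

const reader = new StoreReader();
const data = reader.readQueryFromStore<{ name: string }>({
  store,
  query: gql`{ name }`,
});
// data => { name: 'Ada' }
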
/**
* Given a store and a query, return as much of the result as possible and
* identify if any data was missing from the store.
* @param {DocumentNode} query A parsed GraphQL query document
* @param {Store} store The Apollo Client store object
* @param {any} previousResult The previous result returned by this function for the same query
* @return {result: Object, complete: [boolean]}
*/
public diffQueryAgainstStore<T>({
store,
query,
variables,
previousResult,
returnPartialData = true,
rootId = 'ROOT_QUERY',
fragmentMatcherFunction,
config,
}: DiffQueryAgainstStoreOptions): Cache.DiffResult<T> {
// Throw the right validation error by trying to find a query in the document
const queryDefinition = getQueryDefinition(query);
variables = assign({}, getDefaultValues(queryDefinition), variables);
const context: ReadStoreContext = {
// Global settings
store,
dataIdFromObject: (config && config.dataIdFromObject) || null,
cacheRedirects: (config && config.cacheRedirects) || {},
};
const execResult = this.executeStoreQuery({
query,
rootValue: {
type: 'id',
id: rootId,
generated: true,
typename: 'Query',
},
contextValue: context,
variableValues: variables,
fragmentMatcher: fragmentMatcherFunction,
});
const hasMissingFields =
execResult.missing && execResult.missing.length > 0;
if (hasMissingFields && ! returnPartialData) {
execResult.missing.forEach(info => {
if (info.tolerable) return;
throw new Error(
`Can't find field ${info.fieldName} on object (${info.objectId}) ${
JSON.stringify(store.get(info.objectId), null, 2)
}.`
);
});
}
if (previousResult) {
if (isEqual(previousResult, execResult.result)) {
execResult.result = previousResult;
}
}
return {
result: execResult.result,
complete: !hasMissingFields,
};
}
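
Because returnPartialData defaults to true here, missing fields surface as complete: false instead of a thrown error. Continuing the sketch above (the `age` field is deliberately absent from the seed data):

const diff = reader.diffQueryAgainstStore<{ name: string; age?: number }>({
  store,
  query: gql`{ name age }`,
});
// diff.result   => { name: 'Ada' }   (partial data; `age` is simply omitted)
// diff.complete => false             (because `age` was missing from the store)
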
/**
* Based on graphql function from graphql-js:
*
* graphql(
* schema: GraphQLSchema,
* requestString: string,
* rootValue?: ?any,
* contextValue?: ?any,
* variableValues?: ?{[key: string]: any},
* operationName?: ?string
* ): Promise<GraphQLResult>
*
* The default export as of graphql-anywhere is sync as of 4.0,
* but below is an exported alternative that is async.
* In the 5.0 version, this will be the only export again
* and it will be async
*
*/
private executeStoreQuery({
query,
rootValue,
contextValue,
variableValues,
// Default matcher always matches all fragments
fragmentMatcher = () => true,
}: ExecStoreQueryOptions): ExecResult {
const mainDefinition = getMainDefinition(query);
const fragments = getFragmentDefinitions(query);
const fragmentMap = createFragmentMap(fragments);
const execContext: ExecContext = {
query,
fragmentMap,
contextValue,
variableValues,
fragmentMatcher,
};
return this.executeSelectionSet({
selectionSet: mainDefinition.selectionSet,
rootValue,
execContext,
parentKind: mainDefinition.kind,
});
}
private executeSelectionSet({
selectionSet,
rootValue,
execContext,
parentKind,
}: ExecSelectionSetOptions): ExecResult {
const { fragmentMap, contextValue, variableValues: variables } = execContext;
const finalResult: ExecResult = {
result: {},
};
let didReadTypename = false;
function handleMissing<T>(result: ExecResult<T>): T {
if (result.missing) {
finalResult.missing = finalResult.missing || [];
finalResult.missing.push(...result.missing);
}
return result.result;
}
const handleSelection = (selection: SelectionNode) => {
if (!shouldInclude(selection, variables)) {
// Skip this entirely
return;
}
if (isField(selection)) {
const fieldResult = handleMissing(
this.executeField(selection, rootValue, execContext)
);
const keyName = resultKeyNameFromField(selection);
if (keyName === "__typename") {
didReadTypename = true;
}
if (typeof fieldResult !== 'undefined') {
merge(finalResult.result, {
[keyName]: fieldResult,
});
}
} else {
let fragment: InlineFragmentNode | FragmentDefinitionNode;
if (isInlineFragment(selection)) {
fragment = selection;
} else {
// This is a named fragment
fragment = fragmentMap[selection.name.value];
if (!fragment) {
throw new Error(`No fragment named ${selection.name.value}`);
}
}
const typeCondition = fragment.typeCondition.name.value;
const match = execContext.fragmentMatcher(rootValue, typeCondition, contextValue);
if (match) {
let fragmentExecResult = this.executeSelectionSet({
selectionSet: fragment.selectionSet,
rootValue,
execContext,
parentKind: fragment.kind,
});
if (match === 'heuristic' && fragmentExecResult.missing) {
fragmentExecResult = {
...fragmentExecResult,
missing: fragmentExecResult.missing.map(info => {
return { ...info, tolerable: true };
}),
};
}
merge(finalResult.result, handleMissing(fragmentExecResult));
}
}
};
selectionSet.selections.forEach(handleSelection);
if (! didReadTypename &&
this.addTypename &&
// Analogous to the isRoot parameter that addTypenameToDocument passes
// to addTypenameToSelectionSet to avoid adding __typename to the root
// query operation's selection set.
parentKind !== "OperationDefinition") {
handleSelection({
kind: "Field",
name: {
kind: "Name",
value: "__typename",
},
});
}
return finalResult;
}
private executeField(
field: FieldNode,
rootValue: any,
execContext: ExecContext,
): ExecResult {
const { variableValues: variables, contextValue } = execContext;
const fieldName = field.name.value;
const args = argumentsObjectFromField(field, variables);
const info: ExecInfo = {
resultKey: resultKeyNameFromField(field),
directives: getDirectiveInfoFromField(field, variables),
};
const readStoreResult = readStoreResolver(
fieldName,
rootValue,
args,
contextValue,
info,
);
// Handle all scalar types here
if (!field.selectionSet) {
return readStoreResult;
}
// From here down, the field has a selection set, which means it's trying to
// query a GraphQLObjectType
if (readStoreResult.result == null) {
// Basically any field in a GraphQL response can be null, or missing
return readStoreResult;
}
function handleMissing<T>(res: ExecResult<T>): ExecResult<T> {
let missing: ExecResultMissingField[] = null;
if (readStoreResult.missing) {
missing = missing || [];
missing.push(...readStoreResult.missing);
}
if (res.missing) {
missing = missing || [];
missing.push(...res.missing);
}
return {
result: res.result,
missing,
};
}
if (Array.isArray(readStoreResult.result)) {
return handleMissing(this.executeSubSelectedArray(
field,
readStoreResult.result,
execContext,
));
}
// Returned value is an object, and the query has a sub-selection. Recurse.
return handleMissing(this.executeSelectionSet({
selectionSet: field.selectionSet,
rootValue: readStoreResult.result,
execContext,
parentKind: field.kind,
}));
}
private executeSubSelectedArray(
field: FieldNode,
result: any[],
execContext: ExecContext,
): ExecResult {
let missing: ExecResultMissingField[] = null;
function handleMissing<T>(childResult: ExecResult<T>): T {
if (childResult.missing) {
missing = missing || [];
missing.push(...childResult.missing);
}
return childResult.result;
}
result = result.map(item => {
// null value in array
if (item === null) {
return null;
}
// This is a nested array, recurse
if (Array.isArray(item)) {
return handleMissing(this.executeSubSelectedArray(field, item, execContext));
}
// This is an object, run the selection set on it
return handleMissing(this.executeSelectionSet({
selectionSet: field.selectionSet,
rootValue: item,
execContext,
parentKind: field.kind,
}));
});
return { result, missing };
}
}
const readStoreResolver: Resolver = (
export function assertIdValue(idValue: IdValue) {
if (!isIdValue(idValue)) {
throw new Error(`Encountered a sub-selection on the query, but the store doesn't have \
an object reference. This should never happen during normal use unless you have custom code \
that is directly manipulating the store; please file an issue.`);
}
}
function readStoreResolver(
fieldName: string,
idValue: IdValueWithPreviousResult,
idValue: IdValue,
args: any,
context: ReadStoreContext,
{ resultKey, directives }: ExecInfo,
) => {
): ExecResult<StoreValue> {
assertIdValue(idValue);

@@ -84,3 +549,3 @@

let fieldValue: StoreValue | string | void = void 0;
let fieldValue: StoreValue | void = void 0;

@@ -117,239 +582,49 @@ if (obj) {

if (typeof fieldValue === 'undefined') {
if (!context.returnPartialData) {
throw new Error(
`Can't find field ${storeKeyName} on object (${objId}) ${JSON.stringify(
obj,
null,
2,
)}.`,
);
}
context.hasMissingField = true;
return fieldValue;
return {
result: fieldValue,
missing: [{
objectId: objId,
fieldName: storeKeyName,
tolerable: false,
}],
};
}
// if this is an object scalar, it must be a json blob and we have to unescape it
if (isJsonValue(fieldValue)) {
// If the JSON blob is the same now as in the previous result, return the previous result to
// maintain referential equality.
//
// `isEqual` will first perform a referential equality check (with `===`) in case the JSON
// value has not changed in the store, and then a deep equality check if that fails in case a
// new JSON object was returned by the API but that object may still be the same.
if (
idValue.previousResult &&
isEqual(idValue.previousResult[resultKey], fieldValue.json)
) {
return idValue.previousResult[resultKey];
}
return fieldValue.json;
fieldValue = fieldValue.json;
}
// If we had a previous result, try adding that previous result value for this field to our field
// value. This will create a new value without mutating the old one.
if (idValue.previousResult) {
fieldValue = addPreviousResultToIdValues(
fieldValue,
idValue.previousResult[resultKey],
);
}
return fieldValue;
};
/**
* Given a store and a query, return as much of the result as possible and
* identify if any data was missing from the store.
* @param {DocumentNode} query A parsed GraphQL query document
* @param {Store} store The Apollo Client store object
* @param {any} previousResult The previous result returned by this function for the same query
* @return {result: Object, complete: [boolean]}
*/
export function diffQueryAgainstStore<T>({
store,
query,
variables,
previousResult,
returnPartialData = true,
rootId = 'ROOT_QUERY',
fragmentMatcherFunction,
config,
}: DiffQueryAgainstStoreOptions): Cache.DiffResult<T> {
// Throw the right validation error by trying to find a query in the document
const queryDefinition = getQueryDefinition(query);
variables = assign({}, getDefaultValues(queryDefinition), variables);
const context: ReadStoreContext = {
// Global settings
store,
returnPartialData,
dataIdFromObject: (config && config.dataIdFromObject) || null,
cacheRedirects: (config && config.cacheRedirects) || {},
// Flag set during execution
hasMissingField: false,
};
const rootIdValue = {
type: 'id',
id: rootId,
previousResult,
};
const result = graphqlAnywhere(
readStoreResolver,
query,
rootIdValue,
context,
variables,
{
fragmentMatcher: fragmentMatcherFunction,
resultMapper,
},
);
return {
result: result as T,
complete: !context.hasMissingField,
result: fieldValue,
};
}
export function assertIdValue(idValue: IdValue) {
if (!isIdValue(idValue)) {
throw new Error(`Encountered a sub-selection on the query, but the store doesn't have \
an object reference. This should never happen during normal use unless you have custom code \
that is directly manipulating the store; please file an issue.`);
}
}
const hasOwn = Object.prototype.hasOwnProperty;
/**
* Adds a previous result value to id values in a nested array. For a single id value and a single
* previous result then the previous value is added directly.
*
* For arrays we put all of the ids from the previous result array in a map and add them to id
* values with the same id.
*
* This function does not mutate. Instead it returns new instances of modified values.
*
* @private
*/
function addPreviousResultToIdValues(value: any, previousResult: any): any {
// If the value is an `IdValue`, add the previous result to it whether or not that
// `previousResult` is undefined.
//
// If the value is an array, recurse over each item trying to add the `previousResult` for that
// item.
if (isIdValue(value)) {
return {
...value,
previousResult,
};
} else if (Array.isArray(value)) {
const idToPreviousResult: Map<string, any> = new Map();
function merge(
target: { [key: string]: any },
source: { [key: string]: any },
) {
if (source !== null && typeof source === 'object' &&
// Due to result caching, it's possible that source and target will
// be === at some point in the tree, which means we can stop early.
source !== target) {
// If the previous result was an array, we want to build up our map of ids to previous results
// using the private `ID_KEY` property that is added in `resultMapper`.
if (Array.isArray(previousResult)) {
previousResult.forEach(item => {
// item can be null
if (item && item[ID_KEY]) {
idToPreviousResult.set(item[ID_KEY], item);
// idToPreviousResult[item[ID_KEY]] = item;
}
});
// In case the target has been frozen, make an extensible copy so that
// we can merge properties into the copy.
if (Object.isExtensible && !Object.isExtensible(target)) {
target = { ...target };
}
// For every value we want to add the previous result.
return value.map((item, i) => {
// By default the previous result for this item will be in the same array position as this
// item.
let itemPreviousResult = previousResult && previousResult[i];
// If the item is an id value, we should check to see if there is a previous result for this
// specific id. If there is, that will be the value for `itemPreviousResult`.
if (isIdValue(item)) {
itemPreviousResult =
idToPreviousResult.get(item.id) || itemPreviousResult;
Object.keys(source).forEach(sourceKey => {
const sourceVal = source[sourceKey];
if (!hasOwn.call(target, sourceKey)) {
target[sourceKey] = sourceVal;
} else {
target[sourceKey] = merge(target[sourceKey], sourceVal);
}
return addPreviousResultToIdValues(item, itemPreviousResult);
});
}
// Return the value, nothing changed.
return value;
}
/**
* Maps a result from `graphql-anywhere` to a final result value.
*
* If the result and the previous result from the `idValue` pass a shallow equality test, we just
* return the `previousResult` to maintain referential equality.
*
* We also add a private id property to the result that we can use later on.
*
* @private
*/
function resultMapper(resultFields: any, idValue: IdValueWithPreviousResult) {
// If we had a previous result, we may be able to return that and preserve referential equality
if (idValue.previousResult) {
const currentResultKeys = Object.keys(resultFields);
const sameAsPreviousResult =
// Confirm that we have the same keys in both the current result and the previous result.
Object.keys(idValue.previousResult).every(
key => currentResultKeys.indexOf(key) > -1,
) &&
// Perform a shallow comparison of the result fields with the previous result. If all of
// the shallow fields are referentially equal to the fields of the previous result we can
// just return the previous result.
//
// While we do a shallow comparison of objects, but we do a deep comparison of arrays.
currentResultKeys.every(key =>
areNestedArrayItemsStrictlyEqual(
resultFields[key],
idValue.previousResult[key],
),
);
if (sameAsPreviousResult) {
return idValue.previousResult;
}
}
Object.defineProperty(resultFields, ID_KEY, {
enumerable: false,
configurable: true,
writable: false,
value: idValue.id,
});
return resultFields;
return target;
}
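
The extensibility check above exists because cached sub-results may be frozen and shared between result trees; assigning into a frozen object throws in strict mode, so merge() copies the target first. The same pattern in isolation:

const cached = Object.freeze({ id: '1', name: 'Ada' });
let target: { [key: string]: any } = cached;

if (Object.isExtensible && !Object.isExtensible(target)) {
  target = { ...target }; // merge into a copy so the frozen, shared object is never mutated
}
target.age = 36; // safe: `cached` is untouched
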
type NestedArray<T> = T | Array<T | Array<T | Array<T>>>;
/**
* Compare all the items to see if they are all referentially equal in two arrays no matter how
* deeply nested the arrays are.
*
* @private
*/
function areNestedArrayItemsStrictlyEqual(
a: NestedArray<any>,
b: NestedArray<any>,
): boolean {
// If `a` and `b` are referentially equal, return true.
if (a === b) {
return true;
}
// If either `a` or `b` are not an array or not of the same length return false. `a` and `b` are
// known to not be equal here, we checked above.
if (!Array.isArray(a) || !Array.isArray(b) || a.length !== b.length) {
return false;
}
// Otherwise let us compare all of the array items (which are potentially nested arrays!) to see
// if they are equal.
return a.every((item, i) => areNestedArrayItemsStrictlyEqual(item, b[i]));
}
import { DocumentNode } from 'graphql';
import { FragmentMatcher } from 'graphql-anywhere';
import { FragmentMatcher } from './readFromStore';
import { Transaction } from 'apollo-cache';

@@ -48,6 +48,2 @@ import { IdValue, StoreValue } from 'apollo-utilities';

export type NormalizedCacheFactory = (
seed?: NormalizedCacheObject,
) => NormalizedCache;
export type OptimisticStoreItem = {

@@ -78,11 +74,8 @@ id: string;

cacheRedirects?: CacheResolverMap;
storeFactory?: NormalizedCacheFactory;
};
export type ReadStoreContext = {
store: NormalizedCache;
returnPartialData: boolean;
hasMissingField: boolean;
cacheRedirects: CacheResolverMap;
dataIdFromObject?: IdGetter;
readonly store: NormalizedCache;
readonly cacheRedirects: CacheResolverMap;
readonly dataIdFromObject?: IdGetter;
};

@@ -95,3 +88,3 @@

context: ReadStoreContext,
): boolean;
): boolean | 'heuristic';
}
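
With the widened return type, a matcher can distinguish a definite match (true), a definite non-match (false), and a guess ('heuristic'); fields missing under a heuristic match are treated as tolerable by the reader rather than causing an error. A hypothetical custom matcher following that contract:

import { FragmentMatcher } from './readFromStore';
import { ReadStoreContext } from './types';

// Hypothetical matcher: an exact __typename match is definite, anything else
// is only a guess, so missing fields under the fragment become tolerable.
const guessingMatcher: FragmentMatcher = (
  rootValue: any,
  typeCondition: string,
  context: ReadStoreContext,
): boolean | 'heuristic' => {
  const obj = rootValue && context.store.get(rootValue.id);
  if (!obj) return false;
  return obj.__typename === typeCondition ? true : 'heuristic';
};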

@@ -101,21 +94,2 @@

/**
* This code needs an optional `previousResult` property on `IdValue` so that when the results
* returned from the store are the same, we can just return the `previousResult` and not a new
* value thus preserving referential equality.
*
* The `previousResult` property is added to our `IdValue`s in the `graphql-anywhere` resolver so
* that they can be in the right position for `resultMapper` to test equality and return whichever
* result is appropriate.
*
* `resultMapper` takes the `previousResult`s and performs a shallow referential equality check. If
* that passes then instead of returning the object created by `graphql-anywhere` the
* `resultMapper` function will instead return the `previousResult`. This process is bottom-up so
* we start at the leaf results and swap them for `previousResult`s all the way up until we get to
* the root object.
*/
export interface IdValueWithPreviousResult extends IdValue {
previousResult?: any;
}
export type IntrospectionResultData = {

@@ -122,0 +96,0 @@ __schema: {

@@ -6,7 +6,6 @@ import {

InlineFragmentNode,
OperationDefinitionNode,
FragmentDefinitionNode,
} from 'graphql';
import { print } from 'graphql/language/printer';
import { FragmentMatcher } from 'graphql-anywhere';
import { FragmentMatcher } from './readFromStore';

@@ -28,8 +27,9 @@ import {

storeKeyNameFromField,
getQueryDefinition,
StoreValue,
toIdValue,
isEqual,
} from 'apollo-utilities';
import { defaultNormalizedCacheFactory, ObjectCache } from './objectCache';
import { ObjectCache } from './objectCache';
import { defaultNormalizedCacheFactory } from './depTrackingCache';

@@ -39,3 +39,2 @@ import {

NormalizedCache,
NormalizedCacheFactory,
ReadStoreContext,

@@ -59,144 +58,150 @@ StoreObject,

/**
* Writes the result of a query to the store.
*
* @param result The result object returned for the query document.
*
* @param query The query document whose result we are writing to the store.
*
* @param store The {@link NormalizedCache} used by Apollo for the `data` portion of the store.
*
* @param variables A map from the name of a variable to its value. These variables can be
* referenced by the query document.
*
* @param dataIdFromObject A function that returns an object identifier given a particular result
* object. See the store documentation for details and an example of this function.
*
* @param fragmentMap A map from the name of a fragment to its fragment definition. These fragments
* can be referenced within the query document.
*
* @param fragmentMatcherFunction A function to use for matching fragment conditions in GraphQL documents
*/
export function writeQueryToStore({
result,
query,
storeFactory = defaultNormalizedCacheFactory,
store = storeFactory(),
variables,
dataIdFromObject,
fragmentMap = {} as FragmentMap,
fragmentMatcherFunction,
}: {
result: Object;
query: DocumentNode;
store?: NormalizedCache;
storeFactory?: NormalizedCacheFactory;
variables?: Object;
dataIdFromObject?: IdGetter;
fragmentMap?: FragmentMap;
fragmentMatcherFunction?: FragmentMatcher;
}): NormalizedCache {
const queryDefinition: OperationDefinitionNode = getQueryDefinition(query);
export type WriteContext = {
readonly store: NormalizedCache;
readonly processedData?: { [x: string]: FieldNode[] };
readonly variables?: any;
readonly dataIdFromObject?: IdGetter;
readonly fragmentMap?: FragmentMap;
readonly fragmentMatcherFunction?: FragmentMatcher;
};
variables = assign({}, getDefaultValues(queryDefinition), variables);
type StoreWriterOptions = {
addTypename?: boolean;
};
try {
return writeSelectionSetToStore({
const TYPENAME_FIELD: FieldNode = {
kind: 'Field',
name: {
kind: 'Name',
value: '__typename',
},
};
export class StoreWriter {
private addTypename: boolean;
constructor({
addTypename = false,
}: StoreWriterOptions = {}) {
this.addTypename = addTypename;
}
/**
* Writes the result of a query to the store.
*
* @param result The result object returned for the query document.
*
* @param query The query document whose result we are writing to the store.
*
* @param store The {@link NormalizedCache} used by Apollo for the `data` portion of the store.
*
* @param variables A map from the name of a variable to its value. These variables can be
* referenced by the query document.
*
* @param dataIdFromObject A function that returns an object identifier given a particular result
* object. See the store documentation for details and an example of this function.
*
* @param fragmentMatcherFunction A function to use for matching fragment conditions in GraphQL documents
*/
public writeQueryToStore({
query,
result,
store = defaultNormalizedCacheFactory(),
variables,
dataIdFromObject,
fragmentMatcherFunction,
}: {
query: DocumentNode;
result: Object;
store?: NormalizedCache;
variables?: Object;
dataIdFromObject?: IdGetter;
fragmentMatcherFunction?: FragmentMatcher;
}): NormalizedCache {
return this.writeResultToStore({
dataId: 'ROOT_QUERY',
result,
selectionSet: queryDefinition.selectionSet,
context: {
store,
storeFactory,
processedData: {},
variables,
dataIdFromObject,
fragmentMap,
fragmentMatcherFunction,
},
document: query,
store,
variables,
dataIdFromObject,
fragmentMatcherFunction,
});
} catch (e) {
throw enhanceErrorWithDocument(e, query);
}
}
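
As with reading, writing now goes through an instantiated StoreWriter instead of free functions. A round-trip sketch with made-up data (graphql-tag assumed for parsing, imports as inside the package source):

import gql from 'graphql-tag';
import { StoreWriter } from './writeToStore';
import { StoreReader } from './readFromStore';

const query = gql`{ hero { id name } }`;

const writer = new StoreWriter();
const store = writer.writeQueryToStore({
  query,
  result: { hero: { id: '1', name: 'Luke' } },
  dataIdFromObject: (obj: any) => obj.id, // normalize nested objects under their id
});

const reader = new StoreReader();
const data = reader.readQueryFromStore({ store, query });
// data is structurally equal to the result that was written
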
export type WriteContext = {
store: NormalizedCache;
storeFactory: NormalizedCacheFactory;
processedData?: { [x: string]: FieldNode[] };
variables?: any;
dataIdFromObject?: IdGetter;
fragmentMap?: FragmentMap;
fragmentMatcherFunction?: FragmentMatcher;
};
public writeResultToStore({
dataId,
result,
document,
store = defaultNormalizedCacheFactory(),
variables,
dataIdFromObject,
fragmentMatcherFunction,
}: {
dataId: string;
result: any;
document: DocumentNode;
store?: NormalizedCache;
variables?: Object;
dataIdFromObject?: IdGetter;
fragmentMatcherFunction?: FragmentMatcher;
}): NormalizedCache {
// XXX TODO REFACTOR: this is a temporary workaround until query normalization is made to work with documents.
const operationDefinition = getOperationDefinition(document);
export function writeResultToStore({
dataId,
result,
document,
storeFactory = defaultNormalizedCacheFactory,
store = storeFactory(),
variables,
dataIdFromObject,
fragmentMatcherFunction,
}: {
dataId: string;
result: any;
document: DocumentNode;
store?: NormalizedCache;
storeFactory?: NormalizedCacheFactory;
variables?: Object;
dataIdFromObject?: IdGetter;
fragmentMatcherFunction?: FragmentMatcher;
}): NormalizedCache {
// XXX TODO REFACTOR: this is a temporary workaround until query normalization is made to work with documents.
const operationDefinition = getOperationDefinition(document);
const selectionSet = operationDefinition.selectionSet;
const fragmentMap = createFragmentMap(getFragmentDefinitions(document));
try {
return this.writeSelectionSetToStore({
result,
dataId,
selectionSet: operationDefinition.selectionSet,
context: {
store,
processedData: {},
variables: assign(
{},
getDefaultValues(operationDefinition),
variables,
),
dataIdFromObject,
fragmentMap: createFragmentMap(getFragmentDefinitions(document)),
fragmentMatcherFunction,
},
parentKind: operationDefinition.kind,
});
} catch (e) {
throw enhanceErrorWithDocument(e, document);
}
}
variables = assign({}, getDefaultValues(operationDefinition), variables);
public writeSelectionSetToStore({
result,
dataId,
selectionSet,
context,
parentKind,
}: {
dataId: string;
result: any;
selectionSet: SelectionSetNode;
context: WriteContext;
parentKind: string;
}): NormalizedCache {
const { variables, store, fragmentMap } = context;
let didWriteTypename = false;
try {
return writeSelectionSetToStore({
result,
dataId,
selectionSet,
context: {
store,
storeFactory,
processedData: {},
variables,
dataIdFromObject,
fragmentMap,
fragmentMatcherFunction,
},
});
} catch (e) {
throw enhanceErrorWithDocument(e, document);
}
}
selectionSet.selections.forEach(selection => {
if (!shouldInclude(selection, variables)) {
return;
}
export function writeSelectionSetToStore({
result,
dataId,
selectionSet,
context,
}: {
dataId: string;
result: any;
selectionSet: SelectionSetNode;
context: WriteContext;
}): NormalizedCache {
const { variables, store, fragmentMap } = context;
if (isField(selection)) {
const resultFieldKey: string = resultKeyNameFromField(selection);
const value: any = result[resultFieldKey];
selectionSet.selections.forEach(selection => {
const included = shouldInclude(selection, variables);
if (resultFieldKey === "__typename") {
didWriteTypename = true;
}
if (isField(selection)) {
const resultFieldKey: string = resultKeyNameFromField(selection);
const value: any = result[resultFieldKey];
if (included) {
if (typeof value !== 'undefined') {
writeFieldToStore({
this.writeFieldToStore({
dataId,

@@ -208,3 +213,3 @@ value,

} else {
// if this is a defered field we don't need to throw / warn
// if this is a defered field we don't need to throw / wanr
const isDefered =

@@ -232,54 +237,271 @@ selection.directives &&

}
} else {
// This is not a field, so it must be a fragment, either inline or named
let fragment: InlineFragmentNode | FragmentDefinitionNode;
if (isInlineFragment(selection)) {
fragment = selection;
} else {
// Named fragment
fragment = (fragmentMap || {})[selection.name.value];
if (!fragment) {
throw new Error(`No fragment named ${selection.name.value}.`);
}
}
let matches = true;
if (context.fragmentMatcherFunction && fragment.typeCondition) {
// TODO we need to rewrite the fragment matchers for this to work properly and efficiently
// Right now we have to pretend that we're passing in an idValue and that there's a store
// on the context.
const idValue = toIdValue({ id: 'self', typename: undefined });
const fakeContext: ReadStoreContext = {
// NOTE: fakeContext always uses ObjectCache
// since this is only to ensure the return value of 'matches'
store: new ObjectCache({ self: result }),
cacheRedirects: {},
};
const match = context.fragmentMatcherFunction(
idValue,
fragment.typeCondition.name.value,
fakeContext,
);
if (!isProduction() && match === 'heuristic') {
console.error('WARNING: heuristic fragment matching going on!');
}
matches = !!match;
}
if (matches) {
this.writeSelectionSetToStore({
result,
selectionSet: fragment.selectionSet,
dataId,
context,
parentKind: fragment.kind,
});
}
}
});
if (! didWriteTypename &&
this.addTypename &&
// Analogous to the isRoot parameter that addTypenameToDocument passes
// to addTypenameToSelectionSet to avoid adding __typename to the root
// query operation's selection set.
parentKind !== "OperationDefinition" &&
typeof result.__typename === "string") {
this.writeFieldToStore({
dataId,
value: result.__typename,
field: TYPENAME_FIELD,
context,
});
}
return store;
}
private writeFieldToStore({
field,
value,
dataId,
context,
}: {
field: FieldNode;
value: any;
dataId: string;
context: WriteContext;
}) {
const { variables, dataIdFromObject, store } = context;
let storeValue: StoreValue;
let storeObject: StoreObject;
const storeFieldName: string = storeKeyNameFromField(field, variables);
// If this is a scalar value...
if (!field.selectionSet || value === null) {
storeValue =
value != null && typeof value === 'object'
? // If the scalar value is a JSON blob, we have to "escape" it so it can’t pretend to be
// an id.
{ type: 'json', json: value }
: // Otherwise, just store the scalar directly in the store.
value;
} else if (Array.isArray(value)) {
const generatedId = `${dataId}.${storeFieldName}`;
storeValue = this.processArrayValue(
value,
generatedId,
field.selectionSet,
context,
field.kind,
);
} else {
// This is not a field, so it must be a fragment, either inline or named
let fragment: InlineFragmentNode | FragmentDefinitionNode;
// It's an object
let valueDataId = `${dataId}.${storeFieldName}`;
let generated = true;
if (isInlineFragment(selection)) {
fragment = selection;
} else {
// Named fragment
fragment = (fragmentMap || {})[selection.name.value];
// We only prepend the '$' if the valueDataId isn't already a generated
// id.
if (!isGeneratedId(valueDataId)) {
valueDataId = '$' + valueDataId;
}
if (!fragment) {
throw new Error(`No fragment named ${selection.name.value}.`);
if (dataIdFromObject) {
const semanticId = dataIdFromObject(value);
// We throw an error if the first character of the id is '$. This is
// because we use that character to designate an Apollo-generated id
// and we use the distinction between user-desiginated and application-provided
// ids when managing overwrites.
if (semanticId && isGeneratedId(semanticId)) {
throw new Error(
'IDs returned by dataIdFromObject cannot begin with the "$" character.',
);
}
if (
semanticId ||
(typeof semanticId === 'number' && semanticId === 0)
) {
valueDataId = semanticId;
generated = false;
}
}
let matches = true;
if (context.fragmentMatcherFunction && fragment.typeCondition) {
// TODO we need to rewrite the fragment matchers for this to work properly and efficiently
// Right now we have to pretend that we're passing in an idValue and that there's a store
// on the context.
const idValue = toIdValue({ id: 'self', typename: undefined });
const fakeContext: ReadStoreContext = {
// NOTE: fakeContext always uses ObjectCache
// since this is only to ensure the return value of 'matches'
store: new ObjectCache({ self: result }),
returnPartialData: false,
hasMissingField: false,
cacheRedirects: {},
};
matches = context.fragmentMatcherFunction(
idValue,
fragment.typeCondition.name.value,
fakeContext,
if (!isDataProcessed(valueDataId, field, context.processedData)) {
this.writeSelectionSetToStore({
dataId: valueDataId,
result: value,
selectionSet: field.selectionSet,
context,
parentKind: field.kind,
});
}
// We take the id and escape it (i.e. wrap it with an enclosing object).
// This allows us to distinguish IDs from normal scalars.
const typename = value.__typename;
storeValue = toIdValue({ id: valueDataId, typename }, generated);
// check if there was a generated id at the location where we're
// about to place this new id. If there was, we have to merge the
// data from that id with the data we're about to write in the store.
storeObject = store.get(dataId);
const escapedId =
storeObject && (storeObject[storeFieldName] as IdValue | undefined);
if (escapedId !== storeValue && isIdValue(escapedId)) {
const hadTypename = escapedId.typename !== undefined;
const hasTypename = typename !== undefined;
const typenameChanged =
hadTypename && hasTypename && escapedId.typename !== typename;
// If there is already a real id in the store and the current id we
// are dealing with is generated, we throw an error.
// One exception we allow is when the typename has changed, which occurs
// when schema defines a union, both with and without an ID in the same place.
// checks if we "lost" the read id
if (generated && !escapedId.generated && !typenameChanged) {
throw new Error(
`Store error: the application attempted to write an object with no provided id` +
` but the store already contains an id of ${
escapedId.id
} for this object. The selectionSet` +
` that was trying to be written is:\n` +
print(field),
);
}
// checks if we "lost" the typename
if (hadTypename && !hasTypename) {
throw new Error(
`Store error: the application attempted to write an object with no provided typename` +
` but the store already contains an object with typename of ${
escapedId.typename
} for the object of id ${escapedId.id}. The selectionSet` +
` that was trying to be written is:\n` +
print(field),
);
}
if (escapedId.generated) {
// We should only merge if it's an object of the same type,
// otherwise we should delete the generated object
if (typenameChanged) {
// Only delete the generated object when the old object was
// inlined, and the new object is not. This is indicated by
// the old id being generated, and the new id being real.
if (!generated) {
store.delete(escapedId.id);
}
} else {
mergeWithGenerated(escapedId.id, (storeValue as IdValue).id, store);
}
}
}
}
storeObject = store.get(dataId);
if (!storeObject || !isEqual(storeValue, storeObject[storeFieldName])) {
store.set(dataId, {
...storeObject,
[storeFieldName]: storeValue,
});
}
}
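
The branches above produce three kinds of store values: plain scalars, escaped JSON blobs, and IdValue references whose ids are either path-based and '$'-prefixed (generated) or supplied by dataIdFromObject (real). An illustrative, approximate snapshot of store contents (IdValues also carry a typename when one was written):

const exampleStoreContents = {
  ROOT_QUERY: {
    count: 3,                                                             // plain scalar
    settings: { type: 'json', json: { theme: 'dark' } },                  // escaped JSON blob
    profile: { type: 'id', id: '$ROOT_QUERY.profile', generated: true },  // no id from dataIdFromObject
    hero: { type: 'id', id: '1', generated: false },                      // real id from dataIdFromObject
  },
  '$ROOT_QUERY.profile': { nickname: 'ada' },
  '1': { id: '1', name: 'Luke' },
};
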
private processArrayValue(
value: any[],
generatedId: string,
selectionSet: SelectionSetNode,
context: WriteContext,
parentKind: string,
): any[] {
return value.map((item: any, index: any) => {
if (item === null) {
return null;
}
let itemDataId = `${generatedId}.${index}`;
if (Array.isArray(item)) {
return this.processArrayValue(
item,
itemDataId,
selectionSet,
context,
parentKind,
);
if (!isProduction() && fakeContext.returnPartialData) {
console.error('WARNING: heuristic fragment matching going on!');
}
let generated = true;
if (context.dataIdFromObject) {
const semanticId = context.dataIdFromObject(item);
if (semanticId) {
itemDataId = semanticId;
generated = false;
}
}
if (included && matches) {
writeSelectionSetToStore({
result,
selectionSet: fragment.selectionSet,
dataId,
if (!isDataProcessed(itemDataId, selectionSet, context.processedData)) {
this.writeSelectionSetToStore({
dataId: itemDataId,
result: item,
selectionSet,
context,
parentKind,
});
}
}
});
return store;
return toIdValue(
{ id: itemDataId, typename: item.__typename },
generated,
);
});
}
}

@@ -297,5 +519,10 @@

cache: NormalizedCache,
) {
): boolean {
if (generatedKey === realKey) {
return false;
}
const generated = cache.get(generatedKey);
const real = cache.get(realKey);
let madeChanges = false;

@@ -305,8 +532,21 @@ Object.keys(generated).forEach(key => {

const realValue = real[key];
if (isIdValue(value) && isGeneratedId(value.id) && isIdValue(realValue)) {
mergeWithGenerated(value.id, realValue.id, cache);
if (isIdValue(value) &&
isGeneratedId(value.id) &&
isIdValue(realValue) &&
! isEqual(value, realValue) &&
mergeWithGenerated(value.id, realValue.id, cache)) {
madeChanges = true;
}
cache.delete(generatedKey);
cache.set(realKey, { ...generated, ...real } as StoreObject);
});
cache.delete(generatedKey);
const newRealValue = { ...generated, ...real };
if (isEqual(newRealValue, real)) {
return madeChanges;
}
cache.set(realKey, newRealValue);
return true;
}
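
mergeWithGenerated now reports whether it actually changed the store, and writeFieldToStore above likewise calls store.set only when the new value differs from what is already stored; with a dependency-tracking store, skipping no-op writes keeps cached reads from being invalidated for nothing. The guard pattern in isolation (hypothetical helper):

import { isEqual } from 'apollo-utilities';

function setIfChanged(
  store: { get(id: string): any; set(id: string, value: any): void },
  id: string,
  value: any,
): boolean {
  // Once set() invalidates cached readers, identical writes must be skipped.
  if (isEqual(store.get(id), value)) return false;
  store.set(id, value);
  return true;
}
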

@@ -335,198 +575,1 @@

}
function writeFieldToStore({
field,
value,
dataId,
context,
}: {
field: FieldNode;
value: any;
dataId: string;
context: WriteContext;
}) {
const { variables, dataIdFromObject, store } = context;
let storeValue: StoreValue;
let storeObject: StoreObject;
const storeFieldName: string = storeKeyNameFromField(field, variables);
// specifies if we need to merge existing keys in the store
let shouldMerge = false;
// If we merge, this will be the generatedKey
let generatedKey: string = '';
// If this is a scalar value...
if (!field.selectionSet || value === null) {
storeValue =
value != null && typeof value === 'object'
? // If the scalar value is a JSON blob, we have to "escape" it so it can’t pretend to be
// an id.
{ type: 'json', json: value }
: // Otherwise, just store the scalar directly in the store.
value;
} else if (Array.isArray(value)) {
const generatedId = `${dataId}.${storeFieldName}`;
storeValue = processArrayValue(
value,
generatedId,
field.selectionSet,
context,
);
} else {
// It's an object
let valueDataId = `${dataId}.${storeFieldName}`;
let generated = true;
// We only prepend the '$' if the valueDataId isn't already a generated
// id.
if (!isGeneratedId(valueDataId)) {
valueDataId = '$' + valueDataId;
}
if (dataIdFromObject) {
const semanticId = dataIdFromObject(value);
// We throw an error if the first character of the id is '$. This is
// because we use that character to designate an Apollo-generated id
// and we use the distinction between user-desiginated and application-provided
// ids when managing overwrites.
if (semanticId && isGeneratedId(semanticId)) {
throw new Error(
'IDs returned by dataIdFromObject cannot begin with the "$" character.',
);
}
if (semanticId || (typeof semanticId === 'number' && semanticId === 0)) {
valueDataId = semanticId;
generated = false;
}
}
if (!isDataProcessed(valueDataId, field, context.processedData)) {
writeSelectionSetToStore({
dataId: valueDataId,
result: value,
selectionSet: field.selectionSet,
context,
});
}
// We take the id and escape it (i.e. wrap it with an enclosing object).
// This allows us to distinguish IDs from normal scalars.
const typename = value.__typename;
storeValue = toIdValue({ id: valueDataId, typename }, generated);
// check if there was a generated id at the location where we're
// about to place this new id. If there was, we have to merge the
// data from that id with the data we're about to write in the store.
storeObject = store.get(dataId);
const escapedId =
storeObject && (storeObject[storeFieldName] as IdValue | undefined);
if (escapedId !== storeValue && isIdValue(escapedId)) {
const hadTypename = escapedId.typename !== undefined;
const hasTypename = typename !== undefined;
const typenameChanged =
hadTypename && hasTypename && escapedId.typename !== typename;
// If there is already a real id in the store and the current id we
// are dealing with is generated, we throw an error.
// One exception we allow is when the typename has changed, which occurs
// when schema defines a union, both with and without an ID in the same place.
// checks if we "lost" the read id
if (generated && !escapedId.generated && !typenameChanged) {
throw new Error(
`Store error: the application attempted to write an object with no provided id` +
` but the store already contains an id of ${
escapedId.id
} for this object. The selectionSet` +
` that was trying to be written is:\n` +
print(field),
);
}
// checks if we "lost" the typename
if (hadTypename && !hasTypename) {
throw new Error(
`Store error: the application attempted to write an object with no provided typename` +
` but the store already contains an object with typename of ${
escapedId.typename
} for the object of id ${escapedId.id}. The selectionSet` +
` that was trying to be written is:\n` +
print(field),
);
}
if (escapedId.generated) {
generatedKey = escapedId.id;
// We should only merge if it's an object of the same type,
// otherwise we should delete the generated object
if (typenameChanged) {
// Only delete the generated object when the old object was
// inlined, and the new object is not. This is indicated by
// the old id being generated, and the new id being real.
if (!generated) {
store.delete(generatedKey);
}
} else {
shouldMerge = true;
}
}
}
}
const newStoreObj = {
...store.get(dataId),
[storeFieldName]: storeValue,
} as StoreObject;
if (shouldMerge) {
mergeWithGenerated(generatedKey, (storeValue as IdValue).id, store);
}
storeObject = store.get(dataId);
if (!storeObject || storeValue !== storeObject[storeFieldName]) {
store.set(dataId, newStoreObj);
}
}
function processArrayValue(
value: any[],
generatedId: string,
selectionSet: SelectionSetNode,
context: WriteContext,
): any[] {
return value.map((item: any, index: any) => {
if (item === null) {
return null;
}
let itemDataId = `${generatedId}.${index}`;
if (Array.isArray(item)) {
return processArrayValue(item, itemDataId, selectionSet, context);
}
let generated = true;
if (context.dataIdFromObject) {
const semanticId = context.dataIdFromObject(item);
if (semanticId) {
itemDataId = semanticId;
generated = false;
}
}
if (!isDataProcessed(itemDataId, selectionSet, context.processedData)) {
writeSelectionSetToStore({
dataId: itemDataId,
result: item,
selectionSet,
context,
});
}
return toIdValue({ id: itemDataId, typename: item.__typename }, generated);
});
}
