Comparing version 3.2.0 to 3.2.1
@@ -57,3 +57,7 @@ 'use strict'; | ||
} | ||
context.select(selection, cache); | ||
if (context.scalars[pureType]) { | ||
context.select(selection, cache); | ||
} else { | ||
context.select(selection); | ||
} | ||
return null; | ||
@@ -381,3 +385,3 @@ } | ||
if (cache.data.length === 0) { | ||
context.select(selection, cache); | ||
context.select(selection); | ||
} | ||
@@ -384,0 +388,0 @@ const proxy = new Proxy(cache.data, arrayProxyHandler); |
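In the hunks above, the accessor now passes the cache container to `context.select()` only for scalar leaves, and emits the selection without a container for other fields and for empty cached arrays. A minimal standalone sketch of that caller-side branching, using simplified stand-in types rather than gqty's real `Selection` and `CacheDataContainer`:

```ts
// Simplified stand-ins; not gqty's real Selection/CacheDataContainer types.
type Selection = { key: string };
type CacheDataContainer = { data: unknown };

interface SelectContext {
  scalars: Record<string, true | undefined>;
  select(selection: Selection, cache?: CacheDataContainer): void;
}

function emitSelection(
  context: SelectContext,
  selection: Selection,
  pureType: string,
  cache: CacheDataContainer,
): void {
  if (context.scalars[pureType]) {
    // Scalar leaf: the cache container carries a concrete cached value.
    context.select(selection, cache);
  } else {
    // Otherwise (e.g. null objects, empty arrays): emit the selection without
    // a container, which downstream code treats as a virtual selection.
    context.select(selection);
  }
}
```

Downstream, a missing container is interpreted as a virtual selection; see the `Selectable` documentation change near the end of this diff.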
@@ -15,2 +15,8 @@ import { type CacheNormalizationHandler } from './normalization'; | ||
export type CacheOptions = { | ||
/** | ||
* Maximum age of cache data in milliseconds; expired data nodes are subjected | ||
* to garbage collection. | ||
* | ||
* @default Infinity | ||
*/ | ||
readonly maxAge?: number; | ||
@@ -22,2 +28,8 @@ /** | ||
readonly normalization?: boolean | CacheNormalizationHandler; | ||
/** | ||
* Maximum time in milliseconds to keep stale data in cache, while allowing | ||
* stale-while-revalidate background fetches. | ||
* | ||
* @default 0 | ||
*/ | ||
readonly staleWhileRevalidate?: number; | ||
@@ -55,3 +67,11 @@ }; | ||
#private; | ||
/** | ||
* Maximum age of cache data in milliseconds; expired data nodes are subjected | ||
* to garbage collection. | ||
*/ | ||
get maxAge(): number; | ||
/** | ||
* Maximum time in milliseconds to keep stale data in cache, while allowing | ||
* stale-while-revalidate background fetches. | ||
*/ | ||
get staleWhileRevalidate(): number; | ||
@@ -58,0 +78,0 @@ get normalizationOptions(): CacheNormalizationHandler | undefined; |
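Both new options are plain millisecond values. A hedged usage sketch, assuming the `Cache` class is exported from the package root as it is in generated gqty clients; the concrete numbers are illustrative only:

```ts
import { Cache } from 'gqty';

// Keep cached nodes for 5 minutes; after that, serve stale data for up to
// one more minute while a background refetch revalidates it.
const cache = new Cache(undefined, {
  maxAge: 5 * 60 * 1000,
  staleWhileRevalidate: 60 * 1000,
});
```

With the documented defaults (`maxAge: Infinity`, `staleWhileRevalidate: 0`), nodes never expire and no stale-while-revalidate window applies.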
@@ -31,2 +31,3 @@ 'use strict'; | ||
var _maxAge, _staleWhileRevalidate, _normalizationOptions, _data, _normalizedObjects, _dataRefs, _subscriptions, _normalizedSubscriptions, _Cache_instances, subscribeNormalized_fn, _notifySubscribers; | ||
const MINIMUM_CACHE_AGE = 100; | ||
class Cache { | ||
@@ -114,3 +115,3 @@ constructor(data, { | ||
}); | ||
__privateSet(this, _maxAge, Math.max(maxAge, 0)); | ||
__privateSet(this, _maxAge, Math.max(maxAge, MINIMUM_CACHE_AGE)); | ||
__privateSet(this, _staleWhileRevalidate, Math.max(staleWhileRevalidate, 0)); | ||
@@ -124,5 +125,13 @@ if (normalization$1) { | ||
} | ||
/** | ||
* Maximum age of cache data in milliseconds; expired data nodes are subjected | ||
* to garbage collection. | ||
*/ | ||
get maxAge() { | ||
return __privateGet(this, _maxAge); | ||
} | ||
/** | ||
* Maximum time in milliseconds to keep stale data in cache, while allowing | ||
* stale-while-revalidate background fetches. | ||
*/ | ||
get staleWhileRevalidate() { | ||
@@ -211,5 +220,5 @@ return __privateGet(this, _staleWhileRevalidate); | ||
// immediate use. Their responses are only meaningful to a cache with | ||
// normalization enabled, where it updates subscribing clients. | ||
// We force a short expiration to let it survives the next render, | ||
// then leave it up for GC. | ||
// normalization enabled, where it already updates listeners. | ||
// | ||
// We force a short expiration here to let it survive the next render. | ||
type === "mutation" || type === "subscription" ? { | ||
@@ -216,0 +225,0 @@ data, |
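The hunk above adds `MINIMUM_CACHE_AGE = 100` and clamps the constructor's `maxAge` with `Math.max(maxAge, MINIMUM_CACHE_AGE)`, so even a `maxAge` of `0` keeps data for 100 ms instead of expiring it immediately. A minimal sketch of the clamp in isolation; `clampMaxAge` is a hypothetical helper name, not part of the library:

```ts
const MINIMUM_CACHE_AGE = 100;

/** Hypothetical helper mirroring the clamp applied in the Cache constructor. */
const clampMaxAge = (maxAge: number): number =>
  Math.max(maxAge, MINIMUM_CACHE_AGE);

clampMaxAge(0); // 100 — even "expire immediately" keeps data for 100 ms
clampMaxAge(Infinity); // Infinity — the default behavior is unchanged
```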
import { Cache, type CacheGetOptions } from '../Cache'; | ||
import type { Disposable } from '../Disposable'; | ||
import type { Resetable } from '../Resetable'; | ||
import type { ScalarsEnumsHash, Schema } from '../Schema'; | ||
import type { Selectable } from '../Selectable'; | ||
export type SchemaContext<T extends Record<string, unknown> = Record<string, unknown>> = T & Disposable & Selectable & { | ||
export type SchemaContext<T extends Record<string, unknown> = Record<string, unknown>> = T & Disposable & Resetable & Selectable & { | ||
cache: Cache; | ||
@@ -7,0 +8,0 @@ readonly aliasLength?: number; |
@@ -20,3 +20,3 @@ 'use strict'; | ||
aliasLength, | ||
cache: cachePolicy === "no-cache" || cachePolicy === "no-store" || cachePolicy === "reload" ? new index.Cache(void 0, { maxAge: Infinity }) : cache, | ||
cache: cachePolicy === "no-cache" || cachePolicy === "no-store" || cachePolicy === "reload" ? new index.Cache(void 0, { maxAge: 0 }) : cache, | ||
cacheOptions: { | ||
@@ -35,7 +35,17 @@ includeExpired: cachePolicy === "default" || cachePolicy === "force-cache" || cachePolicy === "only-if-cached" | ||
const { data, expiresAt: age = Infinity } = cacheNode != null ? cacheNode : {}; | ||
this.shouldFetch || (this.shouldFetch = data === void 0 || age < now - 100); | ||
this.hasCacheHit || (this.hasCacheHit = data !== void 0); | ||
this.notifyCacheUpdate || (this.notifyCacheUpdate = data === void 0); | ||
if (cacheNode) { | ||
this.shouldFetch || (this.shouldFetch = data === void 0 || // Add 100 ms leeway to avoid infinite fetch loops for caches with | ||
// immediate staleness. | ||
age < now); | ||
this.hasCacheHit || (this.hasCacheHit = data !== void 0); | ||
this.notifyCacheUpdate || (this.notifyCacheUpdate = data === void 0); | ||
} | ||
selectSubscriptions.forEach((fn) => fn(selection, cacheNode)); | ||
}, | ||
reset() { | ||
this.shouldFetch = false; | ||
this.hasCacheHit = false; | ||
this.hasCacheMiss = false; | ||
this.notifyCacheUpdate = cachePolicy !== "default"; | ||
}, | ||
subscribeSelect(callback) { | ||
@@ -42,0 +52,0 @@ selectSubscriptions.add(callback); |
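The context above gains a `reset()` that clears its per-fetch flags and re-derives `notifyCacheUpdate` from the cache policy, and the `shouldFetch`/`hasCacheHit` flags are now updated only when a cache node actually exists. A simplified standalone model of that state machine; the real context is a plain object, and `onCacheNode` is an invented method name used here for clarity:

```ts
type CachePolicy = 'default' | 'no-store' | 'no-cache' | 'force-cache';

// Simplified model of the per-resolver context flags shown above.
class ResolverFlags {
  shouldFetch = false;
  hasCacheHit = false;
  hasCacheMiss = false;
  notifyCacheUpdate: boolean;

  constructor(private cachePolicy: CachePolicy) {
    this.notifyCacheUpdate = cachePolicy !== 'default';
  }

  /** Called for each selection that found a cache node. */
  onCacheNode(data: unknown, expiresAt: number, now = Date.now()): void {
    this.shouldFetch ||= data === undefined || expiresAt < now;
    this.hasCacheHit ||= data !== undefined;
    this.notifyCacheUpdate ||= data === undefined;
  }

  /** Called after a fetch settles so the next render starts clean. */
  reset(): void {
    this.shouldFetch = false;
    this.hasCacheHit = false;
    this.hasCacheMiss = false;
    this.notifyCacheUpdate = this.cachePolicy !== 'default';
  }
}
```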
@@ -54,6 +54,30 @@ import type { BaseGeneratedSchema, FetchOptions } from '.'; | ||
export type ResolverParts<TSchema extends BaseGeneratedSchema> = { | ||
/** | ||
* The schema accessors for capturing selections and reading values from the | ||
* cache. | ||
*/ | ||
accessor: TSchema; | ||
/** | ||
* A container object for internal states. | ||
*/ | ||
context: SchemaContext; | ||
/** | ||
* A promise that resolves the query, mutation or subscription. A one-off | ||
* counterpart to `subscribe()`. | ||
*/ | ||
resolve: () => Promise<unknown>; | ||
/** | ||
* Restores the previous selections set from an internal cache, used during | ||
* refetches where selections must be cleared periodically to prevent stale | ||
* inputs. | ||
*/ | ||
restorePreviousSelections: () => void; | ||
/** | ||
* The current selections set to be used for query building. | ||
*/ | ||
selections: Set<Selection>; | ||
/** | ||
* Sends pending queries and continuously listens to cache changes. A | ||
* "streaming" counterpart to `resolve()`. | ||
*/ | ||
subscribe: (callbacks?: { | ||
@@ -65,3 +89,3 @@ onComplete?: () => void; | ||
}; | ||
export type CreateResolverFn<TSchema extends BaseGeneratedSchema> = (options?: ResolveOptions) => ResolverParts<TSchema>; | ||
export type CreateResolverFn<TSchema extends BaseGeneratedSchema> = (options?: ResolveOptions & SubscribeOptions) => ResolverParts<TSchema>; | ||
export type ResolveFn<TSchema extends BaseGeneratedSchema> = <TData = unknown>(fn: DataFn<TSchema, TData>, options?: ResolveOptions) => Promise<TData>; | ||
@@ -129,2 +153,2 @@ export type SubscribeFn<TSchema extends BaseGeneratedSchema> = <TData = unknown>(fn: DataFn<TSchema, TData>, options?: SubscribeOptions) => AsyncIterableIterator<TData> & { | ||
}; | ||
export declare const createResolvers: <TSchema extends BaseGeneratedSchema>({ aliasLength, batchWindow, cache: targetCache, debugger: debug, depthLimit, fetchOptions, fetchOptions: { cachePolicy: defaultCachePolicy, retryPolicy: defaultRetryPoliy, }, scalars, schema, parentContext, }: CreateResolversOptions) => Resolvers<TSchema>; | ||
export declare const createResolvers: <TSchema extends BaseGeneratedSchema>({ aliasLength, batchWindow, cache: resolverCache, debugger: debug, depthLimit, fetchOptions, fetchOptions: { cachePolicy: defaultCachePolicy, retryPolicy: defaultRetryPoliy, }, scalars, schema, parentContext, }: CreateResolversOptions) => Resolvers<TSchema>; |
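Alongside the renamed `resolverCache` parameter, the resolver parts now include `restorePreviousSelections`, and `createResolver` accepts both resolve and subscribe options. A hedged sketch of how a caller might combine these parts during a refetch; the `resolvers` shape is declared locally as an assumption, since the full `Resolvers<TSchema>` type is not shown in this diff:

```ts
// Assumed shape for illustration only; the real Resolvers<TSchema> type is
// not shown in this diff.
declare const resolvers: {
  createResolver: (options?: { cachePolicy?: string }) => {
    resolve: () => Promise<unknown>;
    restorePreviousSelections: () => void;
    selections: Set<unknown>;
    subscribe: (callbacks?: { onComplete?: () => void }) => void;
  };
};

async function refetchExample() {
  const { resolve, selections, restorePreviousSelections } =
    resolvers.createResolver({ cachePolicy: 'default' });

  // Property access on the accessor (omitted here) records selections,
  // then resolve() builds and sends the query.
  await resolve();

  // On a later refetch the selection set may already have been cleared;
  // restore the last known selections before building the next query.
  if (selections.size === 0) restorePreviousSelections();
  await resolve();
}
```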
@@ -16,10 +16,13 @@ 'use strict'; | ||
const pendingQueries = /* @__PURE__ */ new WeakMap(); | ||
const getInteraction = (subject, object) => { | ||
const interaction = /* @__PURE__ */ new Set(); | ||
const getIntersection = (subject, object) => { | ||
if (typeof subject.intersection === "function") { | ||
return subject.intersection(object); | ||
} | ||
const intersection = /* @__PURE__ */ new Set(); | ||
for (const item of object) { | ||
if (subject.has(item)) { | ||
interaction.add(item); | ||
intersection.add(item); | ||
} | ||
} | ||
return interaction; | ||
return intersection; | ||
}; | ||
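`getInteraction` is renamed to `getIntersection` and now prefers the native `Set.prototype.intersection` (available on newer runtimes) before falling back to a manual loop. The same pattern, written as a standalone generic helper:

```ts
type SetWithIntersection<T> = Set<T> & {
  intersection?: (other: Set<T>) => Set<T>;
};

function intersect<T>(subject: Set<T>, object: Set<T>): Set<T> {
  const maybeNative = subject as SetWithIntersection<T>;
  // Use the native implementation when the runtime provides it.
  if (typeof maybeNative.intersection === 'function') {
    return maybeNative.intersection(object);
  }

  // Manual fallback for older runtimes.
  const intersection = new Set<T>();
  for (const item of object) {
    if (subject.has(item)) intersection.add(item);
  }
  return intersection;
}

intersect(new Set([1, 2, 3]), new Set([2, 3, 4])); // Set { 2, 3 }
```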
@@ -29,3 +32,3 @@ const createResolvers = ({ | ||
batchWindow, | ||
cache: targetCache, | ||
cache: resolverCache, | ||
debugger: debug, | ||
@@ -51,25 +54,31 @@ depthLimit, | ||
} = {}) => { | ||
let prevSelections = /* @__PURE__ */ new Set(); | ||
const prevSelections = /* @__PURE__ */ new Set(); | ||
const replaceSet = (target, source) => { | ||
target.clear(); | ||
for (const value of source) { | ||
target.add(value); | ||
} | ||
}; | ||
const selections = /* @__PURE__ */ new Set(); | ||
const context$1 = context.createContext({ | ||
aliasLength, | ||
cache: targetCache, | ||
cache: resolverCache, | ||
cachePolicy, | ||
depthLimit, | ||
cachePolicy, | ||
scalars, | ||
schema | ||
}); | ||
context$1.subscribeSelect((selection, cache) => { | ||
if (false === (onSelect == null ? void 0 : onSelect(selection, cache))) { | ||
return; | ||
} | ||
const targetSelections = (cache == null ? void 0 : cache.data) === null || Array.isArray(cache == null ? void 0 : cache.data) && cache.data.length === 0 ? ( | ||
context$1.subscribeSelect((selection, selectionCache) => { | ||
const targetSelections = selectionCache === void 0 ? ( | ||
// For empty arrays and null objects, trigger sub-selections made | ||
// in previous selections. | ||
getInteraction(selection.getLeafNodes(), prevSelections) | ||
getIntersection(selection.getLeafNodes(), prevSelections) | ||
) : [selection]; | ||
for (const selection2 of targetSelections) { | ||
if (!selections.has(selection2)) { | ||
if (false === (onSelect == null ? void 0 : onSelect(selection2, selectionCache))) { | ||
continue; | ||
} | ||
selections.add(selection2); | ||
parentContext == null ? void 0 : parentContext.select(selection2, cache); | ||
parentContext == null ? void 0 : parentContext.select(selection2, selectionCache); | ||
} | ||
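`prevSelections` changes from a reassigned `let` binding to a stable `const` Set updated in place through `replaceSet`. Keeping one Set instance matters because other closures, such as the `restorePreviousSelections` helper returned further down, hold a reference to it; reassignment would leave them reading a stale object. A small sketch of the difference, with illustrative variable names:

```ts
const replaceSet = <T>(target: Set<T>, source: Set<T>): void => {
  target.clear();
  for (const value of source) target.add(value);
};

const prev = new Set<string>(['a']);
const aliasHeldElsewhere = prev; // e.g. captured by another closure

// In-place update: every holder of the reference sees the new contents.
replaceSet(prev, new Set(['b', 'c']));
console.log(aliasHeldElsewhere.has('b')); // true

// Reassigning `prev = new Set(...)` (the 3.2.0 approach) would not be
// visible through the alias, which is why the stable Set is used.
```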
@@ -94,3 +103,3 @@ } | ||
const pendingSelections = batching.addSelections( | ||
targetCache, | ||
resolverCache, | ||
selectionsCacheKey, | ||
@@ -107,2 +116,3 @@ selections | ||
var _a, _b; | ||
pendingQueries.delete(pendingSelections); | ||
if (batchWindow) { | ||
@@ -114,3 +124,3 @@ await new Promise( | ||
const uniqueSelections = /* @__PURE__ */ new Set(); | ||
(_a = batching.getSelectionsSet(targetCache, selectionsCacheKey)) == null ? void 0 : _a.forEach( | ||
(_a = batching.getSelectionsSet(resolverCache, selectionsCacheKey)) == null ? void 0 : _a.forEach( | ||
(selections2) => { | ||
@@ -122,4 +132,3 @@ selections2.forEach((selection) => { | ||
); | ||
pendingQueries.delete(pendingSelections); | ||
batching.delSelectionSet(targetCache, selectionsCacheKey); | ||
batching.delSelectionSet(resolverCache, selectionsCacheKey); | ||
const results = await resolveSelections.fetchSelections(uniqueSelections, { | ||
@@ -134,3 +143,3 @@ cache: context$1.cache, | ||
if (cachePolicy !== "no-store") { | ||
targetCaches.add(targetCache); | ||
targetCaches.add(resolverCache); | ||
} | ||
@@ -140,5 +149,10 @@ updateCaches.updateCaches(results, [...targetCaches], { | ||
}); | ||
correlatedCaches.delete(targetCache); | ||
correlatedCaches.delete(resolverCache); | ||
return results; | ||
}, | ||
// When naughty users are adding selections every next microtask, we | ||
// forcibly start the fetch after a number of delays. This number is | ||
// picked arbitrarily; it should be a number that is large enough to | ||
// prevent excessive fetches but small enough to not block the | ||
// actual fetch indefinitely. | ||
{ | ||
@@ -150,5 +164,13 @@ debounceLimit: 20, | ||
); | ||
pendingQueries.get(pendingSelections)().then( | ||
const currentPromise = pendingQueries.get(pendingSelections)(); | ||
const promiseDropped = () => { | ||
var _a; | ||
const activePromise = (_a = pendingQueries.get(pendingSelections)) == null ? void 0 : _a(); | ||
return activePromise && currentPromise !== activePromise; | ||
}; | ||
currentPromise.then( | ||
() => { | ||
prevSelections = new Set(selections); | ||
if (promiseDropped()) return; | ||
if (selections.size === 0) return; | ||
replaceSet(prevSelections, selections); | ||
selections.clear(); | ||
@@ -159,3 +181,4 @@ }, | ||
).finally(() => { | ||
context$1.shouldFetch = false; | ||
if (promiseDropped()) return; | ||
context$1.reset(); | ||
}); | ||
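The fetch chain above captures `currentPromise` and bails out of its `then`/`finally` callbacks when `promiseDropped()` reports that a newer pending query replaced it, so a superseded batch no longer clears selections or resets the context under a newer in-flight fetch. A generic sketch of this "latest wins" guard, independent of gqty's batching internals; `fetchData` and `applyResult` are illustrative names:

```ts
// Only the most recently started request may apply its side effects.
let latest: Promise<unknown> | undefined;

async function runLatest(
  fetchData: () => Promise<unknown>,
  applyResult: (result: unknown) => void,
): Promise<void> {
  const current = fetchData();
  latest = current;

  const result = await current;

  // A newer call replaced this one while it was awaiting; drop its effects.
  if (latest !== current) return;

  applyResult(result);
}
```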
@@ -173,3 +196,3 @@ } | ||
console.warn( | ||
"[GQty] No selections found. If you are reading from the global accessors, try using the first argument instead." | ||
"[GQty] No selections found! If you are reading from the global accessors, try using the first argument instead." | ||
); | ||
@@ -209,4 +232,4 @@ } | ||
} | ||
{ | ||
let prevSelectionsUpdated = false; | ||
if (subscriptionSelections.size) { | ||
let lastSelectionsUpdated = false; | ||
const promise = new Promise((resolve2, reject) => { | ||
@@ -222,8 +245,10 @@ const unsubscribe2 = resolveSelections.subscribeSelections( | ||
[{ data, error, extensions: extensions2 }], | ||
cachePolicy !== "no-store" && context$1.cache !== targetCache ? [context$1.cache, targetCache] : [context$1.cache], | ||
cachePolicy !== "no-store" && context$1.cache !== resolverCache ? [context$1.cache, resolverCache] : [context$1.cache], | ||
{ skipNotify: !context$1.notifyCacheUpdate } | ||
); | ||
if (!prevSelectionsUpdated) { | ||
prevSelectionsUpdated = true; | ||
prevSelections = new Set(selections); | ||
if (!lastSelectionsUpdated) { | ||
lastSelectionsUpdated = true; | ||
if (selections.size > 0) { | ||
replaceSet(prevSelections, selections); | ||
} | ||
} | ||
@@ -253,3 +278,10 @@ } else ; | ||
}; | ||
return { accessor, context: context$1, resolve, selections, subscribe }; | ||
return { | ||
accessor, | ||
context: context$1, | ||
resolve, | ||
restorePreviousSelections: () => replaceSet(selections, prevSelections), | ||
selections, | ||
subscribe | ||
}; | ||
}; | ||
@@ -259,3 +291,3 @@ return { | ||
resolve: async (fn, options) => { | ||
var _a, _b, _c; | ||
var _a, _b; | ||
const { accessor, resolve, selections } = createResolver(options); | ||
@@ -269,3 +301,7 @@ const dataFn = () => fn(accessor); | ||
(_b = options == null ? void 0 : options.onFetch) == null ? void 0 : _b.call(options, fetchPromise); | ||
return (_c = dataFn()) != null ? _c : pick.pick(accessor, selections); | ||
const result = dataFn(); | ||
if (result === void 0) { | ||
return pick.pick(accessor, selections); | ||
} | ||
return result; | ||
}, | ||
@@ -272,0 +308,0 @@ subscribe: (fn, { onSubscribe, ...options } = {}) => { |
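`resolve()` previously returned `dataFn() ?? pick(accessor, selections)`, which also replaced legitimate `null` results with the pick fallback; it now falls back only when the data function returns `undefined`. A minimal illustration of the behavioral difference, with a made-up `fallback` standing in for `pick(accessor, selections)`:

```ts
const fallback = () => ({ picked: true }); // stands in for pick(accessor, selections)

const withNullishCoalescing = (value: unknown) => value ?? fallback();
const withUndefinedCheck = (value: unknown) =>
  value === undefined ? fallback() : value;

withNullishCoalescing(null); // { picked: true } — a null result was replaced
withUndefinedCheck(null); // null — null is preserved in 3.2.1
withUndefinedCheck(undefined); // { picked: true } — fallback still applies
```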
@@ -125,3 +125,3 @@ 'use strict'; | ||
onSubscribe == null ? void 0 : onSubscribe(); | ||
} else if (type === "subscription") { | ||
} else { | ||
throw new index.GQtyError(`Please specify a subscriber for subscriptions.`); | ||
@@ -128,0 +128,0 @@ } |
@@ -12,4 +12,4 @@ import type { Unsubscribe } from './Unsubscribe'; | ||
* Notifying subscribers about the intended disposal of this object, | ||
* subscribers are responsible to remove any references and release any | ||
* resources. | ||
* subscribers are responsible to remove any references in the local scope and | ||
* release active resources. | ||
*/ | ||
@@ -16,0 +16,0 @@ dispose(): void; |
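The reworded contract above: calling `dispose()` notifies subscribers, and each subscriber must drop its local references and release any active resources. A generic sketch of that pattern; the `onDispose` registration method is purely illustrative and not part of gqty's `Disposable` interface:

```ts
// Generic illustration of the dispose contract; not gqty's API surface.
type Unsubscribe = () => void;

interface DisposableLike {
  dispose(): void;
  onDispose(listener: () => void): Unsubscribe;
}

function createDisposable(): DisposableLike {
  const listeners = new Set<() => void>();
  return {
    dispose() {
      // Notify subscribers; each is expected to drop local references and
      // release any active resources (timers, sockets, subscriptions...).
      for (const listener of listeners) listener();
      listeners.clear();
    },
    onDispose(listener) {
      listeners.add(listener);
      return () => listeners.delete(listener);
    },
  };
}
```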
{ | ||
"name": "gqty", | ||
"version": "3.2.0", | ||
"version": "3.2.1", | ||
"description": "The No-GraphQL Client for TypeScript", | ||
@@ -5,0 +5,0 @@ "sideEffects": false, |
@@ -8,4 +8,13 @@ import type { CacheDataContainer } from './Cache'; | ||
export interface Selectable { | ||
select(selection: Selection, cache?: CacheDataContainer): boolean | void; | ||
select(selection: Selection, | ||
/** | ||
* When no cache container is provided, it currently means virtual | ||
* selections on null objects or empty arrays where proxies cannot be | ||
* further created. | ||
* | ||
* The selectable context is responsible to restore a meaningful | ||
* sub-selection tree regarding these selections. | ||
*/ | ||
cache?: CacheDataContainer): boolean | void; | ||
subscribeSelect(callback: Selectable['select']): Unsubscribe; | ||
} |
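Per the widened documentation above, an omitted cache container marks a virtual selection on a null object or empty array, and the receiving context is expected to restore a meaningful sub-selection tree for it. A hedged sketch of a `Selectable` implementation honoring that convention; the types and the leaf-restoring logic are simplified illustrations, not gqty's actual behavior:

```ts
// Simplified stand-ins for the imported types above.
type Selection = { key: string; getLeafNodes(): Set<Selection> };
type CacheDataContainer = { data: unknown };
type Unsubscribe = () => void;

interface Selectable {
  select(selection: Selection, cache?: CacheDataContainer): boolean | void;
  subscribeSelect(callback: Selectable['select']): Unsubscribe;
}

function createSelectable(previousLeaves: Set<Selection>): Selectable {
  const callbacks = new Set<Selectable['select']>();

  return {
    select(selection, cache) {
      // No container: a virtual selection (null object / empty array).
      // Re-issue previously seen leaf selections instead of this node.
      const targets =
        cache === undefined
          ? [...selection.getLeafNodes()].filter((leaf) =>
              previousLeaves.has(leaf),
            )
          : [selection];

      for (const target of targets) {
        for (const callback of callbacks) callback(target, cache);
      }
    },
    subscribeSelect(callback) {
      callbacks.add(callback);
      return () => callbacks.delete(callback);
    },
  };
}
```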
(Diffs for 5 additional files are not supported by the viewer.)
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package.