@sanity/groq-store
Advanced tools
Comparing version 4.1.3 to 5.0.0-pink-lizard
@@ -1,82 +0,71 @@ | ||
import type BrowserEventSource from '@sanity/eventsource/browser' | ||
import {default as groq} from 'groq' | ||
import type NodeEventSource from '@sanity/eventsource/node' | ||
import type {SanityDocument} from '@sanity/types' | ||
import type { ClientPerspective } from '@sanity/client' | ||
import LRUCache from 'mnemonist/lru-cache' | ||
import type { SanityDocument } from '@sanity/client' | ||
/** @public */ | ||
export declare interface Config { | ||
projectId: string | ||
dataset: string | ||
/** | ||
* Keep dataset up to date with remote changes. | ||
* @defaultValue false | ||
*/ | ||
listen?: boolean | ||
/** | ||
* Optional token, if you want to receive drafts, or read data from private datasets | ||
* NOTE: Needs custom EventSource to work in browsers | ||
*/ | ||
token?: string | ||
/** | ||
* Optional limit on number of documents, to prevent using too much memory unexpectedly | ||
* Throws on the first operation (query, retrieval, subscription) if reaching this limit. | ||
*/ | ||
documentLimit?: number | ||
/** | ||
* "Replaces" published documents with drafts, if available. | ||
* Note that document IDs will not reflect draft status, currently | ||
*/ | ||
overlayDrafts?: boolean | ||
/** | ||
* Throttle the event emits to batch updates. | ||
* @defaultValue 50 | ||
*/ | ||
subscriptionThrottleMs?: number | ||
/** | ||
* Optional EventSource. Necessary to authorize using token in the browser, since | ||
* the native window.EventSource does not accept headers. | ||
*/ | ||
EventSource?: EnvImplementations['EventSource'] | ||
/** | ||
* Optional allow list filter for document types. You can use this to limit the amount of documents by declaring the types you want to sync. Note that since you're fetching a subset of your dataset, queries that works against your Content Lake might not work against the local groq-store. | ||
* @example ['page', 'product', 'sanity.imageAsset'] | ||
*/ | ||
includeTypes?: string[] | ||
requestTagPrefix?: string | ||
/** | ||
* @alpha | ||
*/ | ||
export declare interface ClientConfigLike { | ||
projectId: ClientProjectId | ||
dataset: ClientDataset | ||
perspective: DocumentCachePerspective | ||
} | ||
/** @public */ | ||
export declare interface EnvImplementations { | ||
EventSource: typeof NodeEventSource | typeof BrowserEventSource | typeof window.EventSource | ||
getDocuments: ( | ||
options: Pick< | ||
Config, | ||
'projectId' | 'dataset' | 'token' | 'documentLimit' | 'includeTypes' | 'requestTagPrefix' | ||
>, | ||
) => Promise<SanityDocument[]> | ||
} | ||
/** | ||
* TODO add `ClientDataset` to `@sanity/client` | ||
* @alpha | ||
*/ | ||
export declare type ClientDataset = string | ||
export {groq} | ||
export { ClientPerspective } | ||
/** @public */ | ||
export declare interface GroqStore { | ||
query: <R = any>(groqQuery: string, params?: Record<string, unknown> | undefined) => Promise<R> | ||
getDocument: (documentId: string) => Promise<SanityDocument | null> | ||
getDocuments: (documentIds: string[]) => Promise<(SanityDocument | null)[]> | ||
subscribe: <R = any>( | ||
groqQuery: string, | ||
params: Record<string, unknown>, | ||
callback: (err: Error | undefined, result?: R) => void, | ||
) => Subscription | ||
close: () => Promise<void> | ||
} | ||
/** | ||
* TODO add `ClientProjectId` to `@sanity/client` | ||
* @alpha | ||
*/ | ||
export declare type ClientProjectId = string | ||
/** @public */ | ||
export declare function groqStore(config: Config): GroqStore | ||
/** | ||
* @alpha | ||
*/ | ||
export declare type DocumentCacheKey = | ||
`${ClientProjectId}-${ClientDataset}-${DocumentCachePerspective}-${DocumentId}` | ||
/** @public */ | ||
export declare interface Subscription { | ||
unsubscribe: () => Promise<void> | ||
/** | ||
* The only perspectives supported by the cache is `published` and `previewDrafts`. | ||
* @alpha | ||
*/ | ||
export declare type DocumentCachePerspective = Extract< | ||
ClientPerspective, | ||
'published' | 'previewDrafts' | ||
> | ||
/** | ||
* @alpha | ||
*/ | ||
export declare type DocumentId = string | ||
export { SanityDocument } | ||
/** | ||
* @alpha | ||
*/ | ||
export declare interface SanityDocumentLike { | ||
_id: DocumentId | ||
} | ||
/** | ||
* @alpha | ||
*/ | ||
export declare const unstable__documentsCache: LRUCache< | ||
| `${string}-${string}-previewDrafts-${string}` | ||
| `${string}-${string}-published-${string}`, | ||
SanityDocument | ||
> | ||
export declare function unstable__getDocumentCacheKey( | ||
config: ClientConfigLike, | ||
document: SanityDocumentLike, | ||
): DocumentCacheKey | ||
export {} |
@@ -1,519 +0,31 @@ | ||
'use strict';
Object.defineProperty(exports, '__esModule', {
  value: true
});
var EventSourcePolyfill = require('@sanity/eventsource/node');
var deepEqual = require('fast-deep-equal');
var groq = require('groq');
var groqJs = require('groq-js');
var throttleDebounce = require('throttle-debounce');
var mendoza = require('mendoza');
var get = require('simple-get');
var split = require('split2');
// CJS/ESM interop shim: guarantees a `.default` property regardless of how the
// dependency was authored (native ESM default vs. plain CJS export).
function _interopDefaultCompat(e) {
  return e && typeof e === 'object' && 'default' in e ? e : {
    default: e
  };
}
var EventSourcePolyfill__default = /*#__PURE__*/_interopDefaultCompat(EventSourcePolyfill);
var deepEqual__default = /*#__PURE__*/_interopDefaultCompat(deepEqual);
var groq__default = /*#__PURE__*/_interopDefaultCompat(groq);
var get__default = /*#__PURE__*/_interopDefaultCompat(get);
var split__default = /*#__PURE__*/_interopDefaultCompat(split);
// True when the document ID carries the "drafts." prefix used by Sanity drafts.
function isDraft(doc) {
  return doc._id.startsWith("drafts.");
}
// Strips the "drafts." prefix (7 chars) so drafts and published docs share an ID.
function getPublishedId(document) {
  return isDraft(document) ? document._id.slice(7) : document._id;
}
// The native browser EventSource and the polyfill have different addEventListener
// signatures; detect which one we were handed by comparing prototype methods.
const isNativeBrowserEventSource = eventSource => typeof window !== "undefined" && eventSource.addEventListener === window.EventSource.prototype.addEventListener;
const isPolyfillEventSource = eventSource => !isNativeBrowserEventSource(eventSource);
// Attaches a listener using the signature appropriate for the implementation:
// the polyfill takes (type, listener); the native API gets an explicit useCapture=false.
const addEventSourceListener = (eventSource, type, listener) => {
  if (isPolyfillEventSource(eventSource)) {
    eventSource.addEventListener(type, listener);
  } else {
    eventSource.addEventListener(type, listener, false);
  }
};
// Serializes a listen-API request into a query string: `tag` first, then the GROQ
// query, then `$`-prefixed params (JSON-encoded), then remaining truthy options.
const encodeQueryString = _ref2 => {
  let {
    query,
    params = {},
    options = {}
  } = _ref2;
  const searchParams = new URLSearchParams();
  const {
    tag,
    ...opts
  } = options;
  if (tag) searchParams.set("tag", tag);
  searchParams.set("query", query);
  for (const [key, value] of Object.entries(params)) {
    searchParams.set("$".concat(key), JSON.stringify(value));
  }
  for (const [key, value] of Object.entries(opts)) {
    // Falsy option values are omitted entirely rather than serialized.
    if (value) searchParams.set(key, "".concat(value));
  }
  return "?".concat(searchParams);
};
// Opens an EventSource against the Sanity listen API and forwards welcome,
// mutation and error events to the supplied handlers. Returns an object whose
// unsubscribe() closes the connection.
function listen(EventSourceImpl, config, handlers) {
  const {
    projectId,
    dataset,
    token,
    includeTypes,
    requestTagPrefix
  } = config;
  const headers = token ? {
    Authorization: "Bearer ".concat(token)
  } : void 0;
  // Always request mendoza effect patches; optionally tag the request.
  const options = requestTagPrefix ? {
    tag: requestTagPrefix,
    effectFormat: "mendoza"
  } : {
    effectFormat: "mendoza"
  };
  const searchParams = encodeQueryString(Array.isArray(includeTypes) && includeTypes.length > 0 ? {
    query: "*[_type in $includeTypes]",
    params: {
      includeTypes
    },
    options
  } : {
    query: "*",
    options
  });
  const url = "https://".concat(projectId, ".api.sanity.io/v1/data/listen/").concat(dataset).concat(searchParams);
  const es = new EventSourceImpl(url, {
    withCredentials: true,
    headers
  });
  addEventSourceListener(es, "welcome", handlers.open);
  addEventSourceListener(es, "mutation", getMutationParser(handlers.next));
  addEventSourceListener(es, "channelError", msg => {
    // A channelError is fatal for this connection; close before reporting.
    es.close();
    let data;
    try {
      data = JSON.parse(msg.data);
    } catch (err) {
      handlers.error(new Error("Unknown error parsing listener message"));
      return;
    }
    handlers.error(new Error(data.message || data.error || "Listener returned HTTP ".concat(data.statusCode)));
  });
  addEventSourceListener(es, "error", err => {
    const origin = typeof window !== "undefined" && window.location.origin;
    const hintSuffix = origin ? ", and that the CORS-origin (".concat(origin, ") is allowed") : "";
    const errorMessage = isErrorLike(err) ? " (".concat(err.message, ")") : "";
    handlers.error(new Error("Error establishing listener - check that the project ID and dataset are correct".concat(hintSuffix).concat(errorMessage)));
  });
  return {
    unsubscribe: () => Promise.resolve(es.close())
  };
}
// Wraps a mutation callback so it only fires for messages whose payload is
// valid JSON; malformed messages are silently dropped.
function getMutationParser(cb) {
  return msg => {
    let data;
    try {
      data = JSON.parse(msg.data);
    } catch (err) {
      return;
    }
    cb(data);
  };
}
// Duck-types Error-ish values: any non-null object carrying a `message` key.
function isErrorLike(err) {
  return typeof err === "object" && err !== null && "message" in err;
}
// Applies a mendoza patch to a copy of the document with `_rev` removed first —
// the revision is carried by the mutation, not by the patched body.
function applyPatchWithoutRev(doc, patch) {
  const patchDoc = {
    ...doc
  };
  delete patchDoc._rev;
  return mendoza.applyPatch(patchDoc, patch);
}
// Three-way lexicographic comparator suitable for Array.prototype.sort.
function compareString(a, b) {
  if (a > b) return 1;
  if (a < b) return -1;
  return 0;
}
// Debounce window for batching listener mutations into a single update.
const DEBOUNCE_MS = 25;
// Shared resolved-promise stub used for no-op unsubscribe/loaded handles.
function noop() {
  return Promise.resolve();
}
// Bootstraps the in-memory dataset: fetches the full export via getDocuments,
// optionally keeps it in sync through the listen API, and exposes the
// dereference() implementation used by groq-js reference resolution.
function getSyncingDataset(config, onNotifyUpdate, _ref3) {
  let {
    getDocuments,
    EventSource
  } = _ref3;
  const {
    projectId,
    dataset,
    listen: useListener,
    overlayDrafts,
    documentLimit,
    token,
    includeTypes,
    requestTagPrefix
  } = config;
  let stagedDocs;
  let previousTrx;
  let flushTimeout;
  // Publishes a fresh snapshot: clears staging state, applies the draft overlay
  // if configured, and sorts by _id for deterministic query results.
  const onUpdate = docs => {
    stagedDocs = void 0;
    flushTimeout = void 0;
    previousTrx = void 0;
    const finalDocs = overlayDrafts ? overlay(docs) : docs;
    finalDocs.sort((a, b) => compareString(a._id, b._id));
    onNotifyUpdate(finalDocs);
  };
  // Reference resolver; with overlayDrafts the draft version wins over published.
  const dereference = overlayDrafts ? _ref4 => {
    let {
      _ref
    } = _ref4;
    const doc = indexedDocuments.get("drafts.".concat(_ref)) || indexedDocuments.get(_ref);
    if (!doc) {
      return Promise.resolve(doc);
    }
    if (isDraft(doc)) {
      return Promise.resolve(pretendThatItsPublished(doc));
    }
    return Promise.resolve({
      ...doc,
      _originalId: doc._id
    });
  } : _ref5 => {
    let {
      _ref
    } = _ref5;
    return Promise.resolve(indexedDocuments.get(_ref));
  };
  if (!useListener) {
    // One-shot mode: fetch once, never subscribe.
    // NOTE(review): `dereference` closes over `indexedDocuments`, which is declared
    // below this early return — calling it in this mode looks like it would hit a
    // temporal-dead-zone ReferenceError. Confirm against the original sources.
    const loaded2 = getDocuments({
      projectId,
      dataset,
      documentLimit,
      token,
      includeTypes,
      requestTagPrefix
    }).then(onUpdate).then(noop);
    return {
      unsubscribe: noop,
      loaded: loaded2,
      dereference
    };
  }
  const indexedDocuments = /* @__PURE__ */new Map();
  let documents;
  // Mutations that arrive before the initial export finishes are buffered here.
  const buffer = [];
  let onDoneLoading;
  let onLoadError;
  const loaded = new Promise((resolve, reject) => {
    onDoneLoading = resolve;
    onLoadError = reject;
  });
  // Once the listener handshakes ("welcome"), fetch the export and replay any
  // mutations that were buffered while the export was streaming.
  const onOpen = async () => {
    const initial = await getDocuments({
      projectId,
      dataset,
      documentLimit,
      token,
      includeTypes,
      requestTagPrefix
    });
    documents = applyBufferedMutations(initial, buffer);
    documents.forEach(doc => indexedDocuments.set(doc._id, doc));
    onUpdate(documents);
    onDoneLoading();
  };
  const onMutationReceived = msg => {
    if (documents) {
      applyMutation(msg);
      scheduleUpdate(documents, msg);
    } else {
      buffer.push(msg);
    }
  };
  const listener = listen(EventSource, config, {
    next: onMutationReceived,
    open: onOpen,
    error: error => onLoadError(error)
  });
  // Debounces notifications so a multi-document transaction emits one update.
  const scheduleUpdate = (docs, msg) => {
    clearTimeout(flushTimeout);
    if (previousTrx !== msg.transactionId && stagedDocs) {
      onUpdate(stagedDocs);
      previousTrx = void 0;
    } else {
      previousTrx = msg.transactionId;
      stagedDocs = docs.slice();
    }
    flushTimeout = setTimeout(onUpdate, DEBOUNCE_MS, docs.slice());
  };
  const applyMutation = msg => {
    // Skip no-op messages and internal meta documents ("_." prefix).
    if (!msg.effects || msg.documentId.startsWith("_.")) {
      return;
    }
    const document = indexedDocuments.get(msg.documentId) || null;
    replaceDocument(msg.documentId, applyPatchWithoutRev(document, msg.effects.apply));
  };
  // Inserts, replaces or removes a document in both the array and the index.
  const replaceDocument = (id, document) => {
    const current = indexedDocuments.get(id);
    const docs = documents || [];
    const position = current ? docs.indexOf(current) : -1;
    if (position === -1 && document) {
      docs.push(document);
      indexedDocuments.set(id, document);
    } else if (document) {
      docs.splice(position, 1, document);
      indexedDocuments.set(id, document);
    } else {
      docs.splice(position, 1);
      indexedDocuments.delete(id);
    }
  };
  return {
    unsubscribe: listener.unsubscribe,
    loaded,
    dereference
  };
}
// Replays mutations that arrived while the initial export was streaming.
// Mutations are grouped per document; for each document we skip patches until
// we find one whose previousRev matches the exported revision (earlier patches
// are already reflected in the export), then apply the rest in order.
function applyBufferedMutations(documents, mutations) {
  const groups = /* @__PURE__ */new Map();
  mutations.forEach(mutation => {
    const group = groups.get(mutation.documentId) || [];
    group.push(mutation);
    groups.set(mutation.documentId, group);
  });
  groups.forEach((group, id) => {
    const document = documents.find(doc => doc._id === id);
    if (!document) {
      console.warn("Received mutation for missing document %s", id);
      return;
    }
    let hasFoundRevision = false;
    let current = document;
    group.forEach(mutation => {
      hasFoundRevision = hasFoundRevision || mutation.previousRev === document._rev;
      if (!hasFoundRevision) {
        return;
      }
      if (mutation.effects) {
        current = applyPatchWithoutRev(current, mutation.effects.apply);
      }
    });
    documents.splice(documents.indexOf(document), 1, current);
  });
  return documents;
}
// Collapses drafts onto their published counterparts: a draft always replaces
// the published document under the published ID; published docs are kept only
// when no draft has claimed that ID yet. `_originalId` preserves the real ID.
function overlay(documents) {
  const overlayed = /* @__PURE__ */new Map();
  documents.forEach(doc => {
    const existing = overlayed.get(getPublishedId(doc));
    if (doc._id.startsWith("drafts.")) {
      overlayed.set(getPublishedId(doc), pretendThatItsPublished(doc));
    } else if (!existing) {
      overlayed.set(doc._id, {
        ...doc,
        _originalId: doc._id
      });
    }
  });
  return Array.from(overlayed.values());
}
// Rewrites a draft so it presents under its published ID, keeping the draft ID
// available as `_originalId`.
function pretendThatItsPublished(doc) {
  return {
    ...doc,
    _id: getPublishedId(doc),
    _originalId: doc._id
  };
}
// Bundler helpers: __template builds a frozen tagged-template "cooked" array
// with a frozen `raw` property, matching TC39 template-object semantics.
// `_a` caches the template object across getDocument calls.
var __freeze = Object.freeze;
var __defProp = Object.defineProperty;
var __template = (cooked, raw) => __freeze(__defProp(cooked, "raw", {
  value: __freeze(raw || cooked.slice())
}));
var _a;
// Core store factory shared by the node and browser entry points. Wires the
// syncing dataset to groq-js evaluation and manages query subscriptions.
function groqStore$1(config, envImplementations) {
  let documents = [];
  const executeThrottled = throttleDebounce.throttle(config.subscriptionThrottleMs || 50, executeAllSubscriptions);
  const activeSubscriptions = [];
  let dataset;
  // Lazily starts syncing on first use; resolves once the initial export loaded.
  async function loadDataset() {
    if (!dataset) {
      dataset = getSyncingDataset(config, docs => {
        documents = docs;
        executeThrottled();
      }, envImplementations);
    }
    await dataset.loaded;
  }
  async function query(groqQuery, params) {
    await loadDataset();
    const tree = groqJs.parse(groqQuery, {
      params
    });
    const result = await groqJs.evaluate(tree, {
      dataset: documents,
      params,
      dereference: dataset.dereference
    });
    return result.get();
  }
  async function getDocument(documentId) {
    await loadDataset();
    return query(groq__default.default(_a || (_a = __template(["*[_id == $id][0]"]))), {
      id: documentId
    });
  }
  async function getDocuments(documentIds) {
    await loadDataset();
    // NOTE(review): IDs are interpolated straight into the GROQ string; an ID
    // containing `"` would break the query. Sanity IDs normally can't, but a
    // parameterized query would be safer — confirm before changing.
    const subQueries = documentIds.map(id => '*[_id == "'.concat(id, '"][0]')).join(",\n");
    return query("[".concat(subQueries, "]"));
  }
  function subscribe(groqQuery, params, callback) {
    if (!config.listen) {
      throw new Error("Cannot use `subscribe()` without `listen: true`");
    }
    const subscription = {
      query: groqQuery,
      params,
      callback
    };
    activeSubscriptions.push(subscription);
    let unsubscribed = false;
    // Idempotent: repeated calls after the first are no-ops.
    const unsubscribe = () => {
      if (unsubscribed) {
        return Promise.resolve();
      }
      unsubscribed = true;
      activeSubscriptions.splice(activeSubscriptions.indexOf(subscription), 1);
      return Promise.resolve();
    };
    executeQuerySubscription(subscription);
    return {
      unsubscribe
    };
  }
  // Runs one subscription's query; only notifies when the result changed.
  function executeQuerySubscription(subscription) {
    return query(subscription.query, subscription.params).then(res => {
      if ("previousResult" in subscription && deepEqual__default.default(subscription.previousResult, res)) {
        return;
      }
      subscription.previousResult = res;
      subscription.callback(void 0, res);
    }).catch(err => {
      subscription.callback(err);
    });
  }
  function executeAllSubscriptions() {
    activeSubscriptions.forEach(executeQuerySubscription);
  }
  function close() {
    executeThrottled.cancel();
    return dataset ? dataset.unsubscribe() : Promise.resolve();
  }
  return {
    query,
    getDocument,
    getDocuments,
    subscribe,
    close
  };
}
// Detects an inline error object in the export stream: an object with a
// `error.description` string and no `_id` (real documents always have `_id`).
function isStreamError(result) {
  if (!result) {
    return false;
  }
  if (!("error" in result) || typeof result.error !== "object" || result.error === null) {
    return false;
  }
  return "description" in result.error && typeof result.error.description === "string" && !("_id" in result);
}
// Extracts a human-readable message from an API error body.
// Fix: `typeof null === "object"`, so the original `"error" in body` threw a
// TypeError when the parsed body was null — guard explicitly.
function getError(body) {
  if (typeof body === "object" && body !== null && "error" in body && "message" in body) {
    return body.message || body.error;
  }
  return "<unknown error>";
}
// Filters out internal meta documents, whose IDs start with "_.".
function isRelevantDocument(doc) {
  return !doc._id.startsWith("_.");
}
// Node implementation of the dataset export: streams NDJSON from the export
// endpoint and resolves with every relevant document, rejecting on stream
// errors or when documentLimit is exceeded.
const getDocuments = function getDocuments2(_ref6) {
  let {
    projectId,
    dataset,
    token,
    documentLimit,
    includeTypes = [],
    requestTagPrefix
  } = _ref6;
  const baseUrl = new URL("https://".concat(projectId, ".api.sanity.io/v1/data/export/").concat(dataset));
  if (requestTagPrefix) {
    baseUrl.searchParams.set("tag", requestTagPrefix);
  }
  if (includeTypes.length > 0) {
    baseUrl.searchParams.set("types", includeTypes.join(","));
  }
  const url = baseUrl.toString();
  const headers = token ? {
    Authorization: "Bearer ".concat(token)
  } : void 0;
  return new Promise((resolve, reject) => {
    get__default.default({
      url,
      headers
    }, (err, response) => {
      if (err) {
        reject(err);
        return;
      }
      response.setEncoding("utf8");
      const chunks = [];
      if (response.statusCode !== 200) {
        // Non-200: buffer the body and surface the API error message.
        // Fix: setEncoding("utf8") makes data chunks strings, so the original
        // Buffer.concat(chunks) threw; join the strings instead, and tolerate
        // non-JSON error bodies.
        response.on("data", chunk => chunks.push(chunk)).on("end", () => {
          let body;
          try {
            body = JSON.parse(chunks.join(""));
          } catch (parseErr) {
            reject(new Error("Error streaming dataset: HTTP ".concat(response.statusCode)));
            return;
          }
          reject(new Error("Error streaming dataset: ".concat(getError(body))));
        });
        return;
      }
      const documents = [];
      response.pipe(split__default.default(JSON.parse)).on("data", doc => {
        if (isStreamError(doc)) {
          reject(new Error("Error streaming dataset: ".concat(doc.error)));
          return;
        }
        if (doc && isRelevantDocument(doc)) {
          documents.push(doc);
        }
        if (documentLimit && documents.length > documentLimit) {
          reject(new Error("Error streaming dataset: Reached limit of ".concat(documentLimit, " documents")));
          response.destroy();
        }
      }).on("end", () => resolve(documents));
    });
  });
};
// Throws when the current Node.js runtime is older than the supported minimum.
function assertEnvSupport() {
  const [major] = process.version.replace(/^v/, "").split(".", 1).map(Number);
  if (major < 14) {
    throw new Error("Node.js version 14 or higher required");
  }
}
// Public node entry point: validates the runtime, then builds the store with
// the node EventSource polyfill (unless the config supplies its own) and the
// streaming export implementation.
function groqStore(config) {
  var _a;
  assertEnvSupport();
  return groqStore$1(config, {
    EventSource: (_a = config.EventSource) != null ? _a : EventSourcePolyfill__default.default,
    getDocuments
  });
}
// Re-export `groq` lazily and expose the store factory. (A stray ESM `export`
// statement from the 5.x build had been interleaved here; this is a CJS bundle,
// and those symbols are not defined in this module.)
Object.defineProperty(exports, 'groq', {
  enumerable: true,
  get: function () {
    return groq__default.default;
  }
});
exports.groqStore = groqStore;
//# sourceMappingURL=index.js.map
167
package.json
{ | ||
"name": "@sanity/groq-store", | ||
"version": "4.1.3", | ||
"description": "Stream dataset to memory for in-memory querying", | ||
"keywords": [ | ||
"sanity", | ||
"memory", | ||
"query", | ||
"groq" | ||
], | ||
"homepage": "https://github.com/sanity-io/groq-store#readme", | ||
"version": "5.0.0-pink-lizard", | ||
"homepage": "https://github.com/sanity-io/visual-editing/tree/main/packages/groq-store#readme", | ||
"bugs": { | ||
"url": "https://github.com/sanity-io/groq-store/issues" | ||
"url": "https://github.com/sanity-io/visual-editing/issues" | ||
}, | ||
"repository": { | ||
"type": "git", | ||
"url": "git+ssh://git@github.com/sanity-io/groq-store.git" | ||
"url": "git+ssh://git@github.com/sanity-io/visual-editing.git", | ||
"directory": "packages/groq-store" | ||
}, | ||
@@ -22,94 +16,117 @@ "license": "MIT", | ||
"sideEffects": false, | ||
"type": "commonjs", | ||
"type": "module", | ||
"exports": { | ||
".": { | ||
"types": "./dist/index.d.ts", | ||
"browser": { | ||
"source": "./src/browser/index.ts", | ||
"require": "./dist/index.browser.js", | ||
"import": "./dist/index.browser.mjs" | ||
}, | ||
"deno": "./dist/index.browser.mjs", | ||
"worker": "./dist/index.browser.mjs", | ||
"source": "./src/index.ts", | ||
"require": "./dist/index.js", | ||
"require": "./dist/index.cjs", | ||
"node": { | ||
"module": "./dist/index.mjs", | ||
"import": "./dist/index.cjs.mjs" | ||
"module": "./dist/index.js", | ||
"import": "./dist/index.cjs.js" | ||
}, | ||
"import": "./dist/index.mjs", | ||
"default": "./dist/index.mjs" | ||
"import": "./dist/index.js", | ||
"default": "./dist/index.js" | ||
}, | ||
"./package.json": "./package.json" | ||
}, | ||
"main": "./dist/index.js", | ||
"module": "./dist/index.mjs", | ||
"main": "./dist/index.cjs", | ||
"module": "./dist/index.js", | ||
"source": "./src/index.ts", | ||
"browser": { | ||
"./dist/index.js": "./dist/index.browser.js", | ||
"./dist/index.mjs": "./dist/index.browser.mjs" | ||
}, | ||
"types": "./dist/index.d.ts", | ||
"files": [ | ||
"dist", | ||
"src" | ||
"CHANGELOG.md" | ||
], | ||
"scripts": { | ||
"prebuild": "rimraf dist", | ||
"build": "pkg build --strict && pkg --strict", | ||
"lint": "eslint .", | ||
"prepublishOnly": "npm run build", | ||
"start": "cd example && npm start", | ||
"test": "vitest" | ||
}, | ||
"browserslist": [ | ||
"> 0.2% and supports es6-module and supports es6-module-dynamic-import and not dead and not IE 11", | ||
"> 0.2% and last 2 versions and supports es6-module and supports es6-module-dynamic-import and not dead and not IE 11", | ||
"maintained node versions" | ||
], | ||
"prettier": { | ||
"plugins": [ | ||
"prettier-plugin-packagejson" | ||
], | ||
"semi": false, | ||
"singleQuote": true | ||
}, | ||
"eslintConfig": { | ||
"parser": "@typescript-eslint/parser", | ||
"parserOptions": { | ||
"ecmaFeatures": { | ||
"jsx": true | ||
}, | ||
"ecmaVersion": 2018, | ||
"sourceType": "module" | ||
}, | ||
"settings": { | ||
"react": { | ||
"version": "detect" | ||
} | ||
}, | ||
"plugins": [ | ||
"@typescript-eslint", | ||
"simple-import-sort", | ||
"prettier" | ||
], | ||
"extends": [ | ||
"eslint:recommended", | ||
"plugin:prettier/recommended", | ||
"plugin:@typescript-eslint/eslint-recommended", | ||
"plugin:@typescript-eslint/recommended" | ||
], | ||
"rules": { | ||
"no-console": "error", | ||
"no-warning-comments": [ | ||
"warn", | ||
{ | ||
"location": "start", | ||
"terms": [ | ||
"todo", | ||
"@todo", | ||
"fixme" | ||
] | ||
} | ||
], | ||
"@typescript-eslint/explicit-module-boundary-types": "error", | ||
"@typescript-eslint/member-delimiter-style": "off", | ||
"@typescript-eslint/no-empty-interface": "off", | ||
"@typescript-eslint/no-explicit-any": "warn", | ||
"prettier/prettier": "warn", | ||
"simple-import-sort/exports": "warn", | ||
"simple-import-sort/imports": "warn" | ||
}, | ||
"root": true | ||
}, | ||
"dependencies": { | ||
"@sanity/eventsource": "^5.0.0", | ||
"@sanity/types": "^3.14.5", | ||
"fast-deep-equal": "3.1.3", | ||
"groq": "^3.14.5", | ||
"groq-js": "1.3.0", | ||
"mendoza": "3.0.3", | ||
"simple-get": "4.0.1", | ||
"split2": "4.2.0", | ||
"throttle-debounce": "5.0.0" | ||
"mnemonist": "0.39.5" | ||
}, | ||
"devDependencies": { | ||
"@commitlint/cli": "^17.7.2", | ||
"@commitlint/config-conventional": "^17.7.0", | ||
"@sanity/client": "^6.6.0", | ||
"@sanity/pkg-utils": "^3.0.0", | ||
"@sanity/semantic-release-preset": "^4.1.4", | ||
"@types/node": "^18.18.4", | ||
"@types/split2": "^4.2.1", | ||
"@types/throttle-debounce": "^5.0.0", | ||
"@typescript-eslint/eslint-plugin": "^6.7.0", | ||
"@typescript-eslint/parser": "^6.7.0", | ||
"@vitest/coverage-v8": "^0.34.6", | ||
"eslint": "^8.49.0", | ||
"@sanity/client": "6.7.1-pink-lizard.0", | ||
"@sanity/pkg-utils": "^3.2.2", | ||
"@typescript-eslint/eslint-plugin": "^6.9.1", | ||
"@typescript-eslint/parser": "^6.9.1", | ||
"eslint": "^8.52.0", | ||
"eslint-config-prettier": "^9.0.0", | ||
"eslint-config-sanity": "^7.0.1", | ||
"eslint-plugin-prettier": "^5.0.0", | ||
"eslint-plugin-react": "^7.33.2", | ||
"eslint-plugin-prettier": "^5.0.1", | ||
"eslint-plugin-simple-import-sort": "^10.0.0", | ||
"husky": "^8.0.3", | ||
"lint-staged": "^14.0.1", | ||
"ls-engines": "^0.9.0", | ||
"prettier": "^3.0.3", | ||
"prettier-plugin-packagejson": "^2.4.6", | ||
"rimraf": "^5.0.0", | ||
"typescript": "^5.2.2", | ||
"vitest": "^0.34.6", | ||
"vitest-github-actions-reporter": "^0.10.0" | ||
"channels": "0.0.0", | ||
"visual-editing-helpers": "0.0.0" | ||
}, | ||
"peerDependencies": { | ||
"@sanity/client": "6.7.1-pink-lizard.0" | ||
}, | ||
"engines": { | ||
"node": ">= 18" | ||
"node": ">=18" | ||
}, | ||
"publishConfig": { | ||
"access": "public", | ||
"provenance": true | ||
"tag": "pink-lizard" | ||
}, | ||
"scripts": { | ||
"prebuild": "rimraf dist", | ||
"build": "pkg build --strict && pkg --strict", | ||
"lint": "eslint .", | ||
"test": "tsc --noEmit", | ||
"watch": "pkg watch --strict" | ||
} | ||
} | ||
} |
118
README.md
# @sanity/groq-store | ||
[](https://npm-stat.com/charts.html?package=@sanity/groq-store) | ||
[](https://www.npmjs.com/package/@sanity/groq-store) | ||
[](https://www.npmjs.com/package/@sanity/groq-store) | ||
[![gzip size][gzip-badge]][bundlephobia] | ||
[![size][size-badge]][bundlephobia] | ||
In-memory GROQ store. Streams all available documents from Sanity into an in-memory database and allows you to query them there. | ||
> **Warning** | ||
> | ||
> Don't use this package directly, yet. | ||
> | ||
> It's a new version of [`@sanity/groq-store`](https://github.com/sanity-io/groq-store) that uses Content Source Maps instead of groq-js to subscribe to queries that are kept in sync with document mutations in real time. | ||
> | ||
> It's currently used by `@sanity/presentation`. | ||
> We're planning on using it in `@sanity/core-loader` and `@sanity/preview-kit`. | ||
> | ||
> When the API is stable we'll document it and https://github.com/sanity-io/groq-store will be archived and link to this README. | ||
> This notice will be removed and you can start using it directly. | ||
## Targets | ||
- Node.js >= 14 | ||
- Modern browsers (Edge >= 14, Chrome, Safari, Firefox etc) | ||
## Caveats | ||
- Streams _entire_ dataset to memory, so generally not recommended for large datasets | ||
- Needs custom event source to work with tokens in browser | ||
## Installation | ||
```bash | ||
npm i @sanity/groq-store | ||
``` | ||
## Usage | ||
```js | ||
import {groqStore, groq} from '@sanity/groq-store' | ||
// import SanityEventSource from '@sanity/eventsource' | ||
const store = groqStore({ | ||
projectId: 'abc123', | ||
dataset: 'blog', | ||
// Keep dataset up to date with remote changes. Default: false | ||
listen: true, | ||
// "Replaces" published documents with drafts, if available. | ||
// Note that document IDs will not reflect draft status, currently | ||
overlayDrafts: true, | ||
// Optional token, if you want to receive drafts, or read data from private datasets | ||
// NOTE: Needs custom EventSource to work in browsers | ||
token: 'someAuthToken', | ||
// Optional limit on number of documents, to prevent using too much memory unexpectedly | ||
// Throws on the first operation (query, retrieval, subscription) if reaching this limit. | ||
documentLimit: 10000, | ||
// Optional EventSource. Necessary to authorize using token in the browser, since | ||
// the native window.EventSource does not accept headers. | ||
// EventSource: SanityEventSource, | ||
  // Optional allow list filter for document types. You can use this to limit the amount of documents by declaring the types you want to sync. Note that since you're fetching a subset of your dataset, queries that work against your Content Lake might not work against the local groq-store. | ||
// You can quickly list all your types using this query: `array::unique(*[]._type)` | ||
includeTypes: ['post', 'page', 'product', 'sanity.imageAsset'], | ||
}) | ||
store.query(groq`*[_type == "author"]`).then((docs) => { | ||
console.log(docs) | ||
}) | ||
store.getDocument('grrm').then((grrm) => { | ||
console.log(grrm) | ||
}) | ||
store.getDocuments(['grrm', 'jrrt']).then(([grrm, jrrt]) => { | ||
console.log(grrm, jrrt) | ||
}) | ||
const sub = store.subscribe( | ||
groq`*[_type == $type][] {name}`, // Query | ||
{type: 'author'}, // Params | ||
(err, result) => { | ||
if (err) { | ||
console.error('Oh no, an error:', err) | ||
return | ||
} | ||
console.log('Result:', result) | ||
}, | ||
) | ||
// Later, to close subscription: | ||
sub.unsubscribe() | ||
// Later, to close listener: | ||
store.close() | ||
``` | ||
## License | ||
MIT © [Sanity.io](https://www.sanity.io/) | ||
## Release new version | ||
Run ["CI & Release" workflow](https://github.com/sanity-io/groq-store/actions). | ||
Make sure to select the main branch and check "Release new version". | ||
Version will be automatically bumped based on [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/) since the last release. | ||
Semantic release will only release on configured branches, so it is safe to run release on any branch. | ||
Note: commits with `chore:` will be ignored. If you want updated dependencies to trigger | ||
a new version, use `fix(deps):` instead. | ||
[gzip-badge]: https://img.shields.io/bundlephobia/minzip/@sanity/groq-store?label=gzip%20size&style=flat-square | ||
[size-badge]: https://img.shields.io/bundlephobia/min/@sanity/groq-store?label=size&style=flat-square | ||
[bundlephobia]: https://bundlephobia.com/package/@sanity/groq-store | ||
[gzip-badge]: https://img.shields.io/bundlephobia/minzip/@sanity/groq-store@pink-lizard?label=gzip%20size&style=flat-square | ||
[size-badge]: https://img.shields.io/bundlephobia/min/@sanity/groq-store@pink-lizard?label=size&style=flat-square | ||
[bundlephobia]: https://bundlephobia.com/package/@sanity/groq-store@pink-lizard |
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Major refactor
Supply chain riskPackage has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
No v1
QualityPackage is not semver >=1. This means it is not stable and does not support ^ ranges.
Found 1 instance in 1 package
No website
QualityPackage does not have a website.
Found 1 instance in 1 package
Network access
Supply chain riskThis module accesses the network.
Found 1 instance in 1 package
2
11
1
Yes
19983
10
135
2
23
+ Addedmnemonist@0.39.5
+ Added@sanity/client@6.7.1-pink-lizard.0(transitive)
+ Addedmnemonist@0.39.5(transitive)
+ Addedobliterator@2.0.5(transitive)
- Removed@sanity/eventsource@^5.0.0
- Removed@sanity/types@^3.14.5
- Removedfast-deep-equal@3.1.3
- Removedgroq@^3.14.5
- Removedgroq-js@1.3.0
- Removedmendoza@3.0.3
- Removedsimple-get@4.0.1
- Removedsplit2@4.2.0
- Removedthrottle-debounce@5.0.0
- Removed@sanity/client@6.28.2(transitive)
- Removed@sanity/types@3.77.2(transitive)
- Removed@types/react@19.0.10(transitive)
- Removedcsstype@3.1.3(transitive)
- Removeddecompress-response@6.0.0(transitive)
- Removedfast-deep-equal@3.1.3(transitive)
- Removedgroq@3.77.2(transitive)
- Removedgroq-js@1.3.0(transitive)
- Removedmendoza@3.0.3(transitive)
- Removedonce@1.4.0(transitive)
- Removedsimple-concat@1.0.1(transitive)
- Removedsimple-get@4.0.1(transitive)
- Removedsplit2@4.2.0(transitive)
- Removedthrottle-debounce@5.0.0(transitive)
- Removedwrappy@1.0.2(transitive)