@sanity/core-loader
Comparing version 0.4.4-pink-lizard to 0.5.0-pink-lizard
# Changelog

## [0.5.0-pink-lizard](https://github.com/sanity-io/visual-editing/compare/core-loader-v0.4.4-pink-lizard...core-loader-v0.5.0-pink-lizard) (2023-11-08)

### ⚠ BREAKING CHANGES

* lazy load live mode ([#281](https://github.com/sanity-io/visual-editing/issues/281))
* renamed `studioUrl` to `allowStudioOrigin` to clarify it is CORS-related

### Bug Fixes

* lazy load live mode ([#281](https://github.com/sanity-io/visual-editing/issues/281)) ([e52991c](https://github.com/sanity-io/visual-editing/commit/e52991cc974df76647c4ede51de16527c14e6c10))
* renamed `studioUrl` to `allowStudioOrigin` to clarify it is CORS-related ([589a7c2](https://github.com/sanity-io/visual-editing/commit/589a7c29ef61bb53f249847b4d5b9ae78ad252f2))
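Taken together, these changes mean live mode is now lazy loaded and enabled explicitly through `enableLiveMode` (exposed on the store returned by `createQueryStore`), and the CORS-related option previously called `studioUrl` is now `allowStudioOrigin`. A minimal sketch of the new call, with a placeholder Studio origin:

```ts
// 0.5.0: enable live mode explicitly; pass the Studio origin that is allowed
// to connect (previously configured as `studioUrl`).
const disableLiveMode = enableLiveMode({allowStudioOrigin: 'https://my.sanity.studio'})
```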
## [0.4.4-pink-lizard](https://github.com/sanity-io/visual-editing/compare/core-loader-v0.4.3-pink-lizard...core-loader-v0.4.4-pink-lizard) (2023-11-07)

@@ -4,0 +17,0 @@
import { Cache as Cache_2 } from 'async-cache-dedupe'
import { ContentSourceMap } from '@sanity/client'
import type { ContentSourceMap } from '@sanity/client'
import type { QueryParams } from '@sanity/client'
@@ -13,2 +13,3 @@ import type { SanityClient } from '@sanity/client'
/** @public */
export declare const createQueryStore: (
@@ -18,7 +19,47 @@ options: CreateQueryStoreOptions,
/** @public */
export declare interface CreateQueryStoreOptions {
  client: SanityClient | SanityStegaClient
  studioUrl: string
}
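// Example (illustrative, not part of these typings): creating a query store
// with the options above. Project id, dataset, and Studio URL are placeholders.
import { createClient } from '@sanity/client'
import { createQueryStore } from '@sanity/core-loader'

const client = createClient({
  projectId: 'abc123', // placeholder
  dataset: 'production', // placeholder
  apiVersion: '2023-11-08',
  useCdn: false,
})
const queryStore = createQueryStore({ client, studioUrl: 'https://my.sanity.studio' })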
/** @public */
export declare type EnableLiveMode = (
  options: EnableLiveModeOptions,
) => () => void
/** @public */
export declare interface EnableLiveModeOptions {
  /**
   * The origin that is allowed to connect to the loader.
   * If left unspecified, it defaults to the current origin, and the Studio will have to be hosted on the same origin.
   * @example `https://my.sanity.studio`
   * @defaultValue `location.origin`
   */
  allowStudioOrigin: string
  /**
   * Fires when a connection is established to a parent Studio window.
   */
  onConnect?: () => void
  /**
   * Fires when a previously established connection to a parent Studio window is lost.
   */
  onDisconnect?: () => void
}
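// Example (illustrative): enabling live mode on the `queryStore` created in the
// example above. `allowStudioOrigin` is the origin the Studio is served from.
const disableLiveMode = queryStore.enableLiveMode({
  allowStudioOrigin: 'https://my.sanity.studio',
  onConnect: () => console.log('Studio connected'),
  onDisconnect: () => console.log('Studio disconnected'),
})
// Calling the returned function tears live mode down again:
// disableLiveMode()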
/** @internal */
export declare interface Fetcher {
  hydrate: <Response, Error>(
    query: string,
    params: QueryParams,
    initialData?: Response,
    initialSourceMap?: ContentSourceMap,
  ) => QueryStoreState<Response, Error>
  fetch: <Response, Error>(
    query: string,
    params: QueryParams,
    $fetch: MapStore<QueryStoreState<Response, Error>>,
    controller: AbortController,
  ) => void
}
declare type Get<T, K extends KeyofBase> = Extract<T, { [K1 in K]: any }>[K]
@@ -30,8 +71,2 @@
export declare interface LiveModeState {
  enabled: boolean
  connected: boolean
  studioOrigin: string
}
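// Example (illustrative): observing live mode state through the store's
// $LiveMode atom (a MapStore<LiveModeState>), using `queryStore` from above.
const unsubscribeLiveMode = queryStore.$LiveMode.subscribe((state) => {
  console.log('live mode enabled:', state.enabled, 'connected:', state.connected)
})
// unsubscribeLiveMode()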
export declare interface MapStore<Value extends object = any>
@@ -118,2 +153,5 @@ extends WritableAtom<Value> {
export { QueryParams }
/** @public */
export declare interface QueryStore {
@@ -126,3 +164,4 @@ createFetcherStore: <Response = unknown, Error = unknown>(
  ) => MapStore<QueryStoreState<Response, Error>>
  $LiveMode: MapStore<LiveModeState>
  enableLiveMode: EnableLiveMode
  /** @internal */
  unstable__cache: Cache_2 & {
@@ -136,2 +175,3 @@ fetch: <Response>(key: string) => Promise<{
/** @public */
export declare interface QueryStoreState<Response, Error> {
@@ -221,6 +261,9 @@ loading: boolean
/** @internal */
export declare type SetFetcher = (fetcher: Fetcher) => () => void
/**
 * Store with a way to manually change the value.
 */
declare interface WritableAtom<Value = any> extends ReadableAtom<Value> {
export declare interface WritableAtom<Value = any> extends ReadableAtom<Value> {
  /**
@@ -227,0 +270,0 @@ * Change store value.
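// Example (illustrative): fetching a GROQ query through `queryStore` from the
// examples above. The full createFetcherStore parameter list is elided in this
// diff; the (query, params) shape below follows the 0.4.x implementation shown
// further down, so treat it as an assumption.
const $posts = queryStore.createFetcherStore<{ title: string }[]>(
  '*[_type == "post"]{title}',
  {},
)
const stopPosts = $posts.subscribe((snapshot) => {
  if (snapshot.loading) return
  if (snapshot.error) console.error(snapshot.error)
  else console.log(snapshot.data)
})
// stopPosts()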
@@ -1,740 +0,3 @@ | ||
import { createCache } from 'async-cache-dedupe'; | ||
import { SanityStegaClient, stegaEncodeSourceMap } from '@sanity/client/stega'; | ||
let tasks = 0; | ||
let resolves = []; | ||
function startTask() { | ||
tasks += 1; | ||
return () => { | ||
tasks -= 1; | ||
if (tasks === 0) { | ||
let prevResolves = resolves; | ||
resolves = []; | ||
for (let i of prevResolves) i(); | ||
} | ||
}; | ||
} | ||
let clean = Symbol('clean'); | ||
let listenerQueue = []; | ||
let atom = (initialValue, level) => { | ||
let listeners = []; | ||
let $atom = { | ||
get() { | ||
if (!$atom.lc) { | ||
$atom.listen(() => {})(); | ||
} | ||
return $atom.value; | ||
}, | ||
l: level || 0, | ||
lc: 0, | ||
listen(listener, listenerLevel) { | ||
$atom.lc = listeners.push(listener, listenerLevel || $atom.l) / 2; | ||
return () => { | ||
let index = listeners.indexOf(listener); | ||
if (~index) { | ||
listeners.splice(index, 2); | ||
if (! --$atom.lc) $atom.off(); | ||
} | ||
}; | ||
}, | ||
notify(changedKey) { | ||
let runListenerQueue = !listenerQueue.length; | ||
for (let i = 0; i < listeners.length; i += 2) { | ||
listenerQueue.push(listeners[i], listeners[i + 1], $atom.value, changedKey); | ||
} | ||
if (runListenerQueue) { | ||
for (let i = 0; i < listenerQueue.length; i += 4) { | ||
let skip; | ||
for (let j = i + 1; !skip && (j += 4) < listenerQueue.length;) { | ||
if (listenerQueue[j] < listenerQueue[i + 1]) { | ||
skip = listenerQueue.push(listenerQueue[i], listenerQueue[i + 1], listenerQueue[i + 2], listenerQueue[i + 3]); | ||
} | ||
} | ||
if (!skip) { | ||
listenerQueue[i](listenerQueue[i + 2], listenerQueue[i + 3]); | ||
} | ||
} | ||
listenerQueue.length = 0; | ||
} | ||
}, | ||
off() {}, | ||
/* It will be called on last listener unsubscribing. | ||
We will redefine it in onMount and onStop. */ | ||
set(data) { | ||
if ($atom.value !== data) { | ||
$atom.value = data; | ||
$atom.notify(); | ||
} | ||
}, | ||
subscribe(listener, listenerLevel) { | ||
let unbind = $atom.listen(listener, listenerLevel); | ||
listener($atom.value); | ||
return unbind; | ||
}, | ||
value: initialValue | ||
}; | ||
if (process.env.NODE_ENV !== 'production') { | ||
$atom[clean] = () => { | ||
listeners = []; | ||
$atom.lc = 0; | ||
$atom.off(); | ||
}; | ||
} | ||
return $atom; | ||
}; | ||
const MOUNT = 5; | ||
const UNMOUNT = 6; | ||
const REVERT_MUTATION = 10; | ||
let on = (object, listener, eventKey, mutateStore) => { | ||
object.events = object.events || {}; | ||
if (!object.events[eventKey + REVERT_MUTATION]) { | ||
object.events[eventKey + REVERT_MUTATION] = mutateStore(eventProps => { | ||
// eslint-disable-next-line no-sequences | ||
object.events[eventKey].reduceRight((event, l) => (l(event), event), { | ||
shared: {}, | ||
...eventProps | ||
}); | ||
}); | ||
} | ||
object.events[eventKey] = object.events[eventKey] || []; | ||
object.events[eventKey].push(listener); | ||
return () => { | ||
let currentListeners = object.events[eventKey]; | ||
let index = currentListeners.indexOf(listener); | ||
currentListeners.splice(index, 1); | ||
if (!currentListeners.length) { | ||
delete object.events[eventKey]; | ||
object.events[eventKey + REVERT_MUTATION](); | ||
delete object.events[eventKey + REVERT_MUTATION]; | ||
} | ||
}; | ||
}; | ||
let STORE_UNMOUNT_DELAY = 1000; | ||
let onMount = ($store, initialize) => { | ||
let listener = payload => { | ||
let destroy = initialize(payload); | ||
if (destroy) $store.events[UNMOUNT].push(destroy); | ||
}; | ||
return on($store, listener, MOUNT, runListeners => { | ||
let originListen = $store.listen; | ||
$store.listen = (...args) => { | ||
if (!$store.lc && !$store.active) { | ||
$store.active = true; | ||
runListeners(); | ||
} | ||
return originListen(...args); | ||
}; | ||
let originOff = $store.off; | ||
$store.events[UNMOUNT] = []; | ||
$store.off = () => { | ||
originOff(); | ||
setTimeout(() => { | ||
if ($store.active && !$store.lc) { | ||
$store.active = false; | ||
for (let destroy of $store.events[UNMOUNT]) destroy(); | ||
$store.events[UNMOUNT] = []; | ||
} | ||
}, STORE_UNMOUNT_DELAY); | ||
}; | ||
if (process.env.NODE_ENV !== 'production') { | ||
let originClean = $store[clean]; | ||
$store[clean] = () => { | ||
for (let destroy of $store.events[UNMOUNT]) destroy(); | ||
$store.events[UNMOUNT] = []; | ||
$store.active = false; | ||
originClean(); | ||
}; | ||
} | ||
return () => { | ||
$store.listen = originListen; | ||
$store.off = originOff; | ||
}; | ||
}); | ||
}; | ||
function listenKeys($store, keys, listener) { | ||
let keysSet = new Set([...keys, undefined]); | ||
return $store.listen((value, changed) => { | ||
if (keysSet.has(changed)) { | ||
listener(value, changed); | ||
} | ||
}); | ||
} | ||
let map = (value = {}) => { | ||
let $map = atom(value); | ||
$map.setKey = function (key, newValue) { | ||
if (typeof newValue === 'undefined') { | ||
if (key in $map.value) { | ||
$map.value = { | ||
...$map.value | ||
}; | ||
delete $map.value[key]; | ||
$map.notify(key); | ||
} | ||
} else if ($map.value[key] !== newValue) { | ||
$map.value = { | ||
...$map.value, | ||
[key]: newValue | ||
}; | ||
$map.notify(key); | ||
} | ||
}; | ||
return $map; | ||
}; | ||
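// The helpers above (startTask, atom, onMount, listenKeys, map) are a bundled,
// nanostores-style reactive store implementation used by the loader below.
// Illustrative usage (not executed here):
//   const $count = atom(0)
//   const unsubscribe = $count.subscribe((value) => console.log(value)) // fires immediately, then on changes
//   $count.set(1) // notifies listeners
//   unsubscribe()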
// Unique ID creation requires a high quality random # generator. In the browser we therefore | ||
// require the crypto API and do not support built-in fallback to lower quality random number | ||
// generators (like Math.random()). | ||
let getRandomValues; | ||
const rnds8 = new Uint8Array(16); | ||
function rng() { | ||
// lazy load so that environments that need to polyfill have a chance to do so | ||
if (!getRandomValues) { | ||
// getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. | ||
getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto); | ||
if (!getRandomValues) { | ||
throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); | ||
} | ||
} | ||
return getRandomValues(rnds8); | ||
} | ||
/** | ||
* Convert array of 16 byte values to UUID string format of the form: | ||
* XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX | ||
*/ | ||
const byteToHex = []; | ||
for (let i = 0; i < 256; ++i) { | ||
byteToHex.push((i + 0x100).toString(16).slice(1)); | ||
} | ||
function unsafeStringify(arr, offset = 0) { | ||
// Note: Be careful editing this code! It's been tuned for performance | ||
// and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 | ||
return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; | ||
} | ||
const randomUUID = typeof crypto !== 'undefined' && crypto.randomUUID && crypto.randomUUID.bind(crypto); | ||
var native = { | ||
randomUUID | ||
}; | ||
function v4(options, buf, offset) { | ||
if (native.randomUUID && !buf && !options) { | ||
return native.randomUUID(); | ||
} | ||
options = options || {}; | ||
const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` | ||
rnds[6] = rnds[6] & 0x0f | 0x40; | ||
rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided | ||
if (buf) { | ||
offset = offset || 0; | ||
for (let i = 0; i < 16; ++i) { | ||
buf[offset + i] = rnds[i]; | ||
} | ||
return buf; | ||
} | ||
return unsafeStringify(rnds); | ||
} | ||
const RESPONSE_TIMEOUT = 1e3; | ||
const HEARTBEAT_INTERVAL = 3e3; | ||
const INTERNAL_MSG_TYPES = ["channel/disconnect", "channel/response"]; | ||
const HANDSHAKE_MSG_TYPES = ["handshake/syn", "handshake/syn-ack", "handshake/ack"]; | ||
const isObject = value => { | ||
return Boolean(value) && typeof value === "object" && !Array.isArray(value); | ||
}; | ||
const isInternalMessage = type => { | ||
return INTERNAL_MSG_TYPES.some(t => t === type); | ||
}; | ||
const isHandshakeMessage = type => { | ||
return HANDSHAKE_MSG_TYPES.some(t => t === type); | ||
}; | ||
const isHeartbeatMessage = type => { | ||
return type === "channel/heartbeat"; | ||
}; | ||
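// createChannel implements a postMessage-based channel between this window and
// the configured targets (here, the parent Studio window): each connection runs
// a handshake/syn -> handshake/syn-ack -> handshake/ack exchange, optionally
// sends periodic channel/heartbeat messages, and buffers outgoing messages
// until the connection reports "connected".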
function createChannel(config) { | ||
const inFrame = window.self !== window.top; | ||
const connections = config.connections.map(connection => { | ||
return { | ||
buffer: [], | ||
config: connection, | ||
heartbeat: null, | ||
id: null, | ||
status: "fresh" | ||
// target, | ||
// targetId, | ||
// targetOrigin, | ||
}; | ||
}); | ||
function flush(connection) { | ||
const toFlush = [...connection.buffer]; | ||
connection.buffer.splice(0, connection.buffer.length); | ||
toFlush.forEach(({ | ||
type, | ||
data | ||
}) => { | ||
post(connection, type, data); | ||
}); | ||
} | ||
function startHeartbeat(connection) { | ||
stopHeartbeat(connection); | ||
if (connection.config.heartbeat) { | ||
const heartbeatInverval = typeof connection.config.heartbeat === "number" ? connection.config.heartbeat : HEARTBEAT_INTERVAL; | ||
connection.heartbeat = window.setInterval(() => { | ||
send("channel/heartbeat", void 0, [connection]); | ||
}, heartbeatInverval); | ||
} | ||
} | ||
function stopHeartbeat(connection) { | ||
if (connection.heartbeat) { | ||
window.clearInterval(connection.heartbeat); | ||
} | ||
} | ||
function setConnectionStatus(connection, newStatus) { | ||
const prevStatus = connection.status; | ||
if (prevStatus !== newStatus) { | ||
connection.status = newStatus; | ||
config.onStatusUpdate?.(newStatus, prevStatus, connection); | ||
if (newStatus === "connected") { | ||
flush(connection); | ||
startHeartbeat(connection); | ||
} | ||
if (newStatus === "disconnected") { | ||
stopHeartbeat(connection); | ||
} | ||
} | ||
} | ||
function findConnection(e) { | ||
const { | ||
source, | ||
origin, | ||
data | ||
} = e; | ||
if (isObject(data)) { | ||
return connections.find(connection => { | ||
return config.id === data.to && connection.config.id === data.from && connection.config.target === source && (connection.config.targetOrigin === origin || connection.config.targetOrigin === "*") && ( | ||
// Must match the connection id or be a handshake | ||
connection.id === data.connectionId || isHandshakeMessage(data.type)); | ||
}); | ||
} | ||
return void 0; | ||
} | ||
function post(connection, type, data) { | ||
return new Promise((resolve, reject) => { | ||
const msg = { | ||
id: v4(), | ||
type, | ||
connectionId: connection.id, | ||
from: config.id, | ||
to: connection.config.id, | ||
data | ||
}; | ||
const isInternal = isInternalMessage(type); | ||
const isHandshake = isHandshakeMessage(type); | ||
const isHeartbeat = isHeartbeatMessage(type); | ||
const activeConnection = connections.find(c => c.id === connection.id && c.status === "connected"); | ||
if (isInternal || isHandshake || isHeartbeat || activeConnection) { | ||
if (!isInternal || isHeartbeat) { | ||
const maxWait = setTimeout(() => { | ||
if (msg.connectionId === connection.id) { | ||
reject({ | ||
reason: `Received no response to message '${msg.id}' on client '${config.id}'`, | ||
msg, | ||
connection | ||
}); | ||
} else { | ||
resolve(msg.id); | ||
} | ||
}, RESPONSE_TIMEOUT); | ||
const transact = e => { | ||
const { | ||
data: eventData | ||
} = e; | ||
if (eventData.type === "channel/response" && eventData.data?.responseTo && eventData.data.responseTo === msg.id) { | ||
window.removeEventListener("message", transact, false); | ||
clearTimeout(maxWait); | ||
resolve(msg.id); | ||
} | ||
}; | ||
window.addEventListener("message", transact, false); | ||
} | ||
try { | ||
const targetOrigin = isHandshake ? "*" : connection.config.targetOrigin; | ||
connection.config.target.postMessage(msg, { | ||
targetOrigin | ||
}); | ||
if (isInternal || isHandshake) resolve(msg.id); | ||
return; | ||
} catch (e) { | ||
console.error("Failed to postMessage", e, { | ||
msg, | ||
connection | ||
}); | ||
reject({ | ||
reason: `Failed to postMessage '${msg.id}' on client '${config.id}'`, | ||
msg, | ||
connection | ||
}); | ||
} | ||
} | ||
if (connection.status === "fresh" || connection.status === "connecting") { | ||
connection.buffer.push({ | ||
type, | ||
data | ||
}); | ||
resolve(msg.id); | ||
} | ||
reject({ | ||
reason: `Will not send message '${msg.id}' on client '${config.id}'`, | ||
msg, | ||
connection | ||
}); | ||
}); | ||
} | ||
function postMany(connections2, type, data) { | ||
return Promise.allSettled(connections2.map(connection => post(connection, type, data))); | ||
} | ||
function handleHandshake(connection, e) { | ||
if (e.data.type === "handshake/syn") { | ||
const id = e.data.data?.id || connection.id; | ||
connection.id = id; | ||
post(connection, "handshake/syn-ack", { | ||
id | ||
}); | ||
} | ||
if (e.data.type === "handshake/syn-ack") { | ||
const id = e.data.data?.id || connection.id; | ||
connection.id = id; | ||
setConnectionStatus(connection, "connected"); | ||
post(connection, "handshake/ack", { | ||
id | ||
}); | ||
} | ||
if (e.data.type === "handshake/ack") { | ||
const id = e.data.data?.id || connection.id; | ||
connection.id = id; | ||
setConnectionStatus(connection, "connected"); | ||
} | ||
} | ||
function handleEvents(e) { | ||
const connection = findConnection(e); | ||
if (!connection) return; | ||
const { | ||
data | ||
} = e; | ||
if (isHandshakeMessage(data.type)) { | ||
handleHandshake(connection, e); | ||
} else if (data.type === "channel/disconnect") { | ||
setConnectionStatus(connection, "disconnected"); | ||
} else if (data.type === "channel/response") ;else { | ||
const args = [data.type, data.data]; | ||
config.handler(...args); | ||
post(connection, "channel/response", { | ||
responseTo: data.id | ||
}); | ||
} | ||
} | ||
function disconnect() { | ||
window.removeEventListener("message", handleEvents, false); | ||
const connectionsToDisconnect = connections.filter(({ | ||
status | ||
}) => status === "connecting" || status === "connected"); | ||
if (!connectionsToDisconnect.length) return; | ||
postMany(connectionsToDisconnect, "channel/disconnect"); | ||
connectionsToDisconnect.forEach(connection => { | ||
setConnectionStatus(connection, "disconnected"); | ||
}); | ||
} | ||
function connect() { | ||
window.addEventListener("message", handleEvents, false); | ||
const inactiveConnections = connections.filter(connection => ["disconnected", "fresh", "unhealthy"].includes(connection.status)); | ||
return Promise.all(inactiveConnections.map(connection => { | ||
setConnectionStatus(connection, "connecting"); | ||
return post(connection, "handshake/syn", { | ||
id: v4() | ||
}); | ||
})); | ||
} | ||
async function send(type, data, connectionSubset) { | ||
const results = await postMany(connectionSubset || connections, type, data); | ||
results.forEach(result => { | ||
if (result.status === "rejected") { | ||
const connection = connections.find(connection2 => connection2.status === "connected" && connection2.id === result.reason.connection.id); | ||
if (connection) { | ||
setConnectionStatus(connection, "unhealthy"); | ||
} | ||
} | ||
}); | ||
return results; | ||
} | ||
connect(); | ||
return { | ||
disconnect, | ||
inFrame, | ||
send | ||
}; | ||
} | ||
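// createLiveModeStore wires the channel above into the $LiveMode map store: on
// mount it connects to the Studio at `studioUrl` (client id "loaders" talking
// to "presentation"), tracks connection status, caches query results pushed via
// loader/query-change (stega-encoding them when the stega client is enabled),
// and emits loader/query-listen and loader/documents messages for the queries
// currently on the page.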
function createLiveModeStore(options) { | ||
const { | ||
client, | ||
studioUrl, | ||
$perspective | ||
} = options; | ||
const { | ||
projectId, | ||
dataset | ||
} = client.config(); | ||
let channel = null; | ||
const initialLiveMode = { | ||
enabled: false, | ||
connected: false, | ||
studioOrigin: "" | ||
}; | ||
const $LiveMode = map(initialLiveMode); | ||
const cache = /* @__PURE__ */new Map(); | ||
if (typeof document !== "undefined") { | ||
onMount($LiveMode, () => { | ||
$LiveMode.setKey("enabled", true); | ||
const studioOrigin = new URL(studioUrl, location.origin).origin; | ||
$LiveMode.setKey("studioOrigin", studioOrigin); | ||
channel = createChannel({ | ||
id: "loaders", | ||
onStatusUpdate(status) { | ||
if (status === "connected") { | ||
$LiveMode.setKey("connected", true); | ||
} else if (status === "disconnected" || status === "unhealthy") { | ||
$LiveMode.setKey("connected", false); | ||
} | ||
}, | ||
connections: [{ | ||
target: parent, | ||
targetOrigin: studioOrigin, | ||
id: "presentation" | ||
}], | ||
handler: (type, data) => { | ||
if (type === "loader/perspective" && data.projectId === projectId && data.dataset === dataset) { | ||
$perspective.set(data.perspective); | ||
updateLiveQueries(); | ||
} else if (type === "loader/query-change" && data.projectId === projectId && data.dataset === dataset) { | ||
const { | ||
perspective, | ||
query, | ||
params | ||
} = data; | ||
if (client instanceof SanityStegaClient && client.config().stega.enabled && data.resultSourceMap) { | ||
cache.set(JSON.stringify({ | ||
perspective, | ||
query, | ||
params | ||
}), { | ||
...data, | ||
result: stegaEncodeSourceMap(data.result, data.resultSourceMap, client.config().stega, { | ||
projectId: data.projectId, | ||
dataset: data.dataset | ||
}) | ||
}); | ||
} else { | ||
cache.set(JSON.stringify({ | ||
perspective, | ||
query, | ||
params | ||
}), data); | ||
} | ||
updateLiveQueries(); | ||
} | ||
} | ||
}); | ||
const unlistenConnection = listenKeys($LiveMode, ["connected"], () => {}); | ||
return () => { | ||
unlistenConnection(); | ||
$LiveMode.setKey("enabled", false); | ||
$LiveMode.setKey("connected", false); | ||
channel?.disconnect(); | ||
channel = null; | ||
}; | ||
}); | ||
} | ||
const liveQueries = /* @__PURE__ */new Set(); | ||
const addLiveQuery = (query, params, $fetch) => { | ||
const liveQuery = { | ||
query, | ||
params, | ||
$fetch | ||
}; | ||
liveQueries.add(liveQuery); | ||
emitQueryListen(); | ||
return () => { | ||
liveQueries.delete(liveQuery); | ||
emitQueryListen(); | ||
}; | ||
}; | ||
const emitQueryListen = () => { | ||
if (!channel) { | ||
throw new Error("No channel"); | ||
} | ||
const perspective = $perspective.get(); | ||
for (const { | ||
query, | ||
params, | ||
$fetch | ||
} of liveQueries) { | ||
channel.send("loader/query-listen", { | ||
projectId, | ||
dataset, | ||
perspective, | ||
query, | ||
params | ||
}); | ||
$fetch.setKey("loading", true); | ||
} | ||
}; | ||
function updateLiveQueries() { | ||
const perspective = $perspective.get(); | ||
const documentsOnPage = []; | ||
for (const { | ||
query, | ||
params, | ||
$fetch | ||
} of liveQueries) { | ||
const key = JSON.stringify({ | ||
perspective, | ||
query, | ||
params | ||
}); | ||
const value = cache.get(key); | ||
if (value) { | ||
$fetch.setKey("data", value.result); | ||
$fetch.setKey("sourceMap", value.resultSourceMap); | ||
$fetch.setKey("loading", false); | ||
documentsOnPage.push(...(value.resultSourceMap?.documents ?? [])); | ||
} | ||
} | ||
channel?.send("loader/documents", { | ||
projectId, | ||
dataset, | ||
perspective, | ||
documents: documentsOnPage | ||
}); | ||
} | ||
const runLiveFetch = async (query, params, $fetch, controller) => { | ||
try { | ||
const removeLiveQuery = addLiveQuery(query, params, $fetch); | ||
controller.signal.addEventListener("abort", () => { | ||
removeLiveQuery(); | ||
updateLiveQueries(); | ||
}, { | ||
once: true | ||
}); | ||
updateLiveQueries(); | ||
$fetch.setKey("error", void 0); | ||
if (controller.signal.aborted) return; | ||
} catch (error) { | ||
$fetch.setKey("error", error); | ||
$fetch.setKey("loading", false); | ||
} | ||
}; | ||
return { | ||
$LiveMode, | ||
runLiveFetch | ||
}; | ||
} | ||
const createQueryStore = options => { | ||
const { | ||
client, | ||
studioUrl | ||
} = options; | ||
const { | ||
projectId, | ||
dataset, | ||
resultSourceMap, | ||
perspective | ||
} = client.config(); | ||
if (!projectId) throw new Error("Missing projectId"); | ||
if (!dataset) throw new Error("Missing dataset"); | ||
if (!resultSourceMap) { | ||
client.config({ | ||
resultSourceMap: "withKeyArraySelector" | ||
}); | ||
} | ||
if (perspective !== "published" && perspective !== "previewDrafts") { | ||
client.config({ | ||
perspective: "published" | ||
}); | ||
} | ||
const $perspective = atom(client.config().perspective); | ||
const { | ||
$LiveMode, | ||
runLiveFetch | ||
} = createLiveModeStore({ | ||
client, | ||
studioUrl, | ||
$perspective | ||
}); | ||
const cache = createCache().define("fetch", async key => { | ||
const { | ||
query, | ||
params = {} | ||
} = JSON.parse(key); | ||
const { | ||
result, | ||
resultSourceMap: resultSourceMap2 | ||
} = await client.fetch(query, params, { | ||
filterResponse: false | ||
}); | ||
return { | ||
result, | ||
resultSourceMap: resultSourceMap2 | ||
}; | ||
}); | ||
const runFetch = async (query, params, $fetch, controller) => { | ||
if (controller.signal.aborted) return; | ||
if ($LiveMode.get().connected) { | ||
return runLiveFetch(query, params, $fetch, controller); | ||
} | ||
const finishTask = startTask(); | ||
try { | ||
$fetch.setKey("loading", true); | ||
$fetch.setKey("error", void 0); | ||
const response = await cache.fetch(JSON.stringify({ | ||
query, | ||
params | ||
})); | ||
if (controller.signal.aborted) return; | ||
$fetch.setKey("data", response.result); | ||
$fetch.setKey("sourceMap", response.resultSourceMap); | ||
} catch (error) { | ||
$fetch.setKey("error", error); | ||
} finally { | ||
$fetch.setKey("loading", false); | ||
finishTask(); | ||
} | ||
}; | ||
const createFetcherStore = (query, params = {}, initialData, initialSourceMap) => { | ||
const $fetch = map({ | ||
loading: true, | ||
error: void 0, | ||
data: initialData, | ||
sourceMap: initialSourceMap | ||
}); | ||
onMount($fetch, () => { | ||
let controller = new AbortController(); | ||
runFetch(query, params, $fetch, controller); | ||
const unListenKeys = listenKeys($LiveMode, ["enabled", "connected"], () => { | ||
controller.abort(); | ||
controller = new AbortController(); | ||
runFetch(query, params, $fetch, controller); | ||
}); | ||
return () => { | ||
unListenKeys(); | ||
controller.abort(); | ||
}; | ||
}); | ||
return $fetch; | ||
}; | ||
return { | ||
createFetcherStore, | ||
$LiveMode, | ||
unstable__cache: cache | ||
}; | ||
}; | ||
export { createQueryStore }; | ||
import 'async-cache-dedupe'; | ||
export { createQueryStore } from './_chunks/index-5jOPu9-4.js'; | ||
//# sourceMappingURL=index.js.map |
{
  "name": "@sanity/core-loader",
  "version": "0.4.4-pink-lizard",
  "version": "0.5.0-pink-lizard",
  "homepage": "https://github.com/sanity-io/visual-editing/tree/main/packages/core-loader#readme",
@@ -100,3 +100,3 @@ "bugs": {
  "async-cache-dedupe": "2.0.0",
  "@sanity/groq-store": "5.1.3-pink-lizard"
  "@sanity/groq-store": "5.2.0-pink-lizard"
},
@@ -103,0 +103,0 @@ "devDependencies": {
Major refactor
Supply chain risk: this package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
+ Added @sanity/groq-store@5.2.0-pink-lizard (transitive)
- Removed @sanity/groq-store@5.1.3-pink-lizard (transitive)