@sanity/mutate
Comparing version 0.11.0-canary.3 to 0.11.0
import { isKeyedElement, isArrayElement, isPropertyElement, stringify } from "./stringify.js"; | ||
import { isObject } from "./isObject.js"; | ||
import { applyPatches as applyPatches$1, parsePatch } from "@sanity/diff-match-patch"; | ||
import { webcrypto } from "node:crypto"; | ||
function keyOf(value) { | ||
@@ -207,14 +206,2 @@ return value !== null && typeof value == "object" && typeof value._key == "string" && value._key || null; | ||
} | ||
const urlAlphabet = "useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict", POOL_SIZE_MULTIPLIER = 128; | ||
let pool, poolOffset; | ||
function fillPool(bytes) { | ||
!pool || pool.length < bytes ? (pool = Buffer.allocUnsafe(bytes * POOL_SIZE_MULTIPLIER), webcrypto.getRandomValues(pool), poolOffset = 0) : poolOffset + bytes > pool.length && (webcrypto.getRandomValues(pool), poolOffset = 0), poolOffset += bytes; | ||
} | ||
function nanoid(size = 21) { | ||
fillPool(size -= 0); | ||
let id = ""; | ||
for (let i = poolOffset - size; i < poolOffset; i++) | ||
id += urlAlphabet[pool[i] & 63]; | ||
return id; | ||
} | ||
function hasId(doc) { | ||
@@ -233,5 +220,4 @@ return "_id" in doc; | ||
hasId, | ||
nanoid, | ||
splice | ||
}; | ||
//# sourceMappingURL=utils.js.map |
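The `fillPool`/`nanoid` pair above implements the pooled-entropy technique from the nanoid package: a single `webcrypto.getRandomValues` call fills a buffer 128× the id size (`POOL_SIZE_MULTIPLIER`), successive `nanoid()` calls slice bytes out of that pool, and `& 63` maps each byte uniformly onto the 64-character alphabet. A minimal standalone sketch of the same idea, assuming a runtime with the WebCrypto `crypto` global (names are illustrative, not this package's API):

const ALPHABET =
  'useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict'
const POOL_SIZE_MULTIPLIER = 128

let pool: Uint8Array | undefined
let poolOffset = 0

function randomId(size = 21): string {
  // Refill the pool only when it is missing or exhausted, so one
  // expensive getRandomValues call serves many randomId calls.
  if (!pool || poolOffset + size > pool.length) {
    pool = crypto.getRandomValues(new Uint8Array(size * POOL_SIZE_MULTIPLIER))
    poolOffset = 0
  }
  let id = ''
  for (let i = poolOffset; i < poolOffset + size; i++) {
    // byte & 63 maps 0..255 uniformly onto the 64-character alphabet
    id += ALPHABET[pool[i] & 63]
  }
  poolOffset += size
  return id
}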
@@ -404,3 +404,3 @@ import {Call} from 'hotscript' | ||
/** | ||
* Formats an intersection object type, so it outputs as `{"foo": 1, "bar": 1}` instead of `{"foo": 1} & {"bar": 2}` | ||
* Formats an intersection object type, so it outputs as `{"foo": 1, "bar": 2}` instead of `{"foo": 1} & {"bar": 2}` | ||
*/ | ||
@@ -774,3 +774,3 @@ export declare type Format<A> = A extends { | ||
export declare type SanityDocumentBase = { | ||
export declare interface SanityDocumentBase { | ||
_id?: string | ||
@@ -777,0 +777,0 @@ _type: string |
import { arrify } from "./_chunks-es/arrify.js"; | ||
import { splice, applyPatchMutation, nanoid, assignId, hasId } from "./_chunks-es/utils.js"; | ||
import { splice, applyPatchMutation, assignId, hasId } from "./_chunks-es/utils.js"; | ||
import { applyNodePatch, applyOp, applyPatches } from "./_chunks-es/utils.js"; | ||
import { nanoid } from "nanoid"; | ||
function applyInCollection(collection, mutations) { | ||
@@ -5,0 +6,0 @@ return arrify(mutations).reduce((prev, mutation) => { |
import {Observable} from 'rxjs' | ||
import {QueryParams} from '@sanity/client' | ||
import {RawPatch} from 'mendoza' | ||
import {ReconnectEvent} from '@sanity/client' | ||
import {SanityClient} from '@sanity/client' | ||
import {WelcomeEvent} from '@sanity/client' | ||
export declare interface AccessibleDocumentResult { | ||
id: string | ||
document: SanityDocumentBase | ||
accessible: true | ||
} | ||
export declare type AnyArray<T = any> = T[] | readonly T[] | ||
@@ -40,57 +51,48 @@ | ||
export declare interface ContentLakeStore { | ||
meta: { | ||
/** | ||
* A stream of events for anything that happens in the store | ||
*/ | ||
events: Observable<OptimisticDocumentEvent | RemoteDocumentEvent> | ||
/** | ||
* A stream of current staged changes | ||
*/ | ||
stage: Observable<MutationGroup[]> | ||
/** | ||
* A stream of current conflicts. TODO: Needs more work | ||
*/ | ||
conflicts: Observable<Conflict[]> | ||
} | ||
/** | ||
* Applies the given mutations. Mutations are not guaranteed to be submitted in the same transaction | ||
* This can mutate both local and remote documents at the same time | ||
*/ | ||
mutate(mutation: Mutation[]): MutationResult | ||
/** | ||
* Makes sure the given mutations are posted in a single transaction | ||
*/ | ||
transaction( | ||
transaction: | ||
| { | ||
id?: string | ||
mutations: Mutation[] | ||
} | ||
| Mutation[], | ||
): MutationResult | ||
/** | ||
* Check out a document for editing. This is required to be able to see optimistic changes | ||
*/ | ||
observe(id: string): Observable<SanityDocumentBase | undefined> | ||
/** | ||
* Observe events for a given document id | ||
*/ | ||
observeEvents( | ||
id: string, | ||
): Observable<RemoteDocumentEvent | OptimisticDocumentEvent> | ||
/** | ||
* Optimize list of pending mutations | ||
*/ | ||
optimize(): void | ||
/** | ||
* Submit pending mutations | ||
*/ | ||
submit(): Promise<SubmitResult[]> | ||
} | ||
/** | ||
* Creates a function that can be used to listen for events that happen in a single document | ||
* Features: | ||
* - built-in retrying and connection recovery (track disconnected state by listening for `reconnect` events) | ||
* - built-in mutation event ordering (events may arrive out of order), detection of lost events (the /listen endpoint doesn't guarantee delivery), and recovery | ||
* - discards already-applied mutation events received while fetching the initial document snapshot | ||
* @param options | ||
*/ | ||
export declare function createDocumentEventListener(options: { | ||
loadDocument: DocumentLoader | ||
listenerEvents: Observable< | ||
WelcomeEvent | ListenerMutationEvent | ReconnectEvent | ||
> | ||
}): <Doc extends SanityDocumentBase>( | ||
documentId: string, | ||
) => Observable<ListenerEvent> | ||
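A usage sketch of the document event listener, combining it with `createSharedListener` and `createDocumentLoader` (both declared further down). It assumes a configured `@sanity/client` instance; the project settings are placeholders.

import {createClient} from '@sanity/client'
import {
  createDocumentEventListener,
  createDocumentLoader,
  createSharedListener,
} from '@sanity/mutate/_unstable_store'

const client = createClient({
  projectId: '<project-id>',
  dataset: 'production',
  apiVersion: '2024-01-01',
  useCdn: false,
})

const listenDocument = createDocumentEventListener({
  loadDocument: createDocumentLoader({client}),
  listenerEvents: createSharedListener({client}),
})

// The first emission is a sync event carrying the current snapshot;
// ordered mutation and reconnect events follow.
listenDocument('my-document-id').subscribe(event => console.log(event.type))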
export declare function createContentLakeStore( | ||
backend: StoreBackend, | ||
): ContentLakeStore | ||
/** | ||
* Creates a "dataloader" style document loader that fetches from the /doc endpoint | ||
* @param options | ||
*/ | ||
export declare function createDocumentLoader(options: { | ||
client: SanityClient | ||
tag?: string | ||
durationSelector?: () => Observable<unknown> | ||
}): (key: string) => Observable<DocumentResult> | ||
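Because the loader is "dataloader"-style, subscriptions arriving within the same batching window (one async tick by default, per `defaultDurationSelector` in the implementation below) are coalesced into a single /doc request. A sketch reusing `client` from the previous example:

const loadDocument = createDocumentLoader({client, tag: 'my-app'})

// These two lookups are fetched together in one /doc request.
loadDocument('doc-a').subscribe(result => {
  if (result.accessible) {
    console.log(result.document._id)
  } else {
    console.log(result.reason) // 'existence' | 'permission'
  }
})
loadDocument('doc-b').subscribe(console.log)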
/** | ||
* Creates a function that can be used to listen for document snapshots | ||
* Emits the latest snapshot of the document along with the latest event | ||
* @param options | ||
*/ | ||
export declare function createDocumentUpdateListener(options: { | ||
listenDocumentEvents: (documentId: string) => Observable<ListenerEvent> | ||
}): <Doc extends SanityDocumentBase>( | ||
documentId: string, | ||
) => Observable<DocumentUpdate<Doc>> | ||
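Composed with the event listener from the earlier sketch, this yields a stream of `DocumentUpdate` values whose `snapshot` is kept current by applying each mutation event's mendoza effects:

const listenDocumentUpdates = createDocumentUpdateListener({
  listenDocumentEvents: listenDocument, // from the createDocumentEventListener sketch
})

listenDocumentUpdates('my-document-id').subscribe(update => {
  // snapshot is undefined while the document does not exist
  console.log(update.documentId, update.snapshot?._rev, update.event.type)
})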
export declare function createIdSetListener(client: SanityClient): ( | ||
queryFilter: string, | ||
params?: QueryParams, | ||
options?: { | ||
tag?: string | ||
}, | ||
) => Observable<DocumentIdSetEvent> | ||
export declare type CreateIfNotExistsMutation<Doc extends SanityDocumentBase> = | ||
@@ -109,2 +111,10 @@ { | ||
/** | ||
* Creates a local dataset that allows subscribing to documents by id and submitting mutations to be optimistically applied | ||
* @param backend | ||
*/ | ||
export declare function createOptimisticStore( | ||
backend: LocalDatasetBackend, | ||
): LocalDataset | ||
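A sketch of wiring the optimistic store, reusing `listenDocument` from the earlier sketch. It assumes the package-root exports `at`, `patch`, `set`, and `SanityEncoder`; the `submit` body is illustrative, not the canonical backend:

import {at, patch, set, SanityEncoder} from '@sanity/mutate'
import {concatMap, from} from 'rxjs'

const store = createOptimisticStore({
  listen: listenDocument,
  submit: transactions =>
    from(transactions).pipe(
      // Post each transaction through the client; the transaction id
      // keeps retries idempotent.
      concatMap(({id, mutations}) =>
        client.observable.mutate(SanityEncoder.encodeAll(mutations), {
          transactionId: id,
        }),
      ),
    ),
})

// The edit is applied locally right away (visible via store.listen(...))...
store.mutate([patch('my-document-id', [at('title', set('Hello world'))])])
// ...and flushed to the server on submit.
await store.submit()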
export declare type CreateOrReplaceMutation<Doc extends SanityDocumentBase> = { | ||
@@ -115,8 +125,20 @@ type: 'createOrReplace' | ||
export declare type Dataset<Doc extends SanityDocumentBase> = { | ||
get(id: string): Doc | undefined | ||
set(id: string, doc: Doc | undefined): void | ||
delete(id: string): void | ||
} | ||
/** | ||
* @param listenDocumentUpdates – a function that takes a document id and returns an observable of document snapshots | ||
* @param options | ||
*/ | ||
export declare function createReadOnlyStore( | ||
listenDocumentUpdates: DocumentUpdateListener<SanityDocumentBase>, | ||
options?: { | ||
shutdownDelay?: number | ||
}, | ||
): ReadOnlyDocumentStore | ||
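A read-only counterpart, reusing `listenDocumentUpdates` from the sketch above. `shutdownDelay` (milliseconds) keeps each shared per-document subscription alive after its last subscriber leaves, so a quick resubscribe reuses the cached connection instead of re-syncing:

const readOnlyStore = createReadOnlyStore(listenDocumentUpdates, {
  shutdownDelay: 1000,
})

// listenDocuments pairs each id in the tuple with its latest update.
readOnlyStore.listenDocuments(['doc-a', 'doc-b']).subscribe(([a, b]) => {
  console.log(a.snapshot?._rev, b.snapshot?._rev)
})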
/** | ||
* Creates a (low-level) shared listener that immediately emits 'welcome' to every new subscriber, and thereafter emits every listener event: welcome, mutation, and reconnect | ||
*/ | ||
export declare function createSharedListener( | ||
options: ListenerOptions, | ||
): Observable<WelcomeEvent | ListenerMutationEvent | ReconnectEvent> | ||
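Its options mirror `ListenerOptions` below; a sketch with each knob spelled out, reusing `client` from the earlier example:

const listenerEvents = createSharedListener({
  client,
  includeSystemDocuments: false,
  includeMutations: true, // required if downstream consumers read event.mutations
  shutdownDelay: 1000, // keep the connection open 1s after the last unsubscribe
  tag: 'my-app',
})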
export declare type DecOp<Amount extends number> = { | ||
@@ -149,2 +171,46 @@ type: 'dec' | ||
export declare type DocumentIdSetEvent = | ||
| { | ||
type: 'sync' | ||
documentIds: string[] | ||
} | ||
| { | ||
type: 'reconnect' | ||
} | ||
| { | ||
type: 'op' | ||
op: 'add' | 'remove' | ||
documentId: string | ||
} | ||
export declare type DocumentIdSetState = { | ||
status: 'connecting' | 'reconnecting' | 'connected' | ||
event: DocumentIdSetEvent | InitialEvent | ||
snapshot: string[] | ||
} | ||
export declare type DocumentLoader = ( | ||
documentIds: string, | ||
) => Observable<DocumentResult> | ||
export declare type DocumentMap<Doc extends SanityDocumentBase> = { | ||
get(id: string): Doc | undefined | ||
set(id: string, doc: Doc | undefined): void | ||
delete(id: string): void | ||
} | ||
export declare type DocumentResult = | ||
| AccessibleDocumentResult | ||
| InaccessibleDocumentResult | ||
export declare interface DocumentUpdate<Doc extends SanityDocumentBase> { | ||
documentId: string | ||
snapshot: Doc | undefined | ||
event: ListenerEvent<Doc> | ||
} | ||
export declare type DocumentUpdateListener<Doc extends SanityDocumentBase> = ( | ||
id: string, | ||
) => Observable<DocumentUpdate<Doc>> | ||
export declare type ElementType<T extends AnyArray> = | ||
@@ -211,2 +277,10 @@ T extends AnyArray<infer E> ? E : unknown | ||
export declare interface InaccessibleDocumentResult { | ||
accessible: false | ||
id: string | ||
reason: InaccessibleReason | ||
} | ||
export declare type InaccessibleReason = 'existence' | 'permission' | ||
export declare type IncOp<Amount extends number> = { | ||
@@ -219,2 +293,6 @@ type: 'inc' | ||
export declare type InitialEvent = { | ||
type: 'connect' | ||
} | ||
export declare type Insert = { | ||
@@ -227,2 +305,4 @@ before?: string | ||
export declare type InsertMethod = 'sorted' | 'prepend' | 'append' | ||
export declare type InsertOp< | ||
@@ -268,11 +348,45 @@ Items extends AnyArray, | ||
export declare type ListenerEvent< | ||
Doc extends SanityDocumentBase = SanityDocumentBase, | ||
> = ListenerSyncEvent<Doc> | ListenerMutationEvent | ListenerReconnectEvent | ||
export declare interface ListenerMutationEvent { | ||
type: 'mutation' | ||
documentId: string | ||
transactionId: string | ||
resultRev: string | ||
previousRev: string | ||
effects: Required<SanityMutationEvent>['effects'] | ||
mutations: Required<SanityMutationEvent['mutations']> | ||
effects: { | ||
apply: RawPatch | ||
} | ||
mutations: SanityMutation[] | ||
transition: 'update' | 'appear' | 'disappear' | ||
} | ||
export declare interface ListenerOptions { | ||
client: SanityClient | ||
/** | ||
* Provide a custom filter to the listener. By default, this listener will include all events | ||
* Note: make sure the filter includes events from documents you will subscribe to. | ||
*/ | ||
filter?: string | ||
/** | ||
* Whether to include system documents or not | ||
* This will be ignored if a custom filter is provided | ||
*/ | ||
includeSystemDocuments?: boolean | ||
/** | ||
* How long to keep the connection open after the last subscriber unsubscribes | ||
*/ | ||
shutdownDelay?: number | ||
/** | ||
* Include mutations in listener events | ||
*/ | ||
includeMutations?: boolean | ||
/** | ||
* Request tag | ||
*/ | ||
tag?: string | ||
} | ||
export declare interface ListenerReconnectEvent { | ||
@@ -282,8 +396,75 @@ type: 'reconnect' | ||
export declare interface ListenerSyncEvent { | ||
export declare interface ListenerSyncEvent< | ||
Doc extends SanityDocumentBase = SanityDocumentBase, | ||
> { | ||
type: 'sync' | ||
transactionId?: string | ||
document: SanityDocumentBase | undefined | ||
document: Doc | undefined | ||
} | ||
export declare interface LocalDataset { | ||
meta: { | ||
/** | ||
* A stream of events for anything that happens in the store | ||
*/ | ||
events: Observable<OptimisticDocumentEvent | RemoteDocumentEvent> | ||
/** | ||
* A stream of current staged changes | ||
*/ | ||
stage: Observable<MutationGroup[]> | ||
/** | ||
* A stream of current conflicts. TODO: Needs more work | ||
*/ | ||
conflicts: Observable<Conflict[]> | ||
} | ||
/** | ||
* Applies the given mutations. Mutations are not guaranteed to be submitted in the same transaction | ||
* This can mutate both local and remote documents at the same time | ||
*/ | ||
mutate(mutation: Mutation[]): MutationResult | ||
/** | ||
* Makes sure the given mutations are posted in a single transaction | ||
*/ | ||
transaction( | ||
transaction: | ||
| { | ||
id?: string | ||
mutations: Mutation[] | ||
} | ||
| Mutation[], | ||
): MutationResult | ||
/** | ||
* Check out a document for editing. This is required to be able to see optimistic changes | ||
*/ | ||
listen(id: string): Observable<SanityDocumentBase | undefined> | ||
/** | ||
* Listen for events for a given document id | ||
*/ | ||
listenEvents( | ||
id: string, | ||
): Observable<RemoteDocumentEvent | OptimisticDocumentEvent> | ||
/** | ||
* Optimize list of pending mutations | ||
*/ | ||
optimize(): void | ||
/** | ||
* Submit pending mutations | ||
*/ | ||
submit(): Promise<SubmitResult[]> | ||
} | ||
export declare interface LocalDatasetBackend { | ||
/** | ||
* Sets up a subscription to a document | ||
* The first event should either be a sync event or an error event. | ||
* After that, it should emit mutation events, error events or sync events | ||
* @param id | ||
*/ | ||
listen: (id: string) => Observable<ListenerEvent> | ||
submit: (mutationGroups: Transaction[]) => Observable<SubmitResult> | ||
} | ||
export declare type MapTuple<T, U> = { | ||
[K in keyof T]: U | ||
} | ||
export declare type Merge<R extends Result<any, any>, E> = R[0] extends null | ||
@@ -469,6 +650,14 @@ ? Ok<R[1] & E> | ||
export declare type RawOperation = any | ||
export declare interface ReadOnlyDocumentStore { | ||
listenDocument: <Doc extends SanityDocumentBase>( | ||
id: string, | ||
) => Observable<DocumentUpdate<Doc>> | ||
listenDocuments: < | ||
Doc extends SanityDocumentBase, | ||
const IdTuple extends string[], | ||
>( | ||
id: IdTuple, | ||
) => Observable<MapTuple<IdTuple, DocumentUpdate<Doc>>> | ||
} | ||
export declare type RawPatch = RawOperation[] | ||
export declare type RelativePosition = 'before' | 'after' | ||
@@ -478,7 +667,2 @@ | ||
export declare type RemoteListenerEvent = | ||
| ListenerSyncEvent | ||
| ListenerMutationEvent | ||
| ListenerReconnectEvent | ||
export declare interface RemoteMutationEvent { | ||
@@ -552,2 +736,9 @@ type: 'mutation' | ||
export declare type SanityDecPatch = { | ||
id: string | ||
dec: { | ||
[path: string]: number | ||
} | ||
} | ||
export declare type SanityDeleteMutation = { | ||
@@ -566,3 +757,3 @@ delete: { | ||
export declare type SanityDocumentBase = { | ||
export declare interface SanityDocumentBase { | ||
_id?: string | ||
@@ -575,2 +766,9 @@ _type: string | ||
export declare type SanityIncPatch = { | ||
id: string | ||
inc: { | ||
[path: string]: number | ||
} | ||
} | ||
export declare type SanityInsertPatch = { | ||
@@ -590,20 +788,10 @@ id: string | ||
export declare type SanityMutationEvent = { | ||
type: 'mutation' | ||
documentId: string | ||
eventId: string | ||
identity: string | ||
mutations: SanityMutation[] | ||
previousRev?: string | ||
resultRev?: string | ||
result?: SanityDocumentBase | ||
previous?: SanityDocumentBase | null | ||
effects?: { | ||
apply: RawPatch | ||
} | ||
timestamp: string | ||
transactionId: string | ||
transition: 'update' | 'appear' | 'disappear' | ||
visibility: 'query' | 'transaction' | ||
} | ||
export declare type SanityPatch = | ||
| SanitySetPatch | ||
| SanityUnsetPatch | ||
| SanityInsertPatch | ||
| SanitySetIfMissingPatch | ||
| SanityDiffMatchPatch | ||
| SanityIncPatch | ||
| SanityDecPatch | ||
@@ -665,13 +853,2 @@ export declare type SanityPatchMutation = { | ||
export declare interface StoreBackend { | ||
/** | ||
* Sets up a subscription to a document | ||
* The first event should either be a sync event or an error event. | ||
* After that, it should emit mutation events, error events or sync events | ||
* @param id | ||
*/ | ||
observe: (id: string) => Observable<RemoteListenerEvent> | ||
submit: (mutationGroups: Transaction[]) => Observable<SubmitResult> | ||
} | ||
export declare function stringify(pathArray: Path): string | ||
@@ -705,2 +882,7 @@ | ||
/** Converts a stream of id set listener events into a state containing the list of document ids */ | ||
export declare function toState(options?: { | ||
insert?: InsertMethod | ||
}): (input$: Observable<DocumentIdSetEvent>) => Observable<DocumentIdSetState> | ||
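Piped after `createIdSetListener` (declared above), `toState` folds the raw event stream into a connection status plus an id snapshot, inserting new ids according to the chosen strategy. A sketch reusing `client`:

const listenIds = createIdSetListener(client)

listenIds('_type == "post"', {}, {tag: 'posts'})
  .pipe(toState({insert: 'sorted'}))
  .subscribe(state => {
    // status: 'connecting' | 'reconnecting' | 'connected'
    console.log(state.status, state.snapshot.length)
  })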
export declare interface Transaction { | ||
@@ -707,0 +889,0 @@ id?: string |
@@ -1,5 +0,14 @@ | ||
import { finalize, share, ReplaySubject, timer, Subject, merge, map, EMPTY, lastValueFrom, toArray, defer, filter, mergeMap, of, tap } from "rxjs"; | ||
import { finalize, share, ReplaySubject, timer, Subject, merge, map, EMPTY, lastValueFrom, toArray, defer, filter, mergeMap, of, tap, combineLatest, switchMap, concat, throwError, concatMap, catchError, BehaviorSubject, bufferWhen, takeUntil, Observable, takeWhile, scheduled, asyncScheduler } from "rxjs"; | ||
import { decodeAll } from "./_chunks-es/decode.js"; | ||
import { applyPatch } from "mendoza"; | ||
import { getMutationDocumentId, applyMutations, commit, squashMutationGroups, toTransactions, squashDMPStrings, rebase } from "./_chunks-es/toTransactions.js"; | ||
import { nanoid } from "nanoid"; | ||
import { assignId, hasId, applyPatchMutation, applyNodePatch, applyPatches } from "./_chunks-es/utils.js"; | ||
import { stringifyPatches, makePatches } from "@sanity/diff-match-patch"; | ||
import { getAtPath } from "./_chunks-es/getAtPath.js"; | ||
import { startsWith, stringify } from "./_chunks-es/stringify.js"; | ||
import groupBy from "lodash/groupBy.js"; | ||
import lodashPartition from "lodash/partition.js"; | ||
import { scan, mergeMap as mergeMap$1, map as map$1, filter as filter$1 } from "rxjs/operators"; | ||
import keyBy from "lodash/keyBy.js"; | ||
import sortedIndex from "lodash/sortedIndex.js"; | ||
function omitRev(document) { | ||
@@ -11,3 +20,3 @@ if (document === void 0) | ||
} | ||
function applyMendozaPatch(document, patch, patchBaseRev) { | ||
function applyMendozaPatch(document, patch2, patchBaseRev) { | ||
if (patchBaseRev !== document?._rev) | ||
@@ -17,3 +26,3 @@ throw new Error( | ||
); | ||
const next = applyPatch(omitRev(document), patch); | ||
const next = applyPatch(omitRev(document), patch2); | ||
return next === null ? void 0 : next; | ||
@@ -33,3 +42,102 @@ } | ||
} | ||
function createDataset() { | ||
function getMutationDocumentId(mutation) { | ||
if (mutation.type === "patch") | ||
return mutation.id; | ||
if (mutation.type === "create") | ||
return mutation.document._id; | ||
if (mutation.type === "delete") | ||
return mutation.id; | ||
if (mutation.type === "createIfNotExists" || mutation.type === "createOrReplace") | ||
return mutation.document._id; | ||
throw new Error("Invalid mutation type"); | ||
} | ||
function applyAll(current, mutation) { | ||
return mutation.reduce((doc, m) => { | ||
const res = applyDocumentMutation(doc, m); | ||
if (res.status === "error") | ||
throw new Error(res.message); | ||
return res.status === "noop" ? doc : res.after; | ||
}, current); | ||
} | ||
function applyDocumentMutation(document, mutation) { | ||
if (mutation.type === "create") | ||
return create(document, mutation); | ||
if (mutation.type === "createIfNotExists") | ||
return createIfNotExists(document, mutation); | ||
if (mutation.type === "delete") | ||
return del(document, mutation); | ||
if (mutation.type === "createOrReplace") | ||
return createOrReplace(document, mutation); | ||
if (mutation.type === "patch") | ||
return patch(document, mutation); | ||
throw new Error(`Invalid mutation type: ${mutation.type}`); | ||
} | ||
function create(document, mutation) { | ||
if (document) | ||
return { status: "error", message: "Document already exist" }; | ||
const result = assignId(mutation.document, nanoid); | ||
return { status: "created", id: result._id, after: result }; | ||
} | ||
function createIfNotExists(document, mutation) { | ||
return hasId(mutation.document) ? document ? { status: "noop" } : { status: "created", id: mutation.document._id, after: mutation.document } : { | ||
status: "error", | ||
message: "Cannot createIfNotExists on document without _id" | ||
}; | ||
} | ||
function createOrReplace(document, mutation) { | ||
return hasId(mutation.document) ? document ? { | ||
status: "updated", | ||
id: mutation.document._id, | ||
before: document, | ||
after: mutation.document | ||
} : { status: "created", id: mutation.document._id, after: mutation.document } : { | ||
status: "error", | ||
message: "Cannot createIfNotExists on document without _id" | ||
}; | ||
} | ||
function del(document, mutation) { | ||
return document ? mutation.id !== document._id ? { status: "error", message: "Delete mutation targeted wrong document" } : { | ||
status: "deleted", | ||
id: mutation.id, | ||
before: document, | ||
after: void 0 | ||
} : { status: "noop" }; | ||
} | ||
function patch(document, mutation) { | ||
if (!document) | ||
return { | ||
status: "error", | ||
message: "Cannot apply patch on nonexistent document" | ||
}; | ||
const next = applyPatchMutation(mutation, document); | ||
return document === next ? { status: "noop" } : { status: "updated", id: mutation.id, before: document, after: next }; | ||
} | ||
function applyMutations(mutations, documentMap) { | ||
const updatedDocs = /* @__PURE__ */ Object.create(null); | ||
for (const mutation of mutations) { | ||
const documentId = getMutationDocumentId(mutation); | ||
if (!documentId) | ||
throw new Error("Unable to get document id from mutation"); | ||
const before = updatedDocs[documentId]?.after || documentMap.get(documentId), res = applyDocumentMutation(before, mutation); | ||
if (res.status === "error") | ||
throw new Error(res.message); | ||
res.status !== "noop" && (res.status === "updated" || res.status === "created" || res.status === "deleted") && (documentId in updatedDocs || (updatedDocs[documentId] = { before, after: void 0, muts: [] }), documentMap.set(documentId, res.after), updatedDocs[documentId].after = res.after); | ||
} | ||
return Object.entries(updatedDocs).map( | ||
// eslint-disable-next-line no-shadow | ||
([id, { before, after, muts }]) => ({ | ||
id, | ||
status: after ? before ? "updated" : "created" : "deleted", | ||
mutations: muts, | ||
before, | ||
after | ||
}) | ||
); | ||
} | ||
function commit(results, documentMap) { | ||
results.forEach((result) => { | ||
(result.status === "created" || result.status === "updated") && documentMap.set(result.id, result.after), result.status === "deleted" && documentMap.delete(result.id); | ||
}); | ||
} | ||
function createDocumentMap() { | ||
const documents = /* @__PURE__ */ new Map(); | ||
@@ -42,2 +150,202 @@ return { | ||
} | ||
function takeUntilRight(arr, predicate, opts) { | ||
const result = []; | ||
for (const item of arr.slice().reverse()) { | ||
if (predicate(item)) | ||
return result; | ||
result.push(item); | ||
} | ||
return result.reverse(); | ||
} | ||
function isEqualPath(p1, p2) { | ||
return stringify(p1) === stringify(p2); | ||
} | ||
function supersedes(later, earlier) { | ||
return (earlier.type === "set" || earlier.type === "unset") && (later.type === "set" || later.type === "unset"); | ||
} | ||
function squashNodePatches(patches) { | ||
return compactSetIfMissingPatches( | ||
compactSetPatches(compactUnsetPatches(patches)) | ||
); | ||
} | ||
function compactUnsetPatches(patches) { | ||
return patches.reduce( | ||
(earlierPatches, laterPatch) => { | ||
if (laterPatch.op.type !== "unset") | ||
return earlierPatches.push(laterPatch), earlierPatches; | ||
const unaffected = earlierPatches.filter( | ||
(earlierPatch) => !startsWith(laterPatch.path, earlierPatch.path) | ||
); | ||
return unaffected.push(laterPatch), unaffected; | ||
}, | ||
[] | ||
); | ||
} | ||
function compactSetPatches(patches) { | ||
return patches.reduceRight( | ||
(laterPatches, earlierPatch) => (laterPatches.find( | ||
(later) => supersedes(later.op, earlierPatch.op) && isEqualPath(later.path, earlierPatch.path) | ||
) || laterPatches.unshift(earlierPatch), laterPatches), | ||
[] | ||
); | ||
} | ||
function compactSetIfMissingPatches(patches) { | ||
return patches.reduce( | ||
(previousPatches, laterPatch) => laterPatch.op.type !== "setIfMissing" ? (previousPatches.push(laterPatch), previousPatches) : (takeUntilRight( | ||
previousPatches, | ||
(patch2) => patch2.op.type === "unset" | ||
).find( | ||
(precedingPatch) => precedingPatch.op.type === "setIfMissing" && isEqualPath(precedingPatch.path, laterPatch.path) | ||
) || previousPatches.push(laterPatch), previousPatches), | ||
[] | ||
); | ||
} | ||
function compactDMPSetPatches(base, patches) { | ||
let edge = base; | ||
return patches.reduce( | ||
(earlierPatches, laterPatch) => { | ||
const before = edge; | ||
if (edge = applyNodePatch(laterPatch, edge), laterPatch.op.type === "set" && typeof laterPatch.op.value == "string") { | ||
const current = getAtPath(laterPatch.path, before); | ||
if (typeof current == "string") { | ||
const replaced = { | ||
...laterPatch, | ||
op: { | ||
type: "diffMatchPatch", | ||
value: stringifyPatches( | ||
makePatches(current, laterPatch.op.value) | ||
) | ||
} | ||
}; | ||
return earlierPatches.flatMap((ep) => isEqualPath(ep.path, laterPatch.path) && ep.op.type === "diffMatchPatch" ? [] : ep).concat(replaced); | ||
} | ||
} | ||
return earlierPatches.push(laterPatch), earlierPatches; | ||
}, | ||
[] | ||
); | ||
} | ||
function squashDMPStrings(remote, mutationGroups) { | ||
return mutationGroups.map((mutationGroup) => ({ | ||
...mutationGroup, | ||
mutations: dmpIfyMutations(remote, mutationGroup.mutations) | ||
})); | ||
} | ||
function dmpIfyMutations(store, mutations) { | ||
return mutations.map((mutation, i) => mutation.type === "patch" ? dmpifyPatchMutation(store.get(mutation.id), mutation) : mutation); | ||
} | ||
function dmpifyPatchMutation(base, mutation) { | ||
return base ? { | ||
...mutation, | ||
patches: compactDMPSetPatches(base, mutation.patches) | ||
} : mutation; | ||
} | ||
function mergeMutationGroups(mutationGroups) { | ||
return chunkWhile(mutationGroups, (group) => !group.transaction).flatMap( | ||
(chunk) => ({ | ||
...chunk[0], | ||
mutations: chunk.flatMap((c) => c.mutations) | ||
}) | ||
); | ||
} | ||
function chunkWhile(arr, predicate) { | ||
const res = []; | ||
let currentChunk = []; | ||
return arr.forEach((item) => { | ||
predicate(item) ? currentChunk.push(item) : (currentChunk.length > 0 && res.push(currentChunk), currentChunk = [], res.push([item])); | ||
}), currentChunk.length > 0 && res.push(currentChunk), res; | ||
} | ||
function squashMutationGroups(staged) { | ||
return mergeMutationGroups(staged).map((transaction) => ({ | ||
...transaction, | ||
mutations: squashMutations(transaction.mutations) | ||
})).map((transaction) => ({ | ||
...transaction, | ||
mutations: transaction.mutations.map((mutation) => mutation.type !== "patch" ? mutation : { | ||
...mutation, | ||
patches: squashNodePatches(mutation.patches) | ||
}) | ||
})); | ||
} | ||
function squashMutations(mutations) { | ||
const byDocument = groupBy(mutations, getMutationDocumentId); | ||
return Object.values(byDocument).flatMap((documentMutations) => squashCreateIfNotExists(squashDelete(documentMutations)).flat().reduce((acc, docMutation) => { | ||
const prev = acc[acc.length - 1]; | ||
return (!prev || prev.type === "patch") && docMutation.type === "patch" ? acc.slice(0, -1).concat({ | ||
...docMutation, | ||
patches: (prev?.patches || []).concat(docMutation.patches) | ||
}) : acc.concat(docMutation); | ||
}, [])); | ||
} | ||
function squashCreateIfNotExists(mutations) { | ||
return mutations.length === 0 ? mutations : mutations.reduce((previousMuts, laterMut) => laterMut.type !== "createIfNotExists" ? (previousMuts.push(laterMut), previousMuts) : (takeUntilRight(previousMuts, (m) => m.type === "delete").find( | ||
(precedingPatch) => precedingPatch.type === "createIfNotExists" | ||
) || previousMuts.push(laterMut), previousMuts), []); | ||
} | ||
function squashDelete(mutations) { | ||
return mutations.length === 0 ? mutations : mutations.reduce((previousMuts, laterMut) => laterMut.type === "delete" ? [laterMut] : (previousMuts.push(laterMut), previousMuts), []); | ||
} | ||
function rebase(documentId, oldBase, newBase, stagedMutations) { | ||
let edge = oldBase; | ||
const dmpified = stagedMutations.map((transaction) => { | ||
const mutations = transaction.mutations.flatMap((mut) => { | ||
if (getMutationDocumentId(mut) !== documentId) | ||
return []; | ||
const before = edge; | ||
return edge = applyAll(edge, [mut]), !before || mut.type !== "patch" ? mut : { | ||
type: "dmpified", | ||
mutation: { | ||
...mut, | ||
// TODO: make compactDMPSetPatches return pairs of dmpified patches with their | ||
// originals; dmpPatches and original are not 1:1 (e.g. some of the originals may not be dmpified) | ||
dmpPatches: compactDMPSetPatches(before, mut.patches), | ||
original: mut.patches | ||
} | ||
}; | ||
}); | ||
return { ...transaction, mutations }; | ||
}); | ||
let newBaseWithDMPForOldBaseApplied = newBase; | ||
return dmpified.map((transaction) => { | ||
const applied = []; | ||
return transaction.mutations.forEach((mut) => { | ||
if (mut.type === "dmpified") | ||
try { | ||
newBaseWithDMPForOldBaseApplied = applyPatches( | ||
mut.mutation.dmpPatches, | ||
newBaseWithDMPForOldBaseApplied | ||
), applied.push(mut); | ||
} catch { | ||
console.warn("Failed to apply dmp patch, falling back to original"); | ||
try { | ||
newBaseWithDMPForOldBaseApplied = applyPatches( | ||
mut.mutation.original, | ||
newBaseWithDMPForOldBaseApplied | ||
), applied.push(mut); | ||
} catch (second) { | ||
throw new Error( | ||
`Failed to apply patch for document "${documentId}": ${second.message}` | ||
); | ||
} | ||
} | ||
else | ||
newBaseWithDMPForOldBaseApplied = applyAll( | ||
newBaseWithDMPForOldBaseApplied, | ||
[mut] | ||
); | ||
}); | ||
}), [stagedMutations.map((transaction) => ({ | ||
...transaction, | ||
mutations: transaction.mutations.map((mut) => mut.type !== "patch" || getMutationDocumentId(mut) !== documentId ? mut : { | ||
...mut, | ||
patches: mut.patches.map((patch2) => patch2.op.type !== "set" ? patch2 : { | ||
...patch2, | ||
op: { | ||
...patch2.op, | ||
value: getAtPath(patch2.path, newBaseWithDMPForOldBaseApplied) | ||
} | ||
}) | ||
}) | ||
})), newBaseWithDMPForOldBaseApplied]; | ||
} | ||
function createReplayMemoizer(expiry) { | ||
@@ -68,9 +376,9 @@ const memo = /* @__PURE__ */ Object.create(null); | ||
new Error( | ||
"No mutation received from backend. The listener is likely set up with `excludeMutations: true`. If your app need to now about mutations, make sure the listener is set up to include mutations" | ||
"No mutation received from backend. The listener is likely set up with `excludeMutations: true`. If your app need to know about mutations, make sure the listener is set up to include mutations" | ||
) | ||
), didEmitMutationsAccessWarning = !0); | ||
} | ||
const EMPTY_ARRAY = []; | ||
function createContentLakeStore(backend) { | ||
const local = createDataset(), remote = createDataset(), memoize = createReplayMemoizer(1e3); | ||
const EMPTY_ARRAY$1 = []; | ||
function createOptimisticStore(backend) { | ||
const local = createDocumentMap(), remote = createDocumentMap(), memoize = createReplayMemoizer(1e3); | ||
let stagedChanges = []; | ||
@@ -85,3 +393,3 @@ const remoteEvents$ = new Subject(), localMutations$ = new Subject(), stage$ = new Subject(); | ||
function getRemoteEvents(id) { | ||
return backend.observe(id).pipe( | ||
return backend.listen(id).pipe( | ||
filter( | ||
@@ -126,3 +434,3 @@ (event) => event.type !== "reconnect" | ||
// overwritten below | ||
mutations: EMPTY_ARRAY | ||
mutations: EMPTY_ARRAY$1 | ||
}; | ||
@@ -149,3 +457,3 @@ return event.mutations ? emittedEvent.mutations = decodeAll( | ||
} | ||
function observeEvents(id) { | ||
function listenEvents(id) { | ||
return defer( | ||
@@ -200,4 +508,4 @@ () => memoize(id, merge(getLocalEvents(id), getRemoteEvents(id))) | ||
}, | ||
observeEvents, | ||
observe: (id) => observeEvents(id).pipe( | ||
listenEvents, | ||
listen: (id) => listenEvents(id).pipe( | ||
map( | ||
@@ -223,5 +531,503 @@ (event) => event.type === "optimistic" ? event.after : event.after.local | ||
} | ||
function toTransactions(groups) { | ||
return groups.map((group) => group.transaction && group.id !== void 0 ? { id: group.id, mutations: group.mutations } : { mutations: group.mutations }); | ||
} | ||
function createReadOnlyStore(listenDocumentUpdates, options = {}) { | ||
const cache = /* @__PURE__ */ new Map(), { shutdownDelay } = options; | ||
function listenDocument(id) { | ||
if (cache.has(id)) | ||
return cache.get(id); | ||
const cached = listenDocumentUpdates(id).pipe( | ||
finalize(() => cache.delete(id)), | ||
share({ | ||
resetOnRefCountZero: typeof shutdownDelay == "number" ? () => timer(shutdownDelay) : !0, | ||
connector: () => new ReplaySubject(1) | ||
}) | ||
); | ||
return cache.set(id, cached), cached; | ||
} | ||
return { | ||
listenDocument, | ||
listenDocuments(ids) { | ||
return combineLatest( | ||
ids.map((id) => listenDocument(id)) | ||
); | ||
} | ||
}; | ||
} | ||
class FetchError extends Error { | ||
cause; | ||
constructor(message, extra) { | ||
super(message), this.cause = extra?.cause, this.name = "FetchError"; | ||
} | ||
} | ||
class PermissionDeniedError extends Error { | ||
cause; | ||
constructor(message, extra) { | ||
super(message), this.cause = extra?.cause, this.name = "PermissionDeniedError"; | ||
} | ||
} | ||
class ChannelError extends Error { | ||
constructor(message) { | ||
super(message), this.name = "ChannelError"; | ||
} | ||
} | ||
class DisconnectError extends Error { | ||
constructor(message) { | ||
super(message), this.name = "DisconnectError"; | ||
} | ||
} | ||
class OutOfSyncError extends Error { | ||
/** | ||
* Attach state to the error for debugging/reporting | ||
*/ | ||
state; | ||
constructor(message, state) { | ||
super(message), this.name = "OutOfSyncError", this.state = state; | ||
} | ||
} | ||
class DeadlineExceededError extends OutOfSyncError { | ||
constructor(message, state) { | ||
super(message, state), this.name = "DeadlineExceededError"; | ||
} | ||
} | ||
class MaxBufferExceededError extends OutOfSyncError { | ||
constructor(message, state) { | ||
super(message, state), this.name = "MaxBufferExceededError"; | ||
} | ||
} | ||
function isClientError(e) { | ||
return typeof e != "object" || !e ? !1 : "statusCode" in e && "response" in e; | ||
} | ||
function discardChainTo(chain, revision) { | ||
const revisionIndex = chain.findIndex((event) => event.resultRev === revision); | ||
return split(chain, revisionIndex + 1); | ||
} | ||
function split(array, index) { | ||
return index < 0 ? [[], array] : [array.slice(0, index), array.slice(index)]; | ||
} | ||
function toOrderedChains(events) { | ||
const parents = {}; | ||
return events.forEach((event) => { | ||
parents[event.resultRev] = events.find( | ||
(other) => other.resultRev === event.previousRev | ||
); | ||
}), Object.entries(parents).filter(([, parent]) => !parent).map((orphan) => { | ||
const [headRev] = orphan; | ||
let current = events.find((event) => event.resultRev === headRev); | ||
const sortedList = []; | ||
for (; current; ) | ||
sortedList.push(current), current = events.find((event) => event.previousRev === current?.resultRev); | ||
return sortedList; | ||
}); | ||
} | ||
function partition(array, predicate) { | ||
return lodashPartition(array, predicate); | ||
} | ||
const DEFAULT_MAX_BUFFER_SIZE = 20, DEFAULT_DEADLINE_MS = 3e4, EMPTY_ARRAY = []; | ||
function sequentializeListenerEvents(options) { | ||
const { | ||
resolveChainDeadline = DEFAULT_DEADLINE_MS, | ||
maxBufferSize = DEFAULT_MAX_BUFFER_SIZE, | ||
onDiscard, | ||
onBrokenChain | ||
} = options || {}; | ||
return (input$) => input$.pipe( | ||
scan( | ||
(state, event) => { | ||
if (event.type === "mutation" && !state.base) | ||
throw new Error( | ||
"Invalid state. Cannot create a sequence without a base" | ||
); | ||
if (event.type === "sync") | ||
return { | ||
base: { revision: event.document?._rev }, | ||
buffer: EMPTY_ARRAY, | ||
emitEvents: [event] | ||
}; | ||
if (event.type === "mutation") { | ||
const orderedChains = toOrderedChains( | ||
state.buffer.concat(event) | ||
).map((chain) => { | ||
const [discarded, rest] = discardChainTo( | ||
chain, | ||
state.base.revision | ||
); | ||
return onDiscard && discarded.length > 0 && onDiscard(discarded), rest; | ||
}), [applicableChains, _nextBuffer] = partition( | ||
orderedChains, | ||
(chain) => state.base.revision === chain[0]?.previousRev | ||
), nextBuffer = _nextBuffer.flat(); | ||
if (applicableChains.length > 1) | ||
throw new Error("Expected at most one applicable chain"); | ||
if (applicableChains.length > 0 && applicableChains[0].length > 0) { | ||
const lastMutation = applicableChains[0].at(-1); | ||
return { | ||
base: { revision: ( | ||
// special case: if the mutation deletes the document it technically has no revision, despite | ||
// resultRev pointing at a transaction id. | ||
lastMutation.transition === "disappear" ? void 0 : lastMutation?.resultRev | ||
) }, | ||
emitEvents: applicableChains[0], | ||
buffer: nextBuffer | ||
}; | ||
} | ||
if (nextBuffer.length >= maxBufferSize) | ||
throw new MaxBufferExceededError( | ||
`Too many unchainable mutation events: ${state.buffer.length}`, | ||
state | ||
); | ||
return { | ||
...state, | ||
buffer: nextBuffer, | ||
emitEvents: EMPTY_ARRAY | ||
}; | ||
} | ||
return { ...state, emitEvents: [event] }; | ||
}, | ||
{ | ||
emitEvents: EMPTY_ARRAY, | ||
base: void 0, | ||
buffer: EMPTY_ARRAY | ||
} | ||
), | ||
switchMap((state) => state.buffer.length > 0 ? (onBrokenChain?.(state.buffer), concat( | ||
of(state), | ||
timer(resolveChainDeadline).pipe( | ||
mergeMap$1( | ||
() => throwError(() => new DeadlineExceededError( | ||
`Did not resolve chain within a deadline of ${resolveChainDeadline}ms`, | ||
state | ||
)) | ||
) | ||
) | ||
)) : of(state)), | ||
mergeMap$1((state) => state.emitEvents) | ||
); | ||
} | ||
function createDocumentEventListener(options) { | ||
const { listenerEvents, loadDocument } = options; | ||
return function(documentId) { | ||
return listenerEvents.pipe( | ||
concatMap((event) => event.type === "mutation" ? event.documentId === documentId ? of(event) : EMPTY : event.type === "reconnect" ? of(event) : event.type === "welcome" ? loadDocument(documentId).pipe( | ||
catchError((err) => { | ||
const error = toError(err); | ||
return isClientError(error) ? throwError(() => error) : throwError( | ||
() => new FetchError( | ||
`An unexpected error occurred while fetching document: ${error?.message}`, | ||
{ cause: error } | ||
) | ||
); | ||
}), | ||
map((result) => { | ||
if (result.accessible) | ||
return result.document; | ||
if (result.reason === "permission") | ||
throw new PermissionDeniedError( | ||
`Permission denied. Make sure the current user (or token) has permission to read the document with ID="${documentId}".` | ||
); | ||
}), | ||
map( | ||
(doc) => ({ | ||
type: "sync", | ||
document: doc | ||
}) | ||
) | ||
) : EMPTY), | ||
sequentializeListenerEvents({ | ||
maxBufferSize: 10, | ||
resolveChainDeadline: 1e4 | ||
}) | ||
); | ||
}; | ||
} | ||
function toError(maybeErr) { | ||
return maybeErr instanceof Error ? maybeErr : typeof maybeErr == "object" && maybeErr ? Object.assign(new Error(), maybeErr) : new Error(String(maybeErr)); | ||
} | ||
const defaultDurationSelector = () => scheduled(of(0), asyncScheduler); | ||
function createDataLoader(options) { | ||
const durationSelector = options.durationSelector || defaultDurationSelector, requests$ = new BehaviorSubject(void 0), unsubscribes$ = new Subject(), batchResponses = requests$.pipe( | ||
filter((req) => !!req), | ||
bufferWhen(durationSelector), | ||
map((requests) => requests.filter((request) => !request.cancelled)), | ||
filter((requests) => requests.length > 0), | ||
mergeMap((requests) => { | ||
const keys = requests.map((request) => request.key), responses = options.onLoad(keys).pipe( | ||
takeUntil( | ||
unsubscribes$.pipe( | ||
filter(() => requests.every((request) => request.cancelled)) | ||
) | ||
), | ||
mergeMap( | ||
(batchResult) => requests.map((request, i) => ({ | ||
type: "value", | ||
request, | ||
response: batchResult[i] | ||
})) | ||
) | ||
), responseEnds = requests.map((request) => ({ | ||
request, | ||
type: "complete" | ||
})); | ||
return concat(responses, responseEnds); | ||
}), | ||
share() | ||
); | ||
return (key) => new Observable((subscriber) => { | ||
const mutableRequestState = { key, cancelled: !1 }, emit = defer(() => (requests$.next(mutableRequestState), EMPTY)), subscription = merge( | ||
batchResponses.pipe( | ||
filter((batchResult) => batchResult.request === mutableRequestState), | ||
takeWhile((batchResult) => batchResult.type !== "complete"), | ||
map((batchResult) => batchResult.response) | ||
), | ||
emit | ||
).subscribe(subscriber); | ||
return () => { | ||
mutableRequestState.cancelled = !0, unsubscribes$.next(), subscription.unsubscribe(); | ||
}; | ||
}); | ||
} | ||
function dedupedFetchDocuments(client, ids, options) { | ||
const unique = [...new Set(ids)]; | ||
return fetchDocuments(client, unique, options).pipe( | ||
map((results) => { | ||
const byId = keyBy(results, (result) => result.id); | ||
return ids.map((id) => byId[id]); | ||
}) | ||
); | ||
} | ||
function createDocumentLoader(options) { | ||
const { client, tag } = options; | ||
return createDataLoader({ | ||
onLoad: (ids) => dedupedFetchDocuments(client, ids, { tag }) | ||
}); | ||
} | ||
function fetchDocuments(client, ids, options) { | ||
const requestOptions = { | ||
uri: client.getDataUrl("doc", ids.join(",")), | ||
json: !0, | ||
tag: options?.tag | ||
}; | ||
return client.observable.request(requestOptions).pipe( | ||
map((response) => { | ||
const documents = keyBy(response.documents, (entry) => entry._id), omitted = keyBy(response.omitted, (entry) => entry.id); | ||
return ids.map((id) => { | ||
if (documents[id]) | ||
return { id, accessible: !0, document: documents[id] }; | ||
const omittedEntry = omitted[id]; | ||
return omittedEntry ? omittedEntry.reason === "permission" ? { | ||
id, | ||
accessible: !1, | ||
reason: "permission" | ||
} : { | ||
id, | ||
accessible: !1, | ||
reason: "existence" | ||
} : { id, accessible: !1, reason: "existence" }; | ||
}); | ||
}) | ||
); | ||
} | ||
function listenWithErrors(client, query, params, options) { | ||
return client.listen(query, params, options).pipe( | ||
map((event) => { | ||
if (event.type === "mutation") | ||
return event; | ||
if (event.type === "disconnect") | ||
throw new DisconnectError(`DisconnectError: ${event.reason}`); | ||
if (event.type === "channelError") | ||
throw new ChannelError(`ChannelError: ${event.message}`); | ||
return event; | ||
}) | ||
); | ||
} | ||
const INITIAL_STATE = { | ||
status: "connecting", | ||
event: { type: "connect" }, | ||
snapshot: [] | ||
}; | ||
function createIdSetListener(client) { | ||
return function(queryFilter, params, options = {}) { | ||
const { tag } = options, query = `*[${queryFilter}]._id`; | ||
function fetchFilter() { | ||
return client.observable.fetch(query, params, { | ||
tag: tag ? tag + ".fetch" : void 0 | ||
}).pipe( | ||
map$1((result) => { | ||
if (!Array.isArray(result)) | ||
throw new Error( | ||
`Expected query to return array of documents, but got ${typeof result}` | ||
); | ||
return result; | ||
}) | ||
); | ||
} | ||
return listenWithErrors(client, query, params, { | ||
visibility: "transaction", | ||
events: ["welcome", "mutation", "reconnect"], | ||
includeResult: !1, | ||
includeMutations: !1, | ||
tag: tag ? tag + ".listen" : void 0 | ||
}).pipe( | ||
mergeMap$1((event) => event.type === "welcome" ? fetchFilter().pipe(map$1((result) => ({ type: "sync", result }))) : of(event)), | ||
map$1((event) => { | ||
if (event.type === "mutation") | ||
return event.transition === "update" ? void 0 : event.transition === "appear" ? { | ||
type: "op", | ||
op: "add", | ||
documentId: event.documentId | ||
} : event.transition === "disappear" ? { | ||
type: "op", | ||
op: "remove", | ||
documentId: event.documentId | ||
} : void 0; | ||
if (event.type === "sync") | ||
return { type: "sync", documentIds: event.result }; | ||
if (event.type === "reconnect") | ||
return { type: "reconnect" }; | ||
}), | ||
// ignore undefined | ||
filter$1((ev) => !!ev) | ||
); | ||
}; | ||
} | ||
function toState(options = {}) { | ||
const { insert: insertOption = "sorted" } = options; | ||
return (input$) => input$.pipe( | ||
scan((state, event) => { | ||
if (event.type === "reconnect") | ||
return { | ||
...state, | ||
event, | ||
status: "reconnecting" | ||
}; | ||
if (event.type === "sync") | ||
return { | ||
...state, | ||
event, | ||
status: "connected" | ||
}; | ||
if (event.type === "op") { | ||
if (event.op === "add") | ||
return { | ||
event, | ||
status: "connected", | ||
snapshot: insert(state.snapshot, event.documentId, insertOption) | ||
}; | ||
if (event.op === "remove") | ||
return { | ||
event, | ||
status: "connected", | ||
snapshot: state.snapshot.filter((id) => id !== event.documentId) | ||
}; | ||
throw new Error(`Unexpected operation: ${event.op}`); | ||
} | ||
return state; | ||
}, INITIAL_STATE) | ||
); | ||
} | ||
function insert(array, element, strategy) { | ||
let index; | ||
return strategy === "prepend" ? index = 0 : strategy === "append" ? index = array.length : index = sortedIndex(array, element), array.toSpliced(index, 0, element); | ||
} | ||
function shareReplayLatest(configOrPredicate, config) { | ||
return _shareReplayLatest( | ||
typeof configOrPredicate == "function" ? { predicate: configOrPredicate, ...config } : configOrPredicate | ||
); | ||
} | ||
function _shareReplayLatest(config) { | ||
return (source) => { | ||
let latest, emitted = !1; | ||
const { predicate, ...shareConfig } = config, wrapped = source.pipe( | ||
tap((value) => { | ||
config.predicate(value) && (emitted = !0, latest = value); | ||
}), | ||
finalize(() => { | ||
emitted = !1, latest = void 0; | ||
}), | ||
share(shareConfig) | ||
), emitLatest = new Observable((subscriber) => { | ||
emitted && subscriber.next(latest), subscriber.complete(); | ||
}); | ||
return merge(wrapped, emitLatest); | ||
}; | ||
} | ||
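`shareReplayLatest` is what lets `createSharedListener` (below) hand every late subscriber the most recent 'welcome'/'reconnect' event: it shares the source and synchronously replays the latest value matching the predicate on subscription. An illustrative use of the operator in isolation:

import {Subject} from 'rxjs'

const events$ = new Subject<{type: string}>()
const shared$ = events$.pipe(
  shareReplayLatest({predicate: (e: {type: string}) => e.type === 'welcome'}),
)

shared$.subscribe() // first subscriber connects the source
events$.next({type: 'welcome'})
events$.next({type: 'mutation'})
// A late subscriber immediately receives the replayed 'welcome',
// then any subsequent events.
shared$.subscribe(e => console.log(e.type))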
function createSharedListener(options) { | ||
const { | ||
client, | ||
filter: filter2, | ||
tag, | ||
shutdownDelay, | ||
includeSystemDocuments, | ||
includeMutations | ||
} = options, query = filter2 ? `*[${filter2}]` : includeSystemDocuments ? '*[!(_id in path("_.**"))]' : "*"; | ||
return listenWithErrors( | ||
client, | ||
query, | ||
{}, | ||
{ | ||
events: ["welcome", "mutation", "reconnect"], | ||
includeResult: !1, | ||
includePreviousRevision: !1, | ||
visibility: "transaction", | ||
effectFormat: "mendoza", | ||
...includeMutations ? {} : { includeMutations: !1 }, | ||
tag | ||
} | ||
).pipe( | ||
shareReplayLatest({ | ||
// note: resetOnError and resetOnComplete are both default true | ||
resetOnError: !0, | ||
resetOnComplete: !0, | ||
predicate: (event) => event.type === "welcome" || event.type === "reconnect", | ||
resetOnRefCountZero: typeof shutdownDelay == "number" ? () => timer(shutdownDelay) : !0 | ||
}) | ||
); | ||
} | ||
function createDocumentUpdateListener(options) { | ||
const { listenDocumentEvents } = options; | ||
return function(documentId) { | ||
return listenDocumentEvents(documentId).pipe( | ||
scan( | ||
(prev, event) => { | ||
if (event.type === "sync") | ||
return { | ||
event, | ||
documentId, | ||
snapshot: event.document | ||
}; | ||
if (event.type === "mutation") { | ||
if (prev?.event === void 0) | ||
throw new Error( | ||
"Received a mutation event before sync event. Something is wrong" | ||
); | ||
if (!event.effects.apply) | ||
throw new Error( | ||
"No effects found on listener event. The listener must be set up to use effectFormat=mendoza." | ||
); | ||
return { | ||
event, | ||
documentId, | ||
snapshot: applyMutationEventEffects(prev.snapshot, event) | ||
}; | ||
} | ||
return { documentId, snapshot: prev?.snapshot, event }; | ||
}, | ||
void 0 | ||
), | ||
// ignore seed value | ||
filter((update) => update !== void 0) | ||
); | ||
}; | ||
} | ||
export { | ||
createContentLakeStore, | ||
createDocumentEventListener, | ||
createDocumentLoader, | ||
createDocumentUpdateListener, | ||
createIdSetListener, | ||
createOptimisticStore, | ||
createReadOnlyStore, | ||
createSharedListener, | ||
toState | ||
}; | ||
//# sourceMappingURL=_unstable_store.js.map |
@@ -884,3 +884,3 @@ export declare type AnyArray<T = any> = T[] | readonly T[] | ||
export declare type SanityDocumentBase = { | ||
export declare interface SanityDocumentBase { | ||
_id?: string | ||
@@ -887,0 +887,0 @@ _type: string |
import { parse } from "./_chunks-es/parse.js"; | ||
import { stringify } from "./_chunks-es/stringify.js"; | ||
import { decode as decode$1, decodeAll } from "./_chunks-es/decode.js"; | ||
import { encode as encode$1, encodeAll, encodeMutation as encodeMutation$2, encodeTransaction } from "./_chunks-es/encode.js"; | ||
import { isObject } from "./_chunks-es/isObject.js"; | ||
@@ -148,4 +147,4 @@ import { arrify } from "./_chunks-es/arrify.js"; | ||
} | ||
function encode(mutations) { | ||
return mutations.flatMap((m) => encodeMutation$1(m)); | ||
function encode$1(mutations) { | ||
return mutations.flatMap((m) => encodeMutation$2(m)); | ||
} | ||
@@ -155,3 +154,3 @@ function encodeItemRef$1(ref) { | ||
} | ||
function encodeMutation$1(mutation) { | ||
function encodeMutation$2(mutation) { | ||
if (mutation.type === "create" || mutation.type === "createIfNotExists" || mutation.type === "createOrReplace") | ||
@@ -223,10 +222,101 @@ return [[mutation.type, mutation.document]]; | ||
decode, | ||
encode | ||
}), index = /* @__PURE__ */ Object.freeze({ | ||
encode: encode$1 | ||
}); | ||
function encode(mutation) { | ||
return encodeMutation$1(mutation); | ||
} | ||
function encodeAll(mutations) { | ||
return mutations.flatMap(encode); | ||
} | ||
function encodeTransaction(transaction) { | ||
return { | ||
transactionId: transaction.id, | ||
mutations: encodeAll(transaction.mutations) | ||
}; | ||
} | ||
function encodeMutation$1(mutation) { | ||
if (mutation.type === "create" || mutation.type === "createIfNotExists" || mutation.type === "createOrReplace") | ||
return { [mutation.type]: mutation.document }; | ||
if (mutation.type === "delete") | ||
return { | ||
delete: { id: mutation.id } | ||
}; | ||
const ifRevisionID = mutation.options?.ifRevision; | ||
return mutation.patches.map((patch2) => ({ | ||
patch: { | ||
id: mutation.id, | ||
...ifRevisionID && { ifRevisionID }, | ||
...patchToSanity(patch2) | ||
} | ||
})); | ||
} | ||
function patchToSanity(patch2) { | ||
const { path, op } = patch2; | ||
if (op.type === "unset") | ||
return { unset: [stringify(path)] }; | ||
if (op.type === "insert") | ||
return { | ||
insert: { | ||
[op.position]: stringify([...path, op.referenceItem]), | ||
items: op.items | ||
} | ||
}; | ||
if (op.type === "diffMatchPatch") | ||
return { diffMatchPatch: { [stringify(path)]: op.value } }; | ||
if (op.type === "inc") | ||
return { inc: { [stringify(path)]: op.amount } }; | ||
if (op.type === "dec") | ||
return { dec: { [stringify(path)]: op.amount } }; | ||
if (op.type === "set" || op.type === "setIfMissing") | ||
return { [op.type]: { [stringify(path)]: op.value } }; | ||
if (op.type === "truncate") { | ||
const range = [ | ||
op.startIndex, | ||
typeof op.endIndex == "number" ? op.endIndex : "" | ||
].join(":"); | ||
return { unset: [`${stringify(path)}[${range}]`] }; | ||
} | ||
if (op.type === "upsert") | ||
return { | ||
unset: op.items.map( | ||
(item) => stringify([...path, { _key: item._key }]) | ||
), | ||
insert: { | ||
[op.position]: stringify([...path, op.referenceItem]), | ||
items: op.items | ||
} | ||
}; | ||
if (op.type === "assign") | ||
return { | ||
set: Object.fromEntries( | ||
Object.keys(op.value).map((key) => [ | ||
stringify(path.concat(key)), | ||
op.value[key] | ||
]) | ||
) | ||
}; | ||
if (op.type === "unassign") | ||
return { | ||
unset: op.keys.map((key) => stringify(path.concat(key))) | ||
}; | ||
if (op.type === "replace") | ||
return { | ||
insert: { | ||
replace: stringify(path.concat(op.referenceItem)), | ||
items: op.items | ||
} | ||
}; | ||
if (op.type === "remove") | ||
return { | ||
unset: [stringify(path.concat(op.referenceItem))] | ||
}; | ||
throw new Error(`Unknown operation type ${op.type}`); | ||
} | ||
var index = /* @__PURE__ */ Object.freeze({ | ||
__proto__: null, | ||
decode: decode$1, | ||
decodeAll, | ||
encode: encode$1, | ||
encode, | ||
encodeAll, | ||
encodeMutation: encodeMutation$2, | ||
encodeMutation: encodeMutation$1, | ||
encodeTransaction | ||
@@ -233,0 +323,0 @@ }); |
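`patchToSanity` above maps each high-level operation onto the wire format the Content Lake mutation API accepts. A small round-trip sketch, assuming the package-root `SanityEncoder` export; the expected output in the comment follows the `set` branch above:

import {at, patch, set, SanityEncoder} from '@sanity/mutate'

const encoded = SanityEncoder.encodeAll([
  patch('doc-1', [at('title', set('Hello'))]),
])
// => [{patch: {id: 'doc-1', set: {title: 'Hello'}}}]
console.log(JSON.stringify(encoded))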
{ | ||
"name": "@sanity/mutate", | ||
"version": "0.11.0-canary.3", | ||
"version": "0.11.0", | ||
"description": "Experimental toolkit for working with Sanity mutations in JavaScript & TypeScript", | ||
@@ -35,2 +35,8 @@ "keywords": [ | ||
}, | ||
"./_unstable_store": { | ||
"source": "./src/_unstable_store.ts", | ||
"import": "./dist/_unstable_store.js", | ||
"require": "./dist/_unstable_store.cjs", | ||
"default": "./dist/_unstable_store.js" | ||
}, | ||
"./_unstable_apply": { | ||
@@ -42,20 +48,2 @@ "source": "./src/_unstable_apply.ts", | ||
}, | ||
"./_unstable_machine": { | ||
"source": "./src/_unstable_machine.ts", | ||
"browser": { | ||
"source": "./src/_unstable_machine.ts", | ||
"import": "./dist/_unstable_machine.browser.js", | ||
"require": "./dist/_unstable_machine.browser.cjs" | ||
}, | ||
"worker": "./dist/_unstable_machine.browser.js", | ||
"import": "./dist/_unstable_machine.js", | ||
"require": "./dist/_unstable_machine.cjs", | ||
"default": "./dist/_unstable_machine.js" | ||
}, | ||
"./_unstable_store": { | ||
"source": "./src/_unstable_store.ts", | ||
"import": "./dist/_unstable_store.js", | ||
"require": "./dist/_unstable_store.cjs", | ||
"default": "./dist/_unstable_store.js" | ||
}, | ||
"./package.json": "./package.json" | ||
@@ -74,5 +62,2 @@ }, | ||
], | ||
"_unstable_machine": [ | ||
"./dist/_unstable_machine.d.ts" | ||
], | ||
"_unstable_store": [ | ||
@@ -85,7 +70,9 @@ "./dist/_unstable_store.d.ts" | ||
"dist", | ||
"test" | ||
"test", | ||
"README.md", | ||
"package.json" | ||
], | ||
"browserslist": "extends @sanity/browserslist-config", | ||
"dependencies": { | ||
"@sanity/client": "^6.22.4", | ||
"@sanity/client": "^6.22.3", | ||
"@sanity/diff-match-patch": "^3.1.1", | ||
@@ -95,6 +82,7 @@ "hotscript": "^1.0.13", | ||
"mendoza": "^3.0.7", | ||
"nanoid": "^5.0.7", | ||
"rxjs": "^7.8.1" | ||
}, | ||
"devDependencies": { | ||
"@sanity/pkg-utils": "^6.11.10", | ||
"@sanity/pkg-utils": "^6.11.9", | ||
"@sanity/prettier-config": "^1.0.3", | ||
@@ -115,3 +103,2 @@ "@types/diff-match-patch": "^1.0.36", | ||
"eslint-plugin-unused-imports": "^3.2.0", | ||
"nanoid": "^5.0.8", | ||
"npm-run-all2": "^5.0.2", | ||
@@ -124,10 +111,2 @@ "prettier": "^3.3.3", | ||
}, | ||
"peerDependencies": { | ||
"xstate": "^5.19.0" | ||
}, | ||
"peerDependenciesMeta": { | ||
"xstate": { | ||
"optional": true | ||
} | ||
}, | ||
"engines": { | ||
@@ -153,3 +132,2 @@ "node": ">=18" | ||
"coverage": "vitest run --coverage", | ||
"example:visual-editing": "pnpm --filter example-visual-editing run dev", | ||
"example:web": "pnpm --filter example-web run dev", | ||
@@ -156,0 +134,0 @@ "check": "run-s typecheck pkg:build test", |
+ Added nanoid@^5.0.7
+ Added nanoid@5.0.9 (transitive)
- Removed xstate@5.19.1 (transitive)
Updated @sanity/client@^6.22.3