@toeverything/y-indexeddb
Comparing version 0.7.0-canary.51 to 0.7.0-canary.52
@@ -1,39 +0,192 @@ | ||
import { openDB as D } from "idb"; | ||
import { mergeUpdates as V, Doc as W, applyUpdate as S, encodeStateVector as I, encodeStateAsUpdate as h, UndoManager as z, diffUpdate as J } from "yjs"; | ||
const P = 1, k = "affine-local"; | ||
function j(t) { | ||
t.createObjectStore("workspace", { keyPath: "id" }), t.createObjectStore("milestone", { keyPath: "id" }); | ||
import { openDB as f } from "idb"; | ||
import { applyUpdate as m, encodeStateVector as j, encodeStateVectorFromUpdate as C, encodeStateAsUpdate as A, mergeUpdates as v, diffUpdate as $, Doc as q, UndoManager as I } from "yjs"; | ||
const S = 1, y = "affine-local"; | ||
function k(e) { | ||
e.createObjectStore("workspace", { keyPath: "id" }), e.createObjectStore("milestone", { keyPath: "id" }); | ||
} | ||
let T; | ||
async function K(t) { | ||
return new Promise((s) => { | ||
const c = indexedDB.open(t); | ||
let a = !0; | ||
c.onsuccess = function() { | ||
c.result.close(), a || indexedDB.deleteDatabase(t), s(a); | ||
}, c.onupgradeneeded = function() { | ||
a = !1; | ||
const M = "lazy-provider-self-origin"; | ||
function L(e, o) { | ||
if (e.guid === o) | ||
return e; | ||
for (const n of e.subdocs) { | ||
const t = L(n, o); | ||
if (t) | ||
return t; | ||
} | ||
} | ||
const z = (e, o) => { | ||
let n = !1; | ||
const t = /* @__PURE__ */ new Map(), s = /* @__PURE__ */ new Map(), r = /* @__PURE__ */ new Set(); | ||
let c; | ||
async function p(a) { | ||
const i = a.guid, w = await o.queryDocState(i, { | ||
stateVector: j(a) | ||
}); | ||
t.set(i, []), w && m(a, w, M); | ||
const g = w ? C(w) : void 0; | ||
await o.sendDocUpdate(i, A(a, g)); | ||
} | ||
function u(a) { | ||
const i = /* @__PURE__ */ new Set(); | ||
s.set(a.guid, i); | ||
const w = async (E, D) => { | ||
D !== M && o.sendDocUpdate(a.guid, E).catch(console.error); | ||
}, g = (E) => { | ||
E.loaded.forEach((D) => { | ||
l(D).catch(console.error); | ||
}), E.removed.forEach((D) => { | ||
b(D); | ||
}); | ||
}; | ||
a.on("update", w), a.on("subdocs", g), i.add(() => { | ||
a.off("update", w), a.off("subdocs", g); | ||
}); | ||
} | ||
function d() { | ||
c = o.onDocUpdate?.((a, i) => { | ||
const w = L(e, a); | ||
w ? (m(w, i), t.has(a) && (t.get(a)?.forEach((g) => m(w, g)), t.delete(a))) : (console.warn("idb: doc not found", a), t.set(a, (t.get(a) ?? []).concat(i))); | ||
}); | ||
} | ||
async function l(a) { | ||
r.has(a.guid) || (r.add(a.guid), u(a), await p(a), await Promise.all( | ||
[...a.subdocs].filter((i) => i.shouldLoad).map((i) => l(i)) | ||
)); | ||
} | ||
function b(a) { | ||
r.delete(a.guid); | ||
const i = s.get(a.guid); | ||
i && (i.forEach((w) => w()), s.delete(a.guid)), a.subdocs.forEach(b); | ||
} | ||
function h() { | ||
s.forEach((a) => { | ||
a.forEach((i) => i()); | ||
}), s.clear(), r.clear(); | ||
} | ||
function O() { | ||
n = !0, l(e).catch(console.error), d(); | ||
} | ||
async function T() { | ||
n = !1, h(), c?.(), c = void 0; | ||
} | ||
return { | ||
get connected() { | ||
return n; | ||
}, | ||
passive: !0, | ||
connect: O, | ||
disconnect: T | ||
}; | ||
}, x = (e) => (e.preventDefault(), e.returnValue = "Data is not saved. Are you sure you want to leave?"), F = async (e) => { | ||
window.addEventListener("beforeunload", x, { | ||
capture: !0 | ||
}), await e, window.removeEventListener("beforeunload", x, { | ||
capture: !0 | ||
}); | ||
}; | ||
let B = 500; | ||
function G(e) { | ||
B = e; | ||
} | ||
async function Q(t, s, c = k) { | ||
const H = ({ | ||
dbName: e, | ||
mergeCount: o | ||
}) => { | ||
const n = f(e, S, { | ||
upgrade: k | ||
}); | ||
return { | ||
...{ | ||
queryDocState: async (s, r) => { | ||
try { | ||
const u = await (await n).transaction("workspace", "readonly").objectStore("workspace").get(s); | ||
if (!u) | ||
return !1; | ||
const { updates: d } = u, l = v(d.map(({ update: h }) => h)); | ||
return r?.stateVector ? $(l, r?.stateVector) : l; | ||
} catch (c) { | ||
if (!c.message?.includes("The database connection is closing.")) | ||
throw c; | ||
return !1; | ||
} | ||
}, | ||
sendDocUpdate: async (s, r) => { | ||
try { | ||
const p = (await n).transaction("workspace", "readwrite").objectStore("workspace"), { updates: u } = await p.get(s) ?? { updates: [] }; | ||
let d = [ | ||
...u, | ||
{ timestamp: Date.now(), update: r } | ||
]; | ||
if (o && d.length >= o) { | ||
const l = v(d.map(({ update: b }) => b)); | ||
d = [{ timestamp: Date.now(), update: l }]; | ||
} | ||
await F( | ||
p.put({ | ||
id: s, | ||
updates: d | ||
}) | ||
); | ||
} catch (c) { | ||
if (!c.message?.includes("The database connection is closing.")) | ||
throw c; | ||
} | ||
} | ||
}, | ||
disconnect: () => { | ||
n.then((s) => s.close()).catch(console.error); | ||
}, | ||
cleanup: async () => { | ||
await (await n).clear("workspace"); | ||
} | ||
}; | ||
}, J = (e, o = y) => { | ||
let n = null, t = null; | ||
return { | ||
connect: () => { | ||
n = H({ dbName: o, mergeCount: B }), t = z(e, n), t.connect(); | ||
}, | ||
disconnect: () => { | ||
n?.disconnect(), t?.disconnect(), n = null, t = null; | ||
}, | ||
cleanup: async () => { | ||
await n?.cleanup(); | ||
}, | ||
get connected() { | ||
return t?.connected || !1; | ||
} | ||
}; | ||
}; | ||
let P; | ||
async function N(e) { | ||
return new Promise((o) => { | ||
const n = indexedDB.open(e); | ||
let t = !0; | ||
n.onsuccess = function() { | ||
n.result.close(), t || indexedDB.deleteDatabase(e), o(t); | ||
}, n.onupgradeneeded = function() { | ||
t = !1; | ||
}; | ||
}); | ||
} | ||
async function K(e, o, n = y) { | ||
do | ||
if (!T || localStorage.getItem(`${c}-migration`) !== "true") { | ||
if (!P || localStorage.getItem(`${n}-migration`) !== "true") { | ||
try { | ||
T = await indexedDB.databases(); | ||
P = await indexedDB.databases(); | ||
} catch { | ||
if (await K(s)) { | ||
await D(s, 1).then(async (a) => { | ||
if (!a.objectStoreNames.contains("updates")) | ||
if (await N(o)) { | ||
await f(o, 1).then(async (t) => { | ||
if (!t.objectStoreNames.contains("updates")) | ||
return; | ||
const o = await a.transaction("updates", "readonly").objectStore("updates").getAll(); | ||
if (!Array.isArray(o) || !o.every((f) => f instanceof Uint8Array)) | ||
const r = await t.transaction("updates", "readonly").objectStore("updates").getAll(); | ||
if (!Array.isArray(r) || !r.every((d) => d instanceof Uint8Array)) | ||
return; | ||
const n = V(o), u = t.transaction("workspace", "readwrite").objectStore("workspace"); | ||
await u.get(s) || (console.log("upgrading the database"), await u.put({ | ||
id: s, | ||
const c = v(r), p = e.transaction("workspace", "readwrite").objectStore("workspace"); | ||
await p.get(o) || (console.log("upgrading the database"), await p.put({ | ||
id: o, | ||
updates: [ | ||
{ | ||
timestamp: Date.now(), | ||
update: n | ||
update: c | ||
} | ||
@@ -47,19 +200,19 @@ ] | ||
await Promise.all( | ||
T.map((a) => { | ||
if (a.name && a.version === 1) { | ||
const p = a.name, o = a.version; | ||
return D(p, o).then( | ||
async (n) => { | ||
if (!n.objectStoreNames.contains("updates")) | ||
P.map((t) => { | ||
if (t.name && t.version === 1) { | ||
const s = t.name, r = t.version; | ||
return f(s, r).then( | ||
async (c) => { | ||
if (!c.objectStoreNames.contains("updates")) | ||
return; | ||
const w = await n.transaction("updates", "readonly").objectStore("updates").getAll(); | ||
if (!Array.isArray(w) || !w.every((A) => A instanceof Uint8Array)) | ||
const u = await c.transaction("updates", "readonly").objectStore("updates").getAll(); | ||
if (!Array.isArray(u) || !u.every((h) => h instanceof Uint8Array)) | ||
return; | ||
const f = V(w), g = t.transaction("workspace", "readwrite").objectStore("workspace"); | ||
await g.get(p) || (console.log("upgrading the database"), await g.put({ | ||
id: p, | ||
const d = v(u), l = e.transaction("workspace", "readwrite").objectStore("workspace"); | ||
await l.get(s) || (console.log("upgrading the database"), await l.put({ | ||
id: s, | ||
updates: [ | ||
{ | ||
timestamp: Date.now(), | ||
update: f | ||
update: d | ||
} | ||
@@ -72,3 +225,3 @@ ] | ||
}) | ||
), localStorage.setItem(`${c}-migration`, "true"); | ||
), localStorage.setItem(`${n}-migration`, "true"); | ||
break; | ||
@@ -78,17 +231,17 @@ } | ||
} | ||
async function tt(t, s = k) { | ||
const o = await (await D(s, P, { | ||
upgrade: j | ||
})).transaction("workspace", "readonly").objectStore("workspace").get(t); | ||
return o ? V(o.updates.map(({ update: n }) => n)) : !1; | ||
async function Q(e, o = y) { | ||
const r = await (await f(o, S, { | ||
upgrade: k | ||
})).transaction("workspace", "readonly").objectStore("workspace").get(e); | ||
return r ? v(r.updates.map(({ update: c }) => c)) : !1; | ||
} | ||
async function et(t, s, c = k) { | ||
await (await D(c, P, { | ||
upgrade: j | ||
async function R(e, o, n = y) { | ||
await (await f(n, S, { | ||
upgrade: k | ||
})).transaction("workspace", "readwrite").objectStore("workspace").put({ | ||
id: t, | ||
id: e, | ||
updates: [ | ||
{ | ||
timestamp: Date.now(), | ||
update: s | ||
update: o | ||
} | ||
@@ -98,43 +251,38 @@ ] | ||
} | ||
const y = "indexeddb-provider-origin", M = "snapshot-origin"; | ||
let $ = 500; | ||
const L = (t) => (t.preventDefault(), t.returnValue = "Data is not saved. Are you sure you want to leave?"), U = async (t) => { | ||
window.addEventListener("beforeunload", L, { | ||
const U = "snapshot-origin", V = (e) => (e.preventDefault(), e.returnValue = "Data is not saved. Are you sure you want to leave?"), X = async (e) => { | ||
window.addEventListener("beforeunload", V, { | ||
capture: !0 | ||
}), await t, window.removeEventListener("beforeunload", L, { | ||
}), await e, window.removeEventListener("beforeunload", V, { | ||
capture: !0 | ||
}); | ||
}; | ||
function at(t) { | ||
$ = t; | ||
} | ||
function nt(t, s, c) { | ||
const a = new W(); | ||
S(a, s, M); | ||
const p = I(t), o = I(a), n = h( | ||
t, | ||
o | ||
), u = new z( | ||
[...a.share.keys()].map((f) => { | ||
const g = c(f); | ||
if (g === "Text") | ||
return a.getText(f); | ||
if (g === "Map") | ||
return a.getMap(f); | ||
if (g === "Array") | ||
return a.getArray(f); | ||
function Y(e, o, n) { | ||
const t = new q(); | ||
m(t, o, U); | ||
const s = j(e), r = j(t), c = A( | ||
e, | ||
r | ||
), p = new I( | ||
[...t.share.keys()].map((d) => { | ||
const l = n(d); | ||
if (l === "Text") | ||
return t.getText(d); | ||
if (l === "Map") | ||
return t.getMap(d); | ||
if (l === "Array") | ||
return t.getArray(d); | ||
throw new Error("Unknown type"); | ||
}), | ||
{ | ||
trackedOrigins: /* @__PURE__ */ new Set([M]) | ||
trackedOrigins: /* @__PURE__ */ new Set([U]) | ||
} | ||
); | ||
S(a, n, M), u.undo(); | ||
const w = h( | ||
a, | ||
p | ||
m(t, c, U), p.undo(); | ||
const u = A( | ||
t, | ||
s | ||
); | ||
S(t, w, M); | ||
m(e, u, U); | ||
} | ||
class R extends Error { | ||
class Z extends Error { | ||
constructor() { | ||
@@ -144,3 +292,3 @@ super("Early disconnect"); | ||
} | ||
class X extends Error { | ||
class ee extends Error { | ||
constructor() { | ||
@@ -150,180 +298,34 @@ super("Cleanup when connecting"); | ||
} | ||
const rt = async (t, s, c, a = k) => { | ||
const n = (await D(a, P, { | ||
upgrade: j | ||
})).transaction("milestone", "readwrite").objectStore("milestone"), u = await n.get("id"), w = h(s); | ||
u ? (u.milestone[c] = w, await n.put(u)) : await n.put({ | ||
id: t, | ||
const te = async (e, o, n, t = y) => { | ||
const c = (await f(t, S, { | ||
upgrade: k | ||
})).transaction("milestone", "readwrite").objectStore("milestone"), p = await c.get("id"), u = A(o); | ||
p ? (p.milestone[n] = u, await c.put(p)) : await c.put({ | ||
id: e, | ||
milestone: { | ||
[c]: w | ||
[n]: u | ||
} | ||
}); | ||
}, st = async (t, s = k) => { | ||
const o = await (await D(s, P, { | ||
upgrade: j | ||
})).transaction("milestone", "readonly").objectStore("milestone").get(t); | ||
return o ? o.milestone : null; | ||
}, ot = (t, s = k, c = !0) => { | ||
let a, p, o = !0, n = !1; | ||
const u = D(s, P, { | ||
upgrade: j | ||
}), w = /* @__PURE__ */ new WeakMap(), f = /* @__PURE__ */ new WeakMap(), g = /* @__PURE__ */ new WeakMap(), x = (i, e) => { | ||
if (w.has(e)) | ||
return w.get(e); | ||
const r = async function(l, m) { | ||
const v = await u; | ||
if (!n || m === y) | ||
return; | ||
const E = v.transaction("workspace", "readwrite").objectStore("workspace"); | ||
let d = await E.get(i); | ||
if (d || (d = { | ||
id: i, | ||
updates: [] | ||
}), d.updates.push({ | ||
timestamp: Date.now(), | ||
update: l | ||
}), d.updates.length > $) { | ||
const F = d.updates.map(({ update: H }) => H), C = new W(); | ||
C.transact(() => { | ||
F.forEach((H) => { | ||
S(C, H, y); | ||
}); | ||
}, y); | ||
const N = h(C); | ||
d = { | ||
id: i, | ||
updates: [ | ||
{ | ||
timestamp: Date.now(), | ||
update: N | ||
} | ||
] | ||
}, await U(E.put(d)); | ||
} else | ||
await U(E.put(d)); | ||
}; | ||
return w.set(e, r), r; | ||
}, A = (i, e) => { | ||
if (f.has(e)) | ||
return f.get(e); | ||
const r = async function() { | ||
(await u).close(); | ||
}; | ||
return f.set(e, r), r; | ||
}, _ = (i, e) => { | ||
if (g.has(e)) | ||
return g.get(e); | ||
const r = async function(l) { | ||
l.removed.forEach((m) => { | ||
B(m.guid, m); | ||
}), l.loaded.forEach((m) => { | ||
O(m.guid, m); | ||
}); | ||
}; | ||
return g.set(e, r), r; | ||
}; | ||
function O(i, e) { | ||
e.on("update", x(i, e)), e.on("destroy", A(i, e)), e.on("subdocs", _(i, e)), e.subdocs.forEach((r) => { | ||
O(r.guid, r); | ||
}); | ||
} | ||
function B(i, e) { | ||
e.subdocs.forEach((r) => { | ||
B(r.guid, r); | ||
}), e.off("update", x(i, e)), e.off("destroy", A(i, e)), e.off("subdocs", _(i, e)); | ||
} | ||
async function q(i, e) { | ||
const r = await u, b = r.transaction("workspace", "readwrite").objectStore("workspace"), l = await b.get(i); | ||
if (n) | ||
if (!l) | ||
await U( | ||
r.put("workspace", { | ||
id: i, | ||
updates: [ | ||
{ | ||
timestamp: Date.now(), | ||
update: h(e) | ||
} | ||
] | ||
}) | ||
); | ||
else { | ||
const m = l.updates.map(({ update: d }) => d), v = new W(); | ||
v.transact(() => { | ||
m.forEach((d) => { | ||
S(v, d, y); | ||
}); | ||
}, y); | ||
const E = J( | ||
h(e), | ||
h(v) | ||
); | ||
await U( | ||
b.put({ | ||
...l, | ||
updates: [ | ||
...l.updates, | ||
{ | ||
timestamp: Date.now(), | ||
update: E | ||
} | ||
] | ||
}) | ||
), e.transact(() => { | ||
m.forEach((d) => { | ||
S(e, d, y); | ||
}); | ||
}, y); | ||
} | ||
} | ||
const G = { | ||
connect: async () => { | ||
if (n) | ||
return; | ||
G.whenSynced = new Promise((r, b) => { | ||
o = !0, a = r, p = b; | ||
}), n = !0, O(t.guid, t); | ||
const i = await u; | ||
if (c && await Q(i, t.guid, s), !n) | ||
return; | ||
const e = []; | ||
for (e.push([t.guid, t]); e.length > 0; ) { | ||
const [r, b] = e.pop(); | ||
await q(r, b), b.subdocs.forEach((l) => { | ||
e.push([l.guid, l]); | ||
}); | ||
} | ||
o = !1, a(); | ||
}, | ||
disconnect() { | ||
n = !1, o && p(new R()), B(t.guid, t); | ||
}, | ||
async cleanup() { | ||
if (n) | ||
throw new X(); | ||
await (await u).delete("workspace", t.guid); | ||
}, | ||
whenSynced: Promise.resolve(), | ||
get connected() { | ||
return n; | ||
} | ||
}; | ||
return G; | ||
}, ne = async (e, o = y) => { | ||
const r = await (await f(o, S, { | ||
upgrade: k | ||
})).transaction("milestone", "readonly").objectStore("milestone").get(e); | ||
return r ? r.milestone : null; | ||
}; | ||
export { | ||
X as CleanupWhenConnectingError, | ||
k as DEFAULT_DB_NAME, | ||
R as EarlyDisconnectError, | ||
ot as createIndexedDBProvider, | ||
P as dbVersion, | ||
tt as downloadBinary, | ||
st as getMilestones, | ||
rt as markMilestone, | ||
et as overwriteBinary, | ||
nt as revertUpdate, | ||
at as setMergeCount, | ||
Q as tryMigrate, | ||
j as upgradeDB, | ||
U as writeOperation | ||
ee as CleanupWhenConnectingError, | ||
y as DEFAULT_DB_NAME, | ||
Z as EarlyDisconnectError, | ||
J as createIndexedDBProvider, | ||
S as dbVersion, | ||
Q as downloadBinary, | ||
ne as getMilestones, | ||
te as markMilestone, | ||
R as overwriteBinary, | ||
Y as revertUpdate, | ||
G as setMergeCount, | ||
K as tryMigrate, | ||
k as upgradeDB, | ||
X as writeOperation | ||
}; | ||
//# sourceMappingURL=index.js.map |
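The new bundle ends with the package's public export list. For orientation, the following TypeScript sketch wires those exports together the way a typical consumer might; the workspace guid and the overall flow are illustrative assumptions based on the export names and the type declarations shown further below, not code taken from the package.

```ts
import { applyUpdate, Doc } from 'yjs';
import {
  createIndexedDBProvider,
  downloadBinary,
  setMergeCount,
  DEFAULT_DB_NAME,
} from '@toeverything/y-indexeddb';

async function main() {
  // Hypothetical workspace id; any stable string works as the doc guid.
  const doc = new Doc({ guid: 'my-workspace' });

  // Collapse stored updates into a single record once 500 have accumulated
  // (500 is also the default value visible in the bundle above).
  setMergeCount(500);

  // Attach IndexedDB persistence; connect() starts syncing, disconnect() stops it.
  const provider = createIndexedDBProvider(doc, DEFAULT_DB_NAME);
  provider.connect();

  // Independently of a provider, read back the merged binary for a doc id.
  const binary = await downloadBinary(doc.guid, DEFAULT_DB_NAME);
  if (binary !== false) {
    applyUpdate(doc, binary); // binary is a merged Yjs update (Uint8Array)
  }

  provider.disconnect();
}

main().catch(console.error);
```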
{
"name": "@toeverything/y-indexeddb",
"type": "module",
"version": "0.7.0-canary.51",
"version": "0.7.0-canary.52",
"description": "IndexedDB database adapter for Yjs",
@@ -39,5 +39,6 @@ "repository": "toeverything/AFFiNE",
"devDependencies": {
"@blocksuite/blocks": "0.0.0-20230719163314-76d863fc-nightly",
"@blocksuite/store": "0.0.0-20230719163314-76d863fc-nightly",
"vite": "^4.4.4",
"@affine/y-provider": "workspace:*",
"@blocksuite/blocks": "0.0.0-20230721134812-6e0e3bef-nightly",
"@blocksuite/store": "0.0.0-20230721134812-6e0e3bef-nightly",
"vite": "^4.4.6",
"vite-plugin-dts": "3.3.1",
@@ -44,0 +45,0 @@ "y-indexeddb": "^9.0.11"
import { Doc } from 'yjs';
import type { IndexedDBProvider, WorkspaceMilestone } from './shared';
import type { WorkspaceMilestone } from './shared';
export declare const writeOperation: (op: Promise<unknown>) => Promise<void>;
export declare function setMergeCount(count: number): void;
export declare function revertUpdate(doc: Doc, snapshotUpdate: Uint8Array, getMetadata: (key: string) => 'Text' | 'Map' | 'Array'): void;
@@ -17,5 +16,5 @@ export declare class EarlyDisconnectError extends Error {
*/
export declare const createIndexedDBProvider: (doc: Doc, dbName?: string, migrate?: boolean) => IndexedDBProvider;
export * from './provider';
export * from './shared';
export * from './utils';
//# sourceMappingURL=index.d.ts.map
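Alongside `revertUpdate`, whose signature appears above, the export list also includes the milestone helpers `markMilestone` and `getMilestones`. The sketch below shows how the snapshot flow plausibly fits together; the argument order of the milestone helpers is inferred from the minified bundle, and the milestone name, guid, and `getMetadata` mapping are placeholders.

```ts
import { Doc } from 'yjs';
import {
  getMilestones,
  markMilestone,
  revertUpdate,
} from '@toeverything/y-indexeddb';

async function snapshotAndRestore(doc: Doc) {
  // Record the doc's current state under a named milestone (name is arbitrary).
  await markMilestone(doc.guid, doc, 'before-import');

  // ... the doc is mutated here ...

  // Read stored milestones back; the call resolves to null when none exist.
  const milestones = await getMilestones(doc.guid);
  const snapshot = milestones?.['before-import'];
  if (snapshot) {
    // Roll the live doc back to the snapshot. The callback tells revertUpdate
    // which shared type each top-level key holds; always answering 'Map' here
    // is just a placeholder for a real schema lookup.
    revertUpdate(doc, snapshot, () => 'Map');
  }
}
```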
@@ -9,3 +9,2 @@ import type { DBSchema, IDBPDatabase } from 'idb/build/entry';
cleanup: () => Promise<void>;
whenSynced: Promise<void>;
readonly connected: boolean;
@@ -12,0 +11,0 @@ }
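The hunk above removes `whenSynced` from the provider interface, leaving `connect`, `disconnect`, `cleanup`, and a readonly `connected` flag. A minimal before/after sketch of what that change means for a caller; the wrapper function below is illustrative and not part of the package.

```ts
import type { Doc } from 'yjs';
import { createIndexedDBProvider } from '@toeverything/y-indexeddb';

export function attachPersistence(doc: Doc) {
  const provider = createIndexedDBProvider(doc);

  // 0.7.0-canary.51: callers could `await provider.whenSynced` to know that
  // the initial load had been applied to the doc.
  // 0.7.0-canary.52: the interface no longer exposes whenSynced, so connect()
  // is treated as fire-and-forget and the doc fills in as stored updates are
  // applied in the background.
  provider.connect();

  return () => provider.disconnect();
}
```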
{
"name": "@toeverything/y-indexeddb",
"type": "module",
"version": "0.7.0-canary.51",
"version": "0.7.0-canary.52",
"description": "IndexedDB database adapter for Yjs",
@@ -43,5 +43,6 @@ "repository": "toeverything/AFFiNE",
"devDependencies": {
"@blocksuite/blocks": "0.0.0-20230719163314-76d863fc-nightly",
"@blocksuite/store": "0.0.0-20230719163314-76d863fc-nightly",
"vite": "^4.4.4",
"@affine/y-provider": "0.7.0-canary.52",
"@blocksuite/blocks": "0.0.0-20230721134812-6e0e3bef-nightly",
"@blocksuite/store": "0.0.0-20230721134812-6e0e3bef-nightly",
"vite": "^4.4.6",
"vite-plugin-dts": "3.3.1",
@@ -48,0 +49,0 @@ "y-indexeddb": "^9.0.11"
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package