@toeverything/y-indexeddb
Comparing version 0.7.0-canary.52 to 0.7.0-canary.53
@@ -1,3 +0,3 @@ | ||
import { openDB as f } from "idb"; | ||
import { applyUpdate as m, encodeStateVector as j, encodeStateVectorFromUpdate as C, encodeStateAsUpdate as A, mergeUpdates as v, diffUpdate as $, Doc as q, UndoManager as I } from "yjs"; | ||
import { openDB as b } from "idb"; | ||
import { applyUpdate as m, encodeStateVector as j, encodeStateVectorFromUpdate as $, encodeStateAsUpdate as A, mergeUpdates as v, diffUpdate as q, Doc as I, UndoManager as z } from "yjs"; | ||
const S = 1, y = "affine-local"; | ||
@@ -7,8 +7,7 @@ function k(e) { | ||
} | ||
const M = "lazy-provider-self-origin"; | ||
function L(e, o) { | ||
if (e.guid === o) | ||
return e; | ||
for (const n of e.subdocs) { | ||
const t = L(n, o); | ||
for (const a of e.subdocs) { | ||
const t = L(a, o); | ||
if (t) | ||
@@ -18,66 +17,67 @@ return t; | ||
} | ||
const z = (e, o) => { | ||
let n = !1; | ||
const t = /* @__PURE__ */ new Map(), s = /* @__PURE__ */ new Map(), r = /* @__PURE__ */ new Set(); | ||
let c; | ||
async function p(a) { | ||
const i = a.guid, w = await o.queryDocState(i, { | ||
stateVector: j(a) | ||
const F = (e, o, a = {}) => { | ||
let t = !1; | ||
const s = /* @__PURE__ */ new Map(), r = /* @__PURE__ */ new Map(), c = /* @__PURE__ */ new Set(); | ||
let u; | ||
const { origin: i = "lazy-provider" } = a; | ||
async function p(n) { | ||
const d = n.guid, w = await o.queryDocState(d, { | ||
stateVector: j(n) | ||
}); | ||
t.set(i, []), w && m(a, w, M); | ||
const g = w ? C(w) : void 0; | ||
await o.sendDocUpdate(i, A(a, g)); | ||
s.set(d, []), w && m(n, w, i); | ||
const g = w ? $(w) : void 0; | ||
await o.sendDocUpdate(d, A(n, g)); | ||
} | ||
function u(a) { | ||
const i = /* @__PURE__ */ new Set(); | ||
s.set(a.guid, i); | ||
function l(n) { | ||
const d = /* @__PURE__ */ new Set(); | ||
r.set(n.guid, d); | ||
const w = async (E, D) => { | ||
D !== M && o.sendDocUpdate(a.guid, E).catch(console.error); | ||
i !== D && o.sendDocUpdate(n.guid, E).catch(console.error); | ||
}, g = (E) => { | ||
E.loaded.forEach((D) => { | ||
l(D).catch(console.error); | ||
f(D).catch(console.error); | ||
}), E.removed.forEach((D) => { | ||
b(D); | ||
M(D); | ||
}); | ||
}; | ||
a.on("update", w), a.on("subdocs", g), i.add(() => { | ||
a.off("update", w), a.off("subdocs", g); | ||
n.on("update", w), n.on("subdocs", g), d.add(() => { | ||
n.off("update", w), n.off("subdocs", g); | ||
}); | ||
} | ||
function d() { | ||
c = o.onDocUpdate?.((a, i) => { | ||
const w = L(e, a); | ||
w ? (m(w, i), t.has(a) && (t.get(a)?.forEach((g) => m(w, g)), t.delete(a))) : (console.warn("idb: doc not found", a), t.set(a, (t.get(a) ?? []).concat(i))); | ||
function h() { | ||
u = o.onDocUpdate?.((n, d) => { | ||
const w = L(e, n); | ||
w ? (m(w, d, i), s.has(n) && (s.get(n)?.forEach((g) => m(w, g, i)), s.delete(n))) : (console.warn("idb: doc not found", n), s.set(n, (s.get(n) ?? []).concat(d))); | ||
}); | ||
} | ||
async function l(a) { | ||
r.has(a.guid) || (r.add(a.guid), u(a), await p(a), await Promise.all( | ||
[...a.subdocs].filter((i) => i.shouldLoad).map((i) => l(i)) | ||
async function f(n) { | ||
c.has(n.guid) || (c.add(n.guid), l(n), await p(n), await Promise.all( | ||
[...n.subdocs].filter((d) => d.shouldLoad).map((d) => f(d)) | ||
)); | ||
} | ||
function b(a) { | ||
r.delete(a.guid); | ||
const i = s.get(a.guid); | ||
i && (i.forEach((w) => w()), s.delete(a.guid)), a.subdocs.forEach(b); | ||
function M(n) { | ||
c.delete(n.guid); | ||
const d = r.get(n.guid); | ||
d && (d.forEach((w) => w()), r.delete(n.guid)), n.subdocs.forEach(M); | ||
} | ||
function h() { | ||
s.forEach((a) => { | ||
a.forEach((i) => i()); | ||
}), s.clear(), r.clear(); | ||
function T() { | ||
r.forEach((n) => { | ||
n.forEach((d) => d()); | ||
}), r.clear(), c.clear(); | ||
} | ||
function O() { | ||
n = !0, l(e).catch(console.error), d(); | ||
function C() { | ||
t = !0, f(e).catch(console.error), h(); | ||
} | ||
async function T() { | ||
n = !1, h(), c?.(), c = void 0; | ||
async function O() { | ||
t = !1, T(), u?.(), u = void 0; | ||
} | ||
return { | ||
get connected() { | ||
return n; | ||
return t; | ||
}, | ||
passive: !0, | ||
connect: O, | ||
disconnect: T | ||
connect: C, | ||
disconnect: O | ||
}; | ||
}, x = (e) => (e.preventDefault(), e.returnValue = "Data is not saved. Are you sure you want to leave?"), F = async (e) => { | ||
}, x = (e) => (e.preventDefault(), e.returnValue = "Data is not saved. Are you sure you want to leave?"), H = async (e) => { | ||
window.addEventListener("beforeunload", x, { | ||
@@ -90,10 +90,10 @@ capture: !0 | ||
let B = 500; | ||
function G(e) { | ||
function J(e) { | ||
B = e; | ||
} | ||
const H = ({ | ||
const N = ({ | ||
dbName: e, | ||
mergeCount: o | ||
}) => { | ||
const n = f(e, S, { | ||
const a = b(e, S, { | ||
upgrade: k | ||
@@ -105,7 +105,7 @@ }); | ||
try { | ||
const u = await (await n).transaction("workspace", "readonly").objectStore("workspace").get(s); | ||
if (!u) | ||
const i = await (await a).transaction("workspace", "readonly").objectStore("workspace").get(s); | ||
if (!i) | ||
return !1; | ||
const { updates: d } = u, l = v(d.map(({ update: h }) => h)); | ||
return r?.stateVector ? $(l, r?.stateVector) : l; | ||
const { updates: p } = i, l = v(p.map(({ update: f }) => f)); | ||
return r?.stateVector ? q(l, r?.stateVector) : l; | ||
} catch (c) { | ||
@@ -119,15 +119,15 @@ if (!c.message?.includes("The database connection is closing.")) | ||
try { | ||
const p = (await n).transaction("workspace", "readwrite").objectStore("workspace"), { updates: u } = await p.get(s) ?? { updates: [] }; | ||
let d = [ | ||
...u, | ||
const u = (await a).transaction("workspace", "readwrite").objectStore("workspace"), { updates: i } = await u.get(s) ?? { updates: [] }; | ||
let p = [ | ||
...i, | ||
{ timestamp: Date.now(), update: r } | ||
]; | ||
if (o && d.length >= o) { | ||
const l = v(d.map(({ update: b }) => b)); | ||
d = [{ timestamp: Date.now(), update: l }]; | ||
if (o && p.length >= o) { | ||
const l = v(p.map(({ update: h }) => h)); | ||
p = [{ timestamp: Date.now(), update: l }]; | ||
} | ||
await F( | ||
p.put({ | ||
await H( | ||
u.put({ | ||
id: s, | ||
updates: d | ||
updates: p | ||
}) | ||
@@ -142,19 +142,19 @@ ); | ||
disconnect: () => { | ||
n.then((s) => s.close()).catch(console.error); | ||
a.then((s) => s.close()).catch(console.error); | ||
}, | ||
cleanup: async () => { | ||
await (await n).clear("workspace"); | ||
await (await a).clear("workspace"); | ||
} | ||
}; | ||
}, J = (e, o = y) => { | ||
let n = null, t = null; | ||
}, K = (e, o = y) => { | ||
let a = null, t = null; | ||
return { | ||
connect: () => { | ||
n = H({ dbName: o, mergeCount: B }), t = z(e, n), t.connect(); | ||
a = N({ dbName: o, mergeCount: B }), t = F(e, a, { origin: "idb" }), t.connect(); | ||
}, | ||
disconnect: () => { | ||
n?.disconnect(), t?.disconnect(), n = null, t = null; | ||
a?.disconnect(), t?.disconnect(), a = null, t = null; | ||
}, | ||
cleanup: async () => { | ||
await n?.cleanup(); | ||
await a?.cleanup(); | ||
}, | ||
@@ -167,9 +167,9 @@ get connected() { | ||
let P; | ||
async function N(e) { | ||
async function _(e) { | ||
return new Promise((o) => { | ||
const n = indexedDB.open(e); | ||
const a = indexedDB.open(e); | ||
let t = !0; | ||
n.onsuccess = function() { | ||
n.result.close(), t || indexedDB.deleteDatabase(e), o(t); | ||
}, n.onupgradeneeded = function() { | ||
a.onsuccess = function() { | ||
a.result.close(), t || indexedDB.deleteDatabase(e), o(t); | ||
}, a.onupgradeneeded = function() { | ||
t = !1; | ||
@@ -179,17 +179,17 @@ }; | ||
} | ||
async function K(e, o, n = y) { | ||
async function Q(e, o, a = y) { | ||
do | ||
if (!P || localStorage.getItem(`${n}-migration`) !== "true") { | ||
if (!P || localStorage.getItem(`${a}-migration`) !== "true") { | ||
try { | ||
P = await indexedDB.databases(); | ||
} catch { | ||
if (await N(o)) { | ||
await f(o, 1).then(async (t) => { | ||
if (await _(o)) { | ||
await b(o, 1).then(async (t) => { | ||
if (!t.objectStoreNames.contains("updates")) | ||
return; | ||
const r = await t.transaction("updates", "readonly").objectStore("updates").getAll(); | ||
if (!Array.isArray(r) || !r.every((d) => d instanceof Uint8Array)) | ||
if (!Array.isArray(r) || !r.every((p) => p instanceof Uint8Array)) | ||
return; | ||
const c = v(r), p = e.transaction("workspace", "readwrite").objectStore("workspace"); | ||
await p.get(o) || (console.log("upgrading the database"), await p.put({ | ||
const c = v(r), u = e.transaction("workspace", "readwrite").objectStore("workspace"); | ||
await u.get(o) || (console.log("upgrading the database"), await u.put({ | ||
id: o, | ||
@@ -211,10 +211,10 @@ updates: [ | ||
const s = t.name, r = t.version; | ||
return f(s, r).then( | ||
return b(s, r).then( | ||
async (c) => { | ||
if (!c.objectStoreNames.contains("updates")) | ||
return; | ||
const u = await c.transaction("updates", "readonly").objectStore("updates").getAll(); | ||
if (!Array.isArray(u) || !u.every((h) => h instanceof Uint8Array)) | ||
const i = await c.transaction("updates", "readonly").objectStore("updates").getAll(); | ||
if (!Array.isArray(i) || !i.every((f) => f instanceof Uint8Array)) | ||
return; | ||
const d = v(u), l = e.transaction("workspace", "readwrite").objectStore("workspace"); | ||
const p = v(i), l = e.transaction("workspace", "readwrite").objectStore("workspace"); | ||
await l.get(s) || (console.log("upgrading the database"), await l.put({ | ||
@@ -225,3 +225,3 @@ id: s, | ||
timestamp: Date.now(), | ||
update: d | ||
update: p | ||
} | ||
@@ -234,3 +234,3 @@ ] | ||
}) | ||
), localStorage.setItem(`${n}-migration`, "true"); | ||
), localStorage.setItem(`${a}-migration`, "true"); | ||
break; | ||
@@ -240,4 +240,4 @@ } | ||
} | ||
async function Q(e, o = y) { | ||
const r = await (await f(o, S, { | ||
async function R(e, o = y) { | ||
const r = await (await b(o, S, { | ||
upgrade: k | ||
@@ -247,4 +247,4 @@ })).transaction("workspace", "readonly").objectStore("workspace").get(e); | ||
} | ||
async function R(e, o, n = y) { | ||
await (await f(n, S, { | ||
async function X(e, o, a = y) { | ||
await (await b(a, S, { | ||
upgrade: k | ||
@@ -261,3 +261,3 @@ })).transaction("workspace", "readwrite").objectStore("workspace").put({ | ||
} | ||
const U = "snapshot-origin", V = (e) => (e.preventDefault(), e.returnValue = "Data is not saved. Are you sure you want to leave?"), X = async (e) => { | ||
const U = "snapshot-origin", V = (e) => (e.preventDefault(), e.returnValue = "Data is not saved. Are you sure you want to leave?"), Y = async (e) => { | ||
window.addEventListener("beforeunload", V, { | ||
@@ -269,4 +269,4 @@ capture: !0 | ||
}; | ||
function Y(e, o, n) { | ||
const t = new q(); | ||
function Z(e, o, a) { | ||
const t = new I(); | ||
m(t, o, U); | ||
@@ -276,11 +276,11 @@ const s = j(e), r = j(t), c = A( | ||
r | ||
), p = new I( | ||
[...t.share.keys()].map((d) => { | ||
const l = n(d); | ||
), u = new z( | ||
[...t.share.keys()].map((p) => { | ||
const l = a(p); | ||
if (l === "Text") | ||
return t.getText(d); | ||
return t.getText(p); | ||
if (l === "Map") | ||
return t.getMap(d); | ||
return t.getMap(p); | ||
if (l === "Array") | ||
return t.getArray(d); | ||
return t.getArray(p); | ||
throw new Error("Unknown type"); | ||
@@ -292,10 +292,10 @@ }), | ||
); | ||
m(t, c, U), p.undo(); | ||
const u = A( | ||
m(t, c, U), u.undo(); | ||
const i = A( | ||
t, | ||
s | ||
); | ||
m(e, u, U); | ||
m(e, i, U); | ||
} | ||
class Z extends Error { | ||
class ee extends Error { | ||
constructor() { | ||
@@ -305,3 +305,3 @@ super("Early disconnect"); | ||
} | ||
class ee extends Error { | ||
class te extends Error { | ||
constructor() { | ||
@@ -311,14 +311,14 @@ super("Cleanup when connecting"); | ||
} | ||
const te = async (e, o, n, t = y) => { | ||
const c = (await f(t, S, { | ||
const ne = async (e, o, a, t = y) => { | ||
const c = (await b(t, S, { | ||
upgrade: k | ||
})).transaction("milestone", "readwrite").objectStore("milestone"), p = await c.get("id"), u = A(o); | ||
p ? (p.milestone[n] = u, await c.put(p)) : await c.put({ | ||
})).transaction("milestone", "readwrite").objectStore("milestone"), u = await c.get("id"), i = A(o); | ||
u ? (u.milestone[a] = i, await c.put(u)) : await c.put({ | ||
id: e, | ||
milestone: { | ||
[n]: u | ||
[a]: i | ||
} | ||
}); | ||
}, ne = async (e, o = y) => { | ||
const r = await (await f(o, S, { | ||
}, ae = async (e, o = y) => { | ||
const r = await (await b(o, S, { | ||
upgrade: k | ||
@@ -329,17 +329,17 @@ })).transaction("milestone", "readonly").objectStore("milestone").get(e); | ||
export { | ||
ee as CleanupWhenConnectingError, | ||
te as CleanupWhenConnectingError, | ||
y as DEFAULT_DB_NAME, | ||
Z as EarlyDisconnectError, | ||
J as createIndexedDBProvider, | ||
ee as EarlyDisconnectError, | ||
K as createIndexedDBProvider, | ||
S as dbVersion, | ||
Q as downloadBinary, | ||
ne as getMilestones, | ||
te as markMilestone, | ||
R as overwriteBinary, | ||
Y as revertUpdate, | ||
G as setMergeCount, | ||
K as tryMigrate, | ||
R as downloadBinary, | ||
ae as getMilestones, | ||
ne as markMilestone, | ||
X as overwriteBinary, | ||
Z as revertUpdate, | ||
J as setMergeCount, | ||
Q as tryMigrate, | ||
k as upgradeDB, | ||
X as writeOperation | ||
Y as writeOperation | ||
}; | ||
//# sourceMappingURL=index.js.map |
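The functional change in this bundle is small: the lazy provider helper now accepts an options object with an origin field (default "lazy-provider") in place of the hard-coded "lazy-provider-self-origin", and createIndexedDBProvider passes { origin: "idb" } so updates applied by the provider itself are tagged and not echoed back into IndexedDB; most of the remaining churn is identifier renaming from re-minification, and the public exports are unchanged. A minimal usage sketch, inferred from the export map and the signatures visible above (the doc guid and argument names are illustrative, not taken from the published typings):

import { Doc, applyUpdate } from "yjs";
import {
  createIndexedDBProvider,
  downloadBinary,
  setMergeCount,
  DEFAULT_DB_NAME,
} from "@toeverything/y-indexeddb";

// "my-workspace" is a hypothetical doc guid; any Yjs Doc works.
const doc = new Doc({ guid: "my-workspace" });

// Optional: once this many update records accumulate for a doc, they are merged into one.
setMergeCount(500);

// Persist the doc (and its loaded subdocs) to IndexedDB, replaying any stored state on connect.
const provider = createIndexedDBProvider(doc, DEFAULT_DB_NAME);
provider.connect();
console.log(provider.connected); // connection-state getter visible in this bundle

// Later, stop persisting.
provider.disconnect();

// Read the merged update for a guid without a provider; resolves to false when nothing is stored.
const update = await downloadBinary("my-workspace", DEFAULT_DB_NAME);
if (update) applyUpdate(doc, update);

Note that the provider's cleanup() clears the "workspace" object store entirely, so it should only be called when the persisted data is no longer needed.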
{ | ||
"name": "@toeverything/y-indexeddb", | ||
"type": "module", | ||
"version": "0.7.0-canary.52", | ||
"version": "0.7.0-canary.53", | ||
"description": "IndexedDB database adapter for Yjs", | ||
@@ -40,5 +40,5 @@ "repository": "toeverything/AFFiNE", | ||
"@affine/y-provider": "workspace:*", | ||
"@blocksuite/blocks": "0.0.0-20230721134812-6e0e3bef-nightly", | ||
"@blocksuite/store": "0.0.0-20230721134812-6e0e3bef-nightly", | ||
"vite": "^4.4.6", | ||
"@blocksuite/blocks": "0.0.0-20230720073515-bea92e0f-nightly", | ||
"@blocksuite/store": "0.0.0-20230720073515-bea92e0f-nightly", | ||
"vite": "^4.4.7", | ||
"vite-plugin-dts": "3.3.1", | ||
@@ -45,0 +45,0 @@ "y-indexeddb": "^9.0.11" |
{ | ||
"name": "@toeverything/y-indexeddb", | ||
"type": "module", | ||
"version": "0.7.0-canary.52", | ||
"version": "0.7.0-canary.53", | ||
"description": "IndexedDB database adapter for Yjs", | ||
@@ -43,6 +43,6 @@ "repository": "toeverything/AFFiNE", | ||
"devDependencies": { | ||
"@affine/y-provider": "0.7.0-canary.52", | ||
"@blocksuite/blocks": "0.0.0-20230721134812-6e0e3bef-nightly", | ||
"@blocksuite/store": "0.0.0-20230721134812-6e0e3bef-nightly", | ||
"vite": "^4.4.6", | ||
"@affine/y-provider": "0.7.0-canary.53", | ||
"@blocksuite/blocks": "0.0.0-20230720073515-bea92e0f-nightly", | ||
"@blocksuite/store": "0.0.0-20230720073515-bea92e0f-nightly", | ||
"vite": "^4.4.7", | ||
"vite-plugin-dts": "3.3.1", | ||
@@ -49,0 +49,0 @@ "y-indexeddb": "^9.0.11" |
Sorry, the diff of this file is not supported yet
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
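If the refactor warning is a concern, the package's own exports can back up persisted data before updating. A hedged sketch, assuming downloadBinary and overwriteBinary keep the (docId, dbName) and (docId, update, dbName) argument orders visible in this bundle; the doc guid is illustrative:

import { downloadBinary, overwriteBinary, DEFAULT_DB_NAME } from "@toeverything/y-indexeddb";

// Before updating: read the merged update stored for a doc and keep it somewhere safe.
const backup = await downloadBinary("my-workspace", DEFAULT_DB_NAME);

// After updating, if the stored data looks wrong, write the backup back for that doc.
if (backup) {
  await overwriteBinary("my-workspace", backup, DEFAULT_DB_NAME);
}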