apical-store
Comparing version 0.0.51 to 0.0.71
dist/bundle.js
@@ -39,11 +39,14 @@ (function (global, factory) { | ||
/** | ||
* Creating an observable array | ||
*/ | ||
const INSERT = "insert"; | ||
const UPDATE = "update"; | ||
const DELETE = "delete"; | ||
const REVERSE = "reverse"; | ||
const SHUFFLE = "shuffle"; | ||
const oMetaKey = Symbol.for("object-observer-meta-key-0"); | ||
// polyfill | ||
(function (global) { | ||
if (typeof global.queueMicrotask !== "function") { | ||
global.queueMicrotask = function (callback) { | ||
Promise.resolve().then(callback); | ||
}; | ||
} | ||
})(typeof global !== "undefined" | ||
? global | ||
: typeof window !== "undefined" | ||
? window | ||
: undefined); | ||
function findGrandParent(observable) { | ||
@@ -55,105 +58,100 @@ if (observable.parent) | ||
} | ||
function copy(v) { | ||
return JSON.parse(JSON.stringify({ tmp: v })).tmp; | ||
} | ||
function prepareObject(source, oMeta, visited) { | ||
const target = {}; | ||
target[oMetaKey] = oMeta; | ||
for (const key in source) { | ||
target[key] = getObservedOf(source[key], key, oMeta, visited); | ||
function copy(obj) { | ||
if (obj === null || typeof obj !== "object") { | ||
return obj; | ||
} | ||
return target; | ||
} | ||
function prepareArray(source, oMeta, visited) { | ||
let l = source.length; | ||
const target = new Array(l); | ||
target[oMetaKey] = oMeta; | ||
for (let i = 0; i < l; i++) { | ||
target[i] = getObservedOf(source[i], i, oMeta, visited); | ||
if (obj instanceof Date) { | ||
return new Date(obj.getTime()); | ||
} | ||
return target; | ||
} | ||
function callObserverSafe(listener, changes) { | ||
try { | ||
listener(changes); | ||
if (Array.isArray(obj)) { | ||
const arrCopy = []; | ||
for (const item of obj) { | ||
arrCopy.push(copy(item)); | ||
} | ||
return arrCopy; | ||
} | ||
catch (e) { | ||
console.error(`Failed to notify listener ${listener} with ${changes}`, e); | ||
const objCopy = {}; | ||
for (const key in obj) { | ||
if (obj.hasOwnProperty(key)) { | ||
objCopy[key] = copy(obj[key]); | ||
} | ||
} | ||
return objCopy; | ||
} | ||
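The rewritten copy() above replaces the old JSON.parse(JSON.stringify(...)) round-trip, so Date instances survive cloning instead of degrading to ISO strings. A minimal sketch of the difference (values are illustrative):

const original = { createdAt: new Date(), nested: { n: 1 }, tags: ["a", "b"] };

const viaJson = JSON.parse(JSON.stringify(original));
// viaJson.createdAt is a plain string; the Date type is lost

const viaCopy = copy(original);
// viaCopy.createdAt instanceof Date === true
// viaCopy.nested !== original.nested   (a deep clone, not a shared reference)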
function callObserversFromMT() { | ||
const batches = this.batches; | ||
this.batches = []; | ||
for (const [listener, changes] of batches) { | ||
callObserverSafe(listener, changes); | ||
/** | ||
* Constants | ||
*/ | ||
const INSERT = "insert"; | ||
const UPDATE = "update"; | ||
const DELETE = "delete"; | ||
const REVERSE = "reverse"; | ||
const SHUFFLE = "shuffle"; | ||
const oMetaKey = Symbol.for("object-observer-meta-key-0"); | ||
/** | ||
* Change class: | ||
* any change sent to the observer will carry the properties of this class | ||
*/ | ||
class Change { | ||
constructor(type, path, value, oldValue, object, snapshot) { | ||
this.type = type; | ||
this.path = path; | ||
this.value = copy(value); | ||
this.oldValue = copy(oldValue); | ||
this.object = object; | ||
this.snapshot = snapshot; | ||
} | ||
} | ||
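Observers receive batched arrays of Change instances built from the class above. A hedged sketch of the payload for a single nested update, relying on the Observable wrapper defined further down in this diff (field values and the exact path are illustrative):

const people = new Observable([{ id: "p1", name: "Ali" }]);
people.observe((changes) => {
  // for the mutation below, changes is roughly:
  // [{ type: "update", path: [0, "name"], value: "Alaa", oldValue: "Ali",
  //    object: <the proxied object that changed>, snapshot: <plain copy of the root proxy> }]
  console.log(changes[0].type, changes[0].path);
});
people.target[0].name = "Alaa"; // notifications are batched and delivered on a microtask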
function callObservers(oMeta, changes) { | ||
let currentObservable = oMeta; | ||
const l = changes.length; | ||
do { | ||
let observers = currentObservable.observers; | ||
let i = observers.length; | ||
while (i--) { | ||
let target = observers[i]; | ||
if (changes.length) { | ||
if (currentObservable.batches.length === 0) { | ||
// @ts-ignore | ||
queueMicrotask(callObserversFromMT.bind(currentObservable)); | ||
} | ||
let rb; | ||
for (const batch of currentObservable.batches) { | ||
if (batch[0] === target) { | ||
rb = batch; | ||
break; | ||
} | ||
} | ||
if (!rb) { | ||
rb = [target, []]; | ||
currentObservable.batches.push(rb); | ||
} | ||
Array.prototype.push.apply(rb[1], changes); | ||
} | ||
const prepare = { | ||
object(source, oMeta, visited) { | ||
const target = {}; | ||
target[oMetaKey] = oMeta; | ||
for (const key in source) { | ||
target[key] = prepare.getObservedOf(source[key], key, oMeta, visited); | ||
} | ||
// cloning all the changes and notifying in context of parent | ||
const parent = currentObservable.parent; | ||
if (parent) { | ||
for (let j = 0; j < l; j++) { | ||
const change = changes[j]; | ||
changes[j] = new Change(change.type, [currentObservable.ownKey, ...change.path], change.value, change.oldValue, change.object, copy(findGrandParent(currentObservable).proxy)); | ||
} | ||
currentObservable = parent; | ||
return target; | ||
}, | ||
array(source, oMeta, visited) { | ||
let l = source.length; | ||
const target = new Array(l); | ||
target[oMetaKey] = oMeta; | ||
for (let i = 0; i < l; i++) { | ||
target[i] = prepare.getObservedOf(source[i], i, oMeta, visited); | ||
} | ||
return target; | ||
}, | ||
getObservedOf(item, key, parent, visited) { | ||
if (visited !== undefined && visited.has(item)) { | ||
return null; | ||
} | ||
else if (typeof item !== "object" || item === null) { | ||
return item; | ||
} | ||
else if (Array.isArray(item)) { | ||
return new ObservableArrayMeta({ | ||
target: item, | ||
ownKey: key, | ||
parent: parent, | ||
visited, | ||
}).proxy; | ||
} | ||
else if (item instanceof Date) { | ||
return item; | ||
} | ||
else { | ||
currentObservable = null; | ||
return new ObservableObjectMeta({ | ||
target: item, | ||
ownKey: key, | ||
parent: parent, | ||
visited, | ||
}).proxy; | ||
} | ||
} while (currentObservable); | ||
} | ||
function getObservedOf(item, key, parent, visited) { | ||
if (visited !== undefined && visited.has(item)) { | ||
return null; | ||
} | ||
else if (typeof item !== "object" || item === null) { | ||
return item; | ||
} | ||
else if (Array.isArray(item)) { | ||
return new ArrayOMeta({ | ||
target: item, | ||
ownKey: key, | ||
parent: parent, | ||
visited, | ||
}).proxy; | ||
} | ||
else if (item instanceof Date) { | ||
return item; | ||
} | ||
else { | ||
return new ObjectOMeta({ | ||
target: item, | ||
ownKey: key, | ||
parent: parent, | ||
visited, | ||
}).proxy; | ||
} | ||
} | ||
}, | ||
}; | ||
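prepare.getObservedOf decides per value whether to wrap it: primitives, null, and Date instances pass through untouched, arrays become ObservableArrayMeta proxies, plain objects become ObservableObjectMeta proxies, and values already present in visited are replaced with null to break reference cycles. One practical consequence of leaving Dates unwrapped, sketched with the Observable wrapper defined later in this file:

const obs = new Observable([{ profile: { born: new Date(1990, 0, 1) }, scores: [1, 2] }]);

obs.target[0].scores.push(3);                       // nested array is proxied: observers fire
obs.target[0].profile.born.setFullYear(1991);       // Date is NOT proxied: no notification
obs.target[0].profile.born = new Date(1991, 0, 1);  // reassigning the property does notify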
/*** | ||
* Proxied Array methods | ||
*/ | ||
function proxiedPop() { | ||
@@ -171,3 +169,3 @@ const oMeta = this[oMetaKey], target = oMeta.target, poppedIndex = target.length - 1; | ||
]; | ||
callObservers(oMeta, changes); | ||
oMeta.callObservers(changes); | ||
return popResult; | ||
@@ -178,3 +176,3 @@ } | ||
for (let i = 0; i < l; i++) { | ||
pushContent[i] = getObservedOf(arguments[i], initialLength + i, oMeta); | ||
pushContent[i] = prepare.getObservedOf(arguments[i], initialLength + i, oMeta); | ||
} | ||
@@ -186,3 +184,3 @@ const pushResult = Reflect.apply(target.push, target, pushContent); | ||
} | ||
callObservers(oMeta, changes); | ||
oMeta.callObservers(changes); | ||
return pushResult; | ||
@@ -213,3 +211,3 @@ } | ||
]; | ||
callObservers(oMeta, changes); | ||
oMeta.callObservers(changes); | ||
return shiftResult; | ||
@@ -220,3 +218,3 @@ } | ||
for (let i = 0; i < al; i++) { | ||
unshiftContent[i] = getObservedOf(arguments[i], i, oMeta); | ||
unshiftContent[i] = prepare.getObservedOf(arguments[i], i, oMeta); | ||
} | ||
@@ -239,3 +237,3 @@ const unshiftResult = Reflect.apply(target.unshift, target, unshiftContent); | ||
} | ||
callObservers(oMeta, changes); | ||
oMeta.callObservers(changes); | ||
return unshiftResult; | ||
@@ -259,3 +257,3 @@ } | ||
]; | ||
callObservers(oMeta, changes); | ||
oMeta.callObservers(changes); | ||
return this; | ||
@@ -279,3 +277,3 @@ } | ||
]; | ||
callObservers(oMeta, changes); | ||
oMeta.callObservers(changes); | ||
return this; | ||
@@ -302,3 +300,3 @@ } | ||
item = target[i]; | ||
target[i] = getObservedOf(item, i, oMeta); | ||
target[i] = prepare.getObservedOf(item, i, oMeta); | ||
if (i in prev) { | ||
@@ -318,3 +316,3 @@ tmpTarget = prev[i]; | ||
} | ||
callObservers(oMeta, changes); | ||
oMeta.callObservers(changes); | ||
} | ||
@@ -346,3 +344,3 @@ return this; | ||
if (nItem && typeof nItem === "object") { | ||
nItem = getObservedOf(nItem, i, oMeta); | ||
nItem = prepare.getObservedOf(nItem, i, oMeta); | ||
target[i] = nItem; | ||
@@ -363,3 +361,3 @@ } | ||
} | ||
callObservers(oMeta, changes); | ||
oMeta.callObservers(changes); | ||
} | ||
@@ -372,3 +370,3 @@ return this; | ||
for (let i = 0; i < splLen; i++) { | ||
spliceContent[i] = getObservedOf(arguments[i], i, oMeta); | ||
spliceContent[i] = prepare.getObservedOf(arguments[i], i, oMeta); | ||
} | ||
@@ -416,3 +414,3 @@ // calculate pointers | ||
} | ||
callObservers(oMeta, changes); | ||
oMeta.callObservers(changes); | ||
return spliceResult; | ||
@@ -431,16 +429,8 @@ } | ||
}; | ||
class Change { | ||
constructor(type, path, value, oldValue, object, snapshot) { | ||
this.type = type; | ||
this.path = path; | ||
this.value = copy(value); | ||
this.oldValue = copy(oldValue); | ||
this.object = object; | ||
this.snapshot = snapshot; | ||
} | ||
} | ||
class OMetaBase { | ||
class ObservableMeta { | ||
constructor(properties, cloningFunction) { | ||
this.observers = []; | ||
this.batches = []; | ||
this.runningSilentWork = false; | ||
const { target, parent, ownKey, visited = new Set() } = properties; | ||
@@ -470,3 +460,3 @@ if (parent && ownKey !== undefined) { | ||
if (value !== oldValue) { | ||
const newValue = getObservedOf(value, key, this); | ||
const newValue = prepare.getObservedOf(value, key, this); | ||
target[key] = newValue; | ||
@@ -486,3 +476,3 @@ if (oldValue && typeof oldValue === "object") { | ||
]; | ||
callObservers(this, changes); | ||
this.callObservers(changes); | ||
} | ||
@@ -503,14 +493,73 @@ return true; | ||
]; | ||
callObservers(this, changes); | ||
this.callObservers(changes); | ||
return true; | ||
} | ||
QueMicroTask(observableMeta) { | ||
let skip = false; | ||
if (findGrandParent(this).runningSilentWork) | ||
skip = true; | ||
queueMicrotask(() => { | ||
const batches = observableMeta.batches; | ||
observableMeta.batches = []; | ||
for (const [listener, changes] of batches) { | ||
try { | ||
if (skip) | ||
break; | ||
listener(changes); | ||
} | ||
catch (e) { | ||
console.error(`Failed to notify listener ${listener} with ${changes}:`, e); | ||
} | ||
} | ||
}); | ||
} | ||
callObservers(changes) { | ||
let currentObservable = this; | ||
const l = changes.length; | ||
do { | ||
let observers = currentObservable.observers; | ||
let i = observers.length; | ||
while (i--) { | ||
let target = observers[i]; | ||
if (changes.length) { | ||
if (currentObservable.batches.length === 0) { | ||
this.QueMicroTask(currentObservable); | ||
} | ||
let rb; | ||
for (const batch of currentObservable.batches) { | ||
if (batch[0] === target) { | ||
rb = batch; | ||
break; | ||
} | ||
} | ||
if (!rb) { | ||
rb = [target, []]; | ||
currentObservable.batches.push(rb); | ||
} | ||
Array.prototype.push.apply(rb[1], changes); | ||
} | ||
} | ||
// cloning all the changes and notifying in context of parent | ||
const parent = currentObservable.parent; | ||
if (parent) { | ||
for (let j = 0; j < l; j++) { | ||
const change = changes[j]; | ||
changes[j] = new Change(change.type, [currentObservable.ownKey, ...change.path], change.value, change.oldValue, change.object, copy(findGrandParent(currentObservable).proxy)); | ||
} | ||
currentObservable = parent; | ||
} | ||
else { | ||
currentObservable = null; | ||
} | ||
} while (currentObservable); | ||
} | ||
} | ||
class ObjectOMeta extends OMetaBase { | ||
class ObservableObjectMeta extends ObservableMeta { | ||
constructor(properties) { | ||
super(properties, prepareObject); | ||
super(properties, prepare.object); | ||
} | ||
} | ||
class ArrayOMeta extends OMetaBase { | ||
class ObservableArrayMeta extends ObservableMeta { | ||
constructor(properties) { | ||
super(properties, prepareArray); | ||
super(properties, prepare.array); | ||
} | ||
@@ -521,57 +570,67 @@ get(target, key) { | ||
} | ||
function observable(target) { | ||
const o = isObservable(target) | ||
? target | ||
: new ArrayOMeta({ | ||
target: target, | ||
ownKey: "", | ||
parent: null, | ||
}).proxy; | ||
function unobserve(observers) { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
if (!observers) | ||
return yield __unobserve(o); | ||
else if (Array.isArray(observers)) | ||
return yield __unobserve(o, observers); | ||
else | ||
return yield __unobserve(o, [observers]); | ||
}); | ||
class Observable { | ||
constructor(target) { | ||
/** | ||
* An array of all the observers registered to this observable | ||
*/ | ||
this.observers = []; | ||
this.target = Observable.isObservable(target) | ||
? target | ||
: new ObservableArrayMeta({ | ||
target: target, | ||
ownKey: "", | ||
parent: null, | ||
}).proxy; | ||
this.observers = this.target[oMetaKey].observers; | ||
} | ||
function observe(observer) { | ||
__observe(o, observer); | ||
/** | ||
* | ||
* Remove an observer from the list of observers. | ||
* Accepts a single observer, an array of observers, | ||
* or no argument to remove all observers. | ||
*/ | ||
unobserve(observers) { | ||
if (!observers) | ||
return this.__unobserve(); | ||
else if (Array.isArray(observers)) | ||
return this.__unobserve(observers); | ||
else | ||
return this.__unobserve([observers]); | ||
} | ||
function silently(work) { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const observers = yield __unobserve(o); | ||
try { | ||
work(o); | ||
} | ||
finally { | ||
for (const observer of observers) { | ||
__observe(o, observer); | ||
} | ||
} | ||
}); | ||
/** | ||
* Register a new observer | ||
*/ | ||
observe(observer) { | ||
this.__observe(observer); | ||
} | ||
return { | ||
observe, | ||
unobserve, | ||
silently, | ||
observable: o, | ||
}; | ||
} | ||
function isObservable(input) { | ||
return !!(input && input[oMetaKey]); | ||
} | ||
function __observe(observable, observer) { | ||
const observers = observable[oMetaKey].observers; | ||
if (!observers.some((o) => o === observer)) { | ||
observers.push(observer); | ||
/** | ||
* Execute a callback silently (without calling the observers) | ||
*/ | ||
silently(work) { | ||
this.target[oMetaKey].runningSilentWork = true; | ||
try { | ||
work(this.target); | ||
} | ||
finally { | ||
this.target[oMetaKey].runningSilentWork = false; | ||
} | ||
} | ||
} | ||
function __unobserve(observable, observers) { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
if (observable instanceof Promise) | ||
observable = yield Promise.resolve(observable); | ||
const existingObs = observable[oMetaKey].observers; | ||
/** | ||
* Get a non-observed copy of the observable array. | ||
* Changes to this copy will not be replicated back to the observable array | ||
* and will not trigger observers. | ||
*/ | ||
get copy() { | ||
return copy(this.target); | ||
} | ||
__observe(observer) { | ||
const observers = this.target[oMetaKey].observers; | ||
if (!observers.some((o) => o === observer)) { | ||
observers.push(observer); | ||
} | ||
} | ||
__unobserve(observers) { | ||
const existingObs = this.target[oMetaKey].observers; | ||
let length = existingObs.length; | ||
@@ -593,228 +652,13 @@ if (!length) { | ||
return spliced; | ||
}); | ||
} | ||
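Taken together, the new Observable class replaces the old observable()/__observe()/__unobserve() free functions with an instance API. A hedged usage sketch of the methods shown above:

const todos = new Observable([{ id: "t1", done: false }]);

const log = (changes) => console.log("changed:", changes.length);
todos.observe(log);

todos.target.push({ id: "t2", done: true });   // observers fire on the next microtask

todos.silently((list) => list.reverse());      // runningSilentWork is set, so observers stay quiet

const detached = todos.copy;                   // plain deep copy; mutating it affects nothing

todos.unobserve(log);        // a single observer
// todos.unobserve([log]);   // ...or an array of observers
// todos.unobserve();        // ...or everything

Observable.isObservable(todos.target);         // true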
class IDB { | ||
constructor(name) { | ||
const request = indexedDB.open(name); | ||
request.onupgradeneeded = function (event) { | ||
const db = event.target.result; | ||
if (!db.objectStoreNames.contains(name)) { | ||
const objectStore = db.createObjectStore(name); | ||
objectStore.createIndex("idIndex", "_id", { unique: true }); | ||
} | ||
if (!db.objectStoreNames.contains('metadata')) { | ||
db.createObjectStore('metadata'); | ||
} | ||
}; | ||
const dbp = this.pr(request); | ||
this.store = (txMode, callback) => dbp.then((db) => callback(db.transaction(name, txMode, { durability: "relaxed" }).objectStore(name))); | ||
this.metadataStore = (txMode, callback) => dbp.then((db) => callback(db.transaction('metadata', txMode, { durability: "relaxed" }).objectStore('metadata'))); | ||
} | ||
/** | ||
* Converts IDB requests/transactions to promises. | ||
* when given any input it would return: | ||
* true: if it's an observable object (even if deeply nested inside observable array) | ||
* false: if not | ||
*/ | ||
pr(req) { | ||
return new Promise((resolve, reject) => { | ||
// @ts-ignore - file size hacks | ||
req.oncomplete = req.onsuccess = () => resolve(req.result); | ||
// @ts-ignore - file size hacks | ||
req.onabort = req.onerror = () => reject(req.error); | ||
}); | ||
static isObservable(input) { | ||
return !!(input && input[oMetaKey]); | ||
} | ||
/** | ||
* Converts cursor iterations to promises. | ||
*/ | ||
eachCursor(store, callback) { | ||
store.openCursor().onsuccess = function () { | ||
if (!this.result) | ||
return; | ||
callback(this.result); | ||
this.result.continue(); | ||
}; | ||
return this.pr(store.transaction); | ||
} | ||
/** | ||
* Get a value by its key. | ||
*/ | ||
get(key) { | ||
return this.store("readonly", (store) => this.pr(store.get(key))); | ||
} | ||
/** | ||
* Get values for a given set of keys. | ||
*/ | ||
getBulk(keys) { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
return this.store("readonly", (store) => __awaiter(this, void 0, void 0, function* () { | ||
return Promise.all(keys.map((x) => this.pr(store.get(x)))); | ||
})); | ||
}); | ||
} | ||
/** | ||
* Set a value with a key. | ||
*/ | ||
set(key, value) { | ||
return this.store("readwrite", (store) => { | ||
store.put(value, key); | ||
return this.pr(store.transaction); | ||
}); | ||
} | ||
/** | ||
* Set multiple values at once. This is faster than calling set() multiple times. | ||
* It's also atomic – if one of the pairs can't be added, none will be added. | ||
*/ | ||
setBulk(entries) { | ||
return this.store("readwrite", (store) => { | ||
entries.forEach((entry) => store.put(entry[1], entry[0])); | ||
return this.pr(store.transaction); | ||
}); | ||
} | ||
/** | ||
* Delete multiple keys at once. | ||
*/ | ||
delBulk(keys) { | ||
return this.store("readwrite", (store) => { | ||
keys.forEach((key) => store.delete(key)); | ||
return this.pr(store.transaction); | ||
}); | ||
} | ||
/** | ||
* Clear all values in the store. | ||
*/ | ||
clear() { | ||
return this.store("readwrite", (store) => { | ||
store.clear(); | ||
return this.pr(store.transaction); | ||
}); | ||
} | ||
/** | ||
* Get all keys in the store. | ||
*/ | ||
keys() { | ||
return this.store("readonly", (store) => __awaiter(this, void 0, void 0, function* () { | ||
// Fast path for modern browsers | ||
if (store.getAllKeys) { | ||
return this.pr(store.getAllKeys()); | ||
} | ||
const items = []; | ||
yield this.eachCursor(store, (cursor) => items.push(cursor.key)); | ||
return items; | ||
})); | ||
} | ||
/** | ||
* Get all documents in the store. | ||
*/ | ||
values() { | ||
return this.store("readonly", (store) => __awaiter(this, void 0, void 0, function* () { | ||
// Fast path for modern browsers | ||
if (store.getAll) { | ||
return this.pr(store.getAll()); | ||
} | ||
const items = []; | ||
yield this.eachCursor(store, (cursor) => items.push(cursor.value)); | ||
return items; | ||
})); | ||
} | ||
/** | ||
* Get key by ID | ||
*/ | ||
byID(_id) { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
return this.store("readonly", (store) => { | ||
return this.pr(store.index("idIndex").getKey(_id)); | ||
}); | ||
}); | ||
} | ||
/** | ||
* Get length of the DB. | ||
*/ | ||
length() { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
return (yield this.keys()).length; | ||
}); | ||
} | ||
/** | ||
* Set metadata with a key. | ||
*/ | ||
setMetadata(key, value) { | ||
return this.metadataStore("readwrite", (store) => { | ||
store.put(value, key); | ||
return this.pr(store.transaction); | ||
}); | ||
} | ||
/** | ||
* Get metadata by its key. | ||
*/ | ||
getMetadata(key) { | ||
return this.metadataStore("readonly", (store) => this.pr(store.get(key))); | ||
} | ||
clearMetadata() { | ||
return this.metadataStore("readwrite", (store) => { | ||
store.clear(); | ||
return this.pr(store.transaction); | ||
}); | ||
} | ||
} | ||
class SyncService { | ||
constructor(baseUrl, token, table) { | ||
this.baseUrl = baseUrl; | ||
this.token = token; | ||
this.table = table; | ||
} | ||
fetchData() { | ||
return __awaiter(this, arguments, void 0, function* (version = 0) { | ||
let page = 0; | ||
let nextPage = true; | ||
let fetchedVersion = 0; | ||
let result = []; | ||
while (nextPage) { | ||
const url = `${this.baseUrl}/${this.table}/${version}/${page}`; | ||
const response = yield fetch(url, { | ||
method: "GET", | ||
headers: { | ||
Authorization: `Bearer ${this.token}`, | ||
}, | ||
}); | ||
const res = yield response.json(); | ||
const output = JSON.parse(res.output); | ||
nextPage = output.rows.length > 0 && version !== 0; | ||
fetchedVersion = output.version; | ||
result = result.concat(output.rows); | ||
page = page + 1; | ||
} | ||
return { version: fetchedVersion, rows: result }; | ||
}); | ||
} | ||
latestVersion() { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const url = `${this.baseUrl}/${this.table}/0/Infinity`; | ||
const response = yield fetch(url, { | ||
method: "GET", | ||
headers: { | ||
Authorization: `Bearer ${this.token}`, | ||
}, | ||
}); | ||
const res = yield response.json(); | ||
if (res.success) | ||
return Number(JSON.parse(res.output).version); | ||
else | ||
return 0; | ||
}); | ||
} | ||
sendUpdates(data) { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const url = `${this.baseUrl}/${this.table}`; | ||
const response = yield fetch(url, { | ||
method: "PUT", | ||
headers: { | ||
Authorization: `Bearer ${this.token}`, | ||
}, | ||
body: JSON.stringify(data), | ||
}); | ||
return Number((yield response.json()).output); | ||
}); | ||
} | ||
} | ||
function debounce(func, wait) { | ||
@@ -981,16 +825,24 @@ let timeoutId = null; | ||
class Store { | ||
constructor({ name, token, persist = true, endpoint, debounceRate, model, encode, decode, }) { | ||
constructor({ debounceRate, model, encode, decode, onSyncStart, onSyncEnd, localPersistence, remotePersistence, } = {}) { | ||
this.isOnline = true; | ||
this.deferredPresent = false; | ||
this.$$observableObject = observable([]); | ||
this.onSyncStart = () => { }; | ||
this.onSyncEnd = () => { }; | ||
this.$$observableObject = new Observable([]); | ||
this.$$changes = []; | ||
this.$$syncService = null; | ||
this.$$debounceRate = 100; | ||
this.$$lastProcessChanges = 0; | ||
this.$$model = Document; | ||
this.$$encode = (x) => x; | ||
this.$$decode = (x) => x; | ||
this.copy = this.$$observableObject.copy; | ||
this.new = this.$$model.new; | ||
this.sync = debounce(this.$$sync.bind(this), this.$$debounceRate); | ||
this.$$idb = new IDB(name); | ||
this.$$token = token; | ||
this.$$model = model || Document; | ||
if (onSyncStart) { | ||
this.onSyncStart = onSyncStart; | ||
} | ||
if (onSyncEnd) { | ||
this.onSyncEnd = onSyncEnd; | ||
} | ||
if (encode) { | ||
@@ -1005,8 +857,17 @@ this.$$encode = encode; | ||
} | ||
if (persist && endpoint) { | ||
if (localPersistence) { | ||
this.$$localPersistence = localPersistence; | ||
this.$$loadFromLocal(); | ||
this.$$setupObservers(); | ||
this.$$syncService = new SyncService(endpoint, this.$$token, name); | ||
} | ||
if (remotePersistence) { | ||
this.$$remotePersistence = remotePersistence; | ||
} | ||
} | ||
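The Store constructor no longer takes name/token/persist/endpoint; local and remote persistence are now injected, along with optional sync callbacks. A hedged wiring sketch using the IDB and CloudFlareApexoDB classes defined later in this bundle (Patient, the endpoint, and the token are placeholders):

const patients = new Store({
  model: Patient,                                   // a Document subclass, assumed defined elsewhere
  debounceRate: 500,
  localPersistence: new IDB({ name: "patients" }),
  remotePersistence: new CloudFlareApexoDB({
    endpoint: "https://sync.example.com",
    token: "<bearer token>",
    name: "patients",
  }),
  onSyncStart: () => console.log("sync started"),
  onSyncEnd: () => console.log("sync finished"),
});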
/** | ||
* Serializes an item of type T into an encoded JSON string. | ||
* Date objects are converted to a custom format before encoding. | ||
* @param item An instance of type T which extends Document. | ||
* @returns An encoded JSON string representing the item. | ||
*/ | ||
$$serialize(item) { | ||
@@ -1025,8 +886,13 @@ const stripped = item._stripDefaults ? item._stripDefaults() : item; | ||
} | ||
/** | ||
* Decodes a serialized string, parses it into a JavaScript object, and converts custom date formats back into Date objects. | ||
* @param line A string representing the serialized data. | ||
* @returns A new instance of the model with the deserialized data. | ||
*/ | ||
$$deserialize(line) { | ||
line = this.$$decode(line); | ||
const item = JSON.parse(line, function (key, val) { | ||
const item = JSON.parse(line, (key, val) => { | ||
if (key === "$$date") | ||
return new Date(val); | ||
let t = typeof val; | ||
const t = typeof val; | ||
if (t === "string" || t === "number" || t === "boolean" || val === null) | ||
@@ -1040,5 +906,13 @@ return val; | ||
} | ||
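The reviver above restores Date objects that $$serialize wrote under a "$$date" key (after the optional decode step). The unwrapping of the containing object is elided by this hunk, but the convention can be sketched roughly like this (the epoch-milliseconds value is an assumption):

const line = '{"id":"t1","createdAt":{"$$date":1700000000000}}';
const parsed = JSON.parse(line, (key, val) => {
  if (key === "$$date") return new Date(val);                         // revive the wrapped value
  if (val && typeof val === "object" && val.$$date instanceof Date)   // collapse the wrapper
    return val.$$date;
  return val;
});
// parsed.createdAt instanceof Date === true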
/** | ||
* Loads data from the local persistence layer, deserializes it, and updates the observable array silently without triggering observers. | ||
*/ | ||
$$loadFromLocal() { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const deserialized = (yield this.$$idb.values()).map((x) => this.$$deserialize(x)); | ||
// Check if a local persistence layer is available | ||
if (!this.$$localPersistence) | ||
return; | ||
// Retrieve values from local persistence and deserialize them | ||
const deserialized = yield Promise.all((yield this.$$localPersistence.getAll()).map((x) => this.$$deserialize(x))); | ||
// Update the observable array silently with deserialized data | ||
this.$$observableObject.silently((o) => { | ||
@@ -1051,5 +925,9 @@ o.splice(0, o.length, ...deserialized); | ||
return __awaiter(this, void 0, void 0, function* () { | ||
if (!this.$$localPersistence) | ||
return; | ||
if (this.$$changes.length === 0) | ||
return; | ||
this.onSyncStart(); | ||
this.$$lastProcessChanges = Date.now(); | ||
const toWriteLocally = []; | ||
const toSendRemotely = {}; | ||
const toWrite = []; | ||
const toDeffer = []; | ||
@@ -1062,4 +940,3 @@ const changesToProcess = [...this.$$changes]; // Create a copy of changes to process | ||
const serializedLine = this.$$serialize(item); | ||
toWriteLocally.push([item.id, serializedLine]); | ||
toSendRemotely[item.id] = serializedLine; | ||
toWrite.push([item.id, serializedLine]); | ||
toDeffer.push({ | ||
@@ -1070,8 +947,10 @@ ts: Date.now(), | ||
} | ||
yield this.$$idb.setBulk(toWriteLocally); | ||
const deferred = (yield this.$$idb.getMetadata("deferred")) || "[]"; | ||
let deferredArray = JSON.parse(deferred); | ||
if (this.isOnline && this.$$syncService && deferredArray.length === 0) { | ||
yield this.$$localPersistence.put(toWrite); | ||
let deferredArray = yield this.$$localPersistence.getDeferred(); | ||
if (this.isOnline && | ||
this.$$remotePersistence && | ||
deferredArray.length === 0) { | ||
try { | ||
yield this.$$syncService.sendUpdates(toSendRemotely); | ||
yield this.$$remotePersistence.put(toWrite); | ||
this.onSyncEnd(); | ||
return; | ||
@@ -1090,5 +969,5 @@ } | ||
*/ | ||
deferredArray = deferredArray.concat(...toDeffer); | ||
yield this.$$idb.setMetadata("deferred", JSON.stringify(deferredArray)); | ||
yield this.$$localPersistence.putDeferred(deferredArray.concat(...toDeffer)); | ||
this.deferredPresent = true; | ||
this.onSyncEnd(); | ||
}); | ||
@@ -1112,7 +991,2 @@ } | ||
} | ||
$$localVersion() { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
return Number((yield this.$$idb.getMetadata("version")) || 0); | ||
}); | ||
} | ||
/** | ||
@@ -1152,7 +1026,12 @@ * | ||
return __awaiter(this, void 0, void 0, function* () { | ||
if (!this.$$syncService) { | ||
if (!this.$$localPersistence) { | ||
return { | ||
exception: "Sync service not available", | ||
exception: "Local persistence not available", | ||
}; | ||
} | ||
if (!this.$$remotePersistence) { | ||
return { | ||
exception: "Remote persistence not available", | ||
}; | ||
} | ||
if (!this.isOnline) { | ||
@@ -1164,6 +1043,5 @@ return { | ||
try { | ||
const localVersion = yield this.$$localVersion(); | ||
const remoteVersion = yield this.$$syncService.latestVersion(); | ||
const deferred = (yield this.$$idb.getMetadata("deferred")) || "[]"; | ||
let deferredArray = JSON.parse(deferred); | ||
const localVersion = yield this.$$localPersistence.getVersion(); | ||
const remoteVersion = yield this.$$remotePersistence.getVersion(); | ||
let deferredArray = yield this.$$localPersistence.getDeferred(); | ||
if (localVersion === remoteVersion && deferredArray.length === 0) { | ||
@@ -1175,11 +1053,14 @@ return { | ||
// fetch updates since our local version | ||
const remoteUpdates = yield this.$$syncService.fetchData(localVersion); | ||
const remoteUpdates = yield this.$$remotePersistence.getSince(localVersion); | ||
// check for conflicts | ||
deferredArray = deferredArray.filter((x) => { | ||
var _a; | ||
let item = this.$$deserialize(x.data); | ||
const conflict = remoteUpdates.rows.findIndex((y) => y.id === item.id); | ||
// take row-specific version if available, otherwise rely on latest version | ||
const comparison = Number(((_a = remoteUpdates.rows[conflict]) === null || _a === void 0 ? void 0 : _a.ts) || remoteVersion); | ||
if (conflict === -1) { | ||
return true; | ||
} | ||
else if (x.ts > remoteVersion) { | ||
else if (x.ts > comparison) { | ||
// there's a conflict, but the local change is newer | ||
@@ -1196,18 +1077,18 @@ remoteUpdates.rows.splice(conflict, 1); | ||
// we should start with remote | ||
for (const remote of remoteUpdates.rows) { | ||
yield this.$$idb.set(remote.id, remote.data); | ||
} | ||
yield this.$$localPersistence.put(remoteUpdates.rows.map((row) => [row.id, row.data])); | ||
// then local | ||
const updatedRows = {}; | ||
const updatedRows = new Map(); | ||
for (const local of deferredArray) { | ||
let item = this.$$deserialize(local.data); | ||
updatedRows[item.id] = local.data; | ||
updatedRows.set(item.id, local.data); | ||
// latest deferred write wins since it would overwrite the previous one | ||
} | ||
yield this.$$syncService.sendUpdates(updatedRows); | ||
yield this.$$remotePersistence.put([...updatedRows.keys()].map((x) => [x, updatedRows.get(x)])); | ||
// reset deferred | ||
yield this.$$idb.setMetadata("deferred", "[]"); | ||
yield this.$$localPersistence.putDeferred([]); | ||
this.deferredPresent = false; | ||
// set local version | ||
yield this.$$idb.setMetadata("version", remoteUpdates.version.toString()); | ||
// set local version to the version given by the current request | ||
// this might be outdated as soon as this function ends | ||
// that's why this function will run on a while loop (below) | ||
yield this.$$localPersistence.putVersion(remoteUpdates.version); | ||
// but if we had deferred updates then the remoteUpdates.version is outdated | ||
@@ -1237,11 +1118,18 @@ // so we need to fetch the latest version again | ||
return __awaiter(this, void 0, void 0, function* () { | ||
this.onSyncStart(); | ||
let tries = []; | ||
let exceptionOccurred = false; | ||
while (!exceptionOccurred) { | ||
const result = yield this.$$syncTry(); | ||
if (result.exception) { | ||
exceptionOccurred = true; | ||
try { | ||
let exceptionOccurred = false; | ||
while (!exceptionOccurred) { | ||
const result = yield this.$$syncTry(); | ||
if (result.exception) { | ||
exceptionOccurred = true; | ||
} | ||
tries.push(result); | ||
} | ||
tries.push(result); | ||
} | ||
catch (e) { | ||
console.error(e); | ||
} | ||
this.onSyncEnd(); | ||
return tries; | ||
@@ -1254,15 +1142,15 @@ }); | ||
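The conflict check in $$syncTry keeps a deferred (offline) local write only when no remote row shares its id, or when the local timestamp beats that row's version (falling back to the overall remote version); otherwise the remote row wins. A worked sketch of the rule with illustrative numbers:

const deferredArray = [{ ts: 1700000500000, data: '{"id":"a"}' }];
const remoteRows = [
  { id: "a", ts: 1700000400000, data: '{"id":"a"}' },   // older than the local write
  { id: "b", ts: 1700000600000, data: '{"id":"b"}' },   // no local counterpart
];

const kept = deferredArray.filter((x) => {
  const item = JSON.parse(x.data);
  const conflict = remoteRows.findIndex((y) => y.id === item.id);
  const comparison = Number(remoteRows[conflict]?.ts || 0 /* remoteVersion in the real code */);
  if (conflict === -1) return true;       // untouched remotely: keep the local write
  if (x.ts > comparison) {
    remoteRows.splice(conflict, 1);       // local is newer: drop the conflicting remote row
    return true;
  }
  return false;                           // remote is newer: discard the local write
});
// kept: the "a" write survives; remoteRows: only "b" is still applied locally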
get list() { | ||
return this.$$observableObject.observable.filter((x) => !x.$$deleted); | ||
return this.$$observableObject.target.filter((x) => !x.$$deleted); | ||
} | ||
getByID(id) { | ||
return this.$$observableObject.observable.find((x) => x.id === id); | ||
return this.$$observableObject.target.find((x) => x.id === id); | ||
} | ||
add(item) { | ||
if (this.$$observableObject.observable.find((x) => x.id === item.id)) { | ||
if (this.$$observableObject.target.find((x) => x.id === item.id)) { | ||
throw new Error("Duplicate ID detected: " + JSON.stringify(item.id)); | ||
} | ||
this.$$observableObject.observable.push(item); | ||
this.$$observableObject.target.push(item); | ||
} | ||
delete(item) { | ||
const index = this.$$observableObject.observable.findIndex((x) => x.id === item.id); | ||
const index = this.$$observableObject.target.findIndex((x) => x.id === item.id); | ||
if (index === -1) { | ||
@@ -1274,9 +1162,9 @@ throw new Error("Item not found."); | ||
deleteByIndex(index) { | ||
if (!this.$$observableObject.observable[index]) { | ||
if (!this.$$observableObject.target[index]) { | ||
throw new Error("Item not found."); | ||
} | ||
this.$$observableObject.observable[index].$$deleted = true; | ||
this.$$observableObject.target[index].$$deleted = true; | ||
} | ||
deleteByID(id) { | ||
const index = this.$$observableObject.observable.findIndex((x) => x.id === id); | ||
const index = this.$$observableObject.target.findIndex((x) => x.id === id); | ||
if (index === -1) { | ||
@@ -1288,13 +1176,18 @@ throw new Error("Item not found."); | ||
updateByIndex(index, item) { | ||
if (!this.$$observableObject.observable[index]) { | ||
if (!this.$$observableObject.target[index]) { | ||
throw new Error("Item not found."); | ||
} | ||
if (this.$$observableObject.observable[index].id !== item.id) { | ||
if (this.$$observableObject.target[index].id !== item.id) { | ||
throw new Error("ID mismatch."); | ||
} | ||
this.$$observableObject.observable[index] = item; | ||
this.$$observableObject.target[index] = item; | ||
} | ||
isUpdated() { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
return this.$$syncService ? ((yield this.$$syncService.latestVersion()) === (yield this.$$localVersion())) : true; | ||
if (this.$$localPersistence && this.$$remotePersistence) { | ||
return ((yield this.$$localPersistence.getVersion()) === | ||
(yield this.$$remotePersistence.getVersion())); | ||
} | ||
else | ||
return false; | ||
}); | ||
@@ -1304,22 +1197,229 @@ } | ||
function observe(input) { | ||
/** | ||
* Enhances a React component to automatically re-render when the observed store changes. | ||
* @param store - A Store instance (or an array of stores) whose documents extend Document. | ||
* @returns A higher-order function that takes a React component as an argument. | ||
*/ | ||
function observe(store) { | ||
return function (component) { | ||
let oCWM = component.prototype.componentWillMount || (() => { }); | ||
component.prototype.componentWillMount = function () { | ||
let unObservers = []; | ||
const originalComponentDidMount = component.prototype.componentDidMount || (() => { }); | ||
component.prototype.componentDidMount = function () { | ||
const unObservers = []; | ||
this.setState({}); | ||
const observer = () => this.setState({}); | ||
input.observe(observer); | ||
unObservers.push(() => input.unobserve(observer)); | ||
const oCWU = this.componentWillUnmount || (() => { }); | ||
if (Array.isArray(store)) { | ||
store.forEach((singleStore) => { | ||
// @ts-ignore | ||
singleStore.$$observableObject.observe(observer); | ||
unObservers.push(() => | ||
// @ts-ignore | ||
singleStore.$$observableObject.unobserve(observer)); | ||
}); | ||
} | ||
else { | ||
// @ts-ignore | ||
store.$$observableObject.observe(observer); | ||
// @ts-ignore | ||
store.$$observableObject.unobserve(observer); | ||
} | ||
const originalComponentWillUnmount = this.componentWillUnmount || (() => { }); | ||
this.componentWillUnmount = () => { | ||
unObservers.forEach((u) => u()); | ||
oCWU.call(this); | ||
unObservers.forEach((unObserver) => unObserver()); | ||
originalComponentWillUnmount.call(this); | ||
}; | ||
oCWM.call(this); | ||
originalComponentDidMount.call(this); | ||
}; | ||
return component; | ||
}; | ||
} | ||
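observe() now patches componentDidMount/componentWillUnmount (instead of the deprecated componentWillMount) and accepts either a single store or an array of stores. A hedged TSX usage sketch; PatientList and the patients store are placeholders (patients would be a Store instance created as in the constructor sketch above):

import * as React from "react";
import { observe } from "apical-store";

class PatientList extends React.Component {
  render() {
    return <ul>{patients.list.map((p) => <li key={p.id}>{p.id}</li>)}</ul>;
  }
}

export default observe(patients)(PatientList);   // or observe([patients, otherStore])(PatientList)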
class IDB { | ||
constructor({ name }) { | ||
const request = indexedDB.open(name); | ||
request.onupgradeneeded = function (event) { | ||
const db = event.target.result; | ||
if (!db.objectStoreNames.contains(name)) { | ||
const objectStore = db.createObjectStore(name); | ||
objectStore.createIndex("idIndex", "_id", { unique: true }); | ||
} | ||
if (!db.objectStoreNames.contains("metadata")) { | ||
db.createObjectStore("metadata"); | ||
} | ||
}; | ||
const dbp = this.pr(request); | ||
this.store = (txMode, callback) => dbp.then((db) => callback(db | ||
.transaction(name, txMode, { durability: "relaxed" }) | ||
.objectStore(name))); | ||
this.metadataStore = (txMode, callback) => dbp.then((db) => callback(db | ||
.transaction("metadata", txMode, { durability: "relaxed" }) | ||
.objectStore("metadata"))); | ||
} | ||
/** | ||
* Converts IDB requests/transactions to promises. | ||
*/ | ||
pr(req) { | ||
return new Promise((resolve, reject) => { | ||
// @ts-ignore - file size hacks | ||
req.oncomplete = req.onsuccess = () => resolve(req.result); | ||
// @ts-ignore - file size hacks | ||
req.onabort = req.onerror = () => reject(req.error); | ||
}); | ||
} | ||
/** | ||
* Converts cursor iterations to promises. | ||
*/ | ||
eachCursor(store, callback) { | ||
store.openCursor().onsuccess = function () { | ||
if (!this.result) | ||
return; | ||
callback(this.result); | ||
this.result.continue(); | ||
}; | ||
return this.pr(store.transaction); | ||
} | ||
/** | ||
* Set multiple values at once. This is faster than writing values one at a time. | ||
* It's also atomic – if one of the pairs can't be added, none will be added. | ||
*/ | ||
put(entries) { | ||
return this.store("readwrite", (store) => { | ||
entries.forEach((entry) => store.put(entry[1], entry[0])); | ||
return this.pr(store.transaction); | ||
}); | ||
} | ||
/** | ||
* Get all documents in the store. | ||
*/ | ||
getAll() { | ||
return this.store("readonly", (store) => __awaiter(this, void 0, void 0, function* () { | ||
let rows = []; | ||
if (store.getAll) { | ||
rows = yield this.pr(store.getAll()); | ||
} | ||
else { | ||
yield this.eachCursor(store, (cursor) => rows.push(cursor.value)); | ||
} | ||
return rows; | ||
})); | ||
} | ||
getVersion() { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
return Number((yield this.getMetadata("version")) || 0); | ||
}); | ||
} | ||
putVersion(version) { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
yield this.setMetadata("version", JSON.stringify(version)); | ||
}); | ||
} | ||
getDeferred() { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
return JSON.parse((yield this.getMetadata("deferred")) || "[]"); | ||
}); | ||
} | ||
putDeferred(arr) { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
yield this.setMetadata("deferred", JSON.stringify(arr)); | ||
}); | ||
} | ||
/** | ||
* Set metadata with a key. | ||
*/ | ||
setMetadata(key, value) { | ||
return this.metadataStore("readwrite", (store) => { | ||
store.put(value, key); | ||
return this.pr(store.transaction); | ||
}); | ||
} | ||
/** | ||
* Get metadata by its key. | ||
*/ | ||
getMetadata(key) { | ||
return this.metadataStore("readonly", (store) => this.pr(store.get(key))); | ||
} | ||
/** | ||
* Clear all values in the store. | ||
*/ | ||
clear() { | ||
return this.store("readwrite", (store) => { | ||
store.clear(); | ||
return this.pr(store.transaction); | ||
}); | ||
} | ||
clearMetadata() { | ||
return this.metadataStore("readwrite", (store) => { | ||
store.clear(); | ||
return this.pr(store.transaction); | ||
}); | ||
} | ||
} | ||
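IDB is the bundled LocalPersistence implementation: serialized documents live in an object store named after the table, while the sync version and deferred offline writes live in a separate "metadata" store. A hedged sketch of direct use (inside an async function):

const local = new IDB({ name: "patients" });

await local.put([["id1", '{"id":"id1"}']]);   // atomic bulk write of [key, serialized doc] pairs
const docs = await local.getAll();            // every serialized document in the store
await local.putVersion(42);
const version = await local.getVersion();     // 42, or 0 when never set
await local.putDeferred([]);                  // deferred writes are stored as a JSON string
const deferred = await local.getDeferred();   // []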
class CloudFlareApexoDB { | ||
constructor({ endpoint, token, name, }) { | ||
this.baseUrl = endpoint; | ||
this.token = token; | ||
this.table = name; | ||
} | ||
getSince() { | ||
return __awaiter(this, arguments, void 0, function* (version = 0) { | ||
let page = 0; | ||
let nextPage = true; | ||
let fetchedVersion = 0; | ||
let result = []; | ||
while (nextPage) { | ||
const url = `${this.baseUrl}/${this.table}/${version}/${page}`; | ||
const response = yield fetch(url, { | ||
method: "GET", | ||
headers: { | ||
Authorization: `Bearer ${this.token}`, | ||
}, | ||
}); | ||
const res = yield response.json(); | ||
const output = JSON.parse(res.output); | ||
nextPage = output.rows.length > 0 && version !== 0; | ||
fetchedVersion = output.version; | ||
result = result.concat(output.rows); | ||
page = page + 1; | ||
} | ||
return { version: fetchedVersion, rows: result }; | ||
}); | ||
} | ||
getVersion() { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const url = `${this.baseUrl}/${this.table}/0/Infinity`; | ||
const response = yield fetch(url, { | ||
method: "GET", | ||
headers: { | ||
Authorization: `Bearer ${this.token}`, | ||
}, | ||
}); | ||
const res = yield response.json(); | ||
if (res.success) | ||
return Number(JSON.parse(res.output).version); | ||
else | ||
return 0; | ||
}); | ||
} | ||
put(data) { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const reqBody = data.reduce((record, item) => { | ||
record[item[0]] = item[1]; | ||
return record; | ||
}, {}); | ||
const url = `${this.baseUrl}/${this.table}`; | ||
yield fetch(url, { | ||
method: "PUT", | ||
headers: { | ||
Authorization: `Bearer ${this.token}`, | ||
}, | ||
body: JSON.stringify(reqBody), | ||
}); | ||
return; | ||
}); | ||
} | ||
} | ||
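CloudFlareApexoDB is the bundled RemotePersistence implementation; every call hits endpoint/name/... with a bearer token. A hedged sketch of the request shapes (endpoint and token are placeholders, inside an async function):

const remote = new CloudFlareApexoDB({
  endpoint: "https://sync.example.com",
  token: "<bearer token>",
  name: "patients",
});

const latest = await remote.getVersion();        // GET  .../patients/0/Infinity
const updates = await remote.getSince(latest);   // GET  .../patients/<version>/<page>, paged until an empty page
await remote.put([["id1", '{"id":"id1"}']]);     // PUT  .../patients with { id1: "<serialized doc>" } as the body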
exports.CloudFlareApexoDB = CloudFlareApexoDB; | ||
exports.Document = Document; | ||
exports.IDB = IDB; | ||
exports.Store = Store; | ||
@@ -1326,0 +1426,0 @@ exports.SubDocument = SubDocument; |
export { Store } from "./store"; | ||
export { Document, SubDocument, mapSubModel } from "./model"; | ||
export { observe } from "./react"; | ||
export { LocalPersistence, IDB, deferredArray } from "./persistence/local"; | ||
export { CloudFlareApexoDB } from "./persistence/remote"; |
export { Store } from "./store"; | ||
export { Document, SubDocument, mapSubModel } from "./model"; | ||
export { observe } from "./react"; | ||
export { IDB } from "./persistence/local"; | ||
export { CloudFlareApexoDB } from "./persistence/remote"; |
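The 0.0.71 public surface, as re-exported above, can be consumed roughly like this (the Patient model and its field are illustrative):

import { Store, Document, IDB, CloudFlareApexoDB, observe } from "apical-store";

class Patient extends Document {
  name: string = "";
}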
import { Document } from "./model"; | ||
import { ObservableArray } from "./observable"; | ||
declare class Component { | ||
setState(obj: any): void; | ||
componentWillUnmount(): void; | ||
componentWillMount(): void; | ||
} | ||
export declare function observe<D extends Document, G extends ObservableArray<D[]>>(input: G): <C extends typeof Component>(component: C) => void; | ||
export {}; | ||
import { Store } from "./store"; | ||
/** | ||
* Enhances a React component to automatically re-render when the observed store changes. | ||
* @param store - A Store instance (or an array of stores) whose documents extend Document. | ||
* @returns A higher-order function that takes a React component as an argument. | ||
*/ | ||
export declare function observe<D extends Document, G extends Store<D>>(store: G | G[]): (component: any) => any; |
@@ -1,23 +0,37 @@ | ||
class Component { | ||
setState(obj) { } | ||
componentWillUnmount() { } | ||
componentWillMount() { } | ||
} | ||
export function observe(input) { | ||
/** | ||
* Enhances a React component to automatically re-render when the observed store changes. | ||
* @param store - A Store instance (or an array of stores) whose documents extend Document. | ||
* @returns A higher-order function that takes a React component as an argument. | ||
*/ | ||
export function observe(store) { | ||
return function (component) { | ||
let oCWM = component.prototype.componentWillMount || (() => { }); | ||
component.prototype.componentWillMount = function () { | ||
let unObservers = []; | ||
const originalComponentDidMount = component.prototype.componentDidMount || (() => { }); | ||
component.prototype.componentDidMount = function () { | ||
const unObservers = []; | ||
this.setState({}); | ||
const observer = () => this.setState({}); | ||
input.observe(observer); | ||
unObservers.push(() => input.unobserve(observer)); | ||
const oCWU = this.componentWillUnmount || (() => { }); | ||
if (Array.isArray(store)) { | ||
store.forEach((singleStore) => { | ||
// @ts-ignore | ||
singleStore.$$observableObject.observe(observer); | ||
unObservers.push(() => | ||
// @ts-ignore | ||
singleStore.$$observableObject.unobserve(observer)); | ||
}); | ||
} | ||
else { | ||
// @ts-ignore | ||
store.$$observableObject.observe(observer); | ||
// @ts-ignore | ||
store.$$observableObject.unobserve(observer); | ||
} | ||
const originalComponentWillUnmount = this.componentWillUnmount || (() => { }); | ||
this.componentWillUnmount = () => { | ||
unObservers.forEach((u) => u()); | ||
oCWU.call(this); | ||
unObservers.forEach((unObserver) => unObserver()); | ||
originalComponentWillUnmount.call(this); | ||
}; | ||
oCWM.call(this); | ||
originalComponentDidMount.call(this); | ||
}; | ||
return component; | ||
}; | ||
} |
@@ -0,14 +1,14 @@ | ||
import { LocalPersistence } from "./persistence/local"; | ||
import { Document } from "./model"; | ||
export type deferredArray = { | ||
ts: number; | ||
data: string; | ||
}[]; | ||
import { RemotePersistence } from "./persistence/remote"; | ||
export declare class Store<T extends Document> { | ||
isOnline: boolean; | ||
deferredPresent: boolean; | ||
private $$idb; | ||
onSyncStart: () => void; | ||
onSyncEnd: () => void; | ||
private $$observableObject; | ||
private $$changes; | ||
private $$token; | ||
private $$syncService; | ||
private $$localPersistence; | ||
private $$remotePersistence; | ||
private $$debounceRate; | ||
@@ -19,7 +19,3 @@ private $$lastProcessChanges; | ||
private $$decode; | ||
constructor({ name, token, persist, endpoint, debounceRate, model, encode, decode, }: { | ||
name: string; | ||
token: string; | ||
persist?: boolean; | ||
endpoint?: string; | ||
constructor({ debounceRate, model, encode, decode, onSyncStart, onSyncEnd, localPersistence, remotePersistence, }?: { | ||
debounceRate?: number; | ||
@@ -29,9 +25,26 @@ model?: typeof Document; | ||
decode?: (input: string) => string; | ||
onSyncStart?: () => void; | ||
onSyncEnd?: () => void; | ||
localPersistence?: LocalPersistence; | ||
remotePersistence?: RemotePersistence; | ||
}); | ||
/** | ||
* Serializes an item of type T into an encoded JSON string. | ||
* Date objects are converted to a custom format before encoding. | ||
* @param item An instance of type T which extends Document. | ||
* @returns An encoded JSON string representing the item. | ||
*/ | ||
private $$serialize; | ||
/** | ||
* Decodes a serialized string, parses it into a JavaScript object, and converts custom date formats back into Date objects. | ||
* @param line A string representing the serialized data. | ||
* @returns A new instance of the model with the deserialized data. | ||
*/ | ||
private $$deserialize; | ||
/** | ||
* Loads data from the local persistence layer, deserializes it, and updates the observable array silently without triggering observers. | ||
*/ | ||
private $$loadFromLocal; | ||
private $$processChanges; | ||
private $$setupObservers; | ||
private $$localVersion; | ||
/** | ||
@@ -69,7 +82,3 @@ * | ||
*/ | ||
$$syncTry(): Promise<{ | ||
pushed?: number; | ||
pulled?: number; | ||
exception?: string; | ||
}>; | ||
private $$syncTry; | ||
private $$sync; | ||
@@ -80,4 +89,8 @@ /** | ||
get list(): T[]; | ||
copy: T[]; | ||
getByID(id: string): T | undefined; | ||
add(item: T): void; | ||
new: <T_1 extends { | ||
_stripDefaults?<T_2 extends any>(this: T_2): T_2; | ||
}>(this: new () => T_1, data?: import("./model").RecursivePartial<T_1>) => T_1; | ||
delete(item: T): void; | ||
@@ -84,0 +97,0 @@ deleteByIndex(index: number): void; |
@@ -10,22 +10,28 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||
}; | ||
import { observable } from "./observable"; | ||
import { IDB } from "./idb"; | ||
import { SyncService } from "./sync-service"; | ||
import { Observable } from "./observable"; | ||
import { debounce } from "./debounce"; | ||
import { Document } from "./model"; | ||
export class Store { | ||
constructor({ name, token, persist = true, endpoint, debounceRate, model, encode, decode, }) { | ||
constructor({ debounceRate, model, encode, decode, onSyncStart, onSyncEnd, localPersistence, remotePersistence, } = {}) { | ||
this.isOnline = true; | ||
this.deferredPresent = false; | ||
this.$$observableObject = observable([]); | ||
this.onSyncStart = () => { }; | ||
this.onSyncEnd = () => { }; | ||
this.$$observableObject = new Observable([]); | ||
this.$$changes = []; | ||
this.$$syncService = null; | ||
this.$$debounceRate = 100; | ||
this.$$lastProcessChanges = 0; | ||
this.$$model = Document; | ||
this.$$encode = (x) => x; | ||
this.$$decode = (x) => x; | ||
this.copy = this.$$observableObject.copy; | ||
this.new = this.$$model.new; | ||
this.sync = debounce(this.$$sync.bind(this), this.$$debounceRate); | ||
this.$$idb = new IDB(name); | ||
this.$$token = token; | ||
this.$$model = model || Document; | ||
if (onSyncStart) { | ||
this.onSyncStart = onSyncStart; | ||
} | ||
if (onSyncEnd) { | ||
this.onSyncEnd = onSyncEnd; | ||
} | ||
if (encode) { | ||
@@ -40,8 +46,17 @@ this.$$encode = encode; | ||
} | ||
if (persist && endpoint) { | ||
if (localPersistence) { | ||
this.$$localPersistence = localPersistence; | ||
this.$$loadFromLocal(); | ||
this.$$setupObservers(); | ||
this.$$syncService = new SyncService(endpoint, this.$$token, name); | ||
} | ||
if (remotePersistence) { | ||
this.$$remotePersistence = remotePersistence; | ||
} | ||
} | ||
/** | ||
* Serializes an item of type T into an encoded JSON string. | ||
* Date objects are converted to a custom format before encoding. | ||
* @param item An instance of type T which extends Document. | ||
* @returns An encoded JSON string representing the item. | ||
*/ | ||
$$serialize(item) { | ||
@@ -60,8 +75,13 @@ const stripped = item._stripDefaults ? item._stripDefaults() : item; | ||
} | ||
/** | ||
* Decodes a serialized string, parses it into a JavaScript object, and converts custom date formats back into Date objects. | ||
* @param line A string representing the serialized data. | ||
* @returns A new instance of the model with the deserialized data. | ||
*/ | ||
$$deserialize(line) { | ||
line = this.$$decode(line); | ||
const item = JSON.parse(line, function (key, val) { | ||
const item = JSON.parse(line, (key, val) => { | ||
if (key === "$$date") | ||
return new Date(val); | ||
let t = typeof val; | ||
const t = typeof val; | ||
if (t === "string" || t === "number" || t === "boolean" || val === null) | ||
@@ -75,5 +95,13 @@ return val; | ||
} | ||
/** | ||
* Loads data from the local persistence layer, deserializes it, and updates the observable array silently without triggering observers. | ||
*/ | ||
$$loadFromLocal() { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const deserialized = (yield this.$$idb.values()).map((x) => this.$$deserialize(x)); | ||
// Check if a local persistence layer is available | ||
if (!this.$$localPersistence) | ||
return; | ||
// Retrieve values from local persistence and deserialize them | ||
const deserialized = yield Promise.all((yield this.$$localPersistence.getAll()).map((x) => this.$$deserialize(x))); | ||
// Update the observable array silently with deserialized data | ||
this.$$observableObject.silently((o) => { | ||
@@ -86,5 +114,9 @@ o.splice(0, o.length, ...deserialized); | ||
return __awaiter(this, void 0, void 0, function* () { | ||
if (!this.$$localPersistence) | ||
return; | ||
if (this.$$changes.length === 0) | ||
return; | ||
this.onSyncStart(); | ||
this.$$lastProcessChanges = Date.now(); | ||
const toWriteLocally = []; | ||
const toSendRemotely = {}; | ||
const toWrite = []; | ||
const toDeffer = []; | ||
@@ -97,4 +129,3 @@ const changesToProcess = [...this.$$changes]; // Create a copy of changes to process | ||
const serializedLine = this.$$serialize(item); | ||
toWriteLocally.push([item.id, serializedLine]); | ||
toSendRemotely[item.id] = serializedLine; | ||
toWrite.push([item.id, serializedLine]); | ||
toDeffer.push({ | ||
@@ -105,8 +136,10 @@ ts: Date.now(), | ||
} | ||
yield this.$$idb.setBulk(toWriteLocally); | ||
const deferred = (yield this.$$idb.getMetadata("deferred")) || "[]"; | ||
let deferredArray = JSON.parse(deferred); | ||
if (this.isOnline && this.$$syncService && deferredArray.length === 0) { | ||
yield this.$$localPersistence.put(toWrite); | ||
let deferredArray = yield this.$$localPersistence.getDeferred(); | ||
if (this.isOnline && | ||
this.$$remotePersistence && | ||
deferredArray.length === 0) { | ||
try { | ||
yield this.$$syncService.sendUpdates(toSendRemotely); | ||
yield this.$$remotePersistence.put(toWrite); | ||
this.onSyncEnd(); | ||
return; | ||
@@ -125,5 +158,5 @@ } | ||
*/ | ||
deferredArray = deferredArray.concat(...toDeffer); | ||
yield this.$$idb.setMetadata("deferred", JSON.stringify(deferredArray)); | ||
yield this.$$localPersistence.putDeferred(deferredArray.concat(...toDeffer)); | ||
this.deferredPresent = true; | ||
this.onSyncEnd(); | ||
}); | ||
@@ -147,7 +180,2 @@ } | ||
} | ||
$$localVersion() { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
return Number((yield this.$$idb.getMetadata("version")) || 0); | ||
}); | ||
} | ||
/** | ||
@@ -187,7 +215,12 @@ * | ||
return __awaiter(this, void 0, void 0, function* () { | ||
if (!this.$$syncService) { | ||
if (!this.$$localPersistence) { | ||
return { | ||
exception: "Sync service not available", | ||
exception: "Local persistence not available", | ||
}; | ||
} | ||
if (!this.$$remotePersistence) { | ||
return { | ||
exception: "Remote persistence not available", | ||
}; | ||
} | ||
if (!this.isOnline) { | ||
@@ -199,6 +232,5 @@ return { | ||
try { | ||
const localVersion = yield this.$$localVersion(); | ||
const remoteVersion = yield this.$$syncService.latestVersion(); | ||
const deferred = (yield this.$$idb.getMetadata("deferred")) || "[]"; | ||
let deferredArray = JSON.parse(deferred); | ||
const localVersion = yield this.$$localPersistence.getVersion(); | ||
const remoteVersion = yield this.$$remotePersistence.getVersion(); | ||
let deferredArray = yield this.$$localPersistence.getDeferred(); | ||
if (localVersion === remoteVersion && deferredArray.length === 0) { | ||
@@ -210,11 +242,14 @@ return { | ||
// fetch updates since our local version | ||
const remoteUpdates = yield this.$$syncService.fetchData(localVersion); | ||
const remoteUpdates = yield this.$$remotePersistence.getSince(localVersion); | ||
// check for conflicts | ||
deferredArray = deferredArray.filter((x) => { | ||
var _a; | ||
let item = this.$$deserialize(x.data); | ||
const conflict = remoteUpdates.rows.findIndex((y) => y.id === item.id); | ||
// take row-specific version if available, otherwise rely on latest version | ||
const comparison = Number(((_a = remoteUpdates.rows[conflict]) === null || _a === void 0 ? void 0 : _a.ts) || remoteVersion); | ||
if (conflict === -1) { | ||
return true; | ||
} | ||
else if (x.ts > remoteVersion) { | ||
else if (x.ts > comparison) { | ||
// there's a conflict, but the local change is newer | ||
@@ -231,18 +266,18 @@ remoteUpdates.rows.splice(conflict, 1); | ||
// we should start with remote | ||
for (const remote of remoteUpdates.rows) { | ||
yield this.$$idb.set(remote.id, remote.data); | ||
} | ||
yield this.$$localPersistence.put(remoteUpdates.rows.map((row) => [row.id, row.data])); | ||
// then local | ||
const updatedRows = {}; | ||
const updatedRows = new Map(); | ||
for (const local of deferredArray) { | ||
let item = this.$$deserialize(local.data); | ||
updatedRows[item.id] = local.data; | ||
updatedRows.set(item.id, local.data); | ||
// latest deferred write wins since it would overwrite the previous one | ||
} | ||
yield this.$$syncService.sendUpdates(updatedRows); | ||
yield this.$$remotePersistence.put([...updatedRows.keys()].map((x) => [x, updatedRows.get(x)])); | ||
// reset deferred | ||
yield this.$$idb.setMetadata("deferred", "[]"); | ||
yield this.$$localPersistence.putDeferred([]); | ||
this.deferredPresent = false; | ||
// set local version | ||
yield this.$$idb.setMetadata("version", remoteUpdates.version.toString()); | ||
// set local version to the version given by the current request | ||
// this might be outdated as soon as this function ends | ||
// that's why this function will run on a while loop (below) | ||
yield this.$$localPersistence.putVersion(remoteUpdates.version); | ||
// but if we had deferred updates then the remoteUpdates.version is outdated | ||
@@ -272,11 +307,18 @@ // so we need to fetch the latest version again | ||
return __awaiter(this, void 0, void 0, function* () { | ||
this.onSyncStart(); | ||
let tries = []; | ||
let exceptionOccurred = false; | ||
while (!exceptionOccurred) { | ||
const result = yield this.$$syncTry(); | ||
if (result.exception) { | ||
exceptionOccurred = true; | ||
try { | ||
let exceptionOccurred = false; | ||
while (!exceptionOccurred) { | ||
const result = yield this.$$syncTry(); | ||
if (result.exception) { | ||
exceptionOccurred = true; | ||
} | ||
tries.push(result); | ||
} | ||
tries.push(result); | ||
} | ||
catch (e) { | ||
console.error(e); | ||
} | ||
this.onSyncEnd(); | ||
return tries; | ||
@@ -289,15 +331,15 @@ }); | ||
get list() { | ||
return this.$$observableObject.observable.filter((x) => !x.$$deleted); | ||
return this.$$observableObject.target.filter((x) => !x.$$deleted); | ||
} | ||
getByID(id) { | ||
return this.$$observableObject.observable.find((x) => x.id === id); | ||
return this.$$observableObject.target.find((x) => x.id === id); | ||
} | ||
add(item) { | ||
if (this.$$observableObject.observable.find((x) => x.id === item.id)) { | ||
if (this.$$observableObject.target.find((x) => x.id === item.id)) { | ||
throw new Error("Duplicate ID detected: " + JSON.stringify(item.id)); | ||
} | ||
this.$$observableObject.observable.push(item); | ||
this.$$observableObject.target.push(item); | ||
} | ||
delete(item) { | ||
const index = this.$$observableObject.observable.findIndex((x) => x.id === item.id); | ||
const index = this.$$observableObject.target.findIndex((x) => x.id === item.id); | ||
if (index === -1) { | ||
@@ -309,9 +351,9 @@ throw new Error("Item not found."); | ||
deleteByIndex(index) { | ||
if (!this.$$observableObject.observable[index]) { | ||
if (!this.$$observableObject.target[index]) { | ||
throw new Error("Item not found."); | ||
} | ||
this.$$observableObject.observable[index].$$deleted = true; | ||
this.$$observableObject.target[index].$$deleted = true; | ||
} | ||
deleteByID(id) { | ||
const index = this.$$observableObject.observable.findIndex((x) => x.id === id); | ||
const index = this.$$observableObject.target.findIndex((x) => x.id === id); | ||
if (index === -1) { | ||
@@ -323,15 +365,20 @@ throw new Error("Item not found."); | ||
updateByIndex(index, item) { | ||
if (!this.$$observableObject.observable[index]) { | ||
if (!this.$$observableObject.target[index]) { | ||
throw new Error("Item not found."); | ||
} | ||
if (this.$$observableObject.observable[index].id !== item.id) { | ||
if (this.$$observableObject.target[index].id !== item.id) { | ||
throw new Error("ID mismatch."); | ||
} | ||
this.$$observableObject.observable[index] = item; | ||
this.$$observableObject.target[index] = item; | ||
} | ||
isUpdated() { | ||
return __awaiter(this, void 0, void 0, function* () { | ||
return this.$$syncService ? ((yield this.$$syncService.latestVersion()) === (yield this.$$localVersion())) : true; | ||
if (this.$$localPersistence && this.$$remotePersistence) { | ||
return ((yield this.$$localPersistence.getVersion()) === | ||
(yield this.$$remotePersistence.getVersion())); | ||
} | ||
else | ||
return false; | ||
}); | ||
} | ||
} |
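The conflict handling above boils down to one rule: a deferred local write survives only when the remote snapshot has no row with the same id, or when the local timestamp beats that row's own `ts` (falling back to the overall remote version when the row carries none); in that case the competing remote row is dropped. A standalone sketch of that filter, with simplified types and a hypothetical `getId` helper standing in for deserialization:

```ts
type Deferred = { ts: number; data: string };
type RemoteRow = { id: string; data: string; ts?: string };

// Hypothetical helper mirroring the deferred-vs-remote filter in the sync code.
function resolveConflicts(
  deferred: Deferred[],
  remoteRows: RemoteRow[],
  remoteVersion: number,
  getId: (data: string) => string
): { keepLocal: Deferred[]; keepRemote: RemoteRow[] } {
  const keepRemote = [...remoteRows];
  const keepLocal = deferred.filter((local) => {
    const id = getId(local.data);
    const conflict = keepRemote.findIndex((row) => row.id === id);
    if (conflict === -1) return true; // no remote counterpart: keep the local write
    // row-specific version if available, otherwise the latest remote version
    const comparison = Number(keepRemote[conflict]?.ts || remoteVersion);
    if (local.ts > comparison) {
      keepRemote.splice(conflict, 1); // local is newer: the remote row loses
      return true;
    }
    return false; // remote wins: the deferred local change is discarded
  });
  return { keepLocal, keepRemote };
}
```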
{ | ||
"name": "apical-store", | ||
"version": "0.0.51", | ||
"version": "0.0.71", | ||
"description": "Mobx-Syncable-IndexedDB", | ||
@@ -5,0 +5,0 @@ "main": "dist/bundle.js", |
export { Store } from "./store"; | ||
export { Document, SubDocument, mapSubModel } from "./model"; | ||
export { observe } from "./react"; | ||
export { observe } from "./react"; | ||
export { LocalPersistence, IDB, deferredArray } from "./persistence/local"; | ||
export { CloudFlareApexoDB } from "./persistence/remote"; |
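Taken together with the new exports above, wiring a store in 0.0.71 means passing persistence adapters instead of the old `token`/`persist`/`endpoint` options. A minimal sketch, assuming a hypothetical `Staff` model (the endpoint and store name below are the ones used in the test suite later in this diff):

```ts
import { Store, Document, IDB, CloudFlareApexoDB } from "apical-store";

// Hypothetical document model for illustration only.
class Staff extends Document {
  name: string = "";
}

const staff = new Store<Staff>({
  model: Staff,
  localPersistence: new IDB({ name: "staff" }),
  remotePersistence: new CloudFlareApexoDB({
    name: "staff",
    token: "<jwt-token>",
    endpoint: "https://apexo-database.vercel.app",
  }),
  debounceRate: 100,
});
```

Both adapters are optional: without `localPersistence` nothing is loaded or written locally, and without `remotePersistence` a call to `sync()` reports "Remote persistence not available".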
import { Document } from "./model"; | ||
import { ObservableArray } from "./observable"; | ||
import { Store } from "./store"; | ||
class Component { | ||
setState(obj: any) {} | ||
componentWillUnmount() {} | ||
componentDidMount() {} | ||
} | ||
/** | ||
* Enhances a React component to automatically re-render when the observed store changes. | ||
* @param store - An instance of Store that extends Document. | ||
* @returns A higher-order function that takes a React component as an argument. | ||
*/ | ||
export function observe<D extends Document, G extends Store<D>>( | ||
store: G | G[] | ||
): (component: any) => any { | ||
return function (component: any) { | ||
const originalComponentDidMount = | ||
component.prototype.componentDidMount || (() => {}); | ||
export function observe<D extends Document, G extends Store<D>>( | ||
store: G | ||
) { | ||
return function <C extends typeof Component>(component: C) { | ||
let oCWM = component.prototype.componentDidMount || (() => {}); | ||
component.prototype.componentDidMount = function () { | ||
let unObservers: (() => void)[] = []; | ||
this.setState({}); | ||
const observer = () => this.setState({}); | ||
store.$$observableObject.observe(observer); | ||
unObservers.push(() => store.$$observableObject.unobserve(observer)); | ||
const oCWU = this.componentWillUnmount || (() => {}); | ||
this.componentWillUnmount = () => { | ||
unObservers.forEach((u) => u()); | ||
oCWU.call(this); | ||
} | ||
oCWM.call(this); | ||
const unObservers: (() => void)[] = []; | ||
this.setState({}); | ||
const observer = () => this.setState({}); | ||
if (Array.isArray(store)) { | ||
store.forEach((singleStore) => { | ||
// @ts-ignore | ||
singleStore.$$observableObject.observe(observer); | ||
unObservers.push(() => | ||
// @ts-ignore | ||
singleStore.$$observableObject.unobserve(observer) | ||
); | ||
}); | ||
} else { | ||
// @ts-ignore | ||
store.$$observableObject.observe(observer); | ||
// @ts-ignore | ||
store.$$observableObject.unobserve(observer); | ||
} | ||
const originalComponentWillUnmount = | ||
this.componentWillUnmount || (() => {}); | ||
this.componentWillUnmount = () => { | ||
unObservers.forEach((unObserver) => unObserver()); | ||
originalComponentWillUnmount.call(this); | ||
}; | ||
originalComponentDidMount.call(this); | ||
}; | ||
return component; | ||
}; | ||
} |
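For context, a usage sketch of the higher-order function above applied to a plain class component; the `staffStore` import and component names are illustrative, not part of the library:

```ts
import * as React from "react";
import { observe } from "apical-store";
import { staffStore } from "./stores"; // hypothetical module exporting a Store instance

class StaffCountBase extends React.Component {
  render() {
    // `list` excludes soft-deleted documents.
    return React.createElement("span", null, `${staffStore.list.length} staff`);
  }
}

// componentDidMount/componentWillUnmount are patched so the wrapped component
// re-renders (via setState({})) whenever the observed store changes.
export const StaffCount = observe(staffStore)(StaffCountBase);
```

Note that the new signature accepts a single store rather than an array, even though the mount logic still carries an array branch.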
240
src/store.ts
@@ -1,21 +0,22 @@ | ||
import { Change, observable, ObservableArray } from "./observable"; | ||
import { IDB } from "./idb"; | ||
import { SyncService } from "./sync-service"; | ||
import { Change, Observable } from "./observable"; | ||
import { deferredArray, LocalPersistence } from "./persistence/local"; | ||
import { debounce } from "./debounce"; | ||
import { Document, RecursivePartial } from "./model"; | ||
import { Document } from "./model"; | ||
import { RemotePersistence } from "./persistence/remote"; | ||
export type deferredArray = { ts: number; data: string }[]; | ||
export class Store<T extends Document> { | ||
export class Store< | ||
T extends Document, | ||
> { | ||
public isOnline = true; | ||
public deferredPresent: boolean = false; | ||
private $$idb: IDB; | ||
$$observableObject: ObservableArray<T[]> = observable([] as T[]); | ||
public onSyncStart: () => void = () => {}; | ||
public onSyncEnd: () => void = () => {}; | ||
private $$observableObject: Observable<T> = new Observable([] as T[]); | ||
private $$changes: Change<T[]>[] = []; | ||
private $$token: string; | ||
private $$syncService: SyncService | null = null; | ||
private $$token: string | undefined; | ||
private $$localPersistence: LocalPersistence | undefined; | ||
private $$remotePersistence: RemotePersistence | undefined; | ||
private $$debounceRate: number = 100; | ||
private $$lastProcessChanges: number = 0; | ||
private $$model: typeof Document; | ||
private $$model: typeof Document = Document; | ||
private $$encode: (input: string) => string = (x) => x; | ||
@@ -25,6 +26,2 @@ private $$decode: (input: string) => string = (x) => x; | ||
constructor({ | ||
name, | ||
token, | ||
persist = true, | ||
endpoint, | ||
debounceRate, | ||
@@ -34,7 +31,7 @@ model, | ||
decode, | ||
onSyncStart, | ||
onSyncEnd, | ||
localPersistence, | ||
remotePersistence, | ||
}: { | ||
name: string; | ||
token: string; | ||
persist?: boolean; | ||
endpoint?: string; | ||
debounceRate?: number; | ||
@@ -44,7 +41,14 @@ model?: typeof Document; | ||
decode?: (input: string) => string; | ||
}) { | ||
//TODO: token should be optional | ||
this.$$idb = new IDB(name); | ||
this.$$token = token; | ||
onSyncStart?: () => void; | ||
onSyncEnd?: () => void; | ||
localPersistence?: LocalPersistence; | ||
remotePersistence?: RemotePersistence; | ||
} = {}) { | ||
this.$$model = model || Document; | ||
if (onSyncStart) { | ||
this.onSyncStart = onSyncStart; | ||
} | ||
if (onSyncEnd) { | ||
this.onSyncEnd = onSyncEnd; | ||
} | ||
if (encode) { | ||
@@ -59,10 +63,19 @@ this.$$encode = encode; | ||
} | ||
if (persist && endpoint) { | ||
if (localPersistence) { | ||
this.$$localPersistence = localPersistence; | ||
this.$$loadFromLocal(); | ||
this.$$setupObservers(); | ||
this.$$syncService = new SyncService(endpoint, this.$$token, name); | ||
} | ||
if (remotePersistence) { | ||
this.$$remotePersistence = remotePersistence; | ||
} | ||
} | ||
private $$serialize(item: T) { | ||
/** | ||
* Serializes an item of type T into an encoded JSON string. | ||
* Date objects are converted to a custom format before encoding. | ||
* @param item An instance of type T which extends Document. | ||
* @returns An encoded JSON string representing the item. | ||
*/ | ||
private $$serialize(item: T): string { | ||
const stripped = item._stripDefaults ? item._stripDefaults() : item; | ||
@@ -79,7 +92,12 @@ const str = JSON.stringify(stripped, function (key, value) { | ||
private $$deserialize(line: string) { | ||
/** | ||
* Decodes a serialized string, parses it into a JavaScript object, and converts custom date formats back into Date objects. | ||
* @param line A string representing the serialized data. | ||
* @returns A new instance of the model with the deserialized data. | ||
*/ | ||
private $$deserialize(line: string): any { | ||
line = this.$$decode(line); | ||
const item = JSON.parse(line, function (key, val) { | ||
const item = JSON.parse(line, (key, val) => { | ||
if (key === "$$date") return new Date(val); | ||
let t = typeof val; | ||
const t = typeof val; | ||
if (t === "string" || t === "number" || t === "boolean" || val === null) | ||
@@ -93,6 +111,15 @@ return val; | ||
private async $$loadFromLocal() { | ||
const deserialized = (await this.$$idb.values()).map((x) => | ||
this.$$deserialize(x) | ||
) as T[]; | ||
/** | ||
* Loads data from the local persistence layer, deserializes it, and updates the observable array silently without triggering observers. | ||
*/ | ||
private async $$loadFromLocal(): Promise<void> { | ||
// Check that a local persistence layer is available | ||
if (!this.$$localPersistence) return; | ||
// Retrieve values from local persistence and deserialize them | ||
const deserialized: T[] = await Promise.all( | ||
(await this.$$localPersistence.getAll()).map((x) => this.$$deserialize(x)) | ||
); | ||
// Update the observable array silently with deserialized data | ||
this.$$observableObject.silently((o) => { | ||
@@ -104,6 +131,8 @@ o.splice(0, o.length, ...deserialized); | ||
private async $$processChanges() { | ||
if (!this.$$localPersistence) return; | ||
if (this.$$changes.length === 0) return; | ||
this.onSyncStart(); | ||
this.$$lastProcessChanges = Date.now(); | ||
const toWriteLocally: [string, string][] = []; | ||
const toSendRemotely: { [key: string]: string } = {}; | ||
const toWrite: [string, string][] = []; | ||
const toDeffer: deferredArray = []; | ||
@@ -118,4 +147,3 @@ const changesToProcess = [...this.$$changes]; // Create a copy of changes to process | ||
const serializedLine = this.$$serialize(item); | ||
toWriteLocally.push([item.id, serializedLine]); | ||
toSendRemotely[item.id] = serializedLine; | ||
toWrite.push([item.id, serializedLine]); | ||
toDeffer.push({ | ||
@@ -126,9 +154,13 @@ ts: Date.now(), | ||
} | ||
await this.$$idb.setBulk(toWriteLocally); | ||
const deferred = (await this.$$idb.getMetadata("deferred")) || "[]"; | ||
let deferredArray = JSON.parse(deferred) as deferredArray; | ||
if (this.isOnline && this.$$syncService && deferredArray.length === 0) { | ||
await this.$$localPersistence.put(toWrite); | ||
let deferredArray = await this.$$localPersistence.getDeferred(); | ||
if ( | ||
this.isOnline && | ||
this.$$remotePersistence && | ||
deferredArray.length === 0 | ||
) { | ||
try { | ||
await this.$$syncService.sendUpdates(toSendRemotely); | ||
await this.$$remotePersistence.put(toWrite); | ||
this.onSyncEnd(); | ||
return; | ||
@@ -147,5 +179,7 @@ } catch (e) { | ||
*/ | ||
deferredArray = deferredArray.concat(...toDeffer); | ||
await this.$$idb.setMetadata("deferred", JSON.stringify(deferredArray)); | ||
await this.$$localPersistence.putDeferred( | ||
deferredArray.concat(...toDeffer) | ||
); | ||
this.deferredPresent = true; | ||
this.onSyncEnd(); | ||
} | ||
@@ -177,6 +211,2 @@ | ||
private async $$localVersion() { | ||
return Number((await this.$$idb.getMetadata("version")) || 0); | ||
} | ||
/** | ||
@@ -198,3 +228,3 @@ * | ||
* *************************************************************************** | ||
* | ||
* | ||
* The sync mechanism is as follows: | ||
@@ -220,7 +250,12 @@ * 1. Fetch the local version | ||
}> { | ||
if (!this.$$syncService) { | ||
if (!this.$$localPersistence) { | ||
return { | ||
exception: "Sync service not available", | ||
exception: "Local persistence not available", | ||
}; | ||
} | ||
if (!this.$$remotePersistence) { | ||
return { | ||
exception: "Remote persistence not available", | ||
}; | ||
} | ||
if (!this.isOnline) { | ||
@@ -232,6 +267,5 @@ return { | ||
try { | ||
const localVersion = await this.$$localVersion(); | ||
const remoteVersion = await this.$$syncService.latestVersion(); | ||
const deferred = (await this.$$idb.getMetadata("deferred")) || "[]"; | ||
let deferredArray = JSON.parse(deferred) as deferredArray; | ||
const localVersion = await this.$$localPersistence.getVersion(); | ||
const remoteVersion = await this.$$remotePersistence.getVersion(); | ||
let deferredArray = await this.$$localPersistence.getDeferred(); | ||
@@ -245,3 +279,5 @@ if (localVersion === remoteVersion && deferredArray.length === 0) { | ||
// fetch updates since our local version | ||
const remoteUpdates = await this.$$syncService.fetchData(localVersion); | ||
const remoteUpdates = await this.$$remotePersistence.getSince( | ||
localVersion | ||
); | ||
@@ -252,5 +288,13 @@ // check for conflicts | ||
const conflict = remoteUpdates.rows.findIndex((y) => y.id === item.id); | ||
// take the row-specific version if available, otherwise rely on the latest remote version | ||
const comparison = Number( | ||
( | ||
remoteUpdates.rows[conflict] as | ||
| { id: string; data: string; ts?: string } | ||
| undefined | ||
)?.ts || remoteVersion | ||
); | ||
if (conflict === -1) { | ||
return true; | ||
} else if (x.ts > remoteVersion) { | ||
} else if (x.ts > comparison) { | ||
// there's a conflict, but the local change is newer | ||
@@ -267,21 +311,25 @@ remoteUpdates.rows.splice(conflict, 1); | ||
// we should start with remote | ||
for (const remote of remoteUpdates.rows) { | ||
await this.$$idb.set(remote.id, remote.data); | ||
} | ||
await this.$$localPersistence.put( | ||
remoteUpdates.rows.map((row) => [row.id, row.data]) | ||
); | ||
// then local | ||
const updatedRows: { [key: string]: string } = {}; | ||
const updatedRows = new Map(); | ||
for (const local of deferredArray) { | ||
let item = this.$$deserialize(local.data); | ||
updatedRows[item.id] = local.data; | ||
updatedRows.set(item.id, local.data); | ||
// latest deferred write wins since it would overwrite the previous one | ||
} | ||
await this.$$syncService.sendUpdates(updatedRows); | ||
await this.$$remotePersistence.put( | ||
[...updatedRows.keys()].map((x) => [x, updatedRows.get(x)]) | ||
); | ||
// reset deferred | ||
await this.$$idb.setMetadata("deferred", "[]"); | ||
await this.$$localPersistence.putDeferred([]); | ||
this.deferredPresent = false; | ||
// set local version | ||
await this.$$idb.setMetadata("version", remoteUpdates.version.toString()); | ||
// set local version to the version given by the current request | ||
// this might be outdated as soon as this function ends | ||
// that's why this function runs in a while loop (below) | ||
await this.$$localPersistence.putVersion(remoteUpdates.version); | ||
@@ -312,11 +360,17 @@ // but if we had deferred updates then the remoteUpdates.version is outdated | ||
private async $$sync() { | ||
this.onSyncStart(); | ||
let tries: { exception?: string; pushed?: number; pulled?: number }[] = []; | ||
let exceptionOccurred = false; | ||
while (!exceptionOccurred) { | ||
const result = await this.$$syncTry(); | ||
if (result.exception) { | ||
exceptionOccurred = true; | ||
try { | ||
let exceptionOccurred = false; | ||
while (!exceptionOccurred) { | ||
const result = await this.$$syncTry(); | ||
if (result.exception) { | ||
exceptionOccurred = true; | ||
} | ||
tries.push(result); | ||
} | ||
tries.push(result); | ||
} catch (e) { | ||
console.error(e); | ||
} | ||
this.onSyncEnd(); | ||
return tries; | ||
@@ -329,18 +383,23 @@ } | ||
get list() { | ||
return this.$$observableObject.observable.filter((x) => !x.$$deleted); | ||
return this.$$observableObject.target.filter((x) => !x.$$deleted); | ||
} | ||
copy = this.$$observableObject.copy; | ||
getByID(id: string) { | ||
return this.$$observableObject.observable.find((x) => x.id === id); | ||
return this.$$observableObject.target.find((x) => x.id === id); | ||
} | ||
add(item: T) { | ||
if (this.$$observableObject.observable.find((x) => x.id === item.id)) { | ||
if (this.$$observableObject.target.find((x) => x.id === item.id)) { | ||
throw new Error("Duplicate ID detected: " + JSON.stringify(item.id)); | ||
} | ||
this.$$observableObject.observable.push(item); | ||
this.$$observableObject.target.push(item); | ||
} | ||
new = this.$$model.new | ||
delete(item: T) { | ||
const index = this.$$observableObject.observable.findIndex( | ||
const index = this.$$observableObject.target.findIndex( | ||
(x) => x.id === item.id | ||
@@ -355,12 +414,10 @@ ); | ||
deleteByIndex(index: number) { | ||
if (!this.$$observableObject.observable[index]) { | ||
if (!this.$$observableObject.target[index]) { | ||
throw new Error("Item not found."); | ||
} | ||
this.$$observableObject.observable[index].$$deleted = true; | ||
this.$$observableObject.target[index].$$deleted = true; | ||
} | ||
deleteByID(id: string) { | ||
const index = this.$$observableObject.observable.findIndex( | ||
(x) => x.id === id | ||
); | ||
const index = this.$$observableObject.target.findIndex((x) => x.id === id); | ||
if (index === -1) { | ||
@@ -373,9 +430,9 @@ throw new Error("Item not found."); | ||
updateByIndex(index: number, item: T) { | ||
if (!this.$$observableObject.observable[index]) { | ||
if (!this.$$observableObject.target[index]) { | ||
throw new Error("Item not found."); | ||
} | ||
if (this.$$observableObject.observable[index].id !== item.id) { | ||
if (this.$$observableObject.target[index].id !== item.id) { | ||
throw new Error("ID mismatch."); | ||
} | ||
this.$$observableObject.observable[index] = item; | ||
this.$$observableObject.target[index] = item; | ||
} | ||
@@ -386,4 +443,9 @@ | ||
async isUpdated() { | ||
return this.$$syncService ? (await this.$$syncService.latestVersion() === await this.$$localVersion()) : true; | ||
if (this.$$localPersistence && this.$$remotePersistence) { | ||
return ( | ||
(await this.$$localPersistence.getVersion()) === | ||
(await this.$$remotePersistence.getVersion()) | ||
); | ||
} else return false; | ||
} | ||
} |
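One detail worth spelling out from `$$serialize`/`$$deserialize`: the replacer body is elided in the hunk above, but the reviver's `$$date` branch implies Dates travel as `{ "$$date": <epoch ms> }` containers. A rough round-trip sketch under that assumption (the unwrapping of the container is likewise an assumption, since that part of the reviver is not shown):

```ts
function toLine(item: object): string {
  return JSON.stringify(item, function (this: any, key, value) {
    // Date.prototype.toJSON has already run when the replacer sees `value`,
    // so check the original property on the holder object instead.
    if (this[key] instanceof Date) {
      return { $$date: (this[key] as Date).getTime() };
    }
    return value;
  });
}

function fromLine(line: string): any {
  return JSON.parse(line, (key, val) => {
    if (key === "$$date") return new Date(val);
    const t = typeof val;
    if (t === "string" || t === "number" || t === "boolean" || val === null)
      return val;
    // Inner keys are revived first, so the container can be unwrapped here.
    if (val && val.$$date instanceof Date) return val.$$date;
    return val;
  });
}

const line = toLine({ id: "1", createdAt: new Date() });
console.log(fromLine(line).createdAt instanceof Date); // true
```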
@@ -1,89 +0,91 @@ | ||
import { IDB } from '../src/idb'; | ||
import { describe, test, expect } from 'vitest'; | ||
import { deferredArray, IDB } from '../src/persistence/local'; | ||
import "fake-indexeddb/auto"; | ||
describe('IDB', () => { | ||
test('get', async () => { | ||
const idb = new IDB('testDB'); | ||
await idb.set('key1', 'value1'); | ||
const result = await idb.get('key1'); | ||
expect(result).toBe('value1'); | ||
}); | ||
import { describe, it, expect, beforeEach, afterEach } from 'vitest'; | ||
test('getBulk', async () => { | ||
const idb = new IDB('testDB'); | ||
await idb.set('key1', 'value1'); | ||
await idb.set('key2', 'value2'); | ||
const result = await idb.getBulk(['key1', 'key2']); | ||
expect(result).toEqual(['value1', 'value2']); | ||
}); | ||
describe('IDB Class', () => { | ||
const dbName = 'testDB'; | ||
let idb: IDB; | ||
test('set', async () => { | ||
const idb = new IDB('testDB'); | ||
await idb.set('key1', 'value1'); | ||
const result = await idb.get('key1'); | ||
expect(result).toBe('value1'); | ||
}); | ||
beforeEach(async () => { | ||
idb = new IDB({ name: dbName }); | ||
}); | ||
test('setBulk', async () => { | ||
const idb = new IDB('testDB'); | ||
await idb.setBulk([['key1', 'value1'], ['key2', 'value2']]); | ||
const result1 = await idb.get('key1'); | ||
const result2 = await idb.get('key2'); | ||
expect(result1).toBe('value1'); | ||
expect(result2).toBe('value2'); | ||
}); | ||
afterEach(async () => { | ||
await idb.clear(); | ||
await idb.clearMetadata() | ||
}); | ||
test('delBulk', async () => { | ||
const idb = new IDB('testDB'); | ||
await idb.set('key1', 'value1'); | ||
await idb.set('key2', 'value2'); | ||
await idb.delBulk(['key1', 'key2']); | ||
const result1 = await idb.get('key1'); | ||
const result2 = await idb.get('key2'); | ||
expect(result1).toBeUndefined(); | ||
expect(result2).toBeUndefined(); | ||
it('should initialize the database and object stores', async () => { | ||
// Simulate the request to ensure the database and object stores are created | ||
const request = indexedDB.open(dbName); | ||
const result = await new Promise<IDBDatabase>((resolve, reject) => { | ||
request.onsuccess = () => resolve(request.result); | ||
request.onerror = () => reject(request.error); | ||
}); | ||
test('clear', async () => { | ||
const idb = new IDB('testDB'); | ||
await idb.set('key1', 'value1'); | ||
await idb.set('key2', 'value2'); | ||
await idb.clear(); | ||
const result1 = await idb.get('key1'); | ||
const result2 = await idb.get('key2'); | ||
expect(result1).toBeUndefined(); | ||
expect(result2).toBeUndefined(); | ||
}); | ||
expect(result.objectStoreNames.contains(dbName)).toBe(true); | ||
expect(result.objectStoreNames.contains('metadata')).toBe(true); | ||
}); | ||
test('keys', async () => { | ||
const idb = new IDB('testDB'); | ||
await idb.set('key1', 'value1'); | ||
await idb.set('key2', 'value2'); | ||
const result = await idb.keys(); | ||
expect(result).toEqual(['key1', 'key2']); | ||
}); | ||
it('should store and retrieve multiple entries', async () => { | ||
const entries = [['key1', 'value1'], ['key2', 'value2']] as [string, string][]; | ||
await idb.put(entries); | ||
test('values', async () => { | ||
const idb = new IDB('testDB'); | ||
await idb.set('key1', 'value1'); | ||
await idb.set('key2', 'value2'); | ||
const result = await idb.values(); | ||
expect(result).toEqual(['value1', 'value2']); | ||
}); | ||
const allEntries = await idb.getAll(); | ||
expect(allEntries).toContain('value1'); | ||
expect(allEntries).toContain('value2'); | ||
}); | ||
test('setMetadata', async () => { | ||
const idb = new IDB('testDB'); | ||
await idb.setMetadata('metadata1', 'value1'); | ||
const result = await idb.getMetadata('metadata1'); | ||
expect(result).toBe('value1'); | ||
}); | ||
it('should store and retrieve metadata', async () => { | ||
await idb.setMetadata('testKey', 'testValue'); | ||
const value = await idb.getMetadata('testKey'); | ||
test('getMetadata', async () => { | ||
const idb = new IDB('testDB'); | ||
await idb.setMetadata('metadata1', 'value1'); | ||
const result = await idb.getMetadata('metadata1'); | ||
expect(result).toBe('value1'); | ||
}); | ||
}); | ||
expect(value).toBe('testValue'); | ||
}); | ||
it('should store and retrieve version', async () => { | ||
await idb.putVersion(1); | ||
const version = await idb.getVersion(); | ||
expect(version).toBe(1); | ||
}); | ||
it('should store and retrieve deferred array', async () => { | ||
const deferredArray: deferredArray = [{ data: "data", ts: 12 }, {data: "data2", ts: 24}]; | ||
await idb.putDeferred(deferredArray); | ||
const retrievedArray = await idb.getDeferred(); | ||
expect(retrievedArray).toEqual(deferredArray); | ||
}); | ||
it('should clear all entries', async () => { | ||
const entries = [['key1', 'value1'], ['key2', 'value2']] as [string, string][]; | ||
await idb.put(entries); | ||
await idb.clear(); | ||
const allEntries = await idb.getAll(); | ||
expect(allEntries.length).toBe(0); | ||
}); | ||
it('should clear metadata', async () => { | ||
await idb.setMetadata('testKey', 'testValue'); | ||
await idb.clearMetadata(); | ||
const value = await idb.getMetadata('testKey'); | ||
expect(value).toBeUndefined(); | ||
}); | ||
it('should handle concurrent transactions', async () => { | ||
const entries1 = [['key1', 'value1']] as [string, string][]; | ||
const entries2 = [['key2', 'value2']] as [string, string][]; | ||
await Promise.all([idb.put(entries1), idb.put(entries2)]); | ||
const allEntries = await idb.getAll(); | ||
expect(allEntries).toContain('value1'); | ||
expect(allEntries).toContain('value2'); | ||
}); | ||
}); |
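The suite above exercises `IDB` through the same surface the store consumes. For unit tests that do not want fake-indexeddb at all, an in-memory stand-in could look like the sketch below; the method set (`put`, `getAll`, `getVersion`/`putVersion`, `getDeferred`/`putDeferred`) is inferred from the call sites in `store.ts`, so treat the exact `LocalPersistence` contract as an assumption rather than the documented interface:

```ts
import type { deferredArray } from "apical-store";

// Hypothetical in-memory adapter covering only the methods the Store calls.
class MemoryPersistence {
  private rows = new Map<string, string>();
  private version = 0;
  private deferred: deferredArray = [];

  async put(entries: [string, string][]): Promise<void> {
    for (const [id, data] of entries) this.rows.set(id, data);
  }
  async getAll(): Promise<string[]> {
    return [...this.rows.values()];
  }
  async getVersion(): Promise<number> {
    return this.version;
  }
  async putVersion(v: number): Promise<void> {
    this.version = v;
  }
  async getDeferred(): Promise<deferredArray> {
    return this.deferred;
  }
  async putDeferred(arr: deferredArray): Promise<void> {
    this.deferred = arr;
  }
}
```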
@@ -1,118 +0,330 @@ | ||
import { describe, test, it, expect } from "vitest"; | ||
import { | ||
observable, | ||
isObservable, | ||
Change, | ||
ObservableArray, | ||
} from "../src/observable"; | ||
import { describe, test, it, expect, vi } from "vitest"; | ||
import { Change, Observable } from "../src/observable"; | ||
describe("observable", () => { | ||
test("should create an observable array", () => { | ||
const arr = [1, 2, 3]; | ||
const { observable: obsArr } = observable(arr); | ||
expect(isObservable(obsArr)).toBe(true); | ||
describe("initialization", () => { | ||
it("should initialize correctly with a regular array", () => { | ||
const arr = [1, 2, 3]; | ||
const observableArray = new Observable(arr); | ||
expect(Observable.isObservable(observableArray.target)).toBe( | ||
true | ||
); | ||
expect(JSON.stringify(observableArray.target)).toEqual( | ||
JSON.stringify(arr) | ||
); | ||
}); | ||
it("should initialize correctly with an observable array", () => { | ||
const arr = [1, 2, 3]; | ||
const observableArray = new Observable(arr); | ||
expect(Observable.isObservable(observableArray.target)).toBe( | ||
true | ||
); | ||
expect(JSON.stringify(observableArray.target)).toEqual( | ||
JSON.stringify(arr) | ||
); | ||
}); | ||
it("should maintain array methods and properties when adding elements", () => { | ||
const arr = [1, 2, 3]; | ||
const observableArray = new Observable(arr); | ||
observableArray.target.push(4); | ||
expect(observableArray.target.length).toBe(4); | ||
expect(observableArray.target.includes(2)).toBe(true); | ||
}); | ||
}); | ||
test("should observe changes in the array", async () => { | ||
const arr = [1, 2, 3]; | ||
const { observable: obsArr, observe } = observable(arr); | ||
let changes: Change<number[]>[] = []; | ||
describe("isObservable", () => { | ||
it("should identify non-observable array", () => { | ||
const arr = [1, 2, 3]; | ||
const observableArray = new Observable(arr); | ||
expect(Observable.isObservable(arr)).toBe(false); | ||
}); | ||
}); | ||
observe((c) => { | ||
changes = c; | ||
describe("observe", () => { | ||
it("should add an observer successfully when calling observe method", () => { | ||
const observer = (changes) => console.info(changes); | ||
const observableArray = new Observable([]); | ||
observableArray.observe(observer); | ||
expect(observableArray.observers).toContain(observer); | ||
}); | ||
test("should observe changes in the array", async () => { | ||
const arr = [1, 2, 3]; | ||
const o = new Observable(arr); | ||
let changes: Change<number[]>[] = []; | ||
obsArr.push(4); | ||
await new Promise((r) => setTimeout(r, 0)); | ||
o.observe((c) => { | ||
changes = c; | ||
}); | ||
expect(changes.length).toBe(1); | ||
expect(changes[0].type).toBe("insert"); | ||
expect(changes[0].path).toEqual([3]); | ||
expect(changes[0].value).toBe(4); | ||
}); | ||
o.target.push(4); | ||
await new Promise((r) => setTimeout(r, 100)); | ||
test("should unobserve changes in the array", async () => { | ||
const arr = [1, 2, 3]; | ||
const { observable: obsArr, observe, unobserve } = observable(arr); | ||
let changes: Change<number[]>[] = []; | ||
expect(changes.length).toBe(1); | ||
expect(changes[0].type).toBe("insert"); | ||
expect(changes[0].path).toEqual([3]); | ||
expect(changes[0].value).toBe(4); | ||
}); | ||
const observer = (c: Change<number[]>[]) => { | ||
changes = c; | ||
}; | ||
test("should observe multiple changes in the array", async () => { | ||
const arr = [1, 2, 3]; | ||
const o = new Observable(arr); | ||
let changes: Change<number[]>[] = []; | ||
observe(observer); | ||
o.observe((c) => { | ||
changes = c; | ||
}); | ||
obsArr.push(4); | ||
o.target.push(4); | ||
o.target.pop(); | ||
o.target.unshift(0); | ||
await new Promise((r) => setTimeout(r, 0)); | ||
await new Promise((r) => setTimeout(r, 0)); | ||
expect(changes.length).toBe(3); | ||
expect(changes[0].type).toBe("insert"); | ||
expect(changes[0].path).toEqual([3]); | ||
expect(changes[0].value).toBe(4); | ||
expect(changes[1].type).toBe("delete"); | ||
expect(changes[1].path).toEqual([3]); | ||
expect(changes[1].oldValue).toBe(4); | ||
expect(changes[2].type).toBe("insert"); | ||
expect(changes[2].path).toEqual([0]); | ||
expect(changes[2].value).toBe(0); | ||
}); | ||
expect(changes.length).toBe(1); | ||
test("should handle array modifications inside a nested array", async () => { | ||
const arr = [ | ||
[1, 2], | ||
[3, 4], | ||
]; | ||
const o = new Observable(arr); | ||
let changes: Change<number[][]>[] = []; | ||
await unobserve(observer); | ||
o.observe((c) => { | ||
changes = c; | ||
}); | ||
obsArr.push(5); | ||
o.target[0].push(5); | ||
await new Promise((r) => setTimeout(r, 0)); | ||
expect(changes.length).toBe(1); | ||
expect(changes.length).toBe(1); | ||
expect(changes[0].type).toBe("insert"); | ||
expect(changes[0].path).toEqual([0, 2]); | ||
expect(changes[0].value).toBe(5); | ||
}); | ||
}); | ||
test("should silently modify the array without notifying observers", () => { | ||
const arr = [1, 2, 3]; | ||
const { observable: obsArr, observe, silently } = observable(arr); | ||
let changes: Change<number[]>[] = []; | ||
describe("unobserve", () => { | ||
test("should unobserve changes in the array", async () => { | ||
const arr = [1, 2, 3]; | ||
const o = new Observable(arr); | ||
let changes: Change<number[]>[] = []; | ||
observe((c) => { | ||
changes = c; | ||
const observer = (c: Change<number[]>[]) => { | ||
changes = c; | ||
}; | ||
o.observe(observer); | ||
o.target.push(4); | ||
await new Promise((r) => setTimeout(r, 0)); | ||
expect(changes.length).toBe(1); | ||
o.unobserve(observer); | ||
o.target.push(5); | ||
expect(changes.length).toBe(1); | ||
}); | ||
silently((o) => { | ||
o.push(4); | ||
o.pop(); | ||
it("should handle removing non-existent observers gracefully", () => { | ||
const arr = [1, 2, 3]; | ||
const observableArray = new Observable(arr); | ||
const observer = (changes) => {}; | ||
observableArray.observe(() => {}); | ||
observableArray.unobserve(observer); // Trying to unobserve a non-existent observer | ||
expect(observableArray.observers.length).toBe(1); | ||
}); | ||
expect(changes.length).toBe(0); | ||
}); | ||
it("should remove a specific observer when unobserve is called with that observer", () => { | ||
const observer1 = (changes) => console.info("Observer 1:", changes); | ||
const observer2 = (changes) => console.info("Observer 2:", changes); | ||
const arr = [1, 2, 3]; | ||
const observableArray = new Observable(arr); | ||
test("should observe multiple changes in the array", async () => { | ||
const arr = [1, 2, 3]; | ||
const { observable: obsArr, observe } = observable(arr); | ||
let changes: Change<number[]>[] = []; | ||
observableArray.observe(observer1); | ||
observableArray.observe(observer2); | ||
expect(observableArray.observers.length).toBe(2); | ||
observe((c) => { | ||
changes = c; | ||
observableArray.unobserve(observer1); | ||
expect(observableArray.observers.length).toBe(1); | ||
expect(observableArray.observers[0]).toBe(observer2); | ||
}); | ||
obsArr.push(4); | ||
obsArr.pop(); | ||
obsArr.unshift(0); | ||
await new Promise((r) => setTimeout(r, 0)); | ||
it("should remove all observers when no argument is provided", () => { | ||
const observer1 = (changes) => console.info("Observer 1 called"); | ||
const observer2 = (changes) => console.info("Observer 2 called"); | ||
const observableArray = new Observable([1, 2, 3]); | ||
observableArray.observe(observer1); | ||
observableArray.observe(observer2); | ||
expect(changes.length).toBe(3); | ||
expect(changes[0].type).toBe("insert"); | ||
expect(changes[0].path).toEqual([3]); | ||
expect(changes[0].value).toBe(4); | ||
expect(changes[1].type).toBe("delete"); | ||
expect(changes[1].path).toEqual([3]); | ||
expect(changes[1].oldValue).toBe(4); | ||
expect(changes[2].type).toBe("insert"); | ||
expect(changes[2].path).toEqual([0]); | ||
expect(changes[2].value).toBe(0); | ||
observableArray.unobserve(); | ||
expect(observableArray.observers).toEqual([]); | ||
}); | ||
it("should not alter observers list if observer is not found", () => { | ||
const observer = (changes: Change<number[]>[]) => {}; | ||
const observableArray = new Observable<number>([]); | ||
observableArray.observe(observer); | ||
const result = observableArray.unobserve( | ||
(changes: Change<number[]>[]) => {} | ||
); | ||
expect(result).toEqual([]); | ||
}); | ||
it("should return removed observers", () => { | ||
const observer = (changes: Change<number[]>[]) => {}; | ||
const observableArray = new Observable([1, 2, 3]); | ||
observableArray.observe(observer); | ||
const result = observableArray.unobserve(); | ||
expect(result).toEqual([observer]); | ||
}); | ||
it("should return an empty array when no observers are removed", () => { | ||
const observer = (changes: Change<number[]>[]) => {}; | ||
const observableArray = new Observable([1, 2, 3]); | ||
observableArray.observe(observer); | ||
const result = observableArray.unobserve([]); | ||
expect(result).toEqual([]); | ||
}); | ||
}); | ||
test("should handle array modifications inside a nested array", async () => { | ||
const arr = [[1, 2], [3, 4]]; | ||
const { observable: obsArr, observe } = observable(arr); | ||
let changes: Change<number[][]>[] = []; | ||
describe("silently", () => { | ||
test("should silently modify the array without notifying observers", () => { | ||
const arr = [1, 2, 3]; | ||
const o = new Observable(arr); | ||
let changes: Change<number[]>[] = []; | ||
observe((c) => { | ||
changes = c; | ||
o.observe((c) => { | ||
changes = c; | ||
}); | ||
o.silently((o) => { | ||
o.push(4); | ||
o.pop(); | ||
}); | ||
expect(changes.length).toBe(0); | ||
}); | ||
obsArr[0].push(5); | ||
await new Promise((r) => setTimeout(r, 0)); | ||
it("should temporarily disable observers and re-enable them after execution", async () => { | ||
const arr = [1, 2, 3]; | ||
const observableArray = new Observable(arr); | ||
let observerCalled = false; | ||
const observer = (changes: Change<number[]>[]) => { | ||
observerCalled = true; | ||
}; | ||
observableArray.observe(observer); | ||
observableArray.silently((o) => { | ||
o[0] = 10; | ||
}); | ||
await new Promise((r) => setTimeout(r, 10)); | ||
expect(observerCalled).toBe(false); | ||
expect(changes.length).toBe(1); | ||
expect(changes[0].type).toBe("insert"); | ||
expect(changes[0].path).toEqual([0, 2]); | ||
expect(changes[0].value).toBe(5); | ||
observableArray.target.push(100); | ||
await new Promise((r) => setTimeout(r, 10)); | ||
expect(observerCalled).toBe(true); | ||
}); | ||
it("should temporarily disable observers and re-enable them after execution (deep)", async () => { | ||
const arr = [{ numbers: [1] }, { numbers: [2] }, { numbers: [3] }]; | ||
const observableArray = new Observable(arr); | ||
let observerCalled = false; | ||
observableArray.observe((changes) => { | ||
observerCalled = true; | ||
}); | ||
observableArray.silently((o) => { | ||
o[0].numbers[0] = 10; | ||
}); | ||
await new Promise((r) => setTimeout(r, 10)); | ||
expect(observerCalled).toBe(false); | ||
observableArray.target[2].numbers[0] = 30; | ||
await new Promise((r) => setTimeout(r, 10)); | ||
expect(observerCalled).toBe(true); | ||
}); | ||
it("should persist changes made during the work function", async () => { | ||
const arr = [1, 2, 3]; | ||
const observableArray = new Observable(arr); | ||
const observer = vi.fn(); | ||
observableArray.observe(observer); | ||
observableArray.silently((o) => { | ||
o[0] = 10; | ||
o.push(4); | ||
}); | ||
await new Promise((r) => setTimeout(r, 10)); | ||
expect(observer).toHaveBeenCalledTimes(0); | ||
expect(observableArray.copy).toEqual([10, 2, 3, 4]); | ||
}); | ||
it("should re-enable observers even if work function throws an exception", async () => { | ||
const arr = [1, 2, 3]; | ||
const observableArray = new Observable(arr); | ||
let observerCalled = false; | ||
const observer = (changes: Change<number[]>[]) => { | ||
observerCalled = true; | ||
}; | ||
observableArray.observe(observer); | ||
expect(Observable.isObservable(observableArray.target)).toBe( | ||
true | ||
); | ||
expect(observableArray.copy).toEqual(arr); | ||
try { | ||
observableArray.silently((o) => { | ||
throw new Error("Exception in work function"); | ||
}); | ||
} catch (e) {} | ||
await new Promise((r) => setTimeout(r, 10)); | ||
expect(observerCalled).toBe(false); | ||
observableArray.target[0] = 12; | ||
await new Promise((r) => setTimeout(r, 10)); | ||
expect(observerCalled).toBe(true); | ||
}); | ||
// Changes made before the exception should persist | ||
it("should persist changes made before an exception is thrown during the work function execution", () => { | ||
const arr = [1, 2, 3]; | ||
const observableArray = new Observable(arr); | ||
try { | ||
observableArray.silently((o) => { | ||
o[0] = 10; | ||
throw new Error("Exception during work function"); | ||
}); | ||
} catch (e) { | ||
// Exception thrown intentionally | ||
} | ||
expect(observableArray.target[0]).toBe(10); | ||
}); | ||
it("should propagate exception when work function throws an error", () => { | ||
const arr = [1, 2, 3]; | ||
const observableArray = new Observable(arr); | ||
const error = new Error("Test Error"); | ||
expect(() => { | ||
observableArray.silently(() => { | ||
throw error; | ||
}); | ||
}).toThrow(error); | ||
}); | ||
}); | ||
}); |
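The `silently` tests mirror how the store hydrates from local persistence: data is spliced into the target without waking observers, while ordinary mutations keep notifying. A compact standalone usage sketch (imported from the internal module, as the tests do):

```ts
import { Observable, Change } from "../src/observable";

const numbers = new Observable<number>([1, 2, 3]);

const log = (changes: Change<number[]>[]) =>
  console.log(changes.map((c) => c.type));
numbers.observe(log);

// Hydration-style update: observers stay quiet.
numbers.silently((target) => {
  target.splice(0, target.length, 10, 20, 30);
});

// Normal mutation: observers are notified on the next microtask.
numbers.target.push(40);

// When the listener is no longer needed, detach it
// (or call unobserve() with no argument to drop all listeners).
numbers.unobserve(log);
```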
import { Store } from "../src/store"; | ||
import { describe, it, expect, vi, beforeEach } from "vitest"; | ||
import { Miniflare } from "miniflare"; | ||
import "fake-indexeddb/auto"; | ||
import { D1Database, KVNamespace } from "@cloudflare/workers-types"; | ||
import { readFileSync, writeFileSync } from "fs"; | ||
import { IDB } from "../src/persistence/local"; | ||
import { CloudFlareApexoDB } from "../src/persistence/remote"; | ||
import "fake-indexeddb/auto"; | ||
@@ -19,6 +21,2 @@ describe("Store", () => { | ||
store = new Store({ | ||
name: Math.random().toString(36).substring(7), | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
}); | ||
@@ -30,31 +28,5 @@ | ||
).replace( | ||
`var Auth = class { | ||
static async authenticate(token) { | ||
try { | ||
const response = await fetch("https://auth1.apexo.app", { | ||
method: "PUT", | ||
body: JSON.stringify({ operation: "jwt", token }) | ||
}); | ||
const result = await response.json(); | ||
if (!result.success) { | ||
return { success: false }; | ||
} | ||
const account = JSON.parse(atob(token)).payload.prefix; | ||
return { success: true, account }; | ||
} catch (e) { | ||
return { success: false }; | ||
} | ||
} | ||
};`, | ||
`var Auth = class { | ||
static async authenticate(token) { | ||
try { | ||
return { success: true, account: "ali" }; | ||
} catch (e) { | ||
return { success: false }; | ||
} | ||
} | ||
}` | ||
/const response(.|\n)*return \{ success: true, account \};/, | ||
`return {success: true, account: "ali"}` | ||
); | ||
writeFileSync("./worker.js", workerFile); | ||
@@ -70,2 +42,3 @@ | ||
env.DB = await mf.getD1Database("DB"); | ||
global.fetch = mf.dispatchFetch as any; | ||
@@ -78,3 +51,2 @@ await env.DB.exec( | ||
); | ||
global.fetch = mf.dispatchFetch as any; | ||
}); | ||
@@ -148,6 +120,10 @@ | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
@@ -168,3 +144,3 @@ | ||
expect(await (store as any).$$localVersion()).toBe(99); | ||
expect(await (store as any).$$localPersistence.getVersion()).toBe(99); | ||
}); | ||
@@ -184,6 +160,10 @@ | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
@@ -204,3 +184,3 @@ | ||
expect(await (store as any).$$localVersion()).toBe(123); | ||
expect(await (store as any).$$localPersistence.getVersion()).toBe(123); | ||
@@ -235,3 +215,3 @@ await env.DB.prepare( | ||
expect(await (store as any).$$localVersion()).toBe(124); | ||
expect(await (store as any).$$localPersistence.getVersion()).toBe(124); | ||
}); | ||
@@ -243,15 +223,23 @@ | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$idb.clear(); | ||
await (store as any).$$idb.clearMetadata(); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
@@ -288,15 +276,23 @@ await new Promise((r) => setTimeout(r, 300)); | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$idb.clear(); | ||
await (store as any).$$idb.clearMetadata(); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
@@ -330,15 +326,23 @@ | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$idb.clear(); | ||
await (store as any).$$idb.clearMetadata(); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
@@ -372,15 +376,23 @@ { | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$idb.clear(); | ||
await (store as any).$$idb.clearMetadata(); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
@@ -418,3 +430,3 @@ | ||
expect(store.list[0].id).toBe("12"); | ||
expect(await (store as any).$$localVersion()).toBe(version); | ||
expect(await (store as any).$$localPersistence.getVersion()).toBe(version); | ||
}); | ||
@@ -426,15 +438,23 @@ | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$idb.clear(); | ||
await (store as any).$$idb.clearMetadata(); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
@@ -477,3 +497,3 @@ { | ||
expect(store.list[0].name).toBe("alex2"); | ||
expect(await (store as any).$$localVersion()).toBe(version); | ||
expect(await (store as any).$$localPersistence.getVersion()).toBe(version); | ||
}); | ||
@@ -485,15 +505,23 @@ | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$idb.clear(); | ||
await (store as any).$$idb.clearMetadata(); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
@@ -534,5 +562,5 @@ | ||
expect((store as any).$$observableObject.observable.length).toBe(1); | ||
expect((store as any).$$observableObject.target.length).toBe(1); | ||
expect(store.list.length).toBe(0); | ||
expect(await (store as any).$$localVersion()).toBe(version); | ||
expect(await (store as any).$$localPersistence.getVersion()).toBe(version); | ||
}); | ||
@@ -544,15 +572,23 @@ | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$idb.clear(); | ||
await (store as any).$$idb.clearMetadata(); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
@@ -610,15 +646,23 @@ { | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$idb.clear(); | ||
await (store as any).$$idb.clearMetadata(); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
@@ -688,15 +732,23 @@ { | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$idb.clear(); | ||
await (store as any).$$idb.clearMetadata(); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
@@ -720,2 +772,12 @@ { | ||
it("should not sync if not online", async () => { | ||
store = new Store({ | ||
remotePersistence: new CloudFlareApexoDB({ | ||
endpoint: "https://apexo-database.vercel.app", | ||
token: "any", | ||
name: "staff" | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff" | ||
}) | ||
}); | ||
store.isOnline = false; | ||
@@ -728,10 +790,28 @@ { | ||
it("should not sync if sync service is not available", async () => { | ||
(store as any).$$syncService = null; | ||
it("should not sync if local persistence is not available", async () => { | ||
store = new Store({ | ||
remotePersistence: new CloudFlareApexoDB({ | ||
endpoint: "https://apexo-database.vercel.app", | ||
token: "any", | ||
name: "staff" | ||
}) | ||
}); | ||
{ | ||
const tries = await store.sync(); | ||
expect(tries[0].exception).toBe("Sync service not available"); | ||
expect(tries[0].exception).toBe("Local persistence not available"); | ||
} | ||
}); | ||
it("should not sync if remote persistence is not available", async () => { | ||
store = new Store({ | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
{ | ||
const tries = await store.sync(); | ||
expect(tries[0].exception).toBe("Remote persistence not available"); | ||
} | ||
}); | ||
it("should sync push (deferred) and pull at the same time", async () => { | ||
@@ -741,15 +821,23 @@ { | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$idb.clear(); | ||
await (store as any).$$idb.clearMetadata(); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
debounceRate: 1, | ||
@@ -805,15 +893,23 @@ }); | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$idb.clear(); | ||
await (store as any).$$idb.clearMetadata(); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
@@ -868,15 +964,23 @@ store.add({ id: "0", name: "ali" }); | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$idb.clear(); | ||
await (store as any).$$idb.clearMetadata(); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
@@ -940,19 +1044,27 @@ { | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$idb.clear(); | ||
await (store as any).$$idb.clearMetadata(); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
expect(await (store as any).$$localVersion()).toBe(0); | ||
expect(await (store as any).$$syncService.latestVersion()).toBe(0); | ||
expect(await (store as any).$$localPersistence.getVersion()).toBe(0); | ||
expect(await (store as any).$$remotePersistence.getVersion()).toBe(0); | ||
{ | ||
@@ -962,4 +1074,4 @@ const tries = await store.sync(); | ||
} | ||
expect(await (store as any).$$localVersion()).toBe(0); | ||
expect(await (store as any).$$syncService.latestVersion()).toBe(0); | ||
expect(await (store as any).$$localPersistence.getVersion()).toBe(0); | ||
expect(await (store as any).$$remotePersistence.getVersion()).toBe(0); | ||
}); | ||
@@ -971,16 +1083,24 @@ | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$idb.clear(); | ||
await (store as any).$$idb.clearMetadata(); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
debounceRate: 1000, | ||
@@ -992,21 +1112,21 @@ }); | ||
store.add({ id: "1", name: "alex" }); | ||
expect((await (store as any).$$idb.values()).length).toBe(1); // 1 | ||
expect((await (store as any).$$localPersistence.getAll()).length).toBe(1); // 1 | ||
await new Promise((r) => setTimeout(r, 100)); | ||
expect((await (store as any).$$idb.values()).length).toBe(1); // 2 | ||
expect((await (store as any).$$localPersistence.getAll()).length).toBe(1); // 2 | ||
await new Promise((r) => setTimeout(r, 100)); | ||
expect((await (store as any).$$idb.values()).length).toBe(1); // 3 | ||
expect((await (store as any).$$localPersistence.getAll()).length).toBe(1); // 3 | ||
await new Promise((r) => setTimeout(r, 100)); | ||
expect((await (store as any).$$idb.values()).length).toBe(1); // 4 | ||
expect((await (store as any).$$localPersistence.getAll()).length).toBe(1); // 4 | ||
await new Promise((r) => setTimeout(r, 100)); | ||
expect((await (store as any).$$idb.values()).length).toBe(1); // 5 | ||
expect((await (store as any).$$localPersistence.getAll()).length).toBe(1); // 5 | ||
await new Promise((r) => setTimeout(r, 100)); | ||
expect((await (store as any).$$idb.values()).length).toBe(1); // 6 | ||
expect((await (store as any).$$localPersistence.getAll()).length).toBe(1); // 6 | ||
await new Promise((r) => setTimeout(r, 100)); | ||
expect((await (store as any).$$idb.values()).length).toBe(1); // 7 | ||
expect((await (store as any).$$localPersistence.getAll()).length).toBe(1); // 7 | ||
await new Promise((r) => setTimeout(r, 100)); | ||
expect((await (store as any).$$idb.values()).length).toBe(1); // 8 | ||
expect((await (store as any).$$localPersistence.getAll()).length).toBe(1); // 8 | ||
await new Promise((r) => setTimeout(r, 100)); | ||
expect((await (store as any).$$idb.values()).length).toBe(1); // 9 | ||
expect((await (store as any).$$localPersistence.getAll()).length).toBe(1); // 9 | ||
await new Promise((r) => setTimeout(r, 150)); | ||
expect((await (store as any).$$idb.values()).length).toBe(2); // 10.5 | ||
expect((await (store as any).$$localPersistence.getAll()).length).toBe(2); // 10.5 | ||
}); | ||
@@ -1018,16 +1138,24 @@ | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$idb.clear(); | ||
await (store as any).$$idb.clearMetadata(); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
debounceRate: 500, | ||
@@ -1039,30 +1167,38 @@ }); | ||
store.add({ id: "1", name: "alex" }); | ||
expect((await (store as any).$$idb.values()).length).toBe(1); // 1 | ||
expect((await (store as any).$$localPersistence.getAll()).length).toBe(1); // 1 | ||
await new Promise((r) => setTimeout(r, 100)); | ||
expect((await (store as any).$$idb.values()).length).toBe(1); // 2 | ||
expect((await (store as any).$$localPersistence.getAll()).length).toBe(1); // 2 | ||
await new Promise((r) => setTimeout(r, 100)); | ||
expect((await (store as any).$$idb.values()).length).toBe(1); // 3 | ||
expect((await (store as any).$$localPersistence.getAll()).length).toBe(1); // 3 | ||
await new Promise((r) => setTimeout(r, 100)); | ||
expect((await (store as any).$$idb.values()).length).toBe(1); // 4 | ||
expect((await (store as any).$$localPersistence.getAll()).length).toBe(1); // 4 | ||
await new Promise((r) => setTimeout(r, 150)); | ||
expect((await (store as any).$$idb.values()).length).toBe(2); // 5.5 | ||
expect((await (store as any).$$localPersistence.getAll()).length).toBe(2); // 5.5 | ||
}); | ||
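The two debounce tests above (debounceRate: 1000 and debounceRate: 500) assert the same behaviour: after store.add, the item count reported by the local persistence layer stays unchanged through the repeated ~100 ms checks and only increments once the debounce window has elapsed. As a rough illustration of that pattern, here is a minimal, self-contained TypeScript sketch; the DebouncedWriter name and the flush callback are hypothetical and are not apical-store's implementation:

    // Hypothetical sketch of the debounce pattern the tests above exercise.
    class DebouncedWriter<T> {
        private pending: T[] = [];
        private timer: ReturnType<typeof setTimeout> | null = null;

        constructor(
            // flush is assumed to persist a batch, e.g. into IndexedDB
            private flush: (items: T[]) => Promise<void>,
            // comparable to the Store's debounceRate option (milliseconds)
            private debounceRate: number
        ) {}

        add(item: T): void {
            this.pending.push(item);
            // Restart the timer on every write; the batch is persisted only
            // once writes have been quiet for `debounceRate` milliseconds.
            if (this.timer) clearTimeout(this.timer);
            this.timer = setTimeout(() => {
                const batch = this.pending;
                this.pending = [];
                this.timer = null;
                void this.flush(batch);
            }, this.debounceRate);
        }
    }

With a 1000 ms rate, a reader polling the persistence layer every 100 ms would, as in the first test, see the old count for roughly ten checks before the new item shows up.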
it("Deferred changes must pushes only the latest change", async () =>{ | ||
it("Deferred changes must pushes only the latest change", async () => { | ||
{ | ||
// clearing local database before starting | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$idb.clear(); | ||
await (store as any).$$idb.clearMetadata(); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
@@ -1084,3 +1220,2 @@ { | ||
expect(store.list.length).toBe(1); | ||
@@ -1133,19 +1268,27 @@ expect( | ||
it("If there are already deferred changes, no updates shall be sent unless there's a sync process", async () =>{ | ||
it("If there are already deferred changes, no updates shall be sent unless there's a sync process", async () => { | ||
{ | ||
// clearing local database before starting | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$idb.clear(); | ||
await (store as any).$$idb.clearMetadata(); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
name: "staff", | ||
token: token, | ||
persist: true, | ||
endpoint: "http://example.com", | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
@@ -1217,2 +1360,93 @@ { | ||
}); | ||
it("Rely on the specific version of the row when it is available", async () => { | ||
{ | ||
// clearing local database before starting | ||
store = new Store({ | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
await (store as any).$$localPersistence.clear(); | ||
await (store as any).$$localPersistence.clearMetadata(); | ||
} | ||
store = new Store({ | ||
remotePersistence: new CloudFlareApexoDB({ | ||
token, | ||
endpoint: "https://apexo-database.vercel.app", | ||
name: "staff", | ||
}), | ||
localPersistence: new IDB({ | ||
name: "staff", | ||
}), | ||
}); | ||
{ | ||
const tries = await store.sync(); | ||
expect(tries[0].exception).toBe("Nothing to sync"); | ||
} | ||
store.add({ id: "1", name: "alex" }); | ||
await new Promise((r) => setTimeout(r, 300)); | ||
{ | ||
const tries = await store.sync(); | ||
expect(tries[0].pulled).toBe(1); | ||
expect(tries[0].pushed).toBe(0); | ||
expect(tries[1].exception).toBe("Nothing to sync"); | ||
} | ||
store.isOnline = false; | ||
store.updateByIndex(0, { id: "1", name: "mathew" }); | ||
await new Promise((r) => setTimeout(r, 300)); | ||
expect(store.deferredPresent).toBe(true); | ||
await env.DB.exec( | ||
`UPDATE staff SET data = '{"id":"1","name":"john"}' WHERE id = 1` | ||
); | ||
await env.DB.exec( | ||
'INSERT INTO staff (id, account, data) VALUES (\'2\', \'ali\', \'{"id":"2","name":"ron"}\');' | ||
); | ||
const deferredVersion = Number( | ||
JSON.parse( | ||
await (store as any).$$localPersistence.getMetadata("deferred") | ||
)[0].ts | ||
); | ||
const localVersion = Number(await (store as any).$$localPersistence.getVersion()); | ||
expect(deferredVersion).toBeGreaterThan(localVersion); | ||
const remoteConflictVersion = (deferredVersion + localVersion) / 2; | ||
await env.DB.exec( | ||
`INSERT INTO staff_changes (version, account, ids) VALUES (${remoteConflictVersion}, 'ali', '1');` | ||
); | ||
await env.DB.exec( | ||
`INSERT INTO staff_changes (version, account, ids) VALUES (${ | ||
deferredVersion + 1000 | ||
}, 'ali', '2');` | ||
); | ||
store.isOnline = true; | ||
const keys = (await env.CACHE.list()).keys.map((x) => x.name); | ||
for (let index = 0; index < keys.length; index++) { | ||
const element = keys[index]; | ||
await env.CACHE.delete(element); | ||
} | ||
{ | ||
const tries = await store.sync(); | ||
expect(tries[0].pulled).toBe(1); | ||
expect(tries[0].pushed).toBe(1); // deferred won | ||
expect(tries[1].exception).toBe("Nothing to sync"); | ||
} | ||
expect(JSON.stringify(store.list)).toBe( | ||
`[{"id":"1","name":"mathew"},{"id":"2","name":"ron"}]` | ||
); | ||
}); | ||
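The test above constructs a conflict by hand: row "1" carries a deferred local edit whose timestamp is higher than the fabricated remote change's version, while row "2" exists only remotely, so the sync is expected to keep the deferred write ("deferred won") and still pull the new row. A rough sketch of that per-row, highest-version-wins comparison follows; the interface and function are hypothetical illustrations, not apical-store's internals:

    // Hypothetical per-row conflict resolution keyed on version numbers.
    interface VersionedChange<T> {
        id: string;
        version: number; // e.g. a millisecond timestamp, like the deferred "ts" above
        data: T;
    }

    function resolvePerRow<T>(
        deferred: VersionedChange<T>[],
        remote: VersionedChange<T>[]
    ): { push: VersionedChange<T>[]; pull: VersionedChange<T>[] } {
        const deferredById = new Map(deferred.map((c) => [c.id, c] as const));
        const push: VersionedChange<T>[] = [];
        const pull: VersionedChange<T>[] = [];
        for (const r of remote) {
            const d = deferredById.get(r.id);
            // When a specific version is available for the row on both sides,
            // the higher version wins; otherwise the remote change is pulled.
            if (d && d.version > r.version) push.push(d);
            else pull.push(r);
        }
        // Deferred rows with no remote counterpart are pushed as-is.
        for (const d of deferred) {
            if (!remote.some((r) => r.id === d.id)) push.push(d);
        }
        return { push, pull };
    }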
}); |
@@ -112,3 +112,3 @@ { | ||
}, | ||
"include": ["src/**/*.ts"], | ||
"include": ["src/**/*.ts", "tests/react.test.ts"], | ||
} |
No README (Quality)
Package does not have a README. This may indicate a failed publish or a low quality package.
Found 1 instance in 1 package