@agoric/store
Comparing version 0.6.9-dev-bda9206.0 to 0.6.9-dev-bf29060.0
{ | ||
"name": "@agoric/store", | ||
"version": "0.6.9-dev-bda9206.0+bda9206", | ||
"version": "0.6.9-dev-bf29060.0+bf29060", | ||
"description": "Wrapper for JavaScript map", | ||
@@ -34,9 +34,9 @@ "type": "module", | ||
"dependencies": { | ||
"@agoric/assert": "0.3.16-dev-bda9206.0+bda9206", | ||
"@agoric/eventual-send": "0.14.1-dev-bda9206.0+bda9206", | ||
"@agoric/marshal": "0.5.1-dev-bda9206.0+bda9206", | ||
"@agoric/promise-kit": "0.2.30-dev-bda9206.0+bda9206" | ||
"@agoric/assert": "0.3.16-dev-bf29060.0+bf29060", | ||
"@agoric/eventual-send": "0.14.1-dev-bf29060.0+bf29060", | ||
"@agoric/marshal": "0.5.1-dev-bf29060.0+bf29060", | ||
"@agoric/promise-kit": "0.2.30-dev-bf29060.0+bf29060" | ||
}, | ||
"devDependencies": { | ||
"@agoric/swingset-vat": "0.24.2-dev-bda9206.0+bda9206", | ||
"@agoric/swingset-vat": "0.24.2-dev-bf29060.0+bf29060", | ||
"ava": "^3.12.1" | ||
@@ -57,6 +57,2 @@ }, | ||
], | ||
"prettier": { | ||
"trailingComma": "all", | ||
"singleQuote": true | ||
}, | ||
"publishConfig": { | ||
@@ -71,3 +67,3 @@ "access": "public" | ||
}, | ||
"gitHead": "bda920694c7ab573822415653335e258b9c21281" | ||
"gitHead": "bf290606171e44f2de4e2a358c16372f2af96471" | ||
} |
@@ -21,3 +21,8 @@ // @ts-check | ||
} from './patterns/patternMatchers.js'; | ||
export { compareRank, isRankSorted, sortByRank } from './patterns/rankOrder.js'; | ||
export { | ||
compareRank, | ||
isRankSorted, | ||
sortByRank, | ||
makeFullOrderComparatorKit, | ||
} from './patterns/rankOrder.js'; | ||
@@ -43,1 +48,3 @@ export { makeScalarWeakSetStore } from './stores/scalarWeakSetStore.js'; | ||
export { makeLegacyWeakMap } from './legacy/legacyWeakMap.js'; | ||
export { makeCopySet, getCopySetKeys } from './keys/copySet.js'; |
// @ts-check | ||
// eslint-disable-next-line spaced-comment | ||
/// <reference types="ses"/> | ||
@@ -5,0 +4,0 @@ |
// @ts-check | ||
// eslint-disable-next-line spaced-comment | ||
/// <reference types="ses"/> | ||
@@ -13,50 +12,3 @@ | ||
/** | ||
* `compareKeys` implements a partial order over keys. As with the | ||
* rank ordering produced by `compareRank`, -1, 0, and 1 mean | ||
* "less than", "equivalent to", and "greater than" respectively. | ||
* NaN means "incomparable" --- the first key is not less, equivalent, | ||
* or greater than the second. For example, subsets over sets is | ||
* a partial order. | ||
* | ||
* By using NaN for "incomparable", the normal equivalence for using | ||
* the return value in a comparison is preserved. | ||
* `compareKeys(left, right) >= 0` iff `left` is greater than or | ||
* equivalent to `right` in the partial ordering. | ||
* `compareKeys` is currently not exported directly, so its | ||
* bizarre but convenient return type is not exposed. | ||
* | ||
* Key order (a partial order) and rank order (a full order) are | ||
* co-designed so that we store passables in rank order and index into them | ||
* with keys for key-based queries. To keep these distinct, when speaking | ||
* informally about rank, we talk about "earlier" and "later". When speaking | ||
* informally about keys, we talk about "smaller" and "bigger". | ||
* | ||
* In both orders, the return-0 case defines | ||
* an equivalence class, i.e., those that are tied for the same place in the | ||
* order. The global invariant that we need between the two orders is that the | ||
* key order equivalence class is always at least as precise as the | ||
* rank order equivalence class. IOW, if `compareKeys(X,Y) === 0` then | ||
* `compareRank(X,Y) === 0`. But not vice versa. For example, two different | ||
* remotables are the same rank but incommensurate as keys. | ||
* | ||
* A further invariant is if `compareKeys(X,Y) < 0` then | ||
* `compareRank(X,Y) <= 0`, i.e., if X is smaller than Y in key order, then X | ||
* must be at least as early as Y in rank order. But not vice versa. | ||
* X can be earlier than Y in rank order and still be incommensurate with Y in | ||
* key order. For example, the records `{b: 3, a: 5}` is earlier than but | ||
* incommensurate with the record `{b: 5, a: 3}`. | ||
* | ||
* This lets us translate a range search over the | ||
* partial key order into a range search over rank order followed by filtering | ||
* out those that don't match. To get this effect, we store the elements of | ||
* a set in an array sorted in reverse rank order, from later to earlier. | ||
* Combined with our lexicographic comparison of arrays, if set X is a subset | ||
* of set Y then the array representing set X will be an earlier rank that the | ||
* array representing set Y. | ||
* | ||
* @param {Key} left | ||
* @param {Key} right | ||
* @returns {-1 | 0 | 1 | NaN} | ||
*/ | ||
/** @type {KeyCompare} */ | ||
export const compareKeys = (left, right) => { | ||
@@ -63,0 +15,0 @@ assertKey(left); |
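To make the key-order/rank-order distinction in the comment above concrete, the record example it cites can be run directly. This is an illustrative sketch only: `compareRank` is exported from index.js in this diff, but the comment notes `compareKeys` is not exported directly, so the second import path is hypothetical.

// Illustrative sketch of the invariant described above, using the record
// example from the comment. `harden` is the SES global referenced by the
// `/// <reference types="ses"/>` directives in these files.
import { compareRank } from '@agoric/store';
import { compareKeys } from './keys/compareKeys.js'; // hypothetical internal path

// {b: 3, a: 5} is earlier in rank order than {b: 5, a: 3} ...
compareRank(harden({ b: 3, a: 5 }), harden({ b: 5, a: 3 })); // -1
// ... but incommensurate in key order: `a` is bigger on the left while
// `b` is bigger on the right, so compareKeys returns NaN and every
// >=, <=, <, > test against the result is false.
compareKeys(harden({ b: 3, a: 5 }), harden({ b: 5, a: 3 })); // NaN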
@@ -5,2 +5,3 @@ // @ts-check | ||
assertChecker, | ||
Far, | ||
getTag, | ||
@@ -13,3 +14,2 @@ makeTagged, | ||
// eslint-disable-next-line spaced-comment | ||
/// <reference types="ses"/> | ||
@@ -61,6 +61,22 @@ | ||
/** | ||
* @callback IsCopyMap | ||
* @param {Passable} m | ||
* @returns {m is CopyMap<Key, Passable>} | ||
*/ | ||
/** @type {IsCopyMap} */ | ||
export const isCopyMap = m => checkCopyMap(m); | ||
harden(isCopyMap); | ||
export const assertCopyMap = m => checkCopyMap(m, assertChecker); | ||
/** | ||
* @callback AssertCopyMap | ||
* @param {Passable} m | ||
* @returns {asserts m is CopyMap<Key, Passable>} | ||
*/ | ||
/** @type {AssertCopyMap} */ | ||
export const assertCopyMap = m => { | ||
checkCopyMap(m, assertChecker); | ||
}; | ||
harden(assertCopyMap); | ||
@@ -101,6 +117,6 @@ | ||
const { length } = keys; | ||
return harden({ | ||
return Far('CopyMap entries iterable', { | ||
[Symbol.iterator]: () => { | ||
let i = 0; | ||
return harden({ | ||
return Far('CopyMap entries iterator', { | ||
next: () => { | ||
@@ -107,0 +123,0 @@ /** @type {IteratorResult<[K,V],void>} */ |
@@ -12,6 +12,6 @@ // @ts-check | ||
isRankSorted, | ||
makeFullOrderComparatorKit, | ||
sortByRank, | ||
} from '../patterns/rankOrder.js'; | ||
// eslint-disable-next-line spaced-comment | ||
/// <reference types="ses"/> | ||
@@ -22,2 +22,31 @@ | ||
/** | ||
* @param {FullCompare} fullCompare | ||
* @returns {(keys: Key[], check?: Checker) => boolean} | ||
*/ | ||
export const makeCheckNoDuplicates = fullCompare => (keys, check = x => x) => { | ||
keys = sortByRank(keys, fullCompare); | ||
const { length } = keys; | ||
for (let i = 1; i < length; i += 1) { | ||
const k0 = keys[i - 1]; | ||
const k1 = keys[i]; | ||
if (fullCompare(k0, k1) === 0) { | ||
return check(false, X`value has duplicates: ${k0}`); | ||
} | ||
} | ||
return true; | ||
}; | ||
/** | ||
* TODO SECURITY HAZARD: https://github.com/Agoric/agoric-sdk/issues/4261 | ||
* This creates mutable static state that should be unobservable, since it | ||
* is only used by checkNoDuplicates in an internal sort algorithm whose | ||
* result is tested and then dropped. However, that has a bad performance | ||
* cost. It is not yet clear how to fix this without opening a | ||
* communications channel. | ||
*/ | ||
const fullCompare = makeFullOrderComparatorKit(true).antiComparator; | ||
const checkNoDuplicates = makeCheckNoDuplicates(fullCompare); | ||
/** | ||
* @param {Passable[]} keys | ||
@@ -40,7 +69,7 @@ * @param {Checker=} check | ||
} | ||
return true; | ||
return checkNoDuplicates(keys, check); | ||
}; | ||
harden(checkCopySetKeys); | ||
/** @type WeakSet<CopySet<any>> */ | ||
/** @type WeakSet<CopySet<Key>> */ | ||
const copySetMemo = new WeakSet(); | ||
@@ -69,6 +98,22 @@ | ||
/** | ||
* @callback IsCopySet | ||
* @param {Passable} s | ||
* @returns {s is CopySet<Key>} | ||
*/ | ||
/** @type {IsCopySet} */ | ||
export const isCopySet = s => checkCopySet(s); | ||
harden(isCopySet); | ||
export const assertCopySet = s => checkCopySet(s, assertChecker); | ||
/** | ||
* @callback AssertCopySet | ||
* @param {Passable} s | ||
* @returns {asserts s is CopySet<Key>} | ||
*/ | ||
/** @type {AssertCopySet} */ | ||
export const assertCopySet = s => { | ||
checkCopySet(s, assertChecker); | ||
}; | ||
harden(assertCopySet); | ||
@@ -102,4 +147,7 @@ | ||
*/ | ||
export const makeCopySet = elements => | ||
makeTagged('copySet', sortByRank(elements, compareAntiRank)); | ||
export const makeCopySet = elements => { | ||
const result = makeTagged('copySet', sortByRank(elements, compareAntiRank)); | ||
assertCopySet(result); | ||
return result; | ||
}; | ||
harden(makeCopySet); |
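A quick usage sketch of the hardened behavior above. It is illustrative only; `makeCopySet` and `getCopySetKeys` are re-exported from index.js as shown earlier in this diff.

import { makeCopySet, getCopySetKeys } from '@agoric/store';

const tags = makeCopySet(['late', 'early']);
// The payload is stored sorted by anti-rank (later to earlier), per the
// sortByRank(elements, compareAntiRank) call above.
console.log([...getCopySetKeys(tags)]);
// With the added assertCopySet check, duplicates are now rejected at
// construction time:
// makeCopySet(['dup', 'dup']); // would throw "value has duplicates: ..."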
// @ts-check | ||
import { sortByRank } from '../patterns/rankOrder.js'; | ||
import { | ||
makeFullOrderComparatorKit, | ||
sortByRank, | ||
} from '../patterns/rankOrder.js'; | ||
getCopySetKeys, | ||
makeCheckNoDuplicates, | ||
makeCopySet, | ||
} from './copySet.js'; | ||
@@ -26,3 +28,3 @@ const { details: X } = assert; | ||
* @param {Iterable<T>} ys | ||
* @param {CompareRank} fullCompare | ||
* @param {FullCompare} fullCompare | ||
* @returns {Iterable<[Opt<T>,Opt<T>]>} | ||
@@ -92,3 +94,3 @@ */ | ||
const isSupersetOp = xyi => { | ||
const isIterSuperset = xyi => { | ||
for (const [x, _yr] of xyi) { | ||
@@ -103,3 +105,3 @@ if (x === PUMPKIN) { | ||
const isDisjointOp = xyi => { | ||
const isIterDisjoint = xyi => { | ||
for (const [x, y] of xyi) { | ||
@@ -114,3 +116,3 @@ if (x !== PUMPKIN && y !== PUMPKIN) { | ||
const unionOp = xyi => { | ||
const iterUnion = xyi => { | ||
const result = []; | ||
@@ -131,3 +133,3 @@ for (const [x, y] of xyi) { | ||
const disjointUnionOp = xyi => { | ||
const iterDisjointUnion = xyi => { | ||
const result = []; | ||
@@ -149,3 +151,3 @@ for (const [x, y] of xyi) { | ||
const intersectionOp = xyi => { | ||
const iterIntersection = xyi => { | ||
const result = []; | ||
@@ -161,3 +163,3 @@ for (const [x, y] of xyi) { | ||
const disjointSubtractOp = xyi => { | ||
const iterDisjointSubtract = xyi => { | ||
const result = []; | ||
@@ -177,9 +179,18 @@ for (const [x, y] of xyi) { | ||
* @typedef {Object} SetOps | ||
* @property {CompareRank} fullCompare | ||
* @property {(xlist: T[], ylist: T[]) => boolean} isSuperset | ||
* @property {(xlist: T[], ylist: T[]) => boolean} isDisjoint | ||
* @property {(xlist: T[], ylist: T[]) => T[]} union | ||
* @property {(xlist: T[], ylist: T[]) => T[]} disjointUnion | ||
* @property {(xlist: T[], ylist: T[]) => T[]} intersection | ||
* @property {(xlist: T[], ylist: T[]) => T[]} disjointSubtract | ||
* | ||
* @property {(keys: Key[], check?: Checker) => boolean} checkNoDuplicates | ||
* | ||
* @property {(xlist: T[], ylist: T[]) => boolean} isListSuperset | ||
* @property {(xlist: T[], ylist: T[]) => boolean} isListDisjoint | ||
* @property {(xlist: T[], ylist: T[]) => T[]} listUnion | ||
* @property {(xlist: T[], ylist: T[]) => T[]} listDisjointUnion | ||
* @property {(xlist: T[], ylist: T[]) => T[]} listIntersection | ||
* @property {(xlist: T[], ylist: T[]) => T[]} listDisjointSubtract | ||
* | ||
* @property {(x: CopySet<T>, y: CopySet<T>) => boolean} isSetSuperset | ||
* @property {(x: CopySet<T>, y: CopySet<T>) => boolean} isSetDisjoint | ||
* @property {(x: CopySet<T>, y: CopySet<T>) => CopySet<T>} setUnion | ||
* @property {(x: CopySet<T>, y: CopySet<T>) => CopySet<T>} setDisjointUnion | ||
* @property {(x: CopySet<T>, y: CopySet<T>) => CopySet<T>} setIntersection | ||
* @property {(x: CopySet<T>, y: CopySet<T>) => CopySet<T>} setDisjointSubtract | ||
*/ | ||
@@ -189,23 +200,47 @@ | ||
* @template T | ||
* @param {boolean=} longLived | ||
* @param {FullCompare} fullCompare | ||
* Must be a total order, not just a rank order. See makeFullOrderComparatorKit. | ||
* @returns {SetOps<T>} | ||
*/ | ||
export const makeSetOps = (longLived = false) => { | ||
const { antiComparator: fullCompare } = makeFullOrderComparatorKit(longLived); | ||
const composeOp = op => (xlist, ylist) => { | ||
export const makeSetOps = fullCompare => { | ||
const checkNoDuplicates = makeCheckNoDuplicates(fullCompare); | ||
const listify = iterOp => (xlist, ylist) => { | ||
const xs = sortByRank(xlist, fullCompare); | ||
const ys = sortByRank(ylist, fullCompare); | ||
const xyi = merge(xs, ys, fullCompare); | ||
return op(xyi); | ||
return iterOp(xyi); | ||
}; | ||
const isListSuperset = listify(isIterSuperset); | ||
const isListDisjoint = listify(isIterDisjoint); | ||
const listUnion = listify(iterUnion); | ||
const listDisjointUnion = listify(iterDisjointUnion); | ||
const listIntersection = listify(iterIntersection); | ||
const listDisjointSubtract = listify(iterDisjointSubtract); | ||
const rawSetify = listOp => (xset, yset) => | ||
listOp(getCopySetKeys(xset), getCopySetKeys(yset)); | ||
const setify = listOp => (xset, yset) => | ||
makeCopySet(listOp(getCopySetKeys(xset), getCopySetKeys(yset))); | ||
return harden({ | ||
fullCompare, | ||
isSuperset: composeOp(isSupersetOp), | ||
isDisjoint: composeOp(isDisjointOp), | ||
union: composeOp(unionOp), | ||
disjointUnion: composeOp(disjointUnionOp), | ||
intersection: composeOp(intersectionOp), | ||
disjointSubtract: composeOp(disjointSubtractOp), | ||
checkNoDuplicates, | ||
isListSuperset, | ||
isListDisjoint, | ||
listUnion, | ||
listDisjointUnion, | ||
listIntersection, | ||
listDisjointSubtract, | ||
isSetSuperset: rawSetify(isListSuperset), | ||
isSetDisjoint: rawSetify(isListDisjoint), | ||
setUnion: setify(listUnion), | ||
setDisjointUnion: setify(listDisjointUnion), | ||
setIntersection: setify(listIntersection), | ||
setDisjointSubtract: setify(listDisjointSubtract), | ||
}); | ||
}; | ||
harden(makeSetOps); |
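A sketch of the new calling convention above: `makeSetOps` now receives a full-order comparator instead of building one itself, and exposes both list-level and CopySet-level operations. `makeSetOps` is not shown as a package export in this diff, so it is assumed to be in scope here; the exported helpers come from index.js as shown near the top.

import { makeFullOrderComparatorKit, makeCopySet } from '@agoric/store';

// Assumes makeSetOps (defined above) is in scope; module-internal in this diff.
const { antiComparator: fullCompare } = makeFullOrderComparatorKit(true);
const { setUnion, isSetSuperset, listUnion } = makeSetOps(fullCompare);

const x = makeCopySet(['a', 'b']);
const y = makeCopySet(['b', 'c']);
const u = setUnion(x, y);   // a CopySet containing 'a', 'b', 'c'
isSetSuperset(u, x);        // true
listUnion(['a'], ['b']);    // a plain array rather than a CopySet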
@@ -22,3 +22,3 @@ // @ts-check | ||
* comment explaining the problem inhibiting conversion to the new | ||
* system. Some of these problems as if this writing: | ||
* system. Some of these problems as of this writing: | ||
* * A promiseKit used as a value, even though a promiseKit is not | ||
@@ -33,3 +33,5 @@ * a passable. Solutions are to make it a passable, or to convert | ||
* @deprecated switch to ScalarMap if possible, Map otherwise | ||
* @template K,V | ||
* @param {string} [keyName='key'] - the column name for the key | ||
* @returns {LegacyMap<K,V>} | ||
*/ | ||
@@ -42,3 +44,3 @@ export const makeLegacyMap = (keyName = 'key') => { | ||
assert(m.has(key), X`${q(keyName)} not found: ${key}`); | ||
const legacyMap = harden({ | ||
return harden({ | ||
has: key => { | ||
@@ -66,8 +68,9 @@ // Check if a key exists. The key can be any JavaScript value, | ||
}, | ||
keys: () => [...m.keys()], | ||
values: () => [...m.values()], | ||
entries: () => [...m.entries()], | ||
keys: () => m.keys(), | ||
values: () => m.values(), | ||
entries: () => m.entries(), | ||
getSize: () => m.size, | ||
clear: () => m.clear(), | ||
}); | ||
return legacyMap; | ||
}; | ||
harden(makeLegacyMap); |
@@ -10,5 +10,8 @@ // @ts-check | ||
* @deprecated switch to ScalarWeakMap if possible, WeakMap otherwise | ||
* @template K,V | ||
* @param {string} [keyName='key'] - the column name for the key | ||
* @returns {LegacyWeakMap<K,V>} | ||
*/ | ||
export const makeLegacyWeakMap = (keyName = 'key') => { | ||
/** @type {WeakMap<K & Object,V>} */ | ||
const wm = new WeakMap(); | ||
@@ -19,3 +22,3 @@ const assertKeyDoesNotExist = key => | ||
assert(wm.has(key), X`${q(keyName)} not found: ${key}`); | ||
const legacyWeakMap = harden({ | ||
return harden({ | ||
has: key => { | ||
@@ -33,3 +36,4 @@ // Check if a key exists. The key can be any JavaScript value, | ||
assertKeyExists(key); | ||
return wm.get(key); | ||
// How to tell typescript I believe the `get` will succeed. | ||
return /** @type {V} */ (wm.get(key)); | ||
}, | ||
@@ -45,4 +49,3 @@ set: (key, value) => { | ||
}); | ||
return legacyWeakMap; | ||
}; | ||
harden(makeLegacyWeakMap); |
// @ts-check | ||
// eslint-disable-next-line spaced-comment | ||
/// <reference types="ses"/> | ||
@@ -5,0 +4,0 @@ |
@@ -29,3 +29,2 @@ // @ts-check | ||
// eslint-disable-next-line spaced-comment | ||
/// <reference types="ses"/> | ||
@@ -864,3 +863,7 @@ | ||
(checkMatches(specB, base, check) && | ||
(rest === undefined || checkMatches(specR, rest, check))) | ||
(rest === undefined || | ||
check( | ||
matches(specR, rest), | ||
X`Remainder ${specR} - Must match ${rest}`, | ||
))) | ||
); | ||
@@ -940,3 +943,7 @@ }, | ||
(checkMatches(specB, newBase, check) && | ||
(rest === undefined || checkMatches(specR, rest, check))) | ||
(rest === undefined || | ||
check( | ||
matches(specR, rest), | ||
X`Remainder ${specR} - Must match ${rest}`, | ||
))) | ||
); | ||
@@ -943,0 +950,0 @@ }, |
@@ -29,5 +29,5 @@ // @ts-check | ||
/* s */ ['string', ['s', 't']], | ||
/* u */ ['undefined', ['u', 'v']], | ||
/* v */ ['null', ['v', 'v~']], | ||
/* y */ ['symbol', ['y', 'z']], | ||
/* z */ ['null', ['z', 'z~']], | ||
/* z */ ['undefined', ['z', '{']], | ||
/* | remotable->ordinal mapping prefix: This is not used in covers but it is | ||
@@ -51,3 +51,3 @@ reserved from the same set of strings. Note that the prefix is > any | ||
/** | ||
* @type {WeakMap<CompareRank,WeakSet<Passable[]>>} | ||
* @type {WeakMap<RankCompare,WeakSet<Passable[]>>} | ||
*/ | ||
@@ -57,3 +57,3 @@ const memoOfSorted = new WeakMap(); | ||
/** | ||
* @type {WeakMap<CompareRank,CompareRank>} | ||
* @type {WeakMap<RankCompare,RankCompare>} | ||
*/ | ||
@@ -63,3 +63,3 @@ const comparatorMirrorImages = new WeakMap(); | ||
/** | ||
* @param {CompareRank=} compareRemotables | ||
* @param {RankCompare=} compareRemotables | ||
* An option to create a comparator in which an internal order is | ||
@@ -69,6 +69,6 @@ * assigned to remotables. This defaults to a comparator that | ||
* for the same rank. | ||
* @returns {ComparatorKit} | ||
* @returns {RankComparatorKit} | ||
*/ | ||
const makeComparatorKit = (compareRemotables = (_x, _y) => 0) => { | ||
/** @type {CompareRank} */ | ||
/** @type {RankCompare} */ | ||
const comparator = (left, right) => { | ||
@@ -190,3 +190,3 @@ if (sameValueZero(left, right)) { | ||
/** @type {CompareRank} */ | ||
/** @type {RankCompare} */ | ||
const antiComparator = (x, y) => comparator(y, x); | ||
@@ -202,4 +202,4 @@ | ||
/** | ||
* @param {CompareRank} comparator | ||
* @returns {CompareRank=} | ||
* @param {RankCompare} comparator | ||
* @returns {RankCompare=} | ||
*/ | ||
@@ -211,3 +211,3 @@ export const comparatorMirrorImage = comparator => | ||
* @param {Passable[]} passables | ||
* @param {CompareRank} compare | ||
* @param {RankCompare} compare | ||
* @returns {boolean} | ||
@@ -234,3 +234,3 @@ */ | ||
* @param {Passable[]} sorted | ||
* @param {CompareRank} compare | ||
* @param {RankCompare} compare | ||
*/ | ||
@@ -247,4 +247,13 @@ export const assertRankSorted = (sorted, compare) => | ||
/** | ||
* TODO SECURITY BUG: https://github.com/Agoric/agoric-sdk/issues/4260 | ||
* sortByRank currently uses `Array.prototype.sort` directly, and | ||
* so only works correctly when given a `compare` function that considers | ||
* `undefined` strictly bigger (`>`) than everything else. This is | ||
* because `Array.prototype.sort` bizarrely moves all `undefined`s to | ||
* the end of the array regardless, without consulting the `compare` | ||
* function. This is a genuine bug for us NOW because sometimes we sort | ||
* in reverse order by passing a reversed rank comparison function. | ||
* | ||
* @param {Iterable<Passable>} passables | ||
* @param {CompareRank} compare | ||
* @param {RankCompare} compare | ||
* @returns {Passable[]} | ||
@@ -277,3 +286,3 @@ */ | ||
* @param {Passable[]} sorted | ||
* @param {CompareRank} compare | ||
* @param {RankCompare} compare | ||
* @param {Passable} key | ||
@@ -336,3 +345,3 @@ * @param {("leftMost" | "rightMost")=} bias | ||
/** | ||
* @param {CompareRank} compare | ||
* @param {RankCompare} compare | ||
* @param {Passable} a | ||
@@ -345,3 +354,3 @@ * @param {Passable} b | ||
/** | ||
* @param {CompareRank} compare | ||
* @param {RankCompare} compare | ||
* @param {Passable} a | ||
@@ -354,3 +363,3 @@ * @param {Passable} b | ||
/** | ||
* @param {CompareRank} compare | ||
* @param {RankCompare} compare | ||
* @param {RankCover[]} covers | ||
@@ -374,3 +383,3 @@ * @returns {RankCover} | ||
/** | ||
* @param {CompareRank} compare | ||
* @param {RankCompare} compare | ||
* @param {RankCover[]} covers | ||
@@ -421,3 +430,3 @@ * @returns {RankCover} | ||
* @param {boolean=} longLived | ||
* @returns {ComparatorKit} | ||
* @returns {FullComparatorKit} | ||
*/ | ||
@@ -424,0 +433,0 @@ export const makeFullOrderComparatorKit = (longLived = false) => { |
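The TODO above can be observed with plain `Array.prototype.sort`; nothing Agoric-specific is involved.

// Array.prototype.sort moves undefined elements to the end without consulting
// the compare function, which breaks reverse-rank sorts as the TODO notes.
const descending = (a, b) => (a === b ? 0 : a < b ? 1 : -1);
[3, undefined, 1].sort(descending); // [3, 1, undefined], not [undefined, 3, 1]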
// @ts-check | ||
import { Far, assertPassable } from '@agoric/marshal'; | ||
import { | ||
Far, | ||
assertPassable, | ||
filterIterable, | ||
mapIterable, | ||
} from '@agoric/marshal'; | ||
import { compareRank } from '../patterns/rankOrder.js'; | ||
import { assertScalarKey } from '../keys/checkKey.js'; | ||
import { makeCopyMap } from '../keys/copyMap.js'; | ||
import { fit, assertPattern } from '../patterns/patternMatchers.js'; | ||
import { matches, fit, assertPattern } from '../patterns/patternMatchers.js'; | ||
import { makeWeakMapStoreMethods } from './scalarWeakMapStore.js'; | ||
import { makeCursorKit } from './store-utils.js'; | ||
import { makeCurrentKeysKit } from './store-utils.js'; | ||
const { details: X, quote: q } = assert; | ||
const { quote: q } = assert; | ||
@@ -16,3 +21,4 @@ /** | ||
* @param {Map<K,V>} jsmap | ||
* @param {(k: K, v: V) => void} assertKVOkToWrite | ||
* @param {(k: K, v: V) => void} assertKVOkToAdd | ||
* @param {(k: K, v: V) => void} assertKVOkToSet | ||
* @param {((k: K) => void)=} assertKeyOkToDelete | ||
@@ -24,3 +30,4 @@ * @param {string=} keyName | ||
jsmap, | ||
assertKVOkToWrite, | ||
assertKVOkToAdd, | ||
assertKVOkToSet, | ||
assertKeyOkToDelete = undefined, | ||
@@ -30,9 +37,9 @@ keyName = 'key', | ||
const { | ||
assertUpdateOnWrite, | ||
assertUpdateOnAdd, | ||
assertUpdateOnDelete, | ||
makeCursor, | ||
makeArray, | ||
} = makeCursorKit( | ||
iterableKeys, | ||
} = makeCurrentKeysKit( | ||
() => jsmap.keys(), | ||
compareRank, | ||
assertKVOkToWrite, | ||
assertKVOkToAdd, | ||
assertKeyOkToDelete, | ||
@@ -42,40 +49,72 @@ keyName, | ||
const methods = harden({ | ||
/** | ||
* @param {Pattern=} keyPatt | ||
* @param {Pattern=} valuePatt | ||
* @returns {Iterable<K>} | ||
*/ | ||
const keys = (keyPatt = undefined, valuePatt = undefined) => { | ||
if (keyPatt === undefined && valuePatt === undefined) { | ||
return iterableKeys; | ||
} | ||
const filter = k => { | ||
if (keyPatt !== undefined && !matches(k, keyPatt)) { | ||
return false; | ||
} | ||
// Uses the current jsmap value, since the iteratator survives `.set` | ||
if (valuePatt !== undefined && !matches(jsmap.get(k), valuePatt)) { | ||
return false; | ||
} | ||
return true; | ||
}; | ||
return filterIterable(iterableKeys, filter); | ||
}; | ||
/** | ||
* @param {Pattern=} keyPatt | ||
* @param {Pattern=} valuePatt | ||
* @returns {Iterable<V>} | ||
*/ | ||
const values = (keyPatt = undefined, valuePatt = undefined) => | ||
mapIterable(keys(keyPatt, valuePatt), k => /** @type {V} */ (jsmap.get(k))); | ||
/** | ||
* @param {Pattern=} keyPatt | ||
* @param {Pattern=} valuePatt | ||
* @returns {Iterable<[K,V]>} | ||
*/ | ||
const entries = (keyPatt = undefined, valuePatt = undefined) => | ||
mapIterable(keys(keyPatt, valuePatt), k => [ | ||
k, | ||
/** @type {V} */ (jsmap.get(k)), | ||
]); | ||
return harden({ | ||
...makeWeakMapStoreMethods( | ||
jsmap, | ||
assertUpdateOnWrite, | ||
/** @type {(k: K, v: V) => void} */ (assertUpdateOnAdd), | ||
assertKVOkToSet, | ||
assertUpdateOnDelete, | ||
keyName, | ||
), | ||
keys, | ||
values, | ||
entries, | ||
cursor: (entryPatt = undefined, direction = 'forward') => { | ||
assert.equal( | ||
direction, | ||
'forward', | ||
X`Non-forward cursors are not yet implemented: map ${q(keyName)}`, | ||
); | ||
return makeCursor(jsmap.entries(), entryPatt); | ||
}, | ||
snapshot: (keyPatt = undefined, valuePatt = undefined) => | ||
makeCopyMap(entries(keyPatt, valuePatt)), | ||
keys: (keyPatt = undefined) => makeArray(jsmap.keys(), keyPatt), | ||
values: (valPatt = undefined) => makeArray(jsmap.values(), valPatt), | ||
entries: (entryPatt = undefined) => makeArray(jsmap.entries(), entryPatt), | ||
getSize: (keyPatt = undefined, valuePatt = undefined) => | ||
keyPatt === undefined && valuePatt === undefined | ||
? jsmap.size | ||
: [...keys(keyPatt, valuePatt)].length, | ||
snapshot: (entryPatt = undefined) => makeCopyMap(methods.cursor(entryPatt)), | ||
addAll: copyMap => { | ||
const { | ||
payload: { keys, values }, | ||
} = copyMap; | ||
const { length } = keys; | ||
for (let i = 0; i < length; i += 1) { | ||
const key = keys[i]; | ||
const value = values[i]; | ||
// Don't assert that the key either does or does not exist. | ||
assertUpdateOnWrite(key, value); | ||
jsmap.set(key, value); | ||
clear: (keyPatt = undefined, valuePatt = undefined) => { | ||
if (keyPatt === undefined && valuePatt === undefined) { | ||
jsmap.clear(); | ||
} | ||
for (const key of keys(keyPatt, valuePatt)) { | ||
jsmap.delete(key); | ||
} | ||
}, | ||
}); | ||
return methods; | ||
}; | ||
@@ -101,25 +140,45 @@ | ||
keyName = 'key', | ||
{ schema = undefined } = {}, | ||
{ keyPattern = undefined, valuePattern = undefined } = {}, | ||
) => { | ||
const jsmap = new Map(); | ||
if (schema) { | ||
assertPattern(schema); | ||
if (keyPattern !== undefined) { | ||
assertPattern(keyPattern); | ||
} | ||
const assertKVOkToWrite = (key, value) => { | ||
if (valuePattern !== undefined) { | ||
assertPattern(valuePattern); | ||
} | ||
const assertKVOkToSet = (_key, value) => { | ||
// TODO: Just a transition kludge. Remove when possible. | ||
// See https://github.com/Agoric/agoric-sdk/issues/3606 | ||
harden(key); | ||
harden(value); | ||
assertScalarKey(key); | ||
assertPassable(value); | ||
if (schema) { | ||
fit(harden([key, value]), schema); | ||
if (valuePattern !== undefined) { | ||
fit(value, valuePattern); | ||
} | ||
}; | ||
const mapStore = Far(`scalar MapStore of ${q(keyName)}`, { | ||
...makeMapStoreMethods(jsmap, assertKVOkToWrite, undefined, keyName), | ||
const assertKVOkToAdd = (key, value) => { | ||
// TODO: Just a transition kludge. Remove when possible. | ||
// See https://github.com/Agoric/agoric-sdk/issues/3606 | ||
harden(key); | ||
assertScalarKey(key); | ||
if (keyPattern !== undefined) { | ||
fit(key, keyPattern); | ||
} | ||
assertKVOkToSet(key, value); | ||
}; | ||
return Far(`scalar MapStore of ${q(keyName)}`, { | ||
...makeMapStoreMethods( | ||
jsmap, | ||
assertKVOkToAdd, | ||
assertKVOkToSet, | ||
undefined, | ||
keyName, | ||
), | ||
}); | ||
return mapStore; | ||
}; | ||
harden(makeScalarMapStore); |
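Usage sketch of the new per-key/per-value pattern options and pattern-filtered queries shown above. It assumes index.js re-exports `makeScalarMapStore` and the matcher namespace `M` the same way it re-exports the store makers shown earlier; the specific matchers used are illustrative.

import { M, makeScalarMapStore } from '@agoric/store';

const balances = makeScalarMapStore('account', {
  keyPattern: M.string(), // assumed matcher: every key must be a string
  valuePattern: M.nat(),  // assumed matcher: every value must be a nat bigint
});
balances.init('alice', 10n);
balances.set('alice', 25n);
for (const [account, amount] of balances.entries()) {
  // entries() now returns a live iterable in rank order of keys, optionally
  // filtered by key/value patterns, instead of a snapshot array.
}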
// @ts-check | ||
import { Far } from '@agoric/marshal'; | ||
import { Far, filterIterable } from '@agoric/marshal'; | ||
import { compareRank } from '../patterns/rankOrder.js'; | ||
import { assertScalarKey } from '../keys/checkKey.js'; | ||
import { makeCopySet } from '../keys/copySet.js'; | ||
import { fit, assertPattern } from '../patterns/patternMatchers.js'; | ||
import { matches, fit, assertPattern } from '../patterns/patternMatchers.js'; | ||
import { makeWeakSetStoreMethods } from './scalarWeakSetStore.js'; | ||
import { makeCursorKit } from './store-utils.js'; | ||
import { makeCurrentKeysKit } from './store-utils.js'; | ||
const { details: X, quote: q } = assert; | ||
const { quote: q } = assert; | ||
@@ -16,3 +16,3 @@ /** | ||
* @param {Set<K>} jsset | ||
* @param {(k: K) => void} assertKeyOkToWrite | ||
* @param {(k: K) => void} assertKeyOkToAdd | ||
* @param {((k: K) => void)=} assertKeyOkToDelete | ||
@@ -24,3 +24,3 @@ * @param {string=} keyName | ||
jsset, | ||
assertKeyOkToWrite, | ||
assertKeyOkToAdd, | ||
assertKeyOkToDelete = undefined, | ||
@@ -30,9 +30,9 @@ keyName = 'key', | ||
const { | ||
assertUpdateOnWrite, | ||
assertUpdateOnAdd, | ||
assertUpdateOnDelete, | ||
makeCursor, | ||
makeArray, | ||
} = makeCursorKit( | ||
iterableKeys, | ||
} = makeCurrentKeysKit( | ||
() => jsset.keys(), | ||
compareRank, | ||
assertKeyOkToWrite, | ||
assertKeyOkToAdd, | ||
assertKeyOkToDelete, | ||
@@ -42,6 +42,15 @@ keyName, | ||
const methods = harden({ | ||
/** | ||
* @param {Pattern=} keyPatt | ||
* @returns {Iterable<K>} | ||
*/ | ||
const keys = (keyPatt = undefined) => | ||
keyPatt === undefined | ||
? iterableKeys | ||
: filterIterable(iterableKeys, k => matches(k, keyPatt)); | ||
return harden({ | ||
...makeWeakSetStoreMethods( | ||
jsset, | ||
assertUpdateOnWrite, | ||
assertUpdateOnAdd, | ||
assertUpdateOnDelete, | ||
@@ -51,27 +60,18 @@ keyName, | ||
cursor: (keyPatt = undefined, direction = 'forward') => { | ||
assert.equal( | ||
direction, | ||
'forward', | ||
X`Non-forward cursors are not yet implemented: map ${q(keyName)}`, | ||
); | ||
return makeCursor(jsset.keys(), keyPatt); | ||
}, | ||
keys, | ||
keys: (keyPatt = undefined) => makeArray(jsset.keys(), keyPatt), | ||
snapshot: (keyPatt = undefined) => makeCopySet(keys(keyPatt)), | ||
snapshot: (keyPatt = undefined) => makeCopySet(methods.cursor(keyPatt)), | ||
getSize: (keyPatt = undefined) => | ||
keyPatt === undefined ? jsset.size : [...keys(keyPatt)].length, | ||
addAll: copySet => { | ||
const { payload: keys } = copySet; | ||
const { length } = keys; | ||
for (let i = 0; i < length; i += 1) { | ||
const key = keys[i]; | ||
// Don't assert that the key either does or does not exist. | ||
assertKeyOkToWrite(key); | ||
jsset.add(key); | ||
clear: (keyPatt = undefined) => { | ||
if (keyPatt === undefined) { | ||
jsset.clear(); | ||
} | ||
for (const key of keys(keyPatt)) { | ||
jsset.delete(key); | ||
} | ||
}, | ||
}); | ||
return methods; | ||
}; | ||
@@ -97,9 +97,10 @@ | ||
keyName = 'key', | ||
{ schema = undefined } = {}, | ||
{ keyPattern = undefined } = {}, | ||
) => { | ||
const jsset = new Set(); | ||
if (schema) { | ||
assertPattern(schema); | ||
if (keyPattern !== undefined) { | ||
assertPattern(keyPattern); | ||
} | ||
const assertKeyOkToWrite = key => { | ||
const assertKeyOkToAdd = key => { | ||
// TODO: Just a transition kludge. Remove when possible. | ||
@@ -110,11 +111,11 @@ // See https://github.com/Agoric/agoric-sdk/issues/3606 | ||
assertScalarKey(key); | ||
if (schema) { | ||
fit(key, schema); | ||
if (keyPattern !== undefined) { | ||
fit(key, keyPattern); | ||
} | ||
}; | ||
const setStore = Far(`scalar SetStore of ${q(keyName)}`, { | ||
...makeSetStoreMethods(jsset, assertKeyOkToWrite, undefined, keyName), | ||
return Far(`scalar SetStore of ${q(keyName)}`, { | ||
...makeSetStoreMethods(jsset, assertKeyOkToAdd, undefined, keyName), | ||
}); | ||
return setStore; | ||
}; | ||
harden(makeScalarSetStore); |
@@ -11,3 +11,4 @@ // @ts-check | ||
* @param {WeakMap<K & Object,V>} jsmap | ||
* @param {(k: K, v: V) => void} assertKVOkToWrite | ||
* @param {(k: K, v: V) => void} assertKVOkToAdd | ||
* @param {(k: K, v: V) => void} assertKVOkToSet | ||
* @param {((k: K) => void)=} assertKeyOkToDelete | ||
@@ -19,4 +20,5 @@ * @param {string=} keyName | ||
jsmap, | ||
assertKVOkToWrite, | ||
assertKeyOkToDelete = () => {}, | ||
assertKVOkToAdd, | ||
assertKVOkToSet, | ||
assertKeyOkToDelete = undefined, | ||
keyName = 'key', | ||
@@ -45,3 +47,3 @@ ) => { | ||
assertKeyDoesNotExist(key); | ||
assertKVOkToWrite(key, value); | ||
assertKVOkToAdd(key, value); | ||
jsmap.set(key, value); | ||
@@ -51,3 +53,3 @@ }, | ||
assertKeyExists(key); | ||
assertKVOkToWrite(key, value); | ||
assertKVOkToSet(key, value); | ||
jsmap.set(key, value); | ||
@@ -57,5 +59,15 @@ }, | ||
assertKeyExists(key); | ||
assertKeyOkToDelete(key); | ||
if (assertKeyOkToDelete !== undefined) { | ||
assertKeyOkToDelete(key); | ||
} | ||
jsmap.delete(key); | ||
}, | ||
addAll: entries => { | ||
for (const [key, value] of entries) { | ||
// Don't assert that the key either does or does not exist. | ||
assertKVOkToAdd(key, value); | ||
jsmap.set(key, value); | ||
} | ||
}, | ||
}); | ||
@@ -65,5 +77,6 @@ }; | ||
/** | ||
* This is a *scalar* map in that the keys can only be atomic values, primitives | ||
* or remotables. Other storeMaps will accept, for example, copyArrays and | ||
* copyRecords, as keys and look them up based on equality of their contents. | ||
* This is a *scalar* mapStore in that the keys can only be atomic values: | ||
* primitives or remotables. | ||
* Other mapStores will accept, for example, copyArrays and | ||
* copyRecords as keys and look them up based on equality of their contents. | ||
* | ||
@@ -75,3 +88,3 @@ * TODO For now, this scalarWeakMap accepts only remotables, reflecting the | ||
* there. Though note that this would only enables collection of the | ||
* remotables, since the other primitives may always appear. | ||
* remotables, since the other primitives may always reappear. | ||
* | ||
@@ -85,14 +98,28 @@ * @template K,V | ||
keyName = 'key', | ||
{ longLived = true, schema = undefined } = {}, | ||
{ longLived = true, keyPattern = undefined, valuePattern = undefined } = {}, | ||
) => { | ||
const jsmap = new (longLived ? WeakMap : Map)(); | ||
if (schema) { | ||
assertPattern(schema); | ||
if (keyPattern !== undefined) { | ||
assertPattern(keyPattern); | ||
} | ||
const assertKVOkToWrite = (key, value) => { | ||
if (valuePattern !== undefined) { | ||
assertPattern(valuePattern); | ||
} | ||
const assertKVOkToSet = (_key, value) => { | ||
// TODO: Just a transition kludge. Remove when possible. | ||
// See https://github.com/Agoric/agoric-sdk/issues/3606 | ||
harden(key); | ||
harden(value); | ||
assertPassable(value); | ||
if (valuePattern !== undefined) { | ||
fit(value, valuePattern); | ||
} | ||
}; | ||
const assertKVOkToAdd = (key, value) => { | ||
// TODO: Just a transition kludge. Remove when possible. | ||
// See https://github.com/Agoric/agoric-sdk/issues/3606 | ||
harden(key); | ||
assert( | ||
@@ -102,12 +129,18 @@ passStyleOf(key) === 'remotable', | ||
); | ||
assertPassable(value); | ||
if (schema) { | ||
fit(harden([key, value]), schema); | ||
if (keyPattern !== undefined) { | ||
fit(key, keyPattern); | ||
} | ||
assertKVOkToSet(key, value); | ||
}; | ||
const weakMapStore = Far(`scalar WeakMapStore of ${q(keyName)}`, { | ||
...makeWeakMapStoreMethods(jsmap, assertKVOkToWrite, undefined, keyName), | ||
return Far(`scalar WeakMapStore of ${q(keyName)}`, { | ||
...makeWeakMapStoreMethods( | ||
jsmap, | ||
assertKVOkToAdd, | ||
assertKVOkToSet, | ||
undefined, | ||
keyName, | ||
), | ||
}); | ||
return weakMapStore; | ||
}; | ||
harden(makeScalarWeakMapStore); |
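Per the comment above, this scalarWeakMap currently accepts only remotables as keys, so a minimal sketch looks like the following (import paths assumed from the package index; `harden` is the SES global):

import { Far } from '@agoric/marshal';
import { makeScalarWeakMapStore } from '@agoric/store';

const seatData = makeScalarWeakMapStore('seat');
const seat = Far('seat', {});
seatData.init(seat, harden({ want: 'Moola' }));
seatData.get(seat); // { want: 'Moola' }
// The new addAll accepts an iterable of [key, value] entries:
seatData.addAll([[Far('seat', {}), harden({ want: 'Simolean' })]]);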
@@ -11,3 +11,3 @@ // @ts-check | ||
* @param {WeakSet<K & Object>} jsset | ||
* @param {(k: K) => void} assertKeyOkToWrite | ||
* @param {(k: K) => void} assertKeyOkToAdd | ||
* @param {((k: K) => void)=} assertKeyOkToDelete | ||
@@ -19,4 +19,4 @@ * @param {string=} keyName | ||
jsset, | ||
assertKeyOkToWrite, | ||
assertKeyOkToDelete = () => {}, | ||
assertKeyOkToAdd, | ||
assertKeyOkToDelete = undefined, | ||
keyName = 'key', | ||
@@ -36,3 +36,3 @@ ) => { | ||
add: key => { | ||
assertKeyOkToWrite(key); | ||
assertKeyOkToAdd(key); | ||
jsset.add(key); | ||
@@ -42,5 +42,14 @@ }, | ||
assertKeyExists(key); | ||
assertKeyOkToDelete(key); | ||
if (assertKeyOkToDelete !== undefined) { | ||
assertKeyOkToDelete(key); | ||
} | ||
jsset.delete(key); | ||
}, | ||
addAll: keys => { | ||
for (const key of keys) { | ||
assertKeyOkToAdd(key); | ||
jsset.add(key); | ||
} | ||
}, | ||
}); | ||
@@ -68,9 +77,10 @@ }; | ||
keyName = 'key', | ||
{ longLived = true, schema = undefined } = {}, | ||
{ longLived = true, keyPattern = undefined } = {}, | ||
) => { | ||
const jsset = new (longLived ? WeakSet : Set)(); | ||
if (schema) { | ||
assertPattern(schema); | ||
if (keyPattern !== undefined) { | ||
assertPattern(keyPattern); | ||
} | ||
const assertKeyOkToWrite = key => { | ||
const assertKeyOkToAdd = key => { | ||
// TODO: Just a transition kludge. Remove when possible. | ||
@@ -84,11 +94,11 @@ // See https://github.com/Agoric/agoric-sdk/issues/3606 | ||
); | ||
if (schema) { | ||
fit(key, schema); | ||
if (keyPattern !== undefined) { | ||
fit(key, keyPattern); | ||
} | ||
}; | ||
const weakSetStore = Far(`scalar WeakSetStore of ${q(keyName)}`, { | ||
...makeWeakSetStoreMethods(jsset, assertKeyOkToWrite, undefined, keyName), | ||
return Far(`scalar WeakSetStore of ${q(keyName)}`, { | ||
...makeWeakSetStoreMethods(jsset, assertKeyOkToAdd, undefined, keyName), | ||
}); | ||
return weakSetStore; | ||
}; | ||
harden(makeScalarWeakSetStore); |
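Similarly, the weak set store gains a bulk `addAll`. A sketch; `makeScalarWeakSetStore` is re-exported from index.js as shown near the top of this diff.

import { Far } from '@agoric/marshal';
import { makeScalarWeakSetStore } from '@agoric/store';

const handles = makeScalarWeakSetStore('handle');
const h1 = Far('handle', {});
const h2 = Far('handle', {});
handles.addAll([h1, h2]); // asserts each key is ok to add, then adds it
handles.has(h1);          // true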
// @ts-check | ||
import { filterIterable } from '@agoric/marshal'; | ||
import { matches } from '../patterns/patternMatchers.js'; | ||
import { assertRankSorted } from '../patterns/rankOrder.js'; | ||
import { Far } from '@agoric/marshal'; | ||
const { details: X, quote: q } = assert; | ||
export const makeCursorKit = ( | ||
/** | ||
* @template K,V | ||
* @typedef {Object} CurrentKeysKit | ||
* @property {(k: K, v?: V) => void} assertUpdateOnAdd | ||
* @property {(k: K) => void} assertUpdateOnDelete | ||
* @property {Iterable<K>} iterableKeys | ||
*/ | ||
/** | ||
* @template K,V | ||
* @param {() => Iterable<K>} getRawKeys | ||
* @param {RankCompare} compare | ||
* @param {(k: K, v?: V) => void} assertOkToAdd | ||
* @param {((k: K) => void)=} assertOkToDelete | ||
* @param {string=} keyName | ||
* @returns {CurrentKeysKit<K,V>} | ||
*/ | ||
export const makeCurrentKeysKit = ( | ||
getRawKeys, | ||
compare, | ||
assertOkToWrite, | ||
assertOkToAdd, | ||
assertOkToDelete = undefined, | ||
@@ -16,55 +32,62 @@ keyName = 'key', | ||
let updateCount = 0; | ||
let sortedKeysMemo; | ||
const cursorKit = harden({ | ||
assertUpdateOnWrite: (k, v) => { | ||
assertOkToWrite(k, v); | ||
updateCount += 1; | ||
}, | ||
const assertUpdateOnAdd = (k, v = undefined) => { | ||
assertOkToAdd(k, v); | ||
updateCount += 1; | ||
sortedKeysMemo = undefined; | ||
}; | ||
assertUpdateOnDelete: assertOkToDelete | ||
? k => { | ||
assertOkToDelete(k); | ||
const assertUpdateOnDelete = | ||
assertOkToDelete === undefined | ||
? _k => { | ||
updateCount += 1; | ||
sortedKeysMemo = undefined; | ||
} | ||
: () => { | ||
: k => { | ||
assertOkToDelete(k); | ||
updateCount += 1; | ||
}, | ||
sortedKeysMemo = undefined; | ||
}; | ||
makeArray: (baseIterable, pattern = undefined) => { | ||
const filter = pattern ? val => matches(harden(val), pattern) : harden; | ||
const filtered = filterIterable(baseIterable, filter); | ||
const sorted = harden([...filtered].sort(compare)); | ||
assertRankSorted(sorted, compare); | ||
return sorted; | ||
}, | ||
const getSortedKeys = () => { | ||
if (sortedKeysMemo === undefined) { | ||
sortedKeysMemo = harden([...getRawKeys()].sort(compare)); | ||
} | ||
return sortedKeysMemo; | ||
}; | ||
makeCursor: (baseIterable, pattern = undefined) => { | ||
const currentUpdateCount = updateCount; | ||
const notStaleFilter = () => { | ||
assert.equal( | ||
currentUpdateCount, | ||
updateCount, | ||
X`MapStore ${q(keyName)} cursor stale`, | ||
); | ||
return true; | ||
}; | ||
// TODO In an implementation where the baseIterable returns its data | ||
// already rank sorted, `makeCursor` would use the following | ||
// code to make a cursor, and makeArray would be a snapshot of that. | ||
// However, | ||
// to get the correct external behavior on non-ordered representation, | ||
// we sort in makeArray instead and then makeCursor return a cursor built | ||
// from that. | ||
// const filter = pattern | ||
// ? val => notStaleFilter() && matches(val, pattern) | ||
// : notStaleFilter; | ||
// return filterIterable(baseIterable, filter); | ||
const sorted = cursorKit.makeArray(baseIterable, pattern); | ||
return filterIterable(sorted, notStaleFilter); | ||
const iterableKeys = Far('Iterable of keys', { | ||
[Symbol.iterator]: () => { | ||
const generation = updateCount; | ||
getSortedKeys(); | ||
const len = sortedKeysMemo.length; | ||
let i = 0; | ||
return Far('Iterator of keys', { | ||
next: () => { | ||
assert.equal( | ||
generation, | ||
updateCount, | ||
X`Store ${q(keyName)} cursor stale`, | ||
); | ||
// If they're equal, then the sortedKeyMemo is the same one | ||
// we started with. | ||
if (i < len) { | ||
const result = harden({ done: false, value: sortedKeysMemo[i] }); | ||
i += 1; | ||
return result; | ||
} else { | ||
return harden({ done: true, value: undefined }); | ||
} | ||
}, | ||
}); | ||
}, | ||
}); | ||
return cursorKit; | ||
return harden({ | ||
assertUpdateOnAdd, | ||
assertUpdateOnDelete, | ||
iterableKeys, | ||
}); | ||
}; | ||
harden(makeCursorKit); | ||
harden(makeCurrentKeysKit); |
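The stale-cursor check above means any add or delete invalidates a live keys iterator on its next `next()` call. A sketch of the observable behavior, assuming `makeScalarSetStore` is re-exported from index.js like the other store makers:

import { makeScalarSetStore } from '@agoric/store';

const names = makeScalarSetStore('name');
names.add('a');
names.add('b');
const iter = names.keys()[Symbol.iterator]();
iter.next();       // { done: false, value: 'a' } (rank order)
names.delete('b'); // bumps the kit's updateCount
iter.next();       // throws: Store "name" cursor stale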
src/types.js
// @ts-check | ||
// eslint-disable-next-line spaced-comment | ||
/// <reference types="ses"/> | ||
@@ -50,3 +49,3 @@ | ||
* | ||
* * I say "indended" above because Patterns, in order to be declarative | ||
* * I say "intended" above because Patterns, in order to be declarative | ||
* and passable, cannot have the generality of predicates written in a | ||
@@ -89,11 +88,18 @@ * Turing-universal programming language. Rather, to represent the category of | ||
* Defaults to true, so please mark short lived stores explicitly. | ||
* @property {Pattern=} schema The store will reject | ||
* the insertion of any content that does not match the schema pattern. | ||
* For a SetStore, this is a key pattern. | ||
* For a MapStore, this is an entry pattern, | ||
* i.e., a pattern over `[key,value]` pairs. | ||
* Defaults to `M.any()`. | ||
* @property {Pattern=} keyPattern | ||
* @property {Pattern=} valuePattern | ||
*/ | ||
/** @typedef {"forward" | "reverse"} Direction */ | ||
/** | ||
* Most store methods are in one of three categories | ||
* * lookup methods (`has`,`get`) | ||
* * update methods (`add`,`init`,`set`,`delete`,`addAll`) | ||
* * query methods (`snapshot`,`keys`,`values`,`entries`,`getSize`) | ||
* * query-update methods (`clear`) | ||
* | ||
* WeakStores have the lookup and update methods but not the query | ||
* or query-update methods. | ||
* Non-weak Stores are like their corresponding WeakStore, but with the | ||
* additional query and query-update methods. | ||
*/ | ||
@@ -113,2 +119,3 @@ /** | ||
* Remove the key. Throws if not found. | ||
* @property {(keys: Iterable<K>) => void} addAll | ||
*/ | ||
@@ -129,8 +136,7 @@ | ||
* Remove the key. Throws if not found. | ||
* @property {(keyPattern?: Pattern) => K[]} keys | ||
* @property {(keyPattern?: Pattern) => CopySet<K>} snapshot | ||
* @property {(copySet: CopySet<K>) => void} addAll | ||
* @property {(keyPattern?: Pattern, | ||
* direction?: Direction | ||
* ) => Iterable<K>} cursor | ||
* @property {(keys: Iterable<K>) => void} addAll | ||
* @property {(keyPatt?: Pattern) => Iterable<K>} keys | ||
* @property {(keyPatt?: Pattern) => CopySet<K>} snapshot | ||
* @property {(keyPatt?: Pattern) => number} getSize | ||
* @property {(keyPatt?: Pattern) => void} clear | ||
*/ | ||
@@ -154,2 +160,3 @@ | ||
* Remove the key. Throws if not found. | ||
* @property {(entries: Iterable<[K,V]>) => void} addAll | ||
*/ | ||
@@ -173,10 +180,12 @@ | ||
* Remove the key. Throws if not found. | ||
* @property {(keyPattern?: Pattern) => K[]} keys | ||
* @property {(valuePattern?: Pattern) => V[]} values | ||
* @property {(entryPattern?: Pattern) => [K,V][]} entries | ||
* @property {(entryPattern?: Pattern) => CopyMap<K,V>} snapshot | ||
* @property {(copyMap: CopyMap<K,V>) => void} addAll | ||
* @property {(entryPattern?: Pattern, | ||
* direction?: Direction | ||
* ) => Iterable<[K,V]>} cursor | ||
* @property {(entries: Iterable<[K,V]>) => void} addAll | ||
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => Iterable<K>} keys | ||
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => Iterable<V>} values | ||
* @property {( | ||
* keyPatt?: Pattern, | ||
* valuePatt?: Pattern | ||
* ) => Iterable<[K,V]>} entries | ||
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => CopyMap<K,V>} snapshot | ||
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => number} getSize | ||
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => void} clear | ||
*/ | ||
@@ -194,13 +203,17 @@ | ||
* @template K,V | ||
* @typedef {Object} Store | ||
* @typedef {MapStore<K,V>} Store | ||
* Deprecated type name `Store`. Use `MapStore` instead. | ||
*/ | ||
/** | ||
* @template K,V | ||
* @typedef {Object} LegacyWeakMap | ||
* LegacyWeakMap is deprecated. Use WeakMapStore instead. | ||
* @property {(key: K) => boolean} has | ||
* Check if a key exists. The key can be any JavaScript value, though the | ||
* answer will always be false for keys that cannot be found in this map | ||
* Check if a key exists | ||
* @property {(key: K) => V} get | ||
* Return a value for the key. Throws if not found. | ||
* @property {(key: K, value: V) => void} init | ||
* Initialize the key only if it doesn't already exist. | ||
* The key must be one allowed by this store. For example a scalar store only | ||
* allows primitives and remotables. | ||
* Initialize the key only if it | ||
* doesn't already exist | ||
* @property {(key: K, value: V) => void} set | ||
@@ -210,5 +223,2 @@ * Set the key. Throws if not found. | ||
* Remove the key. Throws if not found. | ||
* @property {(keyPattern?: Pattern) => K[]} keys | ||
* @property {(valuePattern?: Pattern) => V[]} values | ||
* @property {(entryPattern?: Pattern) => [K,V][]} entries | ||
*/ | ||
@@ -218,3 +228,3 @@ | ||
* @template K,V | ||
* @typedef {Object} LegacyWeakMap | ||
* @typedef {Object} LegacyMap | ||
* LegacyWeakMap is deprecated. Use WeakMapStore instead. | ||
@@ -232,15 +242,20 @@ * @property {(key: K) => boolean} has | ||
* Remove the key. Throws if not found. | ||
* @property {() => Iterable<K>} keys | ||
* @property {() => Iterable<V>} values | ||
* @property {() => Iterable<[K,V]>} entries | ||
* @property {() => number} getSize | ||
* @property {() => void} clear | ||
*/ | ||
// ///////////////////////////////////////////////////////////////////////////// | ||
/** | ||
* @template K,V | ||
* @callback MakeLegacyWeakMap | ||
* @param {string} [keyName='key'] - the column name for the key | ||
* @returns {LegacyWeakMap} | ||
* @typedef {-1 | 0 | 1} RankComparison | ||
* The result of a `RankCompare` function that defines a rank-order, i.e., | ||
* a total order in which different elements can be tied for the same | ||
* rank. See `RankCompare`. | ||
*/ | ||
// ///////////////////////////////////////////////////////////////////////////// | ||
/** | ||
* @callback CompareRank | ||
* @callback RankCompare | ||
* Returns `-1`, `0`, or `1` depending on whether the rank of `left` | ||
@@ -273,11 +288,87 @@ * is before, tied-with, or after the rank of `right`. | ||
* @param {Passable} right | ||
* @returns {-1 | 0 | 1} | ||
* @returns {RankComparison} | ||
*/ | ||
/** | ||
* @typedef {Object} ComparatorKit | ||
* @property {CompareRank} comparator | ||
* @property {CompareRank} antiComparator | ||
* @typedef {RankCompare} FullCompare | ||
* A `FullCompare` function satisfies all the invariants stated below for | ||
* `RankCompare`'s relation with KeyCompare. | ||
* In addition, its equality is as precise as the `KeyCompare` | ||
* comparison defined below, in that, for all Keys `x` and `y`, | ||
* `FullCompare(x, y) === 0` iff `KeyCompare(x, y) === 0`. | ||
* | ||
* For non-keys a `FullCompare` should be exactly as imprecise as | ||
* `RankCompare`. For example, both will treat all errors as in the same | ||
* equivalence class. Both will treat all promises as in the same | ||
* equivalence class. Both will order taggeds the same way, which is admittedly | ||
* weird, as some taggeds will be considered keys and other taggeds will be | ||
* considered non-keys. | ||
*/ | ||
/** | ||
* @typedef {Object} RankComparatorKit | ||
* @property {RankCompare} comparator | ||
* @property {RankCompare} antiComparator | ||
*/ | ||
/** | ||
* @typedef {Object} FullComparatorKit | ||
* @property {FullCompare} comparator | ||
* @property {FullCompare} antiComparator | ||
*/ | ||
/** | ||
* @typedef {-1 | 0 | 1 | NaN} KeyComparison | ||
* The result of a `KeyCompare` function that defines a meaningful | ||
* and meaningfully precise partial order of `Key` values. See `KeyCompare`. | ||
*/ | ||
/** | ||
* @callback KeyCompare | ||
* `compareKeys` implements a partial order over keys. As with the | ||
* rank ordering produced by `compareRank`, -1, 0, and 1 mean | ||
* "less than", "equivalent to", and "greater than" respectively. | ||
* NaN means "incomparable" --- the first key is not less, equivalent, | ||
* or greater than the second. For example, subsets over sets is | ||
* a partial order. | ||
* | ||
* By using NaN for "incomparable", the normal equivalence for using | ||
* the return value in a comparison is preserved. | ||
* `compareKeys(left, right) >= 0` iff `left` is greater than or | ||
* equivalent to `right` in the partial ordering. | ||
* | ||
* Key order (a partial order) and rank order (a full order) are | ||
* co-designed so that we store passables in rank order and index into them | ||
* with keys for key-based queries. To keep these distinct, when speaking | ||
* informally about rank, we talk about "earlier" and "later". When speaking | ||
* informally about keys, we talk about "smaller" and "bigger". | ||
* | ||
* In both orders, the return-0 case defines | ||
* an equivalence class, i.e., those that are tied for the same place in the | ||
* order. The global invariant that we need between the two orders is that the | ||
* key order equivalence class is always at least as precise as the | ||
* rank order equivalence class. IOW, if `compareKeys(X,Y) === 0` then | ||
* `compareRank(X,Y) === 0`. But not vice versa. For example, two different | ||
* remotables are the same rank but incommensurate as keys. | ||
* | ||
* A further invariant is if `compareKeys(X,Y) < 0` then | ||
* `compareRank(X,Y) <= 0`, i.e., if X is smaller than Y in key order, then X | ||
* must be at least as early as Y in rank order. But not vice versa. | ||
* X can be earlier than Y in rank order and still be incommensurate with Y in | ||
* key order. For example, the record `{b: 3, a: 5}` is earlier than but | ||
* incommensurate with the record `{b: 5, a: 3}`. | ||
* | ||
* This lets us translate a range search over the | ||
* partial key order into a range search over rank order followed by filtering | ||
* out those that don't match. To get this effect, we store the elements of | ||
* a set in an array sorted in reverse rank order, from later to earlier. | ||
* Combined with our lexicographic comparison of arrays, if set X is a subset | ||
* of set Y then the array representing set X will be an earlier rank that the | ||
* array representing set Y. | ||
* | ||
* @param {Key} left | ||
* @param {Key} right | ||
* @returns {KeyComparison} | ||
*/ | ||
// ///////////////////// Should be internal only types ///////////////////////// | ||
@@ -308,3 +399,3 @@ | ||
* @param {Passable[]} sorted | ||
* @param {CompareRank} compare | ||
* @param {RankCompare} compare | ||
* @param {RankCover} rankCover | ||
@@ -311,0 +402,0 @@ * @returns {IndexCover} |
Manifest confusion
Supply chain risk: This package has inconsistent metadata. This could be malicious or caused by an error when publishing the package.
Found 1 instance in 1 package