@agoric/store - npm package version comparison

Comparing version 0.6.9-dev-3c4b2fb.0 to 0.6.9-dev-3ddd160.0

src/keys/copyBag.js


package.json
{
"name": "@agoric/store",
"version": "0.6.9-dev-3c4b2fb.0+3c4b2fb",
"version": "0.6.9-dev-3ddd160.0+3ddd160",
"description": "Wrapper for JavaScript map",

@@ -14,6 +14,5 @@ "type": "module",

"test:xs": "exit 0",
"lint-fix": "yarn lint:eslint --fix && yarn lint:types",
"lint-check": "yarn lint",
"lint": "yarn lint:types && yarn lint:eslint",
"lint:types": "tsc -p jsconfig.json",
"lint-fix": "yarn lint:eslint --fix",
"lint": "run-s --continue-on-error lint:*",
"lint:types": "tsc --maxNodeModuleJsDepth 3 -p jsconfig.json",
"lint:eslint": "eslint '**/*.js'"

@@ -35,9 +34,9 @@ },

"dependencies": {
"@agoric/assert": "0.3.16-dev-3c4b2fb.0+3c4b2fb",
"@agoric/eventual-send": "0.14.1-dev-3c4b2fb.0+3c4b2fb",
"@agoric/marshal": "0.5.1-dev-3c4b2fb.0+3c4b2fb",
"@agoric/promise-kit": "0.2.30-dev-3c4b2fb.0+3c4b2fb"
"@agoric/assert": "0.3.16-dev-3ddd160.0+3ddd160",
"@agoric/eventual-send": "0.14.1-dev-3ddd160.0+3ddd160",
"@agoric/promise-kit": "0.2.30-dev-3ddd160.0+3ddd160",
"@endo/marshal": "^0.5.4"
},
"devDependencies": {
"@agoric/swingset-vat": "0.24.2-dev-3c4b2fb.0+3c4b2fb",
"@agoric/swingset-vat": "0.24.2-dev-3ddd160.0+3ddd160",
"ava": "^3.12.1"

@@ -58,6 +57,2 @@ },

],
"prettier": {
"trailingComma": "all",
"singleQuote": true
},
"publishConfig": {

@@ -72,3 +67,3 @@ "access": "public"

},
"gitHead": "3c4b2fb7126f82211bacd0b4c5a1e0a7ea595ed9"
"gitHead": "3ddd160f343a7ad6faebeee8e09787310a63e211"
}


src/index.js
// @ts-check
export { isKey, assertKey } from './keys/checkKey.js';
export { keyLT, keyLTE, keyEQ, keyGTE, keyGT } from './keys/compareKeys.js';
export {
isKey,
assertKey,
makeCopySet,
getCopySetKeys,
makeCopyBag,
makeCopyBagFromElements,
getCopyBagEntries,
makeCopyMap,
getCopyMapEntries,
} from './keys/checkKey.js';
export { coerceToElements } from './keys/copySet.js';
export { coerceToBagEntries } from './keys/copyBag.js';
export {
compareKeys,
keyLT,
keyLTE,
keyEQ,
keyGTE,
keyGT,
} from './keys/compareKeys.js';
export {
elementsIsSuperset,
elementsIsDisjoint,
elementsCompare,
elementsUnion,
elementsDisjointUnion,
elementsIntersection,
elementsDisjointSubtract,
setIsSuperset,
setIsDisjoint,
setCompare,
setUnion,
setDisjointUnion,
setIntersection,
setDisjointSubtract,
} from './keys/merge-set-operators.js';
export { makePatternKit } from './patterns/patternMatchers.js';
export { compareRank } from './patterns/rankOrder.js';
export {
bagIsSuperbag,
bagCompare,
bagUnion,
bagIntersection,
bagDisjointSubtract,
} from './keys/merge-bag-operators.js';
export {
M,
getRankCover,
isPattern,
assertPattern,
assertKeyPattern,
matches,
fit,
} from './patterns/patternMatchers.js';
export { compareRank, isRankSorted, sortByRank } from './patterns/rankOrder.js';
export {
makeDecodeKey,
makeEncodeKey,
zeroPad,
BIGINT_TAG_LEN,
} from './patterns/encodeKey.js';
export { makeScalarWeakSetStore } from './stores/scalarWeakSetStore.js';

@@ -10,0 +67,0 @@ export { makeScalarSetStore } from './stores/scalarSetStore.js';

// @ts-check
// eslint-disable-next-line spaced-comment
import '@endo/marshal/exported.js';
/// <reference types="ses"/>

@@ -9,11 +10,18 @@

assertPassable,
everyPassableChild,
Far,
getTag,
isObject,
makeTagged,
passStyleOf,
} from '@agoric/marshal';
import { checkCopySet, everyCopySetKey } from './copySet.js';
import { checkCopyMap, everyCopyMapKey, everyCopyMapValue } from './copyMap.js';
} from '@endo/marshal';
import { checkElements, makeSetOfElements } from './copySet.js';
import { checkBagEntries, makeBagOfEntries } from './copyBag.js';
import {
compareAntiRank,
makeFullOrderComparatorKit,
sortByRank,
} from '../patterns/rankOrder.js';
const { details: X, quote: q } = assert;
const { ownKeys } = Reflect;

@@ -85,4 +93,7 @@ // ////////////////// Primitive and Scalar keys ////////////////////////////////

// ///////////////////////////// Keys //////////////////////////////////////////
// ////////////////////////////// Keys /////////////////////////////////////////
/** @type {WeakSet<Key>} */
const keyMemo = new WeakSet();
/**

@@ -93,4 +104,424 @@ * @param {Passable} val

*/
export const checkKey = (val, check = x => x) => {
if (isPrimitiveKey(val)) {
return true;
}
if (!isObject(val)) {
// TODO There is not yet a checkPassable, but perhaps there should be.
// If that happens, we should call it here instead.
assertPassable(val);
return true;
}
if (keyMemo.has(val)) {
return true;
}
// eslint-disable-next-line no-use-before-define
const result = checkKeyInternal(val, check);
if (result) {
// Don't cache the undefined cases, so that if it is tried again
// with `assertChecker` it'll throw a diagnostic again
keyMemo.add(val);
}
// Note that we do not memoize a negative judgement, so that if it is tried
// again with a checker, it will still produce a useful diagnostic.
return result;
};
harden(checkKey);
/**
* @param {Passable} val
* @returns {boolean}
*/
export const isKey = val => checkKey(val);
harden(isKey);
/**
* @param {Key} val
*/
export const assertKey = val => {
checkKey(val, assertChecker);
};
harden(assertKey);
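// Illustrative usage sketch (not part of this file): primitive passables and
// hardened copy-structures built from keys are themselves keys; assertKey
// throws a diagnostic for anything else.
//   isKey(7n);                    // true
//   isKey(harden(['a', 1n]));     // true (copyArray of keys)
//   assertKey(Promise.resolve()); // throws: a promise is not a key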
// //////////////////////////// CopySet ////////////////////////////////////////
// Moved to here so they can check that the copySet contains only keys
// without creating an import cycle.
/** @type WeakSet<CopySet<Key>> */
const copySetMemo = new WeakSet();
/**
* @param {Passable} s
* @param {Checker=} check
* @returns {boolean}
*/
export const checkCopySet = (s, check = x => x) => {
if (copySetMemo.has(s)) {
return true;
}
const result =
check(
passStyleOf(s) === 'tagged' && getTag(s) === 'copySet',
X`Not a copySet: ${s}`,
) &&
checkElements(s.payload, check) &&
checkKey(s.payload);
if (result) {
copySetMemo.add(s);
}
return result;
};
harden(checkCopySet);
/**
* @callback IsCopySet
* @param {Passable} s
* @returns {s is CopySet<Key>}
*/
/** @type {IsCopySet} */
export const isCopySet = s => checkCopySet(s);
harden(isCopySet);
/**
* @callback AssertCopySet
* @param {Passable} s
* @returns {asserts s is CopySet<Key>}
*/
/** @type {AssertCopySet} */
export const assertCopySet = s => {
checkCopySet(s, assertChecker);
};
harden(assertCopySet);
/**
* @template K
* @param {CopySet<K>} s
* @returns {K[]}
*/
export const getCopySetKeys = s => {
assertCopySet(s);
return s.payload;
};
harden(getCopySetKeys);
/**
* @template K
* @param {CopySet<K>} s
* @param {(key: K, index: number) => boolean} fn
* @returns {boolean}
*/
export const everyCopySetKey = (s, fn) =>
getCopySetKeys(s).every((key, index) => fn(key, index));
harden(everyCopySetKey);
/**
* @template K
* @param {Iterable<K>} elementIter
* @returns {CopySet<K>}
*/
export const makeCopySet = elementIter => {
const result = makeSetOfElements(elementIter);
assertCopySet(result);
return result;
};
harden(makeCopySet);
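// Illustrative sketch (not part of this file): makeCopySet accepts any
// iterable of keys, normalizes it into the internal reverse-rank order, and
// rejects duplicates.
//   const s = makeCopySet(['a', 'b', 'c']);
//   getCopySetKeys(s);       // the three strings, stored in reverse rank order
//   makeCopySet(['a', 'a']); // throws: duplicate elements are not allowed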
// //////////////////////////// CopyBag ////////////////////////////////////////
// Moved to here so they can check that the copyBag contains only keys
// without creating an import cycle.
/** @type WeakSet<CopyBag<Key>> */
const copyBagMemo = new WeakSet();
/**
* @param {Passable} b
* @param {Checker=} check
* @returns {boolean}
*/
export const checkCopyBag = (b, check = x => x) => {
if (copyBagMemo.has(b)) {
return true;
}
const result =
check(
passStyleOf(b) === 'tagged' && getTag(b) === 'copyBag',
X`Not a copyBag: ${b}`,
) &&
checkBagEntries(b.payload, check) &&
checkKey(b.payload);
if (result) {
copyBagMemo.add(b);
}
return result;
};
harden(checkCopyBag);
/**
* @callback IsCopyBag
* @param {Passable} b
* @returns {b is CopyBag<Key>}
*/
/** @type {IsCopyBag} */
export const isCopyBag = b => checkCopyBag(b);
harden(isCopyBag);
/**
* @callback AssertCopyBag
* @param {Passable} b
* @returns {asserts b is CopyBag<Key>}
*/
/** @type {AssertCopyBag} */
export const assertCopyBag = b => {
checkCopyBag(b, assertChecker);
};
harden(assertCopyBag);
/**
* @template K
* @param {CopyBag<K>} b
* @returns {K[]}
*/
export const getCopyBagEntries = b => {
assertCopyBag(b);
return b.payload;
};
harden(getCopyBagEntries);
/**
* @template K
* @param {CopyBag<K>} b
* @param {(entry: [K, bigint], index: number) => boolean} fn
* @returns {boolean}
*/
export const everyCopyBagEntry = (b, fn) =>
getCopyBagEntries(b).every((entry, index) => fn(entry, index));
harden(everyCopyBagEntry);
/**
* @template K
* @param {Iterable<[K,bigint]>} bagEntryIter
* @returns {CopyBag<K>}
*/
export const makeCopyBag = bagEntryIter => {
const result = makeBagOfEntries(bagEntryIter);
assertCopyBag(result);
return result;
};
harden(makeCopyBag);
/**
* @template K
* @param {Iterable<K>} elementIter
* @returns {CopySet<K>}
*/
export const makeCopyBagFromElements = elementIter => {
// This fullOrder contains history dependent state. It is specific
// to this one call and does not survive it.
const fullCompare = makeFullOrderComparatorKit().antiComparator;
const sorted = sortByRank(elementIter, fullCompare);
/** @type {[K,bigint][]} */
const entries = [];
for (let i = 0; i < sorted.length; ) {
const k = sorted[i];
let j = i + 1;
while (j < sorted.length && fullCompare(k, sorted[j]) === 0) {
j += 1;
}
entries.push([k, BigInt(j - i)]);
i = j;
}
return makeCopyBag(entries);
};
harden(makeCopyBagFromElements);
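// Illustrative sketch (not part of this file): duplicate elements collapse
// into [element, count] entries with bigint counts, kept in reverse rank order.
//   const bag = makeCopyBagFromElements(['x', 'y', 'x']);
//   getCopyBagEntries(bag);  // e.g. [['y', 1n], ['x', 2n]], order per the rank sort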
// //////////////////////////// CopyMap ////////////////////////////////////////
// Moved to here so they can check that the copyMap's keys contains only keys
// without creating an import cycle.
/** @type WeakSet<CopyMap<any,any>> */
const copyMapMemo = new WeakSet();
/**
* @param {Passable} m
* @param {Checker=} check
* @returns {boolean}
*/
export const checkCopyMap = (m, check = x => x) => {
if (copyMapMemo.has(m)) {
return true;
}
if (!(passStyleOf(m) === 'tagged' && getTag(m) === 'copyMap')) {
return check(false, X`Not a copyMap: ${m}`);
}
const { payload } = m;
if (passStyleOf(payload) !== 'copyRecord') {
return check(false, X`A copyMap's payload must be a record: ${m}`);
}
const { keys, values, ...rest } = payload;
const result =
check(
ownKeys(rest).length === 0,
X`A copyMap's payload must only have .keys and .values: ${m}`,
) &&
checkElements(keys, check) &&
check(
passStyleOf(values) === 'copyArray',
X`A copyMap's .values must be a copyArray: ${m}`,
) &&
check(
keys.length === values.length,
X`A copyMap must have the same number of keys and values: ${m}`,
);
if (result) {
copyMapMemo.add(m);
}
return result;
};
harden(checkCopyMap);
/**
* @callback IsCopyMap
* @param {Passable} m
* @returns {m is CopyMap<Key, Passable>}
*/
/** @type {IsCopyMap} */
export const isCopyMap = m => checkCopyMap(m);
harden(isCopyMap);
/**
* @callback AssertCopyMap
* @param {Passable} m
* @returns {asserts m is CopyMap<Key, Passable>}
*/
/** @type {AssertCopyMap} */
export const assertCopyMap = m => {
checkCopyMap(m, assertChecker);
};
harden(assertCopyMap);
/**
* @template K,V
* @param {CopyMap<K,V>} m
* @returns {K[]}
*/
export const getCopyMapKeys = m => {
assertCopyMap(m);
return m.payload.keys;
};
harden(getCopyMapKeys);
/**
* @template K,V
* @param {CopyMap<K,V>} m
* @returns {V[]}
*/
export const getCopyMapValues = m => {
assertCopyMap(m);
return m.payload.values;
};
harden(getCopyMapValues);
/**
* @template K,V
* @param {CopyMap<K,V>} m
* @returns {Iterable<[K,V]>}
*/
export const getCopyMapEntries = m => {
assertCopyMap(m);
const {
payload: { keys, values },
} = m;
const { length } = keys;
return Far('CopyMap entries iterable', {
[Symbol.iterator]: () => {
let i = 0;
return Far('CopyMap entries iterator', {
next: () => {
/** @type {IteratorResult<[K,V],void>} */
let result;
if (i < length) {
result = harden({ done: false, value: [keys[i], values[i]] });
i += 1;
return result;
} else {
result = harden({ done: true, value: undefined });
}
return result;
},
});
},
});
};
harden(getCopyMapEntries);
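// Illustrative sketch (not part of this file): the returned value is a
// hardened Far iterable, so it can be consumed with for..of or spread.
//   for (const [k, v] of getCopyMapEntries(m)) { /* ... */ }
//   const entryPairs = [...getCopyMapEntries(m)];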
/**
* @template K,V
* @param {CopyMap<K,V>} m
* @param {(key: K, index: number) => boolean} fn
* @returns {boolean}
*/
export const everyCopyMapKey = (m, fn) =>
getCopyMapKeys(m).every((key, index) => fn(key, index));
harden(everyCopyMapKey);
/**
* @template K,V
* @param {CopyMap<K,V>} m
* @param {(value: V, index: number) => boolean} fn
* @returns {boolean}
*/
export const everyCopyMapValue = (m, fn) =>
getCopyMapValues(m).every((value, index) => fn(value, index));
harden(everyCopyMapValue);
/**
* @template K,V
* @param {CopyMap<K,V>} m
* @returns {CopySet<K>}
*/
export const copyMapKeySet = m =>
// A copyMap's keys are already in the internal form used by copySets.
makeTagged('copySet', m.payload.keys);
harden(copyMapKeySet);
/**
* @template K,V
* @param {Iterable<[K, V]>} entries
* @returns {CopyMap<K,V>}
*/
export const makeCopyMap = entries => {
// This is weird, but reverse rank sorting the entries is a good first step
// for getting the rank sorted keys together with the values
// organized by those keys. Also, among values associated with
// keys in the same equivalence class, those are rank sorted.
// TODO This
// could solve the copyMap cover issue explained in patternMatchers.js.
// But only if we include this criteria in our validation of copyMaps,
// which we currently do not.
const sortedEntries = sortByRank(entries, compareAntiRank);
const keys = sortedEntries.map(([k, _v]) => k);
const values = sortedEntries.map(([_k, v]) => v);
const result = makeTagged('copyMap', { keys, values });
assertCopyMap(result);
return result;
};
harden(makeCopyMap);
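// Illustrative sketch (not part of this file): entries may be supplied in any
// order; makeCopyMap re-sorts them in reverse rank order (keyed primarily by
// their keys) before tagging the payload.
//   const m = makeCopyMap([['low', 1], ['high', 2]]);
//   getCopyMapKeys(m);   // keys in reverse rank order
//   getCopyMapValues(m); // values aligned with those keys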
// //////////////////////// Keys Recur /////////////////////////////////////////
/**
* @param {Passable} val
* @param {Checker=} check
* @returns {boolean}
*/
const checkKeyInternal = (val, check = x => x) => {
// eslint-disable-next-line no-use-before-define
const checkIt = child => checkKey(child, check);

@@ -100,6 +531,9 @@

switch (passStyle) {
case 'copyRecord':
case 'copyRecord': {
// A copyRecord is a key iff all its children are keys
return Object.values(val).every(checkIt);
}
case 'copyArray': {
// A copyRecord or copyArray is a key iff all its children are keys
return everyPassableChild(val, checkIt);
// A copyArray is a key iff all its children are keys
return val.every(checkIt);
}

@@ -110,8 +544,7 @@ case 'tagged': {

case 'copySet': {
return (
checkCopySet(val, check) &&
// For a copySet to be a key, all its elements must be keys
everyCopySetKey(val, checkIt)
);
return checkCopySet(val, check);
}
case 'copyBag': {
return checkCopyBag(val, check);
}
case 'copyMap': {

@@ -121,4 +554,4 @@ return (

// For a copyMap to be a key, all its keys and values must
// be keys.
everyCopyMapKey(val, checkIt) &&
// be keys. Keys already checked by `checkCopyMap` since
// that's a copyMap requirement in general.
everyCopyMapValue(val, checkIt)

@@ -150,49 +583,1 @@ );

};
/** @type {WeakSet<Key>} */
const keyMemo = new WeakSet();
/**
* @param {Passable} val
* @param {Checker=} check
* @returns {boolean}
*/
export const checkKey = (val, check = x => x) => {
if (isPrimitiveKey(val)) {
return true;
}
if (!isObject(val)) {
// TODO There is not yet a checkPassable, but perhaps there should be.
// If that happens, we should call it here instead.
assertPassable(val);
return true;
}
if (keyMemo.has(val)) {
return true;
}
const result = checkKeyInternal(val, check);
if (result) {
// Don't cache the undefined cases, so that if it is tried again
// with `assertChecker` it'll throw a diagnostic again
keyMemo.add(val);
}
// Note that we do not memoize a negative judgement, so that if it is tried
// again with a checker, it will still produce a useful diagnostic.
return result;
};
harden(checkKey);
/**
* @param {Passable} val
* @returns {boolean}
*/
export const isKey = val => checkKey(val);
harden(isKey);
/**
* @param {Key} val
*/
export const assertKey = val => {
checkKey(val, assertChecker);
};
harden(assertKey);
// @ts-check
// eslint-disable-next-line spaced-comment
/// <reference types="ses"/>
import { passStyleOf, getTag } from '@agoric/marshal';
import { compareRank } from '../patterns/rankOrder.js';
import { passStyleOf, getTag } from '@endo/marshal';
import { compareRank, recordParts } from '../patterns/rankOrder.js';
import { assertKey } from './checkKey.js';
import { bagCompare } from './merge-bag-operators.js';
import { setCompare } from './merge-set-operators.js';
const { details: X, quote: q } = assert;
const { ownKeys } = Reflect;
/**
* `compareKeys` implements a partial order over keys. As with the
* rank ordering produced by `compareRank`, -1, 0, and 1 mean
* "less than", "equivalent to", and "greater than" respectively.
* NaN means "incomparable" --- the first key is not less, equivalent,
* or greater than the second. For example, subsets over sets is
* a partial order.
*
* By using NaN for "incomparable", the normal equivalence for using
* the return value in a comparison is preserved.
* `compareKeys(left, right) >= 0` iff `left` is greater than or
* equivalent to `right` in the partial ordering.
* `compareKeys` is currently not exported directly, so its
* bizarre but convenient return type is not exposed.
*
* Key order (a partial order) and rank order (a full order) are
* co-designed so that we store passables in rank order and index into them
* with keys for key-based queries. To keep these distinct, when speaking
* informally about rank, we talk about "earlier" and "later". When speaking
* informally about keys, we talk about "smaller" and "bigger".
*
* In both orders, the return-0 case defines
* an equivalence class, i.e., those that are tied for the same place in the
* order. The global invariant that we need between the two orders is that the
* key order equivalence class is always at least as precise as the
* rank order equivalence class. IOW, if `compareKeys(X,Y) === 0` then
* `compareRank(X,Y) === 0`. But not vice versa. For example, two different
* remotables are the same rank but incommensurate as keys.
*
* A further invariant is if `compareKeys(X,Y) < 0` then
* `compareRank(X,Y) <= 0`, i.e., if X is smaller than Y in key order, then X
* must be at least as early as Y in rank order. But not vice versa.
* X can be earlier than Y in rank order and still be incommensurate with Y in
* key order. For example, the records `{b: 3, a: 5}` is earlier than but
* incommensurate with the record `{b: 5, a: 3}`.
*
* This lets us translate a range search over the
* partial key order into a range search over rank order followed by filtering
* out those that don't match. To get this effect, we store the elements of
* a set in an array sorted in reverse rank order, from later to earlier.
* Combined with our lexicographic comparison of arrays, if set X is a subset
* of set Y then the array representing set X will be an earlier rank that the
* array representing set Y.
*
* @param {Key} left
* @param {Key} right
* @returns {-1 | 0 | 1 | NaN}
*/
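// Illustrative sketch (not part of this file) of the partial order described
// above, assuming hardened records:
//   compareKeys(harden({ a: 1, b: 1 }), harden({ a: 1, b: 2 })); // -1
//   compareKeys(harden({ b: 3, a: 5 }), harden({ b: 5, a: 3 })); // NaN (incommensurate)
//   compareRank(harden({ b: 3, a: 5 }), harden({ b: 5, a: 3 })); // negative: rank order, being a full order, still decides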
const compareKeys = (left, right) => {
/** @type {KeyCompare} */
export const compareKeys = (left, right) => {
assertKey(left);

@@ -120,4 +73,5 @@ assertKey(right);

// Pareto partial order comparison.
const leftNames = harden(ownKeys(left).sort());
const rightNames = harden(ownKeys(right).sort());
const [leftNames, leftValues] = recordParts(left);
const [rightNames, rightValues] = recordParts(right);
// eslint-disable-next-line no-use-before-define

@@ -132,5 +86,7 @@ if (!keyEQ(leftNames, rightNames)) {

}
let result = 0; // start with hypothesis they are keyEQ
for (const name of leftNames) {
const comp = compareKeys(left[name], right[name]);
// Presume that both copyRecords have the same key order
// until encountering a property disproving that hypothesis.
let result = 0;
for (let i = 0; i < leftValues.length; i += 1) {
const comp = compareKeys(leftValues[i], rightValues[i]);
if (Number.isNaN(comp)) {

@@ -151,7 +107,8 @@ return NaN;

// If copyRecord X is smaller than copyRecord Y, then they must have the
// same property names, and every value is X must be smaller or equal to
// every corresponding value in Y. The rank order of X and Y is based
// on lexicographic rank order of their values, as organized by the
// reverse order of their property names. Thus
// if compareKeys(X,Y) < 0 then compareRank(X,Y) <= 0.
// same property names and every value in X must be smaller or equal to
// the corresponding value in Y (with at least one value smaller).
// The rank order of X and Y is based on lexicographic rank order of
// their values, as organized by reverse lexicographic order of their
// property names.
// Thus if compareKeys(X,Y) < 0 then compareRank(X,Y) < 0.
return result;

@@ -169,68 +126,13 @@ }

// copySet X is smaller than copySet Y when every element of X
// is keyEQ to some element of Y.
// The following algorithm is good when there are few elements tied
// for the same rank. But it is O(N*M) in the size of these ties.
// Sets of remotables are a particularly bad case. For these, some
// kind of hash table (scalar set or map) should be used instead.
// TODO to get something working, I am currently implementing
// only the special case where there are no rank-order ties.
let result = 0; // start with the hypothesis they are keyEQ.
const xs = left.payload;
const ys = right.payload;
const xlen = xs.length;
const ylen = ys.length;
let xi = 0;
let yi = 0;
while (xi < xlen && yi < ylen) {
const x = xs[xi];
const y = ys[yi];
if (xi + 1 < xlen && compareRank(x, xs[xi + 1]) === 0) {
assert.fail(X`sets with rank ties not yet implemented: ${left}`);
}
if (yi + 1 < ylen && compareRank(y, ys[yi + 1]) === 0) {
assert.fail(X`sets with rank ties not yet implemented: ${right}`);
}
const comp = compareKeys(x, y);
if (Number.isNaN(comp)) {
// If they are incommensurate, then each element is not in the
// other set, so the sets are incommensurate.
return NaN;
} else if (comp === 0) {
//
xi += 1;
yi += 1;
} else {
if (result !== comp) {
if (result === 0) {
result = comp;
} else {
assert(
(result === -1 && comp === 1) ||
(result === 1 && comp === -1),
);
return NaN;
}
}
if (comp === 1) {
xi += 1;
} else {
assert(comp === -1);
yi += 1;
}
}
}
const comp = compareKeys(xlen, ylen);
if (comp === 0) {
return result;
} else if (result === 0 || result === comp) {
return comp;
} else {
assert(
(result === -1 && comp === 1) || (result === 1 && comp === -1),
);
return NaN;
}
// is keyEQ to some element of Y and some element of Y is
// not keyEQ to any element of X.
return setCompare(left, right);
}
case 'copyBag': {
// copyBag X is smaller than copyBag Y when every element of X
// occurs no more than the keyEQ element of Y, and some element
// of Y occurs more than some element of X, where being absent
// from X counts as occuring zero times.
return bagCompare(left, right);
}
case 'copyMap': {

@@ -237,0 +139,0 @@ // Two copyMaps that have different keys (according to keyEQ) are

// @ts-check
import { assertChecker, makeTagged, passStyleOf } from '@endo/marshal';
import {
assertChecker,
getTag,
makeTagged,
passStyleOf,
} from '@agoric/marshal';
import {
compareRank,
compareAntiRank,
isRankSorted,
makeFullOrderComparatorKit,
sortByRank,
} from '../patterns/rankOrder.js';
// eslint-disable-next-line spaced-comment
/// <reference types="ses"/>

@@ -21,71 +16,88 @@

/**
* @param {Passable[]} keys
* @template T
* @param {T[]} elements
* @param {FullCompare=} fullCompare If provided and `elements` is already known
* to be sorted by this `fullCompare`, then we should get a memo hit rather
* than a resorting. However, currently, we still enumerate the entire array
* each time.
*
* TODO: If doing this reduntantly turns out to be expensive, we
* could memoize this no-duplicate finding as well, independent
* of the `fullOrder` use to reach this finding.
* @param {Checker=} check
* @returns {boolean}
*/
export const checkCopySetKeys = (keys, check = x => x) => {
if (passStyleOf(keys) !== 'copyArray') {
return check(
false,
X`The keys of a copySet or copyMap must be a copyArray: ${keys}`,
);
const checkNoDuplicates = (
elements,
fullCompare = undefined,
check = x => x,
) => {
// This fullOrder contains history dependent state. It is specific
// to this one call and does not survive it.
// TODO Once all our tooling is ready for `&&=`, the following
// line should be rewritten using it.
fullCompare = fullCompare || makeFullOrderComparatorKit().antiComparator;
elements = sortByRank(elements, fullCompare);
const { length } = elements;
for (let i = 1; i < length; i += 1) {
const k0 = elements[i - 1];
const k1 = elements[i];
if (fullCompare(k0, k1) === 0) {
return check(false, X`value has duplicates: ${k0}`);
}
}
const reverseKeys = harden([...keys].reverse());
if (!isRankSorted(reverseKeys, compareRank)) {
return check(
false,
X`The keys of a copySet or copyMap must be sorted in reverse rank order: ${keys}`,
);
}
return true;
};
harden(checkCopySetKeys);
/** @type WeakSet<CopySet> */
const copySetMemo = new WeakSet();
/**
* @template T
* @param {T[]} elements
* @param {FullCompare=} fullCompare
* @returns {void}
*/
export const assertNoDuplicates = (elements, fullCompare = undefined) => {
checkNoDuplicates(elements, fullCompare, assertChecker);
};
/**
* @param {Passable} s
* @param {Passable[]} elements
* @param {Checker=} check
* @returns {boolean}
*/
export const checkCopySet = (s, check = x => x) => {
if (copySetMemo.has(s)) {
return true;
export const checkElements = (elements, check = x => x) => {
if (passStyleOf(elements) !== 'copyArray') {
return check(
false,
X`The keys of a copySet or copyMap must be a copyArray: ${elements}`,
);
}
const result =
check(
passStyleOf(s) === 'tagged' && getTag(s) === 'copySet',
X`Not a copySet: ${s}`,
) && checkCopySetKeys(s.payload, check);
if (result) {
copySetMemo.add(s);
if (!isRankSorted(elements, compareAntiRank)) {
return check(
false,
X`The keys of a copySet or copyMap must be sorted in reverse rank order: ${elements}`,
);
}
return result;
return checkNoDuplicates(elements, undefined, check);
};
harden(checkCopySet);
harden(checkElements);
export const isCopySet = s => checkCopySet(s);
harden(isCopySet);
export const assertElements = elements =>
checkElements(elements, assertChecker);
harden(assertElements);
export const assertCopySet = s => checkCopySet(s, assertChecker);
harden(assertCopySet);
/**
* @param {CopySet} s
* @param {(v: Passable, i: number) => boolean} fn
* @returns {boolean}
*/
export const everyCopySetKey = (s, fn) => {
assertCopySet(s);
return s.payload.every((v, i) => fn(v, i));
export const coerceToElements = elementsList => {
const elements = sortByRank(elementsList, compareAntiRank);
assertElements(elements);
return elements;
};
harden(everyCopySetKey);
harden(coerceToElements);
/**
* @param {Iterable<Passable>} elements
* @returns {CopySet}
* @template K
* @param {Iterable<K>} elementIter
* @returns {CopySet<K>}
*/
export const makeCopySet = elements =>
makeTagged('copySet', [...sortByRank(elements, compareRank)].reverse());
harden(makeCopySet);
export const makeSetOfElements = elementIter =>
makeTagged('copySet', coerceToElements(elementIter));
harden(makeSetOfElements);
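// Illustrative sketch (not part of this file): coerceToElements normalizes an
// iterable of keys into the reverse-rank-sorted, duplicate-free array form
// that copySet and copyMap payloads require.
//   const elements = coerceToElements(harden(['b', 'a', 'c'])); // sorted in reverse rank order
//   const set = makeSetOfElements(elements);                    // tagged 'copySet'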

@@ -22,3 +22,3 @@ // @ts-check

* comment explaining the problem inhibiting conversion to the new
* system. Some of these problems as if this writing:
* system. Some of these problems as of this writing:
* * A promiseKit used as a value, even though a promiseKit is not

@@ -33,3 +33,5 @@ * a passable. Solutions are to make it a passable, or to convert

* @deprecated switch to ScalarMap if possible, Map otherwise
* @template K,V
* @param {string} [keyName='key'] - the column name for the key
* @returns {LegacyMap<K,V>}
*/

@@ -42,3 +44,3 @@ export const makeLegacyMap = (keyName = 'key') => {

assert(m.has(key), X`${q(keyName)} not found: ${key}`);
const legacyMap = harden({
return harden({
has: key => {

@@ -66,8 +68,9 @@ // Check if a key exists. The key can be any JavaScript value,

},
keys: () => [...m.keys()],
values: () => [...m.values()],
entries: () => [...m.entries()],
keys: () => m.keys(),
values: () => m.values(),
entries: () => m.entries(),
getSize: () => m.size,
clear: () => m.clear(),
});
return legacyMap;
};
harden(makeLegacyMap);
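// Illustrative sketch (not part of this file): after this change keys(),
// values() and entries() return live iterators rather than copied arrays, so
// callers that need an array must spread them.
//   const store = makeLegacyMap('itemId'); // 'itemId' is a hypothetical keyName
//   const allKeys = [...store.keys()];     // previously keys() already returned an array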

@@ -10,5 +10,8 @@ // @ts-check

* @deprecated switch to ScalarWeakMap if possible, WeakMap otherwise
* @template K,V
* @param {string} [keyName='key'] - the column name for the key
* @returns {LegacyWeakMap<K,V>}
*/
export const makeLegacyWeakMap = (keyName = 'key') => {
/** @type {WeakMap<K & Object,V>} */
const wm = new WeakMap();

@@ -19,3 +22,3 @@ const assertKeyDoesNotExist = key =>

assert(wm.has(key), X`${q(keyName)} not found: ${key}`);
const legacyWeakMap = harden({
return harden({
has: key => {

@@ -33,3 +36,4 @@ // Check if a key exists. The key can be any JavaScript value,

assertKeyExists(key);
return wm.get(key);
// How to tell typescript I believe the `get` will succeed.
return /** @type {V} */ (wm.get(key));
},

@@ -45,4 +49,3 @@ set: (key, value) => {

});
return legacyWeakMap;
};
harden(makeLegacyWeakMap);
// @ts-check
// eslint-disable-next-line spaced-comment
/// <reference types="ses"/>

@@ -5,0 +4,0 @@

@@ -5,3 +5,2 @@ // @ts-check

assertChecker,
everyPassableChild,
Far,

@@ -11,3 +10,4 @@ getTag,

passStyleOf,
} from '@agoric/marshal';
hasOwnPropertyOf,
} from '@endo/marshal';
import {

@@ -17,2 +17,3 @@ compareRank,

intersectRankCovers,
recordParts,
unionRankCovers,

@@ -27,11 +28,10 @@ } from './rankOrder.js';

isScalarKey,
checkCopySet,
checkCopyBag,
checkCopyMap,
copyMapKeySet,
} from '../keys/checkKey.js';
import { checkCopySet /* , makeCopySet XXX TEMP */ } from '../keys/copySet.js';
import { checkCopyMap, copyMapKeySet } from '../keys/copyMap.js';
// eslint-disable-next-line spaced-comment
/// <reference types="ses"/>
// const { entries, fromEntries } = Object; // XXX TEMP
const { ownKeys } = Reflect;
const { quote: q, details: X } = assert;

@@ -43,5 +43,5 @@

/**
* @returns {Object}
* @returns {PatternKit}
*/
export const makePatternKit = () => {
const makePatternKit = () => {
/**

@@ -90,7 +90,11 @@ * If this is a recognized match tag, return the MatchHelper.

switch (passStyle) {
case 'copyRecord':
case 'copyRecord': {
// A copyRecord is a pattern iff all its children are
// patterns
return Object.values(patt).every(checkIt);
}
case 'copyArray': {
// A copyRecord or copyArray is a pattern iff all its children are
// A copyArray is a pattern iff all its children are
// patterns
return everyPassableChild(patt, checkIt);
return patt.every(checkIt);
}

@@ -122,2 +126,16 @@ case 'tagged': {

}
case 'copyBag': {
if (!checkCopyBag(patt, check)) {
return false;
}
// If it is a CopyBag, then it must also be a key and we
// should never get here.
if (isKey(patt)) {
assert.fail(
X`internal: The key case should have been dealt with earlier: ${patt}`,
);
} else {
assert.fail(X`A CopyMap must be a Key but was not: ${patt}`);
}
}
case 'copyMap': {

@@ -140,3 +158,3 @@ return (

false,
X`A passable tagged ${q(tag)} is not a key: ${patt}`,
X`A passable tagged ${q(tag)} is not a pattern: ${patt}`,
);

@@ -254,5 +272,6 @@ }

keyEQ(specimen, patt),
X`${specimen} - Must be equivalent to the literal pattern: ${patt}`,
X`${specimen} - Must be equivalent to: ${patt}`,
);
}
assertPattern(patt);
const specStyle = passStyleOf(specimen);

@@ -272,8 +291,6 @@ const pattStyle = passStyleOf(patt);

false,
X`Array ${specimen} - must be as long as copyArray pattern: ${patt}`,
X`Array ${specimen} - Must be as long as copyArray pattern: ${patt}`,
);
}
return everyPassableChild(patt, (p, i) =>
checkMatches(specimen[i], p, check),
);
return patt.every((p, i) => checkMatches(specimen[i], p, check));
}

@@ -287,12 +304,4 @@ case 'copyRecord': {

}
const specNames = harden(
ownKeys(specimen)
.sort()
.reverse(),
);
const pattNames = harden(
ownKeys(patt)
.sort()
.reverse(),
);
const [specNames, specValues] = recordParts(specimen);
const [pattNames, pattValues] = recordParts(patt);
if (!keyEQ(specNames, pattNames)) {

@@ -304,4 +313,2 @@ return check(

}
const specValues = harden(specNames.map(name => specimen[name]));
const pattValues = harden(pattNames.map(name => patt[name]));
return checkMatches(specValues, pattValues, check);

@@ -384,3 +391,3 @@ }

*/
const assertMatches = (specimen, patt) => {
const fit = (specimen, patt) => {
checkMatches(specimen, patt, assertChecker);

@@ -395,3 +402,5 @@ };

const encoded = encodeKey(patt);
return [encoded, `${encoded}~`];
if (encoded !== undefined) {
return [encoded, `${encoded}~`];
}
}

@@ -401,11 +410,18 @@ const passStyle = passStyleOf(patt);

case 'copyArray': {
const rankCovers = patt.map(p => getRankCover(p, encodeKey));
return harden([
rankCovers.map(([left, _right]) => left),
rankCovers.map(([_left, right]) => right),
]);
// XXX this doesn't get along with the world of cover === pair of
// strings. In the meantime, fall through to the default which
// returns a cover that covers all copyArrays.
//
// const rankCovers = patt.map(p => getRankCover(p, encodeKey));
// return harden([
// rankCovers.map(([left, _right]) => left),
// rankCovers.map(([_left, right]) => right),
// ]);
break;
}
case 'copyRecord': {
// XXX this doesn't get along with the world of cover === pair of
// strings
// strings. In the meantime, fall through to the default which
// returns a cover that covers all copyRecords.
//
// const pattKeys = ownKeys(patt);

@@ -419,3 +435,3 @@ // const pattEntries = harden(pattKeys.map(key => [key, patt[key]]));

// ]);
assert.fail('not supporting copyRecord patterns yet'); // XXX TEMP
break;
}

@@ -426,2 +442,4 @@ case 'tagged': {

if (matchHelper) {
// Buried here is the important case, where we process
// the various patternNodes
return matchHelper.getRankCover(patt.payload, encodeKey);

@@ -432,3 +450,5 @@ }

// XXX this doesn't get along with the world of cover === pair of
// strings
// strings. In the meantime, fall through to the default which
// returns a cover that covers all copySets.
//
// // Should already be validated by checkPattern. But because this

@@ -449,27 +469,32 @@ // // is a check that may loosen over time, we also assert

// ]);
assert.fail('not supporting copySet patterns yet'); // XXX TEMP
break;
}
case 'copyMap': {
// A matching copyMap must have the same keys, or at most one
// non-key key pattern. Thus we can assume that value positions
// match 1-to-1.
// XXX this doesn't get along with the world of cover === pair of
// strings. In the meantime, fall through to the default which
// returns a cover that covers all copyMaps.
//
// TODO I may be overlooking that the less precise rankOrder
// equivalence class may cause values to be out of order,
// making this rankCover not actually cover. In that case, for
// all the values for keys at the same rank, we should union their
// rank covers. TODO POSSIBLE SILENT CORRECTNESS BUG
//
// If this is a bug, it probably affects the getRankCover
// cases if matchLTEHelper and matchGTEHelper on copyMap as
// well. See makeCopyMap for an idea on fixing
// this bug.
const [leftPayloadLimit, rightPayloadLimit] = getRankCover(
patt.payload,
encodeKey,
);
return harden([
makeTagged('copyMap', leftPayloadLimit),
makeTagged('copyMap', rightPayloadLimit),
]);
// // A matching copyMap must have the same keys, or at most one
// // non-key key pattern. Thus we can assume that value positions
// // match 1-to-1.
// //
// // TODO I may be overlooking that the less precise rankOrder
// // equivalence class may cause values to be out of order,
// // making this rankCover not actually cover. In that case, for
// // all the values for keys at the same rank, we should union their
// // rank covers. TODO POSSIBLE SILENT CORRECTNESS BUG
// //
// // If this is a bug, it probably affects the getRankCover
// // cases of matchLTEHelper and matchGTEHelper on copyMap as
// // well. See makeCopyMap for an idea on fixing
// // this bug.
// const [leftPayloadLimit, rightPayloadLimit] = getRankCover(
// patt.payload,
// encodeKey,
// );
// return harden([
// makeTagged('copyMap', leftPayloadLimit),
// makeTagged('copyMap', rightPayloadLimit),
// ]);
break;
}

@@ -493,28 +518,12 @@ default: {

const matchAnyHelper = Far('M.any helper', {
checkIsMatcherPayload: (matcherPayload, check = x => x) =>
check(
matcherPayload === undefined,
X`An M.any matcher's .payload must be undefined: ${matcherPayload}`,
),
checkMatches: (_specimen, _matcherPayload, _check = x => x) => true,
getRankCover: (_matchPayload, _encodeKey) => ['', '{'],
checkKeyPattern: (_matcherPayload, _check = x => x) => true,
});
/** @type {MatchHelper} */
const matchScalarHelper = Far('M.scalar helper', {
checkIsMatcherPayload: (matcherPayload, check = x => x) =>
check(
matcherPayload === undefined,
X`An M.scalar matcher's .payload must be undefined: ${matcherPayload}`,
X`Payload must be undefined: ${matcherPayload}`,
),
checkMatches: (specimen, _matcherPayload, check = x => x) =>
checkScalarKey(specimen, check),
getRankCover: (_matchPayload, _encodeKey) => ['', '{'],
getRankCover: (_matchPayload, _encodeKey) => ['a', 'z~'],
checkKeyPattern: (_matcherPayload, _check = x => x) => true,

@@ -524,80 +533,17 @@ });

/** @type {MatchHelper} */
const matchKindHelper = Far('M.kind helper', {
checkIsMatcherPayload: (allegedKeyKind, check = x => x) =>
check(
// We cannot further restrict this to only possible passStyles
// or tags, because we wish to allow matching of tags that we
// don't know ahead of time. Do we need to separate the namespaces?
// TODO are we asking for trouble by lumping passStyles and tags
// together into kinds?
typeof allegedKeyKind === 'string',
X`A kind name must be a string: ${allegedKeyKind}`,
),
checkMatches: (specimen, kind, check = x => x) =>
check(
passStyleOf(specimen) === kind ||
(passStyleOf(specimen) === 'tagged' && getTag(specimen) === kind),
X`${specimen} - Must have passStyle or tag ${q(kind)}`,
),
getRankCover: (kind, _encodeKey) => {
switch (kind) {
case 'copySet': {
// The bounds in the cover are not valid copySets, which is fine.
// They only need to be valid copyTagged that bound all possible
// copySets. Thus, we need to call makeTagged directly, rather
// than using makeCopySet.
return [
makeTagged('copySet', null),
makeTagged('copySet', undefined),
];
}
case 'copyMap': {
// The bounds in the cover are not valid copyMaps, which is fine.
// They only need to be valid copyTagged that bound all possible
// copyMaps.
return [
makeTagged('copyMap', null),
makeTagged('copyMap', undefined),
];
}
default: {
return getPassStyleCover(/** @type {PassStyle} */ (kind));
}
}
const matchAndHelper = Far('match:and helper', {
checkMatches: (specimen, patts, check = x => x) => {
return patts.every(patt => checkMatches(specimen, patt, check));
},
checkKeyPattern: (kind, check = x => x) => {
switch (kind) {
case 'boolean':
case 'number':
case 'bigint':
case 'string':
case 'symbol':
case 'remotable':
case 'undefined':
return true;
default:
return check(false, X`${kind} keys are not supported`);
}
},
});
/** @type {MatchHelper} */
const matchAndHelper = Far('match:and helper', {
checkIsMatcherPayload: (allegedPatts, check = x => x) => {
const checkIt = patt => checkPattern(patt, check);
return (
(check(
check(
passStyleOf(allegedPatts) === 'copyArray',
X`Needs array of sub-patterns: ${allegedPatts}`,
) && everyPassableChild(allegedPatts, checkIt))
) && allegedPatts.every(checkIt)
);
},
checkMatches: (specimen, patts, check = x => x) => {
return patts.every(patt => checkMatches(specimen, patt, check));
},
getRankCover: (patts, encodeKey) =>

@@ -616,13 +562,18 @@ intersectRankCovers(

const matchOrHelper = Far('match:or helper', {
checkIsMatcherPayload: matchAndHelper.checkIsMatcherPayload,
checkMatches: (specimen, patts, check = x => x) => {
return (
(check(
patts.length >= 1,
const { length } = patts;
if (length === 0) {
return check(
false,
X`${specimen} - no pattern disjuncts to match: ${patts}`,
) && !patts.every(patt => !checkMatches(specimen, patt, check)))
);
);
}
if (patts.some(patt => matches(specimen, patt))) {
return true;
}
return check(false, X`${specimen} - Must match one of ${patts}`);
},
checkIsMatcherPayload: matchAndHelper.checkIsMatcherPayload,
getRankCover: (patts, encodeKey) =>

@@ -641,4 +592,2 @@ unionRankCovers(

const matchNotHelper = Far('match:not helper', {
checkIsMatcherPayload: checkPattern,
checkMatches: (specimen, patt, check = x => x) => {

@@ -655,2 +604,4 @@ if (matches(specimen, patt)) {

checkIsMatcherPayload: checkPattern,
getRankCover: (_patt, _encodeKey) => ['', '{'],

@@ -662,5 +613,91 @@

/** @type {MatchHelper} */
const matchScalarHelper = Far('M.scalar helper', {
checkMatches: (specimen, _matcherPayload, check = x => x) =>
checkScalarKey(specimen, check),
checkIsMatcherPayload: matchAnyHelper.checkIsMatcherPayload,
getRankCover: (_matchPayload, _encodeKey) => ['a', 'z~'],
checkKeyPattern: (_matcherPayload, _check = x => x) => true,
});
/** @type {MatchHelper} */
const matchKeyHelper = Far('M.key helper', {
checkMatches: (specimen, _matcherPayload, check = x => x) =>
checkKey(specimen, check),
checkIsMatcherPayload: matchAnyHelper.checkIsMatcherPayload,
getRankCover: (_matchPayload, _encodeKey) => ['a', 'z~'],
checkKeyPattern: (_matcherPayload, _check = x => x) => true,
});
/** @type {MatchHelper} */
const matchPatternHelper = Far('M.pattern helper', {
checkMatches: (specimen, _matcherPayload, check = x => x) =>
checkPattern(specimen, check),
checkIsMatcherPayload: matchAnyHelper.checkIsMatcherPayload,
getRankCover: (_matchPayload, _encodeKey) => ['a', 'z~'],
checkKeyPattern: (_matcherPayload, _check = x => x) => true,
});
/** @type {MatchHelper} */
const matchKindHelper = Far('M.kind helper', {
checkMatches: (specimen, kind, check = x => x) =>
check(
passStyleOf(specimen) === kind ||
(passStyleOf(specimen) === 'tagged' && getTag(specimen) === kind),
X`${specimen} - Must have passStyle or tag ${q(kind)}`,
),
checkIsMatcherPayload: (allegedKeyKind, check = x => x) =>
check(
// We cannot further restrict this to only possible passStyles
// or tags, because we wish to allow matching of tags that we
// don't know ahead of time. Do we need to separate the namespaces?
// TODO are we asking for trouble by lumping passStyles and tags
// together into kinds?
typeof allegedKeyKind === 'string',
X`A kind name must be a string: ${allegedKeyKind}`,
),
getRankCover: (kind, _encodeKey) => {
let style;
switch (kind) {
case 'copySet':
case 'copyMap': {
style = 'tagged';
break;
}
default: {
style = kind;
break;
}
}
return getPassStyleCover(style);
},
checkKeyPattern: (kind, check = x => x) => {
switch (kind) {
case 'boolean':
case 'number':
case 'bigint':
case 'string':
case 'symbol':
case 'remotable':
case 'undefined':
return true;
default:
return check(false, X`${kind} keys are not supported`);
}
},
});
/** @type {MatchHelper} */
const matchLTEHelper = Far('match:lte helper', {
checkIsMatcherPayload: checkKey,
checkMatches: (specimen, rightOperand, check = x => x) =>

@@ -672,2 +709,4 @@ check(

checkIsMatcherPayload: checkKey,
getRankCover: (rightOperand, encodeKey) => {

@@ -678,50 +717,8 @@ const passStyle = passStyleOf(rightOperand);

// eslint-disable-next-line prefer-const
let [leftBound, _rightBound] = getPassStyleCover(passStyle);
switch (passStyle) {
case 'number': {
if (Number.isNaN(rightOperand)) {
// leftBound = NaN;
leftBound = 'f'; // XXX BOGUS
}
break;
}
case 'copyRecord': {
// XXX this doesn't get along with the world of cover === pair of
// strings
// leftBound = harden(
// fromEntries(entries(rightOperand).map(([k, _v]) => [k, null])),
// );
break;
}
case 'tagged': {
leftBound = makeTagged(getTag(rightOperand), null);
switch (getTag(rightOperand)) {
case 'copyMap': {
const { keys } = rightOperand.payload;
const values = keys.map(_ => null);
// See note in getRankCover for copyMap about why we
// may need to take variable values orders into account
// to be correct.
leftBound = makeTagged('copyMap', harden({ keys, values }));
break;
}
default: {
break;
}
}
break;
}
case 'remotable': {
// This does not make for a tighter rankCover, but if an
// underlying table internally further optimizes, for example with
// an identityHash of a virtual object, then this might
// help it take advantage of that.
leftBound = encodeKey(rightOperand);
break;
}
default: {
break;
}
let [leftBound, rightBound] = getPassStyleCover(passStyle);
const newRightBound = `${encodeKey(rightOperand)}~`;
if (newRightBound !== undefined) {
rightBound = newRightBound;
}
return [leftBound, `${encodeKey(rightOperand)}~`];
return [leftBound, rightBound];
},

@@ -735,4 +732,2 @@

const matchLTHelper = Far('match:lt helper', {
checkIsMatcherPayload: checkKey,
checkMatches: (specimen, rightOperand, check = x => x) =>

@@ -744,2 +739,4 @@ check(

checkIsMatcherPayload: checkKey,
getRankCover: matchLTEHelper.getRankCover,

@@ -753,4 +750,2 @@

const matchGTEHelper = Far('match:gte helper', {
checkIsMatcherPayload: checkKey,
checkMatches: (specimen, rightOperand, check = x => x) =>

@@ -762,2 +757,4 @@ check(

checkIsMatcherPayload: checkKey,
getRankCover: (rightOperand, encodeKey) => {

@@ -768,55 +765,8 @@ const passStyle = passStyleOf(rightOperand);

// eslint-disable-next-line prefer-const
let [_leftBound, rightBound] = getPassStyleCover(passStyle);
switch (passStyle) {
case 'number': {
if (Number.isNaN(rightOperand)) {
// rightBound = NaN;
rightBound = 'f';
} else {
// rightBound = Infinity;
rightBound = 'f~';
}
break;
}
case 'copyRecord': {
// XXX this doesn't get along with the world of cover === pair of
// strings
// rightBound = harden(
// fromEntries(
// entries(rightOperand).map(([k, _v]) => [k, undefined]),
// ),
// );
break;
}
case 'tagged': {
rightBound = makeTagged(getTag(rightOperand), undefined);
switch (getTag(rightOperand)) {
case 'copyMap': {
const { keys } = rightOperand.payload;
const values = keys.map(_ => undefined);
// See note in getRankCover for copyMap about why we
// may need to take variable values orders into account
// to be correct.
rightBound = makeTagged('copyMap', harden({ keys, values }));
break;
}
default: {
break;
}
}
break;
}
case 'remotable': {
// This does not make for a tighter rankCover, but if an
// underlying table internally further optimizes, for example with
// an identityHash of a virtual object, then this might
// help it take advantage of that.
rightBound = encodeKey(rightOperand);
break;
}
default: {
break;
}
let [leftBound, rightBound] = getPassStyleCover(passStyle);
const newLeftBound = encodeKey(rightOperand);
if (newLeftBound !== undefined) {
leftBound = newLeftBound;
}
return [encodeKey(rightOperand), rightBound];
return [leftBound, rightBound];
},

@@ -830,4 +780,2 @@

const matchGTHelper = Far('match:gt helper', {
getMatchTag: () => 'gt',
checkMatches: (specimen, rightOperand, check = x => x) =>

@@ -847,10 +795,237 @@ check(

/** @type {MatchHelper} */
const matchArrayOfHelper = Far('match:arrayOf helper', {
checkMatches: (specimen, subPatt, check = x => x) =>
check(
passStyleOf(specimen) === 'copyArray',
X`${specimen} - Must be an array`,
) && specimen.every(el => checkMatches(el, subPatt, check)),
checkIsMatcherPayload: checkPattern,
getRankCover: () => getPassStyleCover('copyArray'),
checkKeyPattern: (_, check = x => x) =>
check(false, X`Arrays not yet supported as keys`),
});
/** @type {MatchHelper} */
const matchRecordOfHelper = Far('match:recordOf helper', {
checkMatches: (specimen, entryPatt, check = x => x) =>
check(
passStyleOf(specimen) === 'copyRecord',
X`${specimen} - Must be a record`,
) &&
Object.entries(specimen).every(el =>
checkMatches(harden(el), entryPatt, check),
),
checkIsMatcherPayload: (entryPatt, check = x => x) =>
check(
passStyleOf(entryPatt) === 'copyArray' && entryPatt.length === 2,
X`${entryPatt} - Must be an pair of patterns`,
) && checkPattern(entryPatt, check),
getRankCover: _entryPatt => getPassStyleCover('copyRecord'),
checkKeyPattern: (_entryPatt, check = x => x) =>
check(false, X`Records not yet supported as keys`),
});
/** @type {MatchHelper} */
const matchSetOfHelper = Far('match:setOf helper', {
checkMatches: (specimen, keyPatt, check = x => x) =>
check(
passStyleOf(specimen) === 'tagged' && getTag(specimen) === 'copySet',
X`${specimen} - Must be a a CopySet`,
) && specimen.payload.every(el => checkMatches(el, keyPatt)),
checkIsMatcherPayload: checkPattern,
getRankCover: () => getPassStyleCover('tagged'),
checkKeyPattern: (_, check = x => x) =>
check(false, X`CopySets not yet supported as keys`),
});
/** @type {MatchHelper} */
const matchBagOfHelper = Far('match:bagOf helper', {
checkMatches: (specimen, keyPatt, check = x => x) =>
check(
passStyleOf(specimen) === 'tagged' && getTag(specimen) === 'copyBag',
X`${specimen} - Must be a a CopyBag`,
) &&
specimen.payload.every(([key, _count]) => checkMatches(key, keyPatt)),
checkIsMatcherPayload: checkPattern,
getRankCover: () => getPassStyleCover('tagged'),
checkKeyPattern: (_, check = x => x) =>
check(false, X`CopySets not yet supported as keys`),
});
/** @type {MatchHelper} */
const matchMapOfHelper = Far('match:mapOf helper', {
checkMatches: (specimen, [keyPatt, valuePatt], check = x => x) =>
check(
passStyleOf(specimen) === 'tagged' && getTag(specimen) === 'copyMap',
X`${specimen} - Must be a CopyMap`,
) &&
specimen.payload.keys.every(k => checkMatches(k, keyPatt, check)) &&
specimen.payload.values.every(v => checkMatches(v, valuePatt, check)),
checkIsMatcherPayload: (entryPatt, check = x => x) =>
check(
passStyleOf(entryPatt) === 'copyArray' && entryPatt.length === 2,
X`${entryPatt} - Must be an pair of patterns`,
) && checkPattern(entryPatt, check),
getRankCover: _entryPatt => getPassStyleCover('tagged'),
checkKeyPattern: (_entryPatt, check = x => x) =>
check(false, X`CopyMap not yet supported as keys`),
});
/** @type {MatchHelper} */
const matchSplitHelper = Far('match:split helper', {
checkMatches: (specimen, [base, rest = undefined], check = x => x) => {
const specimenStyle = passStyleOf(specimen);
const baseStyle = passStyleOf(base);
if (specimenStyle !== baseStyle) {
return check(
false,
X`${specimen} - Must have shape of base: ${q(baseStyle)}`,
);
}
let specB;
let specR;
if (baseStyle === 'copyArray') {
const { length: baseLen } = base;
// Frozen below
specB = specimen.slice(0, baseLen);
specR = specimen.slice(baseLen);
} else {
assert(baseStyle === 'copyRecord');
// Not yet frozen! Mutated in place
specB = {};
specR = {};
for (const [name, value] of Object.entries(specimen)) {
if (hasOwnPropertyOf(base, name)) {
specB[name] = value;
} else {
specR[name] = value;
}
}
}
harden(specB);
harden(specR);
return (
checkMatches(specB, base, check) &&
(rest === undefined ||
check(
matches(specR, rest),
X`Remainder ${specR} - Must match ${rest}`,
))
);
},
checkIsMatcherPayload: (splitArgs, check = x => x) => {
if (
passStyleOf(splitArgs) === 'copyArray' &&
(splitArgs.length === 1 || splitArgs.length === 2)
) {
const [base, rest = undefined] = splitArgs;
const baseStyle = passStyleOf(base);
if (
isPattern(base) &&
(baseStyle === 'copyArray' || baseStyle === 'copyRecord') &&
(rest === undefined || isPattern(rest))
) {
return true;
}
}
return check(
false,
X`Must be an array of a base structure and an optional rest pattern: ${splitArgs}`,
);
},
getRankCover: ([base, _rest = undefined]) =>
getPassStyleCover(passStyleOf(base)),
checkKeyPattern: ([base, _rest = undefined], check = x => x) =>
check(false, X`${q(passStyleOf(base))} not yet supported as keys`),
});
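// Illustrative sketch (not part of this file): M.split matches the base part
// of an array or record exactly and, optionally, the remainder against a rest
// pattern (names here are illustrative only).
//   fit(
//     harden({ want: 1, give: 2, extra: 3 }),
//     M.split(harden({ want: M.number(), give: M.number() }), M.record()),
//   ); // passes: base properties match, remainder { extra: 3 } matches M.record()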
/** @type {MatchHelper} */
const matchPartialHelper = Far('match:partial helper', {
checkMatches: (specimen, [base, rest = undefined], check = x => x) => {
const specimenStyle = passStyleOf(specimen);
const baseStyle = passStyleOf(base);
if (specimenStyle !== baseStyle) {
return check(
false,
X`${specimen} - Must have shape of base: ${q(baseStyle)}`,
);
}
let specB;
let specR;
let newBase = base;
if (baseStyle === 'copyArray') {
const { length: specLen } = specimen;
const { length: baseLen } = base;
if (specLen < baseLen) {
newBase = harden(base.slice(0, specLen));
}
// Frozen below
specB = specimen.slice(0, baseLen);
specR = specimen.slice(baseLen);
} else {
assert(baseStyle === 'copyRecord');
// Not yet frozen! Mutated in place
specB = {};
specR = {};
newBase = {};
for (const [name, value] of Object.entries(specimen)) {
if (hasOwnPropertyOf(base, name)) {
specB[name] = value;
newBase[name] = base[name];
} else {
specR[name] = value;
}
}
}
harden(specB);
harden(specR);
harden(newBase);
return (
checkMatches(specB, newBase, check) &&
(rest === undefined ||
check(
matches(specR, rest),
X`Remainder ${specR} - Must match ${rest}`,
))
);
},
checkIsMatcherPayload: matchSplitHelper.checkIsMatcherPayload,
getRankCover: matchSplitHelper.getRankCover,
checkKeyPattern: matchSplitHelper.checkKeyPattern,
});
/** @type {Record<string, MatchHelper>} */
const HelpersByMatchTag = harden({
'match:any': matchAnyHelper,
'match:scalar': matchScalarHelper,
'match:kind': matchKindHelper,
'match:and': matchAndHelper,
'match:or': matchOrHelper,
'match:not': matchNotHelper,
'match:scalar': matchScalarHelper,
'match:key': matchKeyHelper,
'match:pattern': matchPatternHelper,
'match:kind': matchKindHelper,
'match:lt': matchLTHelper,

@@ -860,44 +1035,70 @@ 'match:lte': matchLTEHelper,

'match:gt': matchGTHelper,
'match:arrayOf': matchArrayOfHelper,
'match:recordOf': matchRecordOfHelper,
'match:setOf': matchSetOfHelper,
'match:bagOf': matchBagOfHelper,
'match:mapOf': matchMapOfHelper,
'match:split': matchSplitHelper,
'match:partial': matchPartialHelper,
});
const patt = p => {
assertPattern(p);
return p;
const makeMatcher = (tag, payload) => {
const matcher = makeTagged(tag, payload);
assertPattern(matcher);
return matcher;
};
const makeKindMatcher = kind => makeMatcher('match:kind', kind);
const AnyShape = makeMatcher('match:any', undefined);
const ScalarShape = makeMatcher('match:scalar', undefined);
const KeyShape = makeMatcher('match:key', undefined);
const PatternShape = makeMatcher('match:pattern', undefined);
const BooleanShape = makeKindMatcher('boolean');
const NumberShape = makeKindMatcher('number');
const BigintShape = makeKindMatcher('bigint');
const NatShape = makeMatcher('match:gte', 0n);
const StringShape = makeKindMatcher('string');
const SymbolShape = makeKindMatcher('symbol');
const RecordShape = makeKindMatcher('copyRecord');
const ArrayShape = makeKindMatcher('copyArray');
const SetShape = makeKindMatcher('copySet');
const BagShape = makeKindMatcher('copyBag');
const MapShape = makeKindMatcher('copyMap');
const RemotableShape = makeKindMatcher('remotable');
const ErrorShape = makeKindMatcher('error');
const PromiseShape = makeKindMatcher('promise');
const UndefinedShape = makeKindMatcher('undefined');
/** @type {MatcherNamespace} */
const M = harden({
any: () => patt(makeTagged('match:any', undefined)),
scalar: () => patt(makeTagged('match:scalar', undefined)),
and: (...patts) => patt(makeTagged('match:and', patts)),
or: (...patts) => patt(makeTagged('match:or', patts)),
not: subPatt => patt(makeTagged('match:not', subPatt)),
any: () => AnyShape,
and: (...patts) => makeMatcher('match:and', patts),
or: (...patts) => makeMatcher('match:or', patts),
not: subPatt => makeMatcher('match:not', subPatt),
kind: kind => patt(makeTagged('match:kind', kind)),
boolean: () => M.kind('boolean'),
number: () => M.kind('number'),
bigint: () => M.kind('bigint'),
string: () => M.kind('string'),
symbol: () => M.kind('symbol'),
record: () => M.kind('copyRecord'),
array: () => M.kind('copyArray'),
set: () => M.kind('copySet'),
map: () => M.kind('copyMap'),
remotable: () => M.kind('remotable'),
error: () => M.kind('error'),
promise: () => M.kind('promise'),
/**
* All keys including `undefined` are already valid patterns and
* so can validly represent themselves. But optional pattern arguments
* `(pattern = undefined) => ...`
* cannot distinguish between `undefined` passed as a pattern vs
* omission of the argument. It will interpret the first as the
* second. Thus, when a passed pattern does not also need to be a key,
* we recommend passing `M.undefined()` instead of `undefined`.
*/
undefined: () => M.kind('undefined'),
scalar: () => ScalarShape,
key: () => KeyShape,
pattern: () => PatternShape,
kind: makeKindMatcher,
boolean: () => BooleanShape,
number: () => NumberShape,
bigint: () => BigintShape,
nat: () => NatShape,
string: () => StringShape,
symbol: () => SymbolShape,
record: () => RecordShape,
array: () => ArrayShape,
set: () => SetShape,
bag: () => BagShape,
map: () => MapShape,
remotable: () => RemotableShape,
error: () => ErrorShape,
promise: () => PromiseShape,
undefined: () => UndefinedShape,
null: () => null,
lt: rightSide => patt(makeTagged('match:lt', rightSide)),
lte: rightSide => patt(makeTagged('match:lte', rightSide)),
lt: rightOperand => makeMatcher('match:lt', rightOperand),
lte: rightOperand => makeMatcher('match:lte', rightOperand),
eq: key => {

@@ -908,10 +1109,16 @@ assertKey(key);

neq: key => M.not(M.eq(key)),
gte: rightSide => patt(makeTagged('match:gte', rightSide)),
gt: rightSide => patt(makeTagged('match:gt', rightSide)),
gte: rightOperand => makeMatcher('match:gte', rightOperand),
gt: rightOperand => makeMatcher('match:gt', rightOperand),
// TODO make more precise
arrayOf: _elementPatt => M.array(),
recordOf: _entryPatt => M.record(),
setOf: _elementPatt => M.set(),
mapOf: _entryPatt => M.map(),
arrayOf: (subPatt = M.any()) => makeMatcher('match:arrayOf', subPatt),
recordOf: (keyPatt = M.any(), valuePatt = M.any()) =>
makeMatcher('match:recordOf', [keyPatt, valuePatt]),
setOf: (keyPatt = M.any()) => makeMatcher('match:setOf', keyPatt),
bagOf: (keyPatt = M.any()) => makeMatcher('match:bagOf', keyPatt),
mapOf: (keyPatt = M.any(), valuePatt = M.any()) =>
makeMatcher('match:mapOf', [keyPatt, valuePatt]),
split: (base, rest = undefined) =>
makeMatcher('match:split', rest === undefined ? [base] : [base, rest]),
partial: (base, rest = undefined) =>
makeMatcher('match:partial', rest === undefined ? [base] : [base, rest]),
});

@@ -921,3 +1128,3 @@

matches,
assertMatches,
fit,
assertPattern,

@@ -931,1 +1138,18 @@ isPattern,

};
// Only include those whose meaning is independent of an imputed sort order
// of remotables, or of encoding of passable as sortable strings. Thus,
// getRankCover is omitted. To get one, you'd need to instantiate
// `makePatternKit()` yourself. Since there are currently no external
// uses of `getRankCover`, for clarity during development, `makePatternKit`
// is not currently exported.
export const {
matches,
fit,
assertPattern,
isPattern,
assertKeyPattern,
isKeyPattern,
getRankCover,
M,
} = makePatternKit();
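
A minimal usage sketch of the pattern kit exported above, assuming a SES environment where `harden` is global and that the package entrypoint re-exports `matches`, `fit`, and `M`; the values are hypothetical:

import { matches, fit, M } from '@agoric/store';

// Keys are patterns that match only themselves; matchers describe categories.
matches(harden([1n, 'a']), harden([1n, 'a'])); // true
matches(harden([1n, 'a']), M.arrayOf(M.scalar())); // true
matches('x', M.or('x', 'y')); // true

// `fit` is the assertion form of `matches`: it throws on a mismatch.
fit(3n, M.nat()); // passes; M.nat() is the `match:gte 0n` matcher built above
fit(-1n, M.nat()); // throws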

@@ -5,9 +5,9 @@ // @ts-check

import {
assertRecord,
getTag,
nameForPassableSymbol,
passStyleOf,
sameValueZero,
} from '@agoric/marshal';
} from '@endo/marshal';
const { fromEntries, entries, setPrototypeOf } = Object;
const { fromEntries, entries, setPrototypeOf, is } = Object;

@@ -17,2 +17,17 @@ const { ownKeys } = Reflect;

/**
* This is the equality comparison used by JavaScript's Map and Set
* abstractions, where NaN is the same as NaN and -0 is the same as
* 0. Marshal serializes -0 as zero, so the semantics of our distributed
* object system does not distinguish 0 from -0.
*
* `sameValueZero` is the EcmaScript spec name for this equality comparison,
* but TODO we need a better name for the API.
*
* @param {any} x
* @param {any} y
* @returns {boolean}
*/
const sameValueZero = (x, y) => x === y || is(x, y);
/**
* @type {[PassStyle, RankCover][]}

@@ -31,5 +46,5 @@ */

/* s */ ['string', ['s', 't']],
/* u */ ['undefined', ['u', 'v']],
/* v */ ['null', ['v', 'v~']],
/* y */ ['symbol', ['y', 'z']],
/* z */ ['null', ['z', 'z~']],
/* z */ ['undefined', ['z', '{']],
/* | remotable->ordinal mapping prefix: This is not used in covers but it is

@@ -52,132 +67,158 @@ reserved from the same set of strings. Note that the prefix is > any

/** @type {CompareRank} */
export const compareRank = (left, right) => {
if (sameValueZero(left, right)) {
return 0;
}
const leftStyle = passStyleOf(left);
const rightStyle = passStyleOf(right);
if (leftStyle !== rightStyle) {
return compareRank(PassStyleRank[leftStyle], PassStyleRank[rightStyle]);
}
switch (leftStyle) {
case 'undefined':
case 'null':
case 'remotable':
case 'error':
case 'promise': {
// For each of these passStyles, all members of that passStyle are tied
// for the same rank.
/**
* @type {WeakMap<RankCompare,WeakSet<Passable[]>>}
*/
const memoOfSorted = new WeakMap();
/**
* @type {WeakMap<RankCompare,RankCompare>}
*/
const comparatorMirrorImages = new WeakMap();
export const recordParts = record => {
assertRecord(record);
// TODO Measure which is faster: a reverse sort by sorting and
// reversing, or by sorting with an inverse comparison function.
// If it makes a significant difference, use the faster one.
const names = ownKeys(record).sort().reverse();
// @ts-expect-error It thinks name might be a symbol, which it doesn't like.
const vals = names.map(name => record[name]);
return harden([names, vals]);
};
harden(recordParts);
/**
* @param {RankCompare=} compareRemotables
* An option to create a comparator in which an internal order is
* assigned to remotables. This defaults to a comparator that
* always returns `0`, meaning that all remotables are tied
* for the same rank.
* @returns {RankComparatorKit}
*/
export const makeComparatorKit = (compareRemotables = (_x, _y) => 0) => {
/** @type {RankCompare} */
const comparator = (left, right) => {
if (sameValueZero(left, right)) {
return 0;
}
case 'boolean':
case 'bigint':
case 'string': {
// Within each of these passStyles, the rank ordering agrees with
// JavaScript's relational operators `<` and `>`.
if (left < right) {
return -1;
} else {
assert(left > right);
return 1;
}
const leftStyle = passStyleOf(left);
const rightStyle = passStyleOf(right);
if (leftStyle !== rightStyle) {
return comparator(PassStyleRank[leftStyle], PassStyleRank[rightStyle]);
}
case 'symbol': {
return compareRank(
nameForPassableSymbol(left),
nameForPassableSymbol(right),
);
}
case 'number': {
// `NaN`'s rank is after all other numbers.
if (Number.isNaN(left)) {
assert(!Number.isNaN(right));
return 1;
} else if (Number.isNaN(right)) {
return -1;
switch (leftStyle) {
case 'remotable': {
return compareRemotables(left, right);
}
// The rank ordering of non-NaN numbers agrees with JavaScript's
// relational operators '<' and '>'.
if (left < right) {
return -1;
} else {
assert(left > right);
return 1;
case 'undefined':
case 'null':
case 'error':
case 'promise': {
// For each of these passStyles, all members of that passStyle are tied
// for the same rank.
return 0;
}
}
case 'copyRecord': {
// Lexicographic by inverse sorted order of property names, then
// lexicographic by corresponding values in that same inverse
// order of their property names. Comparing names by themselves first,
// all records with the exact same set of property names sort next to
// each other in a rank-sort of copyRecords.
case 'boolean':
case 'bigint':
case 'string': {
// Within each of these passStyles, the rank ordering agrees with
// JavaScript's relational operators `<` and `>`.
if (left < right) {
return -1;
} else {
assert(left > right);
return 1;
}
}
case 'symbol': {
return comparator(
nameForPassableSymbol(left),
nameForPassableSymbol(right),
);
}
case 'number': {
// `NaN`'s rank is after all other numbers.
if (Number.isNaN(left)) {
assert(!Number.isNaN(right));
return 1;
} else if (Number.isNaN(right)) {
return -1;
}
// The rank ordering of non-NaN numbers agrees with JavaScript's
// relational operators '<' and '>'.
if (left < right) {
return -1;
} else {
assert(left > right);
return 1;
}
}
case 'copyRecord': {
// Lexicographic by inverse sorted order of property names, then
// lexicographic by corresponding values in that same inverse
// order of their property names. Comparing names by themselves first,
// all records with the exact same set of property names sort next to
// each other in a rank-sort of copyRecords.
// The copyRecord invariants enforced by passStyleOf ensure that
// all the property names are strings. We need the reverse sorted order
// of these names, which we then compare lexicographically. This ensures
// that if the names of record X are a subset of the names of record Y,
// then record X will have an earlier rank and sort to the left of Y.
const leftNames = harden(
ownKeys(left)
.sort()
// TODO Measure which is faster: a reverse sort by sorting and
// reversing, or by sorting with an inverse comparison function.
// If it makes a significant difference, use the faster one.
.reverse(),
);
const rightNames = harden(
ownKeys(right)
.sort()
.reverse(),
);
const result = compareRank(leftNames, rightNames);
if (result !== 0) {
return result;
}
const leftValues = harden(leftNames.map(name => left[name]));
const rightValues = harden(rightNames.map(name => right[name]));
return compareRank(leftValues, rightValues);
}
case 'copyArray': {
// Lexicographic
const len = Math.min(left.length, right.length);
for (let i = 0; i < len; i += 1) {
const result = compareRank(left[i], right[i]);
// The copyRecord invariants enforced by passStyleOf ensure that
// all the property names are strings. We need the reverse sorted order
// of these names, which we then compare lexicographically. This ensures
// that if the names of record X are a subset of the names of record Y,
// then record X will have an earlier rank and sort to the left of Y.
const [leftNames, leftValues] = recordParts(left);
const [rightNames, rightValues] = recordParts(right);
const result = comparator(leftNames, rightNames);
if (result !== 0) {
return result;
}
return comparator(leftValues, rightValues);
}
// If all matching elements were tied, then according to their lengths.
// If array X is a prefix of array Y, then X has an earlier rank than Y.
return compareRank(left.length, right.length);
}
case 'tagged': {
// Lexicographic by `[Symbol.toStringTag]` then `.payload`.
const labelComp = compareRank(getTag(left), getTag(right));
if (labelComp !== 0) {
return labelComp;
case 'copyArray': {
// Lexicographic
const len = Math.min(left.length, right.length);
for (let i = 0; i < len; i += 1) {
const result = comparator(left[i], right[i]);
if (result !== 0) {
return result;
}
}
// If all matching elements were tied, then according to their lengths.
// If array X is a prefix of array Y, then X has an earlier rank than Y.
return comparator(left.length, right.length);
}
return compareRank(left.payload, right.payload);
case 'tagged': {
// Lexicographic by `[Symbol.toStringTag]` then `.payload`.
const labelComp = comparator(getTag(left), getTag(right));
if (labelComp !== 0) {
return labelComp;
}
return comparator(left.payload, right.payload);
}
default: {
assert.fail(X`Unrecognized passStyle: ${q(leftStyle)}`);
}
}
default: {
assert.fail(X`Unrecognized passStyle: ${q(leftStyle)}`);
}
}
};
harden(compareRank);
};
/** @type {CompareRank} */
export const compareAntiRank = (x, y) => compareRank(y, x);
/** @type {RankCompare} */
const antiComparator = (x, y) => comparator(y, x);
memoOfSorted.set(comparator, new WeakSet());
memoOfSorted.set(antiComparator, new WeakSet());
comparatorMirrorImages.set(comparator, antiComparator);
comparatorMirrorImages.set(antiComparator, comparator);
return harden({ comparator, antiComparator });
};
/**
* @type {Map<CompareRank,WeakSet<Passable[]>>}
* @param {RankCompare} comparator
* @returns {RankCompare=}
*/
const memoOfSorted = new Map([
[compareRank, new WeakSet()],
[compareAntiRank, new WeakSet()],
]);
export const comparatorMirrorImage = comparator =>
comparatorMirrorImages.get(comparator);
/**
* @param {Passable[]} passables
* @param {CompareRank} compare
* @param {RankCompare} compare
* @returns {boolean}

@@ -204,3 +245,3 @@ */

* @param {Passable[]} sorted
* @param {CompareRank} compare
* @param {RankCompare} compare
*/

@@ -217,4 +258,13 @@ export const assertRankSorted = (sorted, compare) =>

/**
* TODO SECURITY BUG: https://github.com/Agoric/agoric-sdk/issues/4260
* sortByRank currently uses `Array.prototype.sort` directly, and
* so only works correctly when given a `compare` function that considers
* `undefined` strictly bigger (`>`) than everything else. This is
* because `Array.prototype.sort` bizarrely moves all `undefined`s to
* the end of the array regardless, without consulting the `compare`
* function. This is a genuine bug for us NOW because sometimes we sort
* in reverse order by passing a reversed rank comparison function.
*
* @param {Iterable<Passable>} passables
* @param {CompareRank} compare
* @param {RankCompare} compare
* @returns {Passable[]}

@@ -247,3 +297,3 @@ */

* @param {Passable[]} sorted
* @param {CompareRank} compare
* @param {RankCompare} compare
* @param {Passable} key

@@ -306,3 +356,3 @@ * @param {("leftMost" | "rightMost")=} bias

/**
* @param {CompareRank} compare
* @param {RankCompare} compare
* @param {Passable} a

@@ -315,3 +365,3 @@ * @param {Passable} b

/**
* @param {CompareRank} compare
* @param {RankCompare} compare
* @param {Passable} a

@@ -324,3 +374,3 @@ * @param {Passable} b

/**
* @param {CompareRank} compare
* @param {RankCompare} compare
* @param {RankCover[]} covers

@@ -344,3 +394,3 @@ * @returns {RankCover}

/**
* @param {CompareRank} compare
* @param {RankCompare} compare
* @param {RankCover[]} covers

@@ -361,1 +411,50 @@ * @returns {RankCover}

};
export const { comparator: compareRank, antiComparator: compareAntiRank } =
makeComparatorKit();
/**
* Create a comparator kit in which remotables are fully ordered
* by the order in which they are first seen by *this* comparator kit.
* BEWARE: This is observable mutable state, so such a comparator kit
* should never be shared among subsystems that should not be able
* to communicate.
*
* Note that this order does not meet the requirements for store
* ordering, since it has no memory of deleted keys.
*
 * This full order comparator kit is strictly more precise than the
* rank order comparator kits above. As a result, any array which is
* sorted by such a full order will pass the isRankSorted test with
* a corresponding rank order.
*
* An array which is sorted by a *fresh* full order comparator, i.e.,
* one that has not yet seen any remotables, will of course remain
 * sorted according to *that* full order comparator. An array *of
* scalars* sorted by a fresh full order will remain sorted even
* according to a new fresh full order comparator, since it will see
* the remotables in the same order again. Unfortunately, this is
* not true of arrays of passables in general.
*
* @param {boolean=} longLived
* @returns {FullComparatorKit}
*/
export const makeFullOrderComparatorKit = (longLived = false) => {
let numSeen = 0;
// When dynamically created with short lifetimes (the default) a WeakMap
// would perform poorly, and the leak created by a Map only lasts as long
// as the Map.
const MapConstructor = longLived ? WeakMap : Map;
const seen = new MapConstructor();
const tag = r => {
if (seen.has(r)) {
return seen.get(r);
}
numSeen += 1;
seen.set(r, numSeen);
return numSeen;
};
const compareRemotables = (x, y) => compareRank(tag(x), tag(y));
return makeComparatorKit(compareRemotables);
};
harden(makeFullOrderComparatorKit);
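
A small illustrative sketch of the rank order and full order comparators, assuming a SES `harden` global and that `compareRank`/`sortByRank` are re-exported from the package entrypoint; `makeFullOrderComparatorKit` is imported here via a hypothetical relative path:

import { Far } from '@endo/marshal';
import { compareRank, sortByRank } from '@agoric/store';
import { makeFullOrderComparatorKit } from './patterns/rankOrder.js';

compareRank(3, NaN); // -1: NaN ranks after every other number
compareRank(Far('x', {}), Far('y', {})); // 0: all remotables are tied in rank order
sortByRank(harden([NaN, 2, 1]), compareRank); // [1, 2, NaN]

// A full-order kit breaks remotable ties by first-seen order, so it is
// observable mutable state and must not be shared across trust boundaries.
const { comparator } = makeFullOrderComparatorKit();
const a = Far('a', {});
const b = Far('b', {});
comparator(a, b); // -1: this comparator saw `a` first
comparator(b, a); // 1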
// @ts-check
import { Far, assertPassable } from '@agoric/marshal';
import {
Far,
assertPassable,
filterIterable,
mapIterable,
} from '@endo/marshal';
import { compareRank } from '../patterns/rankOrder.js';
import { assertScalarKey } from '../keys/checkKey.js';
import { makeCopyMap } from '../keys/copyMap.js';
import { makePatternKit } from '../patterns/patternMatchers.js';
import { assertScalarKey, makeCopyMap } from '../keys/checkKey.js';
import { matches, fit, assertPattern } from '../patterns/patternMatchers.js';
import { makeWeakMapStoreMethods } from './scalarWeakMapStore.js';
import { makeCursorKit } from './store-utils.js';
import { makeCurrentKeysKit } from './store-utils.js';
const { details: X, quote: q } = assert;
const { assertMatches, assertPattern } = makePatternKit();
const { quote: q } = assert;

@@ -17,3 +20,4 @@ /**

* @param {Map<K,V>} jsmap
* @param {(k: K, v: V) => void} assertKVOkToWrite
* @param {(k: K, v: V) => void} assertKVOkToAdd
* @param {(k: K, v: V) => void} assertKVOkToSet
* @param {((k: K) => void)=} assertKeyOkToDelete

@@ -25,56 +29,87 @@ * @param {string=} keyName

jsmap,
assertKVOkToWrite,
assertKVOkToAdd,
assertKVOkToSet,
assertKeyOkToDelete = undefined,
keyName = 'key',
) => {
const {
assertUpdateOnWrite,
assertUpdateOnDelete,
makeCursor,
makeArray,
} = makeCursorKit(
compareRank,
assertKVOkToWrite,
assertKeyOkToDelete,
keyName,
);
const { assertUpdateOnAdd, assertUpdateOnDelete, iterableKeys } =
makeCurrentKeysKit(
() => jsmap.keys(),
k => jsmap.has(k),
compareRank,
assertKVOkToAdd,
assertKeyOkToDelete,
keyName,
);
const methods = harden({
/**
* @param {Pattern=} keyPatt
* @param {Pattern=} valuePatt
* @returns {Iterable<K>}
*/
const keys = (keyPatt = undefined, valuePatt = undefined) => {
if (keyPatt === undefined && valuePatt === undefined) {
return iterableKeys;
}
const filter = k => {
if (keyPatt !== undefined && !matches(k, keyPatt)) {
return false;
}
      // Uses the current jsmap value, since the iterator survives `.set`
if (valuePatt !== undefined && !matches(jsmap.get(k), valuePatt)) {
return false;
}
return true;
};
return filterIterable(iterableKeys, filter);
};
/**
* @param {Pattern=} keyPatt
* @param {Pattern=} valuePatt
* @returns {Iterable<V>}
*/
const values = (keyPatt = undefined, valuePatt = undefined) =>
mapIterable(keys(keyPatt, valuePatt), k => /** @type {V} */ (jsmap.get(k)));
/**
* @param {Pattern=} keyPatt
* @param {Pattern=} valuePatt
* @returns {Iterable<[K,V]>}
*/
const entries = (keyPatt = undefined, valuePatt = undefined) =>
mapIterable(keys(keyPatt, valuePatt), k => [
k,
/** @type {V} */ (jsmap.get(k)),
]);
return harden({
...makeWeakMapStoreMethods(
jsmap,
assertUpdateOnWrite,
/** @type {(k: K, v: V) => void} */ (assertUpdateOnAdd),
assertKVOkToSet,
assertUpdateOnDelete,
keyName,
),
keys,
values,
entries,
cursor: (entryPatt = undefined, direction = 'forward') => {
assert.equal(
direction,
'forward',
X`Non-forward cursors are not yet implemented: map ${q(keyName)}`,
);
return makeCursor(jsmap.entries(), entryPatt);
},
snapshot: (keyPatt = undefined, valuePatt = undefined) =>
makeCopyMap(entries(keyPatt, valuePatt)),
keys: (keyPatt = undefined) => makeArray(jsmap.keys(), keyPatt),
values: (valPatt = undefined) => makeArray(jsmap.values(), valPatt),
entries: (entryPatt = undefined) => makeArray(jsmap.entries(), entryPatt),
getSize: (keyPatt = undefined, valuePatt = undefined) =>
keyPatt === undefined && valuePatt === undefined
? jsmap.size
: [...keys(keyPatt, valuePatt)].length,
snapshot: (entryPatt = undefined) => makeCopyMap(methods.cursor(entryPatt)),
addAll: copyMap => {
const {
payload: { keys, values },
} = copyMap;
const { length } = keys;
for (let i = 0; i < length; i += 1) {
const key = keys[i];
const value = values[i];
// Don't assert that the key either does or does not exist.
assertUpdateOnWrite(key, value);
jsmap.set(key, value);
clear: (keyPatt = undefined, valuePatt = undefined) => {
if (keyPatt === undefined && valuePatt === undefined) {
jsmap.clear();
}
for (const key of keys(keyPatt, valuePatt)) {
jsmap.delete(key);
}
},
});
return methods;
};

@@ -95,3 +130,3 @@

* @param {string} [keyName='key'] - the column name for the key
* @param {Partial<StoreOptions>=} options
* @param {StoreOptions=} options
* @returns {MapStore<K,V>}

@@ -101,25 +136,45 @@ */

keyName = 'key',
{ schema = undefined } = {},
{ keySchema = undefined, valueSchema = undefined } = {},
) => {
const jsmap = new Map();
if (schema) {
assertPattern(schema);
if (keySchema !== undefined) {
assertPattern(keySchema);
}
const assertKVOkToWrite = (key, value) => {
if (valueSchema !== undefined) {
assertPattern(valueSchema);
}
const assertKVOkToSet = (_key, value) => {
// TODO: Just a transition kludge. Remove when possible.
// See https://github.com/Agoric/agoric-sdk/issues/3606
harden(key);
harden(value);
assertScalarKey(key);
assertPassable(value);
if (schema) {
assertMatches(harden([key, value]), schema);
if (valueSchema !== undefined) {
fit(value, valueSchema);
}
};
const mapStore = Far(`scalar MapStore of ${q(keyName)}`, {
...makeMapStoreMethods(jsmap, assertKVOkToWrite, undefined, keyName),
const assertKVOkToAdd = (key, value) => {
// TODO: Just a transition kludge. Remove when possible.
// See https://github.com/Agoric/agoric-sdk/issues/3606
harden(key);
assertScalarKey(key);
if (keySchema !== undefined) {
fit(key, keySchema);
}
assertKVOkToSet(key, value);
};
return Far(`scalar MapStore of ${q(keyName)}`, {
...makeMapStoreMethods(
jsmap,
assertKVOkToAdd,
assertKVOkToSet,
undefined,
keyName,
),
});
return mapStore;
};
harden(makeScalarMapStore);
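
A usage sketch of the new `keySchema`/`valueSchema` options and the pattern-filtered query methods; labels and data are hypothetical, and a SES `harden` global is assumed:

import { makeScalarMapStore, M } from '@agoric/store';

const balances = makeScalarMapStore('account', {
  keySchema: M.string(),
  valueSchema: M.nat(),
});

balances.init('alice', 100n); // add: the key must not exist yet
balances.init('bob', 25n);
balances.set('bob', 50n); // update: the key must already exist
balances.get('alice'); // 100n

// Query methods take optional key/value patterns and iterate in rank order.
[...balances.keys()]; // ['alice', 'bob']
[...balances.entries(undefined, M.gte(60n))]; // [['alice', 100n]]
balances.getSize(undefined, M.gte(60n)); // 1
balances.snapshot(); // a CopyMap of all current entries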
// @ts-check
import { Far } from '@agoric/marshal';
import { Far, filterIterable } from '@endo/marshal';
import { compareRank } from '../patterns/rankOrder.js';
import { assertScalarKey } from '../keys/checkKey.js';
import { makeCopySet } from '../keys/copySet.js';
import { makePatternKit } from '../patterns/patternMatchers.js';
import { assertScalarKey, makeCopySet } from '../keys/checkKey.js';
import { matches, fit, assertPattern } from '../patterns/patternMatchers.js';
import { makeWeakSetStoreMethods } from './scalarWeakSetStore.js';
import { makeCursorKit } from './store-utils.js';
import { makeCurrentKeysKit } from './store-utils.js';
const { details: X, quote: q } = assert;
const { assertMatches, assertPattern } = makePatternKit();
const { quote: q } = assert;

@@ -17,3 +15,3 @@ /**

* @param {Set<K>} jsset
* @param {(k: K) => void} assertKeyOkToWrite
* @param {(k: K) => void} assertKeyOkToAdd
* @param {((k: K) => void)=} assertKeyOkToDelete

@@ -25,22 +23,29 @@ * @param {string=} keyName

jsset,
assertKeyOkToWrite,
assertKeyOkToAdd,
assertKeyOkToDelete = undefined,
keyName = 'key',
) => {
const {
assertUpdateOnWrite,
assertUpdateOnDelete,
makeCursor,
makeArray,
} = makeCursorKit(
compareRank,
assertKeyOkToWrite,
assertKeyOkToDelete,
keyName,
);
const { assertUpdateOnAdd, assertUpdateOnDelete, iterableKeys } =
makeCurrentKeysKit(
() => jsset.keys(),
k => jsset.has(k),
compareRank,
assertKeyOkToAdd,
assertKeyOkToDelete,
keyName,
);
const methods = harden({
/**
* @param {Pattern=} keyPatt
* @returns {Iterable<K>}
*/
const keys = (keyPatt = undefined) =>
keyPatt === undefined
? iterableKeys
: filterIterable(iterableKeys, k => matches(k, keyPatt));
return harden({
...makeWeakSetStoreMethods(
jsset,
assertUpdateOnWrite,
assertUpdateOnAdd,
assertUpdateOnDelete,

@@ -50,27 +55,20 @@ keyName,

cursor: (keyPatt = undefined, direction = 'forward') => {
assert.equal(
direction,
'forward',
X`Non-forward cursors are not yet implemented: map ${q(keyName)}`,
);
return makeCursor(jsset.keys(), keyPatt);
},
keys,
keys: (keyPatt = undefined) => makeArray(jsset.keys(), keyPatt),
values: keys,
snapshot: (keyPatt = undefined) => makeCopySet(methods.cursor(keyPatt)),
snapshot: (keyPatt = undefined) => makeCopySet(keys(keyPatt)),
addAll: copySet => {
const { payload: keys } = copySet;
const { length } = keys;
for (let i = 0; i < length; i += 1) {
const key = keys[i];
// Don't assert that the key either does or does not exist.
assertKeyOkToWrite(key);
jsset.add(key);
getSize: (keyPatt = undefined) =>
keyPatt === undefined ? jsset.size : [...keys(keyPatt)].length,
clear: (keyPatt = undefined) => {
if (keyPatt === undefined) {
jsset.clear();
}
for (const key of keys(keyPatt)) {
jsset.delete(key);
}
},
});
return methods;
};

@@ -91,3 +89,3 @@

* @param {string} [keyName='key'] - the column name for the key
* @param {Partial<StoreOptions>=} options
* @param {StoreOptions=} options
* @returns {SetStore<K>}

@@ -97,9 +95,10 @@ */

keyName = 'key',
{ schema = undefined } = {},
{ keySchema = undefined } = {},
) => {
const jsset = new Set();
if (schema) {
assertPattern(schema);
if (keySchema !== undefined) {
assertPattern(keySchema);
}
const assertKeyOkToWrite = key => {
const assertKeyOkToAdd = key => {
// TODO: Just a transition kludge. Remove when possible.

@@ -110,11 +109,11 @@ // See https://github.com/Agoric/agoric-sdk/issues/3606

assertScalarKey(key);
if (schema) {
assertMatches(key, schema);
if (keySchema !== undefined) {
fit(key, keySchema);
}
};
const setStore = Far(`scalar SetStore of ${q(keyName)}`, {
...makeSetStoreMethods(jsset, assertKeyOkToWrite, undefined, keyName),
return Far(`scalar SetStore of ${q(keyName)}`, {
...makeSetStoreMethods(jsset, assertKeyOkToAdd, undefined, keyName),
});
return setStore;
};
harden(makeScalarSetStore);
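
A corresponding sketch for the scalar SetStore, with hypothetical values and a SES `harden` global assumed:

import { makeScalarSetStore, M } from '@agoric/store';

const seenNonces = makeScalarSetStore('nonce', { keySchema: M.nat() });

seenNonces.add(1n);
seenNonces.addAll([2n, 3n]); // addAll accepts any iterable of keys
seenNonces.has(2n); // true
[...seenNonces.keys(M.gte(2n))]; // [2n, 3n]
seenNonces.getSize(); // 3
seenNonces.snapshot(M.gte(2n)); // a CopySet containing 2n and 3n
seenNonces.clear(M.lt(3n)); // delete only the keys matching the pattern
seenNonces.getSize(); // 1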
// @ts-check
import { Far, assertPassable, passStyleOf } from '@agoric/marshal';
import { makePatternKit } from '../patterns/patternMatchers.js';
import { Far, assertPassable, passStyleOf } from '@endo/marshal';
import { fit, assertPattern } from '../patterns/patternMatchers.js';
const { details: X, quote: q } = assert;
const { assertMatches, assertPattern } = makePatternKit();

@@ -12,3 +11,4 @@ /**

* @param {WeakMap<K & Object,V>} jsmap
* @param {(k: K, v: V) => void} assertKVOkToWrite
* @param {(k: K, v: V) => void} assertKVOkToAdd
* @param {(k: K, v: V) => void} assertKVOkToSet
* @param {((k: K) => void)=} assertKeyOkToDelete

@@ -20,4 +20,5 @@ * @param {string=} keyName

jsmap,
assertKVOkToWrite,
assertKeyOkToDelete = () => {},
assertKVOkToAdd,
assertKVOkToSet,
assertKeyOkToDelete = undefined,
keyName = 'key',

@@ -46,3 +47,3 @@ ) => {

assertKeyDoesNotExist(key);
assertKVOkToWrite(key, value);
assertKVOkToAdd(key, value);
jsmap.set(key, value);

@@ -52,3 +53,3 @@ },

assertKeyExists(key);
assertKVOkToWrite(key, value);
assertKVOkToSet(key, value);
jsmap.set(key, value);

@@ -58,5 +59,15 @@ },

assertKeyExists(key);
assertKeyOkToDelete(key);
if (assertKeyOkToDelete !== undefined) {
assertKeyOkToDelete(key);
}
jsmap.delete(key);
},
addAll: entries => {
for (const [key, value] of entries) {
// Don't assert that the key either does or does not exist.
assertKVOkToAdd(key, value);
jsmap.set(key, value);
}
},
});

@@ -66,5 +77,6 @@ };

/**
* This is a *scalar* map in that the keys can only be atomic values, primitives
* or remotables. Other storeMaps will accept, for example, copyArrays and
* copyRecords, as keys and look them up based on equality of their contents.
* This is a *scalar* mapStore in that the keys can only be atomic values:
* primitives or remotables.
* Other mapStores will accept, for example, copyArrays and
* copyRecords as keys and look them up based on equality of their contents.
*

@@ -76,7 +88,7 @@ * TODO For now, this scalarWeakMap accepts only remotables, reflecting the

 * there. Though note that this would only enable collection of the
* remotables, since the other primitives may always appear.
* remotables, since the other primitives may always reappear.
*
* @template K,V
* @param {string} [keyName='key'] - the column name for the key
* @param {Partial<StoreOptions>=} options
* @param {StoreOptions=} options
* @returns {WeakMapStore<K,V>}

@@ -86,14 +98,28 @@ */

keyName = 'key',
{ longLived = true, schema = undefined } = {},
{ longLived = true, keySchema = undefined, valueSchema = undefined } = {},
) => {
const jsmap = new (longLived ? WeakMap : Map)();
if (schema) {
assertPattern(schema);
if (keySchema !== undefined) {
assertPattern(keySchema);
}
const assertKVOkToWrite = (key, value) => {
if (valueSchema !== undefined) {
assertPattern(valueSchema);
}
const assertKVOkToSet = (_key, value) => {
// TODO: Just a transition kludge. Remove when possible.
// See https://github.com/Agoric/agoric-sdk/issues/3606
harden(key);
harden(value);
assertPassable(value);
if (valueSchema !== undefined) {
fit(value, valueSchema);
}
};
const assertKVOkToAdd = (key, value) => {
// TODO: Just a transition kludge. Remove when possible.
// See https://github.com/Agoric/agoric-sdk/issues/3606
harden(key);
assert(

@@ -103,12 +129,18 @@ passStyleOf(key) === 'remotable',

);
assertPassable(value);
if (schema) {
assertMatches(harden([key, value]), schema);
if (keySchema !== undefined) {
fit(key, keySchema);
}
assertKVOkToSet(key, value);
};
const weakMapStore = Far(`scalar WeakMapStore of ${q(keyName)}`, {
...makeWeakMapStoreMethods(jsmap, assertKVOkToWrite, undefined, keyName),
return Far(`scalar WeakMapStore of ${q(keyName)}`, {
...makeWeakMapStoreMethods(
jsmap,
assertKVOkToAdd,
assertKVOkToSet,
undefined,
keyName,
),
});
return weakMapStore;
};
harden(makeScalarWeakMapStore);
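
A sketch of the weak variant, whose keys must be remotables here; the seat objects are hypothetical and a SES `harden` global is assumed:

import { Far } from '@endo/marshal';
import { makeScalarWeakMapStore, M } from '@agoric/store';

const seatData = makeScalarWeakMapStore('seat', { valueSchema: M.record() });

const seat = Far('seat', {});
seatData.init(seat, harden({ want: 'Moola' }));
seatData.get(seat); // { want: 'Moola' }
seatData.set(seat, harden({ want: 'Simolean' }));
seatData.addAll([[Far('seat', {}), harden({ want: 'Bucks' })]]);
seatData.delete(seat);
// No keys()/values()/entries()/snapshot(): weak stores are not iterable.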
// @ts-check
import { Far, passStyleOf } from '@agoric/marshal';
import { makePatternKit } from '../patterns/patternMatchers.js';
import { Far, passStyleOf } from '@endo/marshal';
import { fit, assertPattern } from '../patterns/patternMatchers.js';
const { details: X, quote: q } = assert;
const { assertMatches, assertPattern } = makePatternKit();

@@ -12,3 +11,3 @@ /**

* @param {WeakSet<K & Object>} jsset
* @param {(k: K) => void} assertKeyOkToWrite
* @param {(k: K) => void} assertKeyOkToAdd
* @param {((k: K) => void)=} assertKeyOkToDelete

@@ -20,4 +19,4 @@ * @param {string=} keyName

jsset,
assertKeyOkToWrite,
assertKeyOkToDelete = () => {},
assertKeyOkToAdd,
assertKeyOkToDelete = undefined,
keyName = 'key',

@@ -37,3 +36,3 @@ ) => {

add: key => {
assertKeyOkToWrite(key);
assertKeyOkToAdd(key);
jsset.add(key);

@@ -43,5 +42,14 @@ },

assertKeyExists(key);
assertKeyOkToDelete(key);
if (assertKeyOkToDelete !== undefined) {
assertKeyOkToDelete(key);
}
jsset.delete(key);
},
addAll: keys => {
for (const key of keys) {
assertKeyOkToAdd(key);
jsset.add(key);
}
},
});

@@ -64,3 +72,3 @@ };

* @param {string} [keyName='key'] - the column name for the key
* @param {Partial<StoreOptions>=} options
* @param {StoreOptions=} options
* @returns {WeakSetStore<K>}

@@ -70,9 +78,10 @@ */

keyName = 'key',
{ longLived = true, schema = undefined } = {},
{ longLived = true, keySchema = undefined } = {},
) => {
const jsset = new (longLived ? WeakSet : Set)();
if (schema) {
assertPattern(schema);
if (keySchema !== undefined) {
assertPattern(keySchema);
}
const assertKeyOkToWrite = key => {
const assertKeyOkToAdd = key => {
// TODO: Just a transition kludge. Remove when possible.

@@ -86,11 +95,11 @@ // See https://github.com/Agoric/agoric-sdk/issues/3606

);
if (schema) {
assertMatches(key, schema);
if (keySchema !== undefined) {
fit(key, keySchema);
}
};
const weakSetStore = Far(`scalar WeakSetStore of ${q(keyName)}`, {
...makeWeakSetStoreMethods(jsset, assertKeyOkToWrite, undefined, keyName),
return Far(`scalar WeakSetStore of ${q(keyName)}`, {
...makeWeakSetStoreMethods(jsset, assertKeyOkToAdd, undefined, keyName),
});
return weakSetStore;
};
harden(makeScalarWeakSetStore);
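
And the weak SetStore counterpart, again with hypothetical handles:

import { Far } from '@endo/marshal';
import { makeScalarWeakSetStore } from '@agoric/store';

const usedInvitations = makeScalarWeakSetStore('invitation');

const handle = Far('invitation handle', {});
usedInvitations.add(handle);
usedInvitations.has(handle); // true
usedInvitations.addAll([Far('invitation handle', {}), Far('invitation handle', {})]);
usedInvitations.delete(handle);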
// @ts-check
import { filterIterable } from '@agoric/marshal';
import { makePatternKit } from '../patterns/patternMatchers.js';
import { assertRankSorted } from '../patterns/rankOrder.js';
import { Far } from '@endo/marshal';
const { details: X, quote: q } = assert;
const { matches } = makePatternKit();
export const makeCursorKit = (
/**
* @template K,V
* @typedef {Object} CurrentKeysKit
* @property {(k: K, v?: V) => void} assertUpdateOnAdd
* @property {(k: K) => void} assertUpdateOnDelete
* @property {Iterable<K>} iterableKeys
*/
/**
* @template K,V
* @param {() => Iterable<K>} getRawKeys
* @param {(k: K) => boolean} checkHas
* @param {RankCompare} compare
* @param {(k: K, v?: V) => void} assertOkToAdd
* @param {((k: K) => void)=} assertOkToDelete
* @param {string=} keyName
* @returns {CurrentKeysKit<K,V>}
*/
export const makeCurrentKeysKit = (
getRawKeys,
checkHas,
compare,
assertOkToWrite,
assertOkToAdd,
assertOkToDelete = undefined,

@@ -17,55 +34,56 @@ keyName = 'key',

let updateCount = 0;
let sortedKeysMemo;
const cursorKit = harden({
assertUpdateOnWrite: (k, v) => {
assertOkToWrite(k, v);
updateCount += 1;
},
const assertUpdateOnAdd = (k, v = undefined) => {
assertOkToAdd(k, v);
updateCount += 1;
sortedKeysMemo = undefined;
};
assertUpdateOnDelete: assertOkToDelete
? k => {
assertOkToDelete(k);
updateCount += 1;
}
: () => {
updateCount += 1;
},
const assertUpdateOnDelete = k => assertOkToDelete && assertOkToDelete(k);
makeArray: (baseIterable, pattern = undefined) => {
const filter = pattern ? val => matches(harden(val), pattern) : harden;
const filtered = filterIterable(baseIterable, filter);
const sorted = harden([...filtered].sort(compare));
assertRankSorted(sorted, compare);
return sorted;
},
const getSortedKeys = () => {
if (sortedKeysMemo === undefined) {
sortedKeysMemo = harden([...getRawKeys()].sort(compare));
}
return sortedKeysMemo;
};
makeCursor: (baseIterable, pattern = undefined) => {
const currentUpdateCount = updateCount;
const notStaleFilter = () => {
assert.equal(
currentUpdateCount,
updateCount,
X`MapStore ${q(keyName)} cursor stale`,
);
return true;
};
// TODO In an implementation where the baseIterable returns its data
// already rank sorted, `makeCursor` would use the following
// code to make a cursor, and makeArray would be a snapshot of that.
// However,
// to get the correct external behavior on non-ordered representation,
// we sort in makeArray instead and then makeCursor return a cursor built
// from that.
// const filter = pattern
// ? val => notStaleFilter() && matches(val, pattern)
// : notStaleFilter;
// return filterIterable(baseIterable, filter);
const sorted = cursorKit.makeArray(baseIterable, pattern);
return filterIterable(sorted, notStaleFilter);
const iterableKeys = Far('Iterable of keys', {
[Symbol.iterator]: () => {
const generation = updateCount;
getSortedKeys();
const len = sortedKeysMemo.length;
let i = 0;
return Far('Iterator of keys', {
next: () => {
assert.equal(
generation,
updateCount,
X`Store ${q(keyName)} cursor stale`,
);
            // If they're equal, then the sortedKeysMemo is the same one
// we started with.
for (;;) {
if (i < len) {
const value = sortedKeysMemo[i];
i += 1;
if (checkHas(value)) {
return harden({ done: false, value });
}
} else {
return harden({ done: true, value: undefined });
}
}
},
});
},
});
return cursorKit;
return harden({
assertUpdateOnAdd,
assertUpdateOnDelete,
iterableKeys,
});
};
harden(makeCursorKit);
harden(makeCurrentKeysKit);
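
A sketch of how the stores above wire a plain JS collection into `makeCurrentKeysKit` to get a rank-sorted, staleness-checked view of the current keys; the wiring and relative import paths are illustrative only:

import { compareRank } from '../patterns/rankOrder.js';
import { makeCurrentKeysKit } from './store-utils.js';

const jsset = new Set();
const { assertUpdateOnAdd, assertUpdateOnDelete, iterableKeys } =
  makeCurrentKeysKit(
    () => jsset.keys(),
    k => jsset.has(k),
    compareRank,
    k => harden(k), // assertOkToAdd: accept anything for this demo
    undefined, // no assertOkToDelete
    'demo key',
  );

assertUpdateOnAdd('b');
jsset.add('b');
assertUpdateOnAdd('a');
jsset.add('a');
[...iterableKeys]; // ['a', 'b']: a rank-sorted view of the current keys

// Deletions are simply skipped by live iterators (via the checkHas callback),
// while a later add bumps the generation and makes old iterators throw a
// "cursor stale" error.
assertUpdateOnDelete('a');
jsset.delete('a');
[...iterableKeys]; // ['b']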
// @ts-check
// eslint-disable-next-line spaced-comment
/// <reference types="ses"/>

@@ -8,2 +7,18 @@

* @typedef {Passable} Key
* Keys are pass-by-copy structures (CopyArray, CopyRecord,
* CopySet, CopyBag, CopyMap) that end in either passable primitive data or
* Remotables (Far objects or their remote presences.) Keys are so named
* because they can be used as keys in MapStores and CopyMaps, as well as
* the elements of CopySets and CopyBags.
*
* Keys cannot contain promises or errors, as these do not have a useful
* distributed equality semantics. Keys also cannot contain any CopyTagged
* except for those recognized as CopySets, CopyBags, and CopyMaps.
*
* Be aware that we may recognize more CopyTaggeds over time, including
* CopyTaggeds recognized as keys.
*
* Distributed equality is location independent.
* The same two keys, passed to another location, will be equal there iff
* they are equal here.
*/

@@ -13,6 +28,45 @@

* @typedef {Passable} Pattern
* Patterns can be either Keys or Matchers
* Patterns are pass-by-copy structures (CopyArray, CopyRecord,
* CopySet, CopyBag, CopyMap) that end in either Keys or Matchers. Each pattern
* acts as a declarative passable predicate over passables, where each passable
* either passes a given pattern, or does not. Every key is also a pattern.
* Used as a pattern, a key matches only "itself", i.e., keys that are equal
* to it according to the key distributed equality semantics.
*
* Patterns cannot contain promises or errors, as these do
* not have a useful distributed equality or matching semantics. Likewise,
* no pattern can distinguish among promises, or distinguish among errors.
* Patterns also cannot contain any CopyTaggeds except for those recognized as
* CopySets, CopyBags, CopyMaps, or Matchers.
*
* Be aware that we may recognize more CopyTaggeds over time, including
* CopyTaggeds recognized as patterns.
*
* Whether a passable matches a given pattern is location independent.
* For a passable and a pattern, both passed to another location, the passable
* will match the pattern there iff the passable matches that pattern here.
*
* Patterns are often used in a type-like manner, to represent the category
* of passables that are intended* to match that pattern. To keep this
* distinction clear, we often use the suffix "Shape" rather than "Pattern"
 * to avoid confusion about levels when the pattern itself represents
* some category of pattern. For example, an "AmountShape" represents the
* category of Amounts. And "AmountPatternShape" represents the
* category of patterns over Amounts.
*
* * I say "intended" above because Patterns, in order to be declarative
* and passable, cannot have the generality of predicates written in a
* Turing-universal programming language. Rather, to represent the category of
* things intended to be a Foo, a FooShape should reliably
* accept all Foos and reject only non-Foos. However, a FooShape may also accept
* non-Foos that "look like" or have "the same shape as" genuine Foos. To write
 * an accurate predicate for use, for example, in input validation, one would
* need to supplement the pattern check with code to check for the residual
* cases.
 * We hope the "Shape" metaphor helps remind us of this type-like imprecision
* of patterns.
*/
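
A tiny sketch of the "every key is a pattern that matches only itself" rule, with hypothetical values and a SES `harden` global assumed:

import { matches, makeCopySet, M } from '@agoric/store';

matches(harden(['a', 1n]), harden(['a', 1n])); // true: a key used as its own pattern
matches(harden(['a', 1n]), harden(['a', 2n])); // false
matches(makeCopySet(harden(['x', 'y'])), M.set()); // true: CopySets are keys and match M.set()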
/**
* @template K
* @typedef {CopyTagged} CopySet

@@ -22,2 +76,8 @@ */

/**
* @template K
* @typedef {CopyTagged} CopyBag
*/
/**
* @template K,V
* @typedef {CopyTagged} CopyMap

@@ -41,11 +101,21 @@ */

* Defaults to true, so please mark short lived stores explicitly.
* @property {Pattern=} schema The store will reject
* the insertion of any content that does not match the schema pattern.
* For a SetStore, this is a key pattern.
* For a MapStore, this is an entry pattern,
* i.e., a pattern over `[key,value]` pairs.
* Defaults to `M.any()`.
* @property {boolean=} durable The contents of this store survive termination
* of its containing process, allowing for restart or upgrade but at the cost
* of forbidding storage of references to ephemeral data. Defaults to false.
* @property {Pattern=} keySchema
* @property {Pattern=} valueSchema
*/
/** @typedef {"forward" | "reverse"} Direction */
/**
 * Most store methods are in one of the following categories
* * lookup methods (`has`,`get`)
* * update methods (`add`,`init`,`set`,`delete`,`addAll`)
* * query methods (`snapshot`,`keys`,`values`,`entries`,`getSize`)
* * query-update methods (`clear`)
*
* WeakStores have the lookup and update methods but not the query
* or query-update methods.
* Non-weak Stores are like their corresponding WeakStore, but with the
* additional query and query-update methods.
*/

@@ -65,2 +135,3 @@ /**

* Remove the key. Throws if not found.
* @property {(keys: Iterable<K>) => void} addAll
*/

@@ -81,8 +152,7 @@

* Remove the key. Throws if not found.
* @property {(keyPattern?: Pattern) => K[]} keys
* @property {(keyPattern?: Pattern) => CopySet} snapshot
* @property {(copySet: CopySet) => void} addAll
* @property {(keyPattern?: Pattern,
* direction?: Direction
* ) => Iterable<K>} cursor
* @property {(keys: Iterable<K>) => void} addAll
* @property {(keyPatt?: Pattern) => Iterable<K>} keys
* @property {(keyPatt?: Pattern) => CopySet<K>} snapshot
* @property {(keyPatt?: Pattern) => number} getSize
* @property {(keyPatt?: Pattern) => void} clear
*/

@@ -106,2 +176,3 @@

* Remove the key. Throws if not found.
* @property {(entries: Iterable<[K,V]>) => void} addAll
*/

@@ -125,10 +196,12 @@

* Remove the key. Throws if not found.
* @property {(keyPattern?: Pattern) => K[]} keys
* @property {(valuePattern?: Pattern) => V[]} values
* @property {(entryPattern?: Pattern) => [K,V][]} entries
* @property {(entryPattern?: Pattern) => CopyMap} snapshot
* @property {(copyMap: CopyMap) => void} addAll
* @property {(entryPattern?: Pattern,
* direction?: Direction
* ) => Iterable<[K,V]>} cursor
* @property {(entries: Iterable<[K,V]>) => void} addAll
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => Iterable<K>} keys
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => Iterable<V>} values
* @property {(
* keyPatt?: Pattern,
* valuePatt?: Pattern
* ) => Iterable<[K,V]>} entries
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => CopyMap<K,V>} snapshot
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => number} getSize
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => void} clear
*/

@@ -146,13 +219,17 @@

* @template K,V
* @typedef {Object} Store
* @typedef {MapStore<K,V>} Store
* Deprecated type name `Store`. Use `MapStore` instead.
*/
/**
* @template K,V
* @typedef {Object} LegacyWeakMap
* LegacyWeakMap is deprecated. Use WeakMapStore instead.
* @property {(key: K) => boolean} has
* Check if a key exists. The key can be any JavaScript value, though the
* answer will always be false for keys that cannot be found in this map
* Check if a key exists
* @property {(key: K) => V} get
* Return a value for the key. Throws if not found.
* @property {(key: K, value: V) => void} init
* Initialize the key only if it doesn't already exist.
* The key must be one allowed by this store. For example a scalar store only
* allows primitives and remotables.
* Initialize the key only if it
* doesn't already exist
* @property {(key: K, value: V) => void} set

@@ -162,5 +239,2 @@ * Set the key. Throws if not found.

* Remove the key. Throws if not found.
* @property {(keyPattern?: Pattern) => K[]} keys
* @property {(valuePattern?: Pattern) => V[]} values
* @property {(entryPattern?: Pattern) => [K,V][]} entries
*/

@@ -170,3 +244,3 @@

* @template K,V
* @typedef {Object} LegacyWeakMap
* @typedef {Object} LegacyMap
* LegacyWeakMap is deprecated. Use WeakMapStore instead.

@@ -184,15 +258,20 @@ * @property {(key: K) => boolean} has

* Remove the key. Throws if not found.
* @property {() => Iterable<K>} keys
* @property {() => Iterable<V>} values
* @property {() => Iterable<[K,V]>} entries
* @property {() => number} getSize
* @property {() => void} clear
*/
// /////////////////////////////////////////////////////////////////////////////
/**
* @template K,V
* @callback MakeLegacyWeakMap
* @param {string} [keyName='key'] - the column name for the key
* @returns {LegacyWeakMap}
* @typedef {-1 | 0 | 1} RankComparison
* The result of a `RankCompare` function that defines a rank-order, i.e.,
* a total preorder in which different elements are always comparable but
* can be tied for the same rank. See `RankCompare`.
*/
// /////////////////////////////////////////////////////////////////////////////
/**
* @callback CompareRank
* @callback RankCompare
* Returns `-1`, `0`, or `1` depending on whether the rank of `left`

@@ -202,9 +281,9 @@ * is before, tied-with, or after the rank of `right`.

* This comparison function is valid as argument to
* `Array.prototype.sort`. This is often described as a "total order"
* but, depending on your definitions, this is technically incorrect
* because it may return `0` to indicate that two distinguishable elements,
* like `-0` and `0`, are tied, i.e., are in the same equivalence class
* as far as this ordering is concerned. If each such equivalence class is
* `Array.prototype.sort`. This is sometimes described as a "total order"
* but, depending on your definitions, this is technically incorrect because
* it may return `0` to indicate that two distinguishable elements such as
* `-0` and `0` are tied (i.e., are in the same equivalence class
* for the purposes of this ordering). If each such equivalence class is
* a *rank* and ranks are disjoint, then this "rank order" is a
* total order among these ranks. In mathematics this goes by several
* true total order over these ranks. In mathematics this goes by several
* other names such as "total preorder".

@@ -218,13 +297,97 @@ *

* lookups based on those comparisons. For example, in order to get a total
* order among ranks, we put `NaN` after all other JavaScript "number" values.
* But otherwise, we order JavaScript numbers by magnitude,
* with `-0` tied with `0`. A semantically useful ordering of JavaScript number
* values, i.e., IEEE floating point values, would compare magnitudes, and
* so agree with the rank ordering everywhere except `NaN`. An array sorted by
* rank would enable range queries by magnitude.
* order among ranks, we put `NaN` after all other JavaScript "number" values
* (i.e., IEEE 754 floating-point values). But otherwise, we rank JavaScript
* numbers by signed magnitude, with `0` and `-0` tied. A semantically useful
* ordering would also compare magnitudes, and so agree with the rank ordering
* of all values other than `NaN`. An array sorted by rank would enable range
* queries by magnitude.
* @param {Passable} left
* @param {Passable} right
* @returns {-1 | 0 | 1}
* @returns {RankComparison}
*/
/**
* @typedef {RankCompare} FullCompare
* A `FullCompare` function satisfies all the invariants stated below for
* `RankCompare`'s relation with KeyCompare.
* In addition, its equality is as precise as the `KeyCompare`
* comparison defined below, in that, for all Keys `x` and `y`,
* `FullCompare(x, y) === 0` iff `KeyCompare(x, y) === 0`.
*
* For non-keys a `FullCompare` should be exactly as imprecise as
* `RankCompare`. For example, both will treat all errors as in the same
* equivalence class. Both will treat all promises as in the same
* equivalence class. Both will order taggeds the same way, which is admittedly
* weird, as some taggeds will be considered keys and other taggeds will be
* considered non-keys.
*/
/**
* @typedef {Object} RankComparatorKit
* @property {RankCompare} comparator
* @property {RankCompare} antiComparator
*/
/**
* @typedef {Object} FullComparatorKit
* @property {FullCompare} comparator
* @property {FullCompare} antiComparator
*/
/**
* @typedef {-1 | 0 | 1 | NaN} KeyComparison
* The result of a `KeyCompare` function that defines a meaningful
* and meaningfully precise partial order of `Key` values. See `KeyCompare`.
*/
/**
* @callback KeyCompare
* `compareKeys` implements a partial order over keys. As with the
* rank ordering produced by `compareRank`, -1, 0, and 1 mean
* "less than", "equivalent to", and "greater than" respectively.
* NaN means "incomparable" --- the first key is not less, equivalent,
 * or greater than the second. For example, the subset relation over sets is
* a partial order.
*
* By using NaN for "incomparable", the normal equivalence for using
* the return value in a comparison is preserved.
* `compareKeys(left, right) >= 0` iff `left` is greater than or
* equivalent to `right` in the partial ordering.
*
* Key order (a partial order) and rank order (a total preorder) are
* co-designed so that we store passables in rank order and index into them
* with keys for key-based queries. To keep these distinct, when speaking
* informally about rank, we talk about "earlier" and "later". When speaking
* informally about keys, we talk about "smaller" and "bigger".
*
* In both orders, the return-0 case defines
* an equivalence class, i.e., those that are tied for the same place in the
* order. The global invariant that we need between the two orders is that the
* key order equivalence class is always at least as precise as the
* rank order equivalence class. IOW, if `compareKeys(X,Y) === 0` then
* `compareRank(X,Y) === 0`. But not vice versa. For example, two different
* remotables are the same rank but incomparable as keys.
*
* A further invariant is if `compareKeys(X,Y) < 0` then
* `compareRank(X,Y) < 0`, i.e., if X is smaller than Y in key order, then X
* must be earlier than Y in rank order. But not vice versa.
* X can be equivalent to or earlier than Y in rank order and still be
* incomparable with Y in key order. For example, the record `{b: 3, a: 5}` is
* earlier than the record `{b: 5, a: 3}` in rank order but they are
* incomparable as keys. And two distinct remotables such as `Far('X', {})` and
* `Far('Y', {})` are equivalent in rank order but incomparable as keys.
*
* This lets us translate a range search over the
* partial key order into a range search over rank order followed by filtering
* out those that don't match. To get this effect, we store the elements of
* a set in an array sorted in reverse rank order, from later to earlier.
* Combined with our lexicographic comparison of arrays, if set X is a subset
 * of set Y then the array representing set X will be an earlier rank than the
* array representing set Y.
*
* @param {Key} left
* @param {Key} right
* @returns {KeyComparison}
*/
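
A sketch of the key-order/rank-order relationship described above, with hypothetical values; `compareKeys` is the KeyCompare this package exports from its key-comparison module:

import { Far } from '@endo/marshal';
import { compareKeys, compareRank } from '@agoric/store';

compareKeys(2n, 3n); // -1, and compareRank(2n, 3n) is also -1
compareKeys(2n, 'x'); // NaN: different passStyles are incomparable as keys...
compareRank(2n, 'x'); // ...but still rank-comparable (never NaN)

const x = Far('x', {});
const y = Far('y', {});
compareKeys(x, y); // NaN: distinct remotables are incomparable as keys
compareRank(x, y); // 0: yet tied for the same rank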
// ///////////////////// Should be internal only types /////////////////////////

@@ -255,3 +418,3 @@

* @param {Passable[]} sorted
* @param {CompareRank} compare
* @param {RankCompare} compare
* @param {RankCover} rankCover

@@ -291,4 +454,7 @@ * @returns {IndexCover}

* @callback KeyToDBKey
* If this key can be encoded as a DBKey string which sorts correctly,
* return that string. Else return `undefined`. For example, a scalar-only
* encodeKey would return `undefined` for all non-scalar keys.
* @param {Passable} key
* @returns {string}
* @returns {string=}
*/

@@ -304,4 +470,98 @@

/**
* @typedef {Object} PatternMatcher
* @typedef {Object} MatcherNamespace
* @property {() => Matcher} any
* Matches any passable
* @property {(...patts: Pattern[]) => Matcher} and
* Only if it matches all the sub-patterns
* @property {(...patts: Pattern[]) => Matcher} or
* Only if it matches at least one subPattern
* @property {(subPatt: Pattern) => Matcher} not
* Only if it does not match the sub-pattern
*
* @property {() => Matcher} scalar
* The scalars are the primitive values and Remotables.
* All scalars are keys.
* @property {() => Matcher} key
 * Can be in a CopySet or CopyBag, or be a key in a CopyMap.
 * (Will eventually be able to be a key in a MapStore.)
* All keys are patterns that match only themselves.
* @property {() => Matcher} pattern
 * If it matches M.pattern(), then it is itself a pattern used
* to match other passables. A pattern cannot contain errors
* or promises, as these are not stable enough to usefully match.
* @property {(kind: string) => Matcher} kind
* @property {() => Matcher} boolean
* @property {() => Matcher} number Only floating point numbers
* @property {() => Matcher} bigint
* @property {() => Matcher} nat
* @property {() => Matcher} string
* @property {() => Matcher} symbol
* Only registered and well-known symbols are passable
* @property {() => Matcher} record A CopyRecord
* @property {() => Matcher} array A CopyArray
* @property {() => Matcher} set A CopySet
* @property {() => Matcher} bag A CopyBag
* @property {() => Matcher} map A CopyMap
* @property {() => Matcher} remotable A far object or its remote presence
* @property {() => Matcher} error
 * Error objects are passable, but are neither keys nor patterns.
* They do not have a useful identity.
* @property {() => Matcher} promise
 * Promises are passable, but are neither keys nor patterns.
* They do not have a useful identity.
* @property {() => Matcher} undefined
* All keys including `undefined` are already valid patterns and
* so can validly represent themselves. But optional pattern arguments
* `(pattern = undefined) => ...`
* cannot distinguish between `undefined` passed as a pattern vs
* omission of the argument. It will interpret the first as the
* second. Thus, when a passed pattern does not also need to be a key,
* we recommend passing `M.undefined()` instead of `undefined`.
*
* @property {() => null} null
*
* @property {(rightOperand :Key) => Matcher} lt
* Matches if < the right operand by compareKeys
* @property {(rightOperand :Key) => Matcher} lte
* Matches if <= the right operand by compareKeys
* @property {(key :Key) => Matcher} eq
* @property {(key :Key) => Matcher} neq
* @property {(rightOperand :Key) => Matcher} gte
* Matches if >= the right operand by compareKeys
* @property {(rightOperand :Key) => Matcher} gt
* Matches if > the right operand by compareKeys
*
* @property {(subPatt?: Pattern) => Matcher} arrayOf
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => Matcher} recordOf
* @property {(keyPatt?: Pattern) => Matcher} setOf
* @property {(keyPatt?: Pattern) => Matcher} bagOf
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => Matcher} mapOf
* @property {(
* base: CopyRecord<*> | CopyArray<*>,
* rest?: Pattern
* ) => Matcher} split
* An array or record is split into the first part that matches the
* base pattern, and the remainder, which matches against the optional
* rest pattern if present.
* @property {(
* base: CopyRecord<*> | CopyArray<*>,
* rest?: Pattern
* ) => Matcher} partial
* An array or record is split into the first part that matches the
* base pattern, and the remainder, which matches against the optional
* rest pattern if present.
* Unlike `M.split`, `M.partial` ignores properties on the base
* pattern that are not present on the specimen.
*/
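
A short sketch contrasting `M.split` and `M.partial`, using hypothetical record shapes and assuming a SES `harden` global:

import { matches, M } from '@agoric/store';

const specimen = harden({ want: 'Moola', exit: 'onDemand' });

// M.split: every base property must be present and match; the remainder is
// checked against the rest pattern when one is supplied.
matches(specimen, M.split(harden({ want: 'Moola' }), M.record())); // true
matches(specimen, M.split(harden({ want: 'Moola', give: 'Bucks' }))); // false: `give` is missing

// M.partial: base properties absent from the specimen are ignored.
matches(specimen, M.partial(harden({ want: 'Moola', give: 'Bucks' }))); // true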
/**
* @typedef {Object} PatternKit
* @property {(specimen: Passable, patt: Pattern) => boolean} matches
* @property {(specimen: Passable, patt: Pattern) => void} fit
* @property {(patt: Pattern) => void} assertPattern
* @property {(patt: Passable) => boolean} isPattern
* @property {(patt: Pattern) => void} assertKeyPattern
* @property {(patt: Passable) => boolean} isKeyPattern
* @property {GetRankCover} getRankCover
* @property {MatcherNamespace} M
*/

@@ -312,3 +572,3 @@

// TODO
// The following commented out type should be in internal-types.js, since the
// The following type should be in internal-types.js, since the
// `MatchHelper` type is purely internal to this package. However,

@@ -315,0 +575,0 @@ // in order to get the governance and solo packages both to pass lint,
