@agoric/store - npm Package Compare versions

Comparing version 0.6.9-dev-2c92c13.0 to 0.6.9-dev-3464e52.0

src/keys/merge-set-operators.js (14 changes; diff not expanded in this comparison)

package.json
{
"name": "@agoric/store",
"version": "0.6.9-dev-2c92c13.0+2c92c13",
"version": "0.6.9-dev-3464e52.0+3464e52",
"description": "Wrapper for JavaScript map",

@@ -34,9 +34,9 @@ "type": "module",

"dependencies": {
"@agoric/assert": "0.3.16-dev-2c92c13.0+2c92c13",
"@agoric/eventual-send": "0.14.1-dev-2c92c13.0+2c92c13",
"@agoric/marshal": "0.5.1-dev-2c92c13.0+2c92c13",
"@agoric/promise-kit": "0.2.30-dev-2c92c13.0+2c92c13"
"@agoric/assert": "0.3.16-dev-3464e52.0+3464e52",
"@agoric/eventual-send": "0.14.1-dev-3464e52.0+3464e52",
"@agoric/marshal": "0.5.1-dev-3464e52.0+3464e52",
"@agoric/promise-kit": "0.2.30-dev-3464e52.0+3464e52"
},
"devDependencies": {
"@agoric/swingset-vat": "0.24.2-dev-2c92c13.0+2c92c13",
"@agoric/swingset-vat": "0.24.2-dev-3464e52.0+3464e52",
"ava": "^3.12.1"

@@ -70,3 +70,3 @@ },

},
"gitHead": "2c92c138d3bf83846db39856ab7774ef49d0d9f8"
"gitHead": "3464e52160772a220a86f3093d56b69ff7d2fe0f"
}
// @ts-check
export { isKey, assertKey } from './keys/checkKey.js';
export { keyLT, keyLTE, keyEQ, keyGTE, keyGT } from './keys/compareKeys.js';
export {
compareKeys,
keyLT,
keyLTE,
keyEQ,
keyGTE,
keyGT,
} from './keys/compareKeys.js';
export { makeSetOps } from './keys/merge-set-operators.js';
export { makePatternKit } from './patterns/patternMatchers.js';
export { compareRank } from './patterns/rankOrder.js';
export {
M,
isPattern,
assertPattern,
matches,
fit,
} from './patterns/patternMatchers.js';
export {
compareRank,
isRankSorted,
sortByRank,
makeFullOrderComparatorKit,
} from './patterns/rankOrder.js';

@@ -28,1 +47,3 @@ export { makeScalarWeakSetStore } from './stores/scalarWeakSetStore.js';

export { makeLegacyWeakMap } from './legacy/legacyWeakMap.js';
export { makeCopySet, getCopySetKeys } from './keys/copySet.js';

@@ -9,3 +9,2 @@ // @ts-check

assertPassable,
everyPassableChild,
getTag,

@@ -98,6 +97,9 @@ isObject,

switch (passStyle) {
case 'copyRecord':
case 'copyRecord': {
// A copyRecord is a key iff all its children are keys
return Object.values(val).every(checkIt);
}
case 'copyArray': {
// A copyRecord or copyArray is a key iff all its children are keys
return everyPassableChild(val, checkIt);
// A copyArray is a key iff all its children are keys
return val.every(checkIt);
}

@@ -104,0 +106,0 @@ case 'tagged': {

@@ -13,51 +13,4 @@ // @ts-check

/**
* `compareKeys` implements a partial order over keys. As with the
* rank ordering produced by `compareRank`, -1, 0, and 1 mean
* "less than", "equivalent to", and "greater than" respectively.
* NaN means "incomparable" --- the first key is not less, equivalent,
* or greater than the second. For example, subsets over sets is
* a partial order.
*
* By using NaN for "incomparable", the normal equivalence for using
* the return value in a comparison is preserved.
* `compareKeys(left, right) >= 0` iff `left` is greater than or
* equivalent to `right` in the partial ordering.
* `compareKeys` is currently not exported directly, so its
* bizarre but convenient return type is not exposed.
*
* Key order (a partial order) and rank order (a full order) are
* co-designed so that we store passables in rank order and index into them
* with keys for key-based queries. To keep these distinct, when speaking
* informally about rank, we talk about "earlier" and "later". When speaking
* informally about keys, we talk about "smaller" and "bigger".
*
* In both orders, the return-0 case defines
* an equivalence class, i.e., those that are tied for the same place in the
* order. The global invariant that we need between the two orders is that the
* key order equivalence class is always at least as precise as the
* rank order equivalence class. IOW, if `compareKeys(X,Y) === 0` then
* `compareRank(X,Y) === 0`. But not vice versa. For example, two different
* remotables are the same rank but incommensurate as keys.
*
* A further invariant is if `compareKeys(X,Y) < 0` then
* `compareRank(X,Y) <= 0`, i.e., if X is smaller than Y in key order, then X
* must be at least as early as Y in rank order. But not vice versa.
* X can be earlier than Y in rank order and still be incommensurate with Y in
* key order. For example, the record `{b: 3, a: 5}` is earlier than but
* incommensurate with the record `{b: 5, a: 3}`.
*
* This lets us translate a range search over the
* partial key order into a range search over rank order followed by filtering
* out those that don't match. To get this effect, we store the elements of
* a set in an array sorted in reverse rank order, from later to earlier.
* Combined with our lexicographic comparison of arrays, if set X is a subset
* of set Y then the array representing set X will be an earlier rank than the
* array representing set Y.
*
* @param {Key} left
* @param {Key} right
* @returns {-1 | 0 | 1 | NaN}
*/
const compareKeys = (left, right) => {
/** @type {KeyCompare} */
export const compareKeys = (left, right) => {
assertKey(left);

@@ -64,0 +17,0 @@ assertKey(right);
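
To make the removed comment's example concrete, here is a hedged illustration (not part of this diff) using the `compareKeys` and `compareRank` exports that this version adds to the package index. `harden` is assumed to be the SES global, as in the surrounding code, and the pointwise copyRecord comparison is an assumption about the unshown body of `compareKeys`.

import { compareKeys, compareRank } from '@agoric/store';

// Rank order is a total order, so these two records are comparable:
compareRank(harden({ b: 3, a: 5 }), harden({ b: 5, a: 3 })); // -1: earlier in rank order

// Key order is only partial: the same two records are incommensurate as keys:
compareKeys(harden({ b: 3, a: 5 }), harden({ b: 5, a: 3 })); // NaN

// With the same property names and pointwise-smaller values, key order agrees:
compareKeys(harden({ a: 1, b: 2 }), harden({ a: 1, b: 3 })); // -1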

@@ -5,2 +5,3 @@ // @ts-check

assertChecker,
Far,
getTag,

@@ -10,3 +11,3 @@ makeTagged,

} from '@agoric/marshal';
import { compareRank, sortByRank } from '../patterns/rankOrder.js';
import { compareAntiRank, sortByRank } from '../patterns/rankOrder.js';
import { checkCopySetKeys } from './copySet.js';

@@ -20,3 +21,3 @@

/** @type WeakSet<CopyMap> */
/** @type WeakSet<CopyMap<any,any>> */
const copyMapMemo = new WeakSet();

@@ -62,33 +63,103 @@

/**
* @callback IsCopyMap
* @param {Passable} m
* @returns {m is CopyMap<Key, Passable>}
*/
/** @type {IsCopyMap} */
export const isCopyMap = m => checkCopyMap(m);
harden(isCopyMap);
export const assertCopyMap = m => checkCopyMap(m, assertChecker);
/**
* @callback AssertCopyMap
* @param {Passable} m
* @returns {asserts m is CopyMap<Key, Passable>}
*/
/** @type {AssertCopyMap} */
export const assertCopyMap = m => {
checkCopyMap(m, assertChecker);
};
harden(assertCopyMap);
/**
* @param {CopyMap} m
* @param {(v: Passable, i: number) => boolean} fn
* @returns {boolean}
* @template K,V
* @param {CopyMap<K,V>} m
* @returns {K[]}
*/
export const everyCopyMapKey = (m, fn) => {
export const getCopyMapKeys = m => {
assertCopyMap(m);
return m.payload.keys.every((v, i) => fn(v, i));
return m.payload.keys;
};
harden(getCopyMapKeys);
/**
* @template K,V
* @param {CopyMap<K,V>} m
* @returns {V[]}
*/
export const getCopyMapValues = m => {
assertCopyMap(m);
return m.payload.values;
};
harden(getCopyMapValues);
/**
* @template K,V
* @param {CopyMap<K,V>} m
* @returns {Iterable<[K,V]>}
*/
export const getCopyMapEntries = m => {
assertCopyMap(m);
const {
payload: { keys, values },
} = m;
const { length } = keys;
return Far('CopyMap entries iterable', {
[Symbol.iterator]: () => {
let i = 0;
return Far('CopyMap entries iterator', {
next: () => {
/** @type {IteratorResult<[K,V],void>} */
let result;
if (i < length) {
result = harden({ done: false, value: [keys[i], values[i]] });
i += 1;
return result;
} else {
result = harden({ done: true, value: undefined });
}
return result;
},
});
},
});
};
harden(getCopyMapEntries);
/**
* @template K,V
* @param {CopyMap<K,V>} m
* @param {(key: K, index: number) => boolean} fn
* @returns {boolean}
*/
export const everyCopyMapKey = (m, fn) =>
getCopyMapKeys(m).every((key, index) => fn(key, index));
harden(everyCopyMapKey);
/**
* @param {CopyMap} m
* @param {(v: Passable, i: number) => boolean} fn
* @template K,V
* @param {CopyMap<K,V>} m
* @param {(value: V, index: number) => boolean} fn
* @returns {boolean}
*/
export const everyCopyMapValue = (m, fn) => {
assertCopyMap(m);
return m.payload.values.every((v, i) => fn(v, i));
};
export const everyCopyMapValue = (m, fn) =>
getCopyMapValues(m).every((value, index) => fn(value, index));
harden(everyCopyMapValue);
/**
* @param {CopyMap} m
* @returns {CopySet}
* @template K,V
* @param {CopyMap<K,V>} m
* @returns {CopySet<K>}
*/

@@ -101,4 +172,5 @@ export const copyMapKeySet = m =>

/**
* @param {Iterable<[Passable, Passable]>} entries
* @returns {CopyMap}
* @template K,V
* @param {Iterable<[K, V]>} entries
* @returns {CopyMap<K,V>}
*/

@@ -109,7 +181,8 @@ export const makeCopyMap = entries => {

// organized by those keys. Also, among values associated with
// keys in the same equivalence class, those are rank sorted. This
// keys in the same equivalence class, those are rank sorted.
// TODO This
// could solve the copyMap cover issue explained in patternMatchers.js.
// But only if we include this criteria in our validation of copyMaps,
// which we currently do not.
const sortedEntries = [...sortByRank(entries, compareRank)].reverse();
const sortedEntries = sortByRank(entries, compareAntiRank);
const keys = sortedEntries.map(([k, _v]) => k);

@@ -116,0 +189,0 @@ const values = sortedEntries.map(([_k, v]) => v);
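
A brief usage sketch of the CopyMap accessors added above (illustrative only; the relative import path is assumed and `harden` is the SES global):

import { makeCopyMap, getCopyMapKeys, getCopyMapEntries } from './keys/copyMap.js';

const m = makeCopyMap(harden([['a', 1], ['b', 2]]));
getCopyMapKeys(m); // ['b', 'a']: keys are stored in anti-rank (reverse rank) order
for (const [k, v] of getCopyMapEntries(m)) {
  console.log(k, v); // 'b' 2, then 'a' 1, the same stored order as the keys
}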

@@ -10,4 +10,5 @@ // @ts-check

import {
compareRank,
compareAntiRank,
isRankSorted,
makeFullOrderComparatorKit,
sortByRank,

@@ -22,2 +23,31 @@ } from '../patterns/rankOrder.js';

/**
* @param {FullCompare} fullCompare
* @returns {(keys: Key[], check?: Checker) => boolean}
*/
export const makeCheckNoDuplicates = fullCompare => (keys, check = x => x) => {
keys = sortByRank(keys, fullCompare);
const { length } = keys;
for (let i = 1; i < length; i += 1) {
const k0 = keys[i - 1];
const k1 = keys[i];
if (fullCompare(k0, k1) === 0) {
return check(false, X`value has duplicates: ${k0}`);
}
}
return true;
};
/**
* TODO SECURITY HAZARD: https://github.com/Agoric/agoric-sdk/issues/4261
* This creates mutable static state that should be unobservable, since it
* is only used by checkNoDuplicates in an internal sort algorithm whose
* result is tested and then dropped. However, that has a bad performance
* cost. It is not yet clear how to fix this without opening a
* communications channel.
*/
const fullCompare = makeFullOrderComparatorKit(true).antiComparator;
const checkNoDuplicates = makeCheckNoDuplicates(fullCompare);
/**
* @param {Passable[]} keys

@@ -34,4 +64,3 @@ * @param {Checker=} check

}
const reverseKeys = harden([...keys].reverse());
if (!isRankSorted(reverseKeys, compareRank)) {
if (!isRankSorted(keys, compareAntiRank)) {
return check(

@@ -42,7 +71,7 @@ false,

}
return true;
return checkNoDuplicates(keys, check);
};
harden(checkCopySetKeys);
/** @type WeakSet<CopySet> */
/** @type WeakSet<CopySet<Key>> */
const copySetMemo = new WeakSet();

@@ -71,25 +100,55 @@

/**
* @callback IsCopySet
* @param {Passable} s
* @returns {s is CopySet<Key>}
*/
/** @type {IsCopySet} */
export const isCopySet = s => checkCopySet(s);
harden(isCopySet);
export const assertCopySet = s => checkCopySet(s, assertChecker);
/**
* @callback AssertCopySet
* @param {Passable} s
* @returns {asserts s is CopySet<Key>}
*/
/** @type {AssertCopySet} */
export const assertCopySet = s => {
checkCopySet(s, assertChecker);
};
harden(assertCopySet);
/**
* @param {CopySet} s
* @param {(v: Passable, i: number) => boolean} fn
* @returns {boolean}
* @template K
* @param {CopySet<K>} s
* @returns {K[]}
*/
export const everyCopySetKey = (s, fn) => {
export const getCopySetKeys = s => {
assertCopySet(s);
return s.payload.every((v, i) => fn(v, i));
return s.payload;
};
harden(getCopySetKeys);
/**
* @template K
* @param {CopySet<K>} s
* @param {(key: K, index: number) => boolean} fn
* @returns {boolean}
*/
export const everyCopySetKey = (s, fn) =>
getCopySetKeys(s).every((key, index) => fn(key, index));
harden(everyCopySetKey);
/**
* @param {Iterable<Passable>} elements
* @returns {CopySet}
* @template K
* @param {Iterable<K>} elements
* @returns {CopySet<K>}
*/
export const makeCopySet = elements =>
makeTagged('copySet', [...sortByRank(elements, compareRank)].reverse());
export const makeCopySet = elements => {
const result = makeTagged('copySet', sortByRank(elements, compareAntiRank));
assertCopySet(result);
return result;
};
harden(makeCopySet);
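
For example (a sketch using the `makeCopySet` and `getCopySetKeys` exports that this version adds to the package index; `harden` is the SES global):

import { makeCopySet, getCopySetKeys } from '@agoric/store';

const s = makeCopySet(harden(['a', 'c', 'b']));
getCopySetKeys(s); // ['c', 'b', 'a']: elements kept in anti-rank order
makeCopySet(harden(['a', 'a'])); // expected to throw: checkCopySetKeys now rejects duplicates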

@@ -22,3 +22,3 @@ // @ts-check

* comment explaining the problem inhibiting conversion to the new
* system. Some of these problems as if this writing:
* system. Some of these problems as of this writing:
* * A promiseKit used as a value, even though a promiseKit is not

@@ -33,3 +33,5 @@ * a passable. Solutions are to make it a passable, or to convert

* @deprecated switch to ScalarMap if possible, Map otherwise
* @template K,V
* @param {string} [keyName='key'] - the column name for the key
* @returns {LegacyMap<K,V>}
*/

@@ -42,3 +44,3 @@ export const makeLegacyMap = (keyName = 'key') => {

assert(m.has(key), X`${q(keyName)} not found: ${key}`);
const legacyMap = harden({
return harden({
has: key => {

@@ -66,8 +68,9 @@ // Check if a key exists. The key can be any JavaScript value,

},
keys: () => [...m.keys()],
values: () => [...m.values()],
entries: () => [...m.entries()],
keys: () => m.keys(),
values: () => m.values(),
entries: () => m.entries(),
getSize: () => m.size,
clear: () => m.clear(),
});
return legacyMap;
};
harden(makeLegacyMap);

@@ -10,5 +10,8 @@ // @ts-check

* @deprecated switch to ScalarWeakMap if possible, WeakMap otherwise
* @template K,V
* @param {string} [keyName='key'] - the column name for the key
* @returns {LegacyWeakMap<K,V>}
*/
export const makeLegacyWeakMap = (keyName = 'key') => {
/** @type {WeakMap<K & Object,V>} */
const wm = new WeakMap();

@@ -19,3 +22,3 @@ const assertKeyDoesNotExist = key =>

assert(wm.has(key), X`${q(keyName)} not found: ${key}`);
const legacyWeakMap = harden({
return harden({
has: key => {

@@ -33,3 +36,4 @@ // Check if a key exists. The key can be any JavaScript value,

assertKeyExists(key);
return wm.get(key);
// How to tell typescript I believe the `get` will succeed.
return /** @type {V} */ (wm.get(key));
},

@@ -45,4 +49,3 @@ set: (key, value) => {

});
return legacyWeakMap;
};
harden(makeLegacyWeakMap);

@@ -5,3 +5,2 @@ // @ts-check

assertChecker,
everyPassableChild,
Far,

@@ -11,4 +10,6 @@ getTag,

passStyleOf,
hasOwnPropertyOf,
} from '@agoric/marshal';
import {
compareAntiRank,
compareRank,

@@ -41,5 +42,5 @@ getPassStyleCover,

/**
* @returns {Object}
* @returns {PatternKit}
*/
export const makePatternKit = () => {
const makePatternKit = () => {
/**

@@ -88,7 +89,11 @@ * If this is a recognized match tag, return the MatchHelper.

switch (passStyle) {
case 'copyRecord':
case 'copyRecord': {
// A copyRecord is a pattern iff all its children are
// patterns
return Object.values(patt).every(checkIt);
}
case 'copyArray': {
// A copyRecord or copyArray is a pattern iff all its children are
// A copyArray is a pattern iff all its children are
// patterns
return everyPassableChild(patt, checkIt);
return patt.every(checkIt);
}

@@ -250,5 +255,6 @@ case 'tagged': {

keyEQ(specimen, patt),
X`${specimen} - Must be equivalent to the literal pattern: ${patt}`,
X`${specimen} - Must be equivalent to: ${patt}`,
);
}
assertPattern(patt);
const specStyle = passStyleOf(specimen);

@@ -268,8 +274,6 @@ const pattStyle = passStyleOf(patt);

false,
X`Array ${specimen} - must be as long as copyArray pattern: ${patt}`,
X`Array ${specimen} - Must be as long as copyArray pattern: ${patt}`,
);
}
return everyPassableChild(patt, (p, i) =>
checkMatches(specimen[i], p, check),
);
return patt.every((p, i) => checkMatches(specimen[i], p, check));
}

@@ -283,12 +287,4 @@ case 'copyRecord': {

}
const specNames = harden(
ownKeys(specimen)
.sort()
.reverse(),
);
const pattNames = harden(
ownKeys(patt)
.sort()
.reverse(),
);
const specNames = harden(ownKeys(specimen).sort(compareAntiRank));
const pattNames = harden(ownKeys(patt).sort(compareAntiRank));
if (!keyEQ(specNames, pattNames)) {

@@ -379,3 +375,3 @@ return check(

*/
const assertMatches = (specimen, patt) => {
const fit = (specimen, patt) => {
checkMatches(specimen, patt, assertChecker);

@@ -390,3 +386,5 @@ };

const encoded = encodeKey(patt);
return [encoded, `${encoded}~`];
if (encoded !== undefined) {
return [encoded, `${encoded}~`];
}
}

@@ -396,11 +394,18 @@ const passStyle = passStyleOf(patt);

case 'copyArray': {
const rankCovers = patt.map(p => getRankCover(p, encodeKey));
return harden([
rankCovers.map(([left, _right]) => left),
rankCovers.map(([_left, right]) => right),
]);
// XXX this doesn't get along with the world of cover === pair of
// strings. In the meantime, fall through to the default which
// returns a cover that covers all copyArrays.
//
// const rankCovers = patt.map(p => getRankCover(p, encodeKey));
// return harden([
// rankCovers.map(([left, _right]) => left),
// rankCovers.map(([_left, right]) => right),
// ]);
break;
}
case 'copyRecord': {
// XXX this doesn't get along with the world of cover === pair of
// strings
// strings. In the meantime, fall through to the default which
// returns a cover that covers all copyRecords.
//
// const pattKeys = ownKeys(patt);

@@ -414,3 +419,3 @@ // const pattEntries = harden(pattKeys.map(key => [key, patt[key]]));

// ]);
assert.fail('not supporting copyRecord patterns yet'); // XXX TEMP
break;
}

@@ -421,2 +426,4 @@ case 'tagged': {

if (matchHelper) {
// Buried here is the important case, where we process
// the various patternNodes
return matchHelper.getRankCover(patt.payload, encodeKey);

@@ -427,3 +434,5 @@ }

// XXX this doesn't get along with the world of cover === pair of
// strings
// strings. In the meantime, fall through to the default which
// returns a cover that covers all copySets.
//
// // Should already be validated by checkPattern. But because this

@@ -444,27 +453,32 @@ // // is a check that may loosen over time, we also assert

// ]);
assert.fail('not supporting copySet patterns yet'); // XXX TEMP
break;
}
case 'copyMap': {
// A matching copyMap must have the same keys, or at most one
// non-key key pattern. Thus we can assume that value positions
// match 1-to-1.
// XXX this doesn't get along with the world of cover === pair of
// strings. In the meantime, fall through to the default which
// returns a cover that covers all copyMaps.
//
// TODO I may be overlooking that the less precise rankOrder
// equivalence class may cause values to be out of order,
// making this rankCover not actually cover. In that case, for
// all the values for keys at the same rank, we should union their
// rank covers. TODO POSSIBLE SILENT CORRECTNESS BUG
//
// If this is a bug, it probably affects the getRankCover
// cases if matchLTEHelper and matchGTEHelper on copyMap as
// well. See makeCopyMap for an idea on fixing
// this bug.
const [leftPayloadLimit, rightPayloadLimit] = getRankCover(
patt.payload,
encodeKey,
);
return harden([
makeTagged('copyMap', leftPayloadLimit),
makeTagged('copyMap', rightPayloadLimit),
]);
// // A matching copyMap must have the same keys, or at most one
// // non-key key pattern. Thus we can assume that value positions
// // match 1-to-1.
// //
// // TODO I may be overlooking that the less precise rankOrder
// // equivalence class may cause values to be out of order,
// // making this rankCover not actually cover. In that case, for
// // all the values for keys at the same rank, we should union their
// // rank covers. TODO POSSIBLE SILENT CORRECTNESS BUG
// //
// // If this is a bug, it probably affects the getRankCover
// // cases of matchLTEHelper and matchGTEHelper on copyMap as
// // well. See makeCopyMap for an idea on fixing
// // this bug.
// const [leftPayloadLimit, rightPayloadLimit] = getRankCover(
// patt.payload,
// encodeKey,
// );
// return harden([
// makeTagged('copyMap', leftPayloadLimit),
// makeTagged('copyMap', rightPayloadLimit),
// ]);
break;
}

@@ -488,28 +502,12 @@ default: {

const matchAnyHelper = Far('M.any helper', {
checkIsMatcherPayload: (matcherPayload, check = x => x) =>
check(
matcherPayload === undefined,
X`An M.any matcher's .payload must be undefined: ${matcherPayload}`,
),
checkMatches: (_specimen, _matcherPayload, _check = x => x) => true,
getRankCover: (_matchPayload, _encodeKey) => ['', '{'],
checkKeyPattern: (_matcherPayload, _check = x => x) => true,
});
/** @type {MatchHelper} */
const matchScalarHelper = Far('M.scalar helper', {
checkIsMatcherPayload: (matcherPayload, check = x => x) =>
check(
matcherPayload === undefined,
X`An M.scalar matcher's .payload must be undefined: ${matcherPayload}`,
X`Payload must be undefined: ${matcherPayload}`,
),
checkMatches: (specimen, _matcherPayload, check = x => x) =>
checkScalarKey(specimen, check),
getRankCover: (_matchPayload, _encodeKey) => ['', '{'],
getRankCover: (_matchPayload, _encodeKey) => ['a', 'z~'],
checkKeyPattern: (_matcherPayload, _check = x => x) => true,

@@ -519,66 +517,7 @@ });

/** @type {MatchHelper} */
const matchKindHelper = Far('M.kind helper', {
checkIsMatcherPayload: (allegedKeyKind, check = x => x) =>
check(
// We cannot further restrict this to only possible passStyles
// or tags, because we wish to allow matching of tags that we
// don't know ahead of time. Do we need to separate the namespaces?
// TODO are we asking for trouble by lumping passStyles and tags
// together into kinds?
typeof allegedKeyKind === 'string',
X`A kind name must be a string: ${allegedKeyKind}`,
),
checkMatches: (specimen, kind, check = x => x) =>
check(
passStyleOf(specimen) === kind ||
(passStyleOf(specimen) === 'tagged' && getTag(specimen) === kind),
X`${specimen} - Must have passStyle or tag ${q(kind)}`,
),
getRankCover: (kind, _encodeKey) => {
switch (kind) {
case 'copySet': {
// The bounds in the cover are not valid copySets, which is fine.
// They only need to be valid copyTagged that bound all possible
// copySets. Thus, we need to call makeTagged directly, rather
// than using makeCopySet.
return [
makeTagged('copySet', null),
makeTagged('copySet', undefined),
];
}
case 'copyMap': {
// The bounds in the cover are not valid copyMaps, which is fine.
// They only need to be valid copyTagged that bound all possible
// copyMaps.
return [
makeTagged('copyMap', null),
makeTagged('copyMap', undefined),
];
}
default: {
return getPassStyleCover(/** @type {PassStyle} */ (kind));
}
}
const matchAndHelper = Far('match:and helper', {
checkMatches: (specimen, patts, check = x => x) => {
return patts.every(patt => checkMatches(specimen, patt, check));
},
checkKeyPattern: (kind, check = x => x) => {
switch (kind) {
case 'boolean':
case 'number':
case 'bigint':
case 'string':
case 'symbol':
case 'remotable':
case 'undefined':
return true;
default:
return check(false, X`${kind} keys are not supported`);
}
},
});
/** @type {MatchHelper} */
const matchAndHelper = Far('match:and helper', {
checkIsMatcherPayload: (allegedPatts, check = x => x) => {

@@ -590,10 +529,6 @@ const checkIt = patt => checkPattern(patt, check);

X`Needs array of sub-patterns: ${allegedPatts}`,
) && everyPassableChild(allegedPatts, checkIt))
) && allegedPatts.every(checkIt))
);
},
checkMatches: (specimen, patts, check = x => x) => {
return patts.every(patt => checkMatches(specimen, patt, check));
},
getRankCover: (patts, encodeKey) =>

@@ -612,13 +547,18 @@ intersectRankCovers(

const matchOrHelper = Far('match:or helper', {
checkIsMatcherPayload: matchAndHelper.checkIsMatcherPayload,
checkMatches: (specimen, patts, check = x => x) => {
return (
(check(
patts.length >= 1,
const { length } = patts;
if (length === 0) {
return check(
false,
X`${specimen} - no pattern disjuncts to match: ${patts}`,
) && !patts.every(patt => !checkMatches(specimen, patt, check)))
);
);
}
if (patts.some(patt => matches(specimen, patt))) {
return true;
}
return check(false, X`${specimen} - Must match one of ${patts}`);
},
checkIsMatcherPayload: matchAndHelper.checkIsMatcherPayload,
getRankCover: (patts, encodeKey) =>

@@ -637,4 +577,2 @@ unionRankCovers(

const matchNotHelper = Far('match:not helper', {
checkIsMatcherPayload: checkPattern,
checkMatches: (specimen, patt, check = x => x) => {

@@ -651,2 +589,4 @@ if (matches(specimen, patt)) {

checkIsMatcherPayload: checkPattern,
getRankCover: (_patt, _encodeKey) => ['', '{'],

@@ -658,5 +598,91 @@

/** @type {MatchHelper} */
const matchScalarHelper = Far('M.scalar helper', {
checkMatches: (specimen, _matcherPayload, check = x => x) =>
checkScalarKey(specimen, check),
checkIsMatcherPayload: matchAnyHelper.checkIsMatcherPayload,
getRankCover: (_matchPayload, _encodeKey) => ['a', 'z~'],
checkKeyPattern: (_matcherPayload, _check = x => x) => true,
});
/** @type {MatchHelper} */
const matchKeyHelper = Far('M.key helper', {
checkMatches: (specimen, _matcherPayload, check = x => x) =>
checkKey(specimen, check),
checkIsMatcherPayload: matchAnyHelper.checkIsMatcherPayload,
getRankCover: (_matchPayload, _encodeKey) => ['a', 'z~'],
checkKeyPattern: (_matcherPayload, _check = x => x) => true,
});
/** @type {MatchHelper} */
const matchPatternHelper = Far('M.pattern helper', {
checkMatches: (specimen, _matcherPayload, check = x => x) =>
checkPattern(specimen, check),
checkIsMatcherPayload: matchAnyHelper.checkIsMatcherPayload,
getRankCover: (_matchPayload, _encodeKey) => ['a', 'z~'],
checkKeyPattern: (_matcherPayload, _check = x => x) => true,
});
/** @type {MatchHelper} */
const matchKindHelper = Far('M.kind helper', {
checkMatches: (specimen, kind, check = x => x) =>
check(
passStyleOf(specimen) === kind ||
(passStyleOf(specimen) === 'tagged' && getTag(specimen) === kind),
X`${specimen} - Must have passStyle or tag ${q(kind)}`,
),
checkIsMatcherPayload: (allegedKeyKind, check = x => x) =>
check(
// We cannot further restrict this to only possible passStyles
// or tags, because we wish to allow matching of tags that we
// don't know ahead of time. Do we need to separate the namespaces?
// TODO are we asking for trouble by lumping passStyles and tags
// together into kinds?
typeof allegedKeyKind === 'string',
X`A kind name must be a string: ${allegedKeyKind}`,
),
getRankCover: (kind, _encodeKey) => {
let style;
switch (kind) {
case 'copySet':
case 'copyMap': {
style = 'tagged';
break;
}
default: {
style = kind;
break;
}
}
return getPassStyleCover(style);
},
checkKeyPattern: (kind, check = x => x) => {
switch (kind) {
case 'boolean':
case 'number':
case 'bigint':
case 'string':
case 'symbol':
case 'remotable':
case 'undefined':
return true;
default:
return check(false, X`${kind} keys are not supported`);
}
},
});
/** @type {MatchHelper} */
const matchLTEHelper = Far('match:lte helper', {
checkIsMatcherPayload: checkKey,
checkMatches: (specimen, rightOperand, check = x => x) =>

@@ -668,2 +694,4 @@ check(

checkIsMatcherPayload: checkKey,
getRankCover: (rightOperand, encodeKey) => {

@@ -674,50 +702,8 @@ const passStyle = passStyleOf(rightOperand);

// eslint-disable-next-line prefer-const
let [leftBound, _rightBound] = getPassStyleCover(passStyle);
switch (passStyle) {
case 'number': {
if (Number.isNaN(rightOperand)) {
// leftBound = NaN;
leftBound = 'f'; // XXX BOGUS
}
break;
}
case 'copyRecord': {
// XXX this doesn't get along with the world of cover === pair of
// strings
// leftBound = harden(
// fromEntries(entries(rightOperand).map(([k, _v]) => [k, null])),
// );
break;
}
case 'tagged': {
leftBound = makeTagged(getTag(rightOperand), null);
switch (getTag(rightOperand)) {
case 'copyMap': {
const { keys } = rightOperand.payload;
const values = keys.map(_ => null);
// See note in getRankCover for copyMap about why we
// may need to take variable values orders into account
// to be correct.
leftBound = makeTagged('copyMap', harden({ keys, values }));
break;
}
default: {
break;
}
}
break;
}
case 'remotable': {
// This does not make for a tighter rankCover, but if an
// underlying table internally further optimizes, for example with
// an identityHash of a virtual object, then this might
// help it take advantage of that.
leftBound = encodeKey(rightOperand);
break;
}
default: {
break;
}
let [leftBound, rightBound] = getPassStyleCover(passStyle);
const newRightBound = `${encodeKey(rightOperand)}~`;
if (newRightBound !== undefined) {
rightBound = newRightBound;
}
return [leftBound, `${encodeKey(rightOperand)}~`];
return [leftBound, rightBound];
},

@@ -731,4 +717,2 @@

const matchLTHelper = Far('match:lt helper', {
checkIsMatcherPayload: checkKey,
checkMatches: (specimen, rightOperand, check = x => x) =>

@@ -740,2 +724,4 @@ check(

checkIsMatcherPayload: checkKey,
getRankCover: matchLTEHelper.getRankCover,

@@ -749,4 +735,2 @@

const matchGTEHelper = Far('match:gte helper', {
checkIsMatcherPayload: checkKey,
checkMatches: (specimen, rightOperand, check = x => x) =>

@@ -758,2 +742,4 @@ check(

checkIsMatcherPayload: checkKey,
getRankCover: (rightOperand, encodeKey) => {

@@ -764,55 +750,8 @@ const passStyle = passStyleOf(rightOperand);

// eslint-disable-next-line prefer-const
let [_leftBound, rightBound] = getPassStyleCover(passStyle);
switch (passStyle) {
case 'number': {
if (Number.isNaN(rightOperand)) {
// rightBound = NaN;
rightBound = 'f';
} else {
// rightBound = Infinity;
rightBound = 'f~';
}
break;
}
case 'copyRecord': {
// XXX this doesn't get along with the world of cover === pair of
// strings
// rightBound = harden(
// fromEntries(
// entries(rightOperand).map(([k, _v]) => [k, undefined]),
// ),
// );
break;
}
case 'tagged': {
rightBound = makeTagged(getTag(rightOperand), undefined);
switch (getTag(rightOperand)) {
case 'copyMap': {
const { keys } = rightOperand.payload;
const values = keys.map(_ => undefined);
// See note in getRankCover for copyMap about why we
// may need to take variable values orders into account
// to be correct.
rightBound = makeTagged('copyMap', harden({ keys, values }));
break;
}
default: {
break;
}
}
break;
}
case 'remotable': {
// This does not make for a tighter rankCover, but if an
// underlying table internally further optimizes, for example with
// an identityHash of a virtual object, then this might
// help it take advantage of that.
rightBound = encodeKey(rightOperand);
break;
}
default: {
break;
}
let [leftBound, rightBound] = getPassStyleCover(passStyle);
const newLeftBound = encodeKey(rightOperand);
if (newLeftBound !== undefined) {
leftBound = newLeftBound;
}
return [encodeKey(rightOperand), rightBound];
return [leftBound, rightBound];
},

@@ -826,4 +765,2 @@

const matchGTHelper = Far('match:gt helper', {
getMatchTag: () => 'gt',
checkMatches: (specimen, rightOperand, check = x => x) =>

@@ -843,10 +780,220 @@ check(

/** @type {MatchHelper} */
const matchArrayOfHelper = Far('match:arrayOf helper', {
checkMatches: (specimen, subPatt, check = x => x) =>
check(
passStyleOf(specimen) === 'copyArray',
X`${specimen} - Must be an array`,
) && specimen.every(el => checkMatches(el, subPatt, check)),
checkIsMatcherPayload: checkPattern,
getRankCover: () => getPassStyleCover('copyArray'),
checkKeyPattern: (_, check = x => x) =>
check(false, X`Arrays not yet supported as keys`),
});
/** @type {MatchHelper} */
const matchRecordOfHelper = Far('match:recordOf helper', {
checkMatches: (specimen, entryPatt, check = x => x) =>
check(
passStyleOf(specimen) === 'copyRecord',
X`${specimen} - Must be a record`,
) &&
Object.entries(specimen).every(el =>
checkMatches(harden(el), entryPatt, check),
),
checkIsMatcherPayload: (entryPatt, check = x => x) =>
check(
passStyleOf(entryPatt) === 'copyArray' && entryPatt.length === 2,
X`${entryPatt} - Must be an pair of patterns`,
) && checkPattern(entryPatt, check),
getRankCover: _entryPatt => getPassStyleCover('copyRecord'),
checkKeyPattern: (_entryPatt, check = x => x) =>
check(false, X`Records not yet supported as keys`),
});
/** @type {MatchHelper} */
const matchSetOfHelper = Far('match:setOf helper', {
checkMatches: (specimen, keyPatt, check = x => x) =>
check(
passStyleOf(specimen) === 'tagged' && getTag(specimen) === 'copySet',
X`${specimen} - Must be a a CopySet`,
) && specimen.payload.every(el => checkMatches(el, keyPatt)),
checkIsMatcherPayload: checkPattern,
getRankCover: () => getPassStyleCover('tagged'),
checkKeyPattern: (_, check = x => x) =>
check(false, X`CopySets not yet supported as keys`),
});
/** @type {MatchHelper} */
const matchMapOfHelper = Far('match:mapOf helper', {
checkMatches: (specimen, [keyPatt, valuePatt], check = x => x) =>
check(
passStyleOf(specimen) === 'tagged' && getTag(specimen) === 'copyMap',
X`${specimen} - Must be a CopyMap`,
) &&
specimen.payload.keys.every(k => checkMatches(k, keyPatt, check)) &&
specimen.payload.values.every(v => checkMatches(v, valuePatt, check)),
checkIsMatcherPayload: (entryPatt, check = x => x) =>
check(
passStyleOf(entryPatt) === 'copyArray' && entryPatt.length === 2,
X`${entryPatt} - Must be an pair of patterns`,
) && checkPattern(entryPatt, check),
getRankCover: _entryPatt => getPassStyleCover('tagged'),
checkKeyPattern: (_entryPatt, check = x => x) =>
check(false, X`CopyMap not yet supported as keys`),
});
/** @type {MatchHelper} */
const matchSplitHelper = Far('match:split helper', {
checkMatches: (specimen, [base, rest = undefined], check = x => x) => {
const specimenStyle = passStyleOf(specimen);
const baseStyle = passStyleOf(base);
if (specimenStyle !== baseStyle) {
return check(
false,
X`${specimen} - Must have shape of base: ${q(baseStyle)}`,
);
}
let specB;
let specR;
if (baseStyle === 'copyArray') {
const { length: baseLen } = base;
// Frozen below
specB = specimen.slice(0, baseLen);
specR = specimen.slice(baseLen);
} else {
assert(baseStyle === 'copyRecord');
// Not yet frozen! Mutated in place
specB = {};
specR = {};
for (const [name, value] of Object.entries(specimen)) {
if (hasOwnPropertyOf(base, name)) {
specB[name] = value;
} else {
specR[name] = value;
}
}
}
harden(specB);
harden(specR);
return (
(checkMatches(specB, base, check) &&
(rest === undefined ||
check(
matches(specR, rest),
X`Remainder ${specR} - Must match ${rest}`,
)))
);
},
checkIsMatcherPayload: (splitArgs, check = x => x) => {
if (
passStyleOf(splitArgs) === 'copyArray' &&
(splitArgs.length === 1 || splitArgs.length === 2)
) {
const [base, rest = undefined] = splitArgs;
const baseStyle = passStyleOf(base);
if (
isPattern(base) &&
(baseStyle === 'copyArray' || baseStyle === 'copyRecord') &&
(rest === undefined || isPattern(rest))
) {
return true;
}
}
return check(
false,
X`Must be an array of a base structure and an optional rest pattern: ${splitArgs}`,
);
},
getRankCover: ([base, _rest = undefined]) =>
getPassStyleCover(passStyleOf(base)),
checkKeyPattern: ([base, _rest = undefined], check = x => x) =>
check(false, X`${q(passStyleOf(base))} not yet supported as keys`),
});
/** @type {MatchHelper} */
const matchPartialHelper = Far('match:partial helper', {
checkMatches: (specimen, [base, rest = undefined], check = x => x) => {
const specimenStyle = passStyleOf(specimen);
const baseStyle = passStyleOf(base);
if (specimenStyle !== baseStyle) {
return check(
false,
X`${specimen} - Must have shape of base: ${q(baseStyle)}`,
);
}
let specB;
let specR;
let newBase = base;
if (baseStyle === 'copyArray') {
const { length: specLen } = specimen;
const { length: baseLen } = base;
if (specLen < baseLen) {
newBase = harden(base.slice(0, specLen));
}
// Frozen below
specB = specimen.slice(0, baseLen);
specR = specimen.slice(baseLen);
} else {
assert(baseStyle === 'copyRecord');
// Not yet frozen! Mutated in place
specB = {};
specR = {};
newBase = {};
for (const [name, value] of Object.entries(specimen)) {
if (hasOwnPropertyOf(base, name)) {
specB[name] = value;
newBase[name] = base[name];
} else {
specR[name] = value;
}
}
}
harden(specB);
harden(specR);
harden(newBase);
return (
(checkMatches(specB, newBase, check) &&
(rest === undefined ||
check(
matches(specR, rest),
X`Remainder ${specR} - Must match ${rest}`,
)))
);
},
checkIsMatcherPayload: matchSplitHelper.checkIsMatcherPayload,
getRankCover: matchSplitHelper.getRankCover,
checkKeyPattern: matchSplitHelper.checkKeyPattern,
});
/** @type {Record<string, MatchHelper>} */
const HelpersByMatchTag = harden({
'match:any': matchAnyHelper,
'match:scalar': matchScalarHelper,
'match:kind': matchKindHelper,
'match:and': matchAndHelper,
'match:or': matchOrHelper,
'match:not': matchNotHelper,
'match:scalar': matchScalarHelper,
'match:key': matchKeyHelper,
'match:pattern': matchPatternHelper,
'match:kind': matchKindHelper,
'match:lt': matchLTHelper,

@@ -856,44 +1003,67 @@ 'match:lte': matchLTEHelper,

'match:gt': matchGTHelper,
'match:arrayOf': matchArrayOfHelper,
'match:recordOf': matchRecordOfHelper,
'match:setOf': matchSetOfHelper,
'match:mapOf': matchMapOfHelper,
'match:split': matchSplitHelper,
'match:partial': matchPartialHelper,
});
const patt = p => {
assertPattern(p);
return p;
const makeMatcher = (tag, payload) => {
const matcher = makeTagged(tag, payload);
assertPattern(matcher);
return matcher;
};
const makeKindMatcher = kind => makeMatcher('match:kind', kind);
const AnyShape = makeMatcher('match:any', undefined);
const ScalarShape = makeMatcher('match:scalar', undefined);
const KeyShape = makeMatcher('match:key', undefined);
const PatternShape = makeMatcher('match:pattern', undefined);
const BooleanShape = makeKindMatcher('boolean');
const NumberShape = makeKindMatcher('number');
const BigintShape = makeKindMatcher('bigint');
const NatShape = makeMatcher('match:gte', 0n);
const StringShape = makeKindMatcher('string');
const SymbolShape = makeKindMatcher('symbol');
const RecordShape = makeKindMatcher('copyRecord');
const ArrayShape = makeKindMatcher('copyArray');
const SetShape = makeKindMatcher('copySet');
const MapShape = makeKindMatcher('copyMap');
const RemotableShape = makeKindMatcher('remotable');
const ErrorShape = makeKindMatcher('error');
const PromiseShape = makeKindMatcher('promise');
const UndefinedShape = makeKindMatcher('undefined');
/** @type {MatcherNamespace} */
const M = harden({
any: () => patt(makeTagged('match:any', undefined)),
scalar: () => patt(makeTagged('match:scalar', undefined)),
and: (...patts) => patt(makeTagged('match:and', patts)),
or: (...patts) => patt(makeTagged('match:or', patts)),
not: subPatt => patt(makeTagged('match:not', subPatt)),
any: () => AnyShape,
and: (...patts) => makeMatcher('match:and', patts),
or: (...patts) => makeMatcher('match:or', patts),
not: subPatt => makeMatcher('match:not', subPatt),
kind: kind => patt(makeTagged('match:kind', kind)),
boolean: () => M.kind('boolean'),
number: () => M.kind('number'),
bigint: () => M.kind('bigint'),
string: () => M.kind('string'),
symbol: () => M.kind('symbol'),
record: () => M.kind('copyRecord'),
array: () => M.kind('copyArray'),
set: () => M.kind('copySet'),
map: () => M.kind('copyMap'),
remotable: () => M.kind('remotable'),
error: () => M.kind('error'),
promise: () => M.kind('promise'),
/**
* All keys including `undefined` are already valid patterns and
* so can validly represent themselves. But optional pattern arguments
* `(pattern = undefined) => ...`
* cannot distinguish between `undefined` passed as a pattern vs
* omission of the argument. It will interpret the first as the
* second. Thus, when a passed pattern does not also need to be a key,
* we recommend passing `M.undefined()` instead of `undefined`.
*/
undefined: () => M.kind('undefined'),
scalar: () => ScalarShape,
key: () => KeyShape,
pattern: () => PatternShape,
kind: makeKindMatcher,
boolean: () => BooleanShape,
number: () => NumberShape,
bigint: () => BigintShape,
nat: () => NatShape,
string: () => StringShape,
symbol: () => SymbolShape,
record: () => RecordShape,
array: () => ArrayShape,
set: () => SetShape,
map: () => MapShape,
remotable: () => RemotableShape,
error: () => ErrorShape,
promise: () => PromiseShape,
undefined: () => UndefinedShape,
null: () => null,
lt: rightSide => patt(makeTagged('match:lt', rightSide)),
lte: rightSide => patt(makeTagged('match:lte', rightSide)),
lt: rightOperand => makeMatcher('match:lt', rightOperand),
lte: rightOperand => makeMatcher('match:lte', rightOperand),
eq: key => {

@@ -904,10 +1074,15 @@ assertKey(key);

neq: key => M.not(M.eq(key)),
gte: rightSide => patt(makeTagged('match:gte', rightSide)),
gt: rightSide => patt(makeTagged('match:gt', rightSide)),
gte: rightOperand => makeMatcher('match:gte', rightOperand),
gt: rightOperand => makeMatcher('match:gt', rightOperand),
// TODO make more precise
arrayOf: _elementPatt => M.array(),
recordOf: _entryPatt => M.record(),
setOf: _elementPatt => M.set(),
mapOf: _entryPatt => M.map(),
arrayOf: (subPatt = M.any()) => makeMatcher('match:arrayOf', subPatt),
recordOf: (keyPatt = M.any(), valuePatt = M.any()) =>
makeMatcher('match:recordOf', [keyPatt, valuePatt]),
setOf: (keyPatt = M.any()) => makeMatcher('match:setOf', keyPatt),
mapOf: (keyPatt = M.any(), valuePatt = M.any()) =>
makeMatcher('match:mapOf', [keyPatt, valuePatt]),
split: (base, rest = undefined) =>
makeMatcher('match:split', rest === undefined ? [base] : [base, rest]),
partial: (base, rest = undefined) =>
makeMatcher('match:partial', rest === undefined ? [base] : [base, rest]),
});

@@ -917,3 +1092,3 @@

matches,
assertMatches,
fit,
assertPattern,

@@ -927,1 +1102,17 @@ isPattern,

};
// Only include those whose meaning is independent of an imputed sort order
// of remotables, or of encoding of passable as sortable strings. Thus,
// getRankCover is omitted. To get one, you'd need to instantiate
// `makePatternKit()` yourself. Since there are currently no external
// uses of `getRankCover`, for clarity during development, `makePatternKit`
// is not currently exported.
export const {
matches,
fit,
assertPattern,
isPattern,
assertKeyPattern,
isKeyPattern,
M,
} = makePatternKit();
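
A hedged usage sketch of the matchers this version introduces (M.arrayOf, M.recordOf, M.split, and friends), relying only on the `M`, `matches`, and `fit` exports shown above; `harden` is the SES global:

import { M, matches, fit } from '@agoric/store';

matches(harden([1n, 2n, 3n]), M.arrayOf(M.bigint())); // true
matches(harden({ x: 1, y: 'extra' }), M.split({ x: M.number() }, M.record())); // true

// `fit` is the renamed `assertMatches`: it throws on mismatch.
fit(harden({ x: 'oops' }), M.recordOf(M.string(), M.number())); // throws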

@@ -29,5 +29,5 @@ // @ts-check

/* s */ ['string', ['s', 't']],
/* u */ ['undefined', ['u', 'v']],
/* v */ ['null', ['v', 'v~']],
/* y */ ['symbol', ['y', 'z']],
/* z */ ['null', ['z', 'z~']],
/* z */ ['undefined', ['z', '{']],
/* | remotable->ordinal mapping prefix: This is not used in covers but it is

@@ -50,132 +50,158 @@ reserved from the same set of strings. Note that the prefix is > any

/** @type {CompareRank} */
export const compareRank = (left, right) => {
if (sameValueZero(left, right)) {
return 0;
}
const leftStyle = passStyleOf(left);
const rightStyle = passStyleOf(right);
if (leftStyle !== rightStyle) {
return compareRank(PassStyleRank[leftStyle], PassStyleRank[rightStyle]);
}
switch (leftStyle) {
case 'undefined':
case 'null':
case 'remotable':
case 'error':
case 'promise': {
// For each of these passStyles, all members of that passStyle are tied
// for the same rank.
/**
* @type {WeakMap<RankCompare,WeakSet<Passable[]>>}
*/
const memoOfSorted = new WeakMap();
/**
* @type {WeakMap<RankCompare,RankCompare>}
*/
const comparatorMirrorImages = new WeakMap();
/**
* @param {RankCompare=} compareRemotables
* An option to create a comparator in which an internal order is
* assigned to remotables. This defaults to a comparator that
* always returns `0`, meaning that all remotables are tied
* for the same rank.
* @returns {RankComparatorKit}
*/
const makeComparatorKit = (compareRemotables = (_x, _y) => 0) => {
/** @type {RankCompare} */
const comparator = (left, right) => {
if (sameValueZero(left, right)) {
return 0;
}
case 'boolean':
case 'bigint':
case 'string': {
// Within each of these passStyles, the rank ordering agrees with
// JavaScript's relational operators `<` and `>`.
if (left < right) {
return -1;
} else {
assert(left > right);
return 1;
}
const leftStyle = passStyleOf(left);
const rightStyle = passStyleOf(right);
if (leftStyle !== rightStyle) {
return comparator(PassStyleRank[leftStyle], PassStyleRank[rightStyle]);
}
case 'symbol': {
return compareRank(
nameForPassableSymbol(left),
nameForPassableSymbol(right),
);
}
case 'number': {
// `NaN`'s rank is after all other numbers.
if (Number.isNaN(left)) {
assert(!Number.isNaN(right));
return 1;
} else if (Number.isNaN(right)) {
return -1;
switch (leftStyle) {
case 'remotable': {
return compareRemotables(left, right);
}
// The rank ordering of non-NaN numbers agrees with JavaScript's
// relational operators '<' and '>'.
if (left < right) {
return -1;
} else {
assert(left > right);
return 1;
case 'undefined':
case 'null':
case 'error':
case 'promise': {
// For each of these passStyles, all members of that passStyle are tied
// for the same rank.
return 0;
}
}
case 'copyRecord': {
// Lexicographic by inverse sorted order of property names, then
// lexicographic by corresponding values in that same inverse
// order of their property names. Comparing names by themselves first,
// all records with the exact same set of property names sort next to
// each other in a rank-sort of copyRecords.
case 'boolean':
case 'bigint':
case 'string': {
// Within each of these passStyles, the rank ordering agrees with
// JavaScript's relational operators `<` and `>`.
if (left < right) {
return -1;
} else {
assert(left > right);
return 1;
}
}
case 'symbol': {
return comparator(
nameForPassableSymbol(left),
nameForPassableSymbol(right),
);
}
case 'number': {
// `NaN`'s rank is after all other numbers.
if (Number.isNaN(left)) {
assert(!Number.isNaN(right));
return 1;
} else if (Number.isNaN(right)) {
return -1;
}
// The rank ordering of non-NaN numbers agrees with JavaScript's
// relational operators '<' and '>'.
if (left < right) {
return -1;
} else {
assert(left > right);
return 1;
}
}
case 'copyRecord': {
// Lexicographic by inverse sorted order of property names, then
// lexicographic by corresponding values in that same inverse
// order of their property names. Comparing names by themselves first,
// all records with the exact same set of property names sort next to
// each other in a rank-sort of copyRecords.
// The copyRecord invariants enforced by passStyleOf ensure that
// all the property names are strings. We need the reverse sorted order
// of these names, which we then compare lexicographically. This ensures
// that if the names of record X are a subset of the names of record Y,
// then record X will have an earlier rank and sort to the left of Y.
const leftNames = harden(
ownKeys(left)
.sort()
// TODO Measure which is faster: a reverse sort by sorting and
// reversing, or by sorting with an inverse comparison function.
// If it makes a significant difference, use the faster one.
.reverse(),
);
const rightNames = harden(
ownKeys(right)
.sort()
.reverse(),
);
const result = compareRank(leftNames, rightNames);
if (result !== 0) {
return result;
}
const leftValues = harden(leftNames.map(name => left[name]));
const rightValues = harden(rightNames.map(name => right[name]));
return compareRank(leftValues, rightValues);
}
case 'copyArray': {
// Lexicographic
const len = Math.min(left.length, right.length);
for (let i = 0; i < len; i += 1) {
const result = compareRank(left[i], right[i]);
// The copyRecord invariants enforced by passStyleOf ensure that
// all the property names are strings. We need the reverse sorted order
// of these names, which we then compare lexicographically. This ensures
// that if the names of record X are a subset of the names of record Y,
// then record X will have an earlier rank and sort to the left of Y.
const leftNames = harden(
ownKeys(left)
.sort()
// TODO Measure which is faster: a reverse sort by sorting and
// reversing, or by sorting with an inverse comparison function.
// If it makes a significant difference, use the faster one.
.reverse(),
);
const rightNames = harden(
ownKeys(right)
.sort()
.reverse(),
);
const result = comparator(leftNames, rightNames);
if (result !== 0) {
return result;
}
const leftValues = harden(leftNames.map(name => left[name]));
const rightValues = harden(rightNames.map(name => right[name]));
return comparator(leftValues, rightValues);
}
// If all matching elements were tied, then according to their lengths.
// If array X is a prefix of array Y, then X has an earlier rank than Y.
return compareRank(left.length, right.length);
}
case 'tagged': {
// Lexicographic by `[Symbol.toStringTag]` then `.payload`.
const labelComp = compareRank(getTag(left), getTag(right));
if (labelComp !== 0) {
return labelComp;
case 'copyArray': {
// Lexicographic
const len = Math.min(left.length, right.length);
for (let i = 0; i < len; i += 1) {
const result = comparator(left[i], right[i]);
if (result !== 0) {
return result;
}
}
// If all matching elements were tied, then according to their lengths.
// If array X is a prefix of array Y, then X has an earlier rank than Y.
return comparator(left.length, right.length);
}
return compareRank(left.payload, right.payload);
case 'tagged': {
// Lexicographic by `[Symbol.toStringTag]` then `.payload`.
const labelComp = comparator(getTag(left), getTag(right));
if (labelComp !== 0) {
return labelComp;
}
return comparator(left.payload, right.payload);
}
default: {
assert.fail(X`Unrecognized passStyle: ${q(leftStyle)}`);
}
}
default: {
assert.fail(X`Unrecognized passStyle: ${q(leftStyle)}`);
}
}
};
harden(compareRank);
};
/** @type {CompareRank} */
export const compareAntiRank = (x, y) => compareRank(y, x);
/** @type {RankCompare} */
const antiComparator = (x, y) => comparator(y, x);
memoOfSorted.set(comparator, new WeakSet());
memoOfSorted.set(antiComparator, new WeakSet());
comparatorMirrorImages.set(comparator, antiComparator);
comparatorMirrorImages.set(antiComparator, comparator);
return harden({ comparator, antiComparator });
};
/**
* @type {Map<CompareRank,WeakSet<Passable[]>>}
* @param {RankCompare} comparator
* @returns {RankCompare=}
*/
const memoOfSorted = new Map([
[compareRank, new WeakSet()],
[compareAntiRank, new WeakSet()],
]);
export const comparatorMirrorImage = comparator =>
comparatorMirrorImages.get(comparator);
/**
* @param {Passable[]} passables
* @param {CompareRank} compare
* @param {RankCompare} compare
* @returns {boolean}

@@ -202,3 +228,3 @@ */

* @param {Passable[]} sorted
* @param {CompareRank} compare
* @param {RankCompare} compare
*/

@@ -215,4 +241,13 @@ export const assertRankSorted = (sorted, compare) =>

/**
* TODO SECURITY BUG: https://github.com/Agoric/agoric-sdk/issues/4260
* sortByRank currently uses `Array.prototype.sort` directly, and
* so only works correctly when given a `compare` function that considers
* `undefined` strictly bigger (`>`) than everything else. This is
* because `Array.prototype.sort` bizarrely moves all `undefined`s to
* the end of the array regardless, without consulting the `compare`
* function. This is a genuine bug for us NOW because sometimes we sort
* in reverse order by passing a reversed rank comparison function.
*
* @param {Iterable<Passable>} passables
* @param {CompareRank} compare
* @param {RankCompare} compare
* @returns {Passable[]}
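
The hazard described above is plain JavaScript behavior (illustration only, not part of the diff): `Array.prototype.sort` hoists `undefined` elements to the end without consulting the comparator, so a reversed (anti-rank) comparator cannot move them to the front.

const antiNumeric = (a, b) => (a > b ? -1 : a < b ? 1 : 0);
[3, undefined, 1].sort(antiNumeric);
// => [3, 1, undefined], but in anti-rank order `undefined` should sort first.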

@@ -245,3 +280,3 @@ */

* @param {Passable[]} sorted
* @param {CompareRank} compare
* @param {RankCompare} compare
* @param {Passable} key

@@ -304,3 +339,3 @@ * @param {("leftMost" | "rightMost")=} bias

/**
* @param {CompareRank} compare
* @param {RankCompare} compare
* @param {Passable} a

@@ -313,3 +348,3 @@ * @param {Passable} b

/**
* @param {CompareRank} compare
* @param {RankCompare} compare
* @param {Passable} a

@@ -322,3 +357,3 @@ * @param {Passable} b

/**
* @param {CompareRank} compare
* @param {RankCompare} compare
* @param {RankCover[]} covers

@@ -342,3 +377,3 @@ * @returns {RankCover}

/**
* @param {CompareRank} compare
* @param {RankCompare} compare
* @param {RankCover[]} covers

@@ -359,1 +394,52 @@ * @returns {RankCover}

};
export const {
comparator: compareRank,
antiComparator: compareAntiRank,
} = makeComparatorKit();
/**
* Create a comparator kit in which remotables are fully ordered
* by the order in which they are first seen by *this* comparator kit.
* BEWARE: This is observable mutable state, so such a comparator kit
* should never be shared among subsystems that should not be able
* to communicate.
*
* Note that this order does not meet the requirements for store
* ordering, since it has no memory of deleted keys.
*
* This full order comparator kit is strictly more precise than the
* rank order comparator kits above. As a result, any array which is
* sorted by such a full order will pass the isRankSorted test with
* a corresponding rank order.
*
* An array which is sorted by a *fresh* full order comparator, i.e.,
* one that has not yet seen any remotables, will of course remain
* sorted according to *that* full order comparator. An array *of
* scalars* sorted by a fresh full order will remain sorted even
* according to a new fresh full order comparator, since it will see
* the remotables in the same order again. Unfortunately, this is
* not true of arrays of passables in general.
*
* @param {boolean=} longLived
* @returns {FullComparatorKit}
*/
export const makeFullOrderComparatorKit = (longLived = false) => {
let numSeen = 0;
// When dynamically created with short lifetimes (the default) a WeakMap
// would perform poorly, and the leak created by a Map only lasts as long
// as the Map.
const MapConstructor = longLived ? WeakMap : Map;
const seen = new MapConstructor();
const tag = r => {
if (seen.has(r)) {
return seen.get(r);
}
numSeen += 1;
seen.set(r, numSeen);
return numSeen;
};
const compareRemotables = (x, y) => compareRank(tag(x), tag(y));
return makeComparatorKit(compareRemotables);
};
harden(makeFullOrderComparatorKit);
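
A short sketch of the behavior described in the comment above, assuming `Far` remotables and the rankOrder exports added to the package index:

import { Far } from '@agoric/marshal';
import { compareRank, makeFullOrderComparatorKit } from '@agoric/store';

const alice = Far('brand', {});
const bob = Far('brand', {});

compareRank(alice, bob); // 0: all remotables are tied in rank order

const { comparator } = makeFullOrderComparatorKit();
comparator(alice, bob); // -1: alice was seen first by this particular kit
comparator(bob, alice); // 1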
// @ts-check
import { Far, assertPassable } from '@agoric/marshal';
import {
Far,
assertPassable,
filterIterable,
mapIterable,
} from '@agoric/marshal';
import { compareRank } from '../patterns/rankOrder.js';
import { assertScalarKey } from '../keys/checkKey.js';
import { makeCopyMap } from '../keys/copyMap.js';
import { makePatternKit } from '../patterns/patternMatchers.js';
import { matches, fit, assertPattern } from '../patterns/patternMatchers.js';
import { makeWeakMapStoreMethods } from './scalarWeakMapStore.js';
import { makeCursorKit } from './store-utils.js';
import { makeCurrentKeysKit } from './store-utils.js';
const { details: X, quote: q } = assert;
const { assertMatches, assertPattern } = makePatternKit();
const { quote: q } = assert;

@@ -17,3 +21,4 @@ /**

* @param {Map<K,V>} jsmap
* @param {(k: K, v: V) => void} assertKVOkToWrite
* @param {(k: K, v: V) => void} assertKVOkToAdd
* @param {(k: K, v: V) => void} assertKVOkToSet
* @param {((k: K) => void)=} assertKeyOkToDelete

@@ -25,3 +30,4 @@ * @param {string=} keyName

jsmap,
assertKVOkToWrite,
assertKVOkToAdd,
assertKVOkToSet,
assertKeyOkToDelete = undefined,

@@ -31,9 +37,9 @@ keyName = 'key',

const {
assertUpdateOnWrite,
assertUpdateOnAdd,
assertUpdateOnDelete,
makeCursor,
makeArray,
} = makeCursorKit(
iterableKeys,
} = makeCurrentKeysKit(
() => jsmap.keys(),
compareRank,
assertKVOkToWrite,
assertKVOkToAdd,
assertKeyOkToDelete,

@@ -43,40 +49,72 @@ keyName,

const methods = harden({
/**
* @param {Pattern=} keyPatt
* @param {Pattern=} valuePatt
* @returns {Iterable<K>}
*/
const keys = (keyPatt = undefined, valuePatt = undefined) => {
if (keyPatt === undefined && valuePatt === undefined) {
return iterableKeys;
}
const filter = k => {
if (keyPatt !== undefined && !matches(k, keyPatt)) {
return false;
}
// Uses the current jsmap value, since the iterator survives `.set`
if (valuePatt !== undefined && !matches(jsmap.get(k), valuePatt)) {
return false;
}
return true;
};
return filterIterable(iterableKeys, filter);
};
/**
* @param {Pattern=} keyPatt
* @param {Pattern=} valuePatt
* @returns {Iterable<V>}
*/
const values = (keyPatt = undefined, valuePatt = undefined) =>
mapIterable(keys(keyPatt, valuePatt), k => /** @type {V} */ (jsmap.get(k)));
/**
* @param {Pattern=} keyPatt
* @param {Pattern=} valuePatt
* @returns {Iterable<[K,V]>}
*/
const entries = (keyPatt = undefined, valuePatt = undefined) =>
mapIterable(keys(keyPatt, valuePatt), k => [
k,
/** @type {V} */ (jsmap.get(k)),
]);
return harden({
...makeWeakMapStoreMethods(
jsmap,
assertUpdateOnWrite,
/** @type {(k: K, v: V) => void} */ (assertUpdateOnAdd),
assertKVOkToSet,
assertUpdateOnDelete,
keyName,
),
keys,
values,
entries,
cursor: (entryPatt = undefined, direction = 'forward') => {
assert.equal(
direction,
'forward',
X`Non-forward cursors are not yet implemented: map ${q(keyName)}`,
);
return makeCursor(jsmap.entries(), entryPatt);
},
snapshot: (keyPatt = undefined, valuePatt = undefined) =>
makeCopyMap(entries(keyPatt, valuePatt)),
keys: (keyPatt = undefined) => makeArray(jsmap.keys(), keyPatt),
values: (valPatt = undefined) => makeArray(jsmap.values(), valPatt),
entries: (entryPatt = undefined) => makeArray(jsmap.entries(), entryPatt),
getSize: (keyPatt = undefined, valuePatt = undefined) =>
keyPatt === undefined && valuePatt === undefined
? jsmap.size
: [...keys(keyPatt, valuePatt)].length,
snapshot: (entryPatt = undefined) => makeCopyMap(methods.cursor(entryPatt)),
addAll: copyMap => {
const {
payload: { keys, values },
} = copyMap;
const { length } = keys;
for (let i = 0; i < length; i += 1) {
const key = keys[i];
const value = values[i];
// Don't assert that the key either does or does not exist.
assertUpdateOnWrite(key, value);
jsmap.set(key, value);
clear: (keyPatt = undefined, valuePatt = undefined) => {
if (keyPatt === undefined && valuePatt === undefined) {
jsmap.clear();
}
for (const key of keys(keyPatt, valuePatt)) {
jsmap.delete(key);
}
},
});
return methods;
};

@@ -102,25 +140,45 @@

keyName = 'key',
{ schema = undefined } = {},
{ keySchema = undefined, valueSchema = undefined } = {},
) => {
const jsmap = new Map();
if (schema) {
assertPattern(schema);
if (keySchema !== undefined) {
assertPattern(keySchema);
}
const assertKVOkToWrite = (key, value) => {
if (valueSchema !== undefined) {
assertPattern(valueSchema);
}
const assertKVOkToSet = (_key, value) => {
// TODO: Just a transition kludge. Remove when possible.
// See https://github.com/Agoric/agoric-sdk/issues/3606
harden(key);
harden(value);
assertScalarKey(key);
assertPassable(value);
if (schema) {
assertMatches(harden([key, value]), schema);
if (valueSchema !== undefined) {
fit(value, valueSchema);
}
};
const mapStore = Far(`scalar MapStore of ${q(keyName)}`, {
...makeMapStoreMethods(jsmap, assertKVOkToWrite, undefined, keyName),
const assertKVOkToAdd = (key, value) => {
// TODO: Just a transition kludge. Remove when possible.
// See https://github.com/Agoric/agoric-sdk/issues/3606
harden(key);
assertScalarKey(key);
if (keySchema !== undefined) {
fit(key, keySchema);
}
assertKVOkToSet(key, value);
};
return Far(`scalar MapStore of ${q(keyName)}`, {
...makeMapStoreMethods(
jsmap,
assertKVOkToAdd,
assertKVOkToSet,
undefined,
keyName,
),
});
return mapStore;
};
harden(makeScalarMapStore);
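
For orientation, a minimal usage sketch of the new `keySchema`/`valueSchema` options follows. It assumes `makeScalarMapStore` and `M` are re-exported from the package entry point (as the other store makers are) and that `harden` is available globally, as in a SES environment; the store name and values are invented for illustration.

// Hypothetical usage sketch; not part of this diff.
import { makeScalarMapStore, M } from '@agoric/store';

// Accept only string keys and natural-number (bigint) values.
const balances = makeScalarMapStore('account', {
  keySchema: M.string(),
  valueSchema: M.nat(),
});

balances.init('alice', 100n); // fits both schemas
// balances.init('bob', -3n); // would throw: -3n does not fit M.nat()

// Pattern-filtered query over the non-weak store.
const rich = [...balances.keys(M.any(), M.gte(50n))]; // ['alice']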
// @ts-check
import { Far } from '@agoric/marshal';
import { Far, filterIterable } from '@agoric/marshal';
import { compareRank } from '../patterns/rankOrder.js';
import { assertScalarKey } from '../keys/checkKey.js';
import { makeCopySet } from '../keys/copySet.js';
import { makePatternKit } from '../patterns/patternMatchers.js';
import { matches, fit, assertPattern } from '../patterns/patternMatchers.js';
import { makeWeakSetStoreMethods } from './scalarWeakSetStore.js';
import { makeCursorKit } from './store-utils.js';
import { makeCurrentKeysKit } from './store-utils.js';
const { details: X, quote: q } = assert;
const { assertMatches, assertPattern } = makePatternKit();
const { quote: q } = assert;

@@ -17,3 +16,3 @@ /**

* @param {Set<K>} jsset
* @param {(k: K) => void} assertKeyOkToWrite
* @param {(k: K) => void} assertKeyOkToAdd
* @param {((k: K) => void)=} assertKeyOkToDelete

@@ -25,3 +24,3 @@ * @param {string=} keyName

jsset,
assertKeyOkToWrite,
assertKeyOkToAdd,
assertKeyOkToDelete = undefined,

@@ -31,9 +30,9 @@ keyName = 'key',

const {
assertUpdateOnWrite,
assertUpdateOnAdd,
assertUpdateOnDelete,
makeCursor,
makeArray,
} = makeCursorKit(
iterableKeys,
} = makeCurrentKeysKit(
() => jsset.keys(),
compareRank,
assertKeyOkToWrite,
assertKeyOkToAdd,
assertKeyOkToDelete,

@@ -43,6 +42,15 @@ keyName,

const methods = harden({
/**
* @param {Pattern=} keyPatt
* @returns {Iterable<K>}
*/
const keys = (keyPatt = undefined) =>
keyPatt === undefined
? iterableKeys
: filterIterable(iterableKeys, k => matches(k, keyPatt));
return harden({
...makeWeakSetStoreMethods(
jsset,
assertUpdateOnWrite,
assertUpdateOnAdd,
assertUpdateOnDelete,

@@ -52,27 +60,18 @@ keyName,

cursor: (keyPatt = undefined, direction = 'forward') => {
assert.equal(
direction,
'forward',
X`Non-forward cursors are not yet implemented: map ${q(keyName)}`,
);
return makeCursor(jsset.keys(), keyPatt);
},
keys,
keys: (keyPatt = undefined) => makeArray(jsset.keys(), keyPatt),
snapshot: (keyPatt = undefined) => makeCopySet(keys(keyPatt)),
snapshot: (keyPatt = undefined) => makeCopySet(methods.cursor(keyPatt)),
getSize: (keyPatt = undefined) =>
keyPatt === undefined ? jsset.size : [...keys(keyPatt)].length,
addAll: copySet => {
const { payload: keys } = copySet;
const { length } = keys;
for (let i = 0; i < length; i += 1) {
const key = keys[i];
// Don't assert that the key either does or does not exist.
assertKeyOkToWrite(key);
jsset.add(key);
clear: (keyPatt = undefined) => {
if (keyPatt === undefined) {
jsset.clear();
}
for (const key of keys(keyPatt)) {
jsset.delete(key);
}
},
});
return methods;
};

@@ -98,9 +97,10 @@

keyName = 'key',
{ schema = undefined } = {},
{ keySchema = undefined } = {},
) => {
const jsset = new Set();
if (schema) {
assertPattern(schema);
if (keySchema !== undefined) {
assertPattern(keySchema);
}
const assertKeyOkToWrite = key => {
const assertKeyOkToAdd = key => {
// TODO: Just a transition kludge. Remove when possible.

@@ -111,11 +111,11 @@ // See https://github.com/Agoric/agoric-sdk/issues/3606

assertScalarKey(key);
if (schema) {
assertMatches(key, schema);
if (keySchema !== undefined) {
fit(key, keySchema);
}
};
const setStore = Far(`scalar SetStore of ${q(keyName)}`, {
...makeSetStoreMethods(jsset, assertKeyOkToWrite, undefined, keyName),
return Far(`scalar SetStore of ${q(keyName)}`, {
...makeSetStoreMethods(jsset, assertKeyOkToAdd, undefined, keyName),
});
return setStore;
};
harden(makeScalarSetStore);
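
Similarly, a hedged sketch of the SetStore variant with a `keySchema`, using only methods named in this diff (`add`, `addAll`, `keys`, `snapshot`, `getSize`); the identifiers are illustrative.

// Hypothetical usage sketch; not part of this diff.
import { makeScalarSetStore, M } from '@agoric/store';

const seen = makeScalarSetStore('id', { keySchema: M.string() });
seen.add('a');
seen.addAll(['b', 'c']); // addAll now takes any iterable of keys

seen.getSize();                          // 3
const onlyB = [...seen.keys(M.eq('b'))]; // ['b']
const copy = seen.snapshot();            // hardened CopySet of the current keys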
// @ts-check
import { Far, assertPassable, passStyleOf } from '@agoric/marshal';
import { makePatternKit } from '../patterns/patternMatchers.js';
import { fit, assertPattern } from '../patterns/patternMatchers.js';
const { details: X, quote: q } = assert;
const { assertMatches, assertPattern } = makePatternKit();

@@ -12,3 +11,4 @@ /**

* @param {WeakMap<K & Object,V>} jsmap
* @param {(k: K, v: V) => void} assertKVOkToWrite
* @param {(k: K, v: V) => void} assertKVOkToAdd
* @param {(k: K, v: V) => void} assertKVOkToSet
* @param {((k: K) => void)=} assertKeyOkToDelete

@@ -20,4 +20,5 @@ * @param {string=} keyName

jsmap,
assertKVOkToWrite,
assertKeyOkToDelete = () => {},
assertKVOkToAdd,
assertKVOkToSet,
assertKeyOkToDelete = undefined,
keyName = 'key',

@@ -46,3 +47,3 @@ ) => {

assertKeyDoesNotExist(key);
assertKVOkToWrite(key, value);
assertKVOkToAdd(key, value);
jsmap.set(key, value);

@@ -52,3 +53,3 @@ },

assertKeyExists(key);
assertKVOkToWrite(key, value);
assertKVOkToSet(key, value);
jsmap.set(key, value);

@@ -58,5 +59,15 @@ },

assertKeyExists(key);
assertKeyOkToDelete(key);
if (assertKeyOkToDelete !== undefined) {
assertKeyOkToDelete(key);
}
jsmap.delete(key);
},
addAll: entries => {
for (const [key, value] of entries) {
// Don't assert that the key either does or does not exist.
assertKVOkToAdd(key, value);
jsmap.set(key, value);
}
},
});

@@ -66,5 +77,6 @@ };

/**
* This is a *scalar* map in that the keys can only be atomic values, primitives
* or remotables. Other storeMaps will accept, for example, copyArrays and
* copyRecords, as keys and look them up based on equality of their contents.
* This is a *scalar* mapStore in that the keys can only be atomic values:
* primitives or remotables.
* Other mapStores will accept, for example, copyArrays and
* copyRecords as keys and look them up based on equality of their contents.
*

@@ -76,3 +88,3 @@ * TODO For now, this scalarWeakMap accepts only remotables, reflecting the

* there. Though note that this would only enable collection of the
* remotables, since the other primitives may always appear.
* remotables, since the other primitives may always reappear.
*

@@ -86,14 +98,28 @@ * @template K,V

keyName = 'key',
{ longLived = true, schema = undefined } = {},
{ longLived = true, keySchema = undefined, valueSchema = undefined } = {},
) => {
const jsmap = new (longLived ? WeakMap : Map)();
if (schema) {
assertPattern(schema);
if (keySchema !== undefined) {
assertPattern(keySchema);
}
const assertKVOkToWrite = (key, value) => {
if (valueSchema !== undefined) {
assertPattern(valueSchema);
}
const assertKVOkToSet = (_key, value) => {
// TODO: Just a transition kludge. Remove when possible.
// See https://github.com/Agoric/agoric-sdk/issues/3606
harden(key);
harden(value);
assertPassable(value);
if (valueSchema !== undefined) {
fit(value, valueSchema);
}
};
const assertKVOkToAdd = (key, value) => {
// TODO: Just a transition kludge. Remove when possible.
// See https://github.com/Agoric/agoric-sdk/issues/3606
harden(key);
assert(

@@ -103,12 +129,18 @@ passStyleOf(key) === 'remotable',

);
assertPassable(value);
if (schema) {
assertMatches(harden([key, value]), schema);
if (keySchema !== undefined) {
fit(key, keySchema);
}
assertKVOkToSet(key, value);
};
const weakMapStore = Far(`scalar WeakMapStore of ${q(keyName)}`, {
...makeWeakMapStoreMethods(jsmap, assertKVOkToWrite, undefined, keyName),
return Far(`scalar WeakMapStore of ${q(keyName)}`, {
...makeWeakMapStoreMethods(
jsmap,
assertKVOkToAdd,
assertKVOkToSet,
undefined,
keyName,
),
});
return weakMapStore;
};
harden(makeScalarWeakMapStore);
// @ts-check
import { Far, passStyleOf } from '@agoric/marshal';
import { makePatternKit } from '../patterns/patternMatchers.js';
import { fit, assertPattern } from '../patterns/patternMatchers.js';
const { details: X, quote: q } = assert;
const { assertMatches, assertPattern } = makePatternKit();

@@ -12,3 +11,3 @@ /**

* @param {WeakSet<K & Object>} jsset
* @param {(k: K) => void} assertKeyOkToWrite
* @param {(k: K) => void} assertKeyOkToAdd
* @param {((k: K) => void)=} assertKeyOkToDelete

@@ -20,4 +19,4 @@ * @param {string=} keyName

jsset,
assertKeyOkToWrite,
assertKeyOkToDelete = () => {},
assertKeyOkToAdd,
assertKeyOkToDelete = undefined,
keyName = 'key',

@@ -37,3 +36,3 @@ ) => {

add: key => {
assertKeyOkToWrite(key);
assertKeyOkToAdd(key);
jsset.add(key);

@@ -43,5 +42,14 @@ },

assertKeyExists(key);
assertKeyOkToDelete(key);
if (assertKeyOkToDelete !== undefined) {
assertKeyOkToDelete(key);
}
jsset.delete(key);
},
addAll: keys => {
for (const key of keys) {
assertKeyOkToAdd(key);
jsset.add(key);
}
},
});

@@ -69,9 +77,10 @@ };

keyName = 'key',
{ longLived = true, schema = undefined } = {},
{ longLived = true, keySchema = undefined } = {},
) => {
const jsset = new (longLived ? WeakSet : Set)();
if (schema) {
assertPattern(schema);
if (keySchema !== undefined) {
assertPattern(keySchema);
}
const assertKeyOkToWrite = key => {
const assertKeyOkToAdd = key => {
// TODO: Just a transition kludge. Remove when possible.

@@ -85,11 +94,11 @@ // See https://github.com/Agoric/agoric-sdk/issues/3606

);
if (schema) {
assertMatches(key, schema);
if (keySchema !== undefined) {
fit(key, keySchema);
}
};
const weakSetStore = Far(`scalar WeakSetStore of ${q(keyName)}`, {
...makeWeakSetStoreMethods(jsset, assertKeyOkToWrite, undefined, keyName),
return Far(`scalar WeakSetStore of ${q(keyName)}`, {
...makeWeakSetStoreMethods(jsset, assertKeyOkToAdd, undefined, keyName),
});
return weakSetStore;
};
harden(makeScalarWeakSetStore);
// @ts-check
import { filterIterable } from '@agoric/marshal';
import { makePatternKit } from '../patterns/patternMatchers.js';
import { assertRankSorted } from '../patterns/rankOrder.js';
import { Far } from '@agoric/marshal';
const { details: X, quote: q } = assert;
const { matches } = makePatternKit();
export const makeCursorKit = (
/**
* @template K,V
* @typedef {Object} CurrentKeysKit
* @property {(k: K, v?: V) => void} assertUpdateOnAdd
* @property {(k: K) => void} assertUpdateOnDelete
* @property {Iterable<K>} iterableKeys
*/
/**
* @template K,V
* @param {() => Iterable<K>} getRawKeys
* @param {RankCompare} compare
* @param {(k: K, v?: V) => void} assertOkToAdd
* @param {((k: K) => void)=} assertOkToDelete
* @param {string=} keyName
* @returns {CurrentKeysKit<K,V>}
*/
export const makeCurrentKeysKit = (
getRawKeys,
compare,
assertOkToWrite,
assertOkToAdd,
assertOkToDelete = undefined,

@@ -17,55 +32,62 @@ keyName = 'key',

let updateCount = 0;
let sortedKeysMemo;
const cursorKit = harden({
assertUpdateOnWrite: (k, v) => {
assertOkToWrite(k, v);
updateCount += 1;
},
const assertUpdateOnAdd = (k, v = undefined) => {
assertOkToAdd(k, v);
updateCount += 1;
sortedKeysMemo = undefined;
};
assertUpdateOnDelete: assertOkToDelete
? k => {
assertOkToDelete(k);
const assertUpdateOnDelete =
assertOkToDelete === undefined
? _k => {
updateCount += 1;
sortedKeysMemo = undefined;
}
: () => {
: k => {
assertOkToDelete(k);
updateCount += 1;
},
sortedKeysMemo = undefined;
};
makeArray: (baseIterable, pattern = undefined) => {
const filter = pattern ? val => matches(harden(val), pattern) : harden;
const filtered = filterIterable(baseIterable, filter);
const sorted = harden([...filtered].sort(compare));
assertRankSorted(sorted, compare);
return sorted;
},
const getSortedKeys = () => {
if (sortedKeysMemo === undefined) {
sortedKeysMemo = harden([...getRawKeys()].sort(compare));
}
return sortedKeysMemo;
};
makeCursor: (baseIterable, pattern = undefined) => {
const currentUpdateCount = updateCount;
const notStaleFilter = () => {
assert.equal(
currentUpdateCount,
updateCount,
X`MapStore ${q(keyName)} cursor stale`,
);
return true;
};
// TODO In an implementation where the baseIterable returns its data
// already rank sorted, `makeCursor` would use the following
// code to make a cursor, and makeArray would be a snapshot of that.
// However,
// to get the correct external behavior on non-ordered representation,
// we sort in makeArray instead and then makeCursor return a cursor built
// from that.
// const filter = pattern
// ? val => notStaleFilter() && matches(val, pattern)
// : notStaleFilter;
// return filterIterable(baseIterable, filter);
const sorted = cursorKit.makeArray(baseIterable, pattern);
return filterIterable(sorted, notStaleFilter);
const iterableKeys = Far('Iterable of keys', {
[Symbol.iterator]: () => {
const generation = updateCount;
getSortedKeys();
const len = sortedKeysMemo.length;
let i = 0;
return Far('Iterator of keys', {
next: () => {
assert.equal(
generation,
updateCount,
X`Store ${q(keyName)} cursor stale`,
);
// If they're equal, then the sortedKeysMemo is the same one
// we started with.
if (i < len) {
const result = harden({ done: false, value: sortedKeysMemo[i] });
i += 1;
return result;
} else {
return harden({ done: true, value: undefined });
}
},
});
},
});
return cursorKit;
return harden({
assertUpdateOnAdd,
assertUpdateOnDelete,
iterableKeys,
});
};
harden(makeCursorKit);
harden(makeCurrentKeysKit);
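
To illustrate the generation check in the iterator above: once the store is updated, any outstanding key iterator becomes stale and its next `next()` throws. A hedged sketch of the observable behavior, with invented names:

// Hypothetical illustration of the stale-cursor check; not part of this diff.
import { makeScalarSetStore } from '@agoric/store';

const store = makeScalarSetStore('label');
store.add('a');
store.add('b');

const iter = store.keys()[Symbol.iterator]();
iter.next(); // { done: false, value: 'a' } (keys come back rank-sorted)

store.add('c'); // bumps updateCount and clears the sorted-keys memo
// iter.next(); // would now throw: Store "label" cursor stale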

@@ -8,2 +8,15 @@ // @ts-check

* @typedef {Passable} Key
* Keys are pass-by-copy structures (CopyArray, CopyRecord,
* CopySet, CopyMap) that end in either passable primitive data or
* Remotables (Far objects or their remote presences.) Keys are so named
* because they can be used as keys in MapStores and CopyMaps, as well as
* the elements of CopySets.
*
* Keys cannot contain promises or errors, as these do not have a useful
* distributed equality semantics. Keys also cannot contain any CopyTagged
* except for those recognized as CopySets and CopyMaps.
*
* Distributed equality is location independent.
* The same two keys, passed to another location, will be equal there iff
* they are equal here.
*/
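
As a concrete illustration of this distributed equality (a sketch; `keyEQ` and `makeCopySet` are exported by this package, and `harden` is assumed to be the SES global):

// Hypothetical illustration; not part of this diff.
import { keyEQ, makeCopySet } from '@agoric/store';

// copyRecords compare by contents; property order does not matter.
keyEQ(harden({ a: 1, b: 2n }), harden({ b: 2n, a: 1 })); // true

// CopySets compare by membership, not by insertion order.
keyEQ(makeCopySet(['x', 'y']), makeCopySet(['y', 'x'])); // true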

@@ -13,6 +26,42 @@

* @typedef {Passable} Pattern
* Patterns can be either Keys or Matchers
* Patterns are pass-by-copy structures (CopyArray, CopyRecord,
* CopySet, CopyMap) that end in either Keys or Matchers. Each pattern
* acts as a declarative passable predicate over passables, where each passable
* either passes a given pattern, or does not. Every key is also a pattern.
* Used as a pattern, a key matches only "itself", i.e., keys that are equal
* to it according to the key distributed equality semantics.
*
* Patterns cannot contain promises or errors, as these do
* not have a useful distributed equality or matching semantics. Likewise,
* no pattern can distinguish among promises, or distinguish among errors.
* Patterns also cannot contain any CopyTaggeds except for those recognized as
* CopySets, CopyMaps, or Matchers.
*
* Whether a passable matches a given pattern is location independent.
* For a passable and a pattern, both passed to another location, the passable
* will match the pattern there iff the passable matches that pattern here.
*
* Patterns are often used in a type-like manner, to represent the category
* of passables that are intended* to match that pattern. To keep this
* distinction clear, we often use the suffix "Shape" rather than "Pattern"
* to avoid the levels confusion when the pattern itself represents
* some category of pattern. For example, an "AmountShape" represents the
* category of Amounts. And "AmountPatternShape" represents the
* category of patterns over Amounts.
*
* * I say "intended" above because Patterns, in order to be declarative
* and passable, cannot have the generality of predicates written in a
* Turing-universal programming language. Rather, to represent the category of
* things intended to be a Foo, a FooShape should reliably
* accept all Foos and reject only non-Foos. However, a FooShape may also accept
* non-Foos that "look like" or have "the same shape as" genuine Foos. To write
* an accurate predicate, for use, for example, in input validation, one would
* need to supplement the pattern check with code to check for the residual
* cases.
* We hope the "Shape" metaphor helps remind us of this type-like imprecision
* of patterns.
*/
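
A short sketch of keys-as-patterns versus matchers, using the exported `matches` and `fit`; `AmountShape` and the brand are invented for illustration, and `harden` is assumed global:

// Hypothetical illustration; not part of this diff.
import { Far } from '@agoric/marshal';
import { matches, fit, M } from '@agoric/store';

// Every key is a pattern that matches only itself.
matches(harden([1, 2]), harden([1, 2])); // true
matches(harden([1, 2]), harden([1, 3])); // false

// Matchers describe categories ("shapes") rather than single values.
const brand = Far('illustrativeBrand', {});
const AmountShape = harden({ brand: M.remotable(), value: M.nat() });
matches(harden({ brand, value: 37n }), AmountShape); // true

// fit is the assertion form: it throws a diagnostic when there is no match.
fit(harden({ brand, value: 37n }), AmountShape);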
/**
* @template K
* @typedef {CopyTagged} CopySet

@@ -22,2 +71,3 @@ */

/**
* @template K,V
* @typedef {CopyTagged} CopyMap

@@ -41,11 +91,18 @@ */

* Defaults to true, so please mark short lived stores explicitly.
* @property {Pattern=} schema The store will reject
* the insertion of any content that does not match the schema pattern.
* For a SetStore, this is a key pattern.
* For a MapStore, this is an entry pattern,
* i.e., a pattern over `[key,value]` pairs.
* Defaults to `M.any()`.
* @property {Pattern=} keySchema
* @property {Pattern=} valueSchema
*/
/** @typedef {"forward" | "reverse"} Direction */
/**
* Most store methods are in one of three categories
* * lookup methods (`has`,`get`)
* * update methods (`add`,`init`,`set`,`delete`,`addAll`)
* * query methods (`snapshot`,`keys`,`values`,`entries`,`getSize`)
* * query-update methods (`clear`)
*
* WeakStores have the lookup and update methods but not the query
* or query-update methods.
* Non-weak Stores are like their corresponding WeakStore, but with the
* additional query and query-update methods.
*/
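
To make the categories concrete, a hedged walk-through on a scalar MapStore (method names as listed above; the store and values are invented):

// Hypothetical illustration; not part of this diff.
import { makeScalarMapStore } from '@agoric/store';

const m = makeScalarMapStore('word');
m.init('hi', 1);    // update
m.set('hi', 2);     // update
m.has('hi');        // lookup: true
m.get('hi');        // lookup: 2
[...m.entries()];   // query: [['hi', 2]]
m.getSize();        // query: 1
m.clear();          // query-update: empties the store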

@@ -65,2 +122,3 @@ /**

* Remove the key. Throws if not found.
* @property {(keys: Iterable<K>) => void} addAll
*/

@@ -81,8 +139,7 @@

* Remove the key. Throws if not found.
* @property {(keyPattern?: Pattern) => K[]} keys
* @property {(keyPattern?: Pattern) => CopySet} snapshot
* @property {(copySet: CopySet) => void} addAll
* @property {(keyPattern?: Pattern,
* direction?: Direction
* ) => Iterable<K>} cursor
* @property {(keys: Iterable<K>) => void} addAll
* @property {(keyPatt?: Pattern) => Iterable<K>} keys
* @property {(keyPatt?: Pattern) => CopySet<K>} snapshot
* @property {(keyPatt?: Pattern) => number} getSize
* @property {(keyPatt?: Pattern) => void} clear
*/

@@ -106,2 +163,3 @@

* Remove the key. Throws if not found.
* @property {(entries: Iterable<[K,V]>) => void} addAll
*/

@@ -125,10 +183,12 @@

* Remove the key. Throws if not found.
* @property {(keyPattern?: Pattern) => K[]} keys
* @property {(valuePattern?: Pattern) => V[]} values
* @property {(entryPattern?: Pattern) => [K,V][]} entries
* @property {(entryPattern?: Pattern) => CopyMap} snapshot
* @property {(copyMap: CopyMap) => void} addAll
* @property {(entryPattern?: Pattern,
* direction?: Direction
* ) => Iterable<[K,V]>} cursor
* @property {(entries: Iterable<[K,V]>) => void} addAll
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => Iterable<K>} keys
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => Iterable<V>} values
* @property {(
* keyPatt?: Pattern,
* valuePatt?: Pattern
* ) => Iterable<[K,V]>} entries
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => CopyMap<K,V>} snapshot
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => number} getSize
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => void} clear
*/

@@ -146,13 +206,17 @@

* @template K,V
* @typedef {Object} Store
* @typedef {MapStore<K,V>} Store
* Deprecated type name `Store`. Use `MapStore` instead.
*/
/**
* @template K,V
* @typedef {Object} LegacyWeakMap
* LegacyWeakMap is deprecated. Use WeakMapStore instead.
* @property {(key: K) => boolean} has
* Check if a key exists. The key can be any JavaScript value, though the
* answer will always be false for keys that cannot be found in this map
* Check if a key exists
* @property {(key: K) => V} get
* Return a value for the key. Throws if not found.
* @property {(key: K, value: V) => void} init
* Initialize the key only if it doesn't already exist.
* The key must be one allowed by this store. For example a scalar store only
* allows primitives and remotables.
* Initialize the key only if it
* doesn't already exist
* @property {(key: K, value: V) => void} set

@@ -162,5 +226,2 @@ * Set the key. Throws if not found.

* Remove the key. Throws if not found.
* @property {(keyPattern?: Pattern) => K[]} keys
* @property {(valuePattern?: Pattern) => V[]} values
* @property {(entryPattern?: Pattern) => [K,V][]} entries
*/

@@ -170,3 +231,3 @@

* @template K,V
* @typedef {Object} LegacyWeakMap
* @typedef {Object} LegacyMap
* LegacyWeakMap is deprecated. Use WeakMapStore instead.

@@ -184,15 +245,20 @@ * @property {(key: K) => boolean} has

* Remove the key. Throws if not found.
* @property {() => Iterable<K>} keys
* @property {() => Iterable<V>} values
* @property {() => Iterable<[K,V]>} entries
* @property {() => number} getSize
* @property {() => void} clear
*/
// /////////////////////////////////////////////////////////////////////////////
/**
* @template K,V
* @callback MakeLegacyWeakMap
* @param {string} [keyName='key'] - the column name for the key
* @returns {LegacyWeakMap}
* @typedef {-1 | 0 | 1} RankComparison
* The result of a `RankCompare` function that defines a rank-order, i.e.,
* a total order in which different elements can be tied for the same
* rank. See `RankCompare`.
*/
// /////////////////////////////////////////////////////////////////////////////
/**
* @callback CompareRank
* @callback RankCompare
* Returns `-1`, `0`, or `1` depending on whether the rank of `left`

@@ -225,5 +291,87 @@ * is before, tied-with, or after the rank of `right`.

* @param {Passable} right
* @returns {-1 | 0 | 1}
* @returns {RankComparison}
*/
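
A small sketch contrasting rank order (a full order over all passables) with the key order described below; `compareRank` and `sortByRank` are exported by this package, and `harden` is assumed global:

// Hypothetical illustration; not part of this diff.
import { compareRank, sortByRank } from '@agoric/store';

compareRank(1n, 2n); // -1
compareRank(2n, 1n); // 1

// A hardened, rank-sorted copy of the input; the input itself is left alone.
const sorted = sortByRank(harden([3n, 1n, 2n]), compareRank); // [1n, 2n, 3n]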
/**
* @typedef {RankCompare} FullCompare
* A `FullCompare` function satisfies all the invariants stated below for
* `RankCompare`'s relation with KeyCompare.
* In addition, its equality is as precise as the `KeyCompare`
* comparison defined below, in that, for all Keys `x` and `y`,
* `FullCompare(x, y) === 0` iff `KeyCompare(x, y) === 0`.
*
* For non-keys a `FullCompare` should be exactly as imprecise as
* `RankCompare`. For example, both will treat all errors as in the same
* equivalence class. Both will treat all promises as in the same
* equivalence class. Both will order taggeds the same way, which is admittedly
* weird, as some taggeds will be considered keys and other taggeds will be
* considered non-keys.
*/
/**
* @typedef {Object} RankComparatorKit
* @property {RankCompare} comparator
* @property {RankCompare} antiComparator
*/
/**
* @typedef {Object} FullComparatorKit
* @property {FullCompare} comparator
* @property {FullCompare} antiComparator
*/
/**
* @typedef {-1 | 0 | 1 | NaN} KeyComparison
* The result of a `KeyCompare` function that defines a meaningful
* and meaningfully precise partial order of `Key` values. See `KeyCompare`.
*/
/**
* @callback KeyCompare
* `compareKeys` implements a partial order over keys. As with the
* rank ordering produced by `compareRank`, -1, 0, and 1 mean
* "less than", "equivalent to", and "greater than" respectively.
* NaN means "incomparable" --- the first key is not less, equivalent,
* or greater than the second. For example, subsets over sets is
* a partial order.
*
* By using NaN for "incomparable", the normal equivalence for using
* the return value in a comparison is preserved.
* `compareKeys(left, right) >= 0` iff `left` is greater than or
* equivalent to `right` in the partial ordering.
*
* Key order (a partial order) and rank order (a full order) are
* co-designed so that we store passables in rank order and index into them
* with keys for key-based queries. To keep these distinct, when speaking
* informally about rank, we talk about "earlier" and "later". When speaking
* informally about keys, we talk about "smaller" and "bigger".
*
* In both orders, the return-0 case defines
* an equivalence class, i.e., those that are tied for the same place in the
* order. The global invariant that we need between the two orders is that the
* key order equivalence class is always at least as precise as the
* rank order equivalence class. IOW, if `compareKeys(X,Y) === 0` then
* `compareRank(X,Y) === 0`. But not vice versa. For example, two different
* remotables are the same rank but incommensurate as keys.
*
* A further invariant is if `compareKeys(X,Y) < 0` then
* `compareRank(X,Y) <= 0`, i.e., if X is smaller than Y in key order, then X
* must be at least as early as Y in rank order. But not vice versa.
* X can be earlier than Y in rank order and still be incommensurate with Y in
* key order. For example, the record `{b: 3, a: 5}` is earlier than but
* incommensurate with the record `{b: 5, a: 3}`.
*
* This lets us translate a range search over the
* partial key order into a range search over rank order followed by filtering
* out those that don't match. To get this effect, we store the elements of
* a set in an array sorted in reverse rank order, from later to earlier.
* Combined with our lexicographic comparison of arrays, if set X is a subset
* of set Y then the array representing set X will be an earlier rank than the
* array representing set Y.
*
* @param {Key} left
* @param {Key} right
* @returns {KeyComparison}
*/
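
An illustration of the partial order, assuming the CopySet case of `compareKeys` implements the subset ordering described above (names invented; the elements are primitives, so no extra hardening is needed):

// Hypothetical illustration; not part of this diff.
import { compareKeys, makeCopySet } from '@agoric/store';

compareKeys(1n, 2n); // -1: comparable scalars are totally ordered

const ab = makeCopySet(['a', 'b']);
const abc = makeCopySet(['a', 'b', 'c']);
const cd = makeCopySet(['c', 'd']);

compareKeys(ab, abc); // -1: a subset is "smaller"
compareKeys(ab, cd);  // NaN: neither contains the other
// So compareKeys(ab, cd) >= 0 is false, as intended for incomparable keys.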
// ///////////////////// Should be internal only types /////////////////////////

@@ -254,3 +402,3 @@

* @param {Passable[]} sorted
* @param {CompareRank} compare
* @param {RankCompare} compare
* @param {RankCover} rankCover

@@ -290,4 +438,7 @@ * @returns {IndexCover}

* @callback KeyToDBKey
* If this key can be encoded as a DBKey string which sorts correctly,
* return that string. Else return `undefined`. For example, a scalar-only
* encodeKey would return `undefined` for all non-scalar keys.
* @param {Passable} key
* @returns {string}
* @returns {string=}
*/

@@ -303,10 +454,101 @@

/**
* @typedef {Object} PatternMatcher
* @property {GetRankCover} getRankCover
* @typedef {Object} MatcherNamespace
* @property {() => Matcher} any
* Matches any passable
* @property {(...patts: Pattern[]) => Matcher} and
* Only if it matches all the sub-patterns
* @property {(...patts: Pattern[]) => Matcher} or
* Only if it matches at least one subPattern
* @property {(subPatt: Pattern) => Matcher} not
* Only if it does not match the sub-pattern
*
* @property {() => Matcher} scalar
* The scalars are the primitive values and Remotables.
* All scalars are keys.
* @property {() => Matcher} key
* Can be an element of a CopySet or a key in a CopyMap.
* (Will eventually be able to be a key in a MapStore.)
* All keys are patterns that match only themselves.
* @property {() => Matcher} pattern
* If it matches M.pattern(), then it is itself a pattern used
* to match other passables. A pattern cannot contain errors
* or promises, as these are not stable enough to usefully match.
* @property {(kind: string) => Matcher} kind
* @property {() => Matcher} boolean
* @property {() => Matcher} number Only floating point numbers
* @property {() => Matcher} bigint
* @property {() => Matcher} nat
* @property {() => Matcher} string
* @property {() => Matcher} symbol
* Only registered and well-known symbols are passable
* @property {() => Matcher} record A CopyRecord
* @property {() => Matcher} array A CopyArray
* @property {() => Matcher} set A CopySet
* @property {() => Matcher} map A CopyMap
* @property {() => Matcher} remotable A far object or its remote presence
* @property {() => Matcher} error
* Error objects are passable, but are neither keys nor patterns.
* They do not have a useful identity.
* @property {() => Matcher} promise
* Promises are passable, but are neither keys nor patterns.
* They do not have a useful identity.
* @property {() => Matcher} undefined
* All keys including `undefined` are already valid patterns and
* so can validly represent themselves. But optional pattern arguments
* `(pattern = undefined) => ...`
* cannot distinguish between `undefined` passed as a pattern vs
* omission of the argument. It will interpret the first as the
* second. Thus, when a passed pattern does not also need to be a key,
* we recommend passing `M.undefined()` instead of `undefined`.
*
* @property {() => null} null
*
* @property {(rightOperand :Key) => Matcher} lt
* Matches if < the right operand by compareKeys
* @property {(rightOperand :Key) => Matcher} lte
* Matches if <= the right operand by compareKeys
* @property {(key :Key) => Matcher} eq
* @property {(key :Key) => Matcher} neq
* @property {(rightOperand :Key) => Matcher} gte
* Matches if >= the right operand by compareKeys
* @property {(rightOperand :Key) => Matcher} gt
* Matches if > the right operand by compareKeys
*
* @property {(subPatt?: Pattern) => Matcher} arrayOf
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => Matcher} recordOf
* @property {(keyPatt?: Pattern) => Matcher} setOf
* @property {(keyPatt?: Pattern, valuePatt?: Pattern) => Matcher} mapOf
* @property {(
* base: CopyRecord<*> | CopyArray<*>,
* rest?: Pattern
* ) => Matcher} split
* An array or record is split into the first part that matches the
* base pattern, and the remainder, which matches against the optional
* rest pattern if present.
* @property {(
* base: CopyRecord<*> | CopyArray<*>,
* rest?: Pattern
* ) => Matcher} partial
* An array or record is split into the first part that matches the
* base pattern, and the remainder, which matches against the optional
* rest pattern if present.
* Unlike `M.split`, `M.partial` ignores properties on the base
* pattern that are not present on the specimen.
*/
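
A few of these matchers in use, as a sketch; `matches` and `M` are exported by this package, `harden` is assumed global, and the shapes are invented for illustration:

// Hypothetical illustration; not part of this diff.
import { matches, M } from '@agoric/store';

// Range check by combining comparison matchers.
const Percentage = M.and(M.gte(0n), M.lte(100n));
matches(42n, Percentage); // true

// Homogeneous collections.
matches(harden(['a', 'b']), M.arrayOf(M.string())); // true

// M.split: the base part must match; the remainder goes to the rest pattern.
const WithId = M.split(harden({ id: M.string() }), M.any());
matches(harden({ id: 'x1', extra: true }), WithId); // true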
/**
* @typedef {Object} PatternKit
* @property {(specimen: Passable, patt: Pattern) => boolean} matches
* @property {(specimen: Passable, patt: Pattern) => void} fit
* @property {(patt: Pattern) => void} assertPattern
* @property {(patt: Passable) => boolean} isPattern
* @property {(patt: Pattern) => void} assertKeyPattern
* @property {(patt: Passable) => boolean} isKeyPattern
* @property {MatcherNamespace} M
*/
// /////////////////////////////////////////////////////////////////////////////
// TODO
// The following commented out type should be in internal-types.js, since the
// The following type should be in internal-types.js, since the
// `MatchHelper` type is purely internal to this package. However,

@@ -313,0 +555,0 @@ // in order to get the governance and solo packages both to pass lint,
