@chainsafe/persistent-merkle-tree - npm Package Compare versions

Comparing version 0.7.2 to 0.8.0

lib/gindex.js.map

lib/gindex.js

@@ -170,1 +170,2 @@ "use strict";

exports.gindexChild = gindexChild;
//# sourceMappingURL=gindex.js.map

@@ -5,5 +5,126 @@ "use strict";

const as_sha256_1 = require("@chainsafe/as-sha256");
const util_1 = require("./util");
exports.hasher = {
name: "as-sha256",
digest64: as_sha256_1.digest2Bytes32,
digest64HashObjects: as_sha256_1.digest64HashObjects,
digest64HashObjects: as_sha256_1.digest64HashObjectsInto,
merkleizeInto(data, padFor, output, offset) {
return util_1.doMerkleizeInto(data, padFor, output, offset, as_sha256_1.hashInto);
},
digestNLevel(data, nLevel) {
return util_1.doDigestNLevel(data, nLevel, as_sha256_1.hashInto);
},
executeHashComputations: (hashComputations) => {
for (let level = hashComputations.length - 1; level >= 0; level--) {
const hcArr = hashComputations[level];
if (!hcArr) {
// should not happen
throw Error(`no hash computations for level ${level}`);
}
if (hcArr.length === 0) {
// nothing to hash
continue;
}
// HashComputations of the same level are safe to batch
let src0_0 = null;
let src1_0 = null;
let dest0 = null;
let src0_1 = null;
let src1_1 = null;
let dest1 = null;
let src0_2 = null;
let src1_2 = null;
let dest2 = null;
let src0_3 = null;
let src1_3 = null;
let dest3 = null;
let i = 0;
for (const hc of hcArr) {
const indexInBatch = i % 4;
switch (indexInBatch) {
case 0:
src0_0 = hc.src0;
src1_0 = hc.src1;
dest0 = hc.dest;
break;
case 1:
src0_1 = hc.src0;
src1_1 = hc.src1;
dest1 = hc.dest;
break;
case 2:
src0_2 = hc.src0;
src1_2 = hc.src1;
dest2 = hc.dest;
break;
case 3:
src0_3 = hc.src0;
src1_3 = hc.src1;
dest3 = hc.dest;
if (src0_0 !== null &&
src1_0 !== null &&
dest0 !== null &&
src0_1 !== null &&
src1_1 !== null &&
dest1 !== null &&
src0_2 !== null &&
src1_2 !== null &&
dest2 !== null &&
src0_3 !== null &&
src1_3 !== null &&
dest3 !== null) {
// TODO - batch: find a way not to allocate here
const [o0, o1, o2, o3] = as_sha256_1.batchHash4HashObjectInputs([
src0_0,
src1_0,
src0_1,
src1_1,
src0_2,
src1_2,
src0_3,
src1_3,
]);
if (o0 == null || o1 == null || o2 == null || o3 == null) {
throw Error(`batchHash4HashObjectInputs returned null or undefined at batch ${i} level ${level}`);
}
dest0.applyHash(o0);
dest1.applyHash(o1);
dest2.applyHash(o2);
dest3.applyHash(o3);
// reset for next batch
src0_0 = null;
src1_0 = null;
dest0 = null;
src0_1 = null;
src1_1 = null;
dest1 = null;
src0_2 = null;
src1_2 = null;
dest2 = null;
src0_3 = null;
src1_3 = null;
dest3 = null;
}
break;
default:
throw Error(`Unexpected indexInBatch ${indexInBatch}`);
}
i++;
}
// remaining
if (src0_0 !== null && src1_0 !== null && dest0 !== null) {
dest0.applyHash(as_sha256_1.digest64HashObjects(src0_0, src1_0));
}
if (src0_1 !== null && src1_1 !== null && dest1 !== null) {
dest1.applyHash(as_sha256_1.digest64HashObjects(src0_1, src1_1));
}
if (src0_2 !== null && src1_2 !== null && dest2 !== null) {
dest2.applyHash(as_sha256_1.digest64HashObjects(src0_2, src1_2));
}
if (src0_3 !== null && src1_3 !== null && dest3 !== null) {
dest3.applyHash(as_sha256_1.digest64HashObjects(src0_3, src1_3));
}
}
},
};
//# sourceMappingURL=as-sha256.js.map
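
For orientation, the batch loop above reduces to a simple pattern: collect four (src0, src1, dest) triples, hash all four pairs in one batchHash4HashObjectInputs call, and fall back to single digest64HashObjects calls for the tail. A minimal sketch, assuming the HashComputation shape used in this diff; the slice/flatMap helpers are illustrative and allocate, which is exactly what the TODO above wants to avoid:

import type {HashObject} from "@chainsafe/as-sha256/lib/hashObject";
import {batchHash4HashObjectInputs, digest64HashObjects} from "@chainsafe/as-sha256";

type HashComputation = {src0: HashObject; src1: HashObject; dest: {applyHash(h: HashObject): void}};

// Sketch: hash one level, four computations at a time, single-hashing the remainder.
function hashLevelSketch(hcArr: HashComputation[]): void {
  let i = 0;
  for (; i + 4 <= hcArr.length; i += 4) {
    const batch = hcArr.slice(i, i + 4);
    // 8 inputs (4 pairs) in, 4 parent hashes out
    const [o0, o1, o2, o3] = batchHash4HashObjectInputs(batch.flatMap((hc) => [hc.src0, hc.src1]));
    batch[0].dest.applyHash(o0);
    batch[1].dest.applyHash(o1);
    batch[2].dest.applyHash(o2);
    batch[3].dest.applyHash(o3);
  }
  // fewer than 4 computations left: hash them one by one
  for (; i < hcArr.length; i++) {
    hcArr[i].dest.applyHash(digest64HashObjects(hcArr[i].src0, hcArr[i].src1));
  }
}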

lib/hasher/index.d.ts
import { Hasher } from "./types";
export { HashObject } from "@chainsafe/as-sha256/lib/hashObject";
import type { HashComputationLevel } from "../hashComputation";
export * from "./types";
export * from "./util";
/**
* Hasher used across the SSZ codebase
* Hasher used across the SSZ codebase. By default, this does not support batch hash.
*/

@@ -15,1 +15,5 @@ export declare let hasher: Hasher;

export declare function setHasher(newHasher: Hasher): void;
export declare function digest64(a: Uint8Array, b: Uint8Array): Uint8Array;
export declare function digestNLevel(data: Uint8Array, nLevel: number): Uint8Array;
export declare function merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void;
export declare function executeHashComputations(hashComputations: HashComputationLevel[]): void;

@@ -13,3 +13,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.setHasher = exports.hasher = void 0;
exports.executeHashComputations = exports.merkleizeInto = exports.digestNLevel = exports.digest64 = exports.setHasher = exports.hasher = void 0;
const noble_1 = require("./noble");

@@ -19,3 +19,3 @@ __exportStar(require("./types"), exports);

/**
* Hasher used across the SSZ codebase
* Hasher used across the SSZ codebase. By default, this does not support batch hash.
*/

@@ -32,1 +32,18 @@ exports.hasher = noble_1.hasher;

exports.setHasher = setHasher;
function digest64(a, b) {
return exports.hasher.digest64(a, b);
}
exports.digest64 = digest64;
function digestNLevel(data, nLevel) {
return exports.hasher.digestNLevel(data, nLevel);
}
exports.digestNLevel = digestNLevel;
function merkleizeInto(data, padFor, output, offset) {
exports.hasher.merkleizeInto(data, padFor, output, offset);
}
exports.merkleizeInto = merkleizeInto;
function executeHashComputations(hashComputations) {
exports.hasher.executeHashComputations(hashComputations);
}
exports.executeHashComputations = executeHashComputations;
//# sourceMappingURL=index.js.map
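
These wrappers delegate to whichever Hasher is currently installed, so an application can swap implementations once at startup. A usage sketch; the as-sha256 import path is an assumption based on the compiled layout shown in this diff:

import {setHasher, digest64} from "@chainsafe/persistent-merkle-tree";
import {hasher as asSha256Hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/as-sha256";

// Replace the default noble hasher (portable, slower) with as-sha256 (batch-capable).
setHasher(asSha256Hasher);

// All module-level helpers now route through the installed hasher.
const parent = digest64(new Uint8Array(32), new Uint8Array(32));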

@@ -5,7 +5,46 @@ "use strict";

const sha256_1 = require("@noble/hashes/sha256");
const as_sha256_1 = require("@chainsafe/as-sha256");
const util_1 = require("./util");
const digest64 = (a, b) => sha256_1.sha256.create().update(a).update(b).digest();
const hashInto = (input, output) => {
if (input.length % 64 !== 0) {
throw new Error(`Invalid input length ${input.length}`);
}
if (input.length !== output.length * 2) {
throw new Error(`Invalid output length ${output.length}`);
}
const count = Math.floor(input.length / 64);
for (let i = 0; i < count; i++) {
const offset = i * 64;
const in1 = input.subarray(offset, offset + 32);
const in2 = input.subarray(offset + 32, offset + 64);
const out = digest64(in1, in2);
output.set(out, i * 32);
}
};
exports.hasher = {
name: "noble",
digest64,
digest64HashObjects: (a, b) => util_1.uint8ArrayToHashObject(digest64(util_1.hashObjectToUint8Array(a), util_1.hashObjectToUint8Array(b))),
digest64HashObjects: (left, right, parent) => {
as_sha256_1.byteArrayIntoHashObject(digest64(util_1.hashObjectToUint8Array(left), util_1.hashObjectToUint8Array(right)), 0, parent);
},
merkleizeInto(data, padFor, output, offset) {
return util_1.doMerkleizeInto(data, padFor, output, offset, hashInto);
},
digestNLevel(data, nLevel) {
return util_1.doDigestNLevel(data, nLevel, hashInto);
},
executeHashComputations: (hashComputations) => {
for (let level = hashComputations.length - 1; level >= 0; level--) {
const hcArr = hashComputations[level];
if (!hcArr) {
// should not happen
throw Error(`no hash computations for level ${level}`);
}
for (const hc of hcArr) {
hc.dest.applyHash(as_sha256_1.digest64HashObjects(hc.src0, hc.src1));
}
}
},
};
//# sourceMappingURL=noble.js.map
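
The noble hasher builds everything from one primitive with a fixed contract: the input holds N concatenated 64-byte pairs and the output receives N 32-byte digests. A self-contained sketch of an equivalent HashIntoFn, assuming only the standard @noble/hashes API:

import {sha256} from "@noble/hashes/sha256";

// Same contract as the module-local hashInto above: N 64-byte pairs in, N 32-byte digests out.
const hashIntoSketch = (input: Uint8Array, output: Uint8Array): void => {
  if (input.length % 64 !== 0 || input.length !== output.length * 2) {
    throw new Error(`invalid lengths in=${input.length} out=${output.length}`);
  }
  for (let i = 0; i < input.length / 64; i++) {
    output.set(sha256(input.subarray(i * 64, i * 64 + 64)), i * 32);
  }
};

// N = 2: two 64-byte pairs reduce to two 32-byte digests
hashIntoSketch(new Uint8Array(128), new Uint8Array(64));
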
import type { HashObject } from "@chainsafe/as-sha256/lib/hashObject";
import type { HashComputationLevel } from "../hashComputation";
export type { HashObject };
export declare type Hasher = {
name: string;
/**

@@ -10,3 +13,19 @@ * Hash two 32-byte Uint8Arrays

*/
digest64HashObjects(a: HashObject, b: HashObject): HashObject;
digest64HashObjects(left: HashObject, right: HashObject, parent: HashObject): void;
/**
* Merkleize n chunks of data, 32 bytes each.
* padFor is maxChunkCount; use it to compute the layers to hash.
* data is mutated by the function.
*/
merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void;
/**
* Hash multiple chunks (1 chunk = 32 bytes) at multiple levels
* With nLevel = 3, hashes a multiple of 256 bytes and returns a multiple of 32 bytes.
* The result is unsafe as it will be overwritten by the next call.
*/
digestNLevel(data: Uint8Array, nLevel: number): Uint8Array;
/**
* Execute a batch of HashComputations
*/
executeHashComputations(hashComputations: HashComputationLevel[]): void;
};
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=types.js.map
import { HashObject } from "@chainsafe/as-sha256/lib/hashObject";
export declare function hashObjectToUint8Array(obj: HashObject): Uint8Array;
export declare function uint8ArrayToHashObject(byteArr: Uint8Array): HashObject;
declare type HashIntoFn = (input: Uint8Array, output: Uint8Array) => void;
/**
* Input data is unsafe because it's modified
* If its chunk count is odd, it needs to be appended with a zero hash at layer 0 so that we don't need
* a new memory allocation here (even though we don't need it if padFor = 1)
* The Uint8Array(32) will be written to output at offset
*/
export declare function doMerkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number, hashInto: HashIntoFn): void;
/**
* Input data is unsafe because it's modified
* Given nLevel = 3,
* digest a multiple of 8 chunks = 256 bytes;
* the result is a multiple of 1 chunk = 32 bytes.
* This is the same as hashTreeRoot() of multiple validators.
*/
export declare function doDigestNLevel(data: Uint8Array, nLevel: number, hashInto: HashIntoFn): Uint8Array;
export {};
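
A worked example of the padding rule above; a sketch assuming doMerkleizeInto is importable from the package root and hashIntoSketch is any HashIntoFn (such as the one earlier). Merkleizing 6 chunks with padFor = 8 never materializes the full padded layer; a single zero-hash chunk is appended whenever an intermediate layer is odd:

import {doMerkleizeInto} from "@chainsafe/persistent-merkle-tree";

// padFor = 8 => layerCount = ceil(log2(8)) = 3
// layer 0: 6 chunks -> 3; odd, so zeroHash(1) is appended -> 4 chunks
// layer 1: 4 chunks -> 2
// layer 2: 2 chunks -> 1 root, written to `output` at offset 0
const data = new Uint8Array(6 * 32); // mutated in place by the call
const output = new Uint8Array(32);
doMerkleizeInto(data, 8, output, 0, hashIntoSketch);
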
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.uint8ArrayToHashObject = exports.hashObjectToUint8Array = void 0;
exports.doDigestNLevel = exports.doMerkleizeInto = exports.uint8ArrayToHashObject = exports.hashObjectToUint8Array = void 0;
const hashObject_1 = require("@chainsafe/as-sha256/lib/hashObject");
const zeroHash_1 = require("../zeroHash");
function hashObjectToUint8Array(obj) {

@@ -12,4 +13,79 @@ const byteArr = new Uint8Array(32);

function uint8ArrayToHashObject(byteArr) {
return hashObject_1.byteArrayToHashObject(byteArr);
return hashObject_1.byteArrayToHashObject(byteArr, 0);
}
exports.uint8ArrayToHashObject = uint8ArrayToHashObject;
/**
* Input data is unsafe because it's modified
* If its chunk count is odd, it needs to be appended with a zero hash at layer 0 so that we don't need
* a new memory allocation here (even though we don't need it if padFor = 1)
* The Uint8Array(32) will be written to output at offset
*/
function doMerkleizeInto(data, padFor, output, offset, hashInto) {
if (padFor < 1) {
throw new Error(`Invalid padFor, expect to be greater than 0, got ${padFor}`);
}
const layerCount = Math.ceil(Math.log2(padFor));
if (data.length === 0) {
output.set(zeroHash_1.zeroHash(layerCount), offset);
return;
}
if (data.length % 32 !== 0) {
throw new Error(`Invalid input length, expect to be multiple of 32 bytes, got ${data.length}`);
}
// if padFor = 1, only need 32 bytes
if (padFor > 1 && data.length % 64 !== 0) {
throw new Error(`Invalid input length, expect to be multiple of 64 bytes, got ${data.length}, padFor=${padFor}`);
}
let inputLength = data.length;
let outputLength = Math.floor(inputLength / 2);
let bufferIn = data;
// hash into the same buffer
for (let i = 0; i < layerCount; i++) {
const bufferOut = data.subarray(0, outputLength);
hashInto(bufferIn, bufferOut);
const chunkCount = Math.floor(outputLength / 32);
if (chunkCount % 2 === 1 && i < layerCount - 1) {
// extend to 1 more chunk
inputLength = outputLength + 32;
bufferIn = data.subarray(0, inputLength);
bufferIn.set(zeroHash_1.zeroHash(i + 1), outputLength);
}
else {
bufferIn = bufferOut;
inputLength = outputLength;
}
outputLength = Math.floor(inputLength / 2);
}
output.set(bufferIn.subarray(0, 32), offset);
}
exports.doMerkleizeInto = doMerkleizeInto;
/**
* Input data is unsafe because it's modified
* Given nLevel = 3,
* digest a multiple of 8 chunks = 256 bytes;
* the result is a multiple of 1 chunk = 32 bytes.
* This is the same as hashTreeRoot() of multiple validators.
*/
function doDigestNLevel(data, nLevel, hashInto) {
let inputLength = data.length;
const bytesInBatch = Math.pow(2, nLevel) * 32;
if (nLevel < 1) {
throw new Error(`Invalid nLevel, expect to be greater than 0, got ${nLevel}`);
}
if (inputLength % bytesInBatch !== 0) {
throw new Error(`Invalid input length, expect to be multiple of ${bytesInBatch} for nLevel ${nLevel}, got ${inputLength}`);
}
let outputLength = Math.floor(inputLength / 2);
// hash into same buffer
let bufferIn = data;
for (let i = nLevel; i > 0; i--) {
const bufferOut = bufferIn.subarray(0, outputLength);
hashInto(bufferIn, bufferOut);
bufferIn = bufferOut;
inputLength = outputLength;
outputLength = Math.floor(inputLength / 2);
}
return bufferIn;
}
exports.doDigestNLevel = doDigestNLevel;
//# sourceMappingURL=util.js.map
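
Likewise for doDigestNLevel: the buffer is halved nLevel times in place, so with nLevel = 3 each 256-byte group (8 chunks) collapses to one 32-byte root. A hedged usage sketch, import path assumed as above:

import {doDigestNLevel} from "@chainsafe/persistent-merkle-tree";

// Two groups of 8 chunks (2 x 256 bytes) collapse to two 32-byte roots.
const data = new Uint8Array(2 * 256);                  // mutated in place
const roots = doDigestNLevel(data, 3, hashIntoSketch); // 64-byte view into `data`
const root0 = roots.subarray(0, 32);                   // hashTreeRoot of group 0
const root1 = roots.subarray(32, 64);                  // hashTreeRoot of group 1
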
export * from "./gindex";
export * from "./hasher";
export * from "./node";
export * from "./hashComputation";
export * from "./packedNode";

@@ -9,1 +10,2 @@ export * from "./proof";

export * from "./zeroNode";
export * from "./zeroHash";

@@ -16,2 +16,3 @@ "use strict";

__exportStar(require("./node"), exports);
__exportStar(require("./hashComputation"), exports);
__exportStar(require("./packedNode"), exports);

@@ -22,1 +23,3 @@ __exportStar(require("./proof"), exports);

__exportStar(require("./zeroNode"), exports);
__exportStar(require("./zeroHash"), exports);
//# sourceMappingURL=index.js.map

@@ -50,3 +50,3 @@ "use strict";

if (this.h0 === null) {
super.applyHash(hasher_1.hasher.digest64HashObjects(this.left.rootHashObject, this.right.rootHashObject));
hasher_1.hasher.digest64HashObjects(this.left.rootHashObject, this.right.rootHashObject, this);
}

@@ -344,1 +344,2 @@ return this;

exports.bitwiseOrNodeH = bitwiseOrNodeH;
//# sourceMappingURL=node.js.map

@@ -126,1 +126,2 @@ "use strict";

exports.packedNodeRootsToBytes = packedNodeRootsToBytes;
//# sourceMappingURL=packedNode.js.map

@@ -143,1 +143,2 @@ "use strict";

exports.createNodeFromCompactMultiProof = createNodeFromCompactMultiProof;
//# sourceMappingURL=compactMulti.js.map

@@ -121,1 +121,2 @@ "use strict";

exports.deserializeProof = deserializeProof;
//# sourceMappingURL=index.js.map

@@ -92,1 +92,2 @@ "use strict";

exports.createNodeFromMultiProof = createNodeFromMultiProof;
//# sourceMappingURL=multi.js.map

@@ -43,1 +43,2 @@ "use strict";

exports.createNodeFromSingleProof = createNodeFromSingleProof;
//# sourceMappingURL=single.js.map

@@ -119,1 +119,2 @@ "use strict";

exports.deserializeTreeOffsetProof = deserializeTreeOffsetProof;
//# sourceMappingURL=treeOffset.js.map

@@ -154,1 +154,2 @@ "use strict";

exports.computeMultiProofBitstrings = computeMultiProofBitstrings;
//# sourceMappingURL=util.js.map
import { Node } from "./node";
import { HashComputationLevel } from "./hashComputation";
export declare function subtreeFillToDepth(bottom: Node, depth: number): Node;

@@ -7,3 +8,4 @@ export declare function subtreeFillToLength(bottom: Node, depth: number, length: number): Node;

* TODO: Don't mutate the nodes array.
* hcByLevel is an output parameter that will be filled with the hash computations if provided.
*/
export declare function subtreeFillToContents(nodes: Node[], depth: number): Node;
export declare function subtreeFillToContents(nodes: Node[], depth: number, hcOffset?: number, hcByLevel?: HashComputationLevel[] | null): Node;

@@ -5,2 +5,3 @@ "use strict";

const node_1 = require("./node");
const hashComputation_1 = require("./hashComputation");
const zeroNode_1 = require("./zeroNode");

@@ -43,4 +44,5 @@ function subtreeFillToDepth(bottom, depth) {

* TODO: Don't mutate the nodes array.
* hcByLevel is an output parameter that will be filled with the hash computations if provided.
*/
function subtreeFillToContents(nodes, depth) {
function subtreeFillToContents(nodes, depth, hcOffset = 0, hcByLevel = null) {
const maxLength = 2 ** depth;

@@ -54,10 +56,20 @@ if (nodes.length > maxLength) {

if (depth === 0) {
return nodes[0];
const node = nodes[0];
if (hcByLevel !== null) {
hashComputation_1.getHashComputations(node, hcOffset, hcByLevel);
}
return node;
}
if (depth === 1) {
return nodes.length > 1
? // All nodes at depth 1 available
new node_1.BranchNode(nodes[0], nodes[1])
: // Pad with zero node
new node_1.BranchNode(nodes[0], zeroNode_1.zeroNode(0));
// All nodes at depth 1 available
// If there is only one node, pad with zero node
const leftNode = nodes[0];
const rightNode = nodes.length > 1 ? nodes[1] : zeroNode_1.zeroNode(0);
const rootNode = new node_1.BranchNode(leftNode, rightNode);
if (hcByLevel !== null) {
hashComputation_1.getHashComputations(leftNode, hcOffset + 1, hcByLevel);
hashComputation_1.getHashComputations(rightNode, hcOffset + 1, hcByLevel);
hashComputation_1.levelAtIndex(hcByLevel, hcOffset).push(leftNode, rightNode, rootNode);
}
return rootNode;
}

@@ -68,8 +80,31 @@ let count = nodes.length;

const countEven = count - countRemainder;
const offset = hcByLevel ? hcOffset + d - 1 : null;
// For each depth level compute the new BranchNodes and overwrite the nodes array
for (let i = 0; i < countEven; i += 2) {
nodes[i / 2] = new node_1.BranchNode(nodes[i], nodes[i + 1]);
const left = nodes[i];
const right = nodes[i + 1];
const node = new node_1.BranchNode(left, right);
nodes[i / 2] = node;
if (offset !== null && hcByLevel !== null) {
hashComputation_1.levelAtIndex(hcByLevel, offset).push(left, right, node);
if (d === depth) {
// bottom-up strategy, so we don't need to go down the tree except for the last level
hashComputation_1.getHashComputations(left, offset + 1, hcByLevel);
hashComputation_1.getHashComputations(right, offset + 1, hcByLevel);
}
}
}
if (countRemainder > 0) {
nodes[countEven / 2] = new node_1.BranchNode(nodes[countEven], zeroNode_1.zeroNode(depth - d));
const left = nodes[countEven];
const right = zeroNode_1.zeroNode(depth - d);
const node = new node_1.BranchNode(left, right);
nodes[countEven / 2] = node;
if (offset !== null && hcByLevel !== null) {
if (d === depth) {
// only go down on the last level
hashComputation_1.getHashComputations(left, offset + 1, hcByLevel);
}
// no need to getHashComputations for zero node
hashComputation_1.levelAtIndex(hcByLevel, offset).push(left, right, node);
}
}

@@ -82,1 +117,2 @@ // If there was a remainder, 2 nodes are added to the count

exports.subtreeFillToContents = subtreeFillToContents;
//# sourceMappingURL=subtree.js.map
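
The new hcOffset/hcByLevel parameters let a caller collect hash computations while building a subtree and execute them later in one batched pass. A sketch of the intended flow; LeafNode.fromRoot and the root-level exports are assumptions about the package's public API:

import {subtreeFillToContents, executeHashComputations, LeafNode, HashComputationLevel} from "@chainsafe/persistent-merkle-tree";

// Build a depth-2 subtree from 4 leaves, collecting hash computations by level.
const leaves = Array.from({length: 4}, () => LeafNode.fromRoot(new Uint8Array(32)));
const hcByLevel: HashComputationLevel[] = [];
const rootNode = subtreeFillToContents(leaves, 2, 0, hcByLevel);

// One batched bottom-up hashing pass instead of lazy per-node hashing.
executeHashComputations(hcByLevel);
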
import { Gindex, GindexBitstring } from "./gindex";
import { Node } from "./node";
import { HashComputationLevel } from "./hashComputation";
import { Proof, ProofInput } from "./proof";

@@ -150,4 +151,5 @@ export declare type Hook = (newRootNode: Node) => void;

* Supports index up to `Number.MAX_SAFE_INTEGER`.
* @param hcByLevel an array of HashComputation[] by level (could be from 0 to `nodesDepth - 1`)
*/
export declare function setNodesAtDepth(rootNode: Node, nodesDepth: number, indexes: number[], nodes: Node[]): Node;
export declare function setNodesAtDepth(rootNode: Node, nodesDepth: number, indexes: number[], nodes: Node[], hcOffset?: number, hcByLevel?: HashComputationLevel[] | null): Node;
/**

@@ -194,1 +196,14 @@ * Fast read-only iteration

export declare function treeZeroAfterIndex(rootNode: Node, nodesDepth: number, index: number): Node;
/**
* depth     depthi   gindexes     indexes
* 0         1        1            0
* 1         0        2 3          0 1
* 2         -        4 5 6 7      0 1 2 3
*
* **Conditions**:
* - `from` and `to` must not be equal
*
* @param from Index
* @param to Index
*/
export declare function findDiffDepthi(from: number, to: number): number;
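
A sketch of how the extended setNodesAtDepth signature might be used; zeroNode, LeafNode.fromRoot, and executeHashComputations are assumed from elsewhere in the package:

import {zeroNode, setNodesAtDepth, executeHashComputations, LeafNode, HashComputationLevel} from "@chainsafe/persistent-merkle-tree";

const depth = 4;
const hcByLevel: HashComputationLevel[] = [];
// Set leaves at indexes 2 and 5, collecting hash computations instead of hashing lazily.
const newRoot = setNodesAtDepth(
  zeroNode(depth),
  depth,
  [2, 5],
  [LeafNode.fromRoot(new Uint8Array(32)), LeafNode.fromRoot(new Uint8Array(32))],
  0,
  hcByLevel
);
executeHashComputations(hcByLevel);
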
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.treeZeroAfterIndex = exports.iterateNodesAtDepth = exports.getNodesAtDepth = exports.setNodesAtDepth = exports.setNodeAtDepth = exports.getNodeAtDepth = exports.setNodeWithFn = exports.setNode = exports.getNode = exports.Tree = void 0;
exports.findDiffDepthi = exports.treeZeroAfterIndex = exports.iterateNodesAtDepth = exports.getNodesAtDepth = exports.setNodesAtDepth = exports.setNodeAtDepth = exports.getNodeAtDepth = exports.setNodeWithFn = exports.setNode = exports.getNode = exports.Tree = void 0;
const zeroNode_1 = require("./zeroNode");
const gindex_1 = require("./gindex");
const node_1 = require("./node");
const hashComputation_1 = require("./hashComputation");
const proof_1 = require("./proof");

@@ -305,4 +306,5 @@ const single_1 = require("./proof/single");

* Supports index up to `Number.MAX_SAFE_INTEGER`.
* @param hcByLevel an array of HashComputation[] by level (could be from 0 to `nodesDepth - 1`)
*/
function setNodesAtDepth(rootNode, nodesDepth, indexes, nodes) {
function setNodesAtDepth(rootNode, nodesDepth, indexes, nodes, hcOffset = 0, hcByLevel = null) {
// depth depthi gindexes indexes

@@ -380,2 +382,7 @@ // 0 1 1 0

node = new node_1.BranchNode(nodes[i], nodes[i + 1]);
if (hcByLevel != null) {
// go with level of dest node (level 0 goes with root node)
// in this case dest node is at nodesDepth - 2, same for below
hashComputation_1.levelAtIndex(hcByLevel, nodesDepth - 1 + hcOffset).push(nodes[i], nodes[i + 1], node);
}
// Move pointer one extra forward since node has consumed two nodes

@@ -385,7 +392,15 @@ i++;

else {
node = new node_1.BranchNode(nodes[i], node.right);
const oldNode = node;
node = new node_1.BranchNode(nodes[i], oldNode.right);
if (hcByLevel != null) {
hashComputation_1.levelAtIndex(hcByLevel, nodesDepth - 1 + hcOffset).push(nodes[i], oldNode.right, node);
}
}
}
else {
node = new node_1.BranchNode(node.left, nodes[i]);
const oldNode = node;
node = new node_1.BranchNode(oldNode.left, nodes[i]);
if (hcByLevel != null) {
hashComputation_1.levelAtIndex(hcByLevel, nodesDepth - 1 + hcOffset).push(oldNode.left, nodes[i], node);
}
}

@@ -416,2 +431,6 @@ // Here `node` is the new BranchNode at depthi `depthiParent`

// If node is on the right merge with stored left node
const depth = nodesDepth - d - 1;
if (depth < 0) {
throw Error(`Invalid depth ${depth}, d=${d}, nodesDepth=${nodesDepth}`);
}
if (isLeftNode(d, index)) {

@@ -421,3 +440,7 @@ if (isLastIndex || d !== diffDepthi) {

// Also, if still has to move upwards, rebind since the node won't be visited anymore
node = new node_1.BranchNode(node, parentNodeStack[d].right);
const oldNode = node;
node = new node_1.BranchNode(oldNode, parentNodeStack[d].right);
if (hcByLevel != null) {
hashComputation_1.levelAtIndex(hcByLevel, depth + hcOffset).push(oldNode, parentNodeStack[d].right, node);
}
}

@@ -433,7 +456,15 @@ else {

if (leftNode !== undefined) {
node = new node_1.BranchNode(leftNode, node);
const oldNode = node;
node = new node_1.BranchNode(leftNode, oldNode);
if (hcByLevel != null) {
hashComputation_1.levelAtIndex(hcByLevel, depth + hcOffset).push(leftNode, oldNode, node);
}
leftParentNodeStack[d] = undefined;
}
else {
node = new node_1.BranchNode(parentNodeStack[d].left, node);
const oldNode = node;
node = new node_1.BranchNode(parentNodeStack[d].left, oldNode);
if (hcByLevel != null) {
hashComputation_1.levelAtIndex(hcByLevel, depth + hcOffset).push(parentNodeStack[d].left, oldNode, node);
}
}

@@ -628,3 +659,44 @@ }

exports.treeZeroAfterIndex = treeZeroAfterIndex;
const NUMBER_32_MAX = 0xffffffff;
const NUMBER_2_POW_32 = 2 ** 32;
/**
* depth     depthi   gindexes     indexes
* 0         1        1            0
* 1         0        2 3          0 1
* 2         -        4 5 6 7      0 1 2 3
*
* **Conditions**:
* - `from` and `to` must not be equal
*
* @param from Index
* @param to Index
*/
function findDiffDepthi(from, to) {
if (from === to || from < 0 || to < 0) {
throw Error(`Expect different positive inputs, from=${from} to=${to}`);
}
// 0 -> 0, 1 -> 1, 2 -> 2, 3 -> 2, 4 -> 3
const numBits0 = Math.ceil(Math.log2(from + 1));
const numBits1 = Math.ceil(Math.log2(to + 1));
// these indexes are on two different sides of the merkle tree
if (numBits0 !== numBits1) {
// must offset by one to match the depthi scale
return Math.max(numBits0, numBits1) - 1;
}
// same number of bits and > 32
if (numBits0 > 32) {
const highBits0 = Math.floor(from / NUMBER_2_POW_32) & NUMBER_32_MAX;
const highBits1 = Math.floor(to / NUMBER_2_POW_32) & NUMBER_32_MAX;
if (highBits0 === highBits1) {
// different part is just low bits
return findDiffDepthi32Bits(from & NUMBER_32_MAX, to & NUMBER_32_MAX);
}
// highBits are different, no need to compare low bits
return 32 + findDiffDepthi32Bits(highBits0, highBits1);
}
// same number of bits and <= 32
return findDiffDepthi32Bits(from, to);
}
exports.findDiffDepthi = findDiffDepthi;
/**
* Returns true if the `index` at `depth` is a left node, false if it is a right node.

@@ -648,19 +720,17 @@ *

/**
* depth     depthi   gindexes     indexes
* 0         1        1            0
* 1         0        2 3          0 1
* 2         -        4 5 6 7      0 1 2 3
*
* **Conditions**:
* - `from` and `to` must not be equal
*
* @param from Index
* @param to Index
*/
function findDiffDepthi(from, to) {
return (
// (0,0) -> 0 | (0,1) -> 1 | (0,2) -> 2
Math.ceil(Math.log2(-~(from ^ to))) -
// Must offset by one to match the depthi scale
1);
}
/**
* Similar to findDiffDepthi() but for 32-bit numbers only
*/
function findDiffDepthi32Bits(from, to) {
const xor = from ^ to;
if (xor === 0) {
// this should not happen as it is checked in `findDiffDepthi`;
// otherwise this function would return -1, which is invalid for a depthi
throw Error(`Do not support equal value from=${from} to=${to}`);
}
// xor < 0 means the 1st bits of `from` and `to` differ, i.e. the numbers differ at bit 32
const numBitsDiff = xor < 0 ? 32 : Math.ceil(Math.log2(xor + 1));
// must offset by one to match the depthi scale
return numBitsDiff - 1;
}
//# sourceMappingURL=tree.js.map
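
A few worked values for findDiffDepthi, matching the depth/depthi table in its comment (import path assumed from the new export above):

import {findDiffDepthi} from "@chainsafe/persistent-merkle-tree";

// Indexes 0 and 1 share a parent at depthi 0; indexes 1 and 2 diverge one level higher.
findDiffDepthi(0, 1);       // 0
findDiffDepthi(1, 2);       // 1
findDiffDepthi(0, 3);       // 1
// Beyond 32 bits, the indexes are split into high/low halves internally.
findDiffDepthi(0, 2 ** 40); // 40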

@@ -23,2 +23,5 @@ "use strict";

}
// make sure the hash is precomputed so that zeroNodes are not pushed into HashComputation,
// otherwise we get OOM
zeroes[height].root;
}

@@ -28,1 +31,2 @@ return zeroes[height];

exports.zeroNode = zeroNode;
//# sourceMappingURL=zeroNode.js.map
{
"name": "@chainsafe/persistent-merkle-tree",
"version": "0.7.2",
"version": "0.8.0",
"description": "Merkle tree implemented as a persistent datastructure",

@@ -19,9 +19,11 @@ "main": "lib/index.js",

"scripts": {
"check-types": "run -T tsc --noEmit",
"check-types": "tsc --noEmit",
"clean": "rm -rf lib",
"build": "run -T tsc",
"lint": "run -T eslint --color --ext .ts src/",
"benchmark": "node --max-old-space-size=4096 --expose-gc -r ts-node/register ./node_modules/.bin/benchmark 'test/perf/*.perf.ts'",
"build": "tsc",
"lint": "eslint --color --ext .ts src/",
"lint:fix": "yarn run lint --fix",
"benchmark:files": "node --max-old-space-size=4096 --expose-gc -r ts-node/register ../../node_modules/.bin/benchmark",
"benchmark": "yarn benchmark:files 'test/perf/*.test.ts'",
"benchmark:local": "yarn benchmark --local",
"test": "run -T mocha -r ts-node/register 'test/unit/**/*.test.ts'"
"test": "mocha -r ts-node/register 'test/unit/**/*.test.ts'"
},

@@ -48,5 +50,6 @@ "pre-push": [

"dependencies": {
"@chainsafe/as-sha256": "^0.4.2",
"@chainsafe/as-sha256": "0.5.0",
"@chainsafe/hashtree": "1.0.1",
"@noble/hashes": "^1.3.0"
}
}
}