@chainsafe/persistent-merkle-tree

@chainsafe/persistent-merkle-tree - npm Package Compare versions

Comparing version to 0.9.0

lib/gindex.d.ts

@@ -1,3 +0,3 @@

export declare type Gindex = bigint;
export declare type GindexBitstring = string;
export type Gindex = bigint;
export type GindexBitstring = string;
export declare function bitIndexBigInt(v: bigint): number;

@@ -16,3 +16,3 @@ export declare function toGindex(depth: number, index: bigint): Gindex;

export declare function getGindicesAtDepth(depth: number, startIndex: number, count: number): Gindex[];
export declare type Bit = 0 | 1;
export type Bit = 0 | 1;
export interface GindexIterator extends Iterable<Bit> {

@@ -19,0 +19,0 @@ remainingBitLength(): number;

@@ -1,9 +0,5 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.gindexChild = exports.gindexParent = exports.gindexSibling = exports.concatGindices = exports.getGindexBits = exports.gindexIterator = exports.getGindicesAtDepth = exports.iterateAtDepth = exports.countToDepth = exports.convertGindexToBitstring = exports.toGindexBitstring = exports.toGindex = exports.bitIndexBigInt = void 0;
function bitIndexBigInt(v) {
export function bitIndexBigInt(v) {
return v.toString(2).length - 1;
}
exports.bitIndexBigInt = bitIndexBigInt;
function toGindex(depth, index) {
export function toGindex(depth, index) {
const anchor = BigInt(1) << BigInt(depth);

@@ -15,4 +11,3 @@ if (index >= anchor) {

}
exports.toGindex = toGindex;
function toGindexBitstring(depth, index) {
export function toGindexBitstring(depth, index) {
const str = index ? Number(index).toString(2) : "";

@@ -26,4 +21,3 @@ if (str.length > depth) {

}
exports.toGindexBitstring = toGindexBitstring;
function convertGindexToBitstring(gindex) {
export function convertGindexToBitstring(gindex) {
if (typeof gindex === "string") {

@@ -42,6 +36,5 @@ if (gindex.length === 0) {

}
exports.convertGindexToBitstring = convertGindexToBitstring;
// Get the depth (root starting at 0) necessary to cover a subtree of `count` elements.
// (in out): (0 0), (1 0), (2 1), (3 2), (4 2), (5 3), (6 3), (7 3), (8 3), (9 4)
function countToDepth(count) {
export function countToDepth(count) {
if (count <= 1) {

@@ -52,7 +45,6 @@ return 0;

}
exports.countToDepth = countToDepth;
/**
* Iterate through Gindexes at a certain depth
*/
function iterateAtDepth(depth, startIndex, count) {
export function iterateAtDepth(depth, startIndex, count) {
const anchor = BigInt(1) << BigInt(depth);

@@ -81,7 +73,6 @@ if (startIndex + count > anchor) {

}
exports.iterateAtDepth = iterateAtDepth;
/**
* Return Gindexes at a certain depth
*/
function getGindicesAtDepth(depth, startIndex, count) {
export function getGindicesAtDepth(depth, startIndex, count) {
const anchor = BigInt(1) << BigInt(depth);

@@ -98,5 +89,4 @@ if (startIndex + count > anchor) {

}
exports.getGindicesAtDepth = getGindicesAtDepth;
const ERR_INVALID_GINDEX = "Invalid gindex";
function gindexIterator(gindex) {
export function gindexIterator(gindex) {
let bitstring;

@@ -133,4 +123,3 @@ if (typeof gindex === "string") {

}
exports.gindexIterator = gindexIterator;
function getGindexBits(gindex) {
export function getGindexBits(gindex) {
let bitstring;

@@ -155,3 +144,2 @@ if (typeof gindex === "string") {

}
exports.getGindexBits = getGindexBits;
/**

@@ -162,18 +150,14 @@ * Concatenate Generalized Indices

*/
function concatGindices(gindices) {
export function concatGindices(gindices) {
return BigInt(gindices.reduce((acc, gindex) => acc + gindex.toString(2).slice(1), "0b1"));
}
exports.concatGindices = concatGindices;
function gindexSibling(gindex) {
export function gindexSibling(gindex) {
return gindex ^ BigInt(1);
}
exports.gindexSibling = gindexSibling;
function gindexParent(gindex) {
export function gindexParent(gindex) {
return gindex / BigInt(2);
}
exports.gindexParent = gindexParent;
function gindexChild(gindex, rightChild) {
export function gindexChild(gindex, rightChild) {
return gindex * BigInt(2) + BigInt(rightChild);
}
exports.gindexChild = gindexChild;
//# sourceMappingURL=gindex.js.map
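
The gindex module above implements generalized-index navigation as plain bigint arithmetic. A minimal sketch of how the exported helpers compose, assuming the package root re-exports them as the index diff later in this comparison shows:

```ts
import {
  toGindex,
  gindexParent,
  gindexSibling,
  gindexChild,
  concatGindices,
} from "@chainsafe/persistent-merkle-tree";

// Leaf 5 at depth 3 sits at gindex 2^3 + 5 = 13 ("1101" in path bits).
const g = toGindex(3, BigInt(5)); // 13n

gindexParent(g);      // 6n  = 13n / 2n  (drop the last path bit)
gindexSibling(g);     // 12n = 13n ^ 1n  (flip the last path bit)
gindexChild(g, true); // 27n = 13n * 2n + 1n (descend to the right child)

// Concatenation appends path bits (each gindex minus its leading 1):
concatGindices([BigInt(2), BigInt(3)]); // 5n, i.e. "10" + "1" = "101"
```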

@@ -1,2 +0,2 @@

import type { Node } from "./node";
import type { Node } from "./node.js";
/**

@@ -11,3 +11,3 @@ * HashComputation to be later used to compute hash of nodes from bottom up.

*/
export declare type HashComputation = {
export type HashComputation = {
src0: Node;

@@ -14,0 +14,0 @@ src1: Node;

@@ -1,4 +0,1 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.levelAtIndex = exports.getHashComputations = exports.HashComputationGroup = exports.HashComputationLevel = void 0;
/**

@@ -9,3 +6,10 @@ * Model HashComputation[] at the same level that support reusing the same memory.

*/
class HashComputationLevel {
export class HashComputationLevel {
_length;
_totalLength;
// use LinkedList to avoid memory allocation when the list grows
// always have a fixed head although length is 0
head;
tail;
pointer;
constructor() {

@@ -137,7 +141,7 @@ this._length = 0;

}
exports.HashComputationLevel = HashComputationLevel;
/**
* Model HashComputationLevel[] at different levels.
*/
class HashComputationGroup {
export class HashComputationGroup {
byLevel;
constructor() {

@@ -157,3 +161,2 @@ this.byLevel = [];

}
exports.HashComputationGroup = HashComputationGroup;
/**

@@ -177,3 +180,3 @@ * Get HashComputations from a root node all the way to the leaf nodes.

*/
function getHashComputations(node, index, hcByLevel) {
export function getHashComputations(node, index, hcByLevel) {
if (node.h0 === null) {

@@ -189,7 +192,6 @@ const hashComputations = levelAtIndex(hcByLevel, index);

}
exports.getHashComputations = getHashComputations;
/**
* Utility to get HashComputationLevel at a specific index.
*/
function levelAtIndex(hcByLevel, index) {
export function levelAtIndex(hcByLevel, index) {
if (hcByLevel[index] === undefined) {

@@ -200,3 +202,2 @@ hcByLevel[index] = new HashComputationLevel();

}
exports.levelAtIndex = levelAtIndex;
//# sourceMappingURL=hashComputation.js.map
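
HashComputationLevel batches pending parent-hash computations per tree level so a hasher can process them bottom-up in one pass. A sketch of the intended flow, under the assumption that these symbols are re-exported from the package root and that the `index` argument is the level of the passed node (0 for the root); the leaf values are purely illustrative:

```ts
import {
  BranchNode,
  LeafNode,
  HashComputationLevel,
  getHashComputations,
  executeHashComputations,
} from "@chainsafe/persistent-merkle-tree";

const root = new BranchNode(
  LeafNode.fromRoot(new Uint8Array(32).fill(1)),
  LeafNode.fromRoot(new Uint8Array(32).fill(2))
);

// Walk the tree from the root, collecting every node whose cached hash
// (h0) is still null, grouped by level.
const hcByLevel: HashComputationLevel[] = [];
getHashComputations(root, 0, hcByLevel);

// Batch-hash all collected pairs bottom-up with the installed hasher;
// root.root then reads the cached digest without further hashing.
executeHashComputations(hcByLevel);
```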

@@ -1,2 +0,2 @@

import type { Hasher } from "./types";
import type { Hasher } from "./types.js";
export declare const hasher: Hasher;

@@ -1,15 +0,20 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.hasher = void 0;
const as_sha256_1 = require("@chainsafe/as-sha256");
const util_1 = require("./util");
exports.hasher = {
import { digest2Bytes32, digest64HashObjectsInto, digest64HashObjects, batchHash4HashObjectInputs, hashInto, } from "@chainsafe/as-sha256";
import { BLOCK_SIZE, doDigestNLevel, doMerkleizeBlockArray, doMerkleizeBlocksBytes } from "./util.js";
/**
 * The hashInto() function of as-sha256 processes input in 256-byte batches; see
 * https://github.com/ChainSafe/ssz/blob/cf3e1f038c8bf7cba1bb27c38540e50b0391d0e6/packages/as-sha256/src/index.ts#L270
 */

const buffer = new Uint8Array(4 * BLOCK_SIZE);
export const hasher = {
name: "as-sha256",
digest64: as_sha256_1.digest2Bytes32,
digest64HashObjects: as_sha256_1.digest64HashObjectsInto,
merkleizeInto(data, padFor, output, offset) {
return util_1.doMerkleizeInto(data, padFor, output, offset, as_sha256_1.hashInto);
digest64: digest2Bytes32,
digest64HashObjects: digest64HashObjectsInto,
merkleizeBlocksBytes(blocksBytes, padFor, output, offset) {
return doMerkleizeBlocksBytes(blocksBytes, padFor, output, offset, hashInto);
},
merkleizeBlockArray(blocks, blockLimit, padFor, output, offset) {
return doMerkleizeBlockArray(blocks, blockLimit, padFor, output, offset, hashInto, buffer);
},
digestNLevel(data, nLevel) {
return util_1.doDigestNLevel(data, nLevel, as_sha256_1.hashInto);
return doDigestNLevel(data, nLevel, hashInto);
},

@@ -76,3 +81,3 @@ executeHashComputations: (hashComputations) => {

// TODO - batch: find a way not allocate here
const [o0, o1, o2, o3] = as_sha256_1.batchHash4HashObjectInputs([
const [o0, o1, o2, o3] = batchHash4HashObjectInputs([
src0_0,

@@ -116,12 +121,12 @@ src1_0,

if (src0_0 !== null && src1_0 !== null && dest0 !== null) {
dest0.applyHash(as_sha256_1.digest64HashObjects(src0_0, src1_0));
dest0.applyHash(digest64HashObjects(src0_0, src1_0));
}
if (src0_1 !== null && src1_1 !== null && dest1 !== null) {
dest1.applyHash(as_sha256_1.digest64HashObjects(src0_1, src1_1));
dest1.applyHash(digest64HashObjects(src0_1, src1_1));
}
if (src0_2 !== null && src1_2 !== null && dest2 !== null) {
dest2.applyHash(as_sha256_1.digest64HashObjects(src0_2, src1_2));
dest2.applyHash(digest64HashObjects(src0_2, src1_2));
}
if (src0_3 !== null && src1_3 !== null && dest3 !== null) {
dest3.applyHash(as_sha256_1.digest64HashObjects(src0_3, src1_3));
dest3.applyHash(digest64HashObjects(src0_3, src1_3));
}

@@ -128,0 +133,0 @@ }

@@ -1,2 +0,2 @@

import { Hasher } from "./types";
import { Hasher } from "./types.js";
export declare const hasher: Hasher;

@@ -1,7 +0,4 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.hasher = void 0;
const hashtree_1 = require("@chainsafe/hashtree");
const hashObject_1 = require("@chainsafe/as-sha256/lib/hashObject");
const util_1 = require("./util");
import { hashInto } from "@chainsafe/hashtree";
import { byteArrayIntoHashObject } from "@chainsafe/as-sha256";
import { doDigestNLevel, doMerkleizeBlockArray, doMerkleizeBlocksBytes } from "./util.js";
/**

@@ -25,3 +22,3 @@ * Best SIMD implementation is in 512 bits = 64 bytes

const destNodes = new Array(PARALLEL_FACTOR);
exports.hasher = {
export const hasher = {
name: "hashtree",

@@ -34,3 +31,3 @@ digest64(obj1, obj2) {

hash64Input.set(obj2, 32);
hashtree_1.hashInto(hash64Input, hash64Output);
hashInto(hash64Input, hash64Output);
return hash64Output.slice();

@@ -40,10 +37,13 @@ },

hashObjectsToUint32Array(left, right, uint32Input);
hashtree_1.hashInto(hash64Input, hash64Output);
hashObject_1.byteArrayIntoHashObject(hash64Output, 0, parent);
hashInto(hash64Input, hash64Output);
byteArrayIntoHashObject(hash64Output, 0, parent);
},
merkleizeInto(data, padFor, output, offset) {
return util_1.doMerkleizeInto(data, padFor, output, offset, hashtree_1.hashInto);
merkleizeBlocksBytes(blocksBytes, padFor, output, offset) {
return doMerkleizeBlocksBytes(blocksBytes, padFor, output, offset, hashInto);
},
merkleizeBlockArray(blocks, blockLimit, padFor, output, offset) {
return doMerkleizeBlockArray(blocks, blockLimit, padFor, output, offset, hashInto, uint8Input);
},
digestNLevel(data, nLevel) {
return util_1.doDigestNLevel(data, nLevel, hashtree_1.hashInto);
return doDigestNLevel(data, nLevel, hashInto);
},

@@ -73,5 +73,5 @@ executeHashComputations(hashComputations) {

if (indexInBatch === PARALLEL_FACTOR - 1) {
hashtree_1.hashInto(uint8Input, uint8Output);
hashInto(uint8Input, uint8Output);
for (const [j, destNode] of destNodes.entries()) {
hashObject_1.byteArrayIntoHashObject(uint8Output, j * 32, destNode);
byteArrayIntoHashObject(uint8Output, j * 32, destNode);
}

@@ -86,6 +86,6 @@ }

const remainingOutput = uint8Output.subarray(0, remaining * 32);
hashtree_1.hashInto(remainingInput, remainingOutput);
hashInto(remainingInput, remainingOutput);
// destNodes was prepared above
for (let j = 0; j < remaining; j++) {
hashObject_1.byteArrayIntoHashObject(remainingOutput, j * 32, destNodes[j]);
byteArrayIntoHashObject(remainingOutput, j * 32, destNodes[j]);
}

@@ -92,0 +92,0 @@ }

@@ -1,5 +0,5 @@

import { Hasher } from "./types";
import type { HashComputationLevel } from "../hashComputation";
export * from "./types";
export * from "./util";
import { Hasher } from "./types.js";
import type { HashComputationLevel } from "../hashComputation.js";
export * from "./types.js";
export * from "./util.js";
/**

@@ -17,3 +17,4 @@ * Hasher used across the SSZ codebase, by default, this does not support batch hash.

export declare function digestNLevel(data: Uint8Array, nLevel: number): Uint8Array;
export declare function merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void;
export declare function merkleizeBlocksBytes(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void;
export declare function merkleizeBlockArray(blocks: Uint8Array[], blockLimit: number, padFor: number, output: Uint8Array, offset: number): void;
export declare function executeHashComputations(hashComputations: HashComputationLevel[]): void;

@@ -1,21 +0,8 @@

"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.executeHashComputations = exports.merkleizeInto = exports.digestNLevel = exports.digest64 = exports.setHasher = exports.hasher = void 0;
const noble_1 = require("./noble");
__exportStar(require("./types"), exports);
__exportStar(require("./util"), exports);
import { hasher as nobleHasher } from "./noble.js";
export * from "./types.js";
export * from "./util.js";
/**
* Hasher used across the SSZ codebase, by default, this does not support batch hash.
*/
exports.hasher = noble_1.hasher;
export let hasher = nobleHasher;
/**

@@ -26,22 +13,20 @@ * Set the hasher to be used across the SSZ codebase

*/
function setHasher(newHasher) {
exports.hasher = newHasher;
export function setHasher(newHasher) {
hasher = newHasher;
}
exports.setHasher = setHasher;
function digest64(a, b) {
return exports.hasher.digest64(a, b);
export function digest64(a, b) {
return hasher.digest64(a, b);
}
exports.digest64 = digest64;
function digestNLevel(data, nLevel) {
return exports.hasher.digestNLevel(data, nLevel);
export function digestNLevel(data, nLevel) {
return hasher.digestNLevel(data, nLevel);
}
exports.digestNLevel = digestNLevel;
function merkleizeInto(data, padFor, output, offset) {
exports.hasher.merkleizeInto(data, padFor, output, offset);
export function merkleizeBlocksBytes(blocksBytes, padFor, output, offset) {
hasher.merkleizeBlocksBytes(blocksBytes, padFor, output, offset);
}
exports.merkleizeInto = merkleizeInto;
function executeHashComputations(hashComputations) {
exports.hasher.executeHashComputations(hashComputations);
export function merkleizeBlockArray(blocks, blockLimit, padFor, output, offset) {
hasher.merkleizeBlockArray(blocks, blockLimit, padFor, output, offset);
}
exports.executeHashComputations = executeHashComputations;
export function executeHashComputations(hashComputations) {
hasher.executeHashComputations(hashComputations);
}
//# sourceMappingURL=index.js.map
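
Since `hasher` is now a live mutable binding (`export let hasher`), `setHasher` swaps the implementation for every wrapper in this file. A sketch of opting into the hashtree implementation; the deep-import specifier is an assumption based on the compiled file paths in this diff (lib/hasher/hashtree.js) and may differ depending on the package's exports map:

```ts
import {setHasher, digest64} from "@chainsafe/persistent-merkle-tree";
// Assumed specifier: the compiled file sits at lib/hasher/hashtree.js in this diff.
import {hasher as hashtreeHasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/hashtree.js";

// Every wrapper (digest64, digestNLevel, merkleizeBlocksBytes,
// merkleizeBlockArray, executeHashComputations) reads the current binding,
// so swapping it once at startup affects all call sites.
setHasher(hashtreeHasher);

const root = digest64(new Uint8Array(32).fill(1), new Uint8Array(32).fill(2));
```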

@@ -1,2 +0,2 @@

import type { Hasher } from "./types";
import type { Hasher } from "./types.js";
export declare const hasher: Hasher;

@@ -1,8 +0,5 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.hasher = void 0;
const sha256_1 = require("@noble/hashes/sha256");
const as_sha256_1 = require("@chainsafe/as-sha256");
const util_1 = require("./util");
const digest64 = (a, b) => sha256_1.sha256.create().update(a).update(b).digest();
import { sha256 } from "@noble/hashes/sha256";
import { digest64HashObjects, byteArrayIntoHashObject } from "@chainsafe/as-sha256";
import { BLOCK_SIZE, doDigestNLevel, doMerkleizeBlockArray, doMerkleizeBlocksBytes, hashObjectToUint8Array, } from "./util.js";
const digest64 = (a, b) => sha256.create().update(a).update(b).digest();
const hashInto = (input, output) => {

@@ -24,13 +21,18 @@ if (input.length % 64 !== 0) {

};
exports.hasher = {
/** should be multiple of 64, make it the same to as-sha256 */
const buffer = new Uint8Array(4 * BLOCK_SIZE);
export const hasher = {
name: "noble",
digest64,
digest64HashObjects: (left, right, parent) => {
as_sha256_1.byteArrayIntoHashObject(digest64(util_1.hashObjectToUint8Array(left), util_1.hashObjectToUint8Array(right)), 0, parent);
byteArrayIntoHashObject(digest64(hashObjectToUint8Array(left), hashObjectToUint8Array(right)), 0, parent);
},
merkleizeInto(data, padFor, output, offset) {
return util_1.doMerkleizeInto(data, padFor, output, offset, hashInto);
merkleizeBlocksBytes(blocksBytes, padFor, output, offset) {
return doMerkleizeBlocksBytes(blocksBytes, padFor, output, offset, hashInto);
},
merkleizeBlockArray(blocks, blockLimit, padFor, output, offset) {
return doMerkleizeBlockArray(blocks, blockLimit, padFor, output, offset, hashInto, buffer);
},
digestNLevel(data, nLevel) {
return util_1.doDigestNLevel(data, nLevel, hashInto);
return doDigestNLevel(data, nLevel, hashInto);
},

@@ -45,3 +47,3 @@ executeHashComputations: (hashComputations) => {

for (const hc of hcArr) {
hc.dest.applyHash(as_sha256_1.digest64HashObjects(hc.src0, hc.src1));
hc.dest.applyHash(digest64HashObjects(hc.src0, hc.src1));
}

@@ -48,0 +50,0 @@ }
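
digestNLevel hashes a fixed-height subtree in place: with nLevel = 3 it folds each 256-byte group (8 chunks) down to 32 bytes. A minimal sketch of calling the module-level wrapper:

```ts
import {digestNLevel} from "@chainsafe/persistent-merkle-tree";

// 8 chunks of 32 bytes = one full depth-3 subtree (256 bytes).
const data = new Uint8Array(256); // mutated in place by the call
const root = digestNLevel(data, 3); // 32-byte subtree root
```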

@@ -1,5 +0,5 @@

import type { HashObject } from "@chainsafe/as-sha256/lib/hashObject";
import type { HashComputationLevel } from "../hashComputation";
import type { HashObject } from "@chainsafe/as-sha256";
import type { HashComputationLevel } from "../hashComputation.js";
export type { HashObject };
export declare type Hasher = {
export type Hasher = {
name: string;

@@ -15,8 +15,15 @@ /**

/**
* Merkleize n chunk of data, 32 bytes each
* Merkleize n SHA256 blocks in a single Uint8Array, each block is 64 bytes
* padFor is maxChunkCount, use it to compute layers to hash
* data is mutated after the function
* blocksBytes is mutated after the function
*/
merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void;
merkleizeBlocksBytes(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void;
/**
* Merkleize n SHA256 blocks, each is 64 bytes Uint8Array
* blockLimit is the number of blocks to hash, should be <= blocks.length
* padFor is maxChunkCount, use it to compute layers to hash
* blocks are mutated after the function
*/
merkleizeBlockArray(blocks: Uint8Array[], blockLimit: number, padFor: number, output: Uint8Array, offset: number): void;
/**
* Hash multiple chunks (1 chunk = 32 bytes) at multiple levels

@@ -23,0 +30,0 @@ * With nLevel = 3, hash multiple of 256 bytes, return multiple of 32 bytes.
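
The renamed merkleizeBlocksBytes makes the unit explicit: the input is n 64-byte SHA-256 blocks in a single Uint8Array, padFor is the max chunk count used to derive the layer count, and the input buffer is clobbered. A sketch using the module-level wrapper re-exported from the package index:

```ts
import {merkleizeBlocksBytes} from "@chainsafe/persistent-merkle-tree";

const chunkCount = 4; // padFor = maxChunkCount
const blocksBytes = new Uint8Array(chunkCount * 32); // two 64-byte blocks
// ...write the serialized chunks into blocksBytes...

const output = new Uint8Array(32);
merkleizeBlocksBytes(blocksBytes, chunkCount, output, 0);
// output holds the 32-byte root; blocksBytes has been overwritten.
```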

@@ -1,3 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
export {};
//# sourceMappingURL=types.js.map

@@ -1,13 +0,27 @@

import { HashObject } from "@chainsafe/as-sha256/lib/hashObject";
import { HashObject } from "@chainsafe/as-sha256";
export declare function hashObjectToUint8Array(obj: HashObject): Uint8Array;
export declare function uint8ArrayToHashObject(byteArr: Uint8Array): HashObject;
declare type HashIntoFn = (input: Uint8Array, output: Uint8Array) => void;
type HashIntoFn = (input: Uint8Array, output: Uint8Array) => void;
/** a SHA256 block is 64 bytes */
export declare const BLOCK_SIZE = 64;
/**
* Input data is unsafe because it's modified
* If its chunk count is not even, need to be appended with zero hash at layer 0 so that we don't need
* a new memory allocation here (even through we don't need it if padFor = 1)
* The Uint8Array(32) will be written to output at offset
* Merkleize multiple SHA256 blocks in a single Uint8Array into ${output} at ${offset}
* - if padFor > 1 blocksBytes need to be multiple of 64 bytes.
* - if padFor = 1, blocksBytes need to be at least 32 bytes
* - if padFor = 0, throw error
* blocksBytes is unsafe because it's modified
*/
export declare function doMerkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number, hashInto: HashIntoFn): void;
export declare function doMerkleizeBlocksBytes(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number, hashInto: HashIntoFn): void;
/**
* Merkleize multiple SHA256 blocks into ${output} at ${offset}
* @param blockLimit number of blocks, should be <= blocks.length so that consumer can reuse memory
* @param padFor is maxChunkCount, should be >= 2
* @param blocks is unsafe because it's modified
* @param output the result is stored here
* @param offset the offset to store the result
* @param hashInto the hash function of each hasher
* @param buffer is a temporary buffer of each hasher to work with the hashInto() function
*/
export declare function doMerkleizeBlockArray(blocks: Uint8Array[], blockLimit: number, padFor: number, output: Uint8Array, offset: number, hashInto: HashIntoFn, buffer: Uint8Array): void;
/**
* Input data is unsafe because it's modified

@@ -14,0 +28,0 @@ * given nLevel = 3

@@ -1,23 +0,21 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.doDigestNLevel = exports.doMerkleizeInto = exports.uint8ArrayToHashObject = exports.hashObjectToUint8Array = void 0;
const hashObject_1 = require("@chainsafe/as-sha256/lib/hashObject");
const zeroHash_1 = require("../zeroHash");
function hashObjectToUint8Array(obj) {
import { byteArrayToHashObject, hashObjectToByteArray } from "@chainsafe/as-sha256";
import { zeroHash } from "../zeroHash.js";
export function hashObjectToUint8Array(obj) {
const byteArr = new Uint8Array(32);
hashObject_1.hashObjectToByteArray(obj, byteArr, 0);
hashObjectToByteArray(obj, byteArr, 0);
return byteArr;
}
exports.hashObjectToUint8Array = hashObjectToUint8Array;
function uint8ArrayToHashObject(byteArr) {
return hashObject_1.byteArrayToHashObject(byteArr, 0);
export function uint8ArrayToHashObject(byteArr) {
return byteArrayToHashObject(byteArr, 0);
}
exports.uint8ArrayToHashObject = uint8ArrayToHashObject;
/** a SHA256 block is 64 bytes */
export const BLOCK_SIZE = 64;
/**
* Input data is unsafe because it's modified
* If its chunk count is not even, need to be appended with zero hash at layer 0 so that we don't need
* a new memory allocation here (even through we don't need it if padFor = 1)
* The Uint8Array(32) will be written to output at offset
* Merkleize multiple SHA256 blocks in a single Uint8Array into ${output} at ${offset}
* - if padFor > 1 blocksBytes need to be multiple of 64 bytes.
* - if padFor = 1, blocksBytes need to be at least 32 bytes
* - if padFor = 0, throw error
* blocksBytes is unsafe because it's modified
*/
function doMerkleizeInto(data, padFor, output, offset, hashInto) {
export function doMerkleizeBlocksBytes(blocksBytes, padFor, output, offset, hashInto) {
if (padFor < 1) {

@@ -27,26 +25,26 @@ throw new Error(`Invalid padFor, expect to be greater than 0, got ${padFor}`);

const layerCount = Math.ceil(Math.log2(padFor));
if (data.length === 0) {
output.set(zeroHash_1.zeroHash(layerCount), offset);
if (blocksBytes.length === 0) {
output.set(zeroHash(layerCount), offset);
return;
}
if (data.length % 32 !== 0) {
throw new Error(`Invalid input length, expect to be multiple of 32 bytes, got ${data.length}`);
if (blocksBytes.length % 32 !== 0) {
throw new Error(`Invalid input length, expect to be multiple of 32 bytes, got ${blocksBytes.length}`);
}
// if padFor = 1, only need 32 bytes
if (padFor > 1 && data.length % 64 !== 0) {
throw new Error(`Invalid input length, expect to be multiple of 64 bytes, got ${data.length}, padFor=${padFor}`);
if (padFor > 1 && blocksBytes.length % BLOCK_SIZE !== 0) {
throw new Error(`Invalid input length, expect to be multiple of 64 bytes, got ${blocksBytes.length}, padFor=${padFor}`);
}
let inputLength = data.length;
let inputLength = blocksBytes.length;
let outputLength = Math.floor(inputLength / 2);
let bufferIn = data;
// hash into the same buffer
for (let i = 0; i < layerCount; i++) {
const bufferOut = data.subarray(0, outputLength);
let bufferIn = blocksBytes;
// hash into the same buffer to save memory allocation
for (let layer = 0; layer < layerCount; layer++) {
const bufferOut = blocksBytes.subarray(0, outputLength);
hashInto(bufferIn, bufferOut);
const chunkCount = Math.floor(outputLength / 32);
if (chunkCount % 2 === 1 && i < layerCount - 1) {
if (chunkCount % 2 === 1 && layer < layerCount - 1) {
// extend to 1 more chunk
inputLength = outputLength + 32;
bufferIn = data.subarray(0, inputLength);
bufferIn.set(zeroHash_1.zeroHash(i + 1), outputLength);
bufferIn = blocksBytes.subarray(0, inputLength);
bufferIn.set(zeroHash(layer + 1), outputLength);
}

@@ -61,4 +59,91 @@ else {

}
exports.doMerkleizeInto = doMerkleizeInto;
/**
* Merkleize multiple SHA256 blocks into ${output} at ${offset}
* @param blockLimit number of blocks, should be <= blocks.length so that consumer can reuse memory
* @param padFor is maxChunkCount, should be >= 2
* @param blocks is unsafe because it's modified
* @param output the result is stored here
* @param offset the offset to store the result
* @param hashInto the hash function of each hasher
* @param buffer is a temporary buffer of each hasher to work with the hashInto() function
*/
export function doMerkleizeBlockArray(blocks, blockLimit, padFor, output, offset, hashInto, buffer) {
if (padFor < 1) {
throw new Error(`Invalid padFor, expect to be at least 1, got ${padFor}`);
}
if (blockLimit > blocks.length) {
throw new Error(`Invalid blockLimit, expect to be less than or equal blocks.length ${blocks.length}, got ${blockLimit}`);
}
const layerCount = Math.ceil(Math.log2(padFor));
if (blockLimit === 0) {
output.set(zeroHash(layerCount), offset);
return;
}
for (const block of blocks) {
if (block.length !== BLOCK_SIZE) {
throw new Error(`Invalid block length, expect to be 64 bytes, got ${block.length}`);
}
}
// as-sha256 has a buffer of 4 * 64 bytes
// hashtree has a buffer of 16 * 64 bytes
if (buffer.length === 0 || buffer.length % (4 * BLOCK_SIZE) !== 0) {
throw new Error(`Invalid buffer length, expect to be multiple of 64 bytes, got ${buffer.length}`);
}
// batchSize is 4 for as-sha256, 16 for hashtree
const batchSize = Math.floor(buffer.length / BLOCK_SIZE);
const halfBatchSize = Math.floor(batchSize / 2);
let bufferIn = buffer;
// hash into the same buffer
let bufferOut = buffer.subarray(0, halfBatchSize * BLOCK_SIZE);
// ignore remaining blocks
let blockCount = blockLimit;
// hash into the same blocks to save memory allocation
for (let layer = 0; layer < layerCount; layer++) {
let outBlockIndex = 0;
const sameLayerLoop = Math.floor(blockCount / batchSize);
for (let i = 0; i < sameLayerLoop; i++) {
// populate bufferIn
for (let j = 0; j < batchSize; j++) {
const blockIndex = i * batchSize + j;
bufferIn.set(blocks[blockIndex], j * BLOCK_SIZE);
}
// hash into bufferOut
hashInto(bufferIn, bufferOut);
// copy bufferOut to blocks, bufferOut.len = halfBatchSize * BLOCK_SIZE
for (let j = 0; j < halfBatchSize; j++) {
blocks[outBlockIndex].set(bufferOut.subarray(j * BLOCK_SIZE, (j + 1) * BLOCK_SIZE));
outBlockIndex++;
}
}
// remaining blocks
const remainingBlocks = blockCount % batchSize;
bufferIn = buffer.subarray(0, remainingBlocks * BLOCK_SIZE);
bufferOut = buffer.subarray(0, Math.floor(bufferIn.length / 2));
// populate bufferIn
for (let blockIndex = Math.floor(blockCount / batchSize) * batchSize; blockIndex < blockCount; blockIndex++) {
bufferIn.set(blocks[blockIndex], (blockIndex % batchSize) * BLOCK_SIZE);
}
// hash into bufferOut
hashInto(bufferIn, bufferOut);
// copy bufferOut to blocks, note that bufferOut.len may not be divisible by BLOCK_SIZE
for (let j = 0; j < Math.floor(bufferOut.length / BLOCK_SIZE); j++) {
blocks[outBlockIndex].set(bufferOut.subarray(j * BLOCK_SIZE, (j + 1) * BLOCK_SIZE));
outBlockIndex++;
}
if (bufferOut.length % BLOCK_SIZE !== 0) {
// set the last 32 bytes of bufferOut
blocks[outBlockIndex].set(bufferOut.subarray(bufferOut.length - 32, bufferOut.length), 0);
// add zeroHash
blocks[outBlockIndex].set(zeroHash(layer + 1), 32);
outBlockIndex++;
}
// end of layer, update blockCount, bufferIn, bufferOut
blockCount = outBlockIndex;
bufferIn = buffer.subarray(0, blockCount * BLOCK_SIZE);
bufferOut = buffer.subarray(0, Math.floor(bufferIn.length / 2));
}
// the end result stays in blocks[0]
output.set(blocks[0].subarray(0, 32), offset);
}
/**
* Input data is unsafe because it's modified

@@ -70,3 +155,3 @@ * given nLevel = 3

*/
function doDigestNLevel(data, nLevel, hashInto) {
export function doDigestNLevel(data, nLevel, hashInto) {
let inputLength = data.length;

@@ -92,3 +177,2 @@ const bytesInBatch = Math.pow(2, nLevel) * 32;

}
exports.doDigestNLevel = doDigestNLevel;
//# sourceMappingURL=util.js.map
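
merkleizeBlockArray covers the case where the blocks live in separate Uint8Arrays; blockLimit lets a caller over-allocate `blocks` and hash only a prefix, reusing memory between calls. A sketch with illustrative sizes:

```ts
import {merkleizeBlockArray, BLOCK_SIZE} from "@chainsafe/persistent-merkle-tree";

// 4 blocks x 64 bytes = 8 chunks, so padFor (maxChunkCount) is 8.
const blocks = Array.from({length: 4}, () => new Uint8Array(BLOCK_SIZE));
const output = new Uint8Array(32);

merkleizeBlockArray(blocks, blocks.length, 8, output, 0);
// The blocks are mutated layer by layer; the final root lands in blocks[0]
// and is copied into output at the given offset.
```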

@@ -1,10 +0,11 @@

export * from "./gindex";
export * from "./hasher";
export * from "./node";
export * from "./hashComputation";
export * from "./packedNode";
export * from "./proof";
export * from "./subtree";
export * from "./tree";
export * from "./zeroNode";
export * from "./zeroHash";
export * from "./gindex.js";
export * from "./hasher/index.js";
export * from "./node.js";
export * from "./hashComputation.js";
export * from "./packedNode.js";
export * from "./proof/index.js";
export * from "./subtree.js";
export * from "./tree.js";
export * from "./zeroNode.js";
export * from "./zeroHash.js";
export * from "./snapshot.js";

@@ -1,23 +0,12 @@

"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
__exportStar(require("./gindex"), exports);
__exportStar(require("./hasher"), exports);
__exportStar(require("./node"), exports);
__exportStar(require("./hashComputation"), exports);
__exportStar(require("./packedNode"), exports);
__exportStar(require("./proof"), exports);
__exportStar(require("./subtree"), exports);
__exportStar(require("./tree"), exports);
__exportStar(require("./zeroNode"), exports);
__exportStar(require("./zeroHash"), exports);
export * from "./gindex.js";
export * from "./hasher/index.js";
export * from "./node.js";
export * from "./hashComputation.js";
export * from "./packedNode.js";
export * from "./proof/index.js";
export * from "./subtree.js";
export * from "./tree.js";
export * from "./zeroNode.js";
export * from "./zeroHash.js";
export * from "./snapshot.js";
//# sourceMappingURL=index.js.map

@@ -1,2 +0,2 @@

import { HashObject } from "@chainsafe/as-sha256/lib/hashObject";
import type { HashObject } from "@chainsafe/as-sha256";
/**

@@ -39,5 +39,5 @@ * An immutable binary merkle tree node

get root(): Uint8Array;
isLeaf(): boolean;
get left(): Node;
get right(): Node;
isLeaf(): boolean;
}

@@ -48,2 +48,6 @@ /**

export declare class LeafNode extends Node {
get rootHashObject(): HashObject;
get root(): Uint8Array;
get left(): Node;
get right(): Node;
static fromRoot(root: Uint8Array): LeafNode;

@@ -66,7 +70,3 @@ /**

clone(): LeafNode;
get rootHashObject(): HashObject;
get root(): Uint8Array;
isLeaf(): boolean;
get left(): Node;
get right(): Node;
writeToBytes(data: Uint8Array, start: number, size: number): void;

@@ -79,3 +79,3 @@ getUint(uintBytes: number, offsetBytes: number, clipInfinity?: boolean): number;

}
export declare type Link = (n: Node) => Node;
export type Link = (n: Node) => Node;
export declare function identity(n: Node): Node;

@@ -82,0 +82,0 @@ export declare function compose(inner: Link, outer: Link): Link;

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.bitwiseOrNodeH = exports.setNodeH = exports.getNodeH = exports.compose = exports.identity = exports.LeafNode = exports.BranchNode = exports.Node = void 0;
const hasher_1 = require("./hasher");
import { hashObjectToUint8Array, hasher, uint8ArrayToHashObject } from "./hasher/index.js";
const TWO_POWER_32 = 2 ** 32;

@@ -9,3 +6,14 @@ /**

*/
class Node {
export class Node {
/**
* May be null. This is to save an extra variable to check if a node has a root or not
*/
h0;
h1;
h2;
h3;
h4;
h5;
h6;
h7;
constructor(h0, h1, h2, h3, h4, h5, h6, h7) {

@@ -32,7 +40,8 @@ this.h0 = h0;

}
exports.Node = Node;
/**
* An immutable binary merkle tree node that has a `left` and `right` child
*/
class BranchNode extends Node {
export class BranchNode extends Node {
_left;
_right;
constructor(_left, _right) {

@@ -52,3 +61,3 @@ // First null value is to save an extra variable to check if a node has a root or not

if (this.h0 === null) {
hasher_1.hasher.digest64HashObjects(this.left.rootHashObject, this.right.rootHashObject, this);
hasher.digest64HashObjects(this.left.rootHashObject, this.right.rootHashObject, this);
}

@@ -58,7 +67,4 @@ return this;

get root() {
return hasher_1.hashObjectToUint8Array(this.rootHashObject);
return hashObjectToUint8Array(this.rootHashObject);
}
isLeaf() {
return false;
}
get left() {

@@ -70,10 +76,24 @@ return this._left;

}
isLeaf() {
return false;
}
}
exports.BranchNode = BranchNode;
/**
* An immutable binary merkle tree node that has no children
*/
class LeafNode extends Node {
export class LeafNode extends Node {
get rootHashObject() {
return this;
}
get root() {
return hashObjectToUint8Array(this);
}
get left() {
throw Error("LeafNode has no left node");
}
get right() {
throw Error("LeafNode has no right node");
}
static fromRoot(root) {
return this.fromHashObject(hasher_1.uint8ArrayToHashObject(root));
return this.fromHashObject(uint8ArrayToHashObject(root));
}

@@ -104,17 +124,5 @@ /**

}
get rootHashObject() {
return this;
}
get root() {
return hasher_1.hashObjectToUint8Array(this);
}
isLeaf() {
return true;
}
get left() {
throw Error("LeafNode has no left node");
}
get right() {
throw Error("LeafNode has no right node");
}
writeToBytes(data, start, size) {

@@ -275,8 +283,6 @@ // TODO: Optimize

}
exports.LeafNode = LeafNode;
function identity(n) {
export function identity(n) {
return n;
}
exports.identity = identity;
function compose(inner, outer) {
export function compose(inner, outer) {
return function (n) {

@@ -286,4 +292,3 @@ return outer(inner(n));

}
exports.compose = compose;
function getNodeH(node, hIndex) {
export function getNodeH(node, hIndex) {
if (hIndex === 0)

@@ -308,4 +313,3 @@ return node.h0;

}
exports.getNodeH = getNodeH;
function setNodeH(node, hIndex, value) {
export function setNodeH(node, hIndex, value) {
if (hIndex === 0)

@@ -330,4 +334,3 @@ node.h0 = value;

}
exports.setNodeH = setNodeH;
function bitwiseOrNodeH(node, hIndex, value) {
export function bitwiseOrNodeH(node, hIndex, value) {
if (hIndex === 0)

@@ -352,3 +355,2 @@ node.h0 |= value;

}
exports.bitwiseOrNodeH = bitwiseOrNodeH;
//# sourceMappingURL=node.js.map
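
The node classes keep the tree immutable and hash lazily: a BranchNode is constructed with h0 = null and only computes its digest on first access to root/rootHashObject, caching it in h0..h7. A minimal sketch:

```ts
import {LeafNode, BranchNode} from "@chainsafe/persistent-merkle-tree";

const left = LeafNode.fromRoot(new Uint8Array(32).fill(1));
const right = LeafNode.fromRoot(new Uint8Array(32).fill(2));
const parent = new BranchNode(left, right);

parent.isLeaf(); // false
parent.root;     // hashes left.root || right.root once, then caches the result
```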

@@ -1,2 +0,2 @@

import { Node, LeafNode } from "./node";
import { Node, LeafNode } from "./node.js";
export declare function packedRootsBytesToNode(depth: number, dataView: DataView, start: number, end: number): Node;

@@ -3,0 +3,0 @@ /**

@@ -1,12 +0,8 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.packedNodeRootsToBytes = exports.packedRootsBytesToLeafNodes = exports.packedUintNum64sToLeafNodes = exports.packedRootsBytesToNode = void 0;
const subtree_1 = require("./subtree");
const node_1 = require("./node");
import { subtreeFillToContents } from "./subtree.js";
import { LeafNode, getNodeH, setNodeH } from "./node.js";
const NUMBER_2_POW_32 = 2 ** 32;
function packedRootsBytesToNode(depth, dataView, start, end) {
export function packedRootsBytesToNode(depth, dataView, start, end) {
const leafNodes = packedRootsBytesToLeafNodes(dataView, start, end);
return subtree_1.subtreeFillToContents(leafNodes, depth);
return subtreeFillToContents(leafNodes, depth);
}
exports.packedRootsBytesToNode = packedRootsBytesToNode;
/**

@@ -23,7 +19,7 @@ * Pack a list of uint64 numbers into a list of LeafNodes.

*/
function packedUintNum64sToLeafNodes(values) {
export function packedUintNum64sToLeafNodes(values) {
const leafNodes = new Array(Math.ceil(values.length / 4));
for (let i = 0; i < values.length; i++) {
const nodeIndex = Math.floor(i / 4);
const leafNode = leafNodes[nodeIndex] ?? new node_1.LeafNode(0, 0, 0, 0, 0, 0, 0, 0);
const leafNode = leafNodes[nodeIndex] ?? new LeafNode(0, 0, 0, 0, 0, 0, 0, 0);
const vIndex = i % 4;

@@ -34,8 +30,8 @@ const hIndex = 2 * vIndex;

if (value === Infinity) {
node_1.setNodeH(leafNode, hIndex, 0xffffffff);
node_1.setNodeH(leafNode, hIndex + 1, 0xffffffff);
setNodeH(leafNode, hIndex, 0xffffffff);
setNodeH(leafNode, hIndex + 1, 0xffffffff);
}
else {
node_1.setNodeH(leafNode, hIndex, value & 0xffffffff);
node_1.setNodeH(leafNode, hIndex + 1, (value / NUMBER_2_POW_32) & 0xffffffff);
setNodeH(leafNode, hIndex, value & 0xffffffff);
setNodeH(leafNode, hIndex + 1, (value / NUMBER_2_POW_32) & 0xffffffff);
}

@@ -46,7 +42,6 @@ leafNodes[nodeIndex] = leafNode;

}
exports.packedUintNum64sToLeafNodes = packedUintNum64sToLeafNodes;
/**
* Optimized deserialization of linear bytes to consecutive leaf nodes
*/
function packedRootsBytesToLeafNodes(dataView, start, end) {
export function packedRootsBytesToLeafNodes(dataView, start, end) {
const size = end - start;

@@ -62,3 +57,3 @@ // If the offset in data is not a multiple of 4, Uint32Array can't be used

const offset = start + i * 32;
leafNodes[i] = new node_1.LeafNode(dataView.getInt32(offset + 0, true), dataView.getInt32(offset + 4, true), dataView.getInt32(offset + 8, true), dataView.getInt32(offset + 12, true), dataView.getInt32(offset + 16, true), dataView.getInt32(offset + 20, true), dataView.getInt32(offset + 24, true), dataView.getInt32(offset + 28, true));
leafNodes[i] = new LeafNode(dataView.getInt32(offset + 0, true), dataView.getInt32(offset + 4, true), dataView.getInt32(offset + 8, true), dataView.getInt32(offset + 12, true), dataView.getInt32(offset + 16, true), dataView.getInt32(offset + 20, true), dataView.getInt32(offset + 24, true), dataView.getInt32(offset + 28, true));
}

@@ -69,3 +64,3 @@ // Consider that the last node may only include partial data

if (remainderBytes > 0) {
const node = new node_1.LeafNode(0, 0, 0, 0, 0, 0, 0, 0);
const node = new LeafNode(0, 0, 0, 0, 0, 0, 0, 0);
leafNodes[fullNodeCount] = node;

@@ -75,3 +70,3 @@ // Loop to dynamically copy the full h values

for (let h = 0; h < fullHCount; h++) {
node_1.setNodeH(node, h, dataView.getInt32(start + fullNodeCount * 32 + h * 4, true));
setNodeH(node, h, dataView.getInt32(start + fullNodeCount * 32 + h * 4, true));
}

@@ -84,3 +79,3 @@ const remainderUint32 = size % 4;

}
node_1.setNodeH(node, fullHCount, h);
setNodeH(node, fullHCount, h);
}

@@ -90,7 +85,6 @@ }

}
exports.packedRootsBytesToLeafNodes = packedRootsBytesToLeafNodes;
/**
* Optimized serialization of consecutive leaf nodes to linear bytes
*/
function packedNodeRootsToBytes(dataView, start, size, nodes) {
export function packedNodeRootsToBytes(dataView, start, size, nodes) {
// If the offset in data is not a multiple of 4, Uint32Array can't be used

@@ -122,7 +116,7 @@ // > start offset of Uint32Array should be a multiple of 4

for (let h = 0; h < fullHCount; h++) {
dataView.setInt32(start + fullNodeCount * 32 + h * 4, node_1.getNodeH(node, h), true);
dataView.setInt32(start + fullNodeCount * 32 + h * 4, getNodeH(node, h), true);
}
const remainderUint32 = size % 4;
if (remainderUint32 > 0) {
const h = node_1.getNodeH(node, fullHCount);
const h = getNodeH(node, fullHCount);
for (let i = 0; i < remainderUint32; i++) {

@@ -134,3 +128,2 @@ dataView.setUint8(start + size - remainderUint32 + i, (h >> (i * 8)) & 0xff);

}
exports.packedNodeRootsToBytes = packedNodeRootsToBytes;
//# sourceMappingURL=packedNode.js.map
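
packedUintNum64sToLeafNodes packs four uint64 number values per 32-byte leaf, mapping Infinity to all-ones as the code above shows. A sketch that packs five values into two leaves and fills a depth-1 subtree (values are illustrative):

```ts
import {
  packedUintNum64sToLeafNodes,
  subtreeFillToContents,
} from "@chainsafe/persistent-merkle-tree";

// 5 values -> ceil(5 / 4) = 2 LeafNodes; Infinity becomes 0xffffffffffffffff.
const leafNodes = packedUintNum64sToLeafNodes([1, 2, 3, Infinity, 5]);
const rootNode = subtreeFillToContents(leafNodes, 1); // depth 1 holds 2 leaves
```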

@@ -1,3 +0,3 @@

import { Gindex } from "../gindex";
import { Node } from "../node";
import { Gindex } from "../gindex.js";
import { Node } from "../node.js";
export declare function computeDescriptor(indices: Gindex[]): Uint8Array;

@@ -4,0 +4,0 @@ export declare function descriptorToBitlist(descriptor: Uint8Array): boolean[];

@@ -1,8 +0,5 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createNodeFromCompactMultiProof = exports.createCompactMultiProof = exports.compactMultiProofToNode = exports.nodeToCompactMultiProof = exports.descriptorToBitlist = exports.computeDescriptor = void 0;
const gindex_1 = require("../gindex");
const node_1 = require("../node");
const util_1 = require("./util");
function computeDescriptor(indices) {
import { convertGindexToBitstring } from "../gindex.js";
import { BranchNode, LeafNode } from "../node.js";
import { computeProofBitstrings } from "./util.js";
export function computeDescriptor(indices) {
// include all helper indices

@@ -12,5 +9,5 @@ const proofBitstrings = new Set();

for (const leafIndex of indices) {
const leafBitstring = gindex_1.convertGindexToBitstring(leafIndex);
const leafBitstring = convertGindexToBitstring(leafIndex);
proofBitstrings.add(leafBitstring);
const { branch, path } = util_1.computeProofBitstrings(leafBitstring);
const { branch, path } = computeProofBitstrings(leafBitstring);
path.delete(leafBitstring);

@@ -50,3 +47,2 @@ for (const pathIndex of path) {

}
exports.computeDescriptor = computeDescriptor;
function getBit(bitlist, bitIndex) {

@@ -77,3 +73,3 @@ const bit = bitIndex % 8;

}
function descriptorToBitlist(descriptor) {
export function descriptorToBitlist(descriptor) {
const bools = [];

@@ -108,4 +104,3 @@ const maxBitLength = descriptor.length * 8;

}
exports.descriptorToBitlist = descriptorToBitlist;
function nodeToCompactMultiProof(node, bitlist, bitIndex) {
export function nodeToCompactMultiProof(node, bitlist, bitIndex) {
if (bitlist[bitIndex]) {

@@ -120,3 +115,2 @@ return [node.root];

}
exports.nodeToCompactMultiProof = nodeToCompactMultiProof;
/**

@@ -127,16 +121,14 @@ * Create a Node given a validated bitlist, leaves, and a pointer into the bitlist and leaves

*/
function compactMultiProofToNode(bitlist, leaves, pointer) {
export function compactMultiProofToNode(bitlist, leaves, pointer) {
if (bitlist[pointer.bitIndex++]) {
return node_1.LeafNode.fromRoot(leaves[pointer.leafIndex++]);
return LeafNode.fromRoot(leaves[pointer.leafIndex++]);
}
else {
return new node_1.BranchNode(compactMultiProofToNode(bitlist, leaves, pointer), compactMultiProofToNode(bitlist, leaves, pointer));
return new BranchNode(compactMultiProofToNode(bitlist, leaves, pointer), compactMultiProofToNode(bitlist, leaves, pointer));
}
}
exports.compactMultiProofToNode = compactMultiProofToNode;
function createCompactMultiProof(rootNode, descriptor) {
export function createCompactMultiProof(rootNode, descriptor) {
return nodeToCompactMultiProof(rootNode, descriptorToBitlist(descriptor), 0);
}
exports.createCompactMultiProof = createCompactMultiProof;
function createNodeFromCompactMultiProof(leaves, descriptor) {
export function createNodeFromCompactMultiProof(leaves, descriptor) {
const bools = descriptorToBitlist(descriptor);

@@ -148,3 +140,2 @@ if (bools.length !== leaves.length * 2 - 1) {

}
exports.createNodeFromCompactMultiProof = createNodeFromCompactMultiProof;
//# sourceMappingURL=compactMulti.js.map
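
A compact multiproof stores only a descriptor bitlist plus the leaf and witness roots in traversal order. A round-trip sketch over a zero tree (zeroNode is re-exported by the package index; the gindices are illustrative):

```ts
import {
  computeDescriptor,
  createCompactMultiProof,
  createNodeFromCompactMultiProof,
  zeroNode,
} from "@chainsafe/persistent-merkle-tree";

const rootNode = zeroNode(3); // depth-3 tree of zero leaves
const descriptor = computeDescriptor([BigInt(9), BigInt(11)]);

const leaves = createCompactMultiProof(rootNode, descriptor);
const reconstructed = createNodeFromCompactMultiProof(leaves, descriptor);
// reconstructed.root equals rootNode.root
```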

@@ -1,4 +0,4 @@

import { Gindex } from "../gindex";
import { Node } from "../node";
export { computeDescriptor, descriptorToBitlist } from "./compactMulti";
import { Gindex } from "../gindex.js";
import { Node } from "../node.js";
export { computeDescriptor, descriptorToBitlist } from "./compactMulti.js";
export declare enum ProofType {

@@ -50,3 +50,3 @@ single = "single",

}
export declare type Proof = SingleProof | TreeOffsetProof | MultiProof | CompactMultiProof;
export type Proof = SingleProof | TreeOffsetProof | MultiProof | CompactMultiProof;
export interface SingleProofInput {

@@ -68,3 +68,3 @@ type: ProofType.single;

}
export declare type ProofInput = SingleProofInput | TreeOffsetProofInput | MultiProofInput | CompactMultiProofInput;
export type ProofInput = SingleProofInput | TreeOffsetProofInput | MultiProofInput | CompactMultiProofInput;
export declare function createProof(rootNode: Node, input: ProofInput): Proof;

@@ -71,0 +71,0 @@ export declare function createNodeFromProof(proof: Proof): Node;

@@ -1,12 +0,7 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.deserializeProof = exports.serializeProof = exports.createNodeFromProof = exports.createProof = exports.ProofTypeSerialized = exports.ProofType = exports.descriptorToBitlist = exports.computeDescriptor = void 0;
const multi_1 = require("./multi");
const compactMulti_1 = require("./compactMulti");
const single_1 = require("./single");
const treeOffset_1 = require("./treeOffset");
var compactMulti_2 = require("./compactMulti");
Object.defineProperty(exports, "computeDescriptor", { enumerable: true, get: function () { return compactMulti_2.computeDescriptor; } });
Object.defineProperty(exports, "descriptorToBitlist", { enumerable: true, get: function () { return compactMulti_2.descriptorToBitlist; } });
var ProofType;
import { createMultiProof, createNodeFromMultiProof } from "./multi.js";
import { createNodeFromCompactMultiProof, createCompactMultiProof } from "./compactMulti.js";
import { createNodeFromSingleProof, createSingleProof } from "./single.js";
import { computeTreeOffsetProofSerializedLength, createNodeFromTreeOffsetProof, createTreeOffsetProof, deserializeTreeOffsetProof, serializeTreeOffsetProof, } from "./treeOffset.js";
export { computeDescriptor, descriptorToBitlist } from "./compactMulti.js";
export var ProofType;
(function (ProofType) {

@@ -17,16 +12,16 @@ ProofType["single"] = "single";

ProofType["compactMulti"] = "compactMulti";
})(ProofType = exports.ProofType || (exports.ProofType = {}));
})(ProofType || (ProofType = {}));
/**
* Serialized proofs are prepended with a single byte, denoting their type
*/
exports.ProofTypeSerialized = [
ProofType.single,
ProofType.treeOffset,
ProofType.multi,
export const ProofTypeSerialized = [
ProofType.single, // 0
ProofType.treeOffset, // 1
ProofType.multi, // 2
ProofType.compactMulti, // 3
];
function createProof(rootNode, input) {
export function createProof(rootNode, input) {
switch (input.type) {
case ProofType.single: {
const [leaf, witnesses] = single_1.createSingleProof(rootNode, input.gindex);
const [leaf, witnesses] = createSingleProof(rootNode, input.gindex);
return {

@@ -40,3 +35,3 @@ type: ProofType.single,

case ProofType.treeOffset: {
const [offsets, leaves] = treeOffset_1.createTreeOffsetProof(rootNode, input.gindices);
const [offsets, leaves] = createTreeOffsetProof(rootNode, input.gindices);
return {

@@ -49,3 +44,3 @@ type: ProofType.treeOffset,

case ProofType.multi: {
const [leaves, witnesses, gindices] = multi_1.createMultiProof(rootNode, input.gindices);
const [leaves, witnesses, gindices] = createMultiProof(rootNode, input.gindices);
return {

@@ -59,3 +54,3 @@ type: ProofType.multi,

case ProofType.compactMulti: {
const leaves = compactMulti_1.createCompactMultiProof(rootNode, input.descriptor);
const leaves = createCompactMultiProof(rootNode, input.descriptor);
return {

@@ -71,13 +66,12 @@ type: ProofType.compactMulti,

}
exports.createProof = createProof;
function createNodeFromProof(proof) {
export function createNodeFromProof(proof) {
switch (proof.type) {
case ProofType.single:
return single_1.createNodeFromSingleProof(proof.gindex, proof.leaf, proof.witnesses);
return createNodeFromSingleProof(proof.gindex, proof.leaf, proof.witnesses);
case ProofType.treeOffset:
return treeOffset_1.createNodeFromTreeOffsetProof(proof.offsets, proof.leaves);
return createNodeFromTreeOffsetProof(proof.offsets, proof.leaves);
case ProofType.multi:
return multi_1.createNodeFromMultiProof(proof.leaves, proof.witnesses, proof.gindices);
return createNodeFromMultiProof(proof.leaves, proof.witnesses, proof.gindices);
case ProofType.compactMulti:
return compactMulti_1.createNodeFromCompactMultiProof(proof.leaves, proof.descriptor);
return createNodeFromCompactMultiProof(proof.leaves, proof.descriptor);
default:

@@ -87,4 +81,3 @@ throw new Error("Invalid proof type");

}
exports.createNodeFromProof = createNodeFromProof;
function serializeProof(proof) {
export function serializeProof(proof) {
switch (proof.type) {

@@ -95,5 +88,5 @@ case ProofType.single:

case ProofType.treeOffset: {
const output = new Uint8Array(1 + treeOffset_1.computeTreeOffsetProofSerializedLength(proof.offsets, proof.leaves));
output[0] = exports.ProofTypeSerialized.indexOf(ProofType.treeOffset);
treeOffset_1.serializeTreeOffsetProof(output, 1, proof.offsets, proof.leaves);
const output = new Uint8Array(1 + computeTreeOffsetProofSerializedLength(proof.offsets, proof.leaves));
output[0] = ProofTypeSerialized.indexOf(ProofType.treeOffset);
serializeTreeOffsetProof(output, 1, proof.offsets, proof.leaves);
return output;

@@ -105,5 +98,4 @@ }

}
exports.serializeProof = serializeProof;
function deserializeProof(data) {
const proofType = exports.ProofTypeSerialized[data[0]];
export function deserializeProof(data) {
const proofType = ProofTypeSerialized[data[0]];
if (!proofType) {

@@ -117,3 +109,3 @@ throw new Error("Invalid proof type");

case ProofType.treeOffset: {
const [offsets, leaves] = treeOffset_1.deserializeTreeOffsetProof(data, 1);
const [offsets, leaves] = deserializeTreeOffsetProof(data, 1);
return {

@@ -129,3 +121,2 @@ type: ProofType.treeOffset,

}
exports.deserializeProof = deserializeProof;
//# sourceMappingURL=index.js.map
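
createProof/createNodeFromProof dispatch on ProofType, and serializeProof prepends a single type byte indexed through ProofTypeSerialized. A round-trip sketch for a tree-offset proof, the serializable variant shown above (zeroNode and the gindices are illustrative):

```ts
import {
  createProof,
  createNodeFromProof,
  serializeProof,
  deserializeProof,
  ProofType,
  toGindex,
  zeroNode,
} from "@chainsafe/persistent-merkle-tree";

const rootNode = zeroNode(4);
const gindices = [toGindex(4, BigInt(0)), toGindex(4, BigInt(3))];

const proof = createProof(rootNode, {type: ProofType.treeOffset, gindices});
const parsed = deserializeProof(serializeProof(proof)); // [type byte | offsets | leaves]
// createNodeFromProof(parsed).root equals rootNode.root
```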

@@ -1,3 +0,3 @@

import { Gindex } from "../gindex";
import { Node } from "../node";
import { Gindex } from "../gindex.js";
import { Node } from "../node.js";
/**

@@ -4,0 +4,0 @@ * Create an multiproof

@@ -1,7 +0,4 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createNodeFromMultiProof = exports.createMultiProof = void 0;
const node_1 = require("../node");
const tree_1 = require("../tree");
const util_1 = require("./util");
import { BranchNode, LeafNode } from "../node.js";
import { Tree } from "../tree.js";
import { computeMultiProofBitstrings, SortOrder } from "./util.js";
/**

@@ -15,5 +12,5 @@ * Create an multiproof

*/
function createMultiProof(rootNode, gindices) {
const tree = new tree_1.Tree(rootNode);
const witnessGindices = util_1.computeMultiProofBitstrings(gindices.map((gindex) => gindex.toString(2)), false, util_1.SortOrder.Decreasing);
export function createMultiProof(rootNode, gindices) {
const tree = new Tree(rootNode);
const witnessGindices = computeMultiProofBitstrings(gindices.map((gindex) => gindex.toString(2)), false, SortOrder.Decreasing);
const leafGindices = gindices.slice().sort((a, b) => (a < b ? 1 : -1));

@@ -24,3 +21,2 @@ const leaves = leafGindices.map((gindex) => tree.getRoot(gindex));

}
exports.createMultiProof = createMultiProof;
/**

@@ -35,3 +31,3 @@ * Recreate a `Node` given a multiproof

*/
function createNodeFromMultiProof(leaves, witnesses, gindices) {
export function createNodeFromMultiProof(leaves, witnesses, gindices) {
if (leaves.length !== gindices.length) {

@@ -41,3 +37,3 @@ throw new Error("Leaves length should equal gindices length");

const leafBitstrings = gindices.map((gindex) => gindex.toString(2));
const witnessBitstrings = util_1.computeMultiProofBitstrings(leafBitstrings, false, util_1.SortOrder.Decreasing);
const witnessBitstrings = computeMultiProofBitstrings(leafBitstrings, false, SortOrder.Decreasing);
if (witnessBitstrings.length !== witnesses.length) {

@@ -58,3 +54,3 @@ throw new Error("Witnesses length should equal witnesses gindices length");

const leaf = leaves[i];
levels[leafBitstring.length][leafBitstring] = node_1.LeafNode.fromRoot(leaf);
levels[leafBitstring.length][leafBitstring] = LeafNode.fromRoot(leaf);
}

@@ -64,3 +60,3 @@ for (let i = 0; i < witnessBitstrings.length; i++) {

const witness = witnesses[i];
levels[witnessBitstring.length][witnessBitstring] = node_1.LeafNode.fromRoot(witness);
levels[witnessBitstring.length][witnessBitstring] = LeafNode.fromRoot(witness);
}

@@ -84,3 +80,3 @@ for (let i = maxLevel; i > 1; i--) {

// store the parent node
const parentNode = isLeft ? new node_1.BranchNode(node, siblingNode) : new node_1.BranchNode(siblingNode, node);
const parentNode = isLeft ? new BranchNode(node, siblingNode) : new BranchNode(siblingNode, node);
parentLevel[parentBitstring] = parentNode;

@@ -98,3 +94,2 @@ // delete the used nodes

}
exports.createNodeFromMultiProof = createNodeFromMultiProof;
//# sourceMappingURL=multi.js.map

@@ -1,5 +0,5 @@

import { Node } from "../node";
import { Gindex } from "../gindex";
import { Node } from "../node.js";
import { Gindex } from "../gindex.js";
export declare const ERR_INVALID_NAV = "Invalid tree navigation";
export declare function createSingleProof(rootNode: Node, index: Gindex): [Uint8Array, Uint8Array[]];
export declare function createNodeFromSingleProof(gindex: Gindex, leaf: Uint8Array, witnesses: Uint8Array[]): Node;

@@ -1,14 +0,11 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createNodeFromSingleProof = exports.createSingleProof = exports.ERR_INVALID_NAV = void 0;
const node_1 = require("../node");
const gindex_1 = require("../gindex");
exports.ERR_INVALID_NAV = "Invalid tree navigation";
function createSingleProof(rootNode, index) {
import { BranchNode, LeafNode } from "../node.js";
import { gindexIterator } from "../gindex.js";
export const ERR_INVALID_NAV = "Invalid tree navigation";
export function createSingleProof(rootNode, index) {
const witnesses = [];
let node = rootNode;
for (const i of gindex_1.gindexIterator(index)) {
for (const i of gindexIterator(index)) {
if (i) {
if (node.isLeaf())
throw new Error(exports.ERR_INVALID_NAV);
throw new Error(ERR_INVALID_NAV);
witnesses.push(node.left.root);

@@ -19,3 +16,3 @@ node = node.right;

if (node.isLeaf())
throw new Error(exports.ERR_INVALID_NAV);
throw new Error(ERR_INVALID_NAV);
witnesses.push(node.right.root);

@@ -27,13 +24,12 @@ node = node.left;

}
exports.createSingleProof = createSingleProof;
function createNodeFromSingleProof(gindex, leaf, witnesses) {
let node = node_1.LeafNode.fromRoot(leaf);
export function createNodeFromSingleProof(gindex, leaf, witnesses) {
let node = LeafNode.fromRoot(leaf);
const w = witnesses.slice().reverse();
while (gindex > 1) {
const sibling = node_1.LeafNode.fromRoot(w.pop());
const sibling = LeafNode.fromRoot(w.pop());
if (gindex % BigInt(2) === BigInt(0)) {
node = new node_1.BranchNode(node, sibling);
node = new BranchNode(node, sibling);
}
else {
node = new node_1.BranchNode(sibling, node);
node = new BranchNode(sibling, node);
}

@@ -44,3 +40,2 @@ gindex = gindex / BigInt(2);

}
exports.createNodeFromSingleProof = createNodeFromSingleProof;
//# sourceMappingURL=single.js.map
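
A single proof is just the leaf plus one sibling root per level; reconstruction replays the gindex bits from the bottom up, pairing each popped witness as left or right sibling. A sketch over a zero tree:

```ts
import {
  createSingleProof,
  createNodeFromSingleProof,
  toGindex,
  zeroNode,
} from "@chainsafe/persistent-merkle-tree";

const rootNode = zeroNode(3);
const gindex = toGindex(3, BigInt(2)); // 10n

const [leaf, witnesses] = createSingleProof(rootNode, gindex); // 3 witnesses
const reconstructed = createNodeFromSingleProof(gindex, leaf, witnesses);
// reconstructed.root equals rootNode.root
```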

@@ -1,3 +0,3 @@

import { Gindex, GindexBitstring } from "../gindex";
import { Node } from "../node";
import { Gindex, GindexBitstring } from "../gindex.js";
import { Node } from "../node.js";
/**

@@ -4,0 +4,0 @@ * Compute offsets and leaves of a tree-offset proof

@@ -1,6 +0,3 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.deserializeTreeOffsetProof = exports.serializeTreeOffsetProof = exports.computeTreeOffsetProofSerializedLength = exports.createNodeFromTreeOffsetProof = exports.createTreeOffsetProof = exports.treeOffsetProofToNode = exports.nodeToTreeOffsetProof = void 0;
const node_1 = require("../node");
const util_1 = require("./util");
import { BranchNode, LeafNode } from "../node.js";
import { computeMultiProofBitstrings } from "./util.js";
/**

@@ -16,3 +13,3 @@ * Compute offsets and leaves of a tree-offset proof

*/
function nodeToTreeOffsetProof(node, gindex, proofGindices) {
export function nodeToTreeOffsetProof(node, gindex, proofGindices) {
if (!proofGindices.length || !proofGindices[0].startsWith(gindex)) {

@@ -36,3 +33,2 @@ // there are no proof indices left OR the current subtree contains no remaining proof indices

}
exports.nodeToTreeOffsetProof = nodeToTreeOffsetProof;
/**

@@ -45,3 +41,3 @@ * Recreate a `Node` given offsets and leaves of a tree-offset proof

*/
function treeOffsetProofToNode(offsets, leaves) {
export function treeOffsetProofToNode(offsets, leaves) {
if (!leaves.length) {

@@ -51,3 +47,3 @@ throw new Error("Proof must contain gt 0 leaves");

else if (leaves.length === 1) {
return node_1.LeafNode.fromRoot(leaves[0]);
return LeafNode.fromRoot(leaves[0]);
}

@@ -57,6 +53,5 @@ else {

const pivot = offsets[0];
return new node_1.BranchNode(treeOffsetProofToNode(offsets.slice(1, pivot), leaves.slice(0, pivot)), treeOffsetProofToNode(offsets.slice(pivot), leaves.slice(pivot)));
return new BranchNode(treeOffsetProofToNode(offsets.slice(1, pivot), leaves.slice(0, pivot)), treeOffsetProofToNode(offsets.slice(pivot), leaves.slice(pivot)));
}
}
exports.treeOffsetProofToNode = treeOffsetProofToNode;
/**

@@ -68,6 +63,5 @@ * Create a tree-offset proof

*/
function createTreeOffsetProof(rootNode, gindices) {
return nodeToTreeOffsetProof(rootNode, "1", util_1.computeMultiProofBitstrings(gindices.map((g) => g.toString(2))));
export function createTreeOffsetProof(rootNode, gindices) {
return nodeToTreeOffsetProof(rootNode, "1", computeMultiProofBitstrings(gindices.map((g) => g.toString(2))));
}
exports.createTreeOffsetProof = createTreeOffsetProof;
/**

@@ -79,12 +73,10 @@ * Recreate a `Node` given a tree-offset proof

*/
function createNodeFromTreeOffsetProof(offsets, leaves) {
export function createNodeFromTreeOffsetProof(offsets, leaves) {
// TODO validation
return treeOffsetProofToNode(offsets, leaves);
}
exports.createNodeFromTreeOffsetProof = createNodeFromTreeOffsetProof;
function computeTreeOffsetProofSerializedLength(offsets, leaves) {
export function computeTreeOffsetProofSerializedLength(offsets, leaves) {
// add 1 for # of leaves
return (offsets.length + 1) * 2 + leaves.length * 32;
}
exports.computeTreeOffsetProofSerializedLength = computeTreeOffsetProofSerializedLength;
// Serialized tree offset proof structure:

@@ -94,3 +86,3 @@ // # of leaves - 2 bytes

// leaves - 32 bytes each
function serializeTreeOffsetProof(output, byteOffset, offsets, leaves) {
export function serializeTreeOffsetProof(output, byteOffset, offsets, leaves) {
const writer = new DataView(output.buffer, output.byteOffset, output.byteLength);

@@ -110,4 +102,3 @@ // set # of leaves

}
exports.serializeTreeOffsetProof = serializeTreeOffsetProof;
function deserializeTreeOffsetProof(data, byteOffset) {
export function deserializeTreeOffsetProof(data, byteOffset) {
const reader = new DataView(data.buffer, data.byteOffset, data.byteLength);

@@ -127,3 +118,2 @@ // get # of leaves

}
exports.deserializeTreeOffsetProof = deserializeTreeOffsetProof;
//# sourceMappingURL=treeOffset.js.map
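
A hedged sketch of the tree-offset helpers above, over the same kind of four-leaf fixture. It assumes these functions are re-exported from the package root (in the source they live in proof/treeOffset) and that createTreeOffsetProof returns an [offsets, leaves] pair, as the recursion over nodeToTreeOffsetProof suggests.

import {
  BranchNode,
  LeafNode,
  createTreeOffsetProof,
  createNodeFromTreeOffsetProof,
  computeTreeOffsetProofSerializedLength,
} from "@chainsafe/persistent-merkle-tree";

const leaves = [1, 2, 3, 4].map((i) => LeafNode.fromRoot(new Uint8Array(32).fill(i)));
const rootNode = new BranchNode(
  new BranchNode(leaves[0], leaves[1]),
  new BranchNode(leaves[2], leaves[3])
);

// One proof covering gindices 4 and 5 (the two left-most leaves).
const [offsets, proofLeaves] = createTreeOffsetProof(rootNode, [BigInt(4), BigInt(5)]);

// Per the layout above: 2 bytes per offset + 2 bytes for the leaf count + 32 bytes per leaf.
console.log(computeTreeOffsetProofSerializedLength(offsets, proofLeaves));

// Round trip: the rebuilt subtree carries the same root hash.
const rebuilt = createNodeFromTreeOffsetProof(offsets, proofLeaves);
console.log(rebuilt.root.every((b, i) => b === rootNode.root[i])); // true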

@@ -1,2 +0,2 @@

import { Gindex, GindexBitstring } from "../gindex";
import { Gindex, GindexBitstring } from "../gindex.js";
/**

@@ -3,0 +3,0 @@ * Compute both the path and branch indices

@@ -1,5 +0,2 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.computeMultiProofBitstrings = exports.SortOrder = exports.filterParentBitstrings = exports.sortDecreasingBitstrings = exports.sortInOrderBitstrings = exports.computeProofBitstrings = exports.computeProofGindices = void 0;
const gindex_1 = require("../gindex");
import { gindexParent, gindexSibling } from "../gindex.js";
// Not currently in use, but a simpler implementation useful for testing

@@ -12,3 +9,3 @@ /**

*/
function computeProofGindices(gindex) {
export function computeProofGindices(gindex) {
const path = new Set();

@@ -19,8 +16,7 @@ const branch = new Set();

path.add(g);
branch.add(gindex_1.gindexSibling(g));
g = gindex_1.gindexParent(g);
branch.add(gindexSibling(g));
g = gindexParent(g);
}
return { path, branch };
}
exports.computeProofGindices = computeProofGindices;
/**

@@ -32,3 +28,3 @@ * Compute both the path and branch indices

*/
function computeProofBitstrings(gindex) {
export function computeProofBitstrings(gindex) {
const path = new Set();

@@ -46,3 +42,2 @@ const branch = new Set();

}
exports.computeProofBitstrings = computeProofBitstrings;
/**

@@ -52,3 +47,3 @@ * Sort generalized indices in-order

*/
function sortInOrderBitstrings(gindices, bitLength) {
export function sortInOrderBitstrings(gindices, bitLength) {
if (!gindices.length) {

@@ -62,7 +57,6 @@ return [];

}
exports.sortInOrderBitstrings = sortInOrderBitstrings;
/**
* Sort generalized indices in decreasing order
*/
function sortDecreasingBitstrings(gindices) {
export function sortDecreasingBitstrings(gindices) {
if (!gindices.length) {

@@ -99,7 +93,6 @@ return [];

}
exports.sortDecreasingBitstrings = sortDecreasingBitstrings;
/**
* Filter out parent generalized indices
*/
function filterParentBitstrings(gindices) {
export function filterParentBitstrings(gindices) {
const sortedBitstrings = gindices.slice().sort((a, b) => a.length - b.length);

@@ -119,4 +112,3 @@ const filtered = [];

}
exports.filterParentBitstrings = filterParentBitstrings;
var SortOrder;
export var SortOrder;
(function (SortOrder) {

@@ -126,3 +118,3 @@ SortOrder[SortOrder["InOrder"] = 0] = "InOrder";

SortOrder[SortOrder["Unsorted"] = 2] = "Unsorted";
})(SortOrder = exports.SortOrder || (exports.SortOrder = {}));
})(SortOrder || (SortOrder = {}));
/**

@@ -134,3 +126,3 @@ * Return the set of generalized indices required for a multiproof

*/
function computeMultiProofBitstrings(gindices, includeLeaves = true, sortOrder = SortOrder.InOrder) {
export function computeMultiProofBitstrings(gindices, includeLeaves = true, sortOrder = SortOrder.InOrder) {
const leaves = filterParentBitstrings(gindices);

@@ -163,3 +155,2 @@ // Maybe initialize the proof indices with the leaves

}
exports.computeMultiProofBitstrings = computeMultiProofBitstrings;
//# sourceMappingURL=util.js.map
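
A hedged sketch of computeMultiProofBitstrings, assuming it and SortOrder are re-exported from the package root (in the source they live in proof/util). Inputs are "1"-prefixed gindex bitstrings, matching the g.toString(2) mapping used by createTreeOffsetProof above.

import { computeMultiProofBitstrings, SortOrder } from "@chainsafe/persistent-merkle-tree";

// For leaves at gindices 9 and 11 of a depth-3 tree, the multiproof needs the
// leaves themselves plus witnesses 8, 10, and the subtree at gindex 3; every
// other node on the paths (4, 5, 2, 1) is computable from those.
const needed = computeMultiProofBitstrings(
  [BigInt(9).toString(2), BigInt(11).toString(2)], // ["1001", "1011"]
  true,
  SortOrder.InOrder
);
console.log(needed); // expected: ["1000", "1001", "1010", "1011", "11"]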

@@ -1,3 +0,3 @@

import { Node } from "./node";
import { HashComputationLevel } from "./hashComputation";
import { Node } from "./node.js";
import { HashComputationLevel } from "./hashComputation.js";
export declare function subtreeFillToDepth(bottom: Node, depth: number): Node;

@@ -4,0 +4,0 @@ export declare function subtreeFillToLength(bottom: Node, depth: number, length: number): Node;

@@ -1,11 +0,8 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.subtreeFillToContents = exports.subtreeFillToLength = exports.subtreeFillToDepth = void 0;
const node_1 = require("./node");
const hashComputation_1 = require("./hashComputation");
const zeroNode_1 = require("./zeroNode");
function subtreeFillToDepth(bottom, depth) {
import { BranchNode } from "./node.js";
import { getHashComputations, levelAtIndex } from "./hashComputation.js";
import { zeroNode } from "./zeroNode.js";
export function subtreeFillToDepth(bottom, depth) {
let node = bottom;
while (depth > 0) {
node = new node_1.BranchNode(node, node);
node = new BranchNode(node, node);
depth--;

@@ -15,4 +12,3 @@ }

}
exports.subtreeFillToDepth = subtreeFillToDepth;
function subtreeFillToLength(bottom, depth, length) {
export function subtreeFillToLength(bottom, depth, length) {
const maxLength = 1 << depth;

@@ -30,13 +26,12 @@ if (length > maxLength)

if (depth === 1) {
return new node_1.BranchNode(bottom, length > 1 ? bottom : zeroNode_1.zeroNode(0));
return new BranchNode(bottom, length > 1 ? bottom : zeroNode(0));
}
const pivot = maxLength >> 1;
if (length <= pivot) {
return new node_1.BranchNode(subtreeFillToLength(bottom, depth - 1, length), zeroNode_1.zeroNode(depth - 1));
return new BranchNode(subtreeFillToLength(bottom, depth - 1, length), zeroNode(depth - 1));
}
else {
return new node_1.BranchNode(subtreeFillToDepth(bottom, depth - 1), subtreeFillToLength(bottom, depth - 1, length - pivot));
return new BranchNode(subtreeFillToDepth(bottom, depth - 1), subtreeFillToLength(bottom, depth - 1, length - pivot));
}
}
exports.subtreeFillToLength = subtreeFillToLength;
/**

@@ -47,3 +42,3 @@ * WARNING: Mutates the provided nodes array.

*/
function subtreeFillToContents(nodes, depth, hcOffset = 0, hcByLevel = null) {
export function subtreeFillToContents(nodes, depth, hcOffset = 0, hcByLevel = null) {
const maxLength = 2 ** depth;

@@ -54,3 +49,3 @@ if (nodes.length > maxLength) {

if (nodes.length === 0) {
return zeroNode_1.zeroNode(depth);
return zeroNode(depth);
}

@@ -60,3 +55,3 @@ if (depth === 0) {

if (hcByLevel !== null) {
hashComputation_1.getHashComputations(node, hcOffset, hcByLevel);
getHashComputations(node, hcOffset, hcByLevel);
}

@@ -69,8 +64,8 @@ return node;

const leftNode = nodes[0];
const rightNode = nodes.length > 1 ? nodes[1] : zeroNode_1.zeroNode(0);
const rootNode = new node_1.BranchNode(leftNode, rightNode);
const rightNode = nodes.length > 1 ? nodes[1] : zeroNode(0);
const rootNode = new BranchNode(leftNode, rightNode);
if (hcByLevel !== null) {
hashComputation_1.getHashComputations(leftNode, hcOffset + 1, hcByLevel);
hashComputation_1.getHashComputations(rightNode, hcOffset + 1, hcByLevel);
hashComputation_1.levelAtIndex(hcByLevel, hcOffset).push(leftNode, rightNode, rootNode);
getHashComputations(leftNode, hcOffset + 1, hcByLevel);
getHashComputations(rightNode, hcOffset + 1, hcByLevel);
levelAtIndex(hcByLevel, hcOffset).push(leftNode, rightNode, rootNode);
}

@@ -88,10 +83,10 @@ return rootNode;

const right = nodes[i + 1];
const node = new node_1.BranchNode(left, right);
const node = new BranchNode(left, right);
nodes[i / 2] = node;
if (offset !== null && hcByLevel !== null) {
hashComputation_1.levelAtIndex(hcByLevel, offset).push(left, right, node);
levelAtIndex(hcByLevel, offset).push(left, right, node);
if (d === depth) {
// bottom up strategy so we don't need to go down the tree except for the last level
hashComputation_1.getHashComputations(left, offset + 1, hcByLevel);
hashComputation_1.getHashComputations(right, offset + 1, hcByLevel);
getHashComputations(left, offset + 1, hcByLevel);
getHashComputations(right, offset + 1, hcByLevel);
}

@@ -102,4 +97,4 @@ }

const left = nodes[countEven];
const right = zeroNode_1.zeroNode(depth - d);
const node = new node_1.BranchNode(left, right);
const right = zeroNode(depth - d);
const node = new BranchNode(left, right);
nodes[countEven / 2] = node;

@@ -109,6 +104,6 @@ if (offset !== null && hcByLevel !== null) {

// only go down on the last level
hashComputation_1.getHashComputations(left, offset + 1, hcByLevel);
getHashComputations(left, offset + 1, hcByLevel);
}
// no need to getHashComputations for zero node
hashComputation_1.levelAtIndex(hcByLevel, offset).push(left, right, node);
levelAtIndex(hcByLevel, offset).push(left, right, node);
}

@@ -121,3 +116,2 @@ }

}
exports.subtreeFillToContents = subtreeFillToContents;
//# sourceMappingURL=subtree.js.map
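
A hedged sketch of the subtree fill helpers above, assuming they are re-exported from the package root as in the diff. Note the WARNING above: subtreeFillToContents mutates the provided nodes array.

import {
  LeafNode,
  subtreeFillToLength,
  subtreeFillToContents,
} from "@chainsafe/persistent-merkle-tree";

// Depth-3 subtree with only 5 occupied slots; positions 5..7 are zero-padded.
const bottom = LeafNode.fromRoot(new Uint8Array(32).fill(1));
const padded = subtreeFillToLength(bottom, 3, 5);
console.log(padded.root.length); // 32

// Same idea from explicit contents: 3 leaves at depth 2, slot 3 becomes zeroNode(0).
const nodes = [1, 2, 3].map((i) => LeafNode.fromRoot(new Uint8Array(32).fill(i)));
const filled = subtreeFillToContents(nodes, 2); // nodes array is mutated in place
console.log(filled.root.length); // 32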

@@ -1,6 +0,6 @@

import { Gindex, GindexBitstring } from "./gindex";
import { Node } from "./node";
import { HashComputationLevel } from "./hashComputation";
import { Proof, ProofInput } from "./proof";
export declare type Hook = (newRootNode: Node) => void;
import { Gindex, GindexBitstring } from "./gindex.js";
import { Node } from "./node.js";
import { HashComputationLevel } from "./hashComputation.js";
import { Proof, ProofInput } from "./proof/index.js";
export type Hook = (newRootNode: Node) => void;
/**

@@ -18,5 +18,5 @@ * Binary merkle tree

/**
* Create a `Tree` from a `Proof` object
* The root hash of the tree
*/
static createFromProof(proof: Proof): Tree;
get root(): Uint8Array;
/**

@@ -32,5 +32,5 @@ * The root node of the tree

/**
* The root hash of the tree
* Create a `Tree` from a `Proof` object
*/
get root(): Uint8Array;
static createFromProof(proof: Proof): Tree;
/**

@@ -37,0 +37,0 @@ * Return a copy of the tree

@@ -1,10 +0,7 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.findDiffDepthi = exports.treeZeroAfterIndex = exports.iterateNodesAtDepth = exports.getNodesAtDepth = exports.setNodesAtDepth = exports.setNodeAtDepth = exports.getNodeAtDepth = exports.setNodeWithFn = exports.setNode = exports.getNode = exports.Tree = void 0;
const zeroNode_1 = require("./zeroNode");
const gindex_1 = require("./gindex");
const node_1 = require("./node");
const hashComputation_1 = require("./hashComputation");
const proof_1 = require("./proof");
const single_1 = require("./proof/single");
import { zeroNode } from "./zeroNode.js";
import { convertGindexToBitstring } from "./gindex.js";
import { LeafNode, BranchNode } from "./node.js";
import { levelAtIndex } from "./hashComputation.js";
import { createNodeFromProof, createProof } from "./proof/index.js";
import { createSingleProof } from "./proof/single.js";
/**

@@ -17,3 +14,5 @@ * Binary merkle tree

*/
class Tree {
export class Tree {
_rootNode;
hook;
constructor(node, hook) {

@@ -31,6 +30,6 @@ this._rootNode = node;

/**
* Create a `Tree` from a `Proof` object
* The root hash of the tree
*/
static createFromProof(proof) {
return new Tree(proof_1.createNodeFromProof(proof));
get root() {
return this.rootNode.root;
}

@@ -69,6 +68,6 @@ /**

/**
* The root hash of the tree
* Create a `Tree` from a `Proof` object
*/
get root() {
return this.rootNode.root;
static createFromProof(proof) {
return new Tree(createNodeFromProof(proof));
}

@@ -140,3 +139,3 @@ /**

setRoot(index, root) {
this.setNode(index, node_1.LeafNode.fromRoot(root));
this.setNode(index, LeafNode.fromRoot(root));
}

@@ -169,3 +168,3 @@ /**

getSingleProof(index) {
return single_1.createSingleProof(this.rootNode, index)[1];
return createSingleProof(this.rootNode, index)[1];
}

@@ -178,11 +177,10 @@ /**

getProof(input) {
return proof_1.createProof(this.rootNode, input);
return createProof(this.rootNode, input);
}
}
exports.Tree = Tree;
/**
* Return the node at the specified gindex.
*/
function getNode(rootNode, gindex) {
const gindexBitstring = gindex_1.convertGindexToBitstring(gindex);
export function getNode(rootNode, gindex) {
const gindexBitstring = convertGindexToBitstring(gindex);
let node = rootNode;

@@ -198,3 +196,2 @@ for (let i = 1; i < gindexBitstring.length; i++) {

}
exports.getNode = getNode;
/**

@@ -204,9 +201,8 @@ * Set the node at the specified gindex.

*/
function setNode(rootNode, gindex, n) {
export function setNode(rootNode, gindex, n) {
// Pre-compute entire bitstring instead of using an iterator (25% faster)
const gindexBitstring = gindex_1.convertGindexToBitstring(gindex);
const gindexBitstring = convertGindexToBitstring(gindex);
const parentNodes = getParentNodes(rootNode, gindexBitstring);
return rebindNodeToRoot(gindexBitstring, parentNodes, n);
}
exports.setNode = setNode;
/**

@@ -221,5 +217,5 @@ * Traverse to the node at the specified gindex,

*/
function setNodeWithFn(rootNode, gindex, getNewNode) {
export function setNodeWithFn(rootNode, gindex, getNewNode) {
// Pre-compute entire bitstring instead of using an iterator (25% faster)
const gindexBitstring = gindex_1.convertGindexToBitstring(gindex);
const gindexBitstring = convertGindexToBitstring(gindex);
const parentNodes = getParentNodes(rootNode, gindexBitstring);

@@ -232,3 +228,2 @@ const lastParentNode = parentNodes[parentNodes.length - 1];

}
exports.setNodeWithFn = setNodeWithFn;
/**

@@ -268,6 +263,6 @@ * Traverse the tree from root node, ignore the last bit to get all parent nodes

if (bitstring[i] === "1") {
node = new node_1.BranchNode(parentNodes[i - 1].left, node);
node = new BranchNode(parentNodes[i - 1].left, node);
}
else {
node = new node_1.BranchNode(node, parentNodes[i - 1].right);
node = new BranchNode(node, parentNodes[i - 1].right);
}

@@ -280,3 +275,3 @@ }

*/
function getNodeAtDepth(rootNode, depth, index) {
export function getNodeAtDepth(rootNode, depth, index) {
if (depth === 0) {

@@ -297,11 +292,9 @@ return rootNode;

}
exports.getNodeAtDepth = getNodeAtDepth;
/**
* Supports index up to `Number.MAX_SAFE_INTEGER`.
*/
function setNodeAtDepth(rootNode, nodesDepth, index, nodeChanged) {
export function setNodeAtDepth(rootNode, nodesDepth, index, nodeChanged) {
// TODO: OPTIMIZE (if necessary)
return setNodesAtDepth(rootNode, nodesDepth, [index], [nodeChanged]);
}
exports.setNodeAtDepth = setNodeAtDepth;
/**

@@ -321,3 +314,3 @@ * Set multiple nodes in batch, editing and traversing nodes strictly once.

*/
function setNodesAtDepth(rootNode, nodesDepth, indexes, nodes, hcOffset = 0, hcByLevel = null) {
export function setNodesAtDepth(rootNode, nodesDepth, indexes, nodes, hcOffset = 0, hcByLevel = null) {
// depth depthi gindexes indexes

@@ -394,7 +387,7 @@ // 0 1 1 0

if (index + 1 === indexes[i + 1]) {
node = new node_1.BranchNode(nodes[i], nodes[i + 1]);
node = new BranchNode(nodes[i], nodes[i + 1]);
if (hcByLevel != null) {
// go with level of dest node (level 0 goes with root node)
// in this case the dest node is at nodesDepth - 2, same for below
hashComputation_1.levelAtIndex(hcByLevel, nodesDepth - 1 + hcOffset).push(nodes[i], nodes[i + 1], node);
levelAtIndex(hcByLevel, nodesDepth - 1 + hcOffset).push(nodes[i], nodes[i + 1], node);
}

@@ -406,5 +399,5 @@ // Move pointer one extra forward since node has consumed two nodes

const oldNode = node;
node = new node_1.BranchNode(nodes[i], oldNode.right);
node = new BranchNode(nodes[i], oldNode.right);
if (hcByLevel != null) {
hashComputation_1.levelAtIndex(hcByLevel, nodesDepth - 1 + hcOffset).push(nodes[i], oldNode.right, node);
levelAtIndex(hcByLevel, nodesDepth - 1 + hcOffset).push(nodes[i], oldNode.right, node);
}

@@ -415,5 +408,5 @@ }

const oldNode = node;
node = new node_1.BranchNode(oldNode.left, nodes[i]);
node = new BranchNode(oldNode.left, nodes[i]);
if (hcByLevel != null) {
hashComputation_1.levelAtIndex(hcByLevel, nodesDepth - 1 + hcOffset).push(oldNode.left, nodes[i], node);
levelAtIndex(hcByLevel, nodesDepth - 1 + hcOffset).push(oldNode.left, nodes[i], node);
}

@@ -454,5 +447,5 @@ }

const oldNode = node;
node = new node_1.BranchNode(oldNode, parentNodeStack[d].right);
node = new BranchNode(oldNode, parentNodeStack[d].right);
if (hcByLevel != null) {
hashComputation_1.levelAtIndex(hcByLevel, depth + hcOffset).push(oldNode, parentNodeStack[d].right, node);
levelAtIndex(hcByLevel, depth + hcOffset).push(oldNode, parentNodeStack[d].right, node);
}

@@ -470,5 +463,5 @@ }

const oldNode = node;
node = new node_1.BranchNode(leftNode, oldNode);
node = new BranchNode(leftNode, oldNode);
if (hcByLevel != null) {
hashComputation_1.levelAtIndex(hcByLevel, depth + hcOffset).push(leftNode, oldNode, node);
levelAtIndex(hcByLevel, depth + hcOffset).push(leftNode, oldNode, node);
}

@@ -479,5 +472,5 @@ leftParentNodeStack[d] = undefined;

const oldNode = node;
node = new node_1.BranchNode(parentNodeStack[d].left, oldNode);
node = new BranchNode(parentNodeStack[d].left, oldNode);
if (hcByLevel != null) {
hashComputation_1.levelAtIndex(hcByLevel, depth + hcOffset).push(parentNodeStack[d].left, oldNode, node);
levelAtIndex(hcByLevel, depth + hcOffset).push(parentNodeStack[d].left, oldNode, node);
}

@@ -494,3 +487,2 @@ }

}
exports.setNodesAtDepth = setNodesAtDepth;
/**

@@ -508,3 +500,3 @@ * Fast read-only iteration

*/
function getNodesAtDepth(rootNode, depth, startIndex, count) {
export function getNodesAtDepth(rootNode, depth, startIndex, count) {
// Optimized paths for short trees (~20x faster)

@@ -559,7 +551,6 @@ if (depth === 0) {

}
exports.getNodesAtDepth = getNodesAtDepth;
/**
* @see getNodesAtDepth but instead of pushing to an array, it yields
*/
function* iterateNodesAtDepth(rootNode, depth, startIndex, count) {
export function* iterateNodesAtDepth(rootNode, depth, startIndex, count) {
const endIndex = startIndex + count;

@@ -598,3 +589,2 @@ // Ignore first bit "1", then subtract 1 to get to the parent

}
exports.iterateNodesAtDepth = iterateNodesAtDepth;
/**

@@ -623,3 +613,3 @@ * Zeroes all nodes right of index at a constant depth of `nodesDepth`.

*/
function treeZeroAfterIndex(rootNode, nodesDepth, index) {
export function treeZeroAfterIndex(rootNode, nodesDepth, index) {
// depth depthi gindexes indexes

@@ -639,3 +629,3 @@ // 0 1 1 0

if (index < 0) {
return zeroNode_1.zeroNode(nodesDepth);
return zeroNode(nodesDepth);
}

@@ -667,3 +657,3 @@ /**

// So re-bind new `node` with a zeroNode at the current depth.
node = new node_1.BranchNode(node, zeroNode_1.zeroNode(d));
node = new BranchNode(node, zeroNode(d));
}

@@ -673,3 +663,3 @@ else {

// So re-bind new `node` with the existing left node of the parent.
node = new node_1.BranchNode(parentNodeStack[d].left, node);
node = new BranchNode(parentNodeStack[d].left, node);
}

@@ -680,3 +670,2 @@ }

}
exports.treeZeroAfterIndex = treeZeroAfterIndex;
const NUMBER_32_MAX = 0xffffffff;

@@ -696,3 +685,3 @@ const NUMBER_2_POW_32 = 2 ** 32;

*/
function findDiffDepthi(from, to) {
export function findDiffDepthi(from, to) {
if (from === to || from < 0 || to < 0) {

@@ -723,3 +712,2 @@ throw Error(`Expect different positive inputs, from=${from} to=${to}`);

}
exports.findDiffDepthi = findDiffDepthi;
/**

@@ -726,0 +714,0 @@ * Returns true if the `index` at `depth` is a left node, false if it is a right node.

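A hedged end-to-end sketch of the Tree API whose members are reordered above (root getter before createFromProof in 0.9.0). ProofType is assumed to be re-exported from the package root alongside Tree, and the single-proof input shape is assumed to be {type, gindex}.

import { Tree, ProofType, zeroNode } from "@chainsafe/persistent-merkle-tree";

// Start from an empty depth-2 tree and set the leaf at gindex 6 (depth 2, index 2).
const tree = new Tree(zeroNode(2));
tree.setRoot(BigInt(6), new Uint8Array(32).fill(7));

// Single-branch proof for that leaf, then a Tree rebuilt from it.
const proof = tree.getProof({ type: ProofType.single, gindex: BigInt(6) });
const fromProof = Tree.createFromProof(proof);
console.log(fromProof.root.every((b, i) => b === tree.root[i])); // true
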
@@ -1,13 +0,10 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.zeroHash = void 0;
// use noble here instead of the hasher variable because this is used inside the hasher itself
// we cache zero hashes so performance is not a concern
const sha256_1 = require("@noble/hashes/sha256");
import { sha256 } from "@noble/hashes/sha256";
// create array of "zero hashes", successively hashed zero chunks
const zeroHashes = [new Uint8Array(32)];
function zeroHash(depth) {
export function zeroHash(depth) {
if (depth >= zeroHashes.length) {
for (let i = zeroHashes.length; i <= depth; i++) {
zeroHashes[i] = sha256_1.sha256
zeroHashes[i] = sha256
.create()

@@ -21,3 +18,2 @@ .update(zeroHashes[i - 1])

}
exports.zeroHash = zeroHash;
//# sourceMappingURL=zeroHash.js.map
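
A minimal check of the cache semantics above: zeroHash(n) is sha256 over two copies of zeroHash(n - 1), starting from a 32-byte zero chunk. This recomputes depth 1 directly with @noble/hashes, and assumes zeroHash is re-exported from the package root; if not, import it from lib/zeroHash.js.

import { sha256 } from "@noble/hashes/sha256";
import { zeroHash } from "@chainsafe/persistent-merkle-tree";

const z0 = new Uint8Array(32); // zero chunk at depth 0
const z1 = sha256.create().update(z0).update(z0).digest(); // hash of two zero chunks
console.log(zeroHash(1).every((b, i) => b === z1[i])); // true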

@@ -1,2 +0,2 @@

import { Node } from "./node";
import { Node } from "./node.js";
/**

@@ -3,0 +3,0 @@ * Return the `Node` at a specified height from the merkle tree made of "zero data"

@@ -1,6 +0,3 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.zeroNode = void 0;
const node_1 = require("./node");
const zeroes = [node_1.LeafNode.fromZero()];
import { BranchNode, LeafNode } from "./node.js";
const zeroes = [LeafNode.fromZero()];
/**

@@ -18,6 +15,6 @@ * Return the `Node` at a specified height from the merkle tree made of "zero data"

*/
function zeroNode(height) {
export function zeroNode(height) {
if (height >= zeroes.length) {
for (let i = zeroes.length; i <= height; i++) {
zeroes[i] = new node_1.BranchNode(zeroes[i - 1], zeroes[i - 1]);
zeroes[i] = new BranchNode(zeroes[i - 1], zeroes[i - 1]);
}

@@ -30,3 +27,2 @@ // make sure the hash is precomputed so that zeroNodes are not added to HashComputation

}
exports.zeroNode = zeroNode;
//# sourceMappingURL=zeroNode.js.map
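
A short sketch of the zeroNode cache above: because each zeroes[i] is built from the cached zeroes[i - 1], both children of a zero node are the very same cached instance one height down.

import { zeroNode } from "@chainsafe/persistent-merkle-tree";

const z3 = zeroNode(3);
// Reference equality holds, not just hash equality, since the array is cached.
console.log(z3.left === zeroNode(2) && z3.right === zeroNode(2)); // true
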
{
"name": "@chainsafe/persistent-merkle-tree",
"version": "0.8.0",
"version": "0.9.0",
"description": "Merkle tree implemented as a persistent datastructure",
"main": "lib/index.js",
"typesVersions": {
"*": {
"*": [
"*",
"lib/*",
"lib/*/index"
]
}
},
"type": "module",
"module": "./lib/index.js",
"main": "./lib/cjs/index.js",
"types": "./lib/index.d.ts",
"files": [

@@ -21,9 +15,12 @@ "lib"

"clean": "rm -rf lib",
"build": "tsc",
"build": "yarn build:cjs && yarn build:esm && yarn build:types",
"build:esm": "tsc -p tsconfig.build.esm.json",
"build:cjs": "tsc -p tsconfig.build.cjs.json && echo '{\"type\": \"commonjs\"}' > ./lib/cjs/package.json",
"build:types": "tsc -p tsconfig.build.types.json",
"lint": "eslint --color --ext .ts src/",
"lint:fix": "yarn run lint --fix",
"benchmark:files": "node --max-old-space-size=4096 --expose-gc -r ts-node/register ../../node_modules/.bin/benchmark",
"benchmark:files": "node --max-old-space-size=4096 --expose-gc --loader ts-node/esm ../../node_modules/.bin/benchmark",
"benchmark": "yarn benchmark:files 'test/perf/*.test.ts'",
"benchmark:local": "yarn benchmark --local",
"test": "mocha -r ts-node/register 'test/unit/**/*.test.ts'"
"test": "vitest run --dir test/unit"
},

@@ -50,3 +47,3 @@ "pre-push": [

"dependencies": {
"@chainsafe/as-sha256": "0.5.0",
"@chainsafe/as-sha256": "0.6.0",
"@chainsafe/hashtree": "1.0.1",

@@ -53,0 +50,0 @@ "@noble/hashes": "^1.3.0"

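A hedged consumer-side sketch of the new dual build. With "type": "module", the "module" field points bundlers at the ESM entry, while require() resolves through "main" to lib/cjs, whose generated {"type": "commonjs"} marker (written by the build:cjs script above) keeps Node treating those files as CommonJS.

// ESM consumers (and bundlers honoring the "module" field):
import { Tree, zeroNode } from "@chainsafe/persistent-merkle-tree";

// CommonJS consumers, resolved through "main" to lib/cjs:
// const { Tree, zeroNode } = require("@chainsafe/persistent-merkle-tree");

const tree = new Tree(zeroNode(4));
console.log(tree.root.length); // 32
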
Sorry, the diff of this file is not supported yet