@chainsafe/persistent-merkle-tree - npm Package Compare versions

Comparing version 0.2.3 to 0.3.0

CHANGELOG.md

@@ -0,1 +1,15 @@

## 0.3.0 (2021-03-26)
## BREAKING CHANGES
- Use WeakRef on tree hook ([dd23ed](https://github.com/chainsafe/persistent-merkle-tree/commit/dd23ed))
## Features
- Add proof serialization logic ([44ec21](https://github.com/chainsafe/persistent-merkle-tree/commit/44ec21))
## Bug Fixes
- Fix off-by-one in iterateAtDepth ([84e05e](https://github.com/chainsafe/persistent-merkle-tree/commit/84e05e))
## 0.2.3 (2021-02-13)

@@ -5,3 +19,3 @@

* Add tree-offset multiproof code ([a35181](https://github.com/chainsafe/persistent-merkle-tree/commit/a35181))
- Add tree-offset multiproof code ([a35181](https://github.com/chainsafe/persistent-merkle-tree/commit/a35181))

@@ -12,3 +26,3 @@ ## 0.2.2 (2021-02-11)

* Add concatGindices ([bb74df](https://github.com/chainsafe/persistent-merkle-tree/commit/bb74df))
- Add concatGindices ([bb74df](https://github.com/chainsafe/persistent-merkle-tree/commit/bb74df))

@@ -19,3 +33,3 @@ ## 0.2.1 (2020-07-32)

* Fix subtreeFillToContents edge cases ([8a2012](https://github.com/chainsafe/persistent-merkle-tree/commit/8a2012))
- Fix subtreeFillToContents edge cases ([8a2012](https://github.com/chainsafe/persistent-merkle-tree/commit/8a2012))

@@ -26,7 +40,7 @@ ## 0.2.0 (2020-07-27)

* Add iterateNodestDepth ([24ca18](https://github.com/chainsafe/persistent-merkle-tree/commit/24ca18))
- Add iterateNodestDepth ([24ca18](https://github.com/chainsafe/persistent-merkle-tree/commit/24ca18))
## BREAKING CHANGES
* Rearrange params, depth first where appropriate ([24ca18](https://github.com/chainsafe/persistent-merkle-tree/commit/24ca18))
- Rearrange params, depth first where appropriate ([24ca18](https://github.com/chainsafe/persistent-merkle-tree/commit/24ca18))

@@ -37,3 +51,3 @@ ## 0.1.3 (2020-06-07)

* remove bigint literals ([461fb7](https://github.com/chainsafe/persistent-merkle-tree/commit/461fb7))
- remove bigint literals ([461fb7](https://github.com/chainsafe/persistent-merkle-tree/commit/461fb7))

@@ -44,2 +58,2 @@ ## 0.1.2 (2020-02-26)

* use @chainsafe/as-sha256 sha2 implementation ([b9bcfe](https://github.com/chainsafe/persistent-merkle-tree/commit/b9bcfe))
- use @chainsafe/as-sha256 sha2 implementation ([b9bcfe](https://github.com/chainsafe/persistent-merkle-tree/commit/b9bcfe))
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.gindexChild = exports.gindexParent = exports.gindexSibling = exports.concatGindices = exports.gindexIterator = exports.iterateAtDepth = exports.countToDepth = exports.toGindexBitstring = exports.toGindex = exports.bitIndexBigInt = void 0;
function bitIndexBigInt(v) {

@@ -16,3 +17,3 @@ return v.toString(2).length - 1;

function toGindexBitstring(depth, index) {
const str = index ? index.toString(2) : '';
const str = index ? index.toString(2) : "";
if (str.length > depth) {

@@ -40,3 +41,3 @@ throw new Error("index too large for depth");

const anchor = BigInt(1) << BigInt(depth);
if (startIndex + count >= anchor) {
if (startIndex + count > anchor) {
throw new Error("Too large for depth");

@@ -58,5 +59,5 @@ }

}
}
},
};
}
},
};

@@ -63,0 +64,0 @@ }
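
The change from `>=` to `>` in `iterateAtDepth` above is the off-by-one fix noted in the changelog: a subtree of depth `d` has `2**d` bottom-level slots, and the old check wrongly rejected an iteration that covered exactly the full row. A minimal sketch of the corrected bounds check (illustrative, not the library's exact code):

```ts
function checkIterationBounds(depth: number, startIndex: bigint, count: bigint): void {
  // A subtree of `depth` has 2**depth slots at its bottom level.
  const anchor = BigInt(1) << BigInt(depth);
  // The old check (`startIndex + count >= anchor`) threw even when the
  // iteration ended exactly at the row boundary; `>` permits that case.
  if (startIndex + count > anchor) {
    throw new Error("Too large for depth");
  }
}

// depth 2 has 4 slots; iterating all 4 from index 0 is valid,
// but 0n + 4n >= 4n made the old check throw.
checkIterationBounds(2, BigInt(0), BigInt(4));
```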

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.hash = void 0;
const as_sha256_1 = require("@chainsafe/as-sha256");

@@ -4,0 +5,0 @@ function hash(a, b) {

"use strict";
function __export(m) {
for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p];
}
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
__export(require("./gindex"));
__export(require("./hash"));
__export(require("./node"));
__export(require("./zeroNode"));
__export(require("./subtree"));
__export(require("./tree"));
__export(require("./proof"));
__exportStar(require("./gindex"), exports);
__exportStar(require("./hash"), exports);
__exportStar(require("./node"), exports);
__exportStar(require("./zeroNode"), exports);
__exportStar(require("./subtree"), exports);
__exportStar(require("./tree"), exports);
__exportStar(require("./proof"), exports);
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.compose = exports.identity = exports.LeafNode = exports.BranchNode = exports.Node = void 0;
const hash_1 = require("./hash");

@@ -19,5 +20,7 @@ const ERR_INVALID_TREE = "Invalid tree";

}
// eslint-disable-next-line @typescript-eslint/no-unused-vars
rebindLeft(left) {
throw new Error(ERR_NOT_IMPLEMENTED);
}
// eslint-disable-next-line @typescript-eslint/no-unused-vars
rebindRight(right) {

@@ -24,0 +27,0 @@ throw new Error(ERR_NOT_IMPLEMENTED);

lib/proof/index.d.ts

@@ -7,2 +7,6 @@ import { Gindex } from "../gindex";

}
/**
* Serialized proofs are prepended with a single byte, denoting their type
*/
export declare const ProofTypeSerialized: ProofType[];
export interface SingleProof {

@@ -31,1 +35,3 @@ type: ProofType.single;

export declare function createNodeFromProof(proof: Proof): Node;
export declare function serializeProof(proof: Proof): Uint8Array;
export declare function deserializeProof(data: Uint8Array): Proof;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.deserializeProof = exports.serializeProof = exports.createNodeFromProof = exports.createProof = exports.ProofTypeSerialized = exports.ProofType = void 0;
const single_1 = require("./single");

@@ -10,5 +11,12 @@ const treeOffset_1 = require("./treeOffset");

})(ProofType = exports.ProofType || (exports.ProofType = {}));
/**
* Serialized proofs are prepended with a single byte, denoting their type
*/
exports.ProofTypeSerialized = [
ProofType.single,
ProofType.treeOffset, // 1
];
function createProof(rootNode, input) {
switch (input.type) {
case ProofType.single:
case ProofType.single: {
const [leaf, witnesses] = single_1.createSingleProof(rootNode, input.gindex);

@@ -21,3 +29,4 @@ return {

};
case ProofType.treeOffset:
}
case ProofType.treeOffset: {
const [offsets, leaves] = treeOffset_1.createTreeOffsetProof(rootNode, input.gindices);

@@ -29,2 +38,3 @@ return {

};
}
default:

@@ -46,1 +56,37 @@ throw new Error("Invalid proof type");

exports.createNodeFromProof = createNodeFromProof;
function serializeProof(proof) {
switch (proof.type) {
case ProofType.single:
throw new Error("Not implemented");
case ProofType.treeOffset: {
const output = new Uint8Array(1 + treeOffset_1.computeTreeOffsetProofSerializedLength(proof.offsets, proof.leaves));
output[0] = exports.ProofTypeSerialized.indexOf(ProofType.treeOffset);
treeOffset_1.serializeTreeOffsetProof(output, 1, proof.offsets, proof.leaves);
return output;
}
default:
throw new Error("Invalid proof type");
}
}
exports.serializeProof = serializeProof;
function deserializeProof(data) {
const proofType = exports.ProofTypeSerialized[data[0]];
if (!proofType) {
throw new Error("Invalid proof type");
}
switch (proofType) {
case ProofType.single:
throw new Error("Not implemented");
case ProofType.treeOffset: {
const [offsets, leaves] = treeOffset_1.deserializeTreeOffsetProof(data, 1);
return {
type: ProofType.treeOffset,
offsets,
leaves,
};
}
default:
throw new Error("Invalid proof type");
}
}
exports.deserializeProof = deserializeProof;
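
Together, `serializeProof` and `deserializeProof` give a byte-level round trip for tree-offset proofs (single proofs still throw "Not implemented"). A minimal usage sketch, assuming only the APIs visible in this diff:

```ts
import {
  Tree,
  zeroNode,
  ProofType,
  serializeProof,
  deserializeProof,
} from "@chainsafe/persistent-merkle-tree";

// A depth-3 tree of zero leaves; at depth 3, leaf gindices run 8..15.
const tree = new Tree(zeroNode(3));

// treeOffset is currently the only proof type that can be serialized.
const proof = tree.getProof({
  type: ProofType.treeOffset,
  gindices: [BigInt(8), BigInt(9)],
});

const bytes = serializeProof(proof); // bytes[0] is the type byte (1 === treeOffset)
const restored = deserializeProof(bytes);

// createFromProof (new in this release) rebuilds a partial tree from a proof.
const partial = Tree.createFromProof(restored);
```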
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createNodeFromSingleProof = exports.createSingleProof = exports.ERR_INVALID_NAV = void 0;
const node_1 = require("../node");

@@ -28,3 +29,3 @@ const gindex_1 = require("../gindex");

let node = new node_1.LeafNode(leaf);
let w = witnesses.reverse();
const w = witnesses.reverse();
while (gindex > 1) {

@@ -31,0 +32,0 @@ const sibling = new node_1.LeafNode(w.pop());
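
`createNodeFromSingleProof` above rebuilds the tree by pairing the current node with each witness on the correct side while walking the gindex up to the root. The same walk can be written at the hash level; a sketch using only the exported `hash(a, b)` (illustrative, not the library's implementation):

```ts
import {hash} from "@chainsafe/persistent-merkle-tree";

// The low bit of the gindex says whether the current node is a left (0)
// or right (1) child, i.e. which side its sibling witness sits on.
function computeRootFromSingleProof(
  gindex: bigint,
  leaf: Uint8Array,
  witnesses: Uint8Array[] // deepest sibling first, as the loop above implies
): Uint8Array {
  let root = leaf;
  const w = witnesses.slice().reverse(); // so pop() yields the deepest sibling first
  while (gindex > 1) {
    const sibling = w.pop();
    if (sibling === undefined) throw new Error("Not enough witnesses");
    root = gindex % BigInt(2) === BigInt(0) ? hash(root, sibling) : hash(sibling, root);
    gindex /= BigInt(2); // bigint division truncates: step up to the parent
  }
  return root;
}
```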

lib/proof/treeOffset.d.ts

@@ -36,1 +36,4 @@ import { Gindex, GindexBitstring } from "../gindex";

export declare function createNodeFromTreeOffsetProof(offsets: number[], leaves: Uint8Array[]): Node;
export declare function computeTreeOffsetProofSerializedLength(offsets: number[], leaves: Uint8Array[]): number;
export declare function serializeTreeOffsetProof(output: Uint8Array, byteOffset: number, offsets: number[], leaves: Uint8Array[]): void;
export declare function deserializeTreeOffsetProof(data: Uint8Array, byteOffset: number): [number[], Uint8Array[]];
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.deserializeTreeOffsetProof = exports.serializeTreeOffsetProof = exports.computeTreeOffsetProofSerializedLength = exports.createNodeFromTreeOffsetProof = exports.createTreeOffsetProof = exports.treeOffsetProofToNode = exports.nodeToTreeOffsetProof = void 0;
const node_1 = require("../node");

@@ -63,3 +64,3 @@ const util_1 = require("./util");

function createTreeOffsetProof(rootNode, gindices) {
return nodeToTreeOffsetProof(rootNode, "1", util_1.computeMultiProofBitstrings(gindices.map(g => g.toString(2))));
return nodeToTreeOffsetProof(rootNode, "1", util_1.computeMultiProofBitstrings(gindices.map((g) => g.toString(2))));
}

@@ -78,1 +79,42 @@ exports.createTreeOffsetProof = createTreeOffsetProof;

exports.createNodeFromTreeOffsetProof = createNodeFromTreeOffsetProof;
function computeTreeOffsetProofSerializedLength(offsets, leaves) {
// add 1 for # of leaves
return (offsets.length + 1) * 2 + leaves.length * 32;
}
exports.computeTreeOffsetProofSerializedLength = computeTreeOffsetProofSerializedLength;
// Serialized tree offset proof structure:
// # of leaves - 2 bytes
// offsets - 2 bytes each
// leaves - 32 bytes each
function serializeTreeOffsetProof(output, byteOffset, offsets, leaves) {
const writer = new DataView(output.buffer, output.byteOffset, output.byteLength);
// set # of leaves
writer.setUint16(byteOffset, leaves.length, true);
// set offsets
const offsetsStartIndex = byteOffset + 2;
for (let i = 0; i < offsets.length; i++) {
writer.setUint16(i * 2 + offsetsStartIndex, offsets[i], true);
}
// set leaves
const leavesStartIndex = offsetsStartIndex + offsets.length * 2;
for (let i = 0; i < leaves.length; i++) {
output.set(leaves[i], i * 32 + leavesStartIndex);
}
}
exports.serializeTreeOffsetProof = serializeTreeOffsetProof;
function deserializeTreeOffsetProof(data, byteOffset) {
const reader = new DataView(data.buffer, data.byteOffset, data.byteLength);
// get # of leaves
const leafCount = reader.getUint16(byteOffset, true);
if (data.length < (leafCount - 1) * 2 + leafCount * 32) {
throw new Error("Unable to deserialize tree offset proof: not enough bytes");
}
// get offsets
const offsetsStartIndex = byteOffset + 2;
const offsets = Array.from({ length: leafCount - 1 }, (_, i) => reader.getUint16(i * 2 + offsetsStartIndex, true));
// get leaves
const leavesStartIndex = offsetsStartIndex + offsets.length * 2;
const leaves = Array.from({ length: leafCount }, (_, i) => data.subarray(i * 32 + leavesStartIndex, (i + 1) * 32 + leavesStartIndex));
return [offsets, leaves];
}
exports.deserializeTreeOffsetProof = deserializeTreeOffsetProof;
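
As a worked example of the layout documented above: a tree-offset proof with `n` leaves always carries `n - 1` offsets, so the serialized size is fully determined by the leaf count. Illustrative values:

```ts
// 3 leaves => 2 offsets; all multi-byte integers are little-endian uint16.
const offsets = [2, 1];
const leaves = [new Uint8Array(32), new Uint8Array(32), new Uint8Array(32)];

// total = 2 (leaf count) + 2 * offsets.length + 32 * leaves.length
//       = 2 + 4 + 96
//       = 102 bytes
// matching computeTreeOffsetProofSerializedLength:
// (offsets.length + 1) * 2 + leaves.length * 32 === 102
```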
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.computeMultiProofBitstrings = exports.filterParentBitstrings = exports.sortInOrderBitstrings = exports.computeProofBitstrings = exports.computeProofGindices = void 0;
const gindex_1 = require("../gindex");

@@ -51,3 +52,6 @@ // Not currently in use, but simpler implementation useful for testing

}
return gindices.map(g => g.padEnd(bitLength)).sort().map(g => g.trim());
return gindices
.map((g) => g.padEnd(bitLength))
.sort()
.map((g) => g.trim());
}

@@ -54,0 +58,0 @@ exports.sortInOrderBitstrings = sortInOrderBitstrings;
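
The `padEnd`/`sort`/`trim` chain in `sortInOrderBitstrings` above orders gindex bitstrings by left-to-right tree position: right-padding with spaces makes plain lexicographic comparison agree with in-order traversal, because a space sorts before both "0" and "1". A worked example:

```ts
// "10" is the root's left child; "111" is the right-most grandchild,
// so "10" comes first in left-to-right tree order.
const sorted = ["111", "10"]
  .map((g) => g.padEnd(3)) // ["111", "10 "]
  .sort()                  // ["10 ", "111"]  (" " < "1")
  .map((g) => g.trim());   // ["10", "111"]
```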

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.subtreeFillToContents = exports.subtreeFillToLength = exports.subtreeFillToDepth = void 0;
const node_1 = require("./node");

@@ -21,5 +22,5 @@ const zeroNode_1 = require("./zeroNode");

throw new Error(ERR_TOO_MANY_NODES);
else if (length === maxLength)
if (length === maxLength)
return subtreeFillToDepth(bottom, depth);
else if (depth === 0) {
if (depth === 0) {
if (length === 1)

@@ -30,13 +31,11 @@ return bottom;

}
else if (depth === 1) {
return new node_1.BranchNode(bottom, (length > 1) ? bottom : zeroNode_1.zeroNode(0));
if (depth === 1) {
return new node_1.BranchNode(bottom, length > 1 ? bottom : zeroNode_1.zeroNode(0));
}
const pivot = maxLength >> 1;
if (length <= pivot) {
return new node_1.BranchNode(subtreeFillToLength(bottom, depth - 1, length), zeroNode_1.zeroNode(depth - 1));
}
else {
const pivot = maxLength >> 1;
if (length <= pivot) {
return new node_1.BranchNode(subtreeFillToLength(bottom, depth - 1, length), zeroNode_1.zeroNode(depth - 1));
}
else {
return new node_1.BranchNode(subtreeFillToDepth(bottom, depth - 1), subtreeFillToLength(bottom, depth - 1, length - pivot));
}
return new node_1.BranchNode(subtreeFillToDepth(bottom, depth - 1), subtreeFillToLength(bottom, depth - 1, length - pivot));
}

@@ -49,3 +48,3 @@ }

throw new Error(ERR_TOO_MANY_NODES);
else if (depth === 0) {
if (depth === 0) {
if (!nodes.length)

@@ -55,3 +54,3 @@ return zeroNode_1.zeroNode(0);

}
else if (depth === 1) {
if (depth === 1) {
if (!nodes.length)

@@ -61,12 +60,10 @@ return zeroNode_1.zeroNode(1);

}
const pivot = Math.floor(maxLength / 2);
if (nodes.length <= pivot) {
return new node_1.BranchNode(subtreeFillToContents(nodes, depth - 1), zeroNode_1.zeroNode(depth - 1));
}
else {
const pivot = Math.floor(maxLength / 2);
if (nodes.length <= pivot) {
return new node_1.BranchNode(subtreeFillToContents(nodes, depth - 1), zeroNode_1.zeroNode(depth - 1));
}
else {
return new node_1.BranchNode(subtreeFillToContents(nodes.slice(0, Number(pivot)), depth - 1), subtreeFillToContents(nodes.slice(Number(pivot)), depth - 1));
}
return new node_1.BranchNode(subtreeFillToContents(nodes.slice(0, Number(pivot)), depth - 1), subtreeFillToContents(nodes.slice(Number(pivot)), depth - 1));
}
}
exports.subtreeFillToContents = subtreeFillToContents;
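
A small usage sketch of `subtreeFillToContents` above: packing three leaves into a depth-2 subtree (capacity 4), where the pivot logic sends the first two leaves into the left half and pads the right half with a zero node.

```ts
import {LeafNode, subtreeFillToContents} from "@chainsafe/persistent-merkle-tree";

// pivot = 4 / 2 = 2: leaves[0..1] fill the left subtree;
// leaves[2] plus one zero node fill the right subtree.
const leaves = [1, 2, 3].map((i) => new LeafNode(new Uint8Array(32).fill(i)));
const subtreeRoot = subtreeFillToContents(leaves, 2);
```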

lib/tree.d.ts

@@ -7,4 +7,5 @@ import { Gindex } from "./gindex";

private _node;
hook?: Hook;
private hook?;
constructor(node: Node, hook?: Hook);
static createFromProof(proof: Proof): Tree;
get rootNode(): Node;

@@ -30,3 +31,2 @@ set rootNode(n: Node);

getProof(input: ProofInput): Proof;
static createFromProof(proof: Proof): Tree;
}
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Tree = void 0;
const gindex_1 = require("./gindex");

@@ -14,4 +15,14 @@ const node_1 = require("./node");

this._node = node;
this.hook = hook;
if (hook) {
if (typeof WeakRef === "undefined") {
this.hook = hook;
}
else {
this.hook = new WeakRef(hook);
}
}
}
static createFromProof(proof) {
return new Tree(proof_1.createNodeFromProof(proof));
}
get rootNode() {

@@ -23,3 +34,18 @@ return this._node;

if (this.hook) {
this.hook(this);
// WeakRef should not change status during a program's execution
// So, use WeakRef feature detection to assume the type of this.hook
// to minimize the memory footprint of Tree
if (typeof WeakRef === "undefined") {
this.hook(this);
}
else {
const hookVar = this.hook.deref();
if (hookVar) {
hookVar(this);
}
else {
// Hook has been garbage collected, no need to keep the hookRef
this.hook = undefined;
}
}
}
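
The practical consequence of the WeakRef change above (the release's one breaking change): the Tree no longer keeps its hook alive, so the caller must hold a strong reference or the hook can be garbage collected and silently stop firing. A usage sketch, assuming the hook is a plain callback that receives the tree (as the `hookVar(this)` call above implies):

```ts
import {Tree, zeroNode} from "@chainsafe/persistent-merkle-tree";

// Keep a strong reference to the hook for as long as notifications are
// wanted; the Tree itself only holds a WeakRef (where WeakRef exists).
const onUpdate = (t: Tree): void => {
  console.log("tree root changed");
};
const tree = new Tree(zeroNode(2), onUpdate);
// Mutations that replace rootNode will invoke onUpdate(tree);
// if `onUpdate` were only referenced by the Tree, it could be collected.
```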

@@ -122,3 +148,3 @@ }

}
if ((BigInt(1) << BigInt(depth)) < startIndex + count) {
if (BigInt(1) << BigInt(depth) < startIndex + count) {
throw new Error(ERR_COUNT_GT_DEPTH);

@@ -157,3 +183,3 @@ }

do {
const [parentNode, direction,] = nav.pop();
const [parentNode, direction] = nav.pop();
// if direction was left

@@ -180,6 +206,3 @@ if (!direction) {

}
static createFromProof(proof) {
return new Tree(proof_1.createNodeFromProof(proof));
}
}
exports.Tree = Tree;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.zeroNode = void 0;
const node_1 = require("./node");
let zeroes = [new node_1.LeafNode(new Uint8Array(32))];
const zeroes = [new node_1.LeafNode(new Uint8Array(32))];
function zeroNode(depth) {

@@ -6,0 +7,0 @@ if (depth >= zeroes.length) {
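
For reference, `zeroNode(depth)` above memoizes the canonical all-zero subtree of each depth in the `zeroes` array; depth 0 is a 32-byte zero leaf, and (assuming the usual construction, of which this diff only shows the base case) each higher depth is a branch over two references to the level below:

```ts
import {Node, zeroNode} from "@chainsafe/persistent-merkle-tree";

// Conceptually: zeroNode(2) === BranchNode(zeroNode(1), zeroNode(1))
//               zeroNode(1) === BranchNode(zeroNode(0), zeroNode(0))
// so all-zero subtrees of every depth share the same few node objects.
const z2: Node = zeroNode(2);
const z1: Node = zeroNode(1);
```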

package.json

{
"name": "@chainsafe/persistent-merkle-tree",
"version": "0.2.3",
"version": "0.3.0",
"description": "Merkle tree implemented as a persistent datastructure",

@@ -11,3 +11,3 @@ "main": "lib/index.js",

"check-types": "tsc --noEmit",
"build": "tsc --declaration --outDir lib",
"build": "tsc",
"lint": "eslint --color --ext .ts src/",

@@ -37,14 +37,18 @@ "test": "mocha -r ts-node/register 'test/**/*.test.ts'"

"@types/chai": "^4.2.0",
"@types/eslint": "^6.1.3",
"@types/mocha": "^5.2.7",
"@types/node": "^12.0.10",
"@typescript-eslint/eslint-plugin": "^2.7.0",
"@typescript-eslint/parser": "^2.7.0",
"@types/node": "^14.14.0",
"@typescript-eslint/eslint-plugin": "4.9.0",
"@typescript-eslint/parser": "4.9.0",
"chai": "^4.2.0",
"eslint": "^6.6.0",
"eslint": "^7.14.0",
"eslint-plugin-import": "^2.22.1",
"eslint-plugin-no-only-tests": "^2.4.0",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-prettier": "^3.3.1",
"karma": "^4.3.0",
"mocha": "^6.2.0",
"nyc": "^14.1.1",
"ts-node": "^8.3.0",
"typescript": "^3.7.2"
"prettier": "^2.2.1",
"ts-node": "^9.1.0",
"typescript": "^4.1.3"
},

@@ -51,0 +55,0 @@ "dependencies": {

README.md

@@ -99,3 +99,3 @@ # Persistent Merkle Tree

When dealing with large datasets, it is very expensive to merkleize them in their entirety. In cases where large datasets are remerkleized often between updates and additions, using ephemeral structures for intermediate hashes results in significant duplicated work, as many intermediate hashes will be recomputed and thrown away on each merkleization. In these cases, maintaining structures for the entire tree, intermediate nodes included, can mitigate these issues and allow for additional use cases (e.g., proof generation). This implementation also uses the known immutability of nodes to share data between common subtrees across different versions of the data.
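
A sketch of the structural sharing described above, using node constructors from this package: two versions of a depth-2 tree share the untouched left subtree by reference, so its hash is computed at most once and reused across versions.

```ts
import {BranchNode, LeafNode, zeroNode} from "@chainsafe/persistent-merkle-tree";

// Version 1: both halves are the canonical zero subtree.
const left = zeroNode(1);
const v1 = new BranchNode(left, zeroNode(1));

// Version 2: only the right half changes; `left` is shared by reference,
// so its already-computed root is never rehashed for v2.
const newRight = new BranchNode(new LeafNode(new Uint8Array(32).fill(1)), zeroNode(0));
const v2 = new BranchNode(left, newRight);
```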

@@ -102,0 +102,0 @@ ## Features
