@guildofweavers/merkle
Advanced tools
Comparing version 0.2.0 to 0.3.0
@@ -5,7 +5,6 @@ "use strict";
// ================================================================================================
var hash_1 = require("./lib/hash");
exports.createHash = hash_1.createHash;
var MerkleTree_1 = require("./lib/MerkleTree");
exports.MerkleTree = MerkleTree_1.MerkleTree;
var hash_1 = require("./lib/hash");
exports.getHashDigestSize = hash_1.getHashDigestSize;
exports.getHashFunction = hash_1.getHashFunction;
//# sourceMappingURL=index.js.map
@@ -28,8 +28,8 @@ "use strict";
// ================================================================================================
function instantiateBlake2s() {
let initialMemPages = 10;
function instantiateBlake2s(memory) {
if (memory === undefined) {
memory = new WebAssembly.Memory({ initial: 10 });
}
const wasm = loader.instantiateBuffer(fs.readFileSync(BLAKE2S_WASM), {
env: {
memory: new WebAssembly.Memory({ initial: initialMemPages })
}
env: { memory }
});
@@ -36,0 +36,0 @@ let sIdx = wasm.getSigmaRef();
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const sha256 = require("./sha256"); | ||
const blake2s256 = require("./blake2s"); | ||
const wasmBlake2s256 = require("./wasmBlake2s"); | ||
// PUBLIC FUNCTIONS | ||
// ================================================================================================ | ||
function getHashFunction(algorithm) { | ||
switch (algorithm) { | ||
case 'sha256': { | ||
return sha256.hash; | ||
const WasmBlake2s_1 = require("./WasmBlake2s"); | ||
const JsHash_1 = require("./JsHash"); | ||
function createHash(algorithm, useWasmOrOptions) { | ||
if (useWasmOrOptions) { | ||
const wasmOptions = (typeof useWasmOrOptions === 'boolean') | ||
? {} | ||
: useWasmOrOptions; | ||
switch (algorithm) { | ||
case 'blake2s256': { | ||
return new WasmBlake2s_1.WasmBlake2s(wasmOptions.memory); | ||
} | ||
default: { | ||
throw new Error(`WASM-optimization for ${algorithm} hash is not supported`); | ||
} | ||
} | ||
case 'blake2s256': { | ||
return blake2s256.hash; | ||
} | ||
case 'wasmBlake2s256': { | ||
return wasmBlake2s256.hash; | ||
} | ||
default: { | ||
throw new TypeError('Invalid hash algorithm'); | ||
} | ||
} | ||
} | ||
exports.getHashFunction = getHashFunction; | ||
function getHashDigestSize(algorithm) { | ||
switch (algorithm) { | ||
case "sha256": { | ||
return sha256.digestSize; | ||
} | ||
case "blake2s256": { | ||
return blake2s256.digestSize; | ||
} | ||
case 'wasmBlake2s256': { | ||
return wasmBlake2s256.digestSize; | ||
} | ||
default: { | ||
throw new TypeError('Invalid hash algorithm'); | ||
} | ||
else { | ||
return new JsHash_1.JsHash(algorithm); | ||
} | ||
} | ||
exports.getHashDigestSize = getHashDigestSize; | ||
function getMerkleTreeBuilder(algorithm) { | ||
switch (algorithm) { | ||
case "sha256": { | ||
return sha256.buildMerkleTree; | ||
} | ||
case "blake2s256": { | ||
return blake2s256.buildMerkleTree; | ||
} | ||
case 'wasmBlake2s256': { | ||
return wasmBlake2s256.buildMerkleTree; | ||
} | ||
default: { | ||
throw new TypeError('Invalid hash algorithm'); | ||
} | ||
} | ||
} | ||
exports.getMerkleTreeBuilder = getMerkleTreeBuilder; | ||
exports.createHash = createHash; | ||
//# sourceMappingURL=index.js.map |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const hashing = require("./hash"); | ||
const JsVector_1 = require("./JsVector"); | ||
// CLASS DEFINITION | ||
@@ -9,18 +9,17 @@ // ================================================================================================ | ||
// -------------------------------------------------------------------------------------------- | ||
static async createAsync(values, hashAlgorithm) { | ||
static async createAsync(values, hash) { | ||
// FUTURE: implement asynchronous instantiation | ||
return MerkleTree.create(values, hashAlgorithm); | ||
return MerkleTree.create(values, hash); | ||
} | ||
static create(values, hashAlgorithm) { | ||
const buildTree = hashing.getMerkleTreeBuilder(hashAlgorithm); | ||
const nodeSize = hashing.getHashDigestSize(hashAlgorithm); | ||
static create(values, hash) { | ||
const depth = Math.ceil(Math.log2(values.length)); | ||
const nodes = buildTree(depth, values); | ||
return new MerkleTree(nodes, values, depth, nodeSize); | ||
const leaves = Array.isArray(values) ? new JsVector_1.JsVector(values) : values; | ||
const nodes = hash.buildMerkleNodes(depth, leaves); | ||
return new MerkleTree(nodes, leaves, depth, hash.digestSize); | ||
} | ||
constructor(nodes, values, depth, nodeSize) { | ||
constructor(nodes, leaves, depth, nodeSize) { | ||
this.depth = depth; | ||
this.nodes = nodes; | ||
this.values = values; | ||
this.nodeSize = nodeSize; | ||
this.values = leaves; | ||
} | ||
@@ -32,2 +31,5 @@ // PUBLIC ACCESSORS | ||
} | ||
getLeaf(index) { | ||
return this.values.toBuffer(index, 1); | ||
} | ||
// PUBLIC METHODS | ||
@@ -44,4 +46,4 @@ // -------------------------------------------------------------------------------------------- | ||
const nodeCount = this.nodes.byteLength / nodeSize; | ||
const value1 = this.values[index]; | ||
const value2 = this.values[index ^ 1]; | ||
const value1 = this.values.toBuffer(index, 1); | ||
const value2 = this.values.toBuffer(index ^ 1, 1); | ||
const proof = [value1, value2]; | ||
@@ -70,4 +72,4 @@ index = (index + nodeCount) >> 1; | ||
let index = indexes[i]; | ||
let v1 = this.values[index]; | ||
let v2 = this.values[index + 1]; | ||
let v1 = this.values.toBuffer(index, 1); | ||
let v2 = this.values.toBuffer(index + 1, 1); | ||
// only values for indexes that were explicitly requested are included in values array | ||
@@ -114,15 +116,14 @@ const inputIndex1 = indexMap.get(index); | ||
// -------------------------------------------------------------------------------------------- | ||
static verify(root, index, proof, hashAlgorithm) { | ||
const hash = hashing.getHashFunction(hashAlgorithm); | ||
static verify(root, index, proof, hash) { | ||
const r = index & 1; | ||
const value1 = proof[r]; | ||
const value2 = proof[1 - r]; | ||
let v = hash(value1, value2); | ||
let v = hash.merge(value1, value2); | ||
index = (index + 2 ** (proof.length - 1)) >> 1; | ||
for (let i = 2; i < proof.length; i++) { | ||
if (index & 1) { | ||
v = hash(proof[i], v); | ||
v = hash.merge(proof[i], v); | ||
} | ||
else { | ||
v = hash(v, proof[i]); | ||
v = hash.merge(v, proof[i]); | ||
} | ||
@@ -133,5 +134,4 @@ index = index >> 1; | ||
} | ||
static verifyBatch(root, indexes, proof, hashAlgorithm) { | ||
static verifyBatch(root, indexes, proof, hash) { | ||
const v = new Map(); | ||
const hash = hashing.getHashFunction(hashAlgorithm); | ||
// replace odd indexes, offset, and sort in ascending order | ||
@@ -168,3 +168,6 @@ const offset = 2 ** proof.depth; | ||
} | ||
let parent = hash(v1, v2); | ||
// if either value wasn't found, proof fails | ||
if (v1 === undefined || v2 === undefined) | ||
return false; | ||
let parent = hash.merge(v1, v2); | ||
let parentIndex = (offset + index >> 1); | ||
@@ -197,3 +200,3 @@ v.set(parentIndex, parent); | ||
// calculate parent node and add it to the next set of nodes | ||
let parent = (nodeIndex & 1) ? hash(sibling, node) : hash(node, sibling); | ||
let parent = (nodeIndex & 1) ? hash.merge(sibling, node) : hash.merge(node, sibling); | ||
let parentIndex = nodeIndex >> 1; | ||
@@ -200,0 +203,0 @@ v.set(parentIndex, parent); |
declare module '@guildofweavers/merkle' {
/** Algorithms that can be used to hash internal tree nodes */
export type HashAlgorithm = 'sha256' | 'blake2s256' | 'wasmBlake2s256';
/** Returns digest size (in bytes) for the specified hash algorithm */
export function getHashDigestSize(hashAlgorithm: HashAlgorithm): number;
// HASHING
// --------------------------------------------------------------------------------------------
export type HashAlgorithm = 'sha256' | 'blake2s256';
/** Returns a hash function for the specified algorithm */
export function getHashFunction(hashAlgorithm: HashAlgorithm): HashFunction;
/**
 * Creates a Hash object for the specified algorithm; if useWasm is set to true, will use a
 * WebAssembly-optimized version of the algorithm. If WASM-optimization is not available for
 * the specified algorithm, throws an error.
 */
export function createHash(algorithm: HashAlgorithm, useWasm?: boolean): Hash;
/**
 * Creates a WebAssembly-optimized Hash object for the specified algorithm and passes provided
 * options to it. If WASM-optimization is not available for the specified algorithm, throws
 * an error.
 */
export function createHash(algorithm: HashAlgorithm, options: Partial<WasmOptions>): Hash;
export interface WasmOptions {
    readonly memory: WebAssembly.Memory;
}
export interface Hash {
    readonly algorithm  : HashAlgorithm;
    readonly digestSize : number;
    /** Hashes the provided value */
    digest(value: Buffer): Buffer;
    /** Hashes a concatenation of a and b */
    merge(a: Buffer, b: Buffer): Buffer;
    buildMerkleNodes(depth: number, leaves: Vector): ArrayBuffer;
}
// MERKLE TREE
// --------------------------------------------------------------------------------------------
export class MerkleTree {
@@ -17,5 +45,5 @@
 * @param values Values that form the leaves of the tree
 * @param hashAlgorithm Algorithm to use for hashing of internal nodes
 * @param hash Hash object to use for hashing of internal nodes
 */
static create(values: Buffer[], hashAlgorithm: HashAlgorithm): MerkleTree;
static create(values: Buffer[] | Vector, hash: Hash): MerkleTree;
@@ -25,5 +53,5 @@ /**
 * @param values Values that form the leaves of the tree
 * @param hashAlgorithm Algorithm to use for hashing of internal nodes
 * @param hash Hash object to use for hashing of internal nodes
 */
static createAsync(values: Buffer[], hashAlgorithm: HashAlgorithm): Promise<MerkleTree>;
static createAsync(values: Buffer[] | Vector, hash: Hash): Promise<MerkleTree>;
@@ -33,4 +61,4 @@ /** Root of the tree */
/** Leaves of the tree */
readonly values: Buffer[];
/** Returns a leaf node located at the specified index */
getLeaf(index: number): Buffer;
@@ -48,5 +76,5 @@ /** Returns a Merkle proof for a single leaf at the specified index */
 * @param proof Merkle proof for the leaf at the specified index
 * @param hashAlgorithm Algorithm used for hashing of internal nodes
 * @param hash Hash object to use for hashing of internal nodes
 */
static verify(root: Buffer, index: number, proof: Buffer[], hashAlgorithm: HashAlgorithm): boolean;
static verify(root: Buffer, index: number, proof: Buffer[], hash: Hash): boolean;
@@ -58,5 +86,5 @@ /**
 * @param proof Compressed Merkle proof for the leaves at the specified indexes
 * @param hashAlgorithm Algorithm used for hashing of internal nodes
 * @param hash Hash object to use for hashing of internal nodes
 */
static verifyBatch(root: Buffer, indexes: number[], proof: BatchMerkleProof, hashAlgorithm: HashAlgorithm): boolean;
static verifyBatch(root: Buffer, indexes: number[], proof: BatchMerkleProof, hash: Hash): boolean;
}
@@ -75,5 +103,11 @@
export interface HashFunction {
    (v1: Buffer, v2?: Buffer): Buffer;
// INTERNAL DATA STRUCTURES
// --------------------------------------------------------------------------------------------
export interface Vector {
    readonly length      : number;
    readonly byteLength  : number;
    readonly elementSize : number;
    toBuffer(startIdx?: number, elementCount?: number): Buffer;
}
}
{
"name": "@guildofweavers/merkle",
"version": "0.2.0",
"version": "0.3.0",
"description": "Merkle tree and other data structures",
@@ -5,0 +5,0 @@ "main": "index.js",
@@ -11,3 +11,3 @@ # Merkle
```TypeScript
import { MerkleTree } from '@guildofweavers/merkle';
import { MerkleTree, createHash } from '@guildofweavers/merkle';
@@ -23,3 +23,4 @@ // create an array of values to put into a tree
// create a Merkle tree
const tree = MerkleTree.create(values, 'sha256');
const hash = createHash('sha256');
const tree = MerkleTree.create(values, hash);
@@ -31,3 +32,3 @@ // create a proof for the second position in the tree (value 'b')
// verify the proof
const result = MerkleTree.verify(tree.root, 1, proof, 'sha256');
const result = MerkleTree.verify(tree.root, 1, proof, hash);
console.log(result); // true
@@ -42,10 +43,11 @@ ```
* static **create**(values: `Buffer[]`, hashAlgorithm: `string`): `MerkleTree`
* static **createAsync**(values: `Buffer[]`, hashAlgorithm: `string`): `Promise<MerkleTree>`
* static **create**(values: `Buffer[]` | `Vector`, hash: `Hash`): `MerkleTree`
* static **createAsync**(values: `Buffer[]` | `Vector`, hash: `Hash`): `Promise<MerkleTree>`
Both methods take an array of values and a name of the hash algorithm to use when building the tree. Currently, the following hash algorithms are supported:
The meaning of the parameters is as follows:
* `sha256` - Node's native implementation of the SHA256 algorithm.
* `blake2s256` - Node's native implementation of the Blake2s algorithm for 256-bit output.
* `wasmBlake2s256` - WebAssembly-based implementation of the Blake2s algorithm for 256-bit output.
| Parameter | Description |
| --------- | ----------- |
| values    | Values that will form the leaves of the Merkle tree. If provided as an array of `Buffer` objects, all buffers are assumed to have the same length (otherwise, the tree will not be built correctly). Can also be provided as an object that complies with the `Vector` interface. |
| hash      | A [hash](#Hash) object that will be used to hash values and internal nodes. |
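For the `Vector` case, any object with the shape of the `Vector` interface from the type declarations above should work. Below is a minimal illustrative sketch (the `BufferVector` class is hypothetical and not part of the library) backed by a single contiguous buffer of fixed-size elements:
```TypeScript
import { MerkleTree, createHash, Vector } from '@guildofweavers/merkle';

// Hypothetical Vector implementation over one contiguous buffer of fixed-size elements
class BufferVector implements Vector {
    constructor(private data: Buffer, readonly elementSize: number) {}

    get length(): number { return this.data.byteLength / this.elementSize; }
    get byteLength(): number { return this.data.byteLength; }

    toBuffer(startIdx = 0, elementCount?: number): Buffer {
        const count = elementCount === undefined ? this.length - startIdx : elementCount;
        const offset = startIdx * this.elementSize;
        return this.data.slice(offset, offset + count * this.elementSize);
    }
}

const leaves = new BufferVector(Buffer.alloc(4 * 32), 32);  // 4 leaves, 32 bytes each
const tree = MerkleTree.create(leaves, createHash('sha256'));
```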
@@ -63,5 +65,5 @@ **Note:** the async method is currently just a placeholder. All it does is call the sync version and return the result.
* **proveBatch**(indexes: `number[]`): `BatchMerkleProof`<br />
The resulting proof is compressed. So, if you need to prove membership of multiple values, this is a much more efficient approach.
The resulting proof is [compressed](#Batch-proof-compression). So, if you need to prove membership of multiple values, this is a much more efficient approach (see the example after the `verifyBatch()` description below).
Batch proof has the following form:
@@ -80,3 +82,3 @@ ```TypeScript
* static **verify**(root: `Buffer`, index: `number`, proof: `Buffer[]`, hashAlgorithm: `string`): `boolean`<br />
* static **verify**(root: `Buffer`, index: `number`, proof: `Buffer[]`, hash: `Hash`): `boolean`<br />
This will return `true` if the value located at the first position in the `proof` array is indeed located at the specified `index` in the tree.
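Continuing the quick-start example above, verification is expected to fail (return `false`) when the index does not match the proof:
```TypeScript
// `tree`, `proof`, and `hash` are from the quick-start example above
console.log(MerkleTree.verify(tree.root, 1, proof, hash)); // true  - proof was built for index 1
console.log(MerkleTree.verify(tree.root, 2, proof, hash)); // false - wrong index for this proof
```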
@@ -86,16 +88,59 @@
* static **verifyBatch**(root: `Buffer`, indexes: `number[]`, proof: `BatchMerkleProof`, hashAlgorithm: `string`): `boolean`<br />
* static **verifyBatch**(root: `Buffer`, indexes: `number[]`, proof: `BatchMerkleProof`, hash: `Hash`): `boolean`<br />
Similarly to the single-index version, this will return `true` if the values in `proof.values` are indeed located at the specified `indexes` in the tree.
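A minimal batch round trip, assuming the `tree` and `hash` from the quick-start example above (and a tree with at least four leaves), might look like this:
```TypeScript
// prove membership of several leaves with one compressed proof
const indexes = [1, 3];
const batchProof = tree.proveBatch(indexes);

// verify all of them in a single call
const ok = MerkleTree.verifyBatch(tree.root, indexes, batchProof, hash);
console.log(ok); // true
```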
### Hash
A `Hash` object is required when creating Merkle trees and when verifying Merkle proofs. Internally, it is used for hashing of all values and tree nodes. To create a Hash object, you can use the `createHash()` function (see the sketch after the table below):
* **createHash**(algorithm: `string`, useWasm?: `boolean`): `Hash`<br />
Creates a Hash object for the specified `algorithm`. If the optional `useWasm` parameter is set to _true_, will use a WebAssembly-optimized version of the algorithm. If WASM-optimization is not available for the specified algorithm, throws an error.
* **createHash**(algorithm: `string`, wasmOptions: `WasmOptions`): `Hash`<br />
Creates a WebAssembly-optimized Hash object for the specified `algorithm` and passes the provided options to it. If WASM-optimization is not available for the specified algorithm, throws an error.
Currently, the following hash algorithms are supported:
| Algorithm  | WASM-optimized |
| ---------- | :------------: |
| sha256     | no             |
| blake2s256 | yes            |
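As a sketch of the overloads described above (the specific memory size is just an illustrative value):
```TypeScript
import { createHash } from '@guildofweavers/merkle';

// plain implementation, no WASM
const sha = createHash('sha256');

// WebAssembly-optimized Blake2s with default memory
const blake = createHash('blake2s256', true);

// WebAssembly-optimized Blake2s with caller-provided memory
const memory = new WebAssembly.Memory({ initial: 10 });   // 10 x 64 KB pages
const blakeWithMemory = createHash('blake2s256', { memory });

// throws: sha256 has no WASM-optimized implementation (see the table above)
// const bad = createHash('sha256', true);
```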
Hash objects returned from the `createHash()` function have the following form:
```TypeScript
interface Hash {
    readonly algorithm  : HashAlgorithm;
    readonly digestSize : number;
    digest(value: Buffer): Buffer;
    merge(a: Buffer, b: Buffer): Buffer;
}
```
where `digest(value)` hashes the provided value, and `merge(a, b)` hashes a concatenation of values `a` and `b`.
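For example (per the description above, `merge(a, b)` should produce the same result as hashing the concatenation of `a` and `b`):
```TypeScript
const hash = createHash('blake2s256');

const a = hash.digest(Buffer.from('foo'));   // 32-byte digest
const b = hash.digest(Buffer.from('bar'));

const parent = hash.merge(a, b);
console.log(parent.equals(hash.digest(Buffer.concat([a, b])))); // true
```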
## Performance
Some very informal benchmarks run on Intel Core i5-7300U @ 2.60GHz (single thread) for generating a tree out of 2<sup>20</sup> values:
| Hash Algorithm | Time        |
| -------------- | ----------- |
| sha256         | 3.5 seconds |
| blake2s256     | 3.2 seconds |
| wasmBlake2s256 | 1 second    |
| Hash Algorithm | Native JS | WASM (external) | WASM (internal) |
| -------------- | --------- | --------------- | --------------- |
| sha256         | 3.5 sec   | N/A             | N/A             |
| blake2s256     | 3.2 sec   | 750 ms          | 650 ms          |
**Note:** while `wasmBlake2s256` is much faster at hashing small values (i.e. 32-64 bytes), it is slower at hashing large values. For example, when hashing 1KB values, Node's native implementation of Blake2s is about 30% faster.
The difference between the _external_ and _internal_ cases for WASM is that in the internal case, the values from which the tree is to be built are already in WASM memory, while in the external case, they need to be copied into WASM memory.
**Note:** while the WebAssembly-optimized version of the Blake2s algorithm is much faster at hashing small values (i.e. 32-64 bytes), it is slower at hashing large values. For example, when hashing 1KB values, Node's native implementation is about 30% faster.
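These numbers are informal; a rough way to reproduce the small-value vs. large-value difference on your own machine is a sketch like the following (timings will vary):
```TypeScript
import { createHash } from '@guildofweavers/merkle';

const nativeHash = createHash('blake2s256');        // Node's native implementation
const wasmHash   = createHash('blake2s256', true);  // WebAssembly-optimized implementation

for (const size of [32, 1024]) {
    const value = Buffer.alloc(size, 7);

    console.time(`native blake2s256, ${size}-byte values`);
    for (let i = 0; i < 100000; i++) nativeHash.digest(value);
    console.timeEnd(`native blake2s256, ${size}-byte values`);

    console.time(`wasm   blake2s256, ${size}-byte values`);
    for (let i = 0; i < 100000; i++) wasmHash.digest(value);
    console.timeEnd(`wasm   blake2s256, ${size}-byte values`);
}
```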
### Batch proof compression
When you generate batch proofs, the proofs are compressed by removing redundant nodes. The table below shows the approximate size of a batch proof for a given number of indexes against trees of a given size.
| Tree leaves    | 32 indexes    | 64 indexes    | 128 indexes   | 256 indexes   |
| :------------: | ------------: | ------------: | ------------: | ------------: |
| 2<sup>10</sup> | 5.2 KB (47%)  | 8.6 KB (39%)  | 13.4 KB (30%) | 20.1 KB (23%) |
| 2<sup>12</sup> | 7.0 KB (54%)  | 12.4 KB (48%) | 20.6 KB (40%) | 34.0 KB (33%) |
| 2<sup>14</sup> | 9.2 KB (61%)  | 16.2 KB (54%) | 28.6 KB (48%) | 49.3 KB (41%) |
| 2<sup>16</sup> | 11.0 KB (65%) | 20.3 KB (60%) | 36.5 KB (54%) | 65.2 KB (48%) |
| 2<sup>18</sup> | 13.1 KB (69%) | 24.5 KB (63%) | 44.6 KB (59%) | 81.0 KB (53%) |
| 2<sup>20</sup> | 15.1 KB (72%) | 28.4 KB (68%) | 52.5 KB (63%) | 96.8 KB (58%) |
The percentages next to the proof sizes are ratios of the batch proof size to a naive proof size. For example, if you generate a batch proof for 32 indexes against a tree of 2<sup>10</sup> leaves, your proof will be about 5.2 KB, which is 47% of the combined size of 32 individual proofs against the same tree.
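As a back-of-the-envelope check of the first cell (assuming 32-byte digests and the single-proof layout shown in the code above, i.e. the leaf, its sibling, and one node per remaining level):
```TypeScript
const digestSize = 32;                              // bytes per node (sha256 / blake2s256)
const depth = 10;                                   // tree with 2**10 leaves
const singleProofSize = (depth + 1) * digestSize;   // 352 bytes per individual proof
const naiveSize = 32 * singleProofSize / 1024;      // 32 separate proofs ≈ 11 KB

console.log(naiveSize.toFixed(1));                  // "11.0"
console.log((5.2 / naiveSize).toFixed(2));          // "0.47" - matches the 47% in the table
```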
## References
@@ -102,0 +147,0 @@
Sorry, the diff of this file is not supported yet
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package