@chainsafe/ssz - npm Package Compare versions

Comparing version 0.8.20 to 0.9.0

lib/branchNodeStruct.d.ts


CHANGELOG.md

@@ -6,2 +6,13 @@ # Change Log

# [0.9.0](http://chainsafe/ssz/compare/@chainsafe/ssz@0.8.19...@chainsafe/ssz@0.9.0) (2022-03-24)
* SSZ v2 (#223) ([9d167b7](http://chainsafe/ssz/commits/9d167b703b1e974ee4943be15710aa9783183986)), closes [#223](http://chainsafe/ssz/issues/223) [#227](http://chainsafe/ssz/issues/227)
* Convert as-sha256 to typescript (#244) ([2d4e3fe](http://chainsafe/ssz/commits/2d4e3febec89ca8ca7c89a19c6949c3213c2c45c)), closes [#244](http://chainsafe/ssz/issues/244)
### BREAKING CHANGES
* complete refactor, see packages/ssz/README.md for details
## 0.8.20 (2021-11-23)

@@ -8,0 +19,0 @@ - Harden ssz implementation [#211](https://github.com/ChainSafe/ssz/pull/211)


lib/index.d.ts

@@ -1,7 +0,26 @@

export * from "./interface";
export * from "./types";
export * from "./backings";
export { hash } from "./util/compat";
export * from "./util/byteArray";
export * from "./util/tree";
export { BitListType } from "./type/bitList";
export { BitVectorType } from "./type/bitVector";
export { BooleanType } from "./type/boolean";
export { ByteListType } from "./type/byteList";
export { ByteVectorType } from "./type/byteVector";
export { ContainerType } from "./type/container";
export { ContainerNodeStructType } from "./type/containerNodeStruct";
export { ListBasicType } from "./type/listBasic";
export { ListCompositeType } from "./type/listComposite";
export { NoneType } from "./type/none";
export { UintBigintType, UintNumberType } from "./type/uint";
export { UnionType } from "./type/union";
export { VectorBasicType } from "./type/vectorBasic";
export { VectorCompositeType } from "./type/vectorComposite";
export { ArrayType } from "./type/array";
export { BitArrayType } from "./type/bitArray";
export { ByteArrayType } from "./type/byteArray";
export { Type, ValueOf, JsonPath } from "./type/abstract";
export { BasicType, isBasicType } from "./type/basic";
export { CompositeType, CompositeTypeAny, CompositeView, CompositeViewDU, isCompositeType } from "./type/composite";
export { TreeView } from "./view/abstract";
export { TreeViewDU } from "./viewDU/abstract";
export { BitArray, getUint8ByteToBitBooleanArray } from "./value/bitArray";
export { fromHexString, toHexString, byteArrayEquals } from "./util/byteArray";
export { hash64 } from "./util/merkleize";
//# sourceMappingURL=index.d.ts.map
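
To make the reorganized export surface above concrete, here is a small, hypothetical usage sketch; the `Checkpoint` container and its fields are illustrative and not part of the package.

```ts
// Hypothetical consumer of the 0.9.0 exports listed above.
// The `Checkpoint` type and its fields are made up for illustration.
import {ContainerType, UintNumberType, ByteVectorType, ValueOf, toHexString} from "@chainsafe/ssz";

const Checkpoint = new ContainerType({
  epoch: new UintNumberType(8),
  root: new ByteVectorType(32),
});
// Derive the plain-value type from the SSZ type definition
type Checkpoint = ValueOf<typeof Checkpoint>;

const cp: Checkpoint = Checkpoint.defaultValue();
cp.epoch = 1;
console.log(Checkpoint.serialize(cp).length); // 40 bytes: 8 (epoch) + 32 (root)
console.log(toHexString(Checkpoint.hashTreeRoot(cp)));
```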


lib/index.js
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.hash = void 0;
__exportStar(require("./interface"), exports);
__exportStar(require("./types"), exports);
__exportStar(require("./backings"), exports);
var compat_1 = require("./util/compat");
Object.defineProperty(exports, "hash", { enumerable: true, get: function () { return compat_1.hash; } });
__exportStar(require("./util/byteArray"), exports);
__exportStar(require("./util/tree"), exports);
exports.hash64 = exports.byteArrayEquals = exports.toHexString = exports.fromHexString = exports.getUint8ByteToBitBooleanArray = exports.BitArray = exports.TreeViewDU = exports.TreeView = exports.isCompositeType = exports.CompositeType = exports.isBasicType = exports.BasicType = exports.Type = exports.ByteArrayType = exports.BitArrayType = exports.ArrayType = exports.VectorCompositeType = exports.VectorBasicType = exports.UnionType = exports.UintNumberType = exports.UintBigintType = exports.NoneType = exports.ListCompositeType = exports.ListBasicType = exports.ContainerNodeStructType = exports.ContainerType = exports.ByteVectorType = exports.ByteListType = exports.BooleanType = exports.BitVectorType = exports.BitListType = void 0;
// Types
var bitList_1 = require("./type/bitList");
Object.defineProperty(exports, "BitListType", { enumerable: true, get: function () { return bitList_1.BitListType; } });
var bitVector_1 = require("./type/bitVector");
Object.defineProperty(exports, "BitVectorType", { enumerable: true, get: function () { return bitVector_1.BitVectorType; } });
var boolean_1 = require("./type/boolean");
Object.defineProperty(exports, "BooleanType", { enumerable: true, get: function () { return boolean_1.BooleanType; } });
var byteList_1 = require("./type/byteList");
Object.defineProperty(exports, "ByteListType", { enumerable: true, get: function () { return byteList_1.ByteListType; } });
var byteVector_1 = require("./type/byteVector");
Object.defineProperty(exports, "ByteVectorType", { enumerable: true, get: function () { return byteVector_1.ByteVectorType; } });
var container_1 = require("./type/container");
Object.defineProperty(exports, "ContainerType", { enumerable: true, get: function () { return container_1.ContainerType; } });
var containerNodeStruct_1 = require("./type/containerNodeStruct");
Object.defineProperty(exports, "ContainerNodeStructType", { enumerable: true, get: function () { return containerNodeStruct_1.ContainerNodeStructType; } });
var listBasic_1 = require("./type/listBasic");
Object.defineProperty(exports, "ListBasicType", { enumerable: true, get: function () { return listBasic_1.ListBasicType; } });
var listComposite_1 = require("./type/listComposite");
Object.defineProperty(exports, "ListCompositeType", { enumerable: true, get: function () { return listComposite_1.ListCompositeType; } });
var none_1 = require("./type/none");
Object.defineProperty(exports, "NoneType", { enumerable: true, get: function () { return none_1.NoneType; } });
var uint_1 = require("./type/uint");
Object.defineProperty(exports, "UintBigintType", { enumerable: true, get: function () { return uint_1.UintBigintType; } });
Object.defineProperty(exports, "UintNumberType", { enumerable: true, get: function () { return uint_1.UintNumberType; } });
var union_1 = require("./type/union");
Object.defineProperty(exports, "UnionType", { enumerable: true, get: function () { return union_1.UnionType; } });
var vectorBasic_1 = require("./type/vectorBasic");
Object.defineProperty(exports, "VectorBasicType", { enumerable: true, get: function () { return vectorBasic_1.VectorBasicType; } });
var vectorComposite_1 = require("./type/vectorComposite");
Object.defineProperty(exports, "VectorCompositeType", { enumerable: true, get: function () { return vectorComposite_1.VectorCompositeType; } });
// Base types
var array_1 = require("./type/array");
Object.defineProperty(exports, "ArrayType", { enumerable: true, get: function () { return array_1.ArrayType; } });
var bitArray_1 = require("./type/bitArray");
Object.defineProperty(exports, "BitArrayType", { enumerable: true, get: function () { return bitArray_1.BitArrayType; } });
var byteArray_1 = require("./type/byteArray");
Object.defineProperty(exports, "ByteArrayType", { enumerable: true, get: function () { return byteArray_1.ByteArrayType; } });
// Base type classes
var abstract_1 = require("./type/abstract");
Object.defineProperty(exports, "Type", { enumerable: true, get: function () { return abstract_1.Type; } });
var basic_1 = require("./type/basic");
Object.defineProperty(exports, "BasicType", { enumerable: true, get: function () { return basic_1.BasicType; } });
Object.defineProperty(exports, "isBasicType", { enumerable: true, get: function () { return basic_1.isBasicType; } });
var composite_1 = require("./type/composite");
Object.defineProperty(exports, "CompositeType", { enumerable: true, get: function () { return composite_1.CompositeType; } });
Object.defineProperty(exports, "isCompositeType", { enumerable: true, get: function () { return composite_1.isCompositeType; } });
var abstract_2 = require("./view/abstract");
Object.defineProperty(exports, "TreeView", { enumerable: true, get: function () { return abstract_2.TreeView; } });
var abstract_3 = require("./viewDU/abstract");
Object.defineProperty(exports, "TreeViewDU", { enumerable: true, get: function () { return abstract_3.TreeViewDU; } });
// Values
var bitArray_2 = require("./value/bitArray");
Object.defineProperty(exports, "BitArray", { enumerable: true, get: function () { return bitArray_2.BitArray; } });
Object.defineProperty(exports, "getUint8ByteToBitBooleanArray", { enumerable: true, get: function () { return bitArray_2.getUint8ByteToBitBooleanArray; } });
// Utils
var byteArray_2 = require("./util/byteArray");
Object.defineProperty(exports, "fromHexString", { enumerable: true, get: function () { return byteArray_2.fromHexString; } });
Object.defineProperty(exports, "toHexString", { enumerable: true, get: function () { return byteArray_2.toHexString; } });
Object.defineProperty(exports, "byteArrayEquals", { enumerable: true, get: function () { return byteArray_2.byteArrayEquals; } });
var merkleize_1 = require("./util/merkleize");
Object.defineProperty(exports, "hash64", { enumerable: true, get: function () { return merkleize_1.hash64; } });
//# sourceMappingURL=index.js.map

lib/util/byteArray.d.ts

@@ -5,3 +5,2 @@ import { ByteVector } from "../interface";

export declare function byteArrayEquals(a: Uint8Array, b: Uint8Array): boolean;
export declare function getByteBits(target: Uint8Array, offset: number): boolean[];
//# sourceMappingURL=byteArray.d.ts.map
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getByteBits = exports.byteArrayEquals = exports.fromHexString = exports.toHexString = void 0;
exports.byteArrayEquals = exports.fromHexString = exports.toHexString = void 0;
// Caching this info costs about ~1000 bytes and speeds up toHexString() by x6
const hexByByte = [];
const hexByByte = new Array(256);
function toHexString(bytes) {

@@ -19,3 +19,3 @@ let hex = "0x";

if (typeof hex !== "string") {
throw new Error("Expected hex string to be a string");
throw new Error(`hex argument type ${typeof hex} must be of type string`);
}

@@ -26,10 +26,11 @@ if (hex.startsWith("0x")) {

if (hex.length % 2 !== 0) {
throw new Error("Expected an even number of characters");
throw new Error(`hex string length ${hex.length} must be multiple of 2`);
}
const bytes = [];
for (let i = 0, len = hex.length; i < len; i += 2) {
const byte = parseInt(hex.slice(i, i + 2), 16);
bytes.push(byte);
const byteLen = hex.length / 2;
const bytes = new Uint8Array(byteLen);
for (let i = 0; i < byteLen; i++) {
const byte = parseInt(hex.slice(i * 2, (i + 1) * 2), 16);
bytes[i] = byte;
}
return new Uint8Array(bytes);
return bytes;
}

@@ -48,13 +49,2 @@ exports.fromHexString = fromHexString;

exports.byteArrayEquals = byteArrayEquals;
function getByteBits(target, offset) {
const byte = target[offset];
if (!byte) {
return [false, false, false, false, false, false, false, false];
}
const bits = Array.prototype.map
.call(byte.toString(2).padStart(8, "0"), (c) => (c === "1" ? true : false))
.reverse();
return bits;
}
exports.getByteBits = getByteBits;
//# sourceMappingURL=byteArray.js.map
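
The comment above notes that caching the byte-to-hex strings costs about ~1000 bytes and makes toHexString() roughly 6x faster, but the function body itself is elided from the hunk. A minimal sketch of how such a `hexByByte` lookup table can be used, written as an assumption rather than the package's exact code:

```ts
// Sketch only: the real 0.9.0 toHexString() body is not shown in the diff above.
const hexByByte: string[] = new Array(256);

function toHexString(bytes: Uint8Array): string {
  let hex = "0x";
  for (const byte of bytes) {
    // Compute each two-character hex string once and reuse it afterwards
    if (hexByByte[byte] === undefined) {
      hexByByte[byte] = byte < 16 ? "0" + byte.toString(16) : byte.toString(16);
    }
    hex += hexByByte[byte];
  }
  return hex;
}
```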

lib/util/merkleize.d.ts

@@ -1,6 +0,26 @@

/// <reference types="node" />
export declare function hash64(bytes32A: Uint8Array, bytes32B: Uint8Array): Uint8Array;
export declare function merkleize(chunks: Uint8Array[], padFor: number): Uint8Array;
/**
* Split a long Uint8Array into Uint8Array of exactly 32 bytes
*/
export declare function splitIntoRootChunks(longChunk: Uint8Array): Uint8Array[];
/** @ignore */
export declare function merkleize(chunks: Buffer[], padFor?: number): Buffer;
export declare function mixInLength(root: Uint8Array, length: number): Uint8Array;
export declare function bitLength(i: number): number;
/**
* Given maxChunkCount return the chunkDepth
* ```
* n: [0,1,2,3,4,5,6,7,8,9]
* d: [0,0,1,2,2,3,3,3,3,4]
* ```
*/
export declare function maxChunksToDepth(n: number): number;
/** @ignore */
export declare function mixInLength(root: Buffer, length: number): Buffer;
export declare function nextPowerOf2(n: number): number;
//# sourceMappingURL=merkleize.d.ts.map
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.mixInLength = exports.merkleize = void 0;
/** @module ssz */
const hash_1 = require("./hash");
const math_1 = require("./math");
exports.nextPowerOf2 = exports.maxChunksToDepth = exports.bitLength = exports.mixInLength = exports.splitIntoRootChunks = exports.merkleize = exports.hash64 = void 0;
const as_sha256_1 = require("@chainsafe/as-sha256");
const zeros_1 = require("./zeros");
/** @ignore */
function merkleize(chunks, padFor = 0) {
const layerCount = math_1.bitLength(math_1.nextPowerOf2(padFor || chunks.length) - 1);
function hash64(bytes32A, bytes32B) {
return as_sha256_1.digest64(Buffer.concat([bytes32A, bytes32B]));
}
exports.hash64 = hash64;
function merkleize(chunks, padFor) {
const layerCount = bitLength(nextPowerOf2(padFor) - 1);
if (chunks.length == 0) {
return zeros_1.zeroHashes[layerCount];
return zeros_1.zeroHash(layerCount);
}
let chunkCount = chunks.length;
// Instead of pushing on all padding zero chunks at the leaf level
// we push on zero hash chunks at the highest possible level to avoid over-hashing
let layer = 0;
while (layer < layerCount) {
for (let l = 0; l < layerCount; l++) {
const padCount = chunkCount % 2;
const paddedChunkCount = chunkCount + padCount;
// if the chunks.length is odd
// we need to push on the zero-hash of that level to merkleize that level
if (chunks.length % 2 == 1) {
chunks.push(zeros_1.zeroHashes[layer]);
for (let i = 0; i < padCount; i++) {
chunks[chunkCount + i] = zeros_1.zeroHash(l);
}
for (let i = 0; i < chunks.length; i += 2) {
const h = hash_1.hash(chunks[i], chunks[i + 1]);
chunks[i / 2] = Buffer.from(h.buffer, h.byteOffset, h.byteLength);
for (let i = 0; i < paddedChunkCount; i += 2) {
chunks[i / 2] = hash64(chunks[i], chunks[i + 1]);
}
chunks.splice(chunks.length / 2, chunks.length / 2);
layer++;
chunkCount = paddedChunkCount / 2;
}

@@ -33,2 +34,16 @@ return chunks[0];

exports.merkleize = merkleize;
/**
* Split a long Uint8Array into Uint8Array of exactly 32 bytes
*/
function splitIntoRootChunks(longChunk) {
const chunkCount = Math.ceil(longChunk.length / 32);
const chunks = new Array(chunkCount);
for (let i = 0; i < chunkCount; i++) {
const chunk = new Uint8Array(32);
chunk.set(longChunk.slice(i * 32, (i + 1) * 32));
chunks[i] = chunk;
}
return chunks;
}
exports.splitIntoRootChunks = splitIntoRootChunks;
/** @ignore */

@@ -38,6 +53,31 @@ function mixInLength(root, length) {

lengthBuf.writeUIntLE(length, 0, 6);
const h = hash_1.hash(root, lengthBuf);
return Buffer.from(h.buffer, h.byteOffset, h.byteLength);
return hash64(root, lengthBuf);
}
exports.mixInLength = mixInLength;
// x2 faster than bitLengthStr() which uses Number.toString(2)
function bitLength(i) {
if (i === 0) {
return 0;
}
return Math.floor(Math.log2(i)) + 1;
}
exports.bitLength = bitLength;
/**
* Given maxChunkCount return the chunkDepth
* ```
* n: [0,1,2,3,4,5,6,7,8,9]
* d: [0,0,1,2,2,3,3,3,3,4]
* ```
*/
function maxChunksToDepth(n) {
if (n === 0)
return 0;
return Math.ceil(Math.log2(n));
}
exports.maxChunksToDepth = maxChunksToDepth;
/** @ignore */
function nextPowerOf2(n) {
return n <= 0 ? 1 : Math.pow(2, bitLength(n - 1));
}
exports.nextPowerOf2 = nextPowerOf2;
//# sourceMappingURL=merkleize.js.map
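
Because the hunks above interleave removed 0.8.x lines with added 0.9.0 lines, the new merkleization flow is hard to follow. Below is the 0.9.0 logic reassembled into one readable block from the added lines; treat it as a reading aid that may differ in small details from the published source.

```ts
import {digest64} from "@chainsafe/as-sha256";

// Zero hashes are computed lazily and cached, per the zeros.js diff below
const zeroHashes: Uint8Array[] = [new Uint8Array(32)];

function zeroHash(depth: number): Uint8Array {
  for (let i = zeroHashes.length; i <= depth; i++) {
    zeroHashes[i] = hash64(zeroHashes[i - 1], zeroHashes[i - 1]);
  }
  return zeroHashes[depth];
}

function hash64(bytes32A: Uint8Array, bytes32B: Uint8Array): Uint8Array {
  return digest64(Buffer.concat([bytes32A, bytes32B]));
}

function bitLength(i: number): number {
  return i === 0 ? 0 : Math.floor(Math.log2(i)) + 1;
}

function nextPowerOf2(n: number): number {
  return n <= 0 ? 1 : 2 ** bitLength(n - 1);
}

function merkleize(chunks: Uint8Array[], padFor: number): Uint8Array {
  const layerCount = bitLength(nextPowerOf2(padFor) - 1);
  if (chunks.length === 0) return zeroHash(layerCount);

  let chunkCount = chunks.length;
  for (let l = 0; l < layerCount; l++) {
    // Pad an odd layer with the zero hash of that level rather than padding
    // the whole leaf layer with zero chunks, which avoids over-hashing
    const padCount = chunkCount % 2;
    const paddedChunkCount = chunkCount + padCount;
    for (let i = 0; i < padCount; i++) {
      chunks[chunkCount + i] = zeroHash(l);
    }
    // Hash pairs in place; the next layer occupies the front of the array
    for (let i = 0; i < paddedChunkCount; i += 2) {
      chunks[i / 2] = hash64(chunks[i], chunks[i + 1]);
    }
    chunkCount = paddedChunkCount / 2;
  }
  return chunks[0];
}
```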

lib/util/zeros.d.ts

@@ -1,3 +0,2 @@

/// <reference types="node" />
export declare const zeroHashes: Buffer[];
export declare function zeroHash(depth: number): Uint8Array;
//# sourceMappingURL=zeros.d.ts.map
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.zeroHashes = void 0;
/** @module ssz */
const constants_1 = require("./constants");
const hash_1 = require("./hash");
exports.zeroHash = void 0;
const as_sha256_1 = require("@chainsafe/as-sha256");
// create array of "zero hashes", successively hashed zero chunks
exports.zeroHashes = [Buffer.alloc(constants_1.BYTES_PER_CHUNK)];
for (let i = 0; i < 52; i++) {
const h = hash_1.hash(exports.zeroHashes[i], exports.zeroHashes[i]);
exports.zeroHashes.push(Buffer.from(h.buffer, h.byteOffset, h.byteLength));
const zeroHashes = [new Uint8Array(32)];
function zeroHash(depth) {
if (depth >= zeroHashes.length) {
for (let i = zeroHashes.length; i <= depth; i++) {
zeroHashes[i] = as_sha256_1.digest64(Buffer.concat([zeroHashes[i - 1], zeroHashes[i - 1]]));
}
}
return zeroHashes[depth];
}
exports.zeroHash = zeroHash;
//# sourceMappingURL=zeros.js.map

package.json

@@ -7,3 +7,3 @@ {

"homepage": "https://github.com/chainsafe/ssz",
"version": "0.8.20",
"version": "0.9.0",
"main": "lib/index.js",

@@ -25,8 +25,7 @@ "files": [

"prepublishOnly": "yarn build",
"pretest": "yarn check-types",
"test": "yarn test:unit",
"benchmark": "node --max-old-space-size=4096 --expose-gc ./node_modules/.bin/benchmark 'test/perf/*.test.ts'",
"benchmark": "node --max-old-space-size=4096 --expose-gc ../../node_modules/.bin/benchmark 'test/perf/*.test.ts'",
"benchmark:local": "yarn benchmark --local",
"test:perf": "mocha \"test/perf/**/*.test.ts\"",
"test:unit": "nyc -e .ts mocha \"test/unit/**/*.test.ts\"",
"test:unit": "nyc mocha \"test/unit/**/*.test.ts\"",
"test:spec": "yarn test:spec-generic && yarn test:spec-static",

@@ -41,9 +40,10 @@ "test:spec-generic": "mocha \"test/spec/generic/**/*.test.ts\"",

"dependencies": {
"@chainsafe/as-sha256": "^0.2.4",
"@chainsafe/persistent-merkle-tree": "^0.3.7",
"@chainsafe/as-sha256": "^0.3.0",
"@chainsafe/persistent-merkle-tree": "^0.4.0",
"case": "^1.6.3"
},
"devDependencies": {
"@chainsafe/lodestar-params": "^0.31.0",
"@chainsafe/lodestar-spec-test-util": "^0.31.0"
"@types/js-yaml": "^4.0.5",
"js-yaml": "^3.13.1",
"snappyjs": "^0.6.1"
},

@@ -56,3 +56,3 @@ "keywords": [

],
"gitHead": "1f23ceb84a2f1480ecfcf683098061c8ebd8a422"
"gitHead": "59065e6965a04d829d49dfaa870f237a38280c4e"
}

README.md

@@ -8,3 +8,3 @@ # ssz

Simple Serialize (SSZ) is [an Eth2 standard](https://github.com/ethereum/eth2.0-specs/blob/dev/ssz/simple-serialize.md) that defines how Eth2 consensus objects are serialized and merkleized.
[Simple Serialize (SSZ)](https://github.com/ethereum/consensus-specs/blob/dev/ssz/simple-serialize.md) is a consensus layer standard that defines how consensus objects are serialized and merkleized.

@@ -19,6 +19,5 @@ SSZ is a type system that defines:

- equality
- valid value assertion
- create and consume proofs
- to / from json-serializable object
- copy / clone
- to / from json-serializable object

@@ -36,45 +35,47 @@ ## Install

const Keypair = new ContainerType({
fields: {
priv: new ByteVectorType({
length: 32,
}),
pub: new ByteVectorType({
length: 48,
}),
privateKey: new ByteVectorType(32),
publicKey: new ByteVectorType(48),
},
});
// You may want a corresponding typescript interface for Keypair
import {ByteVector} from "@chainsafe/ssz";
interface Keypair {
priv: ByteVector;
pub: ByteVector;
}
// The value type of any SSZ type is derived with `ValueOf`
import {ValueOf} from "@chainsafe/ssz";
type Keypair = ValueOf<typeof Keypair>
// Now you can perform different operations on Keypair objects
const keypair = Keypair.defaultValue(); // Create a default Keypair
const kp = Keypair.defaultValue(); // Create a default Keypair
keypair.privateKey; // => Uint8Array [0,0,0,...], length 32
keypair.publicKey; // => Uint8Array [0,0,0, ...], length 48
kp.priv; // => ByteVector [0,0,0,...], length 32
kp.pub; // => ByteVector [0,0,0, ...], length 48
// serialize the object to a byte array
const serialized: Uint8Array = Keypair.serialize(keypair);
const serialized: Uint8Array = Keypair.serialize(kp); // serialize the object to a byte array
const root: Uint8Array = Keypair.hashTreeRoot(kp); // get the merkle root of the object
const isEqual: boolean = Keypair.equals(kp, kp); // check equality between two keypairs
const kp2: Keypair = Keypair.clone(kp); // create a copy of the object
const kp3: Keypair = Keypair.deserialize(serialized); // deserialize a serialized object
const jsonKp = Keypair.toJson(kp); // convert the object to a json-serializable representation (binary data is converted to hex strings)
JSON.stringify(jsonKp);
Keypair.fromJson(jsonKp); // convert the json-serializable representation to the object
// get the merkle root of the object
const root: Uint8Array = Keypair.hashTreeRoot(keypair);
// create a copy of the object
const keypair2: Keypair = Keypair.clone(kp);
// deserialize a serialized object
const keypair3: Keypair = Keypair.deserialize(serialized);
// Convert to JSON-serializable representation
// (binary data is converted to hex strings)
const keypairJSON = Keypair.toJson(keypair);
const keypairJSONStr = JSON.stringify(jsonKp);
// convert the json-serializable representation to the object
const keypair4: Keypair = Keypair.fromJson(keypairJSON);
// The merkle-tree-backed representation of a Keypair may be created / operated on
const kp2: TreeBacked<Keypair> = Keypair.defaultTreeBacked();
const keypairView: TreeBacked<Keypair> = Keypair.toView(keypair)
// All of the same operations can be performed on tree-backed values
Keypair.serialize(kp2);
keypairView.serialize();
```
### ETH2 Objects
### Ethereum consensus objects
For Eth2 datatypes (eg: `BeaconBlock`, `DepositData`, `BeaconState`, etc), see [`@chainsafe/lodestar-types`](https://github.com/ChainSafe/lodestar/tree/master/packages/lodestar-types).
For Ethereum consensus datatypes (eg: `BeaconBlock`, `DepositData`, `BeaconState`, etc), see [`@chainsafe/lodestar-types`](https://github.com/ChainSafe/lodestar/tree/master/packages/lodestar-types).

@@ -85,22 +86,126 @@ ## Additional notes

This library operates on values of several kinds of 'backings', or underlying representations of data. Each backing has runtime tradeoffs for the above operations that arise from the nature of the underlying representation.
This library operates on values of two kinds of 'backings', or underlying representations of data. Each backing has runtime tradeoffs for the above operations that arise from the nature of the underlying representation.
Effort has been made to minimize the differences between backings for the core API, which includes the above operations, property getter/setters, and iteration (value iteration for vectors/lists and enumerable key iteration for containers).
Prior versions of this library attempted to fully align the interfaces between operations on various backings. This has the side effect of obscuring the underlying representation in downstream code, which is undesirable when maintaining code that requires higher levels of control, eg: performance-critical code. This library no longer supports interchanging values with different backings, and exported APIs clearly distinguish between backings.
We support the following backings, which correspond to the core operations of serialization and merkleization:
We support the following backings:
- Structural - This backing has a native javascript type representation.
- Value - This backing has a native javascript type representation.
Containers are constructed as js Objects, vectors and lists as Arrays (or TypedArrays). Within operations, property access is performed using js getter notation, with gets corresponding to the structure of the value's type. Because structural non-constructor operations do not assume the underlying representation of values, all backings can be operated on in this context.
Containers are constructed as JavaScript Objects, vectors and lists as Arrays (or TypedArrays). Type methods `type.serialize`, `type.deserialize`, `type.hashTreeRoot`, and `type.defaultValue` all operate on values.
- Tree - This backing has an immutable merkle tree representation.
The data is always represented as a tree, and within operations, the tree structure is harnessed as much as possible. Property getters return subtrees except for basic types, where the native value corresponding to that type is returned. Values backed by a tree are wrapped in an ES6 Proxy object to provide a convenient, 'structural' interface for property getters/setters.
The data is represented as a full merkle tree, composed of immutable, linked nodes. (See [`@chainsafe/persistent-merkle-tree`](https://github.com/ChainSafe/ssz/tree/master/packages/persistent-merkle-tree)), wrapped as a "tree view". Two types of tree view are provided, a simple wrapper, and a wrapper with more caching and batched updates.
- ByteArray - This backing has a byte array representation.
### Tree View
The data is always represented as a Uint8Array, and within operations, the serialized structure is harnessed as much as possible. Property getters return sub-arrays except for basic types, when the native value corresponding to that type is returned. Values backed by an array are wrapped in an ES6 Proxy object to provide a convenient, 'structural' interface for property getters/setters.
A tree view is a wrapper around a `Tree` and a `Type` that provides methods for convenient property access and ssz operations.
Property getters return sub-views, except for basic types, which return native values. Setters, likewise, require sub-views, except for basic types, which require native values.
This tree view is a simple wrapper around tree-backed data that commits any changes immediately to the tree. Changes are propagated upwards to the root of the parent tree.
```ts
// Create a type
const C = new ContainerType({
a: new VectorBasicType(new UintNumberType(1), 2),
});
// Create a tree view based on the default value
const c = C.defaultView();
// SSZ operations
c.serialize() === C.serialize(C.defaultValue());
const root = c.hashTreeRoot();
// Getters
c.a.get(0) === 0;
// Setters
// Changes are applied immediately to the tree
c.a.set(0, 1);
// Subsequent calls to `hashTreeRoot` reflect the changes to the tree
assert(root.toString() !== c.hashTreeRoot().toString());
```
If you need to do many mutations at once see **ViewDU**, which defers all updates to a later `commit` step, paying the cost of updating the tree only once.
**Subview behaviour**
View implementations don't contain any internal caches beyond their internal `Tree`s, and setting one subview to another will not link the views.
```ts
const c1 = C.toView({a: [0, 0]});
const c2 = C.toView({a: [1, 1]});
// c1's Tree now includes the root node of `c2.a` but no reference to the `c2.a` view
// Warning: this is different behaviour than ViewDU
c1.a = c2.a;
// This statement mutates ONLY c1 data
c1.a.set(0, 2);
// This statement mutates ONLY c2 data
c2.a.set(0, 3);
```
### Tree ViewDU
ViewDU = View Deferred Update. This tree view caches all mutations to data and applies the changes to the tree only when requested by calling the `commit` method. This allows paying the cost of navigating and updating the tree only once. This strategy is optimal for large tree manipulations that require very high performance (e.g. the Ethereum consensus beacon chain state transition).
```ts
// Create a type
const C = new ContainerType({
a: new VectorBasicType(new UintNumberType(1), 2),
});
// Create a tree view DU based on the default value
const c = C.defaultViewDU();
// SSZ operations
c.serialize() === C.serialize(C.defaultValue());
const root = c.hashTreeRoot();
// Getters
c.a.get(0) === 0;
// Setters
// Changes are NOT applied immediately to the tree
c.a.set(0, 1);
// Subsequent calls to `hashTreeRoot` do NOT reflect the changes to the tree
assert(root.toString() === c.hashTreeRoot().toString());
// Until commit is called
c.commit();
assert(root.toString() !== c.hashTreeRoot().toString());
```
**Key features**
- Defer tree updates until `commit` is called, allowing multiple tree nodes to be set in a batch while navigating the tree at most once
- Persist caches of sub-properties to prevent tree navigation when re-reading data.
**Subview behaviour**
Due to having mutable caches for child properties, setting a subview from one view to the subview of another view will link the two by referencing the same underlying cache.
```ts
const c1 = C.toViewDU({a: [0, 0]});
const c2 = C.toViewDU({a: [1, 1]});
// Now both c1 and c2 have a reference to the exact same cached child view
// Warning: this is different behaviour than View
c1.a = c2.a;
// This statement mutates c1 AND c2 data
c1.a.set(0, 2);
// This statement mutates c1 AND c2 data
c2.a.set(0, 3);
```
## License
Apache 2.0
