@chainsafe/ssz - npm Package Compare versions

Comparing version 0.8.15 to 0.8.16

lib/types/basic/none.d.ts


CHANGELOG.md

@@ -0,1 +1,10 @@

## 0.8.16
## Features
- Implement Number64UintType and Number64ListType [#159](https://github.com/ChainSafe/ssz/pull/159)
- Add ContainerLeafNodeStructType for memory efficiency [#168](https://github.com/ChainSafe/ssz/pull/168)
- Union type [#145](https://github.com/ChainSafe/ssz/pull/145)
- Add alternative to iterator interface [#171](https://github.com/ChainSafe/ssz/pull/171)
## 0.8.15 (2021-08-23)

@@ -2,0 +11,0 @@
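
To make the new 0.8.16 features above concrete, here is a minimal TypeScript sketch. Number64UintType, Number64ListType, the {elementType, limit} list options, and the Union {selector, value} shape are all taken from the diffs below; the variable names and the example limit are illustrative only, not from the package docs.

```ts
// Sketch only: constructing the new 0.8.16 types (names from the diffs below).
import {Number64UintType, Number64ListType} from "@chainsafe/ssz";

// 8-byte uint represented as a JS number, operated on via HashObject internally.
const number64 = new Number64UintType();

// New list specialization; the ListType factory in list.js below also returns
// this class automatically when the element type is a Number64UintType.
const balances = new Number64ListType({elementType: number64, limit: 1024});

// The new Union interface is just {selector, value} (see the interface.d.ts diff below).
const union = {selector: 0, value: null};
```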

@@ -52,3 +52,8 @@ import { Proof, Tree } from "@chainsafe/persistent-merkle-tree";

readonlyEntries(): IterableIterator<[string, ValueOf<T>]>;
keysArray(): string[];
valuesArray(): ValueOf<T>[];
entriesArray(): [string, ValueOf<T>][];
readonlyValuesArray(): ValueOf<T>[];
readonlyEntriesArray(): [string, ValueOf<T>][];
}
//# sourceMappingURL=interface.d.ts.map


lib/backings/tree/treeValue.d.ts
import { Proof, Tree } from "@chainsafe/persistent-merkle-tree";
import { ArrayLike, CompositeValue, List } from "../../interface";
import { BasicArrayType, BasicListType, CompositeArrayType, CompositeListType, CompositeType, ContainerType } from "../../types";
import { ArrayLike, CompositeValue, List, Union } from "../../interface";
import { BasicArrayType, BasicListType, CompositeArrayType, CompositeListType, CompositeType, ContainerType, UnionType } from "../../types";
import { Path } from "../backedValue";

@@ -48,2 +48,7 @@ import { ITreeBacked, TreeBacked, ValueOf } from "./interface";

abstract readonlyEntries(): IterableIterator<[string, ValueOf<T>]>;
abstract keysArray(): string[];
abstract valuesArray(): ValueOf<T>[];
abstract entriesArray(): [string, ValueOf<T>][];
abstract readonlyValuesArray(): ValueOf<T>[];
abstract readonlyEntriesArray(): [string, ValueOf<T>][];
}

@@ -60,2 +65,7 @@ export declare class BasicArrayTreeValue<T extends ArrayLike<unknown>> extends TreeValue<T> {

readonlyEntries(): IterableIterator<[string, ValueOf<T>]>;
keysArray(): string[];
valuesArray(): ValueOf<T>[];
entriesArray(): [string, ValueOf<T>][];
readonlyValuesArray(): ValueOf<T>[];
readonlyEntriesArray(): [string, ValueOf<T>][];
}

@@ -72,2 +82,7 @@ export declare class CompositeArrayTreeValue<T extends ArrayLike<unknown>> extends TreeValue<T> {

readonlyEntries(): IterableIterator<[string, ValueOf<T>]>;
keysArray(): string[];
valuesArray(): ValueOf<T>[];
entriesArray(): [string, ValueOf<T>][];
readonlyValuesArray(): ValueOf<T>[];
readonlyEntriesArray(): [string, ValueOf<T>][];
}

@@ -96,4 +111,25 @@ export declare class BasicListTreeValue<T extends List<unknown>> extends BasicArrayTreeValue<T> {

readonlyEntries(): IterableIterator<[string, ValueOf<T>]>;
keysArray(): string[];
valuesArray(): ValueOf<T>[];
entriesArray(): [string, ValueOf<T>][];
readonlyValuesArray(): ValueOf<T>[];
readonlyEntriesArray(): [string, ValueOf<T>][];
}
export declare class UnionTreeValue<T extends Union<unknown>> extends TreeValue<T> {
type: UnionType<T>;
constructor(type: UnionType<T>, tree: Tree);
getProperty<P extends keyof T>(property: P): ValueOf<T, P>;
setProperty<P extends keyof T>(property: P, value: ValueOf<T, P>): boolean;
keys(): IterableIterator<string>;
values(): IterableIterator<ValueOf<T>>;
entries(): IterableIterator<[string, ValueOf<T, keyof T>]>;
readonlyValues(): IterableIterator<ValueOf<T, keyof T>>;
readonlyEntries(): IterableIterator<[string, ValueOf<T, keyof T>]>;
keysArray(): string[];
valuesArray(): ValueOf<T>[];
entriesArray(): [string, ValueOf<T>][];
readonlyValuesArray(): ValueOf<T>[];
readonlyEntriesArray(): [string, ValueOf<T>][];
}
export {};
//# sourceMappingURL=treeValue.d.ts.map
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ContainerTreeValue = exports.CompositeListTreeValue = exports.BasicListTreeValue = exports.CompositeArrayTreeValue = exports.BasicArrayTreeValue = exports.TreeValue = exports.TreeProxyHandler = exports.proxyWrapTreeValue = exports.getTreeValueClass = exports.createTreeBacked = exports.isTreeBacked = void 0;
exports.UnionTreeValue = exports.ContainerTreeValue = exports.CompositeListTreeValue = exports.BasicListTreeValue = exports.CompositeArrayTreeValue = exports.BasicArrayTreeValue = exports.TreeValue = exports.TreeProxyHandler = exports.proxyWrapTreeValue = exports.getTreeValueClass = exports.createTreeBacked = exports.isTreeBacked = void 0;
const types_1 = require("../../types");

@@ -40,2 +40,5 @@ const byteArray_1 = require("../../util/byteArray");

}
else if (types_1.isUnionType(type)) {
return UnionTreeValue;
}
throw Error("No TreeValueClass for type");

@@ -176,2 +179,32 @@ }

}
keysArray() {
const propNames = this.getPropertyNames();
// pop off "length"
propNames.pop();
return propNames.map(String);
}
valuesArray() {
return this.type.tree_getValues(this.tree);
}
entriesArray() {
const keys = this.getPropertyNames();
const values = this.valuesArray();
const entries = [];
for (let i = 0; i < values.length; i++) {
entries.push([String(keys[i]), values[i]]);
}
return entries;
}
readonlyValuesArray() {
return this.type.tree_readonlyGetValues(this.tree);
}
readonlyEntriesArray() {
const keys = this.getPropertyNames();
const values = this.readonlyValuesArray();
const entries = [];
for (let i = 0; i < values.length; i++) {
entries.push([String(keys[i]), values[i]]);
}
return entries;
}
}

@@ -228,2 +261,42 @@ exports.BasicArrayTreeValue = BasicArrayTreeValue;

}
keysArray() {
const propNames = this.getPropertyNames();
// pop off "length"
propNames.pop();
return propNames.map(String);
}
valuesArray() {
const values = [];
const rawValues = this.type.tree_getValues(this.tree);
for (let i = 0; i < rawValues.length; i++) {
values.push(createTreeBacked(this.type.elementType, rawValues[i]));
}
return values;
}
entriesArray() {
const keys = this.getPropertyNames();
const values = this.valuesArray();
const entries = [];
for (let i = 0; i < values.length; i++) {
entries.push([String(keys[i]), values[i]]);
}
return entries;
}
readonlyValuesArray() {
const values = [];
const rawValues = this.type.tree_readonlyGetValues(this.tree);
for (let i = 0; i < rawValues.length; i++) {
values.push(createTreeBacked(this.type.elementType, rawValues[i]));
}
return values;
}
readonlyEntriesArray() {
const keys = this.getPropertyNames();
const values = this.readonlyValuesArray();
const entries = [];
for (let i = 0; i < values.length; i++) {
entries.push([String(keys[i]), values[i]]);
}
return entries;
}
}

@@ -340,4 +413,116 @@ exports.CompositeArrayTreeValue = CompositeArrayTreeValue;

}
keysArray() {
return this.getPropertyNames();
}
valuesArray() {
const values = [];
const entries = this.entriesArray();
for (let i = 0; i < entries.length; i++) {
values.push(entries[i][1]);
}
return values;
}
entriesArray() {
const keys = this.keysArray();
const values = this.type.tree_getValues(this.tree);
const entries = [];
for (let i = 0; i < values.length; i++) {
const key = keys[i];
const value = values[i];
const fieldType = this.type.getPropertyType(key);
if (types_1.isCompositeType(fieldType)) {
entries.push([key, createTreeBacked(fieldType, value)]);
}
else {
entries.push([key, value]);
}
}
return entries;
}
readonlyValuesArray() {
const values = [];
const entries = this.readonlyEntriesArray();
for (let i = 0; i < entries.length; i++) {
values.push(entries[i][1]);
}
return values;
}
readonlyEntriesArray() {
const keys = this.keysArray();
const values = this.type.tree_readonlyGetValues(this.tree);
const entries = [];
for (let i = 0; i < values.length; i++) {
const key = keys[i];
const value = values[i];
const fieldType = this.type.getPropertyType(key);
if (types_1.isCompositeType(fieldType)) {
entries.push([key, createTreeBacked(fieldType, value)]);
}
else {
entries.push([key, value]);
}
}
return entries;
}
}
exports.ContainerTreeValue = ContainerTreeValue;
class UnionTreeValue extends TreeValue {
type;
constructor(type, tree) {
super(type, tree);
this.type = type;
}
getProperty(property) {
if (property !== "selector" && property !== "value") {
throw new Error(`property ${property} does not exist in Union type`);
}
const propType = this.type.getPropertyTypeFromTree(this.tree, property);
const propValue = this.type.tree_getProperty(this.tree, property);
if (types_1.isCompositeType(propType)) {
return createTreeBacked(propType, propValue);
}
else {
return propValue;
}
}
setProperty(property, value) {
if (property !== "value") {
throw new Error(`Cannot set property ${property} on Union type, only "value" is supported`);
}
return this.type.tree_setProperty(this.tree, property, value);
}
*keys() {
yield* this.getPropertyNames();
}
*values() {
for (const [_key, value] of this.entries()) {
yield value;
}
}
entries() {
throw new Error("Method not implemented for Union type");
}
readonlyValues() {
throw new Error("Method not implemented for Union type");
}
readonlyEntries() {
throw new Error("Method not implemented for Union type");
}
keysArray() {
throw new Error("Method not implemented for Union type");
}
valuesArray() {
throw new Error("Method not implemented for Union type");
}
entriesArray() {
throw new Error("Method not implemented for Union type");
}
readonlyValuesArray() {
throw new Error("Method not implemented for Union type");
}
readonlyEntriesArray() {
throw new Error("Method not implemented for Union type");
}
}
exports.UnionTreeValue = UnionTreeValue;
//# sourceMappingURL=treeValue.js.map
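
The *Array methods added above are the "alternative to iterator interface" from the changelog (#171): they return plain arrays instead of generators. Below is a hedged sketch of how they surface on a TreeBacked container; ContainerType and its fields option are existing 0.8.x API, while createTreeBackedFromStruct is assumed from the existing TreeBacked factory naming and may differ.

```ts
// Hedged sketch: array-returning alternatives to the iterator-based accessors.
// createTreeBackedFromStruct is an assumption based on the existing TreeBacked
// factories; the *Array methods themselves come from the interface.d.ts diff above.
import {ContainerType, Number64UintType} from "@chainsafe/ssz";

const MyContainer = new ContainerType({
  fields: {a: new Number64UintType(), b: new Number64UintType()},
});

const value = MyContainer.createTreeBackedFromStruct({a: 1, b: 2});

// Generator-based accessors (values(), readonlyValues(), ...) still exist;
// the *Array variants materialize the same data in a single pass.
const keys = value.keysArray();             // expected: ["a", "b"]
const values = value.readonlyValuesArray(); // expected: [1, 2]
const entries = value.entriesArray();       // expected: [["a", 1], ["b", 2]]
```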

@@ -22,3 +22,7 @@ /**

}
export declare type CompositeValue = Record<string, any> | ArrayLike<unknown> | Record<string, never>;
export interface Union<T> {
readonly selector: number;
value: T;
}
export declare type CompositeValue = Record<string, any> | ArrayLike<unknown> | Union<unknown> | Record<string, never>;
/**

@@ -25,0 +29,0 @@ * The Json interface is used for json-serializable input
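
As a small illustration of the Union interface added above: a union value carries exactly two properties, selector (the index of the chosen member type) and value. The import below assumes the package root re-exports lib/interface, as it does for the other interface types; only the shape itself is taken from this diff.

```ts
// Sketch only: the Union value shape from lib/interface.
import type {Union} from "@chainsafe/ssz"; // assumption: re-exported from lib/interface

const union: Union<Uint8Array | null> = {
  selector: 1,               // index of the selected member type
  value: new Uint8Array(20), // value typed by the selected member
};

// On a TreeBacked union (see UnionTreeValue above), only "selector" and "value"
// can be read via getProperty, and only "value" can be set.
```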

@@ -0,1 +1,2 @@

import { HashObject } from "@chainsafe/as-sha256";
import { Type } from "../type";

@@ -24,4 +25,6 @@ export declare const BASIC_TYPE: unique symbol;

abstract struct_deserializeFromBytes(data: Uint8Array, offset: number): T;
struct_deserializeFromHashObject?(data: HashObject, byteOffset: number): T;
struct_serializeToHashObject?(value: T, output: HashObject, byteOffset: number): number;
struct_hashTreeRoot(value: T): Uint8Array;
}
//# sourceMappingURL=abstract.d.ts.map


lib/types/basic/abstract.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BasicType = exports.isBasicType = exports.BASIC_TYPE = void 0;
/* eslint-disable @typescript-eslint/member-ordering */
/* eslint-disable @typescript-eslint/no-unused-vars */
const type_1 = require("../type");

@@ -7,0 +5,0 @@ exports.BASIC_TYPE = Symbol.for("ssz/BasicType");

import { Json } from "../../interface";
import { Type } from "../type";
import { BasicType } from "./abstract";
import { HashObject } from "@chainsafe/as-sha256";
export interface IUintOptions {

@@ -17,3 +18,5 @@ byteLength: number;

export declare const NUMBER_UINT_TYPE: unique symbol;
export declare const NUMBER_64_UINT_TYPE: unique symbol;
export declare function isNumberUintType(type: Type<unknown>): type is NumberUintType;
export declare function isNumber64UintType(type: Type<unknown>): type is Number64UintType;
export declare class NumberUintType extends UintType<number> {

@@ -30,2 +33,15 @@ _maxBigInt?: BigInt;

}
/**
* For a 64-bit number, we want to operate on HashObject
* rather than raw bytes to improve performance.
*/
export declare class Number64UintType extends NumberUintType {
constructor();
/**
* TODO: move this logic all the way to persistent-merkle-tree?
* That would save one extra tree traversal in the applyDelta scenario
*/
struct_deserializeFromHashObject(data: HashObject, byteOffset: number): number;
struct_serializeToHashObject(value: number, output: HashObject, byteOffset: number): number;
}
export declare const BIGINT_UINT_TYPE: unique symbol;

@@ -32,0 +48,0 @@ export declare function isBigIntUintType(type: Type<unknown>): type is BigIntUintType;

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BigIntUintType = exports.isBigIntUintType = exports.BIGINT_UINT_TYPE = exports.NumberUintType = exports.isNumberUintType = exports.NUMBER_UINT_TYPE = exports.UintType = exports.isUintType = exports.UINT_TYPE = void 0;
exports.BigIntUintType = exports.isBigIntUintType = exports.BIGINT_UINT_TYPE = exports.Number64UintType = exports.NumberUintType = exports.isNumber64UintType = exports.isNumberUintType = exports.NUMBER_64_UINT_TYPE = exports.NUMBER_UINT_TYPE = exports.UintType = exports.isUintType = exports.UINT_TYPE = void 0;
const bigInt_1 = require("../../util/bigInt");

@@ -28,2 +28,3 @@ const type_1 = require("../type");

exports.NUMBER_UINT_TYPE = Symbol.for("ssz/NumberUintType");
exports.NUMBER_64_UINT_TYPE = Symbol.for("ssz/Number64UintType");
const BIGINT_4_BYTES = BigInt(32);

@@ -34,2 +35,6 @@ function isNumberUintType(type) {

exports.isNumberUintType = isNumberUintType;
function isNumber64UintType(type) {
return type_1.isTypeOf(type, exports.NUMBER_64_UINT_TYPE);
}
exports.isNumber64UintType = isNumber64UintType;
class NumberUintType extends UintType {

@@ -117,2 +122,93 @@ _maxBigInt;

exports.NumberUintType = NumberUintType;
const TWO_POWER_32 = 2 ** 32;
/**
* For a 64-bit number, we want to operate on HashObject
* rather than raw bytes to improve performance.
*/
class Number64UintType extends NumberUintType {
constructor() {
super({ byteLength: 8 });
this._typeSymbols.add(exports.NUMBER_64_UINT_TYPE);
}
/**
* TODO: move this logic all the way to persistent-merkle-tree?
* That would save one extra tree traversal in the applyDelta scenario
*/
struct_deserializeFromHashObject(data, byteOffset) {
const numberOffset = Math.floor(byteOffset / 8);
// a chunk contains 4 items
if (numberOffset < 0 || numberOffset > 3) {
throw new Error(`Invalid numberOffset ${numberOffset}`);
}
let low32Number = 0;
let high32Number = 0;
switch (numberOffset) {
case 0:
low32Number = data.h0 & 0xffffffff;
high32Number = data.h1 & 0xffffffff;
break;
case 1:
low32Number = data.h2 & 0xffffffff;
high32Number = data.h3 & 0xffffffff;
break;
case 2:
low32Number = data.h4 & 0xffffffff;
high32Number = data.h5 & 0xffffffff;
break;
case 3:
low32Number = data.h6 & 0xffffffff;
high32Number = data.h7 & 0xffffffff;
break;
default:
throw new Error(`Invalid offset ${numberOffset}`);
}
if (low32Number < 0)
low32Number = low32Number >>> 0;
if (high32Number === 0) {
return low32Number;
}
else if (high32Number < 0) {
high32Number = high32Number >>> 0;
}
if (low32Number === 0xffffffff && high32Number === 0xffffffff) {
return Infinity;
}
return high32Number * TWO_POWER_32 + low32Number;
}
struct_serializeToHashObject(value, output, byteOffset) {
const numberOffset = Math.floor(byteOffset / 8);
let low32Number;
let high32Number;
if (value !== Infinity) {
low32Number = value & 0xffffffff;
high32Number = Math.floor(value / TWO_POWER_32) & 0xffffffff;
}
else {
low32Number = 0xffffffff;
high32Number = 0xffffffff;
}
switch (numberOffset) {
case 0:
output.h0 = low32Number;
output.h1 = high32Number;
break;
case 1:
output.h2 = low32Number;
output.h3 = high32Number;
break;
case 2:
output.h4 = low32Number;
output.h5 = high32Number;
break;
case 3:
output.h6 = low32Number;
output.h7 = high32Number;
break;
default:
throw new Error(`Invalid offset ${numberOffset}`);
}
return numberOffset + 1;
}
}
exports.Number64UintType = Number64UintType;
exports.BIGINT_UINT_TYPE = Symbol.for("ssz/BigIntUintType");

@@ -119,0 +215,0 @@ function isBigIntUintType(type) {
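
The Number64UintType implementation above packs four uint64 values into each 32-byte chunk, viewed as a HashObject with eight 32-bit words h0..h7: item k of the chunk uses words 2k (low) and 2k + 1 (high), and the JS number is rebuilt as high * 2**32 + low. A self-contained sketch of that arithmetic (illustrative, not library code):

```ts
// Sketch of the packing used by Number64UintType above (illustrative only).
// A 32-byte chunk = HashObject {h0..h7}; item k of 4 uses words 2k and 2k + 1.
const TWO_POWER_32 = 2 ** 32;

function decodeUint64(low32: number, high32: number): number {
  const low = low32 >>> 0;  // HashObject words are signed 32-bit; force unsigned
  const high = high32 >>> 0;
  if (low === 0xffffffff && high === 0xffffffff) return Infinity; // max-uint64 sentinel
  return high * TWO_POWER_32 + low;
}

// Example: 10_000_000_000 splits into
//   high = Math.floor(10_000_000_000 / 2**32) = 2
//   low  = 10_000_000_000 - 2 * 2**32        = 1_410_065_408
decodeUint64(1_410_065_408, 2); // 10_000_000_000
```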

import { CompositeValue, Json } from "../../interface";
import { BackedValue, Path, TreeBacked } from "../../backings";
import { IJsonOptions, Type } from "../type";
import { Gindex, Node, Proof, Tree } from "@chainsafe/persistent-merkle-tree";
import { Gindex, GindexBitstring, Node, Proof, Tree } from "@chainsafe/persistent-merkle-tree";
export declare const COMPOSITE_TYPE: unique symbol;

@@ -51,2 +51,3 @@ export declare function isCompositeType(type: Type<unknown>): type is CompositeType<CompositeValue>;

getGindexAtChunkIndex(index: number): Gindex;
getGindexBitStringAtChunkIndex(index: number): GindexBitstring;
tree_getSubtreeAtChunkIndex(target: Tree, index: number): Tree;

@@ -65,2 +66,4 @@ tree_setSubtreeAtChunkIndex(target: Tree, index: number, value: Tree, expand?: boolean): void;

abstract tree_readonlyIterateValues(tree: Tree): IterableIterator<Tree | unknown>;
abstract tree_getValues(tree: Tree): (Tree | unknown)[];
abstract tree_readonlyGetValues(tree: Tree): (Tree | unknown)[];
/**

@@ -67,0 +70,0 @@ * Navigate to a subtype & gindex using a path

@@ -105,13 +105,16 @@ "use strict";

}
getGindexBitStringAtChunkIndex(index) {
return persistent_merkle_tree_1.toGindexBitstring(this.getChunkDepth(), index);
}
tree_getSubtreeAtChunkIndex(target, index) {
return target.getSubtree(this.getGindexAtChunkIndex(index));
return target.getSubtree(this.getGindexBitStringAtChunkIndex(index));
}
tree_setSubtreeAtChunkIndex(target, index, value, expand = false) {
target.setSubtree(this.getGindexAtChunkIndex(index), value, expand);
target.setSubtree(this.getGindexBitStringAtChunkIndex(index), value, expand);
}
tree_getRootAtChunkIndex(target, index) {
return target.getRoot(this.getGindexAtChunkIndex(index));
return target.getRoot(this.getGindexBitStringAtChunkIndex(index));
}
tree_setRootAtChunkIndex(target, index, value, expand = false) {
target.setRoot(this.getGindexAtChunkIndex(index), value, expand);
target.setRoot(this.getGindexBitStringAtChunkIndex(index), value, expand);
}

@@ -118,0 +121,0 @@ /**
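
The change above replaces bigint gindices with the GindexBitstring form when addressing chunks; the bitstring is the same generalized index written as a bit string, which avoids BigInt arithmetic on hot paths and is accepted by Tree.getRoot/setRoot/getSubtree in persistent-merkle-tree ^0.3.7. A small hedged sketch, using only names that appear in this diff:

```ts
// Sketch: chunk addressing via GindexBitstring (names as used in the diff above).
import {toGindexBitstring} from "@chainsafe/persistent-merkle-tree";

const chunkDepth = 3;
const chunkIndex = 5;
// Generalized index of chunk 5 at depth 3 is 2**3 + 5 = 13 = 0b1101,
// so the bitstring form is expected to be "1101".
const gindexBitstring = toGindexBitstring(chunkDepth, chunkIndex);
// tree.getRoot(gindexBitstring) / tree.setRoot(gindexBitstring, chunk) work as before,
// without constructing a bigint per access.
```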

@@ -37,2 +37,4 @@ import { ArrayLike, CompositeValue, Json } from "../../interface";

tree_readonlyIterateValues(target: Tree): IterableIterator<Tree | unknown>;
tree_getValues(target: Tree): (Tree | unknown)[];
tree_readonlyGetValues(target: Tree): (Tree | unknown)[];
getChunkOffset(index: number): number;

@@ -80,2 +82,4 @@ getChunkIndex(index: number): number;

tree_readonlyIterateValues(target: Tree): IterableIterator<Tree | unknown>;
tree_getValues(target: Tree): (Tree | unknown)[];
tree_readonlyGetValues(target: Tree): (Tree | unknown)[];
bytes_getVariableOffsets(target: Uint8Array): [number, number][];

@@ -82,0 +86,0 @@ tree_getLeafGindices(target?: Tree, root?: Gindex): Gindex[];

@@ -172,2 +172,29 @@ "use strict";

}
tree_getValues(target) {
const length = this.tree_getLength(target);
if (length === 0) {
return [];
}
const elementSize = this.elementType.struct_getSerializedLength();
if (32 % elementSize !== 0) {
throw new Error("cannot handle a non-chunk-alignable elementType");
}
let left = length;
const values = [];
const nodes = target.getNodesAtDepth(this.getChunkDepth(), 0, this.tree_getChunkCount(target));
out: for (let i = 0; i < nodes.length; i++) {
const chunk = nodes[i].root;
for (let offset = 0; offset < 32; offset += elementSize) {
values.push(this.elementType.struct_deserializeFromBytes(chunk, offset));
left--;
if (left === 0) {
break out;
}
}
}
return values;
}
tree_readonlyGetValues(target) {
return this.tree_getValues(target);
}
getChunkOffset(index) {

@@ -185,8 +212,8 @@ const elementSize = this.elementType.struct_getSerializedLength();

tree_setValueAtIndex(target, index, value, expand = false) {
const chunkGindex = this.getGindexAtChunkIndex(this.getChunkIndex(index));
const chunkGindexBitString = this.getGindexBitStringAtChunkIndex(this.getChunkIndex(index));
// copy data from old chunk, use new memory to set a new chunk
const chunk = new Uint8Array(32);
chunk.set(target.getRoot(chunkGindex));
chunk.set(target.getRoot(chunkGindexBitString));
this.elementType.struct_serializeToBytes(value, chunk, this.getChunkOffset(index));
target.setRoot(chunkGindex, chunk, expand);
target.setRoot(chunkGindexBitString, chunk, expand);
return true;

@@ -467,2 +494,18 @@ }

}
tree_getValues(target) {
const values = [];
const gindices = persistent_merkle_tree_1.getGindicesAtDepth(this.getChunkDepth(), 0, this.tree_getLength(target));
for (let i = 0; i < gindices.length; i++) {
values.push(target.getSubtree(gindices[i]));
}
return values;
}
tree_readonlyGetValues(target) {
const values = [];
const nodes = target.getNodesAtDepth(this.getChunkDepth(), 0, this.tree_getLength(target));
for (let i = 0; i < nodes.length; i++) {
values.push(new persistent_merkle_tree_1.Tree(nodes[i]));
}
return values;
}
bytes_getVariableOffsets(target) {

@@ -469,0 +512,0 @@ if (this.elementType.hasVariableSerializedLength()) {

@@ -33,2 +33,3 @@ import { BitList, Json } from "../../interface";

tree_iterateValues(target: Tree): IterableIterator<Tree | unknown>;
tree_getValues(target: Tree): (Tree | unknown)[];
tree_getValueAtIndex(target: Tree, index: number): boolean;

@@ -35,0 +36,0 @@ tree_setValueAtIndex(target: Tree, property: number, value: boolean, expand?: boolean): boolean;

@@ -132,6 +132,6 @@ "use strict";

const target = super.tree_deserializeFromBytes(data, start, end);
const lastGindex = this.getGindexAtChunkIndex(Math.ceil((end - start) / 32) - 1);
const lastGindexBitString = this.getGindexBitStringAtChunkIndex(Math.ceil((end - start) / 32) - 1);
// copy chunk into new memory
const lastChunk = new Uint8Array(32);
lastChunk.set(target.getRoot(lastGindex));
lastChunk.set(target.getRoot(lastGindexBitString));
const lastChunkByte = ((end - start) % 32) - 1;

@@ -143,3 +143,3 @@ // mask lastChunkByte

lastChunk[lastChunkByte] &= mask;
target.setRoot(lastGindex, lastChunk);
target.setRoot(lastGindexBitString, lastChunk);
this.tree_setLength(target, length);

@@ -194,2 +194,17 @@ return target;

}
tree_getValues(target) {
const length = this.tree_getLength(target);
const chunkCount = this.tree_getChunkCount(target);
const nodes = target.getNodesAtDepth(this.getChunkDepth(), 0, chunkCount);
let i = 0;
const values = [];
for (let nodeIx = 0; nodeIx < nodes.length; nodeIx++) {
const chunk = nodes[nodeIx].root;
for (let j = 0; j < 256 && i < length; i++, j++) {
const byte = chunk[this.getChunkOffset(i)];
values.push(!!(byte & (1 << this.getBitOffset(i))));
}
}
return values;
}
tree_getValueAtIndex(target, index) {

@@ -201,5 +216,5 @@ const chunk = this.tree_getRootAtChunkIndex(target, this.getChunkIndex(index));

tree_setValueAtIndex(target, property, value, expand = false) {
const chunkGindex = this.getGindexAtChunkIndex(this.getChunkIndex(property));
const chunkGindexBitString = this.getGindexBitStringAtChunkIndex(this.getChunkIndex(property));
const chunk = new Uint8Array(32);
chunk.set(target.getRoot(chunkGindex));
chunk.set(target.getRoot(chunkGindexBitString));
const byteOffset = this.getChunkOffset(property);

@@ -212,3 +227,3 @@ if (value) {

}
target.setRoot(chunkGindex, chunk, expand);
target.setRoot(chunkGindexBitString, chunk, expand);
return true;

@@ -215,0 +230,0 @@ }

@@ -33,2 +33,3 @@ import { BitVector, Json } from "../../interface";

tree_iterateValues(target: Tree): IterableIterator<Tree | unknown>;
tree_getValues(target: Tree): (Tree | unknown)[];
tree_getValueAtIndex(target: Tree, index: number): boolean;

@@ -35,0 +36,0 @@ tree_setProperty(target: Tree, property: number, value: boolean): boolean;

@@ -146,2 +146,17 @@ "use strict";

}
tree_getValues(target) {
const length = this.tree_getLength(target);
const chunkCount = this.tree_getChunkCount(target);
const nodes = target.getNodesAtDepth(this.getChunkDepth(), 0, chunkCount);
let i = 0;
const values = [];
for (let nodeIx = 0; nodeIx < nodes.length; nodeIx++) {
const chunk = nodes[nodeIx].root;
for (let j = 0; j < 256 && i < length; i++, j++) {
const byte = chunk[this.getChunkOffset(i)];
values.push(!!(byte & (1 << this.getBitOffset(i))));
}
}
return values;
}
tree_getValueAtIndex(target, index) {

@@ -153,5 +168,5 @@ const chunk = this.tree_getRootAtChunkIndex(target, this.getChunkIndex(index));

tree_setProperty(target, property, value) {
const chunkGindex = this.getGindexAtChunkIndex(this.getChunkIndex(property));
const chunkGindexBitString = this.getGindexBitStringAtChunkIndex(this.getChunkIndex(property));
const chunk = new Uint8Array(32);
chunk.set(target.getRoot(chunkGindex));
chunk.set(target.getRoot(chunkGindexBitString));
const byteOffset = this.getChunkOffset(property);

@@ -164,3 +179,3 @@ if (value) {

}
target.setRoot(chunkGindex, chunk);
target.setRoot(chunkGindexBitString, chunk);
return true;

@@ -167,0 +182,0 @@ }

import { Json, ObjectLike } from "../../interface";
import { CompositeType } from "./abstract";
import { IJsonOptions, Type } from "../type";
import { Gindex, Node, Tree } from "@chainsafe/persistent-merkle-tree";
import { Gindex, GindexBitstring, Node, Tree } from "@chainsafe/persistent-merkle-tree";
export interface IContainerOptions {

@@ -12,3 +12,4 @@ fields: Record<string, Type<any>>;

isBasic: boolean;
gindex: bigint;
gIndexBitString: GindexBitstring;
gIndex: bigint;
};

@@ -52,2 +53,4 @@ export declare class ContainerType<T extends ObjectLike = ObjectLike> extends CompositeType<T> {

tree_readonlyIterateValues(target: Tree): IterableIterator<Tree | unknown>;
tree_getValues(target: Tree): (Tree | unknown)[];
tree_readonlyGetValues(target: Tree): (Tree | unknown)[];
hasVariableSerializedLength(): boolean;

@@ -54,0 +57,0 @@ getFixedSerializedLength(): null | number;

@@ -9,2 +9,4 @@ "use strict";

const json_1 = require("../../util/json");
const basic_1 = require("../../util/basic");
const hash_1 = require("../../util/hash");
const treeValue_1 = require("../../backings/tree/treeValue");

@@ -35,3 +37,4 @@ exports.CONTAINER_TYPE = Symbol.for("ssz/ContainerType");

isBasic: !abstract_1.isCompositeType(fieldType),
gindex: this.getGindexAtChunkIndex(chunkIndex),
gIndexBitString: this.getGindexBitStringAtChunkIndex(chunkIndex),
gIndex: this.getGindexAtChunkIndex(chunkIndex),
});

@@ -235,5 +238,3 @@ chunkIndex++;

if (!abstract_1.isCompositeType(fieldType)) {
const chunk = new Uint8Array(32);
fieldType.toBytes(value[fieldName], chunk, 0);
return new persistent_merkle_tree_1.LeafNode(chunk);
return basic_1.basicTypeToLeafNode(fieldType, value[fieldName]);
}

@@ -310,3 +311,3 @@ else {

if (fieldInfo.isBasic) {
const chunk = target.getRoot(fieldInfo.gindex);
const chunk = target.getRoot(fieldInfo.gIndexBitString);
value[fieldName] = fieldType.struct_deserializeFromBytes(chunk, 0);

@@ -317,3 +318,3 @@ }

const compositeType = fieldType;
const subtree = target.getSubtree(fieldInfo.gindex);
const subtree = target.getSubtree(fieldInfo.gIndexBitString);
value[fieldName] = compositeType.tree_convertToStruct(subtree);

@@ -330,3 +331,3 @@ }

s +=
fieldType.tree_getSerializedLength(target.getSubtree(this.fieldInfos.get(fieldName).gindex)) + 4;
fieldType.tree_getSerializedLength(target.getSubtree(this.fieldInfos.get(fieldName).gIndexBitString)) + 4;
}

@@ -344,3 +345,3 @@ else {

const [currentOffset, nextOffset] = offsets[i];
const { isBasic, gindex } = this.fieldInfos.get(fieldName);
const { isBasic, gIndex: gindex } = this.fieldInfos.get(fieldName);
if (isBasic) {

@@ -398,3 +399,3 @@ // view of the chunk, shared buffer from `data`

}
return fieldInfo.gindex;
return fieldInfo.gIndex;
}

@@ -417,9 +418,13 @@ getPropertyType(prop) {

}
const { isBasic, gindex } = fieldInfo;
if (isBasic) {
const chunk = target.getRoot(gindex);
if (fieldInfo.isBasic) {
// Number64Uint wants to work on HashObject to improve performance
if (fieldType.struct_deserializeFromHashObject) {
const hashObject = target.getHashObject(fieldInfo.gIndexBitString);
return fieldType.struct_deserializeFromHashObject(hashObject, 0);
}
const chunk = target.getRoot(fieldInfo.gIndexBitString);
return fieldType.struct_deserializeFromBytes(chunk, 0);
}
else {
return target.getSubtree(gindex);
return target.getSubtree(fieldInfo.gIndexBitString);
}

@@ -433,11 +438,17 @@ }

}
const { isBasic, gindex } = fieldInfo;
if (isBasic) {
if (fieldInfo.isBasic) {
// Number64Uint wants to work on HashObject to improve performance
if (fieldType.struct_serializeToHashObject) {
const hashObject = hash_1.newHashObject();
fieldType.struct_serializeToHashObject(value, hashObject, 0);
target.setHashObject(fieldInfo.gIndexBitString, hashObject);
return true;
}
const chunk = new Uint8Array(32);
fieldType.struct_serializeToBytes(value, chunk, 0);
target.setRoot(gindex, chunk);
target.setRoot(fieldInfo.gIndexBitString, chunk);
return true;
}
else {
target.setSubtree(gindex, value);
target.setSubtree(fieldInfo.gIndexBitString, value);
return true;

@@ -492,2 +503,33 @@ }

}
tree_getValues(target) {
const fieldTypes = Object.values(this.fields);
const gindices = persistent_merkle_tree_1.getGindicesAtDepth(this.getChunkDepth(), 0, fieldTypes.length);
const values = [];
for (let i = 0; i < fieldTypes.length; i++) {
const fieldType = fieldTypes[i];
if (!abstract_1.isCompositeType(fieldType)) {
values.push(fieldType.struct_deserializeFromBytes(target.getRoot(gindices[i]), 0));
}
else {
values.push(target.getSubtree(gindices[i]));
}
}
return values;
}
tree_readonlyGetValues(target) {
const fieldTypes = Object.values(this.fields);
const nodes = target.getNodesAtDepth(this.getChunkDepth(), 0, fieldTypes.length);
const values = [];
for (let i = 0; i < fieldTypes.length; i++) {
const fieldType = fieldTypes[i];
const node = nodes[i];
if (!abstract_1.isCompositeType(fieldType)) {
values.push(fieldType.struct_deserializeFromBytes(node.root, 0));
}
else {
values.push(new persistent_merkle_tree_1.Tree(node));
}
}
return values;
}
hasVariableSerializedLength() {

@@ -515,3 +557,3 @@ return Object.values(this.fields).some((fieldType) => fieldType.hasVariableSerializedLength());

for (const [fieldName, fieldType] of Object.entries(this.fields)) {
const { gindex: fieldGindex, isBasic } = this.fieldInfos.get(fieldName);
const { isBasic, gIndex: fieldGindex, gIndexBitString: gindexbitstring } = this.fieldInfos.get(fieldName);
const extendedFieldGindex = persistent_merkle_tree_1.concatGindices([root, fieldGindex]);

@@ -528,3 +570,3 @@ if (isBasic) {

}
gindices.push(...compositeType.tree_getLeafGindices(target.getSubtree(fieldGindex), extendedFieldGindex));
gindices.push(...compositeType.tree_getLeafGindices(target.getSubtree(gindexbitstring), extendedFieldGindex));
}

@@ -531,0 +573,0 @@ else {

export * from "./abstract";
export * from "./array";
export * from "./vector";
export * from "./list";
export * from "./bitList";
export * from "./bitVector";
export * from "./bitList";
export * from "./byteVector";
export * from "./container";
export * from "./containerLeafNodeStruct";
export * from "./list";
export * from "./root";
export * from "./container";
export * from "./union";
export * from "./vector";
//# sourceMappingURL=index.d.ts.map

@@ -15,9 +15,11 @@ "use strict";

__exportStar(require("./array"), exports);
__exportStar(require("./vector"), exports);
__exportStar(require("./list"), exports);
__exportStar(require("./bitList"), exports);
__exportStar(require("./bitVector"), exports);
__exportStar(require("./bitList"), exports);
__exportStar(require("./byteVector"), exports);
__exportStar(require("./container"), exports);
__exportStar(require("./containerLeafNodeStruct"), exports);
__exportStar(require("./list"), exports);
__exportStar(require("./root"), exports);
__exportStar(require("./container"), exports);
__exportStar(require("./union"), exports);
__exportStar(require("./vector"), exports);
//# sourceMappingURL=index.js.map

@@ -55,2 +55,27 @@ import { Json, List } from "../../interface";

}
/**
* An optimization for Number64 using HashObject and new methods to work with deltas.
*/
export declare class Number64ListType<T extends List<number> = List<number>> extends BasicListType<T> {
constructor(options: IListOptions);
/** @override */
tree_getValueAtIndex(target: Tree, index: number): number;
/** @override */
tree_setValueAtIndex(target: Tree, index: number, value: number, expand?: boolean): boolean;
/**
* delta > 0 increments the underlying value, delta < 0 decrements the underlying value
* returns the new value
**/
tree_applyDeltaAtIndex(target: Tree, index: number, delta: number): number;
/**
* The same as tree_applyDeltaAtIndex, but applied in batch.
* returns the new values
**/
tree_applyDeltaInBatch(target: Tree, deltaByIndex: Map<number, number>): number[];
/**
* delta > 0 means an increase, delta < 0 means a decrease
* returns the new tree and new values
**/
tree_newTreeFromDeltas(target: Tree, deltas: number[]): [Tree, number[]];
}
export declare class CompositeListType<T extends List<unknown> = List<unknown>> extends CompositeArrayType<T> {

@@ -57,0 +82,0 @@ limit: number;
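
A hedged usage sketch of the Number64ListType delta API declared above. The tree_applyDeltaAtIndex, tree_applyDeltaInBatch, and tree_newTreeFromDeltas signatures are taken from this diff; struct_convertToTree is assumed from the library's existing struct_/tree_ method pairs (tree_convertToStruct appears in the container.js diff), and the cast to List<number> only satisfies the branded list type.

```ts
// Hedged sketch of the delta API on Number64ListType (not from the package docs).
import {Number64ListType, Number64UintType} from "@chainsafe/ssz";
import type {List} from "@chainsafe/ssz";

const Balances = new Number64ListType({elementType: new Number64UintType(), limit: 1024});

// Build a backing Tree from plain numbers (struct_convertToTree is an assumption here).
const tree = Balances.struct_convertToTree([32_000_000_000, 31_000_000_000] as unknown as List<number>);

// Single-index delta: returns the new value, clamped at 0 if it would go negative.
Balances.tree_applyDeltaAtIndex(tree, 0, -1_000_000_000);

// Batched deltas: the tree's root hook runs once for the whole Map of index -> delta.
Balances.tree_applyDeltaInBatch(tree, new Map([[0, 5], [1, -5]]));

// Pure variant: returns a new Tree plus the new values, leaving `tree` untouched.
const [newTree, newValues] = Balances.tree_newTreeFromDeltas(tree, [7, 7]);
```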

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CompositeListType = exports.BasicListType = exports.ListType = exports.isListType = exports.LIST_TYPE = exports.LENGTH_GINDEX = void 0;
exports.CompositeListType = exports.Number64ListType = exports.BasicListType = exports.ListType = exports.isListType = exports.LIST_TYPE = exports.LENGTH_GINDEX = void 0;
const array_1 = require("./array");

@@ -8,2 +8,3 @@ const basic_1 = require("../basic");

const compat_1 = require("../../util/compat");
const hash_1 = require("../../util/hash");
const persistent_merkle_tree_1 = require("@chainsafe/persistent-merkle-tree");

@@ -31,3 +32,6 @@ const treeValue_1 = require("../../backings/tree/treeValue");

function ListType(options) {
if (basic_1.isBasicType(options.elementType)) {
if (basic_1.isNumber64UintType(options.elementType)) {
return new Number64ListType(options);
}
else if (basic_1.isBasicType(options.elementType)) {
return new BasicListType(options);

@@ -193,2 +197,103 @@ }

exports.BasicListType = BasicListType;
/** For Number64UintType, each item takes 64 / 8 = 8 bytes, so a 32-byte chunk holds 4 items */
const NUMBER64_LIST_NUM_ITEMS_PER_CHUNK = 4;
/**
* An optimization for Number64 using HashObject and new methods to work with deltas.
*/
class Number64ListType extends BasicListType {
constructor(options) {
super(options);
}
/** @override */
tree_getValueAtIndex(target, index) {
const chunkGindex = this.getGindexAtChunkIndex(this.getChunkIndex(index));
const hashObject = target.getHashObject(chunkGindex);
// 4 items per chunk
const offsetInChunk = (index % 4) * 8;
return this.elementType.struct_deserializeFromHashObject(hashObject, offsetInChunk);
}
/** @override */
tree_setValueAtIndex(target, index, value, expand = false) {
const chunkGindex = this.getGindexAtChunkIndex(this.getChunkIndex(index));
const hashObject = hash_1.cloneHashObject(target.getHashObject(chunkGindex));
// 4 items per chunk
const offsetInChunk = (index % 4) * 8;
this.elementType.struct_serializeToHashObject(value, hashObject, offsetInChunk);
target.setHashObject(chunkGindex, hashObject, expand);
return true;
}
/**
* delta > 0 increments the underlying value, delta < 0 decrements the underlying value
* returns the new value
**/
tree_applyDeltaAtIndex(target, index, delta) {
const chunkGindex = this.getGindexAtChunkIndex(this.getChunkIndex(index));
// 4 items per chunk
const offsetInChunk = (index % 4) * 8;
let value = 0;
const hashObjectFn = (hashObject) => {
const newHashObject = hash_1.cloneHashObject(hashObject);
value = this.elementType.struct_deserializeFromHashObject(newHashObject, offsetInChunk);
value += delta;
if (value < 0)
value = 0;
this.elementType.struct_serializeToHashObject(value, newHashObject, offsetInChunk);
return newHashObject;
};
// it's 1.8x faster to use setHashObjectFn instead of getHashObject and setHashObject
target.setHashObjectFn(chunkGindex, hashObjectFn);
return value;
}
/**
* The same as tree_applyDeltaAtIndex, but applied in batch.
* returns the new values
**/
tree_applyDeltaInBatch(target, deltaByIndex) {
// work on a cloned tree so the root hook does not fire for every delta
const newTree = target.clone();
const newValues = [];
for (const [index, delta] of deltaByIndex.entries()) {
newValues.push(this.tree_applyDeltaAtIndex(newTree, index, delta));
}
// update target; the hook should run only once
target.rootNode = newTree.rootNode;
return newValues;
}
/**
* delta > 0 means an increase, delta < 0 means a decrease
* returns the new tree and new values
**/
tree_newTreeFromDeltas(target, deltas) {
if (deltas.length !== this.tree_getLength(target)) {
throw new Error(`Expected deltas length ${this.tree_getLength(target)}, actual ${deltas.length}`);
}
const chunkDepth = this.getChunkDepth();
const length = deltas.length;
let nodeIdx = 0;
const newLeafNodes = [];
const newValues = [];
const chunkCount = Math.ceil(length / NUMBER64_LIST_NUM_ITEMS_PER_CHUNK);
const currentNodes = target.getNodesAtDepth(chunkDepth, 0, chunkCount);
for (let i = 0; i < currentNodes.length; i++) {
const node = currentNodes[i];
const hashObject = hash_1.cloneHashObject(node);
for (let offset = 0; offset < NUMBER64_LIST_NUM_ITEMS_PER_CHUNK; offset++) {
const index = nodeIdx * NUMBER64_LIST_NUM_ITEMS_PER_CHUNK + offset;
if (index >= length)
break;
let value = this.elementType.struct_deserializeFromHashObject(hashObject, offset * 8) + deltas[index];
if (value < 0)
value = 0;
newValues.push(value);
// mutate hashObject at offset
this.elementType.struct_serializeToHashObject(value, hashObject, offset * 8);
}
newLeafNodes.push(new persistent_merkle_tree_1.LeafNode(hashObject));
nodeIdx++;
}
const newRootNode = persistent_merkle_tree_1.subtreeFillToContents(newLeafNodes, chunkDepth);
return [new persistent_merkle_tree_1.Tree(newRootNode), newValues];
}
}
exports.Number64ListType = Number64ListType;
class CompositeListType extends array_1.CompositeArrayType {

@@ -195,0 +300,0 @@ limit;

@@ -0,1 +1,3 @@

/** @module ssz */
import { HashObject } from "@chainsafe/as-sha256";
/**

@@ -5,2 +7,14 @@ * Hash used for hashTreeRoot

export declare function hash(...inputs: Uint8Array[]): Uint8Array;
/**
* Clone a hash object into sharedHashObject. Afterwards the HashObject is usually
* applied to the Tree, which makes its own copy, so it is safe to mutate
* this HashObject after that.
**/
export declare function cloneHashObject(hashObject: HashObject): HashObject;
/**
* Reset and return sharedHashObject. Afterwards the HashObject is usually
* applied to the Tree, which makes its own copy, so it is safe to mutate
* this HashObject after that.
**/
export declare function newHashObject(): HashObject;
//# sourceMappingURL=hash.d.ts.map

@@ -6,3 +6,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.hash = void 0;
exports.newHashObject = exports.cloneHashObject = exports.hash = void 0;
/** @module ssz */

@@ -17,2 +17,53 @@ const as_sha256_1 = __importDefault(require("@chainsafe/as-sha256"));

exports.hash = hash;
/**
* A temporary HashObject is needed in a lot of places. It is then applied to
* persistent-merkle-tree, which makes its own copy, so it is safe to mutate it afterwards.
* This means we can reuse a shared HashObject instead of always allocating a new one,
* saving memory. The temporary HashObject is always obtained via cloneHashObject()
* or newHashObject() below.
**/
const sharedHashObject = {
h0: 0,
h1: 0,
h2: 0,
h3: 0,
h4: 0,
h5: 0,
h6: 0,
h7: 0,
};
/**
* Clone a hash object into sharedHashObject. Afterwards the HashObject is usually
* applied to the Tree, which makes its own copy, so it is safe to mutate
* this HashObject after that.
**/
function cloneHashObject(hashObject) {
sharedHashObject.h0 = hashObject.h0;
sharedHashObject.h1 = hashObject.h1;
sharedHashObject.h2 = hashObject.h2;
sharedHashObject.h3 = hashObject.h3;
sharedHashObject.h4 = hashObject.h4;
sharedHashObject.h5 = hashObject.h5;
sharedHashObject.h6 = hashObject.h6;
sharedHashObject.h7 = hashObject.h7;
return sharedHashObject;
}
exports.cloneHashObject = cloneHashObject;
/**
* Reset and return sharedHashObject. Afterwards the HashObject is usually
* applied to the Tree, which makes its own copy, so it is safe to mutate
* this HashObject after that.
**/
function newHashObject() {
sharedHashObject.h0 = 0;
sharedHashObject.h1 = 0;
sharedHashObject.h2 = 0;
sharedHashObject.h3 = 0;
sharedHashObject.h4 = 0;
sharedHashObject.h5 = 0;
sharedHashObject.h6 = 0;
sharedHashObject.h7 = 0;
return sharedHashObject;
}
exports.newHashObject = newHashObject;
//# sourceMappingURL=hash.js.map
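
One caveat worth spelling out for the shared-HashObject utilities above: cloneHashObject() and newHashObject() always return the same shared instance, so the result must be handed to the Tree (which copies it) before the next call. A short illustrative sketch; the deep import path follows the lib/util/hash location shown above, but this is an internal utility rather than documented API.

```ts
// Sketch only: the shared-HashObject reuse pattern from lib/util/hash above.
import {newHashObject} from "@chainsafe/ssz/lib/util/hash";

const a = newHashObject(); // zeroed shared object
a.h0 = 42;                 // mutate freely...
// tree.setHashObject(gindexBitstring, a);  // ...then apply; the Tree stores its own copy
const b = newHashObject(); // the very same object, now reset to zeros
console.log(a === b);      // true: do not hold on to the result across calls
```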

@@ -7,3 +7,3 @@ {

"homepage": "https://github.com/chainsafe/ssz",
"version": "0.8.15",
"version": "0.8.16",
"main": "lib/index.js",

@@ -37,3 +37,3 @@ "files": [

"@chainsafe/as-sha256": "^0.2.4",
"@chainsafe/persistent-merkle-tree": "^0.3.5",
"@chainsafe/persistent-merkle-tree": "^0.3.7",
"case": "^1.6.3"

@@ -40,0 +40,0 @@ },
