Huge News!Announcing our $40M Series B led by Abstract Ventures.Learn More
Socket
Sign inDemoInstall
Socket

@web3-storage/data-segment

Package Overview
Dependencies
Maintainers
5
Versions
16
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@web3-storage/data-segment - npm Package Compare versions

Comparing version 3.0.1 to 3.1.0

dist/src/ipld.d.ts

56

dist/src/aggregate.d.ts
export const MAX_CAPACITY: bigint;
export const Proof: typeof InclusionProof.Proof;
/**

@@ -8,3 +9,2 @@ * Default aggregate size (32GiB).

export const UnpaddedSize: typeof Piece.UnpaddedSize;
export { Tree };
export function createBuilder({ size }?: {

@@ -17,2 +17,5 @@ size?: API.PaddedPieceSize | undefined;

}): API.AggregateView;
export function resolveSegment(aggregate: Aggregate, piece: API.Piece): API.Result<[number, API.SegmentInfo], RangeError>;
export function resolveProof(aggregate: Aggregate, piece: API.Piece): API.Result<API.InclusionProof, RangeError>;
import * as InclusionProof from './inclusion.js';
import * as API from './api.js';

@@ -47,2 +50,6 @@ import * as Piece from './piece.js';

/**
* Height of the perfect binary merkle tree corresponding to this aggregate.
*/
get height(): number;
/**
*

@@ -71,2 +78,49 @@ * @returns {API.AggregateView}

}
/**
* @implements {API.AggregateView}
*/
declare class Aggregate implements API.AggregateView {
/**
* @param {object} source
* @param {API.PaddedPieceSize} source.size
* @param {API.uint64} source.offset
* @param {API.MerkleTreeNodeSource[]} source.parts
* @param {API.IndexData} source.index
* @param {number} source.limit
* @param {API.AggregateTree} source.tree
*/
constructor({ tree, parts, index, limit, size, offset }: {
size: API.PaddedPieceSize;
offset: API.uint64;
parts: API.MerkleTreeNodeSource[];
index: API.IndexData;
limit: number;
tree: API.AggregateTree;
});
tree: API.AggregateTree<bigint>;
parts: API.MerkleTreeNodeSource[];
index: API.IndexData;
limit: number;
size: API.PaddedPieceSize;
offset: bigint;
link: API.PieceLink;
/**
* Size of the index in bytes.
*/
get indexSize(): number;
get root(): API.MerkleTreeNode;
/**
* Height of the perfect binary merkle tree corresponding to this aggregate.
*/
get height(): number;
toJSON(): {
'/': API.ToString<API.PieceLink>;
};
toInfo(): API.PieceInfoView;
/**
* @param {API.PieceLink} piece
*/
resolveProof(piece: API.PieceLink): API.Result<API.InclusionProof, RangeError>;
}
export { InclusionProof, Tree };
//# sourceMappingURL=aggregate.d.ts.map

7

dist/src/aggregate/tree.d.ts

@@ -7,4 +7,5 @@ /**

export function create(height: number): API.AggregateTree;
export function batchSet(tree: API.MerkleTreeBuilder, values: API.MerkleTreeNodeSource[]): void;
export function batchSet(tree: API.MerkleTreeBuilder, values: Iterable<API.MerkleTreeNodeSource>): void;
export function clear(tree: AggregateTree): void;
export function validateLevelIndex(maxLevel: number, level: number, index: API.uint64): void;
export function idxFor(height: number, level: number, index: API.uint64): API.uint64;

@@ -39,6 +40,6 @@ export type Model = {

* @param {number} level
* @param {API.uint64} index
* @param {API.uint64} at
* @returns {API.ProofData}
*/
collectProof(level: number, index: API.uint64): API.ProofData;
collectProof(level: number, at: API.uint64): API.ProofData;
/**

@@ -45,0 +46,0 @@ *

@@ -1,8 +0,18 @@

import type { Link, ToString } from 'multiformats/link';
import type { MultihashDigest } from 'multiformats';
import type { Link, ToString } from 'multiformats';
import type { MultihashDigest, BlockEncoder, BlockDecoder, BlockCodec, SyncMultihashHasher } from 'multiformats';
import type * as Multihash from './multihash.js';
import type { Sha256Trunc254Padded, FilCommitmentUnsealed } from './piece.js';
export type RAW_CODE = MulticodecCode<0x55, 'raw'>;
export type { ToString };
/**
* Type describes a byte representation of a `Data` encoded using
* the multicodec with `Code` code.
*/
export type ByteView<Data, Code extends MulticodecCode> = New<{
Bytes: Uint8Array;
}, {
code: Code;
model: Data;
}>;
export type { ToString, Link, BlockEncoder, BlockDecoder, BlockCodec, SyncMultihashHasher, };
/**
* Implementers of the `Read` interface are called "readers". Readers

@@ -112,3 +122,36 @@ * allow for reading bytes from an underlying source.

tree: AggregateTree;
/**
* Resolves inclusion proof for the given piece. If aggregate does not include
* the given piece `{ error: RangeError }` is returned.
*/
resolveProof(piece: PieceLink): Result<InclusionProof, RangeError>;
}
/**
* @see https://github.com/filecoin-project/go-data-segment/blob/master/datasegment/verifier.go#L8-L14
*/
export type AggregationProof = [
InclusionProof,
AuxDataType,
SingletonMarketSource
];
/**
* @see https://github.com/filecoin-project/go-data-segment/blob/master/datasegment/verifier.go#L16-L18
*/
export type SingletonMarketSource = [DealID];
/**
* @see https://github.com/filecoin-project/go-state-types/blob/master/abi/deal.go#L5
*/
export type DealID = uint64;
/**
* @see https://github.com/filecoin-project/go-data-segment/blob/master/datasegment/verifier.go#L12
*/
export type AuxDataType = 0;
/**
* Proof that content piece (merkle tree) is a fully contained segment of the
 * aggregate (merkle tree).
*
* @see https://github.com/filecoin-project/go-data-segment/blob/e3257b64fa2c84e0df95df35de409cfed7a38438/merkletree/proof.go#L9-L14
* @see https://github.com/filecoin-project/go-data-segment/blob/e3257b64fa2c84e0df95df35de409cfed7a38438/datasegment/inclusion.go#L31-L39
*/
export type InclusionProof = [tree: ProofData, index: ProofData];
export interface Vector<T> extends Iterable<T> {

@@ -131,4 +174,3 @@ append(value: T): Vector<T>;

}>;
export interface IndexData {
entries: SegmentInfo[];
export interface IndexData extends Array<SegmentInfo> {
}

@@ -305,6 +347,7 @@ export interface MerkleTree<I extends uint64 | number = uint64 | number> extends Piece {

}
export interface ProofData {
path: MerkleTreeNode[];
index: uint64;
}
export type ProofData = [
at: uint64,
path: MerkleTreePath
];
export type MerkleTreePath = MerkleTreeNode[];
export type MerkleTreeNode = New<{

@@ -311,0 +354,0 @@ Node: Uint8Array;

@@ -0,3 +1,15 @@

export { Proof };
export function indexAreaStart(size: API.PaddedPieceSize): API.uint64;
export function encode(proof: API.InclusionProof): API.ByteView<API.InclusionProof, typeof CBOR.code>;
export function link(proof: API.InclusionProof): API.Link<API.InclusionProof, API.MulticodecCode<113, "dag-cbor">, 18, 1>;
export function decode(bytes: API.ByteView<API.InclusionProof, typeof CBOR.code>): API.InclusionProof;
export function create({ tree, index }: {
tree: API.ProofData;
index: API.ProofData;
}): API.InclusionProof;
export function tree([tree]: API.InclusionProof): API.ProofData;
export function index([_, index]: API.InclusionProof): API.ProofData;
import * as Proof from './proof.js';
import * as API from './api.js';
import { CBOR } from './ipld.js';
//# sourceMappingURL=inclusion.d.ts.map

@@ -37,5 +37,20 @@ /**

export function toDigest({ height, root }: API.Piece): API.PieceDigest;
export function createDigest(): Digest;
export type Layers = [API.MerkleTreeNode[], ...API.MerkleTreeNode[][]];
import * as API from './api.js';
import * as Piece from './piece.js';
declare class Digest {
/**
* @param {Uint8Array} bytes
*/
constructor(bytes: Uint8Array);
bytes: Uint8Array;
digest: Uint8Array;
root: Uint8Array;
get height(): number;
get size(): 33;
get name(): "fr32-sha2-256-trunc254-padded-binary-tree";
get code(): API.MulticodecCode<4113, "fr32-sha2-256-trunc254-padded-binary-tree">;
}
export {};
//# sourceMappingURL=multihash.d.ts.map

@@ -60,3 +60,3 @@ /**

link: {
'/': API.ToString<Link.Link<API.MerkleTreeNode, API.MulticodecCode<61697, "fil-commitment-unsealed">, API.MulticodecCode<4114, "sha2-256-trunc254-padded">, Link.Version>, string>;
'/': API.ToString<API.Link<API.MerkleTreeNode, API.MulticodecCode<61697, "fil-commitment-unsealed">, API.MulticodecCode<4114, "sha2-256-trunc254-padded">, Link.Version>, string>;
};

@@ -63,0 +63,0 @@ height: number;

/**
* @param {Uint8Array} data
* @param {API.MerkleTreeNode} root
* @param {API.ProofData} proofData
* @param {API.ProofData} proof
* @returns {API.Result<void, Error>}
*/
export function validateLeaf(data: Uint8Array, root: API.MerkleTreeNode, proofData: API.ProofData): API.Result<void, Error>;
export function validateLeaf(data: Uint8Array, root: API.MerkleTreeNode, proof: API.ProofData): API.Result<void, Error>;
/**
* @param {API.MerkleTreeNode} subtree
* @param {API.MerkleTreeNode} root
* @param {API.ProofData} proofData
* @param {API.ProofData} proof
* @returns {API.Result<void, Error>}
*/
export function validateSubtree(subtree: API.MerkleTreeNode, root: API.MerkleTreeNode, proofData: API.ProofData): API.Result<void, Error>;
export function validateSubtree(subtree: API.MerkleTreeNode, root: API.MerkleTreeNode, proof: API.ProofData): API.Result<void, Error>;
/**

@@ -43,5 +43,15 @@ * @param {API.MerkleTreeNode} subtree

export function truncate(node: API.MerkleTreeNode): API.MerkleTreeNode;
export function depth(proofData: API.ProofData): number;
export function path([, path]: API.ProofData): API.MerkleTreePath;
export function at([at]: API.ProofData): API.uint64;
export function depth(proof: API.ProofData): number;
export function computeNode(left: API.MerkleTreeNode, right: API.MerkleTreeNode): API.MerkleTreeNode;
export function create({ at, path }: {
at: API.uint64;
path: API.MerkleTreePath;
}): API.ProofData;
export function from(source: [number | bigint, API.MerkleTreePath] | {
at: API.uint64 | number;
path: API.MerkleTreePath;
}): API.ProofData;
import * as API from './api.js';
//# sourceMappingURL=proof.d.ts.map

@@ -16,2 +16,3 @@ /**

};
export function toSource(segment: API.SegmentInfo): API.MerkleTreeNodeSource;
export function fromSourceWithChecksum(source: API.MerkleTreeNodeSource): API.SegmentInfo;

@@ -18,0 +19,0 @@ export function toLeafIndex({ index, level }: API.MerkleTreeLocation): bigint;

{
"name": "@web3-storage/data-segment",
"description": "Implementation of [FRC-0058](https://github.com/filecoin-project/FIPs/blob/master/FRCs/frc-0058.md) verifiable aggregation scheme",
"version": "3.0.1",
"version": "3.1.0",
"keywords": [

@@ -27,3 +27,4 @@ "FRC-0058",

"multiformats": "^11.0.2",
"sync-multihash-sha2": "^1.0.0"
"sync-multihash-sha2": "^1.0.0",
"@ipld/dag-cbor": "^9.0.5"
},

@@ -30,0 +31,0 @@ "devDependencies": {

@@ -8,9 +8,11 @@ # @web3-storage/data-segment

```ts
import { CommP } from "@web3-storage/data-segment"
import { Piece, MIN_PAYLOAD_SIZE } from "@web3-storage/data-segment"
const demo = async (bytes: Uint8Array) => {
const commP = await CommP.build(bytes)
// Gives you a commP as a CID
const cid = commP.link()
}
// input must be >= 65 bytes
const bytes = new Uint8Array(MIN_PAYLOAD_SIZE)
const piece = Piece.fromPayload(bytes)
// bafkzcibbai3tdo4zvruj6zxo6wlt4suu3imi6to4vzmaojh4n475mdp5jcbtg
const cid = piece.link.toString()
```

@@ -20,5 +22,5 @@

Implementation originally started as fork of [js-fill-utils] modernizing it to use ES modules and web crypto APIs in place of node APIs.
Started as a fork of [js-fil-utils] modernizing it to use ES modules and web crypto APIs in place of node APIs.
Hover it produces different results from more widely used go implementation which is why it got some heavy lifting inspired by [go-data-segment] and [go-fil-commp-hashhash] libraries.
However, [js-fil-utils] produces different results from the more widely used go implementation which is why it got some heavy lifting inspired by [go-data-segment] and [go-fil-commp-hashhash] libraries.

@@ -25,0 +27,0 @@ [go-data-segment]:https://github.com/filecoin-project/go-data-segment

@@ -9,6 +9,11 @@ import * as API from './api.js'

import { indexAreaStart } from './inclusion.js'
import * as Bytes from 'multiformats/bytes'
import * as InclusionProof from './inclusion.js'
const NodeSize = BigInt(Node.Size)
const EntrySize = Number(Index.EntrySize)
export const MAX_CAPACITY = Piece.PaddedSize.fromHeight(Tree.MAX_HEIGHT)
export { InclusionProof }
export const Proof = InclusionProof.Proof

@@ -84,2 +89,9 @@ /**

/**
* Height of the perfect binary merkle tree corresponding to this aggregate.
*/
get height() {
return log2Ceil(this.size / NodeSize)
}
/**
*

@@ -89,32 +101,8 @@ * @returns {API.AggregateView}

build() {
const { size, parts, limit, offset } = this
const indexStartNodes = indexAreaStart(size) / NodeSize
const { size, parts, limit, offset, height } = this
const index = createIndex(parts)
/** @type {API.MerkleTreeNodeSource[]} */
const batch = new Array(2 * parts.length)
for (const [n, part] of parts.entries()) {
const segment = Segment.fromSourceWithChecksum(part)
const node = Segment.toIndexNode(segment)
const index = n * 2
batch[index] = {
node: segment.root,
location: {
level: 0,
index: indexStartNodes + BigInt(index),
},
}
batch[index + 1] = {
node,
location: {
level: 0,
index: indexStartNodes + BigInt(index + 1),
},
}
}
const tree = Tree.create(log2Ceil(size / NodeSize))
const tree = Tree.create(height)
Tree.batchSet(tree, parts)
Tree.batchSet(tree, batch)
Tree.batchSet(tree, createIndexNodes(size, index))

@@ -124,2 +112,3 @@ return new Aggregate({

tree,
index,
offset,

@@ -198,2 +187,39 @@ parts,

/**
* @param {API.PaddedPieceSize} size
* @param {API.SegmentInfo[]} segments
* @returns {Iterable<API.MerkleTreeNodeSource>}
*/
const createIndexNodes = function* (size, segments) {
const indexStartNodes = indexAreaStart(size) / NodeSize
for (const [n, segment] of segments.entries()) {
const node = Segment.toIndexNode(segment)
const index = n * 2
yield {
node: segment.root,
location: {
level: 0,
index: indexStartNodes + BigInt(index),
},
}
yield {
node,
location: {
level: 0,
index: indexStartNodes + BigInt(index + 1),
},
}
}
}
/**
* @param {API.MerkleTreeNodeSource[]} parts
* @returns {API.IndexData}
*/
const createIndex = (parts) =>
parts.map((part) => Segment.fromSourceWithChecksum(part))
/**
* @implements {API.AggregateView}

@@ -207,8 +233,10 @@ */

* @param {API.MerkleTreeNodeSource[]} source.parts
* @param {API.IndexData} source.index
* @param {number} source.limit
* @param {API.AggregateTree} source.tree
*/
constructor({ tree, parts, limit, size, offset }) {
constructor({ tree, parts, index, limit, size, offset }) {
this.tree = tree
this.parts = parts
this.index = index
this.limit = limit

@@ -241,2 +269,57 @@ this.size = size

}
/**
* @param {API.PieceLink} piece
*/
resolveProof(piece) {
return resolveProof(this, Piece.fromLink(piece))
}
}
/**
*
* @param {Aggregate} aggregate
* @param {API.Piece} piece
* @returns {API.Result<[number, API.SegmentInfo], RangeError>}
*/
export const resolveSegment = (aggregate, piece) => {
const { height, root } = piece
const size = PaddedSize.fromHeight(height)
for (const [n, segment] of aggregate.index.entries()) {
if (size === segment.size && Bytes.equals(root, segment.root)) {
return { ok: [n, segment] }
}
}
return {
error: new RangeError(
`Piece ${piece} was not found in aggregate ${aggregate.link}`
),
}
}
/**
* @see https://github.com/filecoin-project/go-data-segment/blob/master/datasegment/creation.go#L86-L105
*
* @param {Aggregate} aggregate
* @param {API.Piece} piece
* @returns {API.Result<API.InclusionProof, RangeError>}
*/
export const resolveProof = (aggregate, piece) => {
const result = resolveSegment(aggregate, piece)
if (result.error) {
return result
} else {
const [n, segment] = result.ok
const { level, index } = Segment.toSource(segment).location
const subTreeProof = aggregate.tree.collectProof(level, index)
const indexOffset =
indexAreaStart(aggregate.size) / BigInt(EntrySize) + BigInt(n)
const indexProof = aggregate.tree.collectProof(1, indexOffset)
const inclusion = { tree: subTreeProof, index: indexProof }
return { ok: InclusionProof.create(inclusion) }
}
}

@@ -65,14 +65,14 @@ import * as API from '../api.js'

* @param {number} level
* @param {API.uint64} index
* @param {API.uint64} at
* @returns {API.ProofData}
*/
collectProof(level, index) {
validateLevelIndex(this.height, level, index)
collectProof(level, at) {
validateLevelIndex(this.height, level, at)
const path = []
let currentLevel = level
let currentIndex = index
let position = at
while (currentLevel < this.height) {
// idx^1 is the sibling index
const node = this.node(currentLevel, currentIndex ^ 1n)
currentIndex = currentIndex / 2n
const node = this.node(currentLevel, position ^ 1n)
position = position / 2n
path.push(node)

@@ -82,3 +82,3 @@ currentLevel++

return { path, index }
return Proof.create({ path, at })
}

@@ -240,3 +240,3 @@

* @param {API.MerkleTreeBuilder} tree
* @param {API.MerkleTreeNodeSource[]} values
* @param {Iterable<API.MerkleTreeNodeSource>} values
*/

@@ -280,3 +280,3 @@ export const batchSet = (tree, values) => {

*/
const validateLevelIndex = (maxLevel, level, index) => {
export const validateLevelIndex = (maxLevel, level, index) => {
if (level < 0) {

@@ -283,0 +283,0 @@ throw new RangeError('level can not be negative')

@@ -1,4 +0,9 @@

import type { Link, ToString } from 'multiformats/link'
import type { MultihashDigest } from 'multiformats'
import type * as Raw from 'multiformats/codecs/raw'
import type { Link, ToString } from 'multiformats'
import type {
MultihashDigest,
BlockEncoder,
BlockDecoder,
BlockCodec,
SyncMultihashHasher,
} from 'multiformats'
import type * as Multihash from './multihash.js'

@@ -9,4 +14,20 @@ import type { Sha256Trunc254Padded, FilCommitmentUnsealed } from './piece.js'

export type { ToString }
/**
* Type describes a byte representation of a `Data` encoded using
* the multicodec with `Code` code.
*/
export type ByteView<Data, Code extends MulticodecCode> = New<
{ Bytes: Uint8Array },
{ code: Code; model: Data }
>
export type {
ToString,
Link,
BlockEncoder,
BlockDecoder,
BlockCodec,
SyncMultihashHasher,
}
/**
* Implementers of the `Read` interface are called "readers". Readers

@@ -127,5 +148,3 @@ * allow for reading bytes from an underlying source.

export interface Aggregate extends Piece {
}
export interface Aggregate extends Piece {}

@@ -136,4 +155,42 @@ export interface AggregateView extends Aggregate, PieceView {

tree: AggregateTree
/**
* Resolves inclusion proof for the given piece. If aggregate does not include
* the given piece `{ error: RangeError }` is returned.
*/
resolveProof(piece: PieceLink): Result<InclusionProof, RangeError>
}
/**
* @see https://github.com/filecoin-project/go-data-segment/blob/master/datasegment/verifier.go#L8-L14
*/
export type AggregationProof = [
InclusionProof,
AuxDataType,
SingletonMarketSource
]
/**
* @see https://github.com/filecoin-project/go-data-segment/blob/master/datasegment/verifier.go#L16-L18
*/
export type SingletonMarketSource = [DealID]
/**
* @see https://github.com/filecoin-project/go-state-types/blob/master/abi/deal.go#L5
*/
export type DealID = uint64
/**
* @see https://github.com/filecoin-project/go-data-segment/blob/master/datasegment/verifier.go#L12
*/
export type AuxDataType = 0
/**
* Proof that content piece (merkle tree) is a fully contained segment of the
 * aggregate (merkle tree).
*
* @see https://github.com/filecoin-project/go-data-segment/blob/e3257b64fa2c84e0df95df35de409cfed7a38438/merkletree/proof.go#L9-L14
* @see https://github.com/filecoin-project/go-data-segment/blob/e3257b64fa2c84e0df95df35de409cfed7a38438/datasegment/inclusion.go#L31-L39
*/
export type InclusionProof = [tree: ProofData, index: ProofData]
export interface Vector<T> extends Iterable<T> {

@@ -155,5 +212,3 @@ append(value: T): Vector<T>

export interface IndexData {
entries: SegmentInfo[]
}
export interface IndexData extends Array<SegmentInfo> {}

@@ -370,9 +425,11 @@ export interface MerkleTree<I extends uint64 | number = uint64 | number>

export interface ProofData {
path: MerkleTreeNode[]
// index indicates the index within the level where the element whose membership to prove is located
// Leftmost node is index 0
index: uint64
}
export type ProofData = [
// indicates the index within the level where the element whose membership to prove is located
// Leftmost node is at 0
at: uint64,
path: MerkleTreePath
]
export type MerkleTreePath = MerkleTreeNode[]
export type MerkleTreeNode = New<{ Node: Uint8Array }, { size: 32 }>

@@ -379,0 +436,0 @@

import * as API from './api.js'
import { maxIndexEntriesInDeal, EntrySize } from './index.js'
import * as Proof from './proof.js'
import { SHA256, CBOR } from './ipld.js'
import * as IPLD from './ipld.js'
export { Proof }
/**
*
* @param {API.PaddedPieceSize} size

@@ -11,1 +15,53 @@ * @returns {API.uint64}

size - BigInt(maxIndexEntriesInDeal(size)) * EntrySize
/**
* Encodes data layout into a CBOR block.
*
* @param {API.InclusionProof} proof
* @returns {API.ByteView<API.InclusionProof, typeof CBOR.code>}
*/
export const encode = (proof) => CBOR.encode(proof)
/**
* @param {API.InclusionProof} proof
*/
export const link = (proof) =>
IPLD.createLink(encode(proof), { codec: CBOR, hasher: SHA256 })
/**
* Decodes CBOR encoded data layout. It is reverse of {@link encode}.
*
* @param {API.ByteView<API.InclusionProof, typeof CBOR.code>} bytes
*/
export const decode = (bytes) => {
const [tree, index] = CBOR.decode(bytes)
// Note we need to go through this to ensure the bigint conversion
return create({ tree: Proof.from(tree), index: Proof.from(index) })
}
/**
* Takes data model and returns an IPLD View of it.
*
* @param {object} source
* @param {API.ProofData} source.tree
* @param {API.ProofData} source.index
* @returns {API.InclusionProof}
*/
export const create = ({ tree, index }) => [tree, index]
/**
* Accessor for the segment (sub) tree.
*
* @param {API.InclusionProof} proof
* @returns {API.ProofData}
*/
export const tree = ([tree]) => tree
/**
* Accessor for the segment index.
*
* @param {API.InclusionProof} proof
* @returns {API.ProofData}
*/
export const index = ([_, index]) => index

@@ -9,8 +9,6 @@ import * as API from './api.js'

import * as ZeroPad from './zero-comm.js'
import * as Proof from './proof.js'
import { computeNode } from './proof.js'
import { split } from './piece/tree.js'
import { pad } from './fr32.js'
import { fromHeight as piceSizeFromHeight } from './piece/padded-size.js'
import * as Link from 'multiformats/link'
import * as Raw from 'multiformats/codecs/raw'
import * as Piece from './piece.js'

@@ -139,5 +137,5 @@

digest() {
const buffer = new Uint8Array(MULTIHASH_SIZE)
this.digestInto(buffer)
return new Digest(buffer)
const digest = createDigest()
this.digestInto(digest.bytes)
return digest
}

@@ -329,2 +327,5 @@

}
export const createDigest = () => new Digest(new Uint8Array(MULTIHASH_SIZE))
/**

@@ -375,27 +376,26 @@ * Prunes layers by combining node pairs into nodes in the next layer and

// If we have 0 nodes in the current layer we just move to the next one.
if (layer.length) {
// If we have a next layer and we are building will combine nodes from the current layer
next = next ? (build ? [...next] : next) : []
let index = 0
// Note that we have checked that we have an even number of nodes so
// we will never end up with an extra node when consuming two at a time.
while (index + 1 < layer.length) {
const node = Proof.computeNode(layer[index], layer[index + 1])
// we proactively delete nodes in order to free up a memory used.
delete layer[index]
delete layer[index + 1]
// If we have a next layer and we are building will combine nodes from the current layer
next = next ? (build ? [...next] : next) : []
let index = 0
// Note that we have checked that we have an even number of nodes so
// we will never end up with an extra node when consuming two at a time.
while (index + 1 < layer.length) {
const node = computeNode(layer[index], layer[index + 1])
next.push(node)
index += 2
}
// we proactively delete nodes in order to free up a memory used.
delete layer[index]
delete layer[index + 1]
if (next.length) {
layers[level] = next
}
next.push(node)
index += 2
}
// we remove nodes that we have combined from the current layer to reduce
// memory overhead and move to the next layer.
layer.splice(0, index)
if (next.length) {
layers[level] = next
}
// we remove nodes that we have combined from the current layer to reduce
// memory overhead and move to the next layer.
layer.splice(0, index)
}

@@ -402,0 +402,0 @@

import * as API from './api.js'
import * as SHA256 from 'sync-multihash-sha2/sha256'
import { Size as NodeSize } from './node.js'
import { CBOR, SHA256 } from './ipld.js'
/**
* @param {API.ProofData} proofData
* @param {API.ProofData} proof
* @returns {API.MerkleTreePath}
*/
export const path = ([, path]) => path
/**
* @param {API.ProofData} proof
* @returns {API.uint64}
*/
export const at = ([at]) => at
/**
* @param {API.ProofData} proof
* @returns {number}
*/
export const depth = (proofData) => proofData.path.length
export const depth = (proof) => path(proof).length

@@ -16,8 +29,8 @@ /* c8 ignore next 98 */

* @param {API.MerkleTreeNode} root
* @param {API.ProofData} proofData
* @param {API.ProofData} proof
* @returns {API.Result<void, Error>}
*/
export function validateLeaf(data, root, proofData) {
export function validateLeaf(data, root, proof) {
const leaf = truncatedHash(data)
return validateSubtree(leaf, root, proofData)
return validateSubtree(leaf, root, proof)
}

@@ -28,8 +41,8 @@

* @param {API.MerkleTreeNode} root
* @param {API.ProofData} proofData
* @param {API.ProofData} proof
* @returns {API.Result<void, Error>}
*/
export function validateSubtree(subtree, root, proofData) {
export function validateSubtree(subtree, root, proof) {
// Validate the structure first to avoid panics
const structureValidation = validateProofStructure(proofData)
const structureValidation = validateProofStructure(proof)
if (structureValidation.error) {

@@ -42,3 +55,3 @@ return {

}
return validateProof(subtree, root, proofData)
return validateProof(subtree, root, proof)
}

@@ -64,3 +77,5 @@

}
if (proofData.index >> BigInt(depth(proofData)) !== 0n) {
let index = at(proofData)
if (index >> BigInt(depth(proofData)) !== 0n) {
return { error: new Error('index greater than width of the tree') }

@@ -70,6 +85,5 @@ }

let carry = subtree
let index = proofData.index
let right = 0n
for (const p of proofData.path) {
for (const p of path(proofData)) {
;[right, index] = [index & 1n, index >> 1n]

@@ -163,1 +177,22 @@ carry = right === 1n ? computeNode(p, carry) : computeNode(carry, p)

}
/**
* Takes data model and returns an IPLD View of it.
*
* @param {object} source
* @param {API.uint64} source.at
* @param {API.MerkleTreePath} source.path
* @returns {API.ProofData}
*/
export const create = ({ at, path }) => [at, path]
/**
* Takes proof in somewhat arbitrary form and returns a proof data.
* @param {[API.uint64|number, API.MerkleTreePath]|{at:API.uint64|number, path: API.MerkleTreePath}} source
* @returns {API.ProofData}
*/
export const from = (source) => {
const [at, path] = Array.isArray(source) ? source : [source.at, source.path]
return create({ at: BigInt(at), path })
}
import * as SHA256 from 'sync-multihash-sha2/sha256'
import { Size as NodeSize } from './node.js'
import * as API from './api.js'
import { pow2 } from './uint64.js'
import { log2Ceil, pow2 } from './uint64.js'

@@ -44,2 +44,19 @@ /**

/**
*
* @param {API.SegmentInfo} segment
* @returns {API.MerkleTreeNodeSource}
*/
export const toSource = (segment) => {
const level = log2Ceil(segment.size / BigInt(NodeSize))
return {
node: segment.root,
location: {
level,
index: (segment.offset / BigInt(NodeSize)) >> BigInt(level),
},
}
}
/**
* @param {API.MerkleTreeNodeSource} source

@@ -46,0 +63,0 @@ */

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc