hamt-sharding
Comparing version 3.0.3 to 3.0.6
@@ -9,9 +9,9 @@ import type { InfiniteHash } from './consumable-hash.js'; | ||
length: number; | ||
compactArray: () => B[]; | ||
get: (i: number) => B; | ||
set: (i: number, value: B) => void; | ||
reduce: <A>(fn: (acc: A, curr: B, index: number) => A, initial: A) => B; | ||
find: (fn: (item: B) => boolean) => B | undefined; | ||
bitField: () => number[]; | ||
unset: (i: number) => void; | ||
compactArray(): B[]; | ||
get(i: number): B; | ||
set(i: number, value: B): void; | ||
reduce<A>(fn: (acc: A, curr: B, index: number) => A, initial: A): B; | ||
find(fn: (item: B) => boolean): B | undefined; | ||
bitField(): number[]; | ||
unset(i: number): void; | ||
} | ||
@@ -26,3 +26,3 @@ export interface BucketPosition<T> { | ||
bits: number; | ||
hash: (value: Uint8Array | InfiniteHash) => InfiniteHash; | ||
hash(value: Uint8Array | InfiniteHash): InfiniteHash; | ||
} | ||
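Both declaration hunks above swap property-style function members (`hash: (value) => InfiniteHash`) for method-style signatures (`hash(value): InfiniteHash`). One practical difference, shown in the sketch below with hypothetical interface names (not part of hamt-sharding's API): under TypeScript's `strictFunctionTypes`, property-style members are checked contravariantly while method-style members remain bivariant, so a method-style member accepts a wider range of implementations.

```TypeScript
// Hypothetical interfaces, only to illustrate the variance difference.
interface PropertyStyle {
  hash: (value: Uint8Array | string) => string
}
interface MethodStyle {
  hash(value: Uint8Array | string): string
}

// An implementation that only handles Uint8Array:
declare const narrowHash: (value: Uint8Array) => string

// With strictFunctionTypes enabled, the property-style member rejects the
// narrower implementation (contravariant parameter check)...
// const a: PropertyStyle = { hash: narrowHash } // compile error
// ...while the method-style member still accepts it (bivariant check).
const b: MethodStyle = { hash: narrowHash }
```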
@@ -44,5 +44,5 @@ export declare class Bucket<T> { | ||
eachLeafSeries(): Iterable<BucketChild<T>>; | ||
serialize(map: (value: BucketChild<T>, index: number) => T, reduce: (reduced: any) => any): any; | ||
asyncTransform(asyncMap: (value: BucketChild<T>) => Promise<T[]>, asyncReduce: (reduced: any) => Promise<any>): Promise<any>; | ||
toJSON(): any; | ||
serialize<M>(map: (value: BucketChild<T>, index: number) => M, reduce: (reduced: Bucket<T> | BucketChild<T>) => M): M; | ||
asyncTransform<R = T>(asyncMap: (value: BucketChild<T>) => Promise<T[]>, asyncReduce: (reduced: any) => Promise<R>): Promise<R>; | ||
toJSON(): Record<string, any>; | ||
prettyPrint(): string; | ||
@@ -49,0 +49,0 @@ tableSize(): number; |
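A quick sketch of how the tightened `Bucket` return types surface to a consumer. The hash function is left abstract here; any `(value: Uint8Array) => Promise<Uint8Array>` hasher would do (a concrete one is shown further down).

```TypeScript
import { createHAMT } from 'hamt-sharding'

// Any async hash function; see the Node crypto sketch later in this diff.
declare const hashFn: (value: Uint8Array) => Promise<Uint8Array>

const bucket = createHAMT<string>({ hashFn })

await bucket.put('key', 'value')

// 3.0.6 declares these return types explicitly, so no casts are needed:
const json: Record<string, any> = bucket.toJSON()
const pretty: string = bucket.prettyPrint()
console.info(pretty)
```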
@@ -1,5 +0,11 @@ | ||
// @ts-expect-error | ||
// @ts-expect-error no types | ||
import SparseArray from 'sparse-array'; | ||
import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'; | ||
export class Bucket { | ||
_options; | ||
_popCount; | ||
_parent; | ||
_posAtParent; | ||
_children; | ||
key; | ||
constructor(options, parent, posAtParent = 0) { | ||
@@ -15,3 +21,3 @@ this._options = options; | ||
const place = await this._findNewBucketAndPos(key); | ||
await place.bucket._putAt(place, key, value); | ||
place.bucket._putAt(place, key, value); | ||
} | ||
@@ -73,3 +79,3 @@ async get(key) { | ||
async asyncTransform(asyncMap, asyncReduce) { | ||
return await asyncTransformBucket(this, asyncMap, asyncReduce); | ||
return asyncTransformBucket(this, asyncMap, asyncReduce); | ||
} | ||
@@ -102,3 +108,3 @@ toJSON() { | ||
if (child instanceof Bucket) { | ||
return await child._findPlace(hashValue); | ||
return child._findPlace(hashValue); | ||
} | ||
@@ -121,3 +127,3 @@ return { | ||
newPlace.bucket._putAt(newPlace, place.existingChild.key, place.existingChild.value); | ||
return await bucket._findNewBucketAndPos(place.hash); | ||
return bucket._findNewBucketAndPos(place.hash); | ||
} | ||
@@ -129,4 +135,4 @@ // no conflict, we found the place | ||
this._putObjectAt(place.pos, { | ||
key: key, | ||
value: value, | ||
key, | ||
value, | ||
hash: place.hash | ||
@@ -161,3 +167,3 @@ }); | ||
pos: this._posAtParent, | ||
hash: hash, | ||
hash, | ||
bucket: this._parent | ||
@@ -200,4 +206,4 @@ }; | ||
} | ||
return await asyncReduce(output); | ||
return asyncReduce(output); | ||
} | ||
//# sourceMappingURL=bucket.js.map |
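The runtime hunks above mostly drop redundant `await`s on promises that are returned directly (and on `_putAt`, which is synchronous and returns `void`). Inside an async function, the main observable difference between `return promise` and `return await promise` is error handling when the `return` sits inside a `try`/`catch` (plus an extra microtask tick); a minimal sketch with hypothetical helper names:

```TypeScript
// Hypothetical helpers, only to illustrate `return` vs `return await`.
async function withoutAwait (): Promise<string> {
  try {
    // The rejection surfaces after this frame has already returned,
    // so the catch below never runs — the caller sees the rejection.
    return Promise.reject(new Error('boom'))
  } catch {
    return 'recovered'
  }
}

async function withAwait (): Promise<string> {
  try {
    // Awaiting keeps the rejection inside this frame, so it is caught here.
    return await Promise.reject(new Error('boom'))
  } catch {
    return 'recovered'
  }
}

console.info(await withoutAwait().catch(() => 'rejected')) // 'rejected'
console.info(await withAwait())                            // 'recovered'
```

None of the changed call sites are wrapped in `try`/`catch`, so removing the `await` there is behaviour-preserving.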
@@ -22,2 +22,5 @@ const START_MASKS = [ | ||
export class ConsumableBuffer { | ||
_value; | ||
_currentBytePos; | ||
_currentBitPos; | ||
constructor(value) { | ||
@@ -24,0 +27,0 @@ this._value = value; |
@@ -0,3 +1,3 @@ | ||
import { concat as uint8ArrayConcat } from 'uint8arrays/concat'; | ||
import { ConsumableBuffer } from './consumable-buffer.js'; | ||
import { concat as uint8ArrayConcat } from 'uint8arrays/concat'; | ||
export function wrapHash(hashFn) { | ||
@@ -16,2 +16,8 @@ function hashing(value) { | ||
export class InfiniteHash { | ||
_value; | ||
_hashFn; | ||
_depth; | ||
_availableBits; | ||
_currentBufferIndex; | ||
_buffers; | ||
constructor(value, hashFn) { | ||
@@ -18,0 +24,0 @@ if (!(value instanceof Uint8Array)) { |
@@ -0,5 +1,36 @@ | ||
/** | ||
* @packageDocumentation | ||
* | ||
* A [Hash Mapped Trie](https://en.wikipedia.org/wiki/Hash_array_mapped_trie) implementation for JavaScript. | ||
* | ||
* This is used by [@helia/unixfs](https://www.npmjs.com/package/@helia/unixfs) for it's HAMT-sharded directory implementation. | ||
* | ||
* @example | ||
* | ||
* ```TypeScript | ||
* import { createHAMT } from 'hamt-sharding' | ||
* import crypto from 'crypto-promise' | ||
* | ||
* // decide how to hash buffers made from keys, can return a Promise | ||
* const hashFn = async (buf) => { | ||
* return crypto | ||
* .createHash('sha256') | ||
* .update(buf) | ||
* .digest() | ||
* } | ||
* | ||
* const bucket = createHAMT({ | ||
* hashFn: hashFn | ||
* }) | ||
* | ||
* await bucket.put('key', 'value') | ||
* | ||
* const output = await bucket.get('key') | ||
* // output === 'value' | ||
* ``` | ||
*/ | ||
import { Bucket } from './bucket.js'; | ||
import type { BucketOptions, BucketPosition, BucketChild } from './bucket.js'; | ||
interface UserBucketOptions { | ||
hashFn: (value: Uint8Array) => Promise<Uint8Array>; | ||
hashFn(value: Uint8Array): Promise<Uint8Array>; | ||
bits?: number; | ||
@@ -6,0 +37,0 @@ } |
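The `UserBucketOptions.hashFn` contract can be satisfied without the `crypto-promise` package used in the example above; a sketch using Node's built-in `crypto` module (the import path and hash algorithm are just one option):

```TypeScript
import { createHash } from 'node:crypto'
import { createHAMT } from 'hamt-sharding'

// createHash(...).digest() returns a Buffer, which is a Uint8Array,
// so it satisfies hashFn(value: Uint8Array): Promise<Uint8Array>.
const hashFn = async (buf: Uint8Array): Promise<Uint8Array> => {
  return createHash('sha256').update(buf).digest()
}

const bucket = createHAMT<string>({ hashFn })

await bucket.put('key', 'value')
console.info(await bucket.get('key')) // 'value'
```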
@@ -0,1 +1,32 @@ | ||
/** | ||
* @packageDocumentation | ||
* | ||
* A [Hash Mapped Trie](https://en.wikipedia.org/wiki/Hash_array_mapped_trie) implementation for JavaScript. | ||
* | ||
* This is used by [@helia/unixfs](https://www.npmjs.com/package/@helia/unixfs) for it's HAMT-sharded directory implementation. | ||
* | ||
* @example | ||
* | ||
* ```TypeScript | ||
* import { createHAMT } from 'hamt-sharding' | ||
* import crypto from 'crypto-promise' | ||
* | ||
* // decide how to hash buffers made from keys, can return a Promise | ||
* const hashFn = async (buf) => { | ||
* return crypto | ||
* .createHash('sha256') | ||
* .update(buf) | ||
* .digest() | ||
* } | ||
* | ||
* const bucket = createHAMT({ | ||
* hashFn: hashFn | ||
* }) | ||
* | ||
* await bucket.put('key', 'value') | ||
* | ||
* const output = await bucket.get('key') | ||
* // output === 'value' | ||
* ``` | ||
*/ | ||
import { Bucket } from './bucket.js'; | ||
@@ -2,0 +33,0 @@ import { wrapHash } from './consumable-hash.js'; |
{ | ||
"name": "hamt-sharding", | ||
"version": "3.0.3", | ||
"version": "3.0.6", | ||
"description": "JavaScript implementation of sharding using hash array mapped tries", | ||
@@ -14,2 +14,6 @@ "license": "Apache-2.0 OR MIT", | ||
}, | ||
"publishConfig": { | ||
"access": "public", | ||
"provenance": true | ||
}, | ||
"keywords": [ | ||
@@ -20,6 +24,2 @@ "IPFS", | ||
], | ||
"engines": { | ||
"node": ">=16.0.0", | ||
"npm": ">=7.0.0" | ||
}, | ||
"type": "module", | ||
@@ -29,3 +29,3 @@ "types": "./dist/src/index.d.ts", | ||
"src", | ||
"dist/src", | ||
"dist", | ||
"!dist/test", | ||
@@ -43,2 +43,3 @@ "!**/*.tsbuildinfo" | ||
"parserOptions": { | ||
"project": true, | ||
"sourceType": "module" | ||
@@ -49,3 +50,3 @@ } | ||
"branches": [ | ||
"master" | ||
"main" | ||
], | ||
@@ -144,13 +145,15 @@ "plugins": [ | ||
"dep-check": "aegir dep-check", | ||
"release": "aegir release" | ||
"release": "aegir release", | ||
"docs": "aegir docs" | ||
}, | ||
"dependencies": { | ||
"sparse-array": "^1.3.1", | ||
"uint8arrays": "^4.0.2" | ||
"uint8arrays": "^5.0.1" | ||
}, | ||
"devDependencies": { | ||
"aegir": "^37.5.0", | ||
"aegir": "^42.2.3", | ||
"it-length": "^3.0.4", | ||
"multihashing-async": "^2.1.0" | ||
} | ||
}, | ||
"sideEffects": false | ||
} |
README.md
# hamt-sharding <!-- omit in toc --> | ||
[![ipfs.tech](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](https://ipfs.tech) | ||
[![Discuss](https://img.shields.io/discourse/https/discuss.ipfs.tech/posts.svg?style=flat-square)](https://discuss.ipfs.tech) | ||
[![codecov](https://img.shields.io/codecov/c/github/ipfs/js-hamt-sharding.svg?style=flat-square)](https://codecov.io/gh/ipfs/js-hamt-sharding) | ||
[![CI](https://img.shields.io/github/workflow/status/ipfs/js-hamt-sharding/test%20&%20maybe%20release/master?style=flat-square)](https://github.com/ipfs/js-hamt-sharding/actions/workflows/js-test-and-release.yml) | ||
[![codecov](https://img.shields.io/codecov/c/github/ipfs-shipyard/js-hamt-sharding.svg?style=flat-square)](https://codecov.io/gh/ipfs-shipyard/js-hamt-sharding) | ||
[![CI](https://img.shields.io/github/actions/workflow/status/ipfs-shipyard/js-hamt-sharding/js-test-and-release.yml?branch=main\&style=flat-square)](https://github.com/ipfs/js-hamt-sharding/actions/workflows/js-test-and-release.yml?query=branch%3Amain) | ||
> JavaScript implementation of sharding using hash array mapped tries | ||
## Table of contents <!-- omit in toc --> | ||
# About | ||
- [Install](#install) | ||
- [Usage](#usage) | ||
- [Example](#example) | ||
- [API](#api) | ||
- [`bucket.put(key, value)`](#bucketputkey-value) | ||
- [`bucket.get(key)`](#bucketgetkey) | ||
- [`bucket.del(key)`](#bucketdelkey) | ||
- [`bucket.leafCount()`](#bucketleafcount) | ||
- [`bucket.childrenCount()`](#bucketchildrencount) | ||
- [`bucket.onlyChild()`](#bucketonlychild) | ||
- [`bucket.eachLeafSeries()`](#bucketeachleafseries) | ||
- [`bucket.serialize(map, reduce)`](#bucketserializemap-reduce) | ||
- [`bucket.asyncTransform(asyncMap, asyncReduce)`](#bucketasynctransformasyncmap-asyncreduce) | ||
- [`bucket.toJSON()`](#buckettojson) | ||
- [`bucket.prettyPrint()`](#bucketprettyprint) | ||
- [`bucket.tableSize()`](#buckettablesize) | ||
- [Contribute](#contribute) | ||
- [License](#license) | ||
- [Contribute](#contribute-1) | ||
A [Hash Mapped Trie](https://en.wikipedia.org/wiki/Hash_array_mapped_trie) implementation for JavaScript. | ||
## Install | ||
This is used by [@helia/unixfs](https://www.npmjs.com/package/@helia/unixfs) for it's HAMT-sharded directory implementation. | ||
```console | ||
$ npm i hamt-sharding | ||
``` | ||
## Example | ||
## Usage | ||
### Example | ||
```javascript | ||
```TypeScript | ||
import { createHAMT } from 'hamt-sharding' | ||
@@ -64,101 +38,22 @@ import crypto from 'crypto-promise' | ||
## API | ||
# Install | ||
```javascript | ||
import { createHAMT } from 'hamt-sharding' | ||
```console | ||
$ npm i hamt-sharding | ||
``` | ||
### `bucket.put(key, value)` | ||
## Browser `<script>` tag | ||
```javascript | ||
import { createHAMT } from 'hamt-sharding' | ||
const bucket = createHAMT({...}) | ||
Loading this module through a script tag will make it's exports available as `HamtSharding` in the global namespace. | ||
await bucket.put('key', 'value') | ||
```html | ||
<script src="https://unpkg.com/hamt-sharding/dist/index.min.js"></script> | ||
``` | ||
### `bucket.get(key)` | ||
# API Docs | ||
```javascript | ||
import { createHAMT } from 'hamt-sharding' | ||
const bucket = createHAMT({...}) | ||
- <https://ipfs-shipyard.github.io/js-hamt-sharding> | ||
await bucket.put('key', 'value') | ||
# License | ||
console.info(await bucket.get('key')) // 'value' | ||
``` | ||
### `bucket.del(key)` | ||
```javascript | ||
import { createHAMT } from 'hamt-sharding' | ||
const bucket = createHAMT({...}) | ||
await bucket.put('key', 'value') | ||
await bucket.del('key', 'value') | ||
console.info(await bucket.get('key')) // undefined | ||
``` | ||
### `bucket.leafCount()` | ||
```javascript | ||
import { createHAMT } from 'hamt-sharding' | ||
const bucket = createHAMT({...}) | ||
console.info(bucket.leafCount()) // 0 | ||
await bucket.put('key', 'value') | ||
console.info(bucket.leafCount()) // 1 | ||
``` | ||
### `bucket.childrenCount()` | ||
```javascript | ||
import { createHAMT } from 'hamt-sharding' | ||
const bucket = createHAMT({...}) | ||
console.info(bucket.childrenCount()) // 0 | ||
await bucket.put('key', 'value') | ||
console.info(bucket.childrenCount()) // 234 -- dependent on hashing algorithm | ||
``` | ||
### `bucket.onlyChild()` | ||
### `bucket.eachLeafSeries()` | ||
```javascript | ||
import { createHAMT } from 'hamt-sharding' | ||
const bucket = createHAMT({...}) | ||
await bucket.put('key', 'value') | ||
for await (const child of bucket.eachLeafSeries()) { | ||
console.info(child.value) // 'value' | ||
} | ||
``` | ||
### `bucket.serialize(map, reduce)` | ||
### `bucket.asyncTransform(asyncMap, asyncReduce)` | ||
### `bucket.toJSON()` | ||
### `bucket.prettyPrint()` | ||
### `bucket.tableSize()` | ||
## Contribute | ||
Feel free to join in. All welcome. Open an [issue](https://github.com/ipfs-shipyard/js-hamt-sharding/issues)! | ||
This repository falls under the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md). | ||
[![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/contributing.md) | ||
## License | ||
Licensed under either of | ||
@@ -169,12 +64,4 @@ | ||
## Contribute | ||
# Contribution | ||
Contributions welcome! Please check out [the issues](https://github.com/ipfs/js-hamt-sharding/issues). | ||
Also see our [contributing document](https://github.com/ipfs/community/blob/master/CONTRIBUTING_JS.md) for more information on how we work, and about contributing in general. | ||
Please be aware that all interactions related to this repo are subject to the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md). | ||
Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. | ||
[![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/CONTRIBUTING.md) |
@@ -1,2 +0,2 @@ | ||
// @ts-expect-error | ||
// @ts-expect-error no types | ||
import SparseArray from 'sparse-array' | ||
@@ -14,9 +14,9 @@ import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' | ||
length: number | ||
compactArray: () => B[] | ||
get: (i: number) => B | ||
set: (i: number, value: B) => void | ||
reduce: <A> (fn: (acc: A, curr: B, index: number) => A, initial: A) => B | ||
find: (fn: (item: B) => boolean) => B | undefined | ||
bitField: () => number[] | ||
unset: (i: number) => void | ||
compactArray(): B[] | ||
get(i: number): B | ||
set(i: number, value: B): void | ||
reduce <A> (fn: (acc: A, curr: B, index: number) => A, initial: A): B | ||
find(fn: (item: B) => boolean): B | undefined | ||
bitField(): number[] | ||
unset(i: number): void | ||
} | ||
@@ -33,3 +33,3 @@ | ||
bits: number | ||
hash: (value: Uint8Array | InfiniteHash) => InfiniteHash | ||
hash(value: Uint8Array | InfiniteHash): InfiniteHash | ||
} | ||
@@ -55,9 +55,9 @@ | ||
async put (key: string, value: T) { | ||
async put (key: string, value: T): Promise<void> { | ||
const place = await this._findNewBucketAndPos(key) | ||
await place.bucket._putAt(place, key, value) | ||
place.bucket._putAt(place, key, value) | ||
} | ||
async get (key: string) { | ||
async get (key: string): Promise<T | undefined> { | ||
const child = await this._findChild(key) | ||
@@ -70,3 +70,3 @@ | ||
async del (key: string) { | ||
async del (key: string): Promise<void> { | ||
const place = await this._findPlace(key) | ||
@@ -92,7 +92,7 @@ const child = place.bucket._at(place.pos) | ||
childrenCount () { | ||
childrenCount (): number { | ||
return this._children.length | ||
} | ||
onlyChild () { | ||
onlyChild (): Bucket<T> | BucketChild<T> { | ||
return this._children.get(0) | ||
@@ -113,4 +113,4 @@ } | ||
serialize (map: (value: BucketChild<T>, index: number) => T, reduce: (reduced: any) => any) { | ||
const acc: T[] = [] | ||
serialize <M> (map: (value: BucketChild<T>, index: number) => M, reduce: (reduced: Bucket<T> | BucketChild<T>) => M): M { | ||
const acc: M[] = [] | ||
// serialize to a custom non-sparse representation | ||
@@ -129,19 +129,19 @@ return reduce(this._children.reduce((acc, child, index) => { | ||
async asyncTransform (asyncMap: (value: BucketChild<T>) => Promise<T[]>, asyncReduce: (reduced: any) => Promise<any>) { | ||
return await asyncTransformBucket(this, asyncMap, asyncReduce) | ||
async asyncTransform <R = T> (asyncMap: (value: BucketChild<T>) => Promise<T[]>, asyncReduce: (reduced: any) => Promise<R>): Promise<R> { | ||
return asyncTransformBucket(this, asyncMap, asyncReduce) | ||
} | ||
toJSON () { | ||
toJSON (): Record<string, any> { | ||
return this.serialize(mapNode, reduceNodes) | ||
} | ||
prettyPrint () { | ||
prettyPrint (): string { | ||
return JSON.stringify(this.toJSON(), null, ' ') | ||
} | ||
tableSize () { | ||
tableSize (): number { | ||
return Math.pow(2, this._options.bits) | ||
} | ||
async _findChild (key: string) { | ||
async _findChild (key: string): Promise<BucketChild<T> | undefined> { | ||
const result = await this._findPlace(key) | ||
@@ -168,3 +168,3 @@ const child = result.bucket._at(result.pos) | ||
if (child instanceof Bucket) { | ||
return await child._findPlace(hashValue) | ||
return child._findPlace(hashValue) | ||
} | ||
@@ -192,3 +192,3 @@ | ||
return await bucket._findNewBucketAndPos(place.hash) | ||
return bucket._findNewBucketAndPos(place.hash) | ||
} | ||
@@ -200,6 +200,6 @@ | ||
_putAt (place: BucketPosition<T>, key: string, value: T) { | ||
_putAt (place: BucketPosition<T>, key: string, value: T): void { | ||
this._putObjectAt(place.pos, { | ||
key: key, | ||
value: value, | ||
key, | ||
value, | ||
hash: place.hash | ||
@@ -209,3 +209,3 @@ }) | ||
_putObjectAt (pos: number, object: Bucket<T> | BucketChild<T>) { | ||
_putObjectAt (pos: number, object: Bucket<T> | BucketChild<T>): void { | ||
if (this._children.get(pos) == null) { | ||
@@ -217,3 +217,3 @@ this._popCount++ | ||
_delAt (pos: number) { | ||
_delAt (pos: number): void { | ||
if (pos === -1) { | ||
@@ -230,3 +230,3 @@ throw new Error('Invalid position') | ||
_level () { | ||
_level (): void { | ||
if (this._parent != null && this._popCount <= 1) { | ||
@@ -242,3 +242,3 @@ if (this._popCount === 1) { | ||
pos: this._posAtParent, | ||
hash: hash, | ||
hash, | ||
bucket: this._parent | ||
@@ -254,3 +254,3 @@ } | ||
_at (index: number) { | ||
_at (index: number): Bucket<T> | BucketChild<T> { | ||
return this._children.get(index) | ||
@@ -260,15 +260,15 @@ } | ||
function exists (o: any) { | ||
function exists (o: any): boolean { | ||
return Boolean(o) | ||
} | ||
function mapNode (node: any, _: number) { | ||
function mapNode (node: { key: string }, _: number): string { | ||
return node.key | ||
} | ||
function reduceNodes (nodes: any) { | ||
function reduceNodes <T> (nodes: T): any { | ||
return nodes | ||
} | ||
async function asyncTransformBucket<T> (bucket: Bucket<T>, asyncMap: (value: BucketChild<T>) => Promise<T[]>, asyncReduce: (reduced: any) => Promise<any>) { | ||
async function asyncTransformBucket <T, R = T> (bucket: Bucket<T>, asyncMap: (value: BucketChild<T>) => Promise<T[]>, asyncReduce: (reduced: any) => Promise<R>): Promise<R> { | ||
const output = [] | ||
@@ -289,3 +289,3 @@ | ||
return await asyncReduce(output) | ||
return asyncReduce(output) | ||
} |
@@ -34,11 +34,11 @@ const START_MASKS = [ | ||
availableBits () { | ||
availableBits (): number { | ||
return this._currentBitPos + 1 + this._currentBytePos * 8 | ||
} | ||
totalBits () { | ||
totalBits (): number { | ||
return this._value.length * 8 | ||
} | ||
take (bits: number) { | ||
take (bits: number): number { | ||
let pendingBits = bits | ||
@@ -65,3 +65,3 @@ let result = 0 | ||
untake (bits: number) { | ||
untake (bits: number): void { | ||
this._currentBitPos += bits | ||
@@ -74,3 +74,3 @@ while (this._currentBitPos > 7) { | ||
_haveBits () { | ||
_haveBits (): boolean { | ||
return this._currentBytePos >= 0 | ||
@@ -80,3 +80,3 @@ } | ||
function byteBitsToInt (byte: number, start: number, length: number) { | ||
function byteBitsToInt (byte: number, start: number, length: number): number { | ||
const mask = maskFor(start, length) | ||
@@ -86,4 +86,4 @@ return (byte & mask) >>> start | ||
function maskFor (start: number, length: number) { | ||
function maskFor (start: number, length: number): number { | ||
return START_MASKS[start] & STOP_MASKS[Math.min(length + start - 1, 7)] | ||
} |
@@ -0,6 +1,6 @@ | ||
import { concat as uint8ArrayConcat } from 'uint8arrays/concat' | ||
import { ConsumableBuffer } from './consumable-buffer.js' | ||
import { concat as uint8ArrayConcat } from 'uint8arrays/concat' | ||
export function wrapHash (hashFn: (value: Uint8Array) => Promise<Uint8Array>) { | ||
function hashing (value: InfiniteHash | Uint8Array) { | ||
export function wrapHash (hashFn: (value: Uint8Array) => Promise<Uint8Array>): (value: InfiniteHash | Uint8Array) => InfiniteHash { | ||
function hashing (value: InfiniteHash | Uint8Array): InfiniteHash { | ||
if (value instanceof InfiniteHash) { | ||
@@ -38,3 +38,3 @@ // already a hash. return it | ||
async take (bits: number) { | ||
async take (bits: number): Promise<number> { | ||
let pendingBits = bits | ||
@@ -64,3 +64,3 @@ | ||
untake (bits: number) { | ||
untake (bits: number): void { | ||
let pendingBits = bits | ||
@@ -82,3 +82,3 @@ | ||
async _produceMoreBits () { | ||
async _produceMoreBits (): Promise<void> { | ||
this._depth++ | ||
@@ -85,0 +85,0 @@ |
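For readers unfamiliar with the `InfiniteHash` idea, the sketch below illustrates the general technique only — deriving further digests on demand so a consumer can keep pulling hash bits — and is not the library's implementation (the real class tracks `_depth`, `_availableBits` and a buffer list, as the field declarations above show). The salting scheme here is made up for the illustration.

```TypeScript
// Hypothetical standalone illustration of an "infinite" bit supply — not
// hamt-sharding's implementation.
async function takeBits (
  hashFn: (value: Uint8Array) => Promise<Uint8Array>,
  value: Uint8Array,
  bits: number
): Promise<number[]> {
  const out: number[] = []
  let depth = 0
  let pool: number[] = []

  while (out.length < bits) {
    if (pool.length === 0) {
      // When the current digest is exhausted, derive another one.
      // Appending the depth is a hypothetical derivation for this sketch.
      const salted = new Uint8Array([...value, depth++])
      const digest = await hashFn(salted)
      pool = [...digest].flatMap(byte =>
        [7, 6, 5, 4, 3, 2, 1, 0].map(shift => (byte >> shift) & 1)
      )
    }
    out.push(pool.shift() as number)
  }

  return out
}
```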
@@ -0,11 +1,43 @@ | ||
/** | ||
* @packageDocumentation | ||
* | ||
* A [Hash Mapped Trie](https://en.wikipedia.org/wiki/Hash_array_mapped_trie) implementation for JavaScript. | ||
* | ||
* This is used by [@helia/unixfs](https://www.npmjs.com/package/@helia/unixfs) for it's HAMT-sharded directory implementation. | ||
* | ||
* @example | ||
* | ||
* ```TypeScript | ||
* import { createHAMT } from 'hamt-sharding' | ||
* import crypto from 'crypto-promise' | ||
* | ||
* // decide how to hash buffers made from keys, can return a Promise | ||
* const hashFn = async (buf) => { | ||
* return crypto | ||
* .createHash('sha256') | ||
* .update(buf) | ||
* .digest() | ||
* } | ||
* | ||
* const bucket = createHAMT({ | ||
* hashFn: hashFn | ||
* }) | ||
* | ||
* await bucket.put('key', 'value') | ||
* | ||
* const output = await bucket.get('key') | ||
* // output === 'value' | ||
* ``` | ||
*/ | ||
import { Bucket } from './bucket.js' | ||
import { wrapHash } from './consumable-hash.js' | ||
import type { BucketOptions, BucketPosition, BucketChild } from './bucket.js' | ||
import { wrapHash } from './consumable-hash.js' | ||
interface UserBucketOptions { | ||
hashFn: (value: Uint8Array) => Promise<Uint8Array> | ||
hashFn(value: Uint8Array): Promise<Uint8Array> | ||
bits?: number | ||
} | ||
export function createHAMT<T> (options: UserBucketOptions) { | ||
export function createHAMT<T> (options: UserBucketOptions): Bucket<T> { | ||
if (options == null || options.hashFn == null) { | ||
@@ -12,0 +44,0 @@ throw new Error('please define an options.hashFn') |
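With the explicit `Bucket<T>` return type on `createHAMT`, the value type is visible at the call site and flows through to `get`; a short sketch (reusing an abstract `hashFn` like the ones above):

```TypeScript
import { createHAMT } from 'hamt-sharding'

declare const hashFn: (value: Uint8Array) => Promise<Uint8Array>

const bucket = createHAMT<number>({ hashFn })

await bucket.put('answer', 42)

// get() is declared as Promise<T | undefined>, so this resolves to number | undefined
const answer = await bucket.get('answer')
console.info(answer) // 42
```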
+ Added multiformats@13.3.1 (transitive)
+ Added uint8arrays@5.1.0 (transitive)
- Removed multiformats@12.1.3 (transitive)
- Removed uint8arrays@4.0.10 (transitive)
Updated uint8arrays@^5.0.1