ipfs-unixfs-exporter - npm package version comparison

Comparing version 13.0.6 to 13.1.0


dist/src/index.d.ts

@@ -6,4 +6,29 @@ import { CID } from 'multiformats/cid';

 import type { Bucket } from 'hamt-sharding';
-import type { ProgressOptions } from 'progress-events';
-export interface ExporterOptions extends ProgressOptions {
+import type { ProgressOptions, ProgressEvent } from 'progress-events';
+export interface ExportProgress {
+    /**
+     * How many bytes of the file have been read
+     */
+    bytesRead: bigint;
+    /**
+     * How many bytes of the file will be read - n.b. this may be
+     * smaller than `fileSize` if `offset`/`length` have been
+     * specified
+     */
+    totalBytes: bigint;
+    /**
+     * The size of the file being read - n.b. this may be
+     * larger than `totalBytes` if `offset`/`length` has been
+     * specified
+     */
+    fileSize: bigint;
+}
+export interface ExportWalk {
+    cid: CID;
+}
+/**
+ * Progress events emitted by the exporter
+ */
+export type ExporterProgressEvents = ProgressEvent<'unixfs:exporter:progress:unixfs:file', ExportProgress> | ProgressEvent<'unixfs:exporter:progress:unixfs:raw', ExportProgress> | ProgressEvent<'unixfs:exporter:progress:raw', ExportProgress> | ProgressEvent<'unixfs:exporter:progress:identity', ExportProgress> | ProgressEvent<'unixfs:exporter:walk:file', ExportWalk> | ProgressEvent<'unixfs:exporter:walk:directory', ExportWalk> | ProgressEvent<'unixfs:exporter:walk:hamt-sharded-directory', ExportWalk> | ProgressEvent<'unixfs:exporter:walk:raw', ExportWalk>;
+export interface ExporterOptions extends ProgressOptions<ExporterProgressEvents> {
     offset?: number;

@@ -10,0 +35,0 @@ length?: number;
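The union above is what `ExporterOptions['onProgress']` now receives, so consumers can switch on `event.type` and get a correctly narrowed `detail`. A minimal consumer sketch, not part of this diff; `cid` and `blockstore` are assumed to already exist in the caller's scope:

import { exporter } from 'ipfs-unixfs-exporter'
import type { ExporterProgressEvents } from 'ipfs-unixfs-exporter'

const entry = await exporter(cid, blockstore)

if (entry.type === 'file') {
  for await (const chunk of entry.content({
    onProgress: (evt: ExporterProgressEvents) => {
      // the literal event type discriminates the union, so `detail`
      // narrows to ExportProgress here
      if (evt.type === 'unixfs:exporter:progress:unixfs:file') {
        console.info(`read ${evt.detail.bytesRead} of ${evt.detail.totalBytes} bytes`)
      }
    }
  })) {
    // consume chunk (a Uint8Array)
  }
}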

dist/src/resolvers/identity.js

@@ -5,6 +5,13 @@ import errCode from 'err-code';
 import * as mh from 'multiformats/hashes/digest';
+import { CustomProgressEvent } from 'progress-events';
 const rawContent = (node) => {
     async function* contentGenerator(options = {}) {
         const { offset, length } = validateOffsetAndLength(node.length, options.offset, options.length);
-        yield extractDataFromBlock(node, 0n, offset, offset + length);
+        const buf = extractDataFromBlock(node, 0n, offset, offset + length);
+        options.onProgress?.(new CustomProgressEvent('unixfs:exporter:progress:identity', {
+            bytesRead: BigInt(buf.byteLength),
+            totalBytes: length - offset,
+            fileSize: BigInt(node.byteLength)
+        }));
+        yield buf;
     }

@@ -11,0 +18,0 @@ return contentGenerator;

dist/src/resolvers/raw.js

@@ -1,8 +1,15 @@
 import errCode from 'err-code';
 import extractDataFromBlock from '../utils/extract-data-from-block.js';
 import validateOffsetAndLength from '../utils/validate-offset-and-length.js';
+import { CustomProgressEvent } from 'progress-events';
 const rawContent = (node) => {
     async function* contentGenerator(options = {}) {
         const { offset, length } = validateOffsetAndLength(node.length, options.offset, options.length);
-        yield extractDataFromBlock(node, 0n, offset, offset + length);
+        const buf = extractDataFromBlock(node, 0n, offset, offset + length);
+        options.onProgress?.(new CustomProgressEvent('unixfs:exporter:progress:raw', {
+            bytesRead: BigInt(buf.byteLength),
+            totalBytes: length - offset,
+            fileSize: BigInt(node.byteLength)
+        }));
+        yield buf;
     }

@@ -9,0 +16,0 @@ return contentGenerator;

dist/src/resolvers/unixfs-v1/content/directory.js

@@ -5,2 +5,3 @@ import parallel from 'it-parallel';
 import filter from 'it-filter';
+import { CustomProgressEvent } from 'progress-events';
 const directoryContent = (cid, node, unixfs, path, resolve, depth, blockstore) => {

@@ -11,2 +12,5 @@ async function* yieldDirectoryContent(options = {}) {
         const links = node.Links.slice(offset, length);
+        options.onProgress?.(new CustomProgressEvent('unixfs:exporter:walk:directory', {
+            cid
+        }));
         yield* pipe(links, source => map(source, link => {

@@ -13,0 +17,0 @@ return async () => {

dist/src/resolvers/unixfs-v1/content/file.js

@@ -12,2 +12,3 @@ import extractDataFromBlock from '../../../utils/extract-data-from-block.js';
 import PQueue from 'p-queue';
+import { CustomProgressEvent } from 'progress-events';
 async function walkDAG(blockstore, node, queue, streamPosition, start, end, options) {

@@ -92,2 +93,5 @@ // a `raw` node
         void childQueue.add(async () => {
+            options.onProgress?.(new CustomProgressEvent('unixfs:exporter:walk:file', {
+                cid: link.Hash
+            }));
             await walkDAG(blockstore, child, queue, blockStart, start, end, options);

@@ -111,6 +115,9 @@ });
         let read = 0n;
+        const wanted = length - offset;
         const queue = pushable();
+        options.onProgress?.(new CustomProgressEvent('unixfs:exporter:walk:file', {
+            cid
+        }));
         void walkDAG(blockstore, node, queue, 0n, offset, offset + length, options)
             .then(() => {
-            const wanted = length - offset;
             if (read < wanted) {

@@ -135,2 +142,7 @@ throw errCode(new Error('Traversed entire DAG but did not read enough bytes'), 'ERR_UNDER_READ');
         }
+        options.onProgress?.(new CustomProgressEvent('unixfs:exporter:progress:unixfs:file', {
+            bytesRead: read,
+            totalBytes: wanted,
+            fileSize
+        }));
         yield buf;

@@ -137,0 +149,0 @@ }
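Note the distinction the new ExportProgress fields draw: `totalBytes` is how much the current export will read once `offset`/`length` are applied, while `fileSize` is the size of the whole file. A sketch of a completion calculation built on that distinction (the helper name is illustrative, not part of the package):

import type { ExportProgress } from 'ipfs-unixfs-exporter'

// Illustrative helper: percentage of the requested range that has been read.
// Divide by totalBytes rather than fileSize - a ranged read of a large file
// would otherwise never appear to approach 100%.
function percentComplete (progress: ExportProgress): number {
  if (progress.totalBytes === 0n) {
    return 100
  }

  return Number((progress.bytesRead * 100n) / progress.totalBytes)
}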

dist/src/resolvers/unixfs-v1/content/hamt-sharded-directory.js

@@ -5,4 +5,8 @@ import parallel from 'it-parallel';
 import { decode } from '@ipld/dag-pb';
+import { CustomProgressEvent } from 'progress-events';
 const hamtShardedDirectoryContent = (cid, node, unixfs, path, resolve, depth, blockstore) => {
     function yieldHamtDirectoryContent(options = {}) {
+        options.onProgress?.(new CustomProgressEvent('unixfs:exporter:walk:hamt-sharded-directory', {
+            cid
+        }));
         return listDirectory(node, path, resolve, depth, blockstore, options);

@@ -25,2 +29,5 @@ }
             node = decode(block);
+            options.onProgress?.(new CustomProgressEvent('unixfs:exporter:walk:hamt-sharded-directory', {
+                cid: link.Hash
+            }));
             return { entries: listDirectory(node, path, resolve, depth, blockstore, options) };

@@ -27,0 +34,0 @@ }

dist/src/resolvers/unixfs-v1/content/raw.js

@@ -1,1 +1,2 @@
+import { CustomProgressEvent } from 'progress-events';
 import extractDataFromBlock from '../../../utils/extract-data-from-block.js';

@@ -10,3 +11,12 @@ import validateOffsetAndLength from '../../../utils/validate-offset-and-length.js';
         const { offset, length } = validateOffsetAndLength(size, options.offset, options.length);
-        yield extractDataFromBlock(unixfs.data, 0n, offset, offset + length);
+        options.onProgress?.(new CustomProgressEvent('unixfs:exporter:walk:raw', {
+            cid
+        }));
+        const buf = extractDataFromBlock(unixfs.data, 0n, offset, offset + length);
+        options.onProgress?.(new CustomProgressEvent('unixfs:exporter:progress:unixfs:raw', {
+            bytesRead: BigInt(buf.byteLength),
+            totalBytes: length - offset,
+            fileSize: BigInt(unixfs.data.byteLength)
+        }));
+        yield buf;
     }

@@ -13,0 +23,0 @@ return yieldRawContent;
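The walk events carry only the CID of the DAG node being entered, which makes them useful for tracing traversal rather than measuring bytes. A sketch using the package's `recursive` export to record which directory nodes a listing touches, assuming options propagate to the directory resolvers as the diff suggests; `rootCid` and `blockstore` are assumed to exist in the caller's scope:

import { recursive } from 'ipfs-unixfs-exporter'
import type { ExporterProgressEvents } from 'ipfs-unixfs-exporter'
import type { CID } from 'multiformats/cid'

const visited: CID[] = []

for await (const entry of recursive(rootCid, blockstore, {
  onProgress: (evt: ExporterProgressEvents) => {
    if (evt.type === 'unixfs:exporter:walk:directory' ||
        evt.type === 'unixfs:exporter:walk:hamt-sharded-directory') {
      // evt.detail is an ExportWalk here
      visited.push(evt.detail.cid)
    }
  }
})) {
  console.info(entry.path)
}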

@@ -1,2 +1,4 @@
 {
+  "ExportProgress": "https://ipfs.github.io/js-ipfs-unixfs/interfaces/ipfs_unixfs_exporter.ExportProgress.html",
+  "ExportWalk": "https://ipfs.github.io/js-ipfs-unixfs/interfaces/ipfs_unixfs_exporter.ExportWalk.html",
   "Exportable": "https://ipfs.github.io/js-ipfs-unixfs/interfaces/ipfs_unixfs_exporter.Exportable.html",

@@ -15,2 +17,3 @@ "ExporterOptions": "https://ipfs.github.io/js-ipfs-unixfs/interfaces/ipfs_unixfs_exporter.ExporterOptions.html",
   "UnixfsV1Resolver": "https://ipfs.github.io/js-ipfs-unixfs/interfaces/ipfs_unixfs_exporter.UnixfsV1Resolver.html",
+  "ExporterProgressEvents": "https://ipfs.github.io/js-ipfs-unixfs/types/ipfs_unixfs_exporter.ExporterProgressEvents.html",
   "ReadableStorage": "https://ipfs.github.io/js-ipfs-unixfs/types/ipfs_unixfs_exporter.ReadableStorage.html",

@@ -17,0 +20,0 @@ "UnixFSEntry": "https://ipfs.github.io/js-ipfs-unixfs/types/ipfs_unixfs_exporter.UnixFSEntry.html",


package.json
@@ -1,4 +1,4 @@
 {
   "name": "ipfs-unixfs-exporter",
-  "version": "13.0.6",
+  "version": "13.1.0",
   "description": "JavaScript implementation of the UnixFs exporter used by IPFS",

@@ -5,0 +5,0 @@ "license": "Apache-2.0 OR MIT",

src/index.ts

@@ -9,5 +9,43 @@ import errCode from 'err-code'
 import type { Bucket } from 'hamt-sharding'
-import type { ProgressOptions } from 'progress-events'
+import type { ProgressOptions, ProgressEvent } from 'progress-events'
 
-export interface ExporterOptions extends ProgressOptions {
+export interface ExportProgress {
+  /**
+   * How many bytes of the file have been read
+   */
+  bytesRead: bigint
+
+  /**
+   * How many bytes of the file will be read - n.b. this may be
+   * smaller than `fileSize` if `offset`/`length` have been
+   * specified
+   */
+  totalBytes: bigint
+
+  /**
+   * The size of the file being read - n.b. this may be
+   * larger than `totalBytes` if `offset`/`length` has been
+   * specified
+   */
+  fileSize: bigint
+}
+
+export interface ExportWalk {
+  cid: CID
+}
+
+/**
+ * Progress events emitted by the exporter
+ */
+export type ExporterProgressEvents =
+  ProgressEvent<'unixfs:exporter:progress:unixfs:file', ExportProgress> |
+  ProgressEvent<'unixfs:exporter:progress:unixfs:raw', ExportProgress> |
+  ProgressEvent<'unixfs:exporter:progress:raw', ExportProgress> |
+  ProgressEvent<'unixfs:exporter:progress:identity', ExportProgress> |
+  ProgressEvent<'unixfs:exporter:walk:file', ExportWalk> |
+  ProgressEvent<'unixfs:exporter:walk:directory', ExportWalk> |
+  ProgressEvent<'unixfs:exporter:walk:hamt-sharded-directory', ExportWalk> |
+  ProgressEvent<'unixfs:exporter:walk:raw', ExportWalk>
+
+export interface ExporterOptions extends ProgressOptions<ExporterProgressEvents> {
   offset?: number

@@ -14,0 +52,0 @@ length?: number

src/resolvers/identity.ts

@@ -5,3 +5,4 @@ import errCode from 'err-code'
 import * as mh from 'multiformats/hashes/digest'
-import type { ExporterOptions, Resolver } from '../index.js'
+import type { ExporterOptions, Resolver, ExportProgress } from '../index.js'
+import { CustomProgressEvent } from 'progress-events'
 

@@ -15,3 +16,11 @@ const rawContent = (node: Uint8Array): ((options?: ExporterOptions) => AsyncGenerator<Uint8Array, void, undefined>) => {
 
-    yield extractDataFromBlock(node, 0n, offset, offset + length)
+    const buf = extractDataFromBlock(node, 0n, offset, offset + length)
+
+    options.onProgress?.(new CustomProgressEvent<ExportProgress>('unixfs:exporter:progress:identity', {
+      bytesRead: BigInt(buf.byteLength),
+      totalBytes: length - offset,
+      fileSize: BigInt(node.byteLength)
+    }))
+
+    yield buf
   }

@@ -18,0 +27,0 @@

src/resolvers/raw.ts

@@ -1,4 +1,5 @@
 import errCode from 'err-code'
-import type { ExporterOptions, Resolver } from '../index.js'
+import type { ExporterOptions, Resolver, ExportProgress } from '../index.js'
 import extractDataFromBlock from '../utils/extract-data-from-block.js'
 import validateOffsetAndLength from '../utils/validate-offset-and-length.js'
+import { CustomProgressEvent } from 'progress-events'

@@ -13,3 +14,11 @@ const rawContent = (node: Uint8Array): ((options?: ExporterOptions) => AsyncGenerator<Uint8Array, void, undefined>) => {
 
-    yield extractDataFromBlock(node, 0n, offset, offset + length)
+    const buf = extractDataFromBlock(node, 0n, offset, offset + length)
+
+    options.onProgress?.(new CustomProgressEvent<ExportProgress>('unixfs:exporter:progress:raw', {
+      bytesRead: BigInt(buf.byteLength),
+      totalBytes: length - offset,
+      fileSize: BigInt(node.byteLength)
+    }))
+
+    yield buf
   }

@@ -16,0 +25,0 @@

src/resolvers/unixfs-v1/content/directory.ts

@@ -5,3 +5,4 @@ import parallel from 'it-parallel'
 import filter from 'it-filter'
-import type { ExporterOptions, UnixfsV1DirectoryContent, UnixfsV1Resolver } from '../../../index.js'
+import type { ExporterOptions, ExportWalk, UnixfsV1DirectoryContent, UnixfsV1Resolver } from '../../../index.js'
+import { CustomProgressEvent } from 'progress-events'
 

@@ -14,2 +15,6 @@ const directoryContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, depth, blockstore) => {
 
+    options.onProgress?.(new CustomProgressEvent<ExportWalk>('unixfs:exporter:walk:directory', {
+      cid
+    }))
+
     yield * pipe(

@@ -16,0 +21,0 @@ links,

src/resolvers/unixfs-v1/content/file.ts

@@ -12,3 +12,4 @@ import extractDataFromBlock from '../../../utils/extract-data-from-block.js'
 import PQueue from 'p-queue'
-import type { ExporterOptions, UnixfsV1FileContent, UnixfsV1Resolver, ReadableStorage } from '../../../index.js'
+import type { ExporterOptions, UnixfsV1FileContent, UnixfsV1Resolver, ReadableStorage, ExportProgress, ExportWalk } from '../../../index.js'
+import { CustomProgressEvent } from 'progress-events'
 

@@ -114,2 +115,6 @@ async function walkDAG (blockstore: ReadableStorage, node: dagPb.PBNode | Uint8Array, queue: Pushable<Uint8Array>, streamPosition: bigint, start: bigint, end: bigint, options: ExporterOptions): Promise<void> {
       void childQueue.add(async () => {
+        options.onProgress?.(new CustomProgressEvent<ExportWalk>('unixfs:exporter:walk:file', {
+          cid: link.Hash
+        }))
+
         await walkDAG(blockstore, child, queue, blockStart, start, end, options)

@@ -143,8 +148,11 @@ })
     let read = 0n
+    const wanted = length - offset
     const queue = pushable()
 
+    options.onProgress?.(new CustomProgressEvent<ExportWalk>('unixfs:exporter:walk:file', {
+      cid
+    }))
+
     void walkDAG(blockstore, node, queue, 0n, offset, offset + length, options)
       .then(() => {
-        const wanted = length - offset
-
         if (read < wanted) {

@@ -175,2 +183,8 @@ throw errCode(new Error('Traversed entire DAG but did not read enough bytes'), 'ERR_UNDER_READ')
 
+      options.onProgress?.(new CustomProgressEvent<ExportProgress>('unixfs:exporter:progress:unixfs:file', {
+        bytesRead: read,
+        totalBytes: wanted,
+        fileSize
+      }))
+
       yield buf

@@ -177,0 +191,0 @@ }

src/resolvers/unixfs-v1/content/hamt-sharded-directory.ts

@@ -5,6 +5,11 @@ import parallel from 'it-parallel'
 import { decode, PBNode } from '@ipld/dag-pb'
-import type { ExporterOptions, Resolve, UnixfsV1DirectoryContent, UnixfsV1Resolver, ReadableStorage } from '../../../index.js'
+import type { ExporterOptions, Resolve, UnixfsV1DirectoryContent, UnixfsV1Resolver, ReadableStorage, ExportWalk } from '../../../index.js'
+import { CustomProgressEvent } from 'progress-events'
 
 const hamtShardedDirectoryContent: UnixfsV1Resolver = (cid, node, unixfs, path, resolve, depth, blockstore) => {
   function yieldHamtDirectoryContent (options: ExporterOptions = {}): UnixfsV1DirectoryContent {
+    options.onProgress?.(new CustomProgressEvent<ExportWalk>('unixfs:exporter:walk:hamt-sharded-directory', {
+      cid
+    }))
+
     return listDirectory(node, path, resolve, depth, blockstore, options)

@@ -34,2 +39,6 @@ }
       node = decode(block)
+      options.onProgress?.(new CustomProgressEvent<ExportWalk>('unixfs:exporter:walk:hamt-sharded-directory', {
+        cid: link.Hash
+      }))
+
       return { entries: listDirectory(node, path, resolve, depth, blockstore, options) }

@@ -36,0 +45,0 @@ }

src/resolvers/unixfs-v1/content/raw.ts

@@ -1,2 +1,3 @@
-import type { ExporterOptions, UnixfsV1Resolver } from '../../../index.js'
+import { CustomProgressEvent } from 'progress-events'
+import type { ExporterOptions, ExportProgress, ExportWalk, UnixfsV1Resolver } from '../../../index.js'
 import extractDataFromBlock from '../../../utils/extract-data-from-block.js'

@@ -18,3 +19,15 @@ import validateOffsetAndLength from '../../../utils/validate-offset-and-length.js'
 
-    yield extractDataFromBlock(unixfs.data, 0n, offset, offset + length)
+    options.onProgress?.(new CustomProgressEvent<ExportWalk>('unixfs:exporter:walk:raw', {
+      cid
+    }))
+
+    const buf = extractDataFromBlock(unixfs.data, 0n, offset, offset + length)
+
+    options.onProgress?.(new CustomProgressEvent<ExportProgress>('unixfs:exporter:progress:unixfs:raw', {
+      bytesRead: BigInt(buf.byteLength),
+      totalBytes: length - offset,
+      fileSize: BigInt(unixfs.data.byteLength)
+    }))
+
+    yield buf
   }

@@ -21,0 +34,0 @@

(The diffs of the remaining changed files are too big to display or not yet supported.)
