ipfs-unixfs-importer
Comparing version 9.0.4 to 9.0.5
@@ -12,9 +12,6 @@ 'use strict';
 var trickle = require('./trickle.js');
+var bufferImporter = require('./buffer-importer.js');
 function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
-function _interopNamespaceDefaultOnly(e) {
-  return Object.freeze({__proto__: null, 'default': e});
-}
 function _interopNamespace(e) {
@@ -53,9 +50,9 @@ if (e && e.__esModule) return e;
 let previous;
-let bufferImporter;
+let bufferImporter$1;
 if (typeof options.bufferImporter === 'function') {
-  bufferImporter = options.bufferImporter;
+  bufferImporter$1 = options.bufferImporter;
 } else {
-  bufferImporter = (await Promise.resolve().then(function () { return /*#__PURE__*/_interopNamespaceDefaultOnly(require('./buffer-importer.js')); })).default;
+  bufferImporter$1 = bufferImporter;
 }
-for await (const entry of parallelBatch__default['default'](bufferImporter(file, blockstore, options), options.blockWriteConcurrency)) {
+for await (const entry of parallelBatch__default['default'](bufferImporter$1(file, blockstore, options), options.blockWriteConcurrency)) {
 count++;
@@ -62,0 +59,0 @@ if (count === 0) {
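The pattern in this hunk repeats throughout the release: 9.0.4 loaded each default implementation lazily through a promise-wrapped require, while 9.0.5 requires it once at module scope (which is why rollup renames the local variable to bufferImporter$1 to avoid shadowing). A minimal standalone sketch of the before/after, with illustrative function names and module path, and without the interop helpers:

'use strict';

// 9.0.4 style: the default implementation is pulled in lazily, on first use
async function pickLazy (options) {
  return typeof options.bufferImporter === 'function'
    ? options.bufferImporter
    : (await Promise.resolve().then(() => require('./buffer-importer.js')));
}

// 9.0.5 style: the default implementation is required eagerly at module scope,
// so selection is synchronous and bundlers see a static dependency
const bufferImporter = require('./buffer-importer.js');
function pickEager (options) {
  return typeof options.bufferImporter === 'function'
    ? options.bufferImporter
    : bufferImporter;
}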
@@ -6,9 +6,8 @@ 'use strict';
 var errCode = require('err-code');
+var rabin = require('../chunker/rabin.js');
+var fixedSize = require('../chunker/fixed-size.js');
+var validateChunks = require('./validate-chunks.js');
 function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
-function _interopNamespaceDefaultOnly(e) {
-  return Object.freeze({__proto__: null, 'default': e});
-}
 var errCode__default = /*#__PURE__*/_interopDefaultLegacy(errCode);
@@ -53,5 +52,5 @@
 } else if (options.chunker === 'rabin') {
-  chunker = (await Promise.resolve().then(function () { return /*#__PURE__*/_interopNamespaceDefaultOnly(require('../chunker/rabin.js')); })).default;
+  chunker = rabin;
 } else {
-  chunker = (await Promise.resolve().then(function () { return /*#__PURE__*/_interopNamespaceDefaultOnly(require('../chunker/fixed-size.js')); })).default;
+  chunker = fixedSize;
 }
@@ -62,3 +61,3 @@ let chunkValidator;
 } else {
-  chunkValidator = (await Promise.resolve().then(function () { return /*#__PURE__*/_interopNamespaceDefaultOnly(require('./validate-chunks.js')); })).default;
+  chunkValidator = validateChunks;
 }
@@ -65,0 +64,0 @@ const file = {
@@ -7,9 +7,7 @@ 'use strict';
 var options = require('./options.js');
+var index = require('./dag-builder/index.js');
+var treeBuilder = require('./tree-builder.js');
 function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
-function _interopNamespaceDefaultOnly(e) {
-  return Object.freeze({__proto__: null, 'default': e});
-}
 var parallelBatch__default = /*#__PURE__*/_interopDefaultLegacy(parallelBatch);
@@ -23,9 +21,9 @@
 } else {
-  dagBuilder = (await Promise.resolve().then(function () { return /*#__PURE__*/_interopNamespaceDefaultOnly(require('./dag-builder/index.js')); })).default;
+  dagBuilder = index;
 }
-let treeBuilder;
+let treeBuilder$1;
 if (typeof options$1.treeBuilder === 'function') {
-  treeBuilder = options$1.treeBuilder;
+  treeBuilder$1 = options$1.treeBuilder;
 } else {
-  treeBuilder = (await Promise.resolve().then(function () { return /*#__PURE__*/_interopNamespaceDefaultOnly(require('./tree-builder.js')); })).default;
+  treeBuilder$1 = treeBuilder;
 }
@@ -38,3 +36,3 @@ let candidates;
 }
-for await (const entry of treeBuilder(parallelBatch__default['default'](dagBuilder(candidates, blockstore, opts), opts.fileImportConcurrency), blockstore, opts)) {
+for await (const entry of treeBuilder$1(parallelBatch__default['default'](dagBuilder(candidates, blockstore, opts), opts.fileImportConcurrency), blockstore, opts)) {
 yield {
@@ -41,0 +39,0 @@ cid: entry.cid,
@@ -14,2 +14,3 @@ import errCode from 'err-code';
 import dagTrickle from './trickle.js';
+import bufferImporterFn from './buffer-importer.js';
 const dagBuilders = {
@@ -27,3 +28,3 @@ flat: dagFlat,
 } else {
-  bufferImporter = (await import('./buffer-importer.js')).default;
+  bufferImporter = bufferImporterFn;
 }
@@ -30,0 +31,0 @@ for await (const entry of parallelBatch(bufferImporter(file, blockstore, options), options.blockWriteConcurrency)) {
 import dirBuilder from './dir.js';
 import fileBuilder from './file/index.js';
 import errCode from 'err-code';
+import rabin from '../chunker/rabin.js';
+import fixedSize from '../chunker/fixed-size.js';
+import validateChunks from './validate-chunks.js';
 function isIterable(thing) {
@@ -41,5 +44,5 @@ return Symbol.iterator in thing;
 } else if (options.chunker === 'rabin') {
-  chunker = (await import('../chunker/rabin.js')).default;
+  chunker = rabin;
 } else {
-  chunker = (await import('../chunker/fixed-size.js')).default;
+  chunker = fixedSize;
 }
@@ -50,3 +53,3 @@ let chunkValidator;
 } else {
-  chunkValidator = (await import('./validate-chunks.js')).default;
+  chunkValidator = validateChunks;
 }
@@ -53,0 +56,0 @@ const file = {
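Anything callable passes the `typeof options.chunker === 'function'` branch above, so the two built-in strategies can be swapped for a user-defined one. A hedged sketch of a custom chunker matching the Chunker interface from the typings further down (the 512-byte boundary and the function name are illustrative):

// Re-slices the incoming byte stream into fixed 512-byte chunks.
async function * tinyChunker (source, options) {
  let buffered = new Uint8Array(0)
  for await (const chunk of source) {
    // append the new bytes to whatever is left over from the last iteration
    const next = new Uint8Array(buffered.length + chunk.length)
    next.set(buffered, 0)
    next.set(chunk, buffered.length)
    buffered = next
    while (buffered.length >= 512) {
      yield buffered.subarray(0, 512)
      buffered = buffered.subarray(512)
    }
  }
  if (buffered.length > 0) {
    yield buffered // trailing partial chunk
  }
}
// Used as: importer(source, blockstore, { chunker: tinyChunker })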
 import parallelBatch from 'it-parallel-batch';
 import defaultOptions from './options.js';
+import dagBuilderFn from './dag-builder/index.js';
+import treeBuilderFn from './tree-builder.js';
 export async function* importer(source, blockstore, options = {}) {
@@ -9,3 +11,3 @@ const opts = defaultOptions(options);
 } else {
-  dagBuilder = (await import('./dag-builder/index.js')).default;
+  dagBuilder = dagBuilderFn;
 }
@@ -16,3 +18,3 @@ let treeBuilder;
 } else {
-  treeBuilder = (await import('./tree-builder.js')).default;
+  treeBuilder = treeBuilderFn;
 }
@@ -19,0 +21,0 @@ let candidates;
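The ESM build mirrors the CJS change: the `await import()` fallbacks become static imports under aliased names, so the default builders are always part of the module graph and bundlers no longer need dynamic-import support (or emit split chunks) for this package. A standalone sketch of the new shape, not the real module (the exported name and module path are illustrative):

// Eagerly imported default; './tree-builder.js' stands in for the real module
import treeBuilderFn from './tree-builder.js'

export async function * importerSketch (source, blockstore, options = {}) {
  // a user-supplied treeBuilder still wins; the fallback is now synchronous
  const treeBuilder = typeof options.treeBuilder === 'function'
    ? options.treeBuilder
    : treeBuilderFn
  yield * treeBuilder(source, blockstore, options)
}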
 {
   "name": "ipfs-unixfs-importer",
-  "version": "9.0.4",
+  "version": "9.0.5",
   "description": "JavaScript implementation of the UnixFs importer used by IPFS",
@@ -60,3 +60,3 @@ "leadMaintainer": "Alex Potsides <alex.potsides@protocol.ai>",
   "interface-blockstore": "^1.0.0",
-  "ipfs-unixfs": "^6.0.4",
+  "ipfs-unixfs": "^6.0.5",
   "it-all": "^1.0.5",
@@ -78,3 +78,3 @@ "it-batch": "^1.0.8",
   },
-  "gitHead": "2713329ef7db782ef880cedb2aa4784f4ebe0f9a",
+  "gitHead": "0f9092e49deb6cbad08b38651be17da93486c00a",
   "exports": {
@@ -81,0 +81,0 @@ ".": {
@@ -1,135 +1,145 @@
-import { UnixFS, Mtime } from 'ipfs-unixfs'
-import { CID, CIDVersion } from 'multiformats/cid'
-import { MultihashHasher } from 'multiformats/hashes/interface'
-import { BlockCodec } from 'multiformats/codecs/interface'
-import { Blockstore } from 'interface-blockstore'
-interface ImportCandidate {
-  path?: string
-  content?: AsyncIterable<Uint8Array> | Iterable<Uint8Array> | Uint8Array
-  mtime?: Mtime
-  mode?: number
+import type { UnixFS, Mtime } from 'ipfs-unixfs';
+import type { CID, CIDVersion } from 'multiformats/cid';
+import type { MultihashHasher } from 'multiformats/hashes/interface';
+import type { BlockCodec } from 'multiformats/codecs/interface';
+import type { Blockstore } from 'interface-blockstore';
+export interface ImportCandidate {
+  path?: string;
+  content?: AsyncIterable<Uint8Array> | Iterable<Uint8Array> | Uint8Array;
+  mtime?: Mtime;
+  mode?: number;
 }
-interface File {
-  content: AsyncIterable<Uint8Array>
-  path?: string
-  mtime?: Mtime
-  mode?: number
+export interface File {
+  content: AsyncIterable<Uint8Array>;
+  path?: string;
+  mtime?: Mtime;
+  mode?: number;
 }
-interface Directory {
-  path?: string
-  mtime?: Mtime
-  mode?: number
+export interface Directory {
+  path?: string;
+  mtime?: Mtime;
+  mode?: number;
 }
-interface ImportResult {
-  cid: CID
-  size: number
-  path?: string
-  unixfs?: UnixFS
+export interface ImportResult {
+  cid: CID;
+  size: number;
+  path?: string;
+  unixfs?: UnixFS;
 }
-interface InProgressImportResult extends ImportResult {
-  single?: boolean
+export interface InProgressImportResult extends ImportResult {
+  single?: boolean;
 }
-type ChunkerType = 'fixed' | 'rabin'
-interface ProgressHandler { (chunkSize: number, path?: string): void }
-interface HamtHashFn { (value: Uint8Array): Promise<Uint8Array> }
-interface Chunker { (source: AsyncIterable<Uint8Array>, options: ImporterOptions): AsyncIterable<Uint8Array> }
-interface DAGBuilder { (source: AsyncIterable<ImportCandidate> | Iterable<ImportCandidate>, blockstore: Blockstore, options: ImporterOptions): AsyncIterable<() => Promise<InProgressImportResult>> }
-interface TreeBuilder { (source: AsyncIterable<InProgressImportResult>, blockstore: Blockstore, options: ImporterOptions): AsyncIterable<ImportResult> }
-interface BufferImporter { (file: File, blockstore: Blockstore, options: ImporterOptions): AsyncIterable<() => Promise<InProgressImportResult>> }
-interface ChunkValidator { (source: AsyncIterable<Uint8Array>, options: ImporterOptions): AsyncIterable<Uint8Array> }
-interface UnixFSV1DagBuilder<T> { (item: T, blockstore: Blockstore, options: ImporterOptions): Promise<InProgressImportResult> }
-interface Reducer { (leaves: InProgressImportResult[]): Promise<InProgressImportResult> }
-interface FileDAGBuilder { (source: AsyncIterable<InProgressImportResult> | Iterable<InProgressImportResult>, reducer: Reducer, options: ImporterOptions): Promise<InProgressImportResult> }
-interface UserImporterOptions {
-  strategy?: 'balanced' | 'flat' | 'trickle'
-  rawLeaves?: boolean
-  onlyHash?: boolean
-  reduceSingleLeafToSelf?: boolean
-  hasher?: MultihashHasher
-  leafType?: 'file' | 'raw'
-  cidVersion?: CIDVersion
-  progress?: ProgressHandler
-  shardSplitThreshold?: number
-  fileImportConcurrency?: number
-  blockWriteConcurrency?: number
-  minChunkSize?: number
-  maxChunkSize?: number
-  avgChunkSize?: number
-  window?: number
-  polynomial?: number
-  maxChildrenPerNode?: number
-  layerRepeat?: number
-  wrapWithDirectory?: boolean
-  recursive?: boolean
-  hidden?: boolean
-  timeout?: number
-  hamtHashFn?: HamtHashFn
-  hamtBucketBits?: number
-  hamtHashCode?: number
-  chunker?: ChunkerType | Chunker
-  dagBuilder?: DAGBuilder
-  treeBuilder?: TreeBuilder
-  bufferImporter?: BufferImporter
-  chunkValidator?: ChunkValidator
+export declare type ChunkerType = 'fixed' | 'rabin';
+export interface ProgressHandler {
+  (chunkSize: number, path?: string): void;
 }
-interface ImporterOptions {
-  strategy: 'balanced' | 'flat' | 'trickle'
-  rawLeaves: boolean
-  onlyHash: boolean
-  reduceSingleLeafToSelf: boolean
-  hasher: MultihashHasher
-  leafType: 'file' | 'raw'
-  cidVersion: CIDVersion
-  progress: ProgressHandler
-  shardSplitThreshold: number
-  fileImportConcurrency: number
-  blockWriteConcurrency: number
-  minChunkSize: number
-  maxChunkSize: number
-  avgChunkSize: number
-  window: number
-  polynomial: number
-  maxChildrenPerNode: number
-  layerRepeat: number
-  wrapWithDirectory: boolean
-  recursive: boolean
-  hidden: boolean
-  timeout?: number
-  hamtHashFn: HamtHashFn
-  hamtBucketBits: number
-  hamtHashCode: number
-  chunker: ChunkerType | Chunker
-  dagBuilder?: DAGBuilder
-  treeBuilder?: TreeBuilder
-  bufferImporter?: BufferImporter
-  chunkValidator?: ChunkValidator
+export interface HamtHashFn {
+  (value: Uint8Array): Promise<Uint8Array>;
 }
+export interface Chunker {
+  (source: AsyncIterable<Uint8Array>, options: ImporterOptions): AsyncIterable<Uint8Array>;
 }
+export interface DAGBuilder {
+  (source: AsyncIterable<ImportCandidate> | Iterable<ImportCandidate>, blockstore: Blockstore, options: ImporterOptions): AsyncIterable<() => Promise<InProgressImportResult>>;
 }
+export interface TreeBuilder {
+  (source: AsyncIterable<InProgressImportResult>, blockstore: Blockstore, options: ImporterOptions): AsyncIterable<ImportResult>;
 }
+export interface BufferImporter {
+  (file: File, blockstore: Blockstore, options: ImporterOptions): AsyncIterable<() => Promise<InProgressImportResult>>;
 }
+export interface ChunkValidator {
+  (source: AsyncIterable<Uint8Array>, options: ImporterOptions): AsyncIterable<Uint8Array>;
 }
+export interface UnixFSV1DagBuilder<T> {
+  (item: T, blockstore: Blockstore, options: ImporterOptions): Promise<InProgressImportResult>;
 }
+export interface Reducer {
+  (leaves: InProgressImportResult[]): Promise<InProgressImportResult>;
 }
+export interface FileDAGBuilder {
+  (source: AsyncIterable<InProgressImportResult> | Iterable<InProgressImportResult>, reducer: Reducer, options: ImporterOptions): Promise<InProgressImportResult>;
 }
+export interface UserImporterOptions {
+  strategy?: 'balanced' | 'flat' | 'trickle';
+  rawLeaves?: boolean;
+  onlyHash?: boolean;
+  reduceSingleLeafToSelf?: boolean;
+  hasher?: MultihashHasher;
+  leafType?: 'file' | 'raw';
+  cidVersion?: CIDVersion;
+  progress?: ProgressHandler;
+  shardSplitThreshold?: number;
+  fileImportConcurrency?: number;
+  blockWriteConcurrency?: number;
+  minChunkSize?: number;
+  maxChunkSize?: number;
+  avgChunkSize?: number;
+  window?: number;
+  polynomial?: number;
+  maxChildrenPerNode?: number;
+  layerRepeat?: number;
+  wrapWithDirectory?: boolean;
+  recursive?: boolean;
+  hidden?: boolean;
+  timeout?: number;
+  hamtHashFn?: HamtHashFn;
+  hamtBucketBits?: number;
+  hamtHashCode?: number;
+  chunker?: ChunkerType | Chunker;
+  dagBuilder?: DAGBuilder;
+  treeBuilder?: TreeBuilder;
+  bufferImporter?: BufferImporter;
+  chunkValidator?: ChunkValidator;
 }
+export interface ImporterOptions {
+  strategy: 'balanced' | 'flat' | 'trickle';
+  rawLeaves: boolean;
+  onlyHash: boolean;
+  reduceSingleLeafToSelf: boolean;
+  hasher: MultihashHasher;
+  leafType: 'file' | 'raw';
+  cidVersion: CIDVersion;
+  progress: ProgressHandler;
+  shardSplitThreshold: number;
+  fileImportConcurrency: number;
+  blockWriteConcurrency: number;
+  minChunkSize: number;
+  maxChunkSize: number;
+  avgChunkSize: number;
+  window: number;
+  polynomial: number;
+  maxChildrenPerNode: number;
+  layerRepeat: number;
+  wrapWithDirectory: boolean;
+  recursive: boolean;
+  hidden: boolean;
+  timeout?: number;
+  hamtHashFn: HamtHashFn;
+  hamtBucketBits: number;
+  hamtHashCode: number;
+  chunker: ChunkerType | Chunker;
+  dagBuilder?: DAGBuilder;
+  treeBuilder?: TreeBuilder;
+  bufferImporter?: BufferImporter;
+  chunkValidator?: ChunkValidator;
 }
 export interface TrickleDagNode {
-  children: InProgressImportResult[]
-  depth: number
-  maxDepth: number
-  maxChildren: number
-  data?: InProgressImportResult[]
-  parent?: TrickleDagNode
-  cid?: CID
-  size?: number
-  unixfs?: UnixFS
+  children: InProgressImportResult[];
+  depth: number;
+  maxDepth: number;
+  maxChildren: number;
+  data?: InProgressImportResult[];
+  parent?: TrickleDagNode;
+  cid?: CID;
+  size?: number;
+  unixfs?: UnixFS;
 }
 export interface PersistOptions {
-  codec?: BlockCodec<any, any>
-  hasher: MultihashHasher
-  cidVersion: CIDVersion
-  onlyHash: boolean
-  signal?: AbortSignal
+  codec?: BlockCodec<any, any>;
+  hasher: MultihashHasher;
+  cidVersion: CIDVersion;
+  onlyHash: boolean;
+  signal?: AbortSignal;
 }
+//# sourceMappingURL=types.d.ts.map
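These typings describe the options and results of the importer entry point shown earlier (`export async function* importer(source, blockstore, options = {})`). A hedged usage sketch: `MemoryBlockstore` from 'blockstore-core' is an assumption, any interface-blockstore implementation will do.

import { importer } from 'ipfs-unixfs-importer'
// Assumption: a Blockstore implementation; any interface-blockstore store works
import { MemoryBlockstore } from 'blockstore-core'

const blockstore = new MemoryBlockstore()

// Each source entry is an ImportCandidate; the options are UserImporterOptions
const source = [{
  path: 'foo/hello.txt',
  content: new TextEncoder().encode('hello world')
}]

for await (const entry of importer(source, blockstore, {
  cidVersion: 1,
  rawLeaves: true,
  chunker: 'fixed',        // ChunkerType: 'fixed' | 'rabin', or a Chunker function
  wrapWithDirectory: true  // also emit a wrapping directory as the final entry
})) {
  // entry is an ImportResult: { cid, size, path?, unixfs? }
  console.log(entry.path, entry.cid.toString())
}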
Updated ipfs-unixfs@^6.0.5