ipfs-unixfs-importer
Comparing version 15.0.1 to 15.0.2
@@ -7,6 +7,6 @@ import { UnixFS } from 'ipfs-unixfs';
 export function defaultBufferImporter(options) {
-    return async function* bufferImporter(file, block) {
-        for await (let buffer of file.content) {
+    return async function* bufferImporter(file, blockstore) {
+        for await (let block of file.content) {
             yield async () => {
-                options.onProgress?.(new CustomProgressEvent('unixfs:importer:progress', { bytes: buffer.length, path: file.path }));
+                options.onProgress?.(new CustomProgressEvent('unixfs:importer:progress', { bytes: block.byteLength, path: file.path }));
                 let unixfs;
@@ -25,5 +25,5 @@ const opts = {
                     type: options.leafType,
-                    data: buffer
+                    data: block
                 });
-                buffer = dagPb.encode({
+                block = dagPb.encode({
                     Data: unixfs.marshal(),
@@ -34,5 +34,6 @@ Links: []
                 return {
-                    cid: await persist(buffer, block, opts),
+                    cid: await persist(block, blockstore, opts),
                     unixfs,
-                    size: BigInt(buffer.length)
+                    size: BigInt(block.length),
+                    block
                 };
@@ -39,0 +40,0 @@ };
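The hunk above (apparently the compiled buffer importer) fixes a misleading call: in 15.0.1, `persist(buffer, block, opts)` passed the blockstore as its second argument through a parameter confusingly named `block`. The rename makes the call sites read correctly, and each yielded result now carries the encoded bytes as `block`. For orientation, here is a rough sketch of what a `persist`-style helper plausibly does — the helper's internals are not shown in this diff, so the option handling and hashing here are assumptions:

import { CID } from 'multiformats/cid'
import { sha256 } from 'multiformats/hashes/sha2'
import * as dagPb from '@ipld/dag-pb'

// Hypothetical reconstruction: hash the encoded bytes, derive a CID, write
// the block, return the CID. Details are guessed from the call sites above.
async function persistSketch (
  buffer: Uint8Array,
  blockstore: { put(cid: CID, block: Uint8Array): Promise<unknown> },
  options: { cidVersion: 0 | 1, codec?: number }
): Promise<CID> {
  const digest = await sha256.digest(buffer)
  const cid = CID.create(options.cidVersion, options.codec ?? dagPb.code, digest)
  await blockstore.put(cid, buffer) // the second call-site argument really is the blockstore
  return cid
}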
@@ -10,4 +10,4 @@ import { UnixFS } from 'ipfs-unixfs';
     });
-    const buffer = encode(prepare({ Data: unixfs.marshal() }));
-    const cid = await persist(buffer, blockstore, options);
+    const block = encode(prepare({ Data: unixfs.marshal() }));
+    const cid = await persist(block, blockstore, options);
     const path = dir.path;
@@ -18,6 +18,7 @@ return {
         unixfs,
-        size: BigInt(buffer.length),
-        originalPath: dir.originalPath
+        size: BigInt(block.length),
+        originalPath: dir.originalPath,
+        block
     };
 };
 //# sourceMappingURL=dir.js.map
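The directory builder (dir.js, per the source map reference) gets the same treatment: the encoded bytes are renamed from `buffer` to `block` and returned alongside the CID, so downstream consumers no longer need to read them back from the blockstore. A minimal standalone sketch of the encode-once pattern — the API calls are real ipfs-unixfs / dag-pb, the surrounding context is illustrative:

import { UnixFS } from 'ipfs-unixfs'
import { encode, prepare } from '@ipld/dag-pb'

// Encode a UnixFS directory node once and keep the bytes: the same
// Uint8Array is written to the blockstore and handed back to the caller.
const unixfs = new UnixFS({ type: 'directory' })
const block: Uint8Array = encode(prepare({ Data: unixfs.marshal() }))
// `block` would now be passed to persist() *and* included in the import result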
@@ -12,24 +12,38 @@ import { UnixFS } from 'ipfs-unixfs';
         if (count === 0) {
-            previous = entry;
+            // cache the first entry in case there aren't any more
+            previous = {
+                ...entry,
+                single: true
+            };
             continue;
         }
         else if (count === 1 && (previous != null)) {
-            yield previous;
+            // we have the second block of a multiple block import so yield the first
+            yield {
+                ...previous,
+                block: undefined,
+                single: undefined
+            };
             previous = undefined;
         }
-        yield entry;
+        // yield the second or later block of a multiple block import
+        yield {
+            ...entry,
+            block: undefined
+        };
     }
     if (previous != null) {
-        previous.single = true;
         yield previous;
     }
 }
+function isSingleBlockImport(result) {
+    return result.single === true;
+}
 const reduce = (file, blockstore, options) => {
     const reducer = async function (leaves) {
-        if (leaves.length === 1 && leaves[0]?.single === true && options.reduceSingleLeafToSelf) {
+        if (leaves.length === 1 && isSingleBlockImport(leaves[0]) && options.reduceSingleLeafToSelf) {
             const leaf = leaves[0];
-            if (file.mtime !== undefined || file.mode !== undefined) {
+            if (isSingleBlockImport(leaf) && (file.mtime !== undefined || file.mode !== undefined)) {
                 // only one leaf node which is a raw leaf - we have metadata so convert it into a
                 // UnixFS entry otherwise we'll have nowhere to store the metadata
-                let buffer = await blockstore.get(leaf.cid, options);
                 leaf.unixfs = new UnixFS({
@@ -39,29 +53,10 @@ type: 'file',
                     mode: file.mode,
-                    data: buffer
+                    data: leaf.block
                 });
-                buffer = encode(prepare({ Data: leaf.unixfs.marshal() }));
-                // // TODO vmx 2021-03-26: This is what the original code does, it checks
-                // // the multihash of the original leaf node and uses then the same
-                // // hasher. i wonder if that's really needed or if we could just use
-                // // the hasher from `options.hasher` instead.
-                // const multihash = mh.decode(leaf.cid.multihash.bytes)
-                // let hasher
-                // switch multihash {
-                //   case sha256.code {
-                //     hasher = sha256
-                //     break;
-                //   }
-                //   //case identity.code {
-                //   //  hasher = identity
-                //   //  break;
-                //   //}
-                //   default: {
-                //     throw new Error(`Unsupported hasher "${multihash}"`)
-                //   }
-                // }
-                leaf.cid = await persist(buffer, blockstore, {
+                leaf.block = encode(prepare({ Data: leaf.unixfs.marshal() }));
+                leaf.cid = await persist(leaf.block, blockstore, {
                     ...options,
                     cidVersion: options.cidVersion
                 });
-                leaf.size = BigInt(buffer.length);
+                leaf.size = BigInt(leaf.block.length);
             }
@@ -120,4 +115,4 @@ return {
     };
-    const buffer = encode(prepare(node));
-    const cid = await persist(buffer, blockstore, options);
+    const block = encode(prepare(node));
+    const cid = await persist(block, blockstore, options);
     return {
@@ -127,4 +122,5 @@ cid,
         unixfs: f,
-        size: BigInt(buffer.length + node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), 0)),
-        originalPath: file.originalPath
+        size: BigInt(block.length + node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), 0)),
+        originalPath: file.originalPath,
+        block
     };
@@ -131,0 +127,0 @@ };
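The trickiest change is in the file builder above. It now defers yielding the first chunk until it knows whether a second one exists: a lone chunk keeps its raw bytes plus a `single: true` marker (so `reduceSingleLeafToSelf` can reuse them without a `blockstore.get()` round trip), while chunks of a multi-block import are yielded with their bytes stripped so every block isn't held in memory. A toy reconstruction of that buffering logic, under the assumption that entries carry optional `block` and `single` fields:

// Hypothetical standalone model of the hold-back-the-first-chunk pattern.
interface Chunk {
  size: bigint
  block?: Uint8Array
  single?: boolean
}

async function * markSingle (source: AsyncIterable<Chunk>): AsyncGenerator<Chunk> {
  let count = -1
  let previous: Chunk | undefined

  for await (const entry of source) {
    count++

    if (count === 0) {
      // cache the first entry in case there aren't any more
      previous = { ...entry, single: true }
      continue
    } else if (count === 1 && previous != null) {
      // a second chunk arrived, so the cached one wasn't a single-block import
      yield { ...previous, block: undefined, single: undefined }
      previous = undefined
    }

    // second or later chunk of a multi-block import - drop the raw bytes
    yield { ...entry, block: undefined }
  }

  if (previous != null) {
    // only one chunk ever arrived: keep its bytes and the single marker
    yield previous
  }
}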
@@ -52,14 +52,15 @@ import { encode, prepare } from '@ipld/dag-pb';
     const links = [];
-    for (let [name, child] of this._children.entries()) {
+    for (const [name, child] of this._children.entries()) {
+        let result = child;
         if (child instanceof Dir) {
             for await (const entry of child.flush(block)) {
-                child = entry;
-                yield child;
+                result = entry;
+                yield entry;
             }
         }
-        if (child.size != null && (child.cid != null)) {
+        if (result.size != null && (result.cid != null)) {
             links.push({
                 Name: name,
-                Tsize: Number(child.size),
-                Hash: child.cid
+                Tsize: Number(result.size),
+                Hash: result.cid
             });
@@ -66,0 +67,0 @@ }
@@ -13,6 +13,3 @@ import { DAGBuilder } from './dag-builder/index.js';
 export type ImportContent = ByteStream | Uint8Array;
-export interface BlockstoreOptions {
-    signal?: AbortSignal;
-}
-export type Blockstore = Pick<InterfaceBlockstore, 'has' | 'put' | 'get'>;
+export type Blockstore = Pick<InterfaceBlockstore, 'put'>;
 export interface FileCandidate {
@@ -49,6 +46,14 @@ path?: string;
 }
-export interface InProgressImportResult extends ImportResult {
-    single?: boolean;
+export interface MultipleBlockImportResult extends ImportResult {
     originalPath?: string;
 }
+export interface SingleBlockImportResult extends ImportResult {
+    single: true;
+    originalPath?: string;
+    block: Uint8Array;
+}
+export type InProgressImportResult = SingleBlockImportResult | MultipleBlockImportResult;
+export interface BufferImporterResult extends ImportResult {
+    block: Uint8Array;
+}
 export interface HamtHashFn {
@@ -61,3 +66,3 @@ (value: Uint8Array): Promise<Uint8Array>;
 export interface BufferImporter {
-    (file: File, blockstore: Blockstore): AsyncIterable<() => Promise<InProgressImportResult>>;
+    (file: File, blockstore: Blockstore): AsyncIterable<() => Promise<BufferImporterResult>>;
 }
@@ -64,0 +69,0 @@ export type ImportProgressEvents = BufferImportProgressEvents;
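In the type declarations, the public `Blockstore` type narrows from `Pick<InterfaceBlockstore, 'has' | 'put' | 'get'>` to `Pick<InterfaceBlockstore, 'put'>`: with blocks travelling inside the import results, writing is the only capability the importer still needs. That makes minimal sinks trivial to supply. A sketch of a put-only, in-memory store that should satisfy the narrowed type — the `Map`-based storage is illustrative, not from the package:

import type { CID } from 'multiformats/cid'

// A put-only sink: enough surface area for the 15.0.2 importer, which no
// longer calls has() or get() on the blockstore it is given.
class MemorySink {
  private readonly blocks = new Map<string, Uint8Array>()

  async put (cid: CID, block: Uint8Array): Promise<CID> {
    this.blocks.set(cid.toString(), block)
    return cid
  }
}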
@@ -10,4 +10,4 @@ {
   "ImporterSourceStream": "https://ipfs.github.io/js-ipfs-unixfs/types/ipfs_unixfs_importer._internal_.ImporterSourceStream.html",
-  "BlockstoreOptions": "https://ipfs.github.io/js-ipfs-unixfs/interfaces/ipfs_unixfs_importer.BlockstoreOptions.html",
   "BufferImporter": "https://ipfs.github.io/js-ipfs-unixfs/interfaces/ipfs_unixfs_importer.BufferImporter.html",
+  "BufferImporterResult": "https://ipfs.github.io/js-ipfs-unixfs/interfaces/ipfs_unixfs_importer.BufferImporterResult.html",
   "Directory": "https://ipfs.github.io/js-ipfs-unixfs/interfaces/ipfs_unixfs_importer.Directory.html",
@@ -20,3 +20,4 @@ "DirectoryCandidate": "https://ipfs.github.io/js-ipfs-unixfs/interfaces/ipfs_unixfs_importer.DirectoryCandidate.html",
   "ImporterOptions": "https://ipfs.github.io/js-ipfs-unixfs/interfaces/ipfs_unixfs_importer.ImporterOptions.html",
-  "InProgressImportResult": "https://ipfs.github.io/js-ipfs-unixfs/interfaces/ipfs_unixfs_importer.InProgressImportResult.html",
+  "MultipleBlockImportResult": "https://ipfs.github.io/js-ipfs-unixfs/interfaces/ipfs_unixfs_importer.MultipleBlockImportResult.html",
+  "SingleBlockImportResult": "https://ipfs.github.io/js-ipfs-unixfs/interfaces/ipfs_unixfs_importer.SingleBlockImportResult.html",
   "TreeBuilder": "https://ipfs.github.io/js-ipfs-unixfs/interfaces/ipfs_unixfs_importer.TreeBuilder.html",
@@ -29,2 +30,3 @@ "Blockstore": "https://ipfs.github.io/js-ipfs-unixfs/types/ipfs_unixfs_importer.Blockstore.html",
   "ImportProgressEvents": "https://ipfs.github.io/js-ipfs-unixfs/types/ipfs_unixfs_importer.ImportProgressEvents.html",
+  "InProgressImportResult": "https://ipfs.github.io/js-ipfs-unixfs/types/ipfs_unixfs_importer.InProgressImportResult.html",
   "importByteStream": "https://ipfs.github.io/js-ipfs-unixfs/functions/ipfs_unixfs_importer.importByteStream.html",
@@ -31,0 +33,0 @@ "importBytes": "https://ipfs.github.io/js-ipfs-unixfs/functions/ipfs_unixfs_importer.importBytes.html",
 {
   "name": "ipfs-unixfs-importer",
-  "version": "15.0.1",
+  "version": "15.0.2",
   "description": "JavaScript implementation of the UnixFs importer used by IPFS",
@@ -5,0 +5,0 @@ "license": "Apache-2.0 OR MIT",
@@ -35,6 +35,6 @@ import { UnixFS } from 'ipfs-unixfs'
 export function defaultBufferImporter (options: BufferImporterOptions): BufferImporter {
-  return async function * bufferImporter (file, block) {
-    for await (let buffer of file.content) {
+  return async function * bufferImporter (file, blockstore) {
+    for await (let block of file.content) {
       yield async () => {
-        options.onProgress?.(new CustomProgressEvent<ImportProgressData>('unixfs:importer:progress', { bytes: buffer.length, path: file.path }))
+        options.onProgress?.(new CustomProgressEvent<ImportProgressData>('unixfs:importer:progress', { bytes: block.byteLength, path: file.path }))
         let unixfs
@@ -54,6 +54,6 @@
           type: options.leafType,
-          data: buffer
+          data: block
         })
-        buffer = dagPb.encode({
+        block = dagPb.encode({
           Data: unixfs.marshal(),
@@ -65,5 +65,6 @@ Links: []
         return {
-          cid: await persist(buffer, block, opts),
+          cid: await persist(block, blockstore, opts),
           unixfs,
-          size: BigInt(buffer.length)
+          size: BigInt(block.length),
+          block
         }
@@ -70,0 +71,0 @@ }
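A subtle correction rides along in the TypeScript source above: the progress event now reports `block.byteLength` rather than `buffer.length` (equivalent for a `Uint8Array`, but `byteLength` is the more precise name). A sketch of observing these events, assuming the event name shown in the hunk and a `CustomProgressEvent` whose `detail` carries the `bytes` and `path` fields:

import { importer } from 'ipfs-unixfs-importer'

// Hypothetical put-only sink, just enough for this example.
const blockstore: any = { put: async (cid: any, _block: Uint8Array) => cid }

// Log per-chunk progress while importing a small file.
const entries = importer(
  [{ path: 'hello.txt', content: new Uint8Array([104, 105]) }],
  blockstore,
  {
    onProgress: (evt: any) => {
      if (evt.type === 'unixfs:importer:progress') {
        console.log(`imported ${evt.detail.bytes} bytes of ${evt.detail.path}`)
      }
    }
  }
)

for await (const entry of entries) {
  console.log(entry.path, entry.cid.toString())
}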
@@ -20,4 +20,4 @@ import { UnixFS } from 'ipfs-unixfs'
-  const buffer = encode(prepare({ Data: unixfs.marshal() }))
-  const cid = await persist(buffer, blockstore, options)
+  const block = encode(prepare({ Data: unixfs.marshal() }))
+  const cid = await persist(block, blockstore, options)
   const path = dir.path
@@ -29,5 +29,6 @@
     unixfs,
-    size: BigInt(buffer.length),
-    originalPath: dir.originalPath
+    size: BigInt(block.length),
+    originalPath: dir.originalPath,
+    block
   }
 }
@@ -6,3 +6,3 @@ import { UnixFS } from 'ipfs-unixfs'
 import * as rawCodec from 'multiformats/codecs/raw'
-import type { BufferImporter, File, InProgressImportResult, Blockstore } from '../index.js'
+import type { BufferImporter, File, InProgressImportResult, Blockstore, SingleBlockImportResult } from '../index.js'
 import type { FileLayout, Reducer } from '../layout/index.js'
@@ -19,3 +19,3 @@ import type { Version } from 'multiformats/cid'
   let count = -1
-  let previous: InProgressImportResult | undefined
+  let previous: SingleBlockImportResult | undefined
@@ -26,14 +26,27 @@ for await (const entry of parallelBatch(options.bufferImporter(file, blockstore), options.blockWriteConcurrency)) {
     if (count === 0) {
-      previous = entry
+      // cache the first entry in case there aren't any more
+      previous = {
+        ...entry,
+        single: true
+      }
       continue
     } else if (count === 1 && (previous != null)) {
-      yield previous
+      // we have the second block of a multiple block import so yield the first
+      yield {
+        ...previous,
+        block: undefined,
+        single: undefined
+      }
       previous = undefined
     }
-    yield entry
+    // yield the second or later block of a multiple block import
+    yield {
+      ...entry,
+      block: undefined
+    }
   }
   if (previous != null) {
-    previous.single = true
     yield previous
@@ -49,12 +62,14 @@ }
+function isSingleBlockImport (result: any): result is SingleBlockImportResult {
+  return result.single === true
+}
 const reduce = (file: File, blockstore: Blockstore, options: ReduceOptions): Reducer => {
   const reducer: Reducer = async function (leaves) {
-    if (leaves.length === 1 && leaves[0]?.single === true && options.reduceSingleLeafToSelf) {
+    if (leaves.length === 1 && isSingleBlockImport(leaves[0]) && options.reduceSingleLeafToSelf) {
       const leaf = leaves[0]
-      if (file.mtime !== undefined || file.mode !== undefined) {
+      if (isSingleBlockImport(leaf) && (file.mtime !== undefined || file.mode !== undefined)) {
         // only one leaf node which is a raw leaf - we have metadata so convert it into a
         // UnixFS entry otherwise we'll have nowhere to store the metadata
-        let buffer = await blockstore.get(leaf.cid, options)
         leaf.unixfs = new UnixFS({
@@ -64,31 +79,12 @@ type: 'file',
           mode: file.mode,
-          data: buffer
+          data: leaf.block
         })
-        buffer = encode(prepare({ Data: leaf.unixfs.marshal() }))
+        leaf.block = encode(prepare({ Data: leaf.unixfs.marshal() }))
-        // // TODO vmx 2021-03-26: This is what the original code does, it checks
-        // // the multihash of the original leaf node and uses then the same
-        // // hasher. i wonder if that's really needed or if we could just use
-        // // the hasher from `options.hasher` instead.
-        // const multihash = mh.decode(leaf.cid.multihash.bytes)
-        // let hasher
-        // switch multihash {
-        //   case sha256.code {
-        //     hasher = sha256
-        //     break;
-        //   }
-        //   //case identity.code {
-        //   //  hasher = identity
-        //   //  break;
-        //   //}
-        //   default: {
-        //     throw new Error(`Unsupported hasher "${multihash}"`)
-        //   }
-        // }
-        leaf.cid = await persist(buffer, blockstore, {
+        leaf.cid = await persist(leaf.block, blockstore, {
          ...options,
          cidVersion: options.cidVersion
        })
-        leaf.size = BigInt(buffer.length)
+        leaf.size = BigInt(leaf.block.length)
      }
@@ -155,4 +151,4 @@
   }
-  const buffer = encode(prepare(node))
-  const cid = await persist(buffer, blockstore, options)
+  const block = encode(prepare(node))
+  const cid = await persist(block, blockstore, options)
@@ -163,4 +159,5 @@ return {
     unixfs: f,
-    size: BigInt(buffer.length + node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), 0)),
-    originalPath: file.originalPath
+    size: BigInt(block.length + node.Links.reduce((acc, curr) => acc + (curr.Tsize ?? 0), 0)),
+    originalPath: file.originalPath,
+    block
   }
@@ -167,0 +164,0 @@ }
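In the reducer, the payoff of carrying `leaf.block` is that wrapping a raw leaf in a UnixFS file node — needed when `mtime`/`mode` metadata must be stored somewhere — no longer requires reading the leaf back via `blockstore.get()`. That removed read is also what allows `get` to be dropped from the `Blockstore` type. A hypothetical standalone helper showing the conversion with the real ipfs-unixfs / dag-pb APIs:

import { UnixFS } from 'ipfs-unixfs'
import { encode, prepare } from '@ipld/dag-pb'

// Wrap raw leaf bytes in a dag-pb UnixFS file node so metadata has somewhere
// to live. The leaf's bytes come from the in-memory result, not the blockstore.
function wrapRawLeaf (leafBlock: Uint8Array, mode?: number, mtime?: { secs: bigint }): Uint8Array {
  const unixfs = new UnixFS({ type: 'file', mode, mtime, data: leafBlock })
  return encode(prepare({ Data: unixfs.marshal() }))
}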
 import { encode, PBNode, prepare } from '@ipld/dag-pb'
 import type { Blockstore } from 'interface-blockstore'
 import { UnixFS } from 'ipfs-unixfs'
+import type { CID } from 'multiformats/cid'
 import { Dir, CID_V0, CID_V1, DirProps } from './dir.js'
@@ -71,16 +72,18 @@ import type { ImportResult, InProgressImportResult } from './index.js'
-    for (let [name, child] of this._children.entries()) {
+    for (const [name, child] of this._children.entries()) {
+      let result: { size?: bigint | number, cid?: CID } = child
       if (child instanceof Dir) {
         for await (const entry of child.flush(block)) {
-          child = entry
-          yield child
+          result = entry
+          yield entry
         }
       }
-      if (child.size != null && (child.cid != null)) {
+      if (result.size != null && (result.cid != null)) {
         links.push({
           Name: name,
-          Tsize: Number(child.size),
-          Hash: child.cid
+          Tsize: Number(result.size),
+          Hash: result.cid
         })
@@ -87,0 +90,0 @@ }
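dir-flat's `flush` had a subtle bug: the loop binding `child` was reassigned to each flushed entry, so later code read a variable that no longer named the child. The fix makes the binding `const` and tracks the child's most recently flushed entry — its root node comes last — in a separate `result`. A toy reconstruction of the corrected shape, with simplified types that are not the package's own:

import type { CID } from 'multiformats/cid'

interface Entry { cid: CID, size: bigint }
interface Flushable { flush(): AsyncIterable<Entry> }

// Stream every child's blocks, but build the directory link from the *last*
// entry each child yields - that entry is the child's root.
async function * flushChildren (children: Map<string, Flushable>): AsyncGenerator<Entry> {
  const links: Array<{ Name: string, Tsize: number, Hash: CID }> = []

  for (const [name, child] of children.entries()) {
    let result: Entry | undefined

    for await (const entry of child.flush()) {
      result = entry // the loop binding `child` is never reassigned
      yield entry
    }

    if (result?.cid != null && result.size != null) {
      links.push({ Name: name, Tsize: Number(result.size), Hash: result.cid })
    }
  }
  // `links` would next be encoded into the directory's own dag-pb node
}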
@@ -20,8 +20,4 @@ import parallelBatch from 'it-parallel-batch'
-export interface BlockstoreOptions {
-  signal?: AbortSignal
-}
-
-export type Blockstore = Pick<InterfaceBlockstore, 'has' | 'put' | 'get'>
+export type Blockstore = Pick<InterfaceBlockstore, 'put'>
 export interface FileCandidate {
@@ -64,10 +60,21 @@ path?: string
-export interface InProgressImportResult extends ImportResult {
-  single?: boolean
+export interface MultipleBlockImportResult extends ImportResult {
   originalPath?: string
 }
+export interface SingleBlockImportResult extends ImportResult {
+  single: true
+  originalPath?: string
+  block: Uint8Array
+}
+export type InProgressImportResult = SingleBlockImportResult | MultipleBlockImportResult
+export interface BufferImporterResult extends ImportResult {
+  block: Uint8Array
+}
 export interface HamtHashFn { (value: Uint8Array): Promise<Uint8Array> }
 export interface TreeBuilder { (source: AsyncIterable<InProgressImportResult>, blockstore: Blockstore): AsyncIterable<ImportResult> }
-export interface BufferImporter { (file: File, blockstore: Blockstore): AsyncIterable<() => Promise<InProgressImportResult>> }
+export interface BufferImporter { (file: File, blockstore: Blockstore): AsyncIterable<() => Promise<BufferImporterResult>> }
@@ -74,0 +81,0 @@ export type ImportProgressEvents =
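Finally, `InProgressImportResult` stops being a single interface with an optional `single` flag and becomes a discriminated union, so TypeScript can prove that `block` is present exactly when `single` is `true`. Consuming code narrows like this — the base `ImportResult` fields are paraphrased here, not copied from the package:

import type { CID } from 'multiformats/cid'

interface ImportResultBase { cid: CID, size: bigint, path?: string }
interface MultipleBlockImportResult extends ImportResultBase { originalPath?: string }
interface SingleBlockImportResult extends ImportResultBase {
  single: true
  originalPath?: string
  block: Uint8Array
}
type InProgressImportResult = SingleBlockImportResult | MultipleBlockImportResult

function describe (result: InProgressImportResult): string {
  if ('single' in result && result.single) {
    // narrowed to SingleBlockImportResult: `block` is guaranteed to exist
    return `single-block import (${result.block.byteLength} bytes)`
  }
  return 'multi-block import'
}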