
ipfs-unixfs-importer - npm Package Compare versions

Comparing version 0.43.1 to 0.44.0


CHANGELOG.md

@@ -0,1 +1,11 @@

+<a name="0.44.0"></a>
+# [0.44.0](https://github.com/ipfs/js-ipfs-unixfs-importer/compare/v0.43.1...v0.44.0) (2020-01-15)
+
+### Features
+
+* allow overriding of internal functions ([#48](https://github.com/ipfs/js-ipfs-unixfs-importer/issues/48)) ([0bff5f2](https://github.com/ipfs/js-ipfs-unixfs-importer/commit/0bff5f2))
+
 <a name="0.43.1"></a>

@@ -2,0 +12,0 @@ ## [0.43.1](https://github.com/ipfs/js-ipfs-unixfs-importer/compare/v0.43.0...v0.43.1) (2020-01-09)


package.json
 {
   "name": "ipfs-unixfs-importer",
-  "version": "0.43.1",
+  "version": "0.44.0",
   "description": "JavaScript implementation of the UnixFs importer used by IPFS",

@@ -73,2 +73,3 @@ "leadMaintainer": "Alex Potsides <alex.potsides@protocol.ai>",

     "Alan Shaw <alan@tableflip.io>",
+    "Alex Potsides <alex@achingbrain.net>",
     "Arpit Agarwal <atvanguard@users.noreply.github.com>",

@@ -90,3 +91,2 @@ "Bernard Mordan <bernard@tableflip.io>",

     "Volker Mische <volker.mische@gmail.com>",
-    "achingbrain <alex@achingbrain.net>",
     "greenkeeper[bot] <greenkeeper[bot]@users.noreply.github.com>",

@@ -93,0 +93,0 @@ "jbenet <juan@benet.ai>",

README.md

@@ -27,2 +27,3 @@ # ipfs-unixfs-importer <!-- omit in toc -->

 - [const import = importer(source, ipld [, options])](#const-import--importersource-ipld--options)
+- [Overriding internals](#overriding-internals)
 - [Contribute](#contribute)

@@ -149,4 +150,28 @@ - [License](#license)

+## Overriding internals
+
+Several aspects of the importer can be overridden by specifying functions as part of the options object with these keys (a usage sketch follows below):
+
+- `chunkValidator` (function): Optional function that supports the signature `async function * (source, options)`
+  - This function takes input from the `content` field of imported entries. It should yield `Buffer` objects constructed from that input, or throw an `Error` if it cannot.
+- `chunker` (function): Optional function that supports the signature `async function * (source, options)`, where `source` is an async generator and `options` is an options object
+  - It should yield `Buffer` objects.
+- `bufferImporter` (function): Optional function that supports the signature `async function * (entry, source, ipld, options)`
+  - This function should read `Buffer`s from `source` and persist them using `ipld.put` or similar
+  - `entry` is the `{ path, content }` entry, and `source` is an async generator that yields Buffers
+  - It should yield functions that return a Promise resolving to an object with the properties `{ cid, unixfs, size }`, where `cid` is a [CID], `unixfs` is a [UnixFS] entry and `size` is a `Number` representing the serialized size of the [IPLD] node that holds the buffer data.
+  - Values will be pulled from this generator in parallel - the amount of parallelisation is controlled by the `blockWriteConcurrency` option (default: 10)
+- `dagBuilder` (function): Optional function that supports the signature `async function * (source, ipld, options)`
+  - This function should read `{ path, content }` entries from `source` and turn them into DAGs
+  - It should yield a `function` that returns a `Promise` resolving to `{ cid, path, unixfs, node }`, where `cid` is a `CID`, `path` is a string, `unixfs` is a UnixFS entry and `node` is a `DAGNode`.
+  - Values will be pulled from this generator in parallel - the amount of parallelisation is controlled by the `fileImportConcurrency` option (default: 50)
+- `treeBuilder` (function): Optional function that supports the signature `async function * (source, ipld, options)`
+  - This function should read `{ cid, path, unixfs, node }` entries from `source` and place them in a directory structure
+  - It should yield an object with the properties `{ cid, path, unixfs, size }`, where `cid` is a `CID`, `path` is a string, `unixfs` is a UnixFS entry and `size` is a `Number`.
+
 [ipld-resolver instance]: https://github.com/ipld/js-ipld-resolver
 [UnixFS]: https://github.com/ipfs/specs/tree/master/unixfs
 [IPLD]: https://github.com/ipld/js-ipld
 [CID]: https://github.com/multiformats/js-cid
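
For a concrete sense of the new surface, here is a minimal sketch of overriding one internal, the chunker. Everything in it is illustrative rather than part of the package: `myChunker` is a hypothetical chunker, and `source` and `ipld` are assumed to be an entry iterable and a configured js-ipld instance.

```js
const importer = require('ipfs-unixfs-importer')

// Hypothetical chunker matching the documented
// `async function * (source, options)` signature: it re-slices
// incoming Buffers into fixed 1024-byte chunks.
async function * myChunker (source, options) {
  let leftover = Buffer.alloc(0)

  for await (const buffer of source) {
    leftover = Buffer.concat([leftover, buffer])

    while (leftover.length >= 1024) {
      yield leftover.slice(0, 1024)
      leftover = leftover.slice(1024)
    }
  }

  if (leftover.length > 0) {
    yield leftover // trailing partial chunk
  }
}

// Usage: pass the override in the options object like any other option
async function run (source, ipld) {
  for await (const entry of importer(source, ipld, { chunker: myChunker })) {
    console.info(entry.path, entry.cid.toString())
  }
}
```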

@@ -153,0 +178,0 @@ ## Contribute

@@ -24,3 +24,3 @@ 'use strict'

     unixfs,
-    node
+    size: node.size
   }

@@ -27,0 +27,0 @@ }

@@ -19,45 +19,14 @@ 'use strict'

-async function * importBuffer (file, source, ipld, options) {
-  for await (const buffer of source) {
-    yield async () => {
-      options.progress(buffer.length)
-      let node
-      let unixfs
-
-      const opts = {
-        ...options
-      }
-
-      if (options.rawLeaves) {
-        node = buffer
-        opts.codec = 'raw'
-        opts.cidVersion = 1
-      } else {
-        unixfs = new UnixFS({
-          type: options.leafType,
-          data: buffer,
-          mtime: file.mtime,
-          mode: file.mode
-        })
-
-        node = new DAGNode(unixfs.marshal())
-      }
-
-      const cid = await persist(node, ipld, opts)
-
-      return {
-        cid: cid,
-        unixfs,
-        node
-      }
-    }
-  }
-}
-
 async function * buildFileBatch (file, source, ipld, options) {
   let count = -1
   let previous
+  let bufferImporter
+
+  if (typeof options.bufferImporter === 'function') {
+    bufferImporter = options.bufferImporter
+  } else {
+    bufferImporter = require('./buffer-importer')
+  }

-  for await (const entry of parallelBatch(importBuffer(file, source, ipld, options), options.blockWriteConcurrency)) {
+  for await (const entry of parallelBatch(bufferImporter(file, source, ipld, options), options.blockWriteConcurrency)) {
     count++
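
The shape of this change repeats throughout the release: prefer a user-supplied function when one is given, otherwise `require` the built-in module. Factored out, the pattern looks like the sketch below; `resolveImpl` is a hypothetical helper of ours, not part of the package.

```js
// Prefer the caller's implementation when provided, otherwise fall
// back to the built-in module at the given path.
function resolveImpl (userFn, defaultPath) {
  return typeof userFn === 'function' ? userFn : require(defaultPath)
}

// e.g. the selection above could be written as:
// const bufferImporter = resolveImpl(options.bufferImporter, './buffer-importer')
```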

@@ -90,5 +59,4 @@

       path: file.path,
-      name: (file.path || '').split('/').pop(),
       unixfs: leaf.unixfs,
-      node: leaf.node
+      size: leaf.size
     }

@@ -106,3 +74,3 @@ }

     .filter(leaf => {
-      if (leaf.cid.codec === 'raw' && leaf.node.length) {
+      if (leaf.cid.codec === 'raw' && leaf.size) {
         return true

@@ -120,5 +88,5 @@ }

         // node is a leaf buffer
-        f.addBlockSize(leaf.node.length)
+        f.addBlockSize(leaf.size)

-        return new DAGLink(leaf.name, leaf.node.length, leaf.cid)
+        return new DAGLink(leaf.name, leaf.size, leaf.cid)
       }

@@ -134,3 +102,3 @@

-      return new DAGLink(leaf.name, leaf.node.size, leaf.cid)
+      return new DAGLink(leaf.name, leaf.size, leaf.cid)
     })

@@ -145,3 +113,2 @@

     unixfs: f,
-    node,
     size: node.size

@@ -148,0 +115,0 @@ }
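
Taken together, the hunks above change what a leaf entry carries: instead of the full DAG node, callers now receive only its serialized size, which is all the reducer needs for block sizes and links. Illustratively (the values below are made up, not from the package):

```js
// Shape of a leaf entry after this refactor. Holding only the
// serialized size, rather than the whole DAGNode, means the block
// data itself can be dropped once it has been persisted.
const exampleLeaf = {
  cid: 'QmHash...',  // stands in for a CID instance
  unixfs: undefined, // UnixFS metadata; absent for rawLeaves raw blocks
  size: 262158       // serialized size of the IPLD node, in bytes
}

console.info(exampleLeaf.size)
```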

@@ -5,4 +5,2 @@ 'use strict'

 const fileBuilder = require('./file')
-const createChunker = require('../chunker')
-const validateChunks = require('./validate-chunks')

@@ -34,6 +32,22 @@ async function * dagBuilder (source, ipld, options) {

-      const chunker = createChunker(options.chunker, validateChunks(source), options)
+      let chunker
+
+      if (typeof options.chunker === 'function') {
+        chunker = options.chunker
+      } else if (options.chunker === 'rabin') {
+        chunker = require('../chunker/rabin')
+      } else {
+        chunker = require('../chunker/fixed-size')
+      }
+
+      let chunkValidator
+
+      if (typeof options.chunkValidator === 'function') {
+        chunkValidator = options.chunkValidator
+      } else {
+        chunkValidator = require('./validate-chunks')
+      }
+
       // item is a file
-      yield () => fileBuilder(entry, chunker, ipld, options)
+      yield () => fileBuilder(entry, chunker(chunkValidator(source, options), options), ipld, options)
     } else {

@@ -40,0 +54,0 @@ // item is a directory
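
Because the selected `chunkValidator` now feeds the selected `chunker` directly, a custom validator only has to satisfy the documented generator signature. A minimal hedged sketch follows; the `myChunkValidator` name is ours, not the package's.

```js
// Coerce each incoming chunk to a Buffer, as the documented
// chunkValidator contract requires, or throw if that is impossible.
async function * myChunkValidator (source, options) {
  for await (const chunk of source) {
    if (Buffer.isBuffer(chunk)) {
      yield chunk
    } else if (typeof chunk === 'string') {
      yield Buffer.from(chunk)
    } else {
      throw new Error(`Cannot convert ${typeof chunk} to a Buffer`)
    }
  }
}
```

Passing `{ chunkValidator: myChunkValidator }` in the options object would then route all file content through it before chunking.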

@@ -68,3 +68,3 @@ 'use strict'

-    links.push(new DAGLink(children[i], child.node.length || child.node.size, child.cid))
+    links.push(new DAGLink(children[i], child.size, child.cid))
   }

@@ -88,3 +88,3 @@

     path,
-    node
+    size: node.size
   }

@@ -91,0 +91,0 @@ }

@@ -110,3 +110,3 @@ 'use strict'

-        links.push(new DAGLink(labelPrefix, shard.node.size, shard.cid))
+        links.push(new DAGLink(labelPrefix, shard.size, shard.cid))
       } else if (typeof child.value.flush === 'function') {

@@ -123,3 +123,3 @@ const dir = child.value

         const label = labelPrefix + child.key
-        links.push(new DAGLink(label, flushedDir.node.size, flushedDir.cid))
+        links.push(new DAGLink(label, flushedDir.size, flushedDir.cid))
       } else {

@@ -160,6 +160,6 @@ const value = child.value

     cid,
-    node,
     unixfs: dir,
-    path
+    path,
+    size: node.size
   }
 }
 'use strict'

-const dagBuilder = require('./dag-builder')
-const treeBuilder = require('./tree-builder')
 const parallelBatch = require('it-parallel-batch')

@@ -33,3 +31,5 @@ const mergeOptions = require('merge-options').bind({ ignoreUndefined: true })

   hidden: false,
-  preload: true
+  preload: true,
+  chunkValidator: null,
+  importBuffer: null
 }

@@ -62,2 +62,18 @@

+  let dagBuilder
+
+  if (typeof options.dagBuilder === 'function') {
+    dagBuilder = options.dagBuilder
+  } else {
+    dagBuilder = require('./dag-builder')
+  }
+
+  let treeBuilder
+
+  if (typeof options.treeBuilder === 'function') {
+    treeBuilder = options.treeBuilder
+  } else {
+    treeBuilder = require('./tree-builder')
+  }
+
   for await (const entry of treeBuilder(parallelBatch(dagBuilder(source, ipld, opts), opts.fileImportConcurrency), ipld, opts)) {

@@ -64,0 +80,0 @@ yield {
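
With every stage now swappable at the entry point, an override travels in the same options object as everything else. A hedged end-to-end sketch, assuming `ipld` is a configured js-ipld instance and reusing the hypothetical `myChunker` from the earlier example:

```js
const importer = require('ipfs-unixfs-importer')

async function importWithOverrides (ipld, myChunker) {
  const source = [{
    path: 'foo/hello.txt',
    content: Buffer.from('hello world')
  }]

  const entries = []

  // Any of chunkValidator, chunker, bufferImporter, dagBuilder or
  // treeBuilder could be supplied here; this overrides just the chunker.
  for await (const entry of importer(source, ipld, { chunker: myChunker })) {
    entries.push(entry)
  }

  return entries
}
```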

