ipfs-unixfs-exporter
Advanced tools
Comparing version
@@ -0,1 +1,19 @@ | ||
<a name="0.35.0"></a> | ||
# [0.35.0](https://github.com/ipfs/js-ipfs-unixfs-exporter/compare/v0.34.0...v0.35.0) (2018-11-23) | ||
### Bug Fixes | ||
* export all files from hamt sharded directories ([4add865](https://github.com/ipfs/js-ipfs-unixfs-exporter/commit/4add865)) | ||
* increase test timeout ([0ae27e1](https://github.com/ipfs/js-ipfs-unixfs-exporter/commit/0ae27e1)) | ||
* support slashes in filenames ([b69abce](https://github.com/ipfs/js-ipfs-unixfs-exporter/commit/b69abce)) | ||
* use dag.getMany to avoid overloading the DHT, when it arrives ([f479d28](https://github.com/ipfs/js-ipfs-unixfs-exporter/commit/f479d28)), closes [ipfs/js-ipfs-unixfs-engine#216](https://github.com/ipfs/js-ipfs-unixfs-engine/issues/216) | ||
### Features | ||
* add `fullPath` option ([21dd221](https://github.com/ipfs/js-ipfs-unixfs-exporter/commit/21dd221)) | ||
<a name="0.34.0"></a> | ||
@@ -2,0 +20,0 @@ # [0.34.0](https://github.com/ipfs/js-ipfs-unixfs-engine/compare/v0.33.0...v0.34.0) (2018-11-12) |
{ | ||
"name": "ipfs-unixfs-exporter", | ||
"version": "0.34.0", | ||
"version": "0.35.0", | ||
"description": "JavaScript implementation of the UnixFs exporter used by IPFS", | ||
@@ -43,3 +43,3 @@ "leadMaintainer": "Alex Potsides <alex.potsides@protocol.ai>", | ||
"dirty-chai": "^2.0.1", | ||
"ipfs-unixfs-importer": "~0.34.0", | ||
"ipfs-unixfs-importer": "~0.36.0", | ||
"ipld": "~0.20.0", | ||
@@ -46,0 +46,0 @@ "ipld-dag-pb": "~0.15.0", |
@@ -107,2 +107,87 @@ # ipfs-unixfs-exporter | ||
### `fullPath` | ||
If specified, the exporter will emit an entry for every path component encountered. | ||
```javascript | ||
const exporter = require('ipfs-unixfs-exporter') | ||
const pull = require('pull-stream') | ||
const collect = require('pull-stream/sinks/collect') | ||
pull( | ||
exporter('QmFoo.../bar/baz.txt', ipld, { | ||
fullPath: true | ||
  }), | ||
collect((err, files) => { | ||
console.info(files) | ||
// [{ | ||
// depth: 0, | ||
// name: 'QmFoo...', | ||
// path: 'QmFoo...', | ||
// size: ... | ||
// hash: Buffer | ||
// content: undefined | ||
// type: 'dir' | ||
// }, { | ||
// depth: 1, | ||
// name: 'bar', | ||
// path: 'QmFoo.../bar', | ||
// size: ... | ||
// hash: Buffer | ||
// content: undefined | ||
// type: 'dir' | ||
// }, { | ||
// depth: 2, | ||
// name: 'baz.txt', | ||
// path: 'QmFoo.../bar/baz.txt', | ||
// size: ... | ||
// hash: Buffer | ||
// content: <Pull stream> | ||
// type: 'file' | ||
// }] | ||
// | ||
}) | ||
) | ||
``` | ||
### `maxDepth` | ||
If specified, the exporter will only emit entries up to the specified depth. | ||
```javascript | ||
const exporter = require('ipfs-unixfs-exporter') | ||
const pull = require('pull-stream') | ||
const collect = require('pull-stream/sinks/collect') | ||
pull( | ||
exporter('QmFoo.../bar/baz.txt', ipld, { | ||
fullPath: true, | ||
maxDepth: 1 | ||
  }), | ||
collect((err, files) => { | ||
console.info(files) | ||
// [{ | ||
// depth: 0, | ||
// name: 'QmFoo...', | ||
// path: 'QmFoo...', | ||
// size: ... | ||
// hash: Buffer | ||
// content: undefined | ||
// type: 'dir' | ||
// }, { | ||
// depth: 1, | ||
// name: 'bar', | ||
// path: 'QmFoo.../bar', | ||
// size: ... | ||
// hash: Buffer | ||
// content: undefined | ||
// type: 'dir' | ||
// }] | ||
// | ||
}) | ||
) | ||
``` | ||
[dag API]: https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/DAG.md | ||
@@ -109,0 +194,0 @@ [ipld-resolver instance]: https://github.com/ipld/js-ipld-resolver |
@@ -9,3 +9,3 @@ 'use strict' | ||
function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, parent, depth) { | ||
function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, options) { | ||
const accepts = pathRest[0] | ||
@@ -41,3 +41,3 @@ | ||
// place dir before if not specifying subtree | ||
if (!pathRest.length) { | ||
if (!pathRest.length || options.fullPath) { | ||
streams.unshift(pull.values([dir])) | ||
@@ -44,0 +44,0 @@ } |
@@ -9,3 +9,3 @@ 'use strict' | ||
function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag, parent, depth) { | ||
function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, options) { | ||
let dir | ||
@@ -37,3 +37,3 @@ if (!parent || (parent.path !== path)) { | ||
return { | ||
depth: depth + 1, | ||
depth: p ? depth + 1 : depth, | ||
name: p, | ||
@@ -54,3 +54,3 @@ path: pp, | ||
if (!pathRest.length) { | ||
if (!pathRest.length || options.fullPath) { | ||
streams.unshift(pull.values([dir])) | ||
@@ -57,0 +57,0 @@ } |
@@ -10,3 +10,3 @@ 'use strict' | ||
// Logic to export a single (possibly chunked) unixfs file. | ||
module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, offset, length) => { | ||
module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, options) => { | ||
const accepts = pathRest[0] | ||
@@ -28,2 +28,5 @@ | ||
let offset = options.offset | ||
let length = options.length | ||
if (offset < 0) { | ||
@@ -144,11 +147,22 @@ return pull.error(new Error('Offset must be greater than or equal to 0')) | ||
return pull( | ||
pull.values(filteredLinks), | ||
paramap((child, cb) => { | ||
dag.get(child.link.cid, (error, result) => cb(error, { | ||
start: child.start, | ||
end: child.end, | ||
node: result && result.value, | ||
size: child.size | ||
})) | ||
}) | ||
pull.once(filteredLinks), | ||
paramap((children, cb) => { | ||
dag.getMany(children.map(child => child.link.cid), (error, results) => { | ||
if (error) { | ||
return cb(error) | ||
} | ||
cb(null, results.map((result, index) => { | ||
const child = children[index] | ||
return { | ||
start: child.start, | ||
end: child.end, | ||
node: result, | ||
size: child.size | ||
} | ||
})) | ||
}) | ||
}), | ||
pull.flatten() | ||
) | ||
@@ -155,0 +169,0 @@ } |
@@ -34,3 +34,3 @@ 'use strict' | ||
base: pathBase, | ||
rest: pathRest.split('/').filter(Boolean) | ||
rest: toPathComponents(pathRest) | ||
} | ||
@@ -42,3 +42,4 @@ } | ||
offset: undefined, | ||
length: undefined | ||
length: undefined, | ||
fullPath: false | ||
} | ||
@@ -75,3 +76,3 @@ | ||
name: node.name, | ||
path: finalPathFor(node), | ||
path: options.fullPath ? node.path : finalPathFor(node), | ||
size: node.size, | ||
@@ -109,1 +110,9 @@ hash: node.multihash, | ||
} | ||
const toPathComponents = (path = '') => { | ||
// split on / unless escaped with \ | ||
return (path | ||
.trim() | ||
.match(/([^\\\][^/]|\\\/)+/g) || []) | ||
.filter(Boolean) | ||
} |
@@ -7,3 +7,3 @@ 'use strict' | ||
// Logic to export a single raw block | ||
module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, offset, length) => { | ||
module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, depth, options) => { | ||
const accepts = pathRest[0] | ||
@@ -17,2 +17,5 @@ | ||
let offset = options.offset | ||
let length = options.length | ||
if (offset < 0) { | ||
@@ -19,0 +22,0 @@ return pull.error(new Error('Offset must be greater than or equal to 0')) |
@@ -7,2 +7,3 @@ 'use strict' | ||
const CID = require('cids') | ||
const waterfall = require('async/waterfall') | ||
@@ -38,3 +39,3 @@ const resolvers = { | ||
if (item.object) { | ||
return cb(null, resolveItem(null, item.object, item, options.offset, options.length)) | ||
return cb(null, resolveItem(null, item.object, item, options)) | ||
} | ||
@@ -44,10 +45,6 @@ | ||
dag.get(cid, (err, node) => { | ||
if (err) { | ||
return cb(err) | ||
} | ||
// const name = item.fromPathRest ? item.name : item.path | ||
cb(null, resolveItem(cid, node.value, item, options.offset, options.length)) | ||
}) | ||
waterfall([ | ||
(done) => dag.get(cid, done), | ||
(node, done) => done(null, resolveItem(cid, node.value, item, options)) | ||
], cb) | ||
}), | ||
@@ -59,7 +56,18 @@ pull.flatten(), | ||
function resolveItem (cid, node, item, offset, length) { | ||
return resolve(cid, node, item.name, item.path, item.pathRest, item.size, dag, item.parent || parent, item.depth, offset, length) | ||
function resolveItem (cid, node, item, options) { | ||
return resolve({ | ||
cid, | ||
node, | ||
name: item.name, | ||
path: item.path, | ||
pathRest: item.pathRest, | ||
size: item.size, | ||
dag, | ||
parentNode: item.parent || parent, | ||
depth: item.depth, | ||
options | ||
}) | ||
} | ||
function resolve (cid, node, name, path, pathRest, size, dag, parentNode, depth, offset, length) { | ||
function resolve ({ cid, node, name, path, pathRest, size, dag, parentNode, depth, options }) { | ||
let type | ||
@@ -74,7 +82,10 @@ | ||
const nodeResolver = resolvers[type] | ||
if (!nodeResolver) { | ||
return pull.error(new Error('Unkown node type ' + type)) | ||
} | ||
const resolveDeep = createResolver(dag, options, depth, node) | ||
return nodeResolver(cid, node, name, path, pathRest, resolveDeep, size, dag, parentNode, depth, offset, length) | ||
return nodeResolver(cid, node, name, path, pathRest, resolveDeep, size, dag, parentNode, depth, options) | ||
} | ||
@@ -81,0 +92,0 @@ } |
Major refactor
Supply chain riskPackage has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
1014447
2407.66%20
53.85%9940
1888%208
69.11%1
Infinity%