Comparing version 0.0.3 to 0.0.4
@@ -5,2 +5,3 @@ 'use strict';
var path = require('./tools/path');
var clone = require('clone');
@@ -23,2 +24,6 @@ function Context (options) {
  this.options = function () {
    return clone(options);
  };
  this.filters = function () {
@@ -25,0 +30,0 @@ return options.filters || [];
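
The new `options()` accessor returns a deep copy of the constructor's `options`, so callers can inspect the configuration without being able to mutate the crawler's internal state. A minimal sketch of that pattern, assuming only the `Context` shape visible in the hunk above (the sample option values are illustrative):

var clone = require('clone');

function Context (options) {
  // hand out a fresh deep copy on every call,
  // so mutations by the caller never reach the original
  this.options = function () {
    return clone(options);
  };
}

var ctx = new Context({ root: '/tmp', filters: [] });
var copy = ctx.options();
copy.root = '/etc';                 // mutates the copy only
console.log(ctx.options().root);    // still '/tmp'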
@@ -14,2 +14,3 @@ 'use strict';
var self = this;
var streamEnd = this.end;
var context;
@@ -22,22 +23,48 @@
this._read = function _read () {
function crawl() {
  var path = context.folder.next();
  if (!context.file.hasNext() && context.folder.hasNext()) {
  if (path) {
    // all files has been streamed yet,
    // but some directories have not been crawled
    var path = context.folder.next();
    fs.list(context, path, function dirCrawlHandler(contentStats) {
      Object.getOwnPropertyNames(contentStats).forEach(function (contentPath) {
        var stats = contentStats[contentPath];
        self.push(JSON.stringify(stats));
        //console.log(' -------\n -- add content stats buffer:', stats.path.replace(context.options().root, ''));
        //console.log(' -- this._readableState.buffer length:', self._readableState.buffer.length);
      });
    });
  if (path) {
    // wait for directory listing to end
    self.pause();
    return true;
  } else {
    // nothing to crawl anymore
    // return EOF
    //console.log(' -------\n -- issue EOF:');
    return self.push(null);
  }
  }
  fs.list(context, path, function dirCrawlHandler() {
    // reactivate crawler
    self.resume();
  });
}
/*
TODO emit a custom "end" event
Readable.end = function end() {
  streamEnd();
  var report = {
    duration: new Date() - new Date(context.crawlSessionTime())
  };
  if (context.file.all().length) {
    report.data = context.file.all();
  }
  else if (!context.file.hasNext() && !this._readableState.buffer.length) {
    // the end of the directory structure has been reached
    console.log('EMIT END EVENT');
    self.emit('end', report);
};
*/
this._read = function _read () {
  var status = crawl();
  //console.log(' -- crawl status:', status);
  if (!status) {
    var report = {
@@ -51,10 +78,4 @@ duration: new Date() - new Date(context.crawlSessionTime())
    self.emit('end', report);
    //console.log(' -- stream ended:');
  }
  else if (context.file.hasNext()) {
    // emit next file stats
    var stats = context.file.next();
    self.push(JSON.stringify(stats));
  }
};
@@ -88,4 +109,4 @@
self._read();
self.resume();
//self._read();
//self.resume();
@@ -92,0 +113,0 @@ return self;
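
The net effect of this refactor: `_read` now delegates to `crawl()`, which pauses the stream while `fs.list` walks a directory and resumes it from `dirCrawlHandler`; when no folder is left to visit, `self.push(null)` signals EOF and an `'end'` event carrying a duration report is emitted. A consumer of the stream might look like the sketch below; the `require` name matches the package, but the factory signature and the shape of `report` are assumptions read off the hunks above, not a documented API:

var crawler = require('fs-crawler');

// hypothetical setup: the exact factory signature is an assumption
var stream = crawler({ root: '/var/log' });

stream.on('data', function (chunk) {
  // each chunk is one JSON.stringify'd stats object
  var stats = JSON.parse(String(chunk));
  console.log(stats.path);
});

stream.on('end', function (report) {
  // report.duration is derived from context.crawlSessionTime()
  console.log('crawl finished in', report.duration, 'ms');
});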
@@ -6,8 +6,8 @@ 'use strict';
exports.formatStats = function formatStats (absolutePath, rawStats, crawlSessionTime, folderId) {
exports.formatStats = function formatStats (absolutePath, rawStats, context) {
  var stats = {
    name: path.basename(absolutePath),
    path: absolutePath,
    folderId: folderId,
    crawlSessionTime: crawlSessionTime
    folderId: context.folderId(),
    crawlSessionTime: context.crawlSessionTime()
  };
@@ -20,3 +20,7 @@
  else {
    stats.stats = rawStats;
    if (!context.ignoreStats()) {
      // add the raw file stats
      stats.stats = rawStats;
    }
    stats.is = {
@@ -40,28 +44,24 @@ directory: rawStats.isDirectory(),
  if (!context.ignoreStats()) {
    try {
      stats = fs.lstatSync(absolutePath);
  try {
    stats = fs.lstatSync(absolutePath);
  }
  catch (error) {
    stats = error;
  }
  if (stats.isDirectory() && context.isTreeMode()) {
    // directories found on the actual depth
    // are part of the next depth's directories
    context.folder.add(absolutePath);
  }
  if (stats.isDirectory && stats.isDirectory() && context.isTreeMode()) {
    // directories found on the actual depth
    // are part of the next depth's directories
    context.folder.add(absolutePath);
  }
  fileStats = exports.formatStats(absolutePath, stats, context.crawlSessionTime(), context.folderId());
  fileStats = exports.formatStats(absolutePath, stats, context);
  if (fileStats.is.link) {
    var info = exports.getSymlinkInfo(absolutePath);
  if (fileStats.is.link) {
    var info = exports.getSymlinkInfo(absolutePath);
      fileStats.targetPath = info.target;
      fileStats.targetStats = info.stats || null;
    }
  }
  catch (error) {
    stats = error;
  }
    fileStats.targetPath = info.target;
    fileStats.targetStats = info.stats || null;
  }
  fileStats = exports.formatStats(absolutePath, stats, context.crawlSessionTime(), context.folderId());
  return fileStats;
@@ -68,0 +68,0 @@ };
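
With the signature change, `formatStats` receives the whole `context` and pulls `folderId()`, `crawlSessionTime()` and the new `ignoreStats()` flag from it, instead of being handed two loose values. The stub below shows the minimal context surface the new code path touches; the stub values and the module path are hypothetical:

var fs = require('fs');
var formatter = require('./lib/tools/stats'); // hypothetical path to this module

// minimal stub covering the calls visible in the hunk above
var context = {
  folderId: function () { return 1; },                  // hypothetical id
  crawlSessionTime: function () { return Date.now(); },
  ignoreStats: function () { return false; }            // keep rawStats in the result
};

var raw = fs.lstatSync('/tmp');
var stats = formatter.formatStats('/tmp', raw, context);
// stats.stats is set because ignoreStats() returned false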
{
  "name": "fs-crawler",
  "license": "MIT",
  "version": "0.0.3",
  "version": "0.0.4",
  "homepage": "https://github.com/luscus/fs-crawler",
@@ -15,2 +15,3 @@ "author": "https://github.com/luscus/fs-crawler/graphs/contributors",
  "dependencies": {
    "clone": "^1.0.2"
  },
@@ -17,0 +18,0 @@ "devDependencies": {
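
The caret range `^1.0.2` accepts any 1.x release at or above 1.0.2, which is why the dependency summary at the end of this report shows it resolving to `clone@1.0.4`. A quick dev-time check with the `semver` package (used here purely for illustration; it is not a dependency of fs-crawler):

var semver = require('semver');

console.log(semver.satisfies('1.0.4', '^1.0.2')); // true:  the resolved version fits
console.log(semver.satisfies('2.0.0', '^1.0.2')); // false: a major bump is excluded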
License Policy Violation (License)
This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package.
NPM Shrinkwrap (Supply chain risk)
Package contains a shrinkwrap file. This may allow the package to bypass normal install procedures.
Found 1 instance in 1 package.
+ Added: clone@^1.0.2
+ Added: clone@1.0.4 (transitive)