Comparing version 0.0.22 to 0.0.23
index.js
@@ -8,3 +8,3 @@ #! /usr/bin/env node
 , uriManager = require("./lib/uriManager")
+, Crawler = require("simplecrawler");
 //argv
@@ -15,2 +15,3 @@ var destination
 , fileNameLocal
+, spiderDomain
 , reqUrls = [ ]
@@ -27,4 +28,9 @@ , startPath;
 }
+//crawl domain and log
+if (val === "-rl") {
+var x = array.slice(index, index+1);
+return spiderDomain = array[index+1];
+}
 //push values to array for processing
-else if ( index > 1 && index !== array.indexOf(destination + 1) && index !== array.indexOf(destination)){
+else if ( index > 1) {
 reqUrls.push(val);
@@ -34,7 +40,30 @@ }
+//spider
+if(spiderDomain) {
+console.log("Crawling...".red);
+var spider = uriManager(spiderDomain);
+var crawler = Crawler.crawl(spider);
+crawler.interval = 500;
+crawler.on("fetchcomplete", function(queueItem, responseBuffer, response){
+console.log("Completed fetching resource:", queueItem.url);
+console.log("");
+//requrls.push(queueItem.url)
+});
+crawler.on("queueadd", function(data, response){
+console.log(response);
+});
+}
+if(spider) console.log(spider, "spider");
 //process urls
-var sendDownPipe = reqUrls.map(uriManager);
-var recersivePipe = require("./lib/recersivePipe")
-recersivePipe(sendDownPipe)
+if(typeof reqUrls && reqUrls.length > 0){
+var sendDownPipe = reqUrls.map(uriManager);
+var recersivePipe = require("./lib/recersivePipe");
+recersivePipe(sendDownPipe);
+}
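For readers new to simplecrawler, here is a minimal standalone sketch of the 0.3.x calls the spider block above relies on (the domain is a placeholder, not part of the diff):

```
var Crawler = require("simplecrawler");

// Crawler.crawl() builds a crawler for the URL and starts it immediately
var crawler = Crawler.crawl("http://example.com/");
crawler.interval = 500; // ms between requests, the same throttle the diff sets

// fired once for every fully downloaded resource
crawler.on("fetchcomplete", function(queueItem, responseBuffer, response){
    console.log("Completed fetching resource:", queueItem.url);
});

// fired when the queue is exhausted
crawler.on("complete", function(){
    console.log("Crawl finished");
});
```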
lib/recersivePipe.js
@@ -25,5 +25,16 @@ var request = require("request")
+function bytesToSize(bytes) {
+var k = 1000;
+var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
+if (bytes === 0) return '0 Bytes';
+var i = parseInt(Math.floor(Math.log(bytes) / Math.log(k)),10);
+return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i];
+}
 // Takes array of urls and pipes them to disk
+var time, diff, stateDisplay, fileSize;
 function recersivePipe(urls, distination){
+console.log(urls, "here");
+//start timer
+time = process.hrtime();
 //location to fs.write
@@ -39,15 +50,17 @@ var writePath = process.cwd().toString();
 }
 //check if the file already exists on disk
 if(fs.lstatSync(writePath).isFile()) {
 writePath = writePath + "(" + new Date() + ")"
 console.log("Duplicate File Name Found".red, writePath);
 }
 var r = progress(request(popped))
 .on("response", function(res){
-console.log("File Request: ", fileCounter++);
+if(urls.length > 1) console.log("%d Request: ", fileCounter++);
 })
 .on("progress", function (state) {
 //console.log('getting size in bytes', state.received);
 // The properties below can be null if the response does not contain
 // the content-length header
 //console.log('got all these bytes', state.total);
+console.log(" ", JSON.stringify(state));
+fileSize = state.total;
 //console.log('File Size', state.total + "bytes" , state.percent + "%");
 })
 .on("end", function(){
-console.log("Stream End")
+//console.log("Stream End")
 })
@@ -62,7 +75,10 @@ .on("data", function(chunk){
 .on("finish", function () {
-console.log("Pipe Closed".rainbow.bold + ' 😜');
+console.log("Download Complete".rainbow.bold + ' 😜');
+console.log("File Size".green, bytesToSize(fileSize));
+diff = process.hrtime(time);
+console.log('%d nanoseconds'.yellow, diff[0] * 1e9 + diff[1]);
 });
 if(urls.length === 0){
-console.log("All Requests Made".cyan);
+//console.log("All Requests Made".cyan);
 return
@@ -69,0 +85,0 @@ } else {
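The two additions to this file are easy to exercise on their own; a standalone sketch (not part of the package):

```
// bytesToSize() as added above; note it uses decimal units (k = 1000)
function bytesToSize(bytes) {
    var k = 1000;
    var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
    if (bytes === 0) return '0 Bytes';
    var i = parseInt(Math.floor(Math.log(bytes) / Math.log(k)), 10);
    return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i];
}
console.log(bytesToSize(2146505)); // "2.15 MB"

// The timing pattern: process.hrtime() returns [seconds, nanoseconds],
// and passing the start tuple back in yields the elapsed time.
var time = process.hrtime();
setTimeout(function(){
    var diff = process.hrtime(time);
    console.log('%d nanoseconds', diff[0] * 1e9 + diff[1]);
}, 100);
```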
lib/uriManager.js
@@ -15,4 +15,3 @@ var path = require("path");
-//Rebuild the uri based on rules from above
-console.log(url);
+//Rebuild the uri based on conditionals from above
 reqUrl = require("url").format(url);
@@ -19,0 +18,0 @@
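For context, `require("url").format()` is Node's legacy url API; a quick sketch of the parse/format round trip (the uriManager conditionals themselves are elided from this hunk):

```
var url = require("url");

var parts = url.parse("http://example.com/some/file.txt");
console.log(parts.protocol);    // 'http:'
console.log(parts.pathname);    // '/some/file.txt'

// format() is the inverse of parse(): it reassembles the object into a string
console.log(url.format(parts)); // 'http://example.com/some/file.txt'
```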
package.json
 {
 "name": "n-get",
-"version": "0.0.22",
+"version": "0.0.23",
 "homepage": "https://github.com/bingeboy/n-get",
@@ -24,3 +24,4 @@ "description": "pipe request to disk",
 "request-progress": "^0.3.1",
-"emoji": "^0.3.2"
+"emoji": "^0.3.2",
+"simplecrawler": "^0.3.6"
 },
@@ -27,0 +28,0 @@ "preferGlobal": "true",
README.md
@@ -19,3 +19,3 @@ #n-get
 ```
-Or from the git repo:
+Or from the repo if you are a dev or want to test the latest features:
 ```
@@ -26,3 +26,3 @@ $git clone https://github.com/bingeboy/n-get
 ```
-### How To Use
+### How To Use Basic Mode
 Download a single file
@@ -39,5 +39,12 @@ ```
 $nget [protocol]filePath [protocol]filePath2 [protocol]filePath3 ... -d [WritePath]
 ```
+### Advanced Mode
+All of the above plus spider crawling abilities (for now this only works from the repo):
+```
+$nget -rl domainToCrawl
+```
 * If no protocol is used in the file path http will be used by default
@@ -44,0 +51,0 @@ * If no writePath is provided current location will be used by default
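Putting the new README text together, a typical session might look like this (URLs are placeholders):

```
$nget http://example.com/a.txt example.com/b.txt -d ./downloads
$nget -rl example.com
```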
+ Added simplecrawler@^0.3.6
+ Added URIjs@1.16.1 (transitive)
+ Added simplecrawler@0.3.14 (transitive)