supercrawler
Advanced tools
Comparing version 0.10.0 to 0.10.1
@@ -313,3 +313,4 @@ var Crawler, | ||
encoding: null, | ||
followRedirect: Boolean(followRedirect) | ||
followRedirect: Boolean(followRedirect), | ||
gzip: true | ||
}).catch(function (err) { | ||
@@ -316,0 +317,0 @@ err = new error.RequestError("A request error occured. " + err.message); |
{ | ||
"name": "supercrawler", | ||
"description": "A web crawler. Supercrawler automatically crawls websites. Define custom handlers to parse content. Obeys robots.txt, rate limits and concurrency limits.", | ||
"version": "0.10.0", | ||
"version": "0.10.1", | ||
"homepage": "https://github.com/brendonboshell/supercrawler", | ||
@@ -6,0 +6,0 @@ "author": "Brendon Boshell <brendonboshell@gmail.com>", |
@@ -312,2 +312,7 @@ # Node.js Web Crawler | ||
### 0.10.1 | ||
* [Fixed] Request sends `Accept-Encoding: gzip, deflate` header, so the | ||
responses arrive compressed (saving data transfer). | ||
### 0.10.0 | ||
@@ -314,0 +319,0 @@ |
@@ -499,2 +499,18 @@ var proxyquire = require('proxyquire'), | ||
}); | ||
it("asks for a gzipped response", function (done) { | ||
var crawler = new Crawler({ | ||
interval: 10 | ||
}); | ||
crawler.start(); | ||
setTimeout(function () { | ||
crawler.stop(); | ||
sinon.assert.calledWith(requestSpy, sinon.match({ | ||
gzip: true | ||
})); | ||
done(); | ||
}, 100); | ||
}); | ||
}); | ||
@@ -501,0 +517,0 @@ |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
99053
2343
403