robots-parser
Advanced tools
Comparing version 2.1.0 to 2.1.1
{ | ||
"name": "robots-parser", | ||
"version": "2.1.0", | ||
"version": "2.1.1", | ||
"description": "Robots.txt parser.", | ||
@@ -5,0 +5,0 @@ "main": "index.js", |
@@ -98,2 +98,7 @@ # Robots Parser [](https://deepscan.io/dashboard/#view=project&pid=1275&bid=3378) [](https://github.com/samclarke/robots-parser/blob/master/license.md) | ||
### Version 2.1.1: | ||
* Fix bug that could be used to cause rule checking to take a long time | ||
– Thanks to @andeanfog | ||
### Version 2.1.0: | ||
@@ -111,3 +116,3 @@ | ||
* Update code to not use deprecated URL module APIs. | ||
* Update code to not use deprecated URL module APIs. | ||
– Thanks to @kdzwinel | ||
@@ -121,5 +126,5 @@ | ||
* Fixed bug with the "user-agent" rule being treated as case sensitive. | ||
* Fixed bug with the "user-agent" rule being treated as case sensitive. | ||
– Thanks to @brendonboshell | ||
* Improved test coverage. | ||
* Improved test coverage. | ||
– Thanks to @schornio | ||
@@ -126,0 +131,0 @@ |
@@ -28,3 +28,3 @@ var URL = require('url').URL; | ||
* | ||
* @param {string} line | ||
* @param {string} line | ||
* @return {string} | ||
@@ -61,3 +61,3 @@ * @private | ||
* Normalises the user-agent string by converting it to | ||
* lowercase and removing any version numbers. | ||
* lower case and removing any version numbers. | ||
* | ||
@@ -97,6 +97,6 @@ * @param {string} userAgent | ||
/** | ||
* Convert URL encodings to upport case. | ||
* | ||
* e.g.: %2a%ef becomes %2A%EF | ||
* | ||
* Convert URL encodings to support case. | ||
* | ||
* e.g.: %2a%ef becomes %2A%EF | ||
* | ||
* @param {string} path | ||
@@ -124,3 +124,4 @@ * @return {string} | ||
var regexSpecialChars = /[\-\[\]\/\{\}\(\)\+\?\.\\\^\$\|]/g; | ||
var wildCardPattern = /\*/g; | ||
// Treat consecutive wildcards as one (#12) | ||
var wildCardPattern = /\*+/g; | ||
var endOfLinePattern = /\\\$$/; | ||
@@ -251,3 +252,3 @@ | ||
this._sitemaps = []; | ||
this._preferedHost = null; | ||
this._preferredHost = null; | ||
@@ -320,3 +321,3 @@ parseRobots(contents || '', this); | ||
Robots.prototype.setPreferredHost = function (url) { | ||
this._preferedHost = url; | ||
this._preferredHost = url; | ||
}; | ||
@@ -415,3 +416,3 @@ | ||
Robots.prototype.getPreferredHost = function () { | ||
return this._preferedHost; | ||
return this._preferredHost; | ||
}; | ||
@@ -418,0 +419,0 @@ |
var robotsParser = require('../index'); | ||
var expect = require('chai').expect; | ||
var punycode = require('punycode'); | ||
@@ -275,3 +274,3 @@ | ||
]; | ||
var disallowed = [ | ||
@@ -278,0 +277,0 @@ 'http://www.example.com/%CF%80', |
31154
157