robots-txt-guard
Comparing version 0.0.2 to 0.1.0
@@ -69,5 +69,18 @@ 'use strict';
+    function isDissalowAll(userAgent) {
+        var group = findGroup(userAgent);
+        if (group) {
+            var allowRules = group.rules.filter(function (rule) {
+                return rule.allow;
+            });
+            return allowRules.length <= 0;
+        }
+        // no group matched? assume allowed
+        return false;
+    }
     return {
-        isAllowed: isAllowed
+        isAllowed: isAllowed,
+        isDissalowAll: isDissalowAll
     };
 };
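For context, this is how the new method would be called once a guard has been built. A minimal sketch, assuming the parsed-robots.txt input shape shown in the tests below and an isAllowed(userAgent, path) signature; the guard factory and rule objects come from this diff, while the sample agent and paths are illustrative:

var guard = require('robots-txt-guard');

// build a guard from an already-parsed robots.txt structure
var robotsTxt = guard({
    groups: [{
        agents: [ '*' ],
        rules: [
            { rule: 'disallow', path: '/' }
        ]
    }]
});

robotsTxt.isAllowed('somebot', '/fish');  // false: everything is disallowed
robotsTxt.isDissalowAll('somebot');       // true: no allow rule in the matched group

Returning false when no group matches keeps the method consistent with robots.txt semantics, where the absence of any matching rules means crawling is allowed.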
 {
   "name": "robots-txt-guard",
-  "version": "0.0.2",
+  "version": "0.1.0",
   "description": "Validate urls against robots.txt rules.",
@@ -14,5 +14,5 @@ "main": "lib/guard.js",
     "chai": "^1.9.1",
-    "mocha": "^1.18.2"
+    "mocha": "^2.0.1"
   },
   "dependencies": {}
 }
@@ -100,3 +100,26 @@ /*global describe, it*/
     });
+    it('should detect disallow all', function () {
+        // both groups should behave the same, regardless of the order of the rules
+        var robotsTxt = guard({
+            groups: [{
+                agents: [ '*' ],
+                rules: [
+                    { rule: 'disallow', path: '/' }
+                ]
+            }, {
+                agents: [ 'googlebot' ],
+                rules: [
+                    { rule: 'disallow', path: '/' },
+                    { rule: 'allow', path: '/fish' }
+                ]
+            }]
+        });
+        assert.isTrue(robotsTxt.isDissalowAll('somebot'));
+        assert.isFalse(robotsTxt.isDissalowAll('googlebot'));
+    });
 });
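The comment in the new test about rule order points at a property worth making explicit: since isDissalowAll only checks whether any allow rule exists in the matched group, reordering the rules must not change the result. A minimal sketch of that check, reusing the shapes from the test above (the reversed ordering is illustrative, not part of the diff):

var reversed = guard({
    groups: [{
        agents: [ 'googlebot' ],
        rules: [
            { rule: 'allow', path: '/fish' },  // allow listed first this time
            { rule: 'disallow', path: '/' }
        ]
    }]
});

// still false: an allow rule exists, regardless of its position
assert.isFalse(reversed.isDissalowAll('googlebot'));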