es6-crawler-detect
Advanced tools
Comparing version 3.1.2 to 3.1.3
@@ -0,0 +0,0 @@ --- |
@@ -0,0 +0,0 @@ --- |
@@ -0,0 +0,0 @@ # Contributor Covenant Code of Conduct |
@@ -0,0 +0,0 @@ 'use strict'; |
{ | ||
"name": "es6-crawler-detect", | ||
"version": "3.1.2", | ||
"version": "3.1.3", | ||
"description": "This is an ES6 adaptation of the original PHP library CrawlerDetect, this library will help you detect bots/crawlers/spiders via the useragent.", | ||
@@ -46,4 +46,3 @@ "main": "src/index.js", | ||
"webpack-merge": "^4.2.2" | ||
}, | ||
"dependencies": {} | ||
} | ||
} |
@@ -0,0 +0,0 @@ <p align="center"><a href="http://crawlerdetect.io/" target="_blank"><img src="https://cloud.githubusercontent.com/assets/340752/23082173/1bd1a396-f550-11e6-8aba-4d3c75edea2f.png" width="321" height="219" /></a><br><br> |
const Crawler = require("./lib/crawler") | ||
const vm = require('vm'); | ||
const CrawlerRunner = new vm.Script('new Crawler(req)'); | ||
module.exports = { | ||
Crawler, | ||
Crawler, | ||
middleware (req, res, next) { | ||
req.Crawler = new Crawler(req) | ||
next() | ||
} | ||
middleware(req, res, next) { | ||
req.Crawler = CrawlerRunner.runInThisContext({timeout: 100, microtaskMode: 'afterEvaluate'}) | ||
next() | ||
} | ||
} |
'use strict';

const Crawlers = require('./crawler/crawlers');
const Exclusions = require('./crawler/exclusions');
const Headers = require('./crawler/headers');

/**
 * Detects crawlers / bots / spiders from an HTTP request's user-agent
 * string, using the pattern lists provided by Crawlers / Exclusions /
 * Headers.
 */
class Crawler {
  /**
   * @param {object} [request]   HTTP request object (e.g. an Express `req`);
   *                             its `headers` are used when `headers` is absent.
   * @param {object} [headers]   Explicit HTTP headers keyed by header name.
   * @param {string} [userAgent] Explicit user agent; when absent it is
   *                             assembled from the UA-bearing headers.
   */
  constructor(request, headers, userAgent) {
    /**
     * Init classes
     */
    this._init();

    /**
     * This request must be an object
     */
    // Also reject null: `typeof null === 'object'` would have let null through.
    this.request = typeof request === 'object' && request !== null ? request : {};

    // The regex-list must not be used with g-flag!
    // See: https://stackoverflow.com/questions/1520800/why-does-a-regexp-with-global-flag-give-wrong-results
    this.compiledRegexList = this.compileRegex(this.crawlers.getAll(), 'i');

    // The exclusions should be used with g-flag in order to remove each value.
    this.compiledExclusions = this.compileRegex(this.exclusions.getAll(), 'g');

    /**
     * Set http headers
     */
    this.setHttpHeaders(headers);

    /**
     * Set userAgent
     */
    this.userAgent = this.setUserAgent(userAgent);
  }

  /**
   * Init Classes Instances
   */
  _init() {
    this.crawlers = new Crawlers();
    this.headers = new Headers();
    this.exclusions = new Exclusions();
  }

  /**
   * Join a pattern list into a single alternation regex.
   * @param {string[]} patterns
   * @param {string} flags
   * @returns {RegExp}
   */
  compileRegex(patterns, flags) {
    return new RegExp(patterns.join('|').trim(), flags);
  }

  /**
   * Set HTTP headers.
   * Falls back to the request's own headers when none are supplied.
   */
  setHttpHeaders(headers) {
    // Use the Request headers if httpHeaders is not defined
    if (typeof headers === 'undefined' || headers === null || Object.keys(headers).length === 0) {
      headers = Object.keys(this.request).length ? this.request.headers : {};
    }

    // Clear existing headers.
    // BUG FIX: this was `[]` — an array was being used as a string-keyed
    // dictionary. A plain object is the correct container here.
    this.httpHeaders = {};

    // Only save HTTP headers.
    for (const key in headers) {
      this.httpHeaders[key] = headers[key];
    }
  }

  /**
   * Set user agent.
   * When no explicit user agent is given, concatenate the values of all
   * UA-bearing headers (space-separated).
   */
  setUserAgent(userAgent) {
    if (typeof userAgent === 'undefined' || userAgent === null || !userAgent.length) {
      // BUG FIX: start from '' — previously `undefined += value` produced a
      // string beginning with the literal text "undefined".
      userAgent = '';
      for (const header of this.getUaHttpHeaders()) {
        const key = header.toLowerCase();
        // BUG FIX: the membership test used the lowercased key but the value
        // was read back with the original-cased key; read with the same
        // lowercased key that was tested.
        if (Object.keys(this.httpHeaders).indexOf(key) >= 0) {
          userAgent += this.httpHeaders[key] + ' ';
        }
      }
    }
    return userAgent;
  }

  /**
   * Get user agent headers
   */
  getUaHttpHeaders() {
    return this.headers.getAll();
  }

  /**
   * Check user agent string against the regex.
   * @param {string} [userAgent] explicit UA; defaults to the stored one.
   * @returns {boolean} true when the agent matches a crawler pattern.
   */
  isCrawler(userAgent = undefined) {
    let agent = typeof userAgent === 'undefined' || userAgent === null ? this.userAgent : userAgent;

    // Tolerate a missing stored user agent (e.g. constructed with no request).
    if (typeof agent !== 'string') {
      return false;
    }

    // ReDoS guard: refuse absurdly long strings before running the large
    // alternation regex.
    // BUG FIX: the guard previously inspected only the `userAgent` parameter,
    // so an oversized `this.userAgent` bypassed it entirely; check the
    // resolved agent instead.
    if (Buffer.byteLength(agent, 'utf8') > 4096) {
      return false;
    }

    // Strip benign browser/OS tokens before testing the crawler patterns.
    agent = agent.replace(this.compiledExclusions, '');

    if (agent.trim().length === 0) {
      return false;
    }

    const matches = this.compiledRegexList.exec(agent.trim());

    if (matches) {
      this.matches = matches;
    }

    return matches !== null ? (matches.length ? true : false) : false;
  }

  /**
   * Return the matches.
   * @returns {string|null|object} first matched substring, null when the last
   *          match was empty, or {} when isCrawler has never matched.
   */
  getMatches() {
    return this.matches !== undefined ? (this.matches.length ? this.matches[0] : null) : {};
  }
}

module.exports = Crawler;
@@ -0,0 +0,0 @@ 'use strict'; |
@@ -11,6 +11,6 @@ 'use strict'; | ||
this.data = ["Safari.[\\d\\.]*","Firefox.[\\d\\.]*"," Chrome.[\\d\\.]*","Chromium.[\\d\\.]*","MSIE.[\\d\\.]","Opera\\\/[\\d\\.]*","Mozilla.[\\d\\.]*","AppleWebKit.[\\d\\.]*","Trident.[\\d\\.]*","Windows NT.[\\d\\.]*","Android [\\d\\.]*","Macintosh.","Ubuntu","Linux","[ ]Intel","Mac OS X [\\d_]*","(like )?Gecko(.[\\d\\.]*)?","KHTML,","CriOS.[\\d\\.]*","CPU iPhone OS ([0-9_])* like Mac OS X","CPU OS ([0-9_])* like Mac OS X","iPod","compatible","x86_..","i686","x64","X11","rv:[\\d\\.]*","Version.[\\d\\.]*","WOW64","Win64","Dalvik.[\\d\\.]*"," \\.NET CLR [\\d\\.]*","Presto.[\\d\\.]*","Media Center PC","BlackBerry","Build","Opera Mini\\\/\\d{1,2}\\.\\d{1,2}\\.[\\d\\.]*\\\/\\d{1,2}\\.","Opera"," \\.NET[\\d\\.]*","cubot","; M bot","; CRONO","; B bot","; IDbot","; ID bot","; POWER BOT",";"]; | ||
this.data = ["Safari.[\\d\\.]*","Firefox.[\\d\\.]*"," Chrome.[\\d\\.]*","Chromium.[\\d\\.]*","MSIE.[\\d\\.]","Opera\\\/[\\d\\.]*","Mozilla.[\\d\\.]*","AppleWebKit.[\\d\\.]*","Trident.[\\d\\.]*","Windows NT.[\\d\\.]*","Android [\\d\\.]*","Macintosh.","Ubuntu","Linux","[ ]Intel","Mac OS X [\\d_]*","(like )?Gecko(.[\\d\\.]*)?","KHTML,","CriOS.[\\d\\.]*","CPU iPhone OS ([0-9_])* like Mac OS X","CPU OS ([0-9_])* like Mac OS X","iPod","compatible","x86_..","i686","x64","X11","rv:[\\d\\.]*","Version.[\\d\\.]*","WOW64","Win64","Dalvik.[\\d\\.]*"," \\.NET CLR [\\d\\.]*","Presto.[\\d\\.]*","Media Center PC","BlackBerry","Build","Opera Mini\\\/\\d{1,2}\\.\\d{1,2}\\.[\\d\\.]*\\\/\\d{1,2}\\.","Opera"," \\.NET[\\d\\.]*","cubot","; M bot","; CRONO","; B bot","; IDbot","; ID bot","; POWER BOT","kube-probe.[\\d\\.]*",";"]; | ||
} | ||
} | ||
module.exports = Exclusions; | ||
module.exports = Exclusions; |
@@ -0,0 +0,0 @@ 'use strict'; |
@@ -0,0 +0,0 @@ 'use strict'; |
@@ -0,0 +0,0 @@ var assert = require("assert"); |
@@ -0,0 +0,0 @@ const path = require('path') |
@@ -0,0 +0,0 @@ const merge = require('webpack-merge') |
@@ -0,0 +0,0 @@ const merge = require('webpack-merge') |
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Debug access
Supply chain risk: uses debug, reflection and dynamic code execution features.
Found 1 instance in 1 package
Major refactor
Supply chain risk: the package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
137604
0
303
1