Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

node-ajax-seo

Package Overview
Dependencies
Maintainers
2
Versions
10
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

node-ajax-seo - npm Package Compare versions

Comparing version 1.2.3 to 1.3.0

0

CHANGELOG.md

@@ -0,0 +0,0 @@ ## 1.2.3 (2016-01-13)

Eric Lara <ericzon@gmail.com>
Santi Pérez <santiperezfernandez@gmail.com>
module.exports = require('./lib/node-ajax-seo.js');

40

lib/node-ajax-seo.js

@@ -36,16 +36,5 @@

//console.log("node-ajax-seo", siteConfig);
var fragment = req.query._escaped_fragment_;
var crawlers = ['FacebookExternalHit', 'Twitterbot', 'Bingbot', 'Pinterest', 'Baiduspider', 'LinkedInBot', 'FlipboardProxy', 'WhatsApp'];
var isCrawler = checkCrawler(req);
// google crawler checking
var isCrawler = (fragment != undefined);
// crawler checking
// We don't put Google here because interferes with Google Img Proxy, with more time we can try to tune better (TODO)
crawlers.forEach(function(c){
var patt = new RegExp(c,"i");
isCrawler = isCrawler || (patt.test(req.headers['user-agent'])) ;
});
if(siteConfig.debug) console.log(siteConfig.appPrefix+'isCrawler',isCrawler);

@@ -58,3 +47,3 @@

res.setHeader('Content-Type', 'text/html');
res.sendfile(siteConfig.ajaxPath);
res.sendFile(siteConfig.ajaxPath);
}else{

@@ -70,9 +59,22 @@ fragment = req._parsedUrl.pathname;

// Decide whether the incoming request was made by a known crawler/bot.
// A request counts as a crawler when it carries the legacy Google AJAX-crawling
// `_escaped_fragment_` query parameter, or when its User-Agent header matches
// one of the known bot identifiers (case-insensitive substring match).
// Returns a boolean.
function checkCrawler(req){
	// Legacy AJAX-crawling scheme: the mere presence of the parameter
	// (even as an empty string) marks the request as a crawler.
	var escapedFragment = req.query._escaped_fragment_;
	if (escapedFragment != undefined) {
		return true;
	}
	// We don't put Google here because interferes with Google Img Proxy, with more time we can try to tune better (TODO)
	var botIdentifiers = ['Alexabot', 'FacebookExternalHit', 'FacebookBot', 'Twitterbot', 'Googlebot', 'Bingbot', 'Yahoo', 'Applebot', 'SemrushBot', 'Pinterest', 'Baiduspider', 'LinkedInBot', 'FlipboardProxy', 'FlipboardBot', 'WhatsApp', 'Telegram', 'Slackbot', 'Screaming Frog SEO Spider', 'UptimeRobot', 'Feedly', 'PaperLiBot', 'LoadImpact', 'GTmetrix'];
	// Case-insensitive User-Agent matching; stops at the first hit.
	return botIdentifiers.some(function(bot){
		var pattern = new RegExp(bot, "i");
		return pattern.test(req.headers['user-agent']);
	});
}
function isAjaxAllowedRequest(siteConfig, req){
var ajaxCondition = false;
if(siteConfig.nonAjaxCondition instanceof RegExp){
//if(siteConfig.ajaxCondition.pattern){
// /((^\/admin)|(\.)|(^\/$))/gi
// var urlTest = req.url.match(urlRegex);
// console.log("URLTEST -[[",urlTest,"]] test: ",(urlTest == null || urlTest.length == 0));
ajaxCondition = !siteConfig.nonAjaxCondition.test(req.url);

@@ -82,3 +84,3 @@ if(siteConfig.debug) console.log(siteConfig.appPrefix+"using REGEX condition",req.url);

}else if(siteConfig.nonAjaxCondition !== ""){
// using EVAL condition
// Example using EVAL condition
//ajaxCondition = (req.url.indexOf('.') == -1 && req.url != '/' && req.url.indexOf('/admin') == -1);

@@ -125,3 +127,3 @@ ajaxCondition = eval(siteConfig.nonAjaxCondition);

};
res.sendfile(fragment, options, cbk);
res.sendFile(fragment, options, cbk);
} catch (err) {

@@ -128,0 +130,0 @@ console.log(siteConfig.appPrefix+"static page is not found! :( ",err);

{
"name": "node-ajax-seo",
"version": "1.2.3",
"description": "It deals with the most popular crawlers, redirecting them to static directory and serving fresh pages to human users.",
"version": "1.3.0",
"description": "It deals with the most popular crawlers (Google, Twitter, Fb, LinkedIn, ...) redirecting them to static files but serving fresh pages to human users.",
"main": "index.js",

@@ -6,0 +6,0 @@ "scripts": {

@@ -0,0 +0,0 @@ node-ajax-seo

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc