express-robots-txt
Comparing version 0.3.0 to 0.4.0
index.js

```diff
@@ -20,3 +20,3 @@ var fs = require('fs');
 module.exports = function(robots) {
-  var app = require('express')();
+  var router = require('express').Router();
@@ -30,3 +30,3 @@ if(robots) {
-  app.get('/robots.txt', function(req, res) {
+  router.get('/robots.txt', function(req, res) {
     res.header('Content-Type', 'text/plain');
@@ -36,3 +36,3 @@ res.send(robots);
-  return app;
+  return router;
 };
@@ -39,0 +39,0 @@
```
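The index.js change above swaps the throwaway express app for an `express.Router()`, so the value returned by the middleware factory is mounted on the caller's application instead of nesting a second app. A minimal usage sketch under that assumption (the host app, port, and `Disallow` value are illustrative; the config keys match those exercised in the tests below):

```js
// Illustrative host application mounting the middleware from 0.4.0,
// which now returns an express.Router that handles GET /robots.txt.
var express = require('express');
var robots = require('express-robots-txt');

var app = express();

// Same factory API as before: pass a config object (or an array of them),
// or a path to an existing robots.txt file.
app.use(robots({ UserAgent: '*', Disallow: '/private' }));

app.listen(3000);
```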
package.json

```diff
 {
   "name": "express-robots-txt",
-  "version": "0.3.0",
+  "version": "0.4.0",
   "description": "Express middleware to serve and generate robots.txt",
@@ -10,3 +10,3 @@ "main": "index.js",
   "scripts": {
-    "test": "mocha"
+    "test": "jest"
   },
@@ -32,3 +32,3 @@ "repository": {
     "express": "^4.12.1",
-    "mocha": "^5.2.0",
+    "jest": "^23.3.0",
     "supertest": "^3.0.0"
@@ -38,3 +38,8 @@ },
     "express": "^4.12.1"
   },
+  "jest": {
+    "setupTestFrameworkScriptFile":
+      "<rootDir>/test/setup-test-framework-script.js",
+    "testMatch": ["<rootDir>/test/index.js"]
+  }
 }
```
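The new `jest` block limits test discovery to `test/index.js` and loads `test/setup-test-framework-script.js` before the tests run. That setup file is not shown in this comparison, but the test suite below still uses chai-style assertions (`expect(...).to.equal(...)`), which Jest's built-in `expect` does not provide, so the script presumably swaps in chai's. A plausible sketch, clearly an assumption rather than the file's actual contents:

```js
// test/setup-test-framework-script.js -- assumed contents, not shown in the diff.
// Overrides Jest's global `expect` with chai's so the existing
// `.to.equal(...)` assertions keep working under the Jest runner.
global.expect = require('chai').expect;
```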
README.md

```diff
@@ -1,2 +0,2 @@
-# express-robots-txt [![npm version](https://badge.fury.io/js/express-robots-txt.svg)](https://badge.fury.io/js/express-robots-txt)
+# express-robots-txt [![npm version](https://badge.fury.io/js/express-robots-txt.svg)](https://badge.fury.io/js/express-robots-txt) [![CircleCI](https://circleci.com/gh/modosc/express-robots-txt.svg?style=shield)](https://circleci.com/gh/modosc/express-robots-txt)
@@ -3,0 +3,0 @@ Express middleware for generating a robots.txt or responding with an existing file. Forked from [weo-edu/express-robots](https://github.com/weo-edu/express-robots).
```
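As the README line above notes, the middleware either generates a robots.txt from a config or serves an existing file; the tests that follow exercise both forms. A brief sketch of the file-backed form (the path is illustrative):

```js
// Serving a robots.txt that already exists on disk (path is illustrative).
var express = require('express');
var robots = require('express-robots-txt');

var app = express();
app.use(robots(__dirname + '/config/robots.txt'));
```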
test/index.js

```diff
@@ -1,9 +0,8 @@
-var fs = require('fs');
-var expect = require('chai').expect;
-var supertest = require('supertest');
-var robots = require('../');
-var express = require('express');
+var fs = require("fs");
+var supertest = require("supertest");
+var robots = require("../");
+var express = require("express");
 var app;
-describe('express-robots', function() {
+describe("express-robots", () => {
   function createSuperTest(robots) {
@@ -14,122 +13,144 @@ var app = express();
   }
-  beforeEach(function () {
+  beforeEach(() => {
     app = express();
   });
-  it('should work', function(done) {
+  test("should work", done => {
     app.use(robots());
-    var request = createSuperTest(robots({UserAgent: '*', Disallow: '/'}));
-    request
-      .get('/robots.txt')
-      .end(function(err, res) {
-        expect(res.status).to.equal(200);
-        expect(res.headers['content-type']).to.equal('text/plain; charset=utf-8');
-        expect(res.text).to.equal('User-agent: *\nDisallow: /');
-        done();
-      });
+    var request = createSuperTest(robots({ UserAgent: "*", Disallow: "/" }));
+    request.get("/robots.txt").end(function(err, res) {
+      expect(res.status).to.equal(200);
+      expect(res.headers["content-type"]).to.equal("text/plain; charset=utf-8");
+      expect(res.text).to.equal("User-agent: *\nDisallow: /");
+      done();
+    });
   });
-  it('should work with a crawl delay', function(done) {
-    var request = createSuperTest(robots({UserAgent: '*', CrawlDelay: '5'}));
-    request
-      .get('/robots.txt')
-      .end(function(err, res) {
-        expect(res.status).to.equal(200);
-        expect(res.headers['content-type']).to.equal('text/plain; charset=utf-8');
-        expect(res.text).to.equal('User-agent: *\nCrawl-delay: 5');
-        done();
-      });
+  test("should work with a crawl delay", done => {
+    var request = createSuperTest(robots({ UserAgent: "*", CrawlDelay: "5" }));
+    request.get("/robots.txt").end(function(err, res) {
+      expect(res.status).to.equal(200);
+      expect(res.headers["content-type"]).to.equal("text/plain; charset=utf-8");
+      expect(res.text).to.equal("User-agent: *\nCrawl-delay: 5");
+      done();
+    });
   });
-  it('should work with multiple crawl delays', function(done) {
-    var request = createSuperTest(robots([
-      {UserAgent: '*', CrawlDelay: '5'},
-      {UserAgent: 'Foo', CrawlDelay: '10'}
-    ]));
-    request
-      .get('/robots.txt')
-      .end(function(err, res) {
-        expect(res.status).to.equal(200);
-        expect(res.headers['content-type']).to.equal('text/plain; charset=utf-8');
-        expect(res.text).to.equal('User-agent: *\nCrawl-delay: 5\nUser-agent: Foo\nCrawl-delay: 10');
-        done();
-      });
+  test("should work with multiple crawl delays", done => {
+    var request = createSuperTest(
+      robots([
+        { UserAgent: "*", CrawlDelay: "5" },
+        { UserAgent: "Foo", CrawlDelay: "10" }
+      ])
+    );
+    request.get("/robots.txt").end(function(err, res) {
+      expect(res.status).to.equal(200);
+      expect(res.headers["content-type"]).to.equal("text/plain; charset=utf-8");
+      expect(res.text).to.equal(
+        "User-agent: *\nCrawl-delay: 5\nUser-agent: Foo\nCrawl-delay: 10"
+      );
+      done();
+    });
   });
-  it('should work with a sitemap', function(done) {
-    var Sitemap = 'https://nowhere.com/sitemap.xml';
-    var request = createSuperTest(robots({UserAgent: '*', Sitemap: Sitemap}));
-    request
-      .get('/robots.txt')
-      .end(function(err, res) {
-        expect(res.status).to.equal(200);
-        expect(res.headers['content-type']).to.equal('text/plain; charset=utf-8');
-        expect(res.text).to.equal('User-agent: *\nSitemap: ' + Sitemap);
-        done();
-      });
+  test("should work with a sitemap", done => {
+    var Sitemap = "https://nowhere.com/sitemap.xml";
+    var request = createSuperTest(robots({ UserAgent: "*", Sitemap: Sitemap }));
+    request.get("/robots.txt").end(function(err, res) {
+      expect(res.status).to.equal(200);
+      expect(res.headers["content-type"]).to.equal("text/plain; charset=utf-8");
+      expect(res.text).to.equal("User-agent: *\nSitemap: " + Sitemap);
+      done();
+    });
   });
-  it('should work with multiple sitemaps', function(done) {
-    var Sitemaps = ['https://nowhere.com/sitemap.xml', 'https://nowhere.com/sitemap2.xml'];
+  test("should work with multiple sitemaps", done => {
+    var Sitemaps = [
+      "https://nowhere.com/sitemap.xml",
+      "https://nowhere.com/sitemap2.xml"
+    ];
-    var request = createSuperTest(robots({UserAgent: '*', Sitemap: Sitemaps}));
-    request
-      .get('/robots.txt')
-      .end(function(err, res) {
-        expect(res.status).to.equal(200);
-        expect(res.headers['content-type']).to.equal('text/plain; charset=utf-8');
-        expect(res.text).to.equal('User-agent: *\nSitemap: ' + Sitemaps[0] + '\nSitemap: ' + Sitemaps[1]);
-        done();
-      });
+    var request = createSuperTest(
+      robots({ UserAgent: "*", Sitemap: Sitemaps })
+    );
+    request.get("/robots.txt").end(function(err, res) {
+      expect(res.status).to.equal(200);
+      expect(res.headers["content-type"]).to.equal("text/plain; charset=utf-8");
+      expect(res.text).to.equal(
+        "User-agent: *\nSitemap: " + Sitemaps[0] + "\nSitemap: " + Sitemaps[1]
+      );
+      done();
+    });
   });
-  it('should work with sitemaps in multiple configs', function(done) {
-    var Sitemaps = ['https://nowhere.com/sitemap.xml', 'https://nowhere.com/sitemap2.xml'];
+  test("should work with sitemaps in multiple configs", done => {
+    var Sitemaps = [
+      "https://nowhere.com/sitemap.xml",
+      "https://nowhere.com/sitemap2.xml"
+    ];
-    var request = createSuperTest(robots([{UserAgent: '*', Sitemap: Sitemaps[0]}, {UserAgent: 'Foo', Sitemap: Sitemaps[1]}]));
-    request
-      .get('/robots.txt')
-      .end(function(err, res) {
-        expect(res.status).to.equal(200);
-        expect(res.headers['content-type']).to.equal('text/plain; charset=utf-8');
-        expect(res.text).to.equal('User-agent: *\nUser-agent: Foo\nSitemap: ' + Sitemaps[0] + '\nSitemap: ' + Sitemaps[1]);
-        done();
-      });
+    var request = createSuperTest(
+      robots([
+        { UserAgent: "*", Sitemap: Sitemaps[0] },
+        { UserAgent: "Foo", Sitemap: Sitemaps[1] }
+      ])
+    );
+    request.get("/robots.txt").end(function(err, res) {
+      expect(res.status).to.equal(200);
+      expect(res.headers["content-type"]).to.equal("text/plain; charset=utf-8");
+      expect(res.text).to.equal(
+        "User-agent: *\nUser-agent: Foo\nSitemap: " +
+          Sitemaps[0] +
+          "\nSitemap: " +
+          Sitemaps[1]
+      );
+      done();
+    });
   });
-  it('should work with multiple sitemaps in multiple configs', function(done) {
-    var Sitemaps = ['https://nowhere.com/sitemap.xml', 'https://nowhere.com/sitemap2.xml'];
+  test("should work with multiple sitemaps in multiple configs", done => {
+    var Sitemaps = [
+      "https://nowhere.com/sitemap.xml",
+      "https://nowhere.com/sitemap2.xml"
+    ];
-    var request = createSuperTest(robots([{UserAgent: '*', Sitemap: Sitemaps}, {UserAgent: 'Foo', Sitemap: Sitemaps}]));
-    request
-      .get('/robots.txt')
-      .end(function(err, res) {
-        expect(res.status).to.equal(200);
-        expect(res.headers['content-type']).to.equal('text/plain; charset=utf-8');
-        expect(res.text).to.equal('User-agent: *\nUser-agent: Foo\nSitemap: ' + Sitemaps[0] + '\nSitemap: ' + Sitemaps[1] + '\nSitemap: ' + Sitemaps[0] + '\nSitemap: ' + Sitemaps[1]);
-        done();
-      });
+    var request = createSuperTest(
+      robots([
+        { UserAgent: "*", Sitemap: Sitemaps },
+        { UserAgent: "Foo", Sitemap: Sitemaps }
+      ])
+    );
+    request.get("/robots.txt").end(function(err, res) {
+      expect(res.status).to.equal(200);
+      expect(res.headers["content-type"]).to.equal("text/plain; charset=utf-8");
+      expect(res.text).to.equal(
+        "User-agent: *\nUser-agent: Foo\nSitemap: " +
+          Sitemaps[0] +
+          "\nSitemap: " +
+          Sitemaps[1] +
+          "\nSitemap: " +
+          Sitemaps[0] +
+          "\nSitemap: " +
+          Sitemaps[1]
+      );
+      done();
+    });
   });
-  it('should work with files', function() {
-    var request = createSuperTest(robots(__dirname + '/fixtures/robots.txt'));
-    request
-      .get('/robots.txt')
-      .end(function(err, res) {
-        expect(res.status).to.equal(200);
-        expect(res.text).to.equal(fs.readFileSync(__dirname + '/fixtures/robots.txt', 'utf8'));
-      });
+  test("should work with files", () => {
+    var request = createSuperTest(robots(__dirname + "/fixtures/robots.txt"));
+    request.get("/robots.txt").end(function(err, res) {
+      expect(res.status).to.equal(200);
+      expect(res.text).to.equal(
+        fs.readFileSync(__dirname + "/fixtures/robots.txt", "utf8")
+      );
+    });
   });
-  it('should respond with an empty file if nothing is specified', function() {
+  test("should respond with an empty file if nothing is specified", () => {
     var request = createSuperTest(robots());
-    request
-      .get('/robots.txt')
-      .end(function(err, res) {
-        expect(res.status).to.equal(200);
-        expect(res.text).to.equal('');
-      });
+    request.get("/robots.txt").end(function(err, res) {
+      expect(res.status).to.equal(200);
+      expect(res.text).to.equal("");
+    });
   });
 });
```
Package alerts

- License Policy Violation: this package is not allowed per your license policy. Review the package's license to ensure compliance. Found 1 instance in 1 package.
- Filesystem access (supply chain risk): accesses the file system, and could potentially read sensitive data. Found 1 instance in 1 package.