backstop-crawl
Advanced tools
Comparing version 2.0.0 to 2.1.0
25
index.js
@@ -15,17 +15,28 @@ #!/usr/bin/env node | ||
--outfile, -o Save the backstop config to this file | ||
--debug Logs out errors produced while crawling | ||
--ignore-robots Ignore the site's robots.txt | ||
--ignore-ssl-errors Treat any certificate as valid (e.g. self-signed | ||
or expired) | ||
--debug Logs out errors produced while crawling | ||
--allow-subdomains Allow crawling links found to subdomains of the | ||
current domain | ||
--limit-similar[=3] Limits the number of similar URLs to a set number | ||
Defaults to 3 | ||
e.g. /blog/1, /blog/2, /blog/3 | ||
Examples | ||
$ backstop-crawl http://localhost | ||
`, { | ||
alias: { | ||
o: 'outfile', | ||
}, | ||
}); | ||
`, | ||
{ | ||
alias: { | ||
o: 'outfile', | ||
}, | ||
}); | ||
if (cli.flags.limitSimilar) { | ||
if (!Number.isInteger(cli.flags.limitSimilar)) { | ||
// Set default if true | ||
cli.flags.limitSimilar = 3; | ||
} | ||
} | ||
if (cli.input.length) { | ||
@@ -35,3 +46,3 @@ if (validurl(cli.input[0])) { | ||
} else { | ||
console.error(`Error: "${cli.input[0]}" isn't a valid URL`); | ||
console.error(`> Error: "${cli.input[0]}" isn't a valid URL`); | ||
process.exit(1); | ||
@@ -38,0 +49,0 @@ } |
@@ -11,6 +11,7 @@ 'use strict'; | ||
const defaultConf = require('./default-config'); | ||
const limitSimilar = require('./limit-similar'); | ||
const EXT_BLACKLIST = /\.pdf|\.js|\.css|\.png|\.jpg|\.jpeg|\.gif|\.json|\.xml|\.txt$/i; | ||
const SPINNER_WIDTH = 2; | ||
const urls = []; | ||
let urls = []; | ||
@@ -87,2 +88,10 @@ module.exports = function crawl (url, flags) { | ||
crawler.on('complete', () => { | ||
if (flags.limitSimilar) { | ||
spinner.stopAndPersist({ | ||
symbol: '>', | ||
text: `Limiting similar urls to ${flags.limitSimilar} of each`, | ||
}); | ||
urls = limitSimilar(urls, flags.limitSimilar); | ||
} | ||
defaultConf.scenarios = urls; | ||
@@ -89,0 +98,0 @@ const path = dirname(outfile); |
{ | ||
"name": "backstop-crawl", | ||
"version": "2.0.0", | ||
"version": "2.1.0", | ||
"description": "Crawl a site to generate a backstopjs config", | ||
@@ -52,8 +52,10 @@ "repository": "https://github.com/fffunction/backstop-crawl", | ||
"mkpath": "1.0.0", | ||
"object.entries": "^1.0.4", | ||
"ora": "1.1.0", | ||
"simplecrawler": "1.0.3", | ||
"url-parse": "^1.1.7", | ||
"valid-url": "1.0.9" | ||
}, | ||
"devDependencies": { | ||
"ava": "git://github.com/avajs/ava.git#magic-assert", | ||
"ava": "0.18.1", | ||
"coveralls": "2.11.15", | ||
@@ -60,0 +62,0 @@ "eslint": "3.14.1", |
@@ -17,6 +17,2 @@ # $ backstop-crawl | ||
``` | ||
$ npm install --global backstop-crawl | ||
``` | ||
``` | ||
❯ backstop-crawl | ||
@@ -31,6 +27,11 @@ | ||
--outfile, -o Save the backstop config to this file | ||
--debug Logs out errors produced while crawling | ||
--ignore-robots Ignore the site's robots.txt | ||
--ignore-ssl-errors Treat any certificate as valid (e.g. self-signed | ||
or expired) | ||
--debug Logs out errors produced while crawling | ||
--allow-subdomains Allow crawling links found to subdomains of the | ||
current domain | ||
--limit-similar[=3] Limits the number of similar URLs to a set number | ||
Defaults to 3 | ||
e.g. /blog/1, /blog/2, /blog/3 | ||
@@ -45,2 +46,2 @@ Examples | ||
MIT © fffunction [fffunction.co](fffunction.co) | ||
MIT © fffunction [fffunction.co](https://fffunction.co) |
10661
6
217
45
10
+ Addedobject.entries@^1.0.4
+ Addedurl-parse@^1.1.7
+ Addedcall-bind@1.0.7(transitive)
+ Addeddefine-data-property@1.1.4(transitive)
+ Addeddefine-properties@1.2.1(transitive)
+ Addedes-define-property@1.0.0(transitive)
+ Addedes-errors@1.3.0(transitive)
+ Addedes-object-atoms@1.0.0(transitive)
+ Addedget-intrinsic@1.2.4(transitive)
+ Addedgopd@1.0.1(transitive)
+ Addedhas-property-descriptors@1.0.2(transitive)
+ Addedhas-proto@1.0.3(transitive)
+ Addedhas-symbols@1.0.3(transitive)
+ Addedobject-keys@1.1.1(transitive)
+ Addedobject.entries@1.1.8(transitive)
+ Addedquerystringify@2.2.0(transitive)
+ Addedrequires-port@1.0.0(transitive)
+ Addedset-function-length@1.2.2(transitive)
+ Addedurl-parse@1.5.10(transitive)