linkinator
Comparing version 2.3.0 to 2.4.0
@@ -25,29 +25,34 @@ #!/usr/bin/env node
Flags
--concurrency
The number of connections to make simultaneously. Defaults to 100.
--config
Path to the config file to use. Looks for \`linkinator.config.json\` by default.
--concurrency
The number of connections to make simultaneously. Defaults to 100.
--format, -f
Return the data in CSV or JSON format.
--help
Show this command.
--markdown
Automatically parse and scan markdown if scanning from a location on disk.
--recurse, -r
Recursively follow links on the same root domain.
--skip, -s
List of urls in regexy form to not include in the check.
--server-root
When scanning a local directory, customize the location on disk
where the server is started. Defaults to the path passed in [LOCATION].
--format, -f
Return the data in CSV or JSON format.
--silent
Only output broken links.
--skip, -s
List of urls in regexy form to not include in the check.
--timeout
Request timeout in ms. Defaults to 0 (no timeout).
--markdown
Automatically parse and scan markdown if scanning from a location on disk.
--help
Show this command.
Examples
@@ -69,2 +74,3 @@ $ linkinator docs/
    markdown: { type: 'boolean' },
    serverRoot: { type: 'string' },
},
@@ -116,2 +122,3 @@ booleanDefault: undefined,
    concurrency: Number(flags.concurrency),
    serverRoot: flags.serverRoot,
};
@@ -118,0 +125,0 @@ if (flags.skip) {
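Taken together, these hunks register the new `--server-root` option as a string flag and forward it to the checker as `serverRoot`. The sketch below is a hedged reconstruction of that wiring, not the package's actual cli.js: only the `booleanDefault`, `markdown: { type: 'boolean' }`, and `serverRoot: { type: 'string' }` entries and the `serverRoot: flags.serverRoot` pass-through come from the diff; the surrounding `meow` call and the example location are assumptions.

```ts
import meow from 'meow';

// Assumed minimal CLI setup; only the flag entries marked below appear in the diff.
const cli = meow(`Usage: linkinator LOCATION [ --arguments ]`, {
  booleanDefault: undefined,
  flags: {
    markdown: { type: 'boolean' },  // from the diff
    serverRoot: { type: 'string' }, // new in 2.4.0, exposed as --server-root
  },
});

// Mirrors the second hunk: the parsed flag rides along on the checker options.
const options = {
  path: cli.input[0],
  markdown: cli.flags.markdown,
  serverRoot: cli.flags.serverRoot,
};

console.log(options);
```

An equivalent invocation would look like `linkinator index.md --markdown --server-root docs` (illustrative; the Examples section of the help text is not shown in this diff).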
@@ -10,3 +10,4 @@ export interface Flags {
    markdown?: boolean;
    serverRoot?: string;
}
export declare function getConfig(flags: Flags): Promise<Flags>;
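The `--config` flag above suggests these `Flags` can also come from a `linkinator.config.json` file via `getConfig`. If so, the new field should be expressible there as well. The snippet below is a hedged sketch only: the key names are assumed to mirror the camelCase `Flags` properties, and the `ConfigFileFlags` interface is invented for illustration rather than imported from the package.

```ts
// Hypothetical shape of a linkinator.config.json, written as a typed object.
interface ConfigFileFlags {
  format?: string;
  markdown?: boolean;
  serverRoot?: string; // new in 2.4.0
  concurrency?: number;
}

const exampleConfig: ConfigFileFlags = {
  format: 'json',
  markdown: true,
  serverRoot: './docs',
  concurrency: 50,
};

// Writing this object to linkinator.config.json would be the file-based
// equivalent of passing the corresponding CLI flags.
console.log(JSON.stringify(exampleConfig, null, 2));
```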
@@ -14,2 +14,3 @@ /// <reference types="node" />
    linksToSkip?: string[] | ((link: string) => Promise<boolean>);
    serverRoot?: string;
}
@@ -54,2 +55,13 @@ export declare enum LinkState {
    /**
     * Validate the provided flags all work with each other.
     * @param options CheckOptions passed in from the CLI (or API)
     */
    private validateOptions;
    /**
     * Figure out which directory should be used as the root for the web server,
     * and how that impacts the path to the file for the first request.
     * @param options CheckOptions passed in from the CLI or API
     */
    private getServerRoot;
    /**
     * Spin up a local HTTP server to serve static requests from disk
@@ -56,0 +68,0 @@ * @param root The local path that should be mounted as a static web server
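On the API side, the visible change to the typings is the optional `serverRoot` member on the check options, alongside two new private helpers on the checker class. A hedged sketch of how the extended options type reads in practice — the `CheckOptions` name and its export from the package root are assumptions based on these declaration files, and the paths are placeholders:

```ts
import { CheckOptions } from 'linkinator';

// Type-checks against the 2.4.0 typings: serverRoot is optional, and
// linksToSkip can still be either a list of patterns or an async predicate.
const options: CheckOptions = {
  path: 'index.md',
  serverRoot: './docs', // new in 2.4.0
  linksToSkip: async link => link.includes('example.invalid'),
};

console.log(options);
```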
@@ -33,2 +33,3 @@ "use strict";
async check(options) {
    this.validateOptions(options);
    options.linksToSkip = options.linksToSkip || [];
@@ -38,16 +39,7 @@ options.path = path.normalize(options.path);
    if (!options.path.startsWith('http')) {
        let localDirectory = options.path;
        let localFile = '';
        const s = await stat(options.path);
        if (s.isFile()) {
            const pathParts = options.path.split(path.sep);
            localFile = path.sep + pathParts[pathParts.length - 1];
            localDirectory = pathParts
                .slice(0, pathParts.length - 1)
                .join(path.sep);
        }
        const serverOptions = await this.getServerRoot(options);
        const port = options.port || 5000 + Math.round(Math.random() * 1000);
        server = await this.startWebServer(localDirectory, port, options.markdown);
        server = await this.startWebServer(serverOptions.serverRoot, port, options.markdown);
        enableDestroy(server);
        options.path = `http://localhost:${port}${localFile}`;
        options.path = `http://localhost:${port}${serverOptions.path}`;
    }
@@ -82,2 +74,39 @@ const queue = new p_queue_1.default({
/**
 * Validate the provided flags all work with each other.
 * @param options CheckOptions passed in from the CLI (or API)
 */
validateOptions(options) {
    if (options.serverRoot && options.path.startsWith('http')) {
        throw new Error("'serverRoot' cannot be defined when the 'path' points to an HTTP endpoint.");
    }
}
/**
 * Figure out which directory should be used as the root for the web server,
 * and how that impacts the path to the file for the first request.
 * @param options CheckOptions passed in from the CLI or API
 */
async getServerRoot(options) {
    if (options.serverRoot) {
        const filePath = options.path.startsWith('/')
            ? options.path
            : '/' + options.path;
        return {
            serverRoot: options.serverRoot,
            path: filePath,
        };
    }
    let localDirectory = options.path;
    let localFile = '';
    const s = await stat(options.path);
    if (s.isFile()) {
        const pathParts = options.path.split(path.sep);
        localFile = path.sep + pathParts[pathParts.length - 1];
        localDirectory = pathParts.slice(0, pathParts.length - 1).join(path.sep);
    }
    return {
        serverRoot: localDirectory,
        path: localFile,
    };
}
/**
 * Spin up a local HTTP server to serve static requests from disk
@@ -84,0 +113,0 @@ * @param root The local path that should be mounted as a static web server
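The behavior these two new methods add can be summarized with a hedged end-to-end sketch. `LinkChecker` and `check()` are the package's documented entry point; the paths and URL are placeholders, and the expected outcomes in the comments are read off the `validateOptions`/`getServerRoot` logic above rather than quoted from the package docs.

```ts
import { LinkChecker } from 'linkinator';

const checker = new LinkChecker();

async function demo() {
  // With an explicit serverRoot, the path is resolved against that root:
  // ./docs is mounted as the web root and /index.md is the first request.
  await checker.check({ path: 'index.md', serverRoot: './docs', markdown: true });

  // Without serverRoot, a file path is split into directory + file name,
  // so this also serves ./docs and requests /index.md first.
  await checker.check({ path: 'docs/index.md', markdown: true });

  // Mixing serverRoot with an http(s) target is rejected by validateOptions
  // before any scanning starts.
  await checker
    .check({ path: 'http://example.com', serverRoot: './docs' })
    .catch(err => console.error(err.message));
  // => 'serverRoot' cannot be defined when the 'path' points to an HTTP endpoint.
}

demo().catch(console.error);
```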
{
  "name": "linkinator",
  "description": "Find broken links, missing images, etc in your HTML. Scurry around your site and find all those broken links.",
  "version": "2.3.0",
  "version": "2.4.0",
  "license": "MIT",
@@ -6,0 +6,0 @@ "repository": "JustinBeckwith/linkinator",
@@ -5,6 +5,7 @@ # 🐿 linkinator
[![npm version](https://img.shields.io/npm/v/linkinator.svg)](https://www.npmjs.org/package/linkinator)
[![Build Status](https://api.cirrus-ci.com/github/JustinBeckwith/linkinator.svg)](https://cirrus-ci.com/github/JustinBeckwith/linkinator)
[![Build Status](https://github.com/JustinBeckwith/linkinator/workflows/ci/badge.svg)](https://github.com/JustinBeckwith/linkinator/actions)
[![codecov](https://codecov.io/gh/JustinBeckwith/linkinator/branch/master/graph/badge.svg)](https://codecov.io/gh/JustinBeckwith/linkinator)
[![Dependency Status](https://img.shields.io/david/JustinBeckwith/linkinator.svg)](https://david-dm.org/JustinBeckwith/linkinator)
[![Known Vulnerabilities](https://snyk.io/test/github/JustinBeckwith/linkinator/badge.svg)](https://snyk.io/test/github/JustinBeckwith/linkinator)
[![Code Style: Google](https://img.shields.io/badge/code%20style-google-blueviolet.svg)](https://github.com/google/gts)
[![semantic-release](https://img.shields.io/badge/%20%20%F0%9F%93%A6%F0%9F%9A%80-semantic--release-e10079.svg)](https://github.com/semantic-release/semantic-release)
@@ -30,3 +31,3 @@
```sh
```
$ linkinator LOCATION [ --arguments ]
@@ -41,31 +42,35 @@
--config
Path to the config file to use. Looks for `linkinator.config.json` by default.
--concurrency
The number of connections to make simultaneously. Defaults to 100.
--recurse, -r
Recursively follow links on the same root domain.
--config
Path to the config file to use. Looks for `linkinator.config.json` by default.
--format, -f
Return the data in CSV or JSON format.
--help
Show this command.
--skip, -s
List of urls in regexy form to not include in the check.
--include, -i
List of urls in regexy form to include. The opposite of --skip.
--format, -f
Return the data in CSV or JSON format.
--markdown
Automatically parse and scan markdown if scanning from a location on disk.
--recurse, -r
Recursively follow links on the same root domain.
--server-root
When scanning a local directory, customize the location on disk
where the server is started. Defaults to the path passed in [LOCATION].
--silent
Only output broken links.
--skip, -s
List of urls in regexy form to not include in the check.
--timeout
Request timeout in ms. Defaults to 0 (no timeout).
--markdown
Automatically parse and scan markdown if scanning from a location on disk.
--help
Show this command.
```
@@ -148,2 +153,4 @@
- `recurse` (boolean) - By default, all scans are shallow. Only the top level links on the requested page will be scanned. By setting `recurse` to `true`, the crawler will follow all links on the page, and continue scanning links **on the same domain** for as long as it can go. Results are cached, so no worries about loops.
- `serverRoot` (string) - When scanning a local directory, customize the location on disk
where the server is started. Defaults to the path passed in `path`.
- `timeout` (number) - By default, requests made by linkinator do not time out (or follow the settings of the OS). This option (in milliseconds) will fail requests after the configured amount of time.
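As a hedged companion to these bullets, the three documented options combine like this; the paths are placeholders, and `LinkChecker`/`check()` follow the package's documented API:

```ts
import { LinkChecker } from 'linkinator';

async function scan() {
  const checker = new LinkChecker();
  const result = await checker.check({
    path: 'index.html',
    serverRoot: './public', // serve ./public instead of the path's own directory
    recurse: true,          // follow same-domain links beyond the first page
    timeout: 5000,          // fail requests that take longer than 5 seconds
  });
  return result.passed;
}

scan().then(ok => console.log(ok ? 'PASSED' : 'FAILED'));
```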
@@ -256,2 +263,2 @@ - `markdown` (boolean) - Automatically parse and scan markdown if scanning from a location on disk.
[MIT](LICENSE)
[MIT](LICENSE.md)