couchdb-stat-collector
Advanced tools
Comparing version 2.1.0-alpha to 2.1.0
148
bin.js
@@ -17,5 +17,2 @@ #!/usr/bin/env node | ||
const chalk = require('chalk') | ||
const fs = require('fs') | ||
const path = require('path') | ||
const pkg = require('./package.json') | ||
@@ -26,27 +23,72 @@ const run = require('.') | ||
// check for in-process collection | ||
const currentDirectory = fs.readdirSync(process.cwd()) | ||
.filter(file => fs.statSync(path.join(process.cwd(), file)).isDirectory()) | ||
.filter(directory => directory.includes('_current'))[0] | ||
// YARRRRRGS | ||
yargs | ||
.version(pkg.version) | ||
.config() | ||
.example(`Usage: ${pkg.name} [couchUrl] [options]`) | ||
.example(`Usage: ${pkg.name} http://admin:password@localhost:5984`) | ||
.option('resume', { | ||
alias: 'R', | ||
description: 'Whether to resume collection from a prior, halted run.', | ||
default: !!process.env.RESUME | ||
.options({ | ||
resume: { | ||
alias: 'R', | ||
description: 'Whether to resume collection from a prior, halted run.', | ||
default: !!process.env.RESUME | ||
}, | ||
service: { | ||
alias: 's', | ||
description: 'Send collector results to a remote endpoint instead of writing it into a file.', | ||
default: process.env.COUCH_COLLECT_SERVICE || false, | ||
implies: ['service-url', 'service-token'] | ||
}, | ||
'service-token': { | ||
alias: 'k', | ||
desciption: 'Token to authenticate against a remote endpoint, use with --service', | ||
default: process.env.COUCH_COLLECT_SERVICE_TOKEN || false | ||
}, | ||
'service-url': { | ||
alias: 'u', | ||
description: 'Remote endpoint to send reports to, if --service is used.', | ||
default: process.env.COUCH_COLLECT_SERVICE_URL || 'https://api.nhabcd.net', | ||
coerce: function (host) { | ||
if (/api\/v1\/collect$/.test(host)) { | ||
return host | ||
} else { | ||
if (host.slice(-1) === '/') host = host.slice(0, -1) | ||
return `${host}/api/v1/collect` | ||
} | ||
} | ||
}, | ||
'task-suite': { | ||
alias: 't', | ||
description: 'Name of the task suite to use. "Basic" does not scan databases. "Full" reports document conflicts.', | ||
default: 'standard', | ||
choices: ['full', 'standard', 'basic'] | ||
}, | ||
'filter-dbs': { | ||
alias: 'f', | ||
description: 'Investigate only databases that match the given regex pattern.', | ||
default: process.env.COUCH_COLLECT_FILTER_DBS || undefined, | ||
coerce: (val) => { return val ? new RegExp(val) : null } | ||
}, | ||
'filter-limit': { | ||
alias: 'l', | ||
description: 'Investigate no more than this number of databases.', | ||
default: process.env.COUCH_COLLECT_FILTER_LIMIT || 25 | ||
}, | ||
'negative-filter-dbs': { | ||
alias: 'F', | ||
description: 'Do not investigate databases that match the given regex pattern', | ||
default: process.env.COUCH_COLLECT_NEGATIVE_FILTER_DBS || undefined, | ||
coerce: (val) => { return val ? new RegExp(val) : null } | ||
}, | ||
'negative-filter-limit': { | ||
alias: 'L', | ||
description: 'Investigate no more than this number of databases which match a given regex pattern', | ||
default: process.env.COUCH_COLLECT_NEGATIVE_FILTER_LIMIT || 0 | ||
}, | ||
verbose: { | ||
alias: 'v', | ||
description: 'Log detailed information about the investigation as it proceeds.', | ||
default: false, | ||
boolean: true | ||
} | ||
}) | ||
.option('parallel', { | ||
alias: 'P', | ||
description: 'Number of requests to perform in parallel at most.', | ||
default: process.env.PARALLEL_LIMIT || 10 | ||
}) | ||
.option('conflicts', { | ||
alias: 'C', | ||
description: 'Collect information about document conflicts. This involves posting an index to each database.', | ||
default: false | ||
}) | ||
.command({ | ||
@@ -57,3 +99,3 @@ command: '$0 [couchUrl]', | ||
yargs.positional('couchUrl', { | ||
describe: 'URL to the CouchDB cluster. Must include credentials.', | ||
describe: 'URL to the CouchDB cluster. Must include credentials. Can be set with the $COUCH_URL environment variable.', | ||
type: 'string', | ||
@@ -66,20 +108,44 @@ coerce: function (arg) { | ||
}, | ||
handler: function (argv) { | ||
const dateString = new Date().toISOString().split('.')[0].replace(/-/g, '').replace('T', '_').replace(/:/g, '') | ||
let instanceName = argv.couchUrl.host.replace(':', '_') + '_' + dateString | ||
if (currentDirectory && argv.resume) { | ||
console.log('Continuing collection in ' + chalk.green(currentDirectory) + '.') | ||
instanceName = currentDirectory.replace('_current', '') | ||
handler: async function (argv) { | ||
if (argv.verbose) { | ||
// enable logging | ||
process.env.LOG = true | ||
} | ||
run({ | ||
couchUrl: argv.couchUrl.href.slice(0, -1), // strip trailing slash | ||
resumeOptionSet: argv.resume, | ||
instanceName, | ||
parallelLimit: argv.parallel, | ||
conflicts: argv.conflicts | ||
}, (err) => { | ||
if (err) console.trace(err) | ||
process.exit(err ? 1 : 0) | ||
}) | ||
let couchUrl = argv.couchUrl | ||
const taskSuite = argv['task-suite'] | ||
const filterDbs = argv['filter-dbs'] | ||
const filterLimit = argv['filter-limit'] | ||
const negativeFilterDbs = argv['negative-filter-dbs'] | ||
const negativeFilterLimit = argv['negative-filter-limit'] | ||
const { resume, service } = argv | ||
const { href } = couchUrl | ||
couchUrl = href.slice(-1) === '/' ? href.slice(0, -1) : href | ||
const options = { | ||
couchUrl, | ||
filterDbs, | ||
filterLimit, | ||
negativeFilterDbs, | ||
negativeFilterLimit, | ||
isResume: resume, | ||
service, | ||
serviceToken: argv['service-token'], | ||
serviceUrl: argv['service-url'], | ||
taskSuite | ||
} | ||
try { | ||
await run(options) | ||
} catch (error) { | ||
if (error.code === 'unauthorized') { | ||
console.error('I am not authorized to investigate this CouchDB installation. Did you provide accurate credentials?') | ||
} else if (error.code === 'MODULE_NOT_FOUND') { | ||
console.error('I could not find the task file you specified. Does it exist?') | ||
} else { | ||
console.error('UNEXPECTED ERROR') | ||
console.error(error.message) | ||
if (error.req) { | ||
console.error(JSON.stringify(error.req, undefined, 2)) | ||
} | ||
} | ||
process.exit(1) | ||
} | ||
} | ||
@@ -86,0 +152,0 @@ }) |
186
index.js
@@ -1,79 +0,127 @@ | ||
'use strict' | ||
const fs = require('fs') | ||
const md5 = require('md5') | ||
const path = require('path') | ||
const rimraf = require('rimraf') | ||
const url = require('url') | ||
const { gzipSync } = require('zlib') | ||
module.exports = run | ||
const getOsInfo = require('./lib/os') | ||
const Task = require('./lib/task') | ||
const { log, request, getResults } = require('./lib/util') | ||
const { version } = require('./package.json') | ||
const async = require('async') | ||
const taskSuites = { | ||
basic: require('./tasks/basic.json'), | ||
standard: require('./tasks/standard.json'), | ||
full: require('./tasks/full.json') | ||
} | ||
// helpers | ||
const zipIt = require('./lib/zip-it') | ||
module.exports = async function ({ | ||
cleanUp, | ||
couchUrl, | ||
filterDbs, | ||
filterLimit, | ||
instanceName, | ||
isResume, | ||
negativeFilterDbs, | ||
negativeFilterLimit, | ||
outDir, | ||
save, | ||
service, | ||
serviceToken, | ||
serviceUrl, | ||
tasks, | ||
taskSuite | ||
}) { | ||
const date = Date.now() | ||
const { host } = url.parse(couchUrl) | ||
try { | ||
log('Beginning investigation...') | ||
// retrieve tasks | ||
if (service) { | ||
try { | ||
const { body } = await request({ url: serviceUrl, json: true }) | ||
tasks = body | ||
} catch (error) { | ||
console.log(error) | ||
tasks = taskSuites.standard | ||
} | ||
} else if (taskSuite in taskSuites) { | ||
tasks = taskSuites[taskSuite] | ||
} else if (!tasks) { | ||
console.log(`Task suite ${taskSuite} not recognized. Using standard suite...`) | ||
tasks = taskSuites.standard | ||
} | ||
// sanitize couchUrl before assigning instanceName, use url.parse | ||
instanceName = instanceName || md5(couchUrl) | ||
const inProgressDir = path.join(`${instanceName}_in_progress`) | ||
// maybe clean up after last time | ||
if (!isResume) { | ||
rimraf.sync(inProgressDir) | ||
fs.mkdirSync(inProgressDir) | ||
} | ||
// execute tasks | ||
await Task.execute(tasks, { | ||
couchUrl, | ||
filterDbs, | ||
filterLimit, | ||
inProgressDir, | ||
instanceName, | ||
negativeFilterDbs, | ||
negativeFilterLimit | ||
}) | ||
// tasks | ||
const couchInfo = require('./tasks/couch/info') | ||
const couchConfig = require('./tasks/couch/config') | ||
const couchAllDbs = require('./tasks/couch/all-dbs') | ||
const couchSession = require('./tasks/couch/session') | ||
const couchMembership = require('./tasks/couch/membership') | ||
// const couchStats = require('./tasks/couch/stats') | ||
// const couchSystem = require('./tasks/couch/system') | ||
const clusterNode = require('./tasks/cluster/node') | ||
const dbInfo = require('./tasks/db/info') | ||
const dbDesignDocs = require('./tasks/db/design-docs') | ||
const dbDesignDocsInfo = require('./tasks/db/design-docs-info') | ||
const dbSecurity = require('./tasks/db/security') | ||
const dbConflicts = require('./tasks/db/conflicts') | ||
function run (options, callback) { | ||
options.parallelLimit = options.parallelLimit || 10 | ||
async.waterfall([ | ||
couchSession.bind(null, options), | ||
couchInfo.bind(null, options), | ||
function (info, callback) { | ||
var tasks = { | ||
// couch | ||
allDbs: couchAllDbs.bind(null, options), | ||
// db | ||
db: function (allDbs, callback) { | ||
async.eachLimit(allDbs, options.parallelLimit, function (dbName, callback) { | ||
async.applyEach({ | ||
info: dbInfo, | ||
design: function (options, dbName, callback) { | ||
async.applyEachSeries([ | ||
dbDesignDocs, | ||
dbDesignDocsInfo | ||
], options, dbName, callback) | ||
}, | ||
security: dbSecurity | ||
}, options, dbName, callback) | ||
}, callback) | ||
// retrieve results | ||
const results = getResults(inProgressDir).reduce((results, { name, result }) => { | ||
results[name] = result | ||
return results | ||
}, {}) | ||
// get OS-level stats | ||
results._os = await getOsInfo() | ||
log(`Processed ${tasks.length} task${tasks.length ? 's' : ''}.`) | ||
// post-process the results: save locally or report to service | ||
const zjson = gzipSync(JSON.stringify({ version, results, date, host })) | ||
if (service) { | ||
// post to service | ||
const { response, body } = await request({ | ||
url: serviceUrl, | ||
method: 'POST', | ||
body: zjson, | ||
headers: { | ||
'Authorization': Buffer.from(serviceToken).toString('base64') | ||
} | ||
} | ||
if (info.version > '2') { | ||
// 2.x | ||
tasks.membership = couchMembership.bind(null, options) | ||
tasks.cluster = function (membership, callback) { | ||
clusterNode(options, callback) | ||
} | ||
if (options.conflicts) { | ||
tasks.conflicts = function (allDbs, callback) { | ||
let task = dbConflicts.bind(null, options) | ||
async.eachLimit(allDbs, options.parallelLimit, task, callback) | ||
} | ||
} | ||
}) | ||
if (response.statusCode !== 200) { | ||
console.error(`Unexpected failure posting results to service. [code: ${response.statusCode}`) | ||
console.error(body) | ||
} else { | ||
// 1.x | ||
tasks.config = couchConfig.bind(null, options) | ||
// tasks.stats = couchStats.bind(null, options) | ||
// tasks.system = couchSystem.bind(null, options) | ||
console.log(`Investigation results posted to ${serviceUrl}`) | ||
} | ||
async.autoInject(tasks, callback) | ||
} else if (save !== false) { | ||
// write to disk | ||
const outFile = path.resolve(path.join(outDir || '.', `${instanceName}-${date}.json.gz`)) | ||
fs.writeFileSync(outFile, zjson) | ||
console.log(`Investigation results zipped and saved to ${outFile}`) | ||
} | ||
], function (error) { | ||
if (error) { | ||
callback(error) | ||
} else { | ||
zipIt(options, callback) | ||
// clean up | ||
if (cleanUp !== false) rimraf.sync(inProgressDir) | ||
return { version, results, date, host } | ||
} catch (error) { | ||
if (service) { | ||
// upload error report | ||
const { message, code, reason } = error | ||
const zjson = gzipSync(JSON.stringify({ date, error: { message, code, reason }, host, version })) | ||
await request({ | ||
url: serviceUrl, | ||
method: 'POST', | ||
body: zjson, | ||
headers: { | ||
'Authorization': Buffer.from(serviceToken).toString('base64') | ||
} | ||
}) | ||
console.error('Error reported to service:') | ||
console.error({ message, ...error }) | ||
} | ||
}) | ||
throw error | ||
} | ||
} |
{ | ||
"name": "couchdb-stat-collector", | ||
"version": "2.1.0-alpha", | ||
"version": "2.1.0", | ||
"description": "A stat collection tool for CouchDB.", | ||
@@ -9,4 +9,11 @@ "main": "index.js", | ||
"start": "node bin.js", | ||
"test": "standard && dependency-check . --unused --no-dev && tap test/index.js", | ||
"test-coverage": "npm test -- --coverage-report=text" | ||
"test": "standard && dependency-check . --unused --no-dev && mocha --recursive && npm audit", | ||
"test-coverage": "npm test -- --coverage-report=text", | ||
"travis-deploy-once": "npx travis-deploy-once --pro", | ||
"semantic-release": "npx semantic-release", | ||
"prerelease": "mkdir -p build", | ||
"release": "npm run release:linux && npm run release:windows && npm run release:mac", | ||
"release:linux": "npx nexe@3.0.0-beta.9 -i bin.js -t linux-x64 -o build/couchdb-stat-collector-linux-x64", | ||
"release:windows": "npx nexe@3.0.0-beta.9 -i bin.js -t windows-x64 -o build/couchdb-stat-collector-windows-x64.exe", | ||
"release:mac": "npx nexe@3.0.0-beta.9 -i bin.js -t mac-x64 -o build/couchdb-stat-collector-mac-x64" | ||
}, | ||
@@ -21,19 +28,22 @@ "keywords": [ | ||
"dependencies": { | ||
"async": "^2.5.0", | ||
"chalk": "^2.1.0", | ||
"fs-walk": "0.0.2", | ||
"graceful-fs": "^4.1.11", | ||
"request": "^2.83.0", | ||
"rimraf": "^2.6.2", | ||
"yargs": "^10.1.1" | ||
"check-disk-space": "^1.5.0", | ||
"handlebars": "^4.0.12", | ||
"jsonpath": "^1.0.0", | ||
"md5": "^2.2.1", | ||
"request": "^2.88.0", | ||
"rimraf": "^2.6.3", | ||
"semver": "^5.6.0", | ||
"yargs": "^12.0.5" | ||
}, | ||
"devDependencies": { | ||
"dependency-check": "^3.0.0", | ||
"hapi": "^16.6.2", | ||
"lolex": "^2.1.3", | ||
"mock-fs": "^4.4.1", | ||
"simple-mock": "^0.8.0", | ||
"standard": "^11.0.0", | ||
"tap": "^11.0.0" | ||
"dependency-check": "^3.3.0", | ||
"mocha": "^5.2.0", | ||
"mock-fs": "^4.7.0", | ||
"nock": "^10.0.6", | ||
"standard": "^12.0.1" | ||
}, | ||
"repository": { | ||
"type": "git", | ||
"url": "https://github.com/neighbourhoodie/couchdb-stat-collector.git" | ||
} | ||
} |
165
README.md
@@ -5,3 +5,4 @@ # couchdb-stat-collector | ||
[![Coverage Status](https://coveralls.io/repos/github/neighbourhoodie/couchdb-stat-collector/badge.svg?branch=master&t=vpkela)](https://coveralls.io/github/neighbourhoodie/couchdb-stat-collector?branch=master) | ||
[![Greenkeeper badge](https://badges.greenkeeper.io/neighbourhoodie/couchdb-stat-collector.svg?token=af224994b54bcdad24bcc85c92c43542176f70749a1c4a0051fe9836150a058b&ts=1509385229661)](https://greenkeeper.io/) | ||
[![JS Standard Style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg)](https://github.com/feross/standard) | ||
[![Greenkeeper badge](https://badges.greenkeeper.io/neighbourhoodie/couchdb-stat-collector.svg?token=af224994b54bcdad24bcc85c92c43542176f70749a1c4a0051fe9836150a058b&ts=1542892976355)](https://greenkeeper.io/) | ||
@@ -17,3 +18,3 @@ > A CLI tool for collecting statistics about a CouchDB node or cluster. | ||
```bash | ||
npm i -g couchdb-stat-collector | ||
$ npm i -g couchdb-stat-collector | ||
``` | ||
@@ -24,3 +25,3 @@ | ||
```bash | ||
couchdb-stat-collector http://admin:password@localhost:5984 | ||
$ couchdb-stat-collector http://admin:password@localhost:5984 | ||
``` | ||
@@ -33,7 +34,151 @@ | ||
```bash | ||
✅ All done! | ||
$ couchdb-stat-collector | ||
Investigation results zipped and saved to /path/to/${md5(COUCH_URL)}.json.gz | ||
``` | ||
⚠️ Please send ${host}_${port}_${date}_${time}.gz back to Neighbourhoodie Software GmbH <couchdb@neighbourhood.ie> for analysis. | ||
For more usage information, run `couchdb-stat-collector --help`. | ||
### Reporting to an Analyser Service | ||
You can tell the collector to send its investigation to an analyser service using an access token provided by the service. For example: | ||
```bash | ||
$ couchdb-stat-collector \ | ||
--service \ | ||
--service-url https://localhost:5001 \ | ||
--service-token ... | ||
Investigation results posted to https://localhost:5001/api/v1/collect | ||
``` | ||
Now you will see this report on your service dashboard. We recommend you attach a command like this to `cron` or a similar program in order to investigate your CouchDB installation regularly, so that any issues can be identified and addressed proactively. | ||
To protect values like your CouchDB credentials and analyser service access token, we recommend placing any configuration values for the collector into a config file. For example, consider this `config.json`: | ||
```json | ||
{ | ||
"couchUrl": "http://USERNAME:PASSWORD@localhost:5984", | ||
"service": "true", | ||
"service-url": "http://localhost:5001", | ||
"service-token": "..." | ||
} | ||
``` | ||
Then you can tell the collector to use this file, like this: | ||
```bash | ||
$ couchdb-stat-collector --config config.json | ||
``` | ||
By setting the config file's permission settings, you can appropriately lock it down for use only by authorized parties. As CouchDB often runs under a `couchdb` user, we recommend placing this config file in that user's purview, setting its access permissions to `400`, and then setting up a cronjob under that user to run regularly. Here is an example of how to do that: | ||
```bash | ||
$ su couchdb | ||
$ cd | ||
$ touch config.json | ||
# then, edit the config file appropriately | ||
$ chmod 400 config.json | ||
``` | ||
Now you can create a cronjob from the `couchdb` user which uses that file. A cronjob that investigates your cluster hourly would look like this: | ||
``` | ||
0 * * * * /PATH/TO/couchdb-stat-collector --config /PATH/TO/config.json | ||
``` | ||
### Task Files | ||
The collector uses a "task file" to conduct its investigation. By default, it uses `tasks.json` in the project's root directory. It is a JSON file containing an array where each element is an object representing a task for the collector to execute. | ||
Tasks have these recognized properties: | ||
- `access`: an object containing keys with [jsonpath](https://github.com/dchester/jsonpath#readme) expressions. Task results are passed through these expressions and the results are mapped to the associated keys, for example: given the results of querying `/_all_dbs`, the access object `{ db: '$..*'}` will result in `[{ db: '_replicator' }, ...]`. This is used to template `route` and `name` in sub-tasks. | ||
- `after`: an array of other task objects to run after this one. Arguments parsed with `access` from the task's results are merged with the task's own arguments and passed to these sub-tasks. | ||
- `name`: a friendly identifier for the task. Defaults to the task's route, and so is useful when that information may not reliably indicate the task's nature or purpose. Like `route`, this property is templated using [handlebars](https://www.npmjs.com/package/handlebars). | ||
- `mode`: an optional key that can be set to `try` in order to indicate that a task should be skipped if it fails. By default, a task that fails halts the investigation. | ||
- `query`: an object whose properties are converted into a querystring and used in the task's request. | ||
- `route`: a URL fragment indicating the path of the task's request. Like `name`, this property is templated using [handlebars](https://www.npmjs.com/package/handlebars). | ||
- `save`: a boolean flag for whether or not the result of a task should be saved. Useful for running tasks solely to populate their sub-tasks. | ||
- `version`: a [semver](https://semver.org/) version identifier that is tested against a `version` argument (if it is present) which should be acquired by querying the cluster's root endpoint (`/`). There is an example of this in the test suite under "should do version checking". | ||
#### Name and Route Templating | ||
The keys `name` and `route` are templated using [handlebars](https://www.npmjs.com/package/handlebars) and the variables gathered by running a task result through the `access` filter plus any variables from parent tasks. The `e` helper encodes the parameter using `encodeURIComponent` so that database and document names can be properly mapped to a URL. In the example, the task which gathers a database's design documents uses the name `{{e db}}/_design_docs` to indicate its nature, whereas the route is `{{e db}}/_all_docs`. | ||
For more task examples, check the collector's default task file. | ||
### Output Object | ||
A collector's investigation will produce a gzipped JSON file containing an object with these keys: | ||
- `date`: A timestamp in milliseconds since the UNIX epoch for when the investigation occurred. | ||
- `host`: The host that was investigated. For example: `localhost:5984` | ||
- `version`: The version of the collector used in the investigation | ||
- `results`: Object containing all the results of the investigation. | ||
The `results` key contains an object keyed with all the results of the investigation. Each key corresponds to the name of some task that was run, for example: `_all_dbs` will contain the results of querying `/_all_dbs`, and `$DB_NAME/_design_docs` will contain the results of the route named `$DB_NAME/_design_docs`. | ||
The object behind each of these keys comes directly from the result of the task's HTTP query, so for example `_all_dbs` might look like this: | ||
```json | ||
{ | ||
"_all_dbs": ["_replicator", ... ], | ||
... | ||
} | ||
``` | ||
The special `_os` key contains information about the system where the investigation was performed and includes data about system CPU, RAM, network, and disk. Here is an example output: | ||
```json | ||
{ | ||
"cpus": [ | ||
{ | ||
"model": "Intel(R) Core(TM) i7-7500U CPU @ 2.70GHz", | ||
"speed": 3174, | ||
"times": { | ||
"user": 2265800, | ||
"nice": 10800, | ||
"sys": 703400, | ||
"idle": 12098800, | ||
"irq": 0 | ||
} | ||
}, | ||
... | ||
], | ||
"disk": { | ||
"diskPath": "/", | ||
"free": 10341093376, | ||
"size": 110863347712 | ||
}, | ||
"network": { | ||
"lo": [ | ||
{ | ||
"address": "127.0.0.1", | ||
"netmask": "255.0.0.0", | ||
"family": "IPv4", | ||
"mac": "NO AWOO $350 PENALTY", | ||
"internal": true, | ||
"cidr": "127.0.0.1/8" | ||
}, | ||
... | ||
], | ||
... | ||
}, | ||
"platform": "linux", | ||
"ram": { | ||
"free": 11225653248, | ||
"total": 16538009600 | ||
}, | ||
"release": "4.15.0-36-generic" | ||
} | ||
``` | ||
The NodeJS built-in library [os](https://nodejs.org/api/os.html) is used to investigate the host system. Here are the methods used to populate the results object: | ||
- `cpus`: [os.cpus()](https://nodejs.org/api/os.html#os_os_cpus) | ||
- `disk`: [check-disk-space](https://github.com/Alex-D/check-disk-space) | ||
- `network`: [os.networkInterfaces()](https://nodejs.org/api/os.html#os_os_networkinterfaces) | ||
- `platform`: [os.platform()](https://nodejs.org/api/os.html#os_os_platform) | ||
- `ram`: [os.freemem()](https://nodejs.org/api/os.html#os_os_freemem) and [os.totalmem()](https://nodejs.org/api/os.html#os_os_totalmem) | ||
- `release`: [os.release()](https://nodejs.org/api/os.html#os_os_release) | ||
## Development & Testing | ||
@@ -44,5 +189,5 @@ | ||
```bash | ||
git clone neighbourhoodie/couchdb-stat-collector | ||
cd couchdb-stat-collector | ||
npm install | ||
$ git clone neighbourhoodie/couchdb-stat-collector | ||
$ cd couchdb-stat-collector | ||
$ npm install | ||
``` | ||
@@ -53,3 +198,3 @@ | ||
```bash | ||
npm test | ||
$ npm test | ||
``` | ||
@@ -60,3 +205,3 @@ | ||
```bash | ||
npm start | ||
$ npm start | ||
``` | ||
@@ -63,0 +208,0 @@ |
@@ -1,103 +0,150 @@ | ||
const fs = require('graceful-fs') | ||
const zlib = require('zlib') | ||
/* global afterEach, describe, it */ | ||
const fsMock = require('mock-fs') | ||
const Hapi = require('hapi') | ||
const simple = require('simple-mock') | ||
const test = require('tap').test | ||
const assert = require('assert') | ||
const fs = require('fs') | ||
// const fsMock = require('mock-fs') | ||
const nock = require('nock') | ||
const path = require('path') | ||
const rimraf = require('rimraf') | ||
const runCollector = require('..') | ||
const collect = require('..') | ||
const { name } = require('../package.json') | ||
test('collector', (group) => { | ||
group.beforeEach((done) => { | ||
simple.mock(console, 'log').callFn(() => {}) | ||
const couchUrl = process.env.COUCH_URL || 'http://localhost:5984' | ||
// const serviceUrl = 'https://api.nhabcd.net/' | ||
const instanceName = 'test-investigation' | ||
this.server = new Hapi.Server() | ||
this.server.connection({ | ||
host: 'localhost', | ||
port: 7000 | ||
describe(name, function () { | ||
afterEach(function () { | ||
// clear interceptors | ||
nock.restore() | ||
nock.cleanAll() | ||
nock.activate() | ||
// remove results | ||
fs.readdirSync(path.resolve('.')).filter((fileName) => { | ||
return fileName.indexOf(instanceName) === 0 | ||
}).forEach((fileName) => { | ||
rimraf.sync(fileName) | ||
}) | ||
this.server.register(require('../mock-couchdb/routes')) | ||
this.server.start((error) => { | ||
if (error) { | ||
return done(error) | ||
} else { | ||
done() | ||
} | ||
}) | ||
}) | ||
group.afterEach((done) => { | ||
simple.restore() | ||
fsMock.restore() | ||
this.server.stop(done) | ||
it('should run', async function () { | ||
// nock it off, already! | ||
nock(couchUrl) | ||
.get('/_session') | ||
.reply(200, { | ||
'ok': true, | ||
'userCtx': { | ||
'name': 'admin', | ||
'roles': [ | ||
'_admin' | ||
] | ||
}, | ||
'info': { | ||
'authentication_db': '_users', | ||
'authentication_handlers': [ | ||
'cookie', | ||
'default' | ||
], | ||
'authenticated': 'default' | ||
} | ||
}) | ||
.get('/') | ||
.reply(200, { version: '1.6.1' }) | ||
.get('/_config') | ||
.reply(200, {}) | ||
.get('/_stats') | ||
.reply(200, {}) | ||
.get('/_all_dbs') | ||
.reply(200, ['hello-world']) | ||
.get('/hello-world') | ||
.reply(200, {}) | ||
.get('/hello-world/_security') | ||
.reply(200, {}) | ||
.get('/hello-world/_all_docs') | ||
.query({ startkey: '"_design/"', endkey: '"_design/\uffff"' }) | ||
.reply(200, {}) | ||
.get('/hello-world/_all_docs') | ||
.query({ conflicts: true, include_docs: true }) | ||
.reply(200, { rows: [] }) | ||
.get('/_active_tasks') | ||
.reply(200, []) | ||
// commence test | ||
await collect({ couchUrl, instanceName }) | ||
}) | ||
group.test('without previous data', (t) => { | ||
fsMock() | ||
const instanceName = 'localhost_7000_19700101_000000' | ||
runCollector({ | ||
it('should resume', async function () { | ||
// i've got a nock nock joke for you | ||
nock(couchUrl) | ||
.get('/') | ||
.reply(200, { version: '1.6.1' }) | ||
.get('/_all_dbs') | ||
.reply(200, ['hello-world']) | ||
// commence tests | ||
await collect({ | ||
cleanUp: false, | ||
couchUrl, | ||
instanceName, | ||
couchUrl: 'http://admin:admin@localhost:7000', | ||
conflicts: true | ||
}, (error, result) => { | ||
t.error(error) | ||
t.ok(result) | ||
t.ok(fs.statSync(`${instanceName}.json.gz`).isFile()) | ||
const data = JSON.parse(zlib.gunzipSync(fs.readFileSync(`${instanceName}.json.gz`))) | ||
t.is(data._db_info.couchdb, 'Welcome') | ||
// ensure config is obfuscated | ||
if (data._config) { | ||
t.is(data._config.admins.redacted, true) | ||
} else if (data._membership) { | ||
data._membership.all_nodes.forEach((nodeName) => { | ||
var config = data['_node-' + nodeName]._config | ||
t.is(config.admins.redacted, true) | ||
}) | ||
} | ||
t.end() | ||
tasks: [{ route: '' }] | ||
}) | ||
}) | ||
group.test('resume', (t) => { | ||
fsMock({ | ||
localhost_7000_19700101_000000_current: { | ||
'_db_info.json': JSON.stringify({foo: 'bar'}) | ||
} | ||
}) | ||
const instanceName = 'localhost_7000_19700101_000000' | ||
runCollector({ | ||
// assert folder exists | ||
fs.accessSync(`${instanceName}_in_progress`) | ||
fs.accessSync(path.join(`${instanceName}_in_progress`, 'be47ec8561f8d94171e2ff397917f945.json')) | ||
await collect({ | ||
cleanUp: false, | ||
couchUrl, | ||
instanceName, | ||
couchUrl: 'http://admin:admin@localhost:7000', | ||
isResume: true | ||
}, (error) => { | ||
t.error(error) | ||
t.ok(fs.statSync(`${instanceName}.json.gz`).isFile()) | ||
const data = JSON.parse(zlib.gunzipSync(fs.readFileSync(`${instanceName}.json.gz`))) | ||
t.is(data._db_info.foo, 'bar') | ||
t.end() | ||
isResume: true, | ||
tasks: [{ route: '' }, { route: '_all_dbs' }] | ||
}) | ||
// assert folder has more in it, that one hasn't been changed | ||
fs.accessSync(`${instanceName}_in_progress`) | ||
fs.accessSync(path.join(`${instanceName}_in_progress`, 'be47ec8561f8d94171e2ff397917f945.json')) | ||
fs.accessSync(path.join(`${instanceName}_in_progress`, 'c9c332fc1d7acd421714d7bd1d0519e0.json')) | ||
}) | ||
group.test('bad auth', (t) => { | ||
runCollector({ | ||
instanceName: 'localhost_7000_19700101_000000', | ||
couchUrl: 'http://admin:BAD_PASS@localhost:7000' | ||
}, (error) => { | ||
t.is(error, 'The login credentials you provided are wrong.') | ||
it('should fail', async function () { | ||
this.timeout(4000) | ||
// hey!! nock it off >:( | ||
nock(couchUrl) | ||
.get('/') | ||
.times(4) | ||
.reply(403, { | ||
error: 'unauthorized' | ||
}) | ||
// commence tests | ||
try { | ||
await collect({ | ||
couchUrl: 'http://bad:pass@localhost:5984', | ||
instanceName, | ||
argv: {}, | ||
tasks: [{ route: '' }] | ||
}) | ||
assert(false, 'Investigation should fail, but did not.') | ||
} catch (error) { | ||
assert(error.req.url.includes('REDACTED@'), 'URL creds not censored!') | ||
assert.strictEqual(error.code, 'unauthorized') | ||
} | ||
}) | ||
t.end() | ||
it('should upload to service', async function () { | ||
// nock nock, it's mocking time | ||
nock(couchUrl) | ||
.get('/') | ||
.reply(200, { version: '1.6.1' }) | ||
nock('https://example.com') | ||
.get('/') | ||
.reply(200, [{ route: '' }]) | ||
.post('/') | ||
.reply(200) | ||
// commence tests | ||
await collect({ | ||
couchUrl, | ||
instanceName, | ||
service: true, | ||
serviceUrl: 'https://example.com', | ||
serviceToken: 'hello world', | ||
argv: {} | ||
}) | ||
}) | ||
group.end() | ||
}) |
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is too big to display
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 10 instances in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Native code
Supply chain risk: Contains native code (e.g., compiled binaries or shared libraries). Including native code can obscure malicious behavior.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package
Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
No repository
Supply chain risk: Package does not have a linked source code repository. Without this field, a package will have no reference to the location of the source code used to generate the package.
Found 1 instance in 1 package
No v1
Quality: Package is not semver >=1. This means it is not stable and does not support ^ ranges.
Found 1 instance in 1 package
5
1
205
0
206923
8
19
5655
23
+ Addedcheck-disk-space@^1.5.0
+ Addedhandlebars@^4.0.12
+ Addedjsonpath@^1.0.0
+ Addedmd5@^2.2.1
+ Addedsemver@^5.6.0
+ Addedcamelcase@5.3.1(transitive)
+ Addedcharenc@0.0.2(transitive)
+ Addedcheck-disk-space@1.5.0(transitive)
+ Addedcross-spawn@6.0.6(transitive)
+ Addedcrypt@0.0.2(transitive)
+ Addeddeep-is@0.1.4(transitive)
+ Addedend-of-stream@1.4.4(transitive)
+ Addedescodegen@1.14.3(transitive)
+ Addedesprima@1.2.24.0.1(transitive)
+ Addedestraverse@4.3.0(transitive)
+ Addedesutils@2.0.3(transitive)
+ Addedexeca@1.0.0(transitive)
+ Addedfast-levenshtein@2.0.6(transitive)
+ Addedfind-up@3.0.0(transitive)
+ Addedget-stream@4.1.0(transitive)
+ Addedhandlebars@4.7.8(transitive)
+ Addedinvert-kv@2.0.0(transitive)
+ Addedis-buffer@1.1.6(transitive)
+ Addedjsonpath@1.1.1(transitive)
+ Addedlcid@2.0.0(transitive)
+ Addedlevn@0.3.0(transitive)
+ Addedlocate-path@3.0.0(transitive)
+ Addedmap-age-cleaner@0.1.3(transitive)
+ Addedmd5@2.3.0(transitive)
+ Addedmem@4.3.0(transitive)
+ Addedmimic-fn@2.1.0(transitive)
+ Addedminimist@1.2.8(transitive)
+ Addedneo-async@2.6.2(transitive)
+ Addednice-try@1.0.5(transitive)
+ Addedoptionator@0.8.3(transitive)
+ Addedos-locale@3.1.0(transitive)
+ Addedp-defer@1.0.0(transitive)
+ Addedp-is-promise@2.1.0(transitive)
+ Addedp-limit@2.3.0(transitive)
+ Addedp-locate@3.0.0(transitive)
+ Addedp-try@2.2.0(transitive)
+ Addedprelude-ls@1.1.2(transitive)
+ Addedpump@3.0.2(transitive)
+ Addedsemver@5.7.2(transitive)
+ Addedsource-map@0.6.1(transitive)
+ Addedstatic-eval@2.0.2(transitive)
+ Addedtype-check@0.3.2(transitive)
+ Addeduglify-js@3.19.3(transitive)
+ Addedunderscore@1.12.1(transitive)
+ Addedword-wrap@1.2.5(transitive)
+ Addedwordwrap@1.0.0(transitive)
+ Addedy18n@4.0.3(transitive)
+ Addedyargs@12.0.5(transitive)
+ Addedyargs-parser@11.1.1(transitive)
- Removedasync@^2.5.0
- Removedchalk@^2.1.0
- Removedfs-walk@0.0.2
- Removedgraceful-fs@^4.1.11
- Removedansi-styles@3.2.1(transitive)
- Removedasync@2.6.4(transitive)
- Removedcamelcase@4.1.0(transitive)
- Removedchalk@2.4.2(transitive)
- Removedcolor-convert@1.9.3(transitive)
- Removedcolor-name@1.1.3(transitive)
- Removedcross-spawn@5.1.0(transitive)
- Removedescape-string-regexp@1.0.5(transitive)
- Removedexeca@0.7.0(transitive)
- Removedfind-up@2.1.0(transitive)
- Removedfs-walk@0.0.2(transitive)
- Removedget-stream@3.0.0(transitive)
- Removedgraceful-fs@4.2.11(transitive)
- Removedhas-flag@3.0.0(transitive)
- Removedinvert-kv@1.0.0(transitive)
- Removedlcid@1.0.0(transitive)
- Removedlocate-path@2.0.0(transitive)
- Removedlodash@4.17.21(transitive)
- Removedlru-cache@4.1.5(transitive)
- Removedmem@1.1.0(transitive)
- Removedmimic-fn@1.2.0(transitive)
- Removedos-locale@2.1.0(transitive)
- Removedp-limit@1.3.0(transitive)
- Removedp-locate@2.0.0(transitive)
- Removedp-try@1.0.0(transitive)
- Removedpseudomap@1.0.2(transitive)
- Removedsupports-color@5.5.0(transitive)
- Removedy18n@3.2.2(transitive)
- Removedyallist@2.1.2(transitive)
- Removedyargs@10.1.2(transitive)
- Removedyargs-parser@8.1.0(transitive)
Updatedrequest@^2.88.0
Updatedrimraf@^2.6.3
Updatedyargs@^12.0.5