express-har-capture
Advanced tools
Comparing version 0.0.0 to 1.0.0-beta
319
index.js
@@ -14,129 +14,218 @@ /* Capture data for a bunch of simple requests | ||
var fs = require('fs'), | ||
path = require('path'); | ||
path = require('path'); | ||
module.exports = function harCaptureMiddlewareSetup(options) { | ||
// Extract options | ||
var mapRequestToName = options.mapRequestToName || function (req) { | ||
return 'insert-remote-addr-here'; | ||
}; | ||
var harOutputDir = options.harOutputDir || process.cwd(); | ||
// Extract options | ||
var mapHarToName = options.mapHarToName; | ||
var saveRequestBody = options.saveRequestBody; | ||
var harOutputDir = options.harOutputDir || process.cwd(); | ||
// Default 10 minutes | ||
var maxCaptureTime = (options.maxCaptureSeconds || 600) * 1000; | ||
var maxCaptureRequests = options.maxCaptureRequests || 1000; | ||
return function harCaptureMiddleware(req, res, next) { | ||
var startTime = Date.now(), | ||
outputName = mapRequestToName(req); | ||
var filterFunction = options.filter || function () { | ||
return true; | ||
}; | ||
// Shadow the 'end' request | ||
var end = res.end; | ||
res.end = function () { | ||
var endTime = Date.now(), | ||
deltaTime = endTime - startTime; | ||
// Call the real 'end' | ||
end.apply(res, arguments); | ||
var flushBeforeRequest = options.flushBeforeRequest || function () { | ||
return false; | ||
}; | ||
// Store har-stuff... | ||
var data = { | ||
log: { | ||
version: '1.1', // Version of HAR file-format | ||
creator: { | ||
name: 'node-express-har-capture', | ||
version: '0.0.0' // TODO: Get from package.json | ||
// comment: "" | ||
}, | ||
pages: [{ | ||
startedDateTime: new Date(startTime).toISOString(), | ||
id: 'page' + startTime, | ||
title: req.url, | ||
pageTimings: { onLoad: deltaTime } | ||
}], | ||
entries: [{ | ||
timings: { | ||
send: -1, | ||
receive: -1, | ||
wait: deltaTime, | ||
comment: "Server-side processing only", | ||
onLoad: -1, | ||
}, | ||
startedDateTime: new Date(startTime).toISOString(), | ||
time: deltaTime, | ||
request: { | ||
method: req.method, | ||
url: req.originalUrl, | ||
httpVersion: 'HTTP/' + req.httpVersion, | ||
headersSize: 0, // Filled out later | ||
headers: [], // Filled out later | ||
queryString: [], // TODO | ||
cookies: [], // TODO | ||
bodySize: req.client.bytesRead // TODO | ||
}, | ||
response: { | ||
status: res.statusCode, | ||
redirectURL: req.originalUrl, | ||
httpVersion: 'HTTP/' + req.httpVersion, // TODO | ||
headersSize: -1, | ||
statusText: 'OK', // TODO | ||
headers: [], | ||
cookies: [], // TODO | ||
bodySize: -1, // TODO | ||
content: { // TODO | ||
size: -1, | ||
mimeType: '', | ||
compression: -1 | ||
}, | ||
timings: { | ||
send: 0, | ||
receive: 0, | ||
wait: deltaTime, | ||
comment: "Server-side processing only" | ||
} | ||
}, | ||
cache: {}, // TODO / is it optional | ||
pageref: 'page' + startTime | ||
}] | ||
} | ||
}; | ||
var flushAfterRequest = options.flushAfterRequest || function () { | ||
return false; | ||
}; | ||
// REQUEST DATA | ||
// Fix up data-stucture with iterative data from request | ||
// Headers | ||
Object.keys(req.headers).forEach(function (headerName) { | ||
data.log.entries[0].request.headersSize += headerName.length + 2 + req.headers[headerName].length; | ||
data.log.entries[0].request.headers.push({ | ||
name: headerName, | ||
value: req.headers[headerName] | ||
}); | ||
}); | ||
// Query strings | ||
Object.keys(req.query).forEach(function (queryName) { | ||
data.log.entries[0].request.queryString.push({ | ||
name: queryName, | ||
value: req.query[queryName] | ||
}); | ||
}); | ||
// TODO: Cookies | ||
var entries = []; | ||
var lastTimer = null; | ||
var lastFlushTime = Date.now(); | ||
// RESPONSE DATA | ||
// Headers | ||
if (res._headerSent) { | ||
data.log.entries[0].response.headersSize = res._header.length; | ||
Object.keys(res._headers).forEach(function (headerName) { | ||
var realHeaderName = res._headerNames[headerName] || headerName; | ||
data.log.entries[0].response.headers.push({ | ||
name: realHeaderName, | ||
value: res._headers[headerName] | ||
}); | ||
}); | ||
} | ||
function flush() { | ||
if (entries.length > 0) { | ||
if (lastTimer) { | ||
clearTimeout(lastTimer); | ||
} | ||
var now = Date.now(); | ||
// Write the data out | ||
fs.writeFile( | ||
path.join(harOutputDir, Date.now().toString() + '-' + outputName + '.har'), | ||
JSON.stringify(data, undefined, 2) | ||
); | ||
}; | ||
var har = { | ||
log: { | ||
version: '1.1', // Version of HAR file-format | ||
creator: { | ||
name: 'node-express-har-capture', | ||
version: '0.1.0' // TODO: Get from package.json | ||
// comment: "" | ||
}, | ||
pages: [], | ||
entries: entries | ||
} | ||
}; | ||
// Continue processing the request | ||
next(); | ||
var customPart = mapHarToName ? '-' + mapHarToName(har) : ''; | ||
fs.writeFile( | ||
path.join(harOutputDir, now + customPart + '.har'), | ||
JSON.stringify(har, undefined, 2) | ||
); | ||
entries = []; | ||
lastFlushTime = now; | ||
} | ||
} | ||
function checkAndFlush(force) { | ||
var timeSinceLastFlush = Date.now() - lastFlushTime; | ||
var timeUntilFlush = maxCaptureTime - timeSinceLastFlush; | ||
if (force || timeUntilFlush <= 0 || entries.length >= maxCaptureRequests) { | ||
flush(); | ||
} | ||
else { | ||
if (lastTimer) { | ||
clearTimeout(lastTimer); | ||
} | ||
lastTimer = setTimeout(flush, timeUntilFlush).unref(); | ||
} | ||
} | ||
return function harCaptureMiddleware(req, res, next) { | ||
if (flushBeforeRequest(req)) { flush() } | ||
// Filter out stuff we don't want to run | ||
if (!filterFunction(req)) { return next(); } | ||
var startTime = Date.now(); | ||
// Listen in on body parsing | ||
// NOTE: We do not resume the stream, as it would make actual parsers | ||
// miss out on the data. On the down-side, it doesn't capture a body | ||
// when the body isn't used later. | ||
var requestBodySize = 0, | ||
requestBody = []; | ||
req.on('data', function (chunck) { | ||
requestBodySize += chunck.length; | ||
if (saveRequestBody) { | ||
requestBody.push(chunck); | ||
} | ||
}); | ||
req.on('end', function (chunck) { | ||
if (chunck) { | ||
requestBodySize += chunck.length; | ||
if (saveRequestBody) { | ||
requestBody.push(chunck); | ||
} | ||
} | ||
if (requestBody.length < 0) { | ||
requestBody = ""; | ||
return; | ||
} | ||
if (Buffer.isBuffer(requestBody[0])) { | ||
requestBody = Buffer.concat(requestBody).encode('base64'); | ||
} else { | ||
requestBody = requestBody.join(""); | ||
} | ||
}); | ||
// Shadow the 'end' request | ||
var end = res.end; | ||
res.end = function () { | ||
var endTime = Date.now(), | ||
deltaTime = endTime - startTime; | ||
// Call the real 'end' | ||
end.apply(res, arguments); | ||
// Store har-stuff... | ||
var reqEntry = { | ||
timings: { | ||
send: -1, | ||
receive: -1, | ||
wait: deltaTime, | ||
comment: "Server-side processing only", | ||
onLoad: -1 | ||
}, | ||
startedDateTime: new Date(startTime).toISOString(), | ||
time: deltaTime, | ||
request: { | ||
method: req.method, | ||
url: req.protocol + '://' + req.get('host') + req.originalUrl, | ||
httpVersion: 'HTTP/' + req.httpVersion, | ||
headersSize: 0, // Filled out later | ||
headers: [], // Filled out later | ||
queryString: [], // TODO | ||
cookies: [], // TODO | ||
bodySize: requestBodySize, | ||
content: { | ||
size: requestBodySize, | ||
text: requestBody, | ||
comment: "Captured input stream" | ||
} | ||
}, | ||
response: { | ||
status: res.statusCode, | ||
redirectURL: req.originalUrl, | ||
httpVersion: 'HTTP/' + req.httpVersion, // TODO | ||
headersSize: -1, | ||
statusText: 'OK', // TODO | ||
headers: [], | ||
cookies: [], // TODO | ||
bodySize: -1, // TODO | ||
content: { // TODO | ||
size: -1, | ||
mimeType: '', | ||
compression: -1 | ||
}, | ||
timings: { | ||
send: 0, | ||
receive: 0, | ||
wait: deltaTime, | ||
comment: "Server-side processing only" | ||
} | ||
}, | ||
cache: {}, // TODO / is it optional | ||
pageref: 'page' + startTime | ||
}; | ||
// REQUEST DATA | ||
// Fix up data-stucture with iterative data from request | ||
// Headers | ||
Object.keys(req.headers).forEach(function (headerName) { | ||
reqEntry.request.headersSize += headerName.length + 2 + req.headers[headerName].length; | ||
reqEntry.request.headers.push({ | ||
name: headerName, | ||
value: req.headers[headerName] | ||
}); | ||
}); | ||
// Query strings | ||
Object.keys(req.query).forEach(function (queryName) { | ||
reqEntry.request.queryString.push({ | ||
name: queryName, | ||
value: req.query[queryName] | ||
}); | ||
}); | ||
// TODO: Cookies | ||
// RESPONSE DATA | ||
// Headers | ||
if (res._headerSent) { | ||
reqEntry.response.headersSize = res._header.length; | ||
Object.keys(res._headers).forEach(function (headerName) { | ||
var realHeaderName = res._headerNames[headerName] || headerName; | ||
reqEntry.response.headers.push({ | ||
name: realHeaderName, | ||
value: res._headers[headerName] | ||
}); | ||
}); | ||
} | ||
entries.push(reqEntry); | ||
checkAndFlush(flushAfterRequest(req)); | ||
}; | ||
// Continue processing the request | ||
next(); | ||
}; | ||
}; |
{ | ||
"name": "express-har-capture", | ||
"version": "0.0.0", | ||
"version": "1.0.0-beta", | ||
"description": "Express middleware for capturing HAR (HTTP ARchive)-files", | ||
"main": "index.js", | ||
"scripts": { | ||
"test": "echo \"Error: no test specified\" && exit 1" | ||
"test": "mocha -R spec" | ||
}, | ||
"repository": { | ||
"type": "git", | ||
"url": "https://github.com/msiebuhr/node-express-har-capture.git" | ||
"url": "https://github.com/idoco/node-express-har-capture.git" | ||
}, | ||
@@ -13,0 +13,0 @@ "keywords": [ |
@@ -6,4 +6,6 @@ /* Build a simple server, make a request and check the output is OK. | ||
express = require('express'), | ||
request = require('supertest') | ||
har = require('../index.js'); | ||
fs = require('fs'), | ||
har = require('../index.js'), | ||
path = require('path'), | ||
request = require('supertest'); | ||
@@ -17,2 +19,4 @@ describe('Simple test', function () { | ||
app.use(har({ | ||
maxCaptureRequests: 2, | ||
harOutputDir: __dirname | ||
})); | ||
@@ -25,12 +29,53 @@ | ||
// Remove *.har-files after each test
afterEach(function (done) {
    fs.readdir(__dirname, function (err, files) {
        // FIX: on a readdir error `files` is undefined, so the original
        // crashed on `.filter` before ever reaching `done(err)`.
        if (err) { return done(err); }
        files
            .filter(function (filename) {
                // HAR files are named '<millisecond-timestamp>…​.har', so the
                // '.har' suffix always sits past index 10.
                return filename.indexOf('.har') > 10;
            })
            .forEach(function (filename) {
                fs.unlinkSync(path.join(__dirname, filename));
            });
        done();
    });
});
// Fire two requests through the middleware, then verify that the flushed
// HAR file is valid JSON and contains one entry per request.
it('Sends requests', function (done) {
    request(app)
        .get('/')
        .set('Custom-header', 'foo/bar')
        .expect(200)
        .end(function (err, res) {
            request(app)
                .get('/')
                .set('Custom-header', 'foo/bar')
                .expect(200)
                .end(function (err, res) {
                    // Wait for file to be written to disk
                    setTimeout(function () {
                        var filename = fs.readdirSync(__dirname).filter(function (filename) {
                            // Flushed files are '<timestamp>….har'
                            return filename.indexOf('.har') > 10;
                        })[0];
                        // It is valid JSON
                        var json;
                        try {
                            var fullFilename = path.join(__dirname, filename),
                                data = fs.readFileSync(fullFilename);
                            json = JSON.parse(data);
                        } catch (e) {
                            assert(e, 'Could not parse JSON');
                            return done(e);
                        }
                        // Simple sanity check: both requests became entries
                        assert.deepProperty(json, 'log.entries.0');
                        assert.deepProperty(json, 'log.entries.1');
                        done(err);
                    }, 5);
                });
        });
});
}); |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: The package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
New author
Supply chain risk: A new npm collaborator published a version of the package for the first time. New collaborators are usually benign additions to a project, but they do indicate a change to the security surface area of a package.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Misc. License Issues
License (Experimental): The package's licensing information has fine-grained problems.
Found 1 instance in 1 package
No tests
Quality: The package does not have any tests. This is a strong signal of a poorly maintained or low-quality package.
Found 1 instance in 1 package
16146
7
0
416
2
4
2