chrome-har - npm Package Compare versions

Comparing version 0.1.0 to 0.2.0

.eslintrc.json


CHANGELOG.md
# CHANGELOG

version 0.2.0 2017-05-31
-------------------------
* Add "serverIPAddress" field to entries.
* Set bodySize for requests correctly.
* Set bodySize and compression for responses correctly.
* Add _transferSize field for responses, just like Chrome does.

version 0.1.0 2017-03-05
-------------------------
* Initial release
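
Read as a whole, the 0.2.0 entries above change the shape of each HAR entry roughly as follows. This is a hand-written sketch, not output from the package, and all values are hypothetical:

// Sketch of the fields 0.2.0 adds or fixes on a HAR entry (values hypothetical).
const entry = {
  serverIPAddress: '93.184.216.34', // new: copied from response.remoteIPAddress
  request: {
    bodySize: 0 // fixed: 0 when there is no postData, postData.length otherwise
  },
  response: {
    _transferSize: 516, // new: bytes on the wire (encodedDataLength), like Chrome
    bodySize: 416, // fixed: _transferSize minus headersSize on HTTP/1.x
    content: {
      size: 1200, // decoded size, accumulated from Network.dataReceived
      compression: 784 // content.size - bodySize, when positive
    }
  }
};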


index.js
 'use strict';
-const {name, version, homepage} = require('./package');
+const { name, version, homepage } = require('./package');

@@ -9,4 +9,9 @@ const urlParser = require('url');

-const {parseRequestCookies, parseResponseCookies} = require('./lib/cookies');
-const {calculateRequestHeaderSize, calculateResponseHeaderSize, getHeaderValue, parseHeaders} = require('./lib/headers');
+const { parseRequestCookies, parseResponseCookies } = require('./lib/cookies');
+const {
+  calculateRequestHeaderSize,
+  calculateResponseHeaderSize,
+  getHeaderValue,
+  parseHeaders
+} = require('./lib/headers');

@@ -19,3 +24,3 @@ const max = Math.max;

-const isEmpty = (o) => !o;
+const isEmpty = o => !o;

@@ -42,6 +47,7 @@ function formatMillis(time, fractionalDigits = 3) {

       cookies: parseResponseCookies(cookieHeader),
-      headers: parseHeaders(responseHeaders)
+      headers: parseHeaders(responseHeaders),
+      _transferSize: response.encodedDataLength
     };
-    let locationHeaderValue = getHeaderValue(responseHeaders, 'Location');
+    const locationHeaderValue = getHeaderValue(responseHeaders, 'Location');
     if (locationHeaderValue) {

@@ -65,5 +71,4 @@ entry.response.redirectURL = locationHeaderValue;

           hitCount: 0
-        }
-      };
-    }
+        };
+      }
     } else {

@@ -84,2 +89,5 @@ if (response.requestHeaders) {

+    entry.response.bodySize =
+      response.encodedDataLength - entry.response.headersSize;
+
     if (response.requestHeadersText) {

@@ -95,2 +103,3 @@ entry.request.headersSize = response.requestHeadersText.length;

     entry.connection = response.connectionId.toString();
+    entry.serverIPAddress = response.remoteIPAddress;

@@ -104,5 +113,7 @@ function parseOptionalTime(timing, start, end) {

-  let timing = response.timing;
+  const timing = response.timing;
   if (timing) {
-    const blocked = formatMillis(firstNonNegative([timing.dnsStart, timing.connectStart, timing.sendStart]));
+    const blocked = formatMillis(
+      firstNonNegative([timing.dnsStart, timing.connectStart, timing.sendStart])
+    );

@@ -134,3 +145,4 @@ const dns = parseOptionalTime(timing, 'dnsStart', 'dnsEnd');

-    entry.time = max(0, blocked) + max(0, dns) + max(0, connect) + send + wait + receive;
+    entry.time =
+      max(0, blocked) + max(0, dns) + max(0, connect) + send + wait + receive;

@@ -142,6 +154,8 @@ // Some cached responses generate a Network.requestServedFromCache event,

     // (see https://cs.chromium.org/chromium/src/third_party/WebKit/Source/platform/network/ResourceLoadTiming.h?q=requestTime+package:%5Echromium$&dr=CSs&l=84)
-    const entrySecs = page.__wallTime + (timing.requestTime - page.__timestamp);
+    const entrySecs =
+      page.__wallTime + (timing.requestTime - page.__timestamp);
     entry.startedDateTime = moment.unix(entrySecs).toISOString();

-    const queuedMillis = (timing.requestTime - entry.__requestWillBeSentTime) * 1000;
+    const queuedMillis =
+      (timing.requestTime - entry.__requestWillBeSentTime) * 1000;
     if (queuedMillis > 0) {

@@ -170,269 +184,340 @@ entry.timings._queued = formatMillis(queuedMillis);

-    const ignoredRequests = new Set(),
-      rootFrameMappings = new Map();
+    const ignoredRequests = new Set(), rootFrameMappings = new Map();

-    let pages = [],
-      entries = [],
-      currentPageId;
+    let pages = [], entries = [], currentPageId;

-    for (let message of messages) {
+    for (const message of messages) {
       const params = message.params;
       switch (message.method) {
-        case 'Page.frameStartedLoading': {
-          const frameId = params.frameId;
-          let previousFrameId = entries.find((entry) => entry.__frameId === frameId);
-          if (rootFrameMappings.has(frameId) || previousFrameId) {
-            // This is a sub frame, there's already a page for the root frame
-            continue;
-          }
-          currentPageId = 'page_' + (pages.length + 1);
-          let page = {
-            id: currentPageId,
-            startedDateTime: '',
-            title: '',
-            pageTimings: {},
-            __frameId: frameId
-          };
-          pages.push(page);
-        }
+        case 'Page.frameStartedLoading':
+          {
+            const frameId = params.frameId;
+            const previousFrameId = entries.find(
+              entry => entry.__frameId === frameId
+            );
+            if (rootFrameMappings.has(frameId) || previousFrameId) {
+              // This is a sub frame, there's already a page for the root frame
+              continue;
+            }
+            currentPageId = 'page_' + (pages.length + 1);
+            const page = {
+              id: currentPageId,
+              startedDateTime: '',
+              title: '',
+              pageTimings: {},
+              __frameId: frameId
+            };
+            pages.push(page);
+          }
           break;
-        case 'Network.requestWillBeSent': {
-          if (pages.length < 1) {
-            //we haven't loaded any pages yet.
-            continue
-          }
-          const request = params.request;
-          if (!isSupportedProtocol(request.url)) {
-            ignoredRequests.add(params.requestId);
-            continue;
-          }
-          let frameId = rootFrameMappings.get(params.frameId) || params.frameId;
-          let page = pages.find((page) => page.__frameId === frameId);
-          if (!page) {
-            debug('Request will be sent with requestId ' + params.requestId + ' that can\'t be mapped to any page.');
-            continue;
-          }
-          const cookieHeader = getHeaderValue(request.headers, 'Cookie');
-          // Remove fragment, that's what Chrome does.
-          const url = urlParser.parse(request.url, true);
-          url.hash = null;
-          let req = {
-            method: request.method,
-            url: urlParser.format(url),
-            queryString: toNameValuePairs(url.query),
-            postData: parsePostData(getHeaderValue(request.headers, 'Content-Type'), request.postData),
-            headersSize: -1,
-            bodySize: -1, // FIXME calculate based on postData
-            cookies: parseRequestCookies(cookieHeader),
-            headers: parseHeaders(request.headers)
-          };
-          let entry = {
-            cache: {},
-            startedDateTime: '',
-            __requestWillBeSentTime: params.timestamp,
-            __wallTime: params.wallTime,
-            __requestId: params.requestId,
-            __frameId: params.frameId,
-            _initialPriority: request.initialPriority,
-            _priority: request.initialPriority,
-            _initiator: params.initiator.url,
-            _initiator_line: params.initiator.lineNumber,
-            pageref: currentPageId,
-            request: req,
-            time: 0
-          };
-          if (params.redirectResponse) {
-            let previousEntry = entries.find((entry) => entry.__requestId === params.requestId);
-            if (previousEntry) {
-              previousEntry.__requestId += 'r';
-              populateEntryFromResponse(previousEntry, params.redirectResponse, page);
-            } else {
-              debug('Couldn\'t find original request for redirect response: ' + params.requestId);
-            }
-          }
-          entries.push(entry);
-          // this is the first request for this page, so set timestamp of page.
-          if (!page.__timestamp) {
-            entry.__mainRequest = true;
-            page.__wallTime = params.wallTime;
-            page.__timestamp = params.timestamp;
-            page.startedDateTime = moment.unix(params.wallTime).toISOString(); //epoch float64, eg 1440589909.59248
-            // URL is better than blank, and it's what devtools uses.
-            page.title = request.url;
-          }
-          // wallTime is not necessarily monotonic, timestamp is. So calculate startedDateTime from timestamp diffs.
-          // (see https://cs.chromium.org/chromium/src/third_party/WebKit/Source/platform/network/ResourceLoadTiming.h?q=requestTime+package:%5Echromium$&dr=CSs&l=84)
-          const entrySecs = page.__wallTime + (params.timestamp - page.__timestamp);
-          entry.startedDateTime = moment.unix(entrySecs).toISOString();
-        }
+        case 'Network.requestWillBeSent':
+          {
+            if (pages.length < 1) {
+              //we haven't loaded any pages yet.
+              ignoredRequests.add(params.requestId);
+              continue;
+            }
+            const request = params.request;
+            if (!isSupportedProtocol(request.url)) {
+              ignoredRequests.add(params.requestId);
+              continue;
+            }
+            const frameId =
+              rootFrameMappings.get(params.frameId) || params.frameId;
+            const page = pages.find(page => page.__frameId === frameId);
+            if (!page) {
+              debug(
+                'Request will be sent with requestId ' +
+                  params.requestId +
+                  " that can't be mapped to any page."
+              );
+              ignoredRequests.add(params.requestId);
+              continue;
+            }
+            const cookieHeader = getHeaderValue(request.headers, 'Cookie');
+            // Remove fragment, that's what Chrome does.
+            const url = urlParser.parse(request.url, true);
+            url.hash = null;
+            const postData = parsePostData(
+              getHeaderValue(request.headers, 'Content-Type'),
+              request.postData
+            );
+            const req = {
+              method: request.method,
+              url: urlParser.format(url),
+              queryString: toNameValuePairs(url.query),
+              postData,
+              headersSize: -1,
+              bodySize: isEmpty(request.postData) ? 0 : request.postData.length,
+              cookies: parseRequestCookies(cookieHeader),
+              headers: parseHeaders(request.headers)
+            };
+            const entry = {
+              cache: {},
+              startedDateTime: '',
+              __requestWillBeSentTime: params.timestamp,
+              __wallTime: params.wallTime,
+              __requestId: params.requestId,
+              __frameId: params.frameId,
+              _initialPriority: request.initialPriority,
+              _priority: request.initialPriority,
+              _initiator: params.initiator.url,
+              _initiator_line: params.initiator.lineNumber,
+              pageref: currentPageId,
+              request: req,
+              time: 0
+            };
+            if (params.redirectResponse) {
+              const previousEntry = entries.find(
+                entry => entry.__requestId === params.requestId
+              );
+              if (previousEntry) {
+                previousEntry.__requestId += 'r';
+                populateEntryFromResponse(
+                  previousEntry,
+                  params.redirectResponse,
+                  page
+                );
+              } else {
+                debug(
+                  "Couldn't find original request for redirect response: " +
+                    params.requestId
+                );
+              }
+            }
+            entries.push(entry);
+            // this is the first request for this page, so set timestamp of page.
+            if (!page.__timestamp) {
+              page.__wallTime = params.wallTime;
+              page.__timestamp = params.timestamp;
+              page.startedDateTime = moment.unix(params.wallTime).toISOString(); //epoch float64, eg 1440589909.59248
+              // URL is better than blank, and it's what devtools uses.
+              page.title = request.url;
+            }
+            // wallTime is not necessarily monotonic, timestamp is. So calculate startedDateTime from timestamp diffs.
+            // (see https://cs.chromium.org/chromium/src/third_party/WebKit/Source/platform/network/ResourceLoadTiming.h?q=requestTime+package:%5Echromium$&dr=CSs&l=84)
+            const entrySecs =
+              page.__wallTime + (params.timestamp - page.__timestamp);
+            entry.startedDateTime = moment.unix(entrySecs).toISOString();
+          }
           break;
-        case 'Network.requestServedFromCache': {
-          if (pages.length < 1) {
-            //we haven't loaded any pages yet.
-            continue
-          }
-          if (ignoredRequests.has(params.requestId)) {
-            continue;
-          }
-          let entry = entries.find((entry) => entry.__requestId === params.requestId);
-          if (!entry) {
-            debug('Received requestServedFromCache for requestId ' + params.requestId + ' with no matching request.');
-            continue;
-          }
-          entry.__servedFromCache = true;
-          entry.cache.beforeRequest = {
-            lastAccess: '',
-            eTag: '',
-            hitCount: 0
-          };
-        }
+        case 'Network.requestServedFromCache':
+          {
+            if (pages.length < 1) {
+              //we haven't loaded any pages yet.
+              continue;
+            }
+            if (ignoredRequests.has(params.requestId)) {
+              continue;
+            }
+            const entry = entries.find(
+              entry => entry.__requestId === params.requestId
+            );
+            if (!entry) {
+              debug(
+                'Received requestServedFromCache for requestId ' +
+                  params.requestId +
+                  ' with no matching request.'
+              );
+              continue;
+            }
+            entry.__servedFromCache = true;
+            entry.cache.beforeRequest = {
+              lastAccess: '',
+              eTag: '',
+              hitCount: 0
+            };
+          }
           break;
-        case 'Network.responseReceived': {
-          if (pages.length < 1) {
-            //we haven't loaded any pages yet.
-            continue
-          }
-          if (ignoredRequests.has(params.requestId)) {
-            continue;
-          }
-          let entry = entries.find((entry) => entry.__requestId === params.requestId);
-          if (!entry) {
-            debug('Received network response for requestId ' + params.requestId + ' with no matching request.');
-            continue;
-          }
-          let frameId = rootFrameMappings.get(params.frameId) || params.frameId;
-          let page = pages.find((page) => page.__frameId === frameId);
-          if (!page) {
-            debug('Received network response for requestId ' + params.requestId + ' that can\'t be mapped to any page.');
-            continue;
-          }
-          try {
-            populateEntryFromResponse(entry, params.response, page);
-          } catch (e) {
-            debug('Error parsing response: %j', params);
-            throw e;
-          }
-        }
+        case 'Network.responseReceived':
+          {
+            if (pages.length < 1) {
+              //we haven't loaded any pages yet.
+              continue;
+            }
+            if (ignoredRequests.has(params.requestId)) {
+              continue;
+            }
+            const entry = entries.find(
+              entry => entry.__requestId === params.requestId
+            );
+            if (!entry) {
+              debug(
+                'Received network response for requestId ' +
+                  params.requestId +
+                  ' with no matching request.'
+              );
+              continue;
+            }
+            const frameId =
+              rootFrameMappings.get(params.frameId) || params.frameId;
+            const page = pages.find(page => page.__frameId === frameId);
+            if (!page) {
+              debug(
+                'Received network response for requestId ' +
+                  params.requestId +
+                  " that can't be mapped to any page."
+              );
+              continue;
+            }
+            try {
+              populateEntryFromResponse(entry, params.response, page);
+            } catch (e) {
+              debug('Error parsing response: %j', params);
+              throw e;
+            }
+          }
           break;
-        case 'Network.dataReceived': {
-          if (pages.length < 1) {
-            //we haven't loaded any pages yet.
-            continue
-          }
-          if (ignoredRequests.has(params.requestId)) {
-            continue;
-          }
-          let entry = entries.find((entry) => entry.__requestId === params.requestId);
-          if (!entry) {
-            debug('Received network data for requestId ' + params.requestId + ' with no matching request.');
-            continue;
-          }
-          entry.response.content.size += params.dataLength;
-        }
+        case 'Network.dataReceived':
+          {
+            if (pages.length < 1) {
+              //we haven't loaded any pages yet.
+              continue;
+            }
+            if (ignoredRequests.has(params.requestId)) {
+              continue;
+            }
+            const entry = entries.find(
+              entry => entry.__requestId === params.requestId
+            );
+            if (!entry) {
+              debug(
+                'Received network data for requestId ' +
+                  params.requestId +
+                  ' with no matching request.'
+              );
+              continue;
+            }
+            entry.response.content.size += params.dataLength;
+          }
           break;
-        case 'Network.loadingFinished': {
-          if (pages.length < 1) {
-            //we haven't loaded any pages yet.
-            continue
-          }
-          if (ignoredRequests.has(params.requestId)) {
-            ignoredRequests.delete(params.requestId);
-            continue;
-          }
-          let entry = entries.find((entry) => entry.__requestId === params.requestId);
-          if (!entry) {
-            debug('Network loading finished for requestId ' + params.requestId + ' with no matching request.');
-            continue;
-          }
-          const timings = entry.timings;
-          timings.receive = formatMillis((params.timestamp - entry._requestTime) * 1000 - entry.__receiveHeadersEnd);
-          entry.time = max(0, timings.blocked) + max(0, timings.dns) + max(0, timings.connect) +
-            timings.send + timings.wait + timings.receive;
-          // FIXME, encodedDataLength includes headers according to https://github.com/cyrus-and/chrome-har-capturer/issues/25
-          entry.response.bodySize = params.encodedDataLength > 0 ? params.encodedDataLength : entry.response.bodySize;
-          //if (entry.response.headersSize > -1) {
-          //  entry.response.bodySize -= entry.response.headersSize;
-          //}
-          // encodedDataLength will be -1 sometimes
-          if (params.encodedDataLength > 0) {
-            // encodedDataLength seems to be larger than body size sometimes. Perhaps it's because full packets are
-            // listed even though the actual data might be very small.
-            // I've seen dataLength: 416, encodedDataLength: 1016,
-            const compression = Math.max(0, entry.response.bodySize - params.encodedDataLength);
-            if (compression > 0) {
-              entry.response.content.compression = compression;
-            }
-          }
-        }
+        case 'Network.loadingFinished':
+          {
+            if (pages.length < 1) {
+              //we haven't loaded any pages yet.
+              continue;
+            }
+            if (ignoredRequests.has(params.requestId)) {
+              ignoredRequests.delete(params.requestId);
+              continue;
+            }
+            const entry = entries.find(
+              entry => entry.__requestId === params.requestId
+            );
+            if (!entry) {
+              debug(
+                'Network loading finished for requestId ' +
+                  params.requestId +
+                  ' with no matching request.'
+              );
+              continue;
+            }
+            const timings = entry.timings;
+            timings.receive = formatMillis(
+              (params.timestamp - entry._requestTime) * 1000 -
+                entry.__receiveHeadersEnd
+            );
+            entry.time =
+              max(0, timings.blocked) +
+              max(0, timings.dns) +
+              max(0, timings.connect) +
+              timings.send +
+              timings.wait +
+              timings.receive;
+            // encodedDataLength will be -1 sometimes
+            if (params.encodedDataLength >= 0) {
+              const response = entry.response;
+              response._transferSize = params.encodedDataLength;
+              response.bodySize = params.encodedDataLength;
+              if (isHttp1x(response.httpVersion) && response.headersSize > -1) {
+                response.bodySize -= response.headersSize;
+              }
+              const compression = Math.max(
+                0,
+                response.content.size - response.bodySize
+              );
+              if (compression > 0) {
+                response.content.compression = compression;
+              }
+            }
+          }
           break;
-        case 'Page.loadEventFired': {
-          if (pages.length < 1) {
-            //we haven't loaded any pages yet.
-            continue;
-          }
-          let page = pages[pages.length - 1];
-          if (params.timestamp && page.__timestamp) {
-            page.pageTimings.onLoad = formatMillis((params.timestamp - page.__timestamp) * 1000);
-          }
-        }
+        case 'Page.loadEventFired':
+          {
+            if (pages.length < 1) {
+              //we haven't loaded any pages yet.
+              continue;
+            }
+            const page = pages[pages.length - 1];
+            if (params.timestamp && page.__timestamp) {
+              page.pageTimings.onLoad = formatMillis(
+                (params.timestamp - page.__timestamp) * 1000
+              );
+            }
+          }
           break;
-        case 'Page.domContentEventFired': {
-          if (pages.length < 1) {
-            //we haven't loaded any pages yet.
-            continue;
-          }
-          let page = pages[pages.length - 1];
-          if (params.timestamp && page.__timestamp) {
-            page.pageTimings.onContentLoad = formatMillis((params.timestamp - page.__timestamp) * 1000);
-          }
-        }
+        case 'Page.domContentEventFired':
+          {
+            if (pages.length < 1) {
+              //we haven't loaded any pages yet.
+              continue;
+            }
+            const page = pages[pages.length - 1];
+            if (params.timestamp && page.__timestamp) {
+              page.pageTimings.onContentLoad = formatMillis(
+                (params.timestamp - page.__timestamp) * 1000
+              );
+            }
+          }
           break;
-        case 'Page.frameAttached': {
-          const frameId = params.frameId,
-            parentId = params.parentFrameId;
-          rootFrameMappings.set(frameId, parentId);
-          let grandParentId = rootFrameMappings.get(parentId);
-          while (grandParentId) {
-            rootFrameMappings.set(frameId, grandParentId);
-            grandParentId = rootFrameMappings.get(grandParentId);
-          }
-        }
+        case 'Page.frameAttached':
+          {
+            const frameId = params.frameId, parentId = params.parentFrameId;
+            rootFrameMappings.set(frameId, parentId);
+            let grandParentId = rootFrameMappings.get(parentId);
+            while (grandParentId) {
+              rootFrameMappings.set(frameId, grandParentId);
+              grandParentId = rootFrameMappings.get(grandParentId);
+            }
+          }
           break;

@@ -459,19 +544,30 @@

-        case 'Network.loadingFailed': {
-          if (ignoredRequests.has(params.requestId)) {
-            ignoredRequests.delete(params.requestId);
-            continue;
-          }
-          let entry = entries.find((entry) => entry.__requestId === params.requestId);
-          if (!entry) {
-            debug('Network loading failed for requestId ' + params.requestId + ' with no matching request.');
-            continue;
-          }
-          // This could be due to incorrect domain name etc. Sad, but unfortunately not something that a HAR file can
-          // represent.
-          debug(`Failed to load url '${entry.request.url}' (canceled: ${params.canceled})`);
-          entries = entries.filter((entry) => entry.__requestId !== params.requestId);
-        }
+        case 'Network.loadingFailed':
+          {
+            if (ignoredRequests.has(params.requestId)) {
+              ignoredRequests.delete(params.requestId);
+              continue;
+            }
+            const entry = entries.find(
+              entry => entry.__requestId === params.requestId
+            );
+            if (!entry) {
+              debug(
+                'Network loading failed for requestId ' +
+                  params.requestId +
+                  ' with no matching request.'
+              );
+              continue;
+            }
+            // This could be due to incorrect domain name etc. Sad, but unfortunately not something that a HAR file can
+            // represent.
+            debug(
+              `Failed to load url '${entry.request.url}' (canceled: ${params.canceled})`
+            );
+            entries = entries.filter(
+              entry => entry.__requestId !== params.requestId
+            );
+          }
           break;

@@ -492,13 +588,19 @@

           break;
-        case 'Network.resourceChangedPriority': {
-          let entry = entries.find((entry) => entry.__requestId === params.requestId);
-          if (!entry) {
-            debug('Received resourceChangedPriority for requestId ' + params.requestId + ' with no matching request.');
-            continue;
-          }
-          entry._priority = message.params.newPriority;
-        }
+        case 'Network.resourceChangedPriority':
+          {
+            const entry = entries.find(
+              entry => entry.__requestId === params.requestId
+            );
+            if (!entry) {
+              debug(
+                'Received resourceChangedPriority for requestId ' +
+                  params.requestId +
+                  ' with no matching request.'
+              );
+              continue;
+            }
+            entry._priority = message.params.newPriority;
+          }
           break;

@@ -513,8 +615,10 @@

     if (!options.includeResourcesFromDiskCache) {
-      entries = entries.filter((entry) => entry.cache.beforeRequest === undefined);
+      entries = entries.filter(
+        entry => entry.cache.beforeRequest === undefined
+      );
     }

-    const deleteInternalProperties = (o) => {
+    const deleteInternalProperties = o => {
       // __ properties are only for internal use, _ properties are custom properties for the HAR
-      for (let prop in o) {
+      for (const prop in o) {
         if (prop.startsWith('__')) {

@@ -528,3 +632,3 @@ delete o[prop];

     entries = entries
-      .filter((entry) => {
+      .filter(entry => {
         if (!entry.response) {

@@ -544,7 +648,7 @@ debug('Dropping incomplete request: ' + entry.request.url);

         version: '1.2',
-        creator: {name, version, comment: homepage},
+        creator: { name, version, comment: homepage },
         pages,
         entries
       }
-    }
+    };
   }

@@ -554,13 +658,14 @@ };

 function toNameValuePairs(object) {
-  return Object.keys(object)
-    .reduce((result, name) => {
-      const value = object[name];
-      if (Array.isArray(value)) {
-        return result.concat(value.map((v) => {
-          return {name, value: v};
-        }));
-      } else {
-        return result.concat([{name, value}]);
-      }
-    }, []);
+  return Object.keys(object).reduce((result, name) => {
+    const value = object[name];
+    if (Array.isArray(value)) {
+      return result.concat(
+        value.map(v => {
+          return { name, value: v };
+        })
+      );
+    } else {
+      return result.concat([{ name, value }]);
+    }
+  }, []);
 }

@@ -602,3 +707,3 @@

 function isHttp1x(version) {
-  return version.toLowerCase().startsWith('http/1.')
+  return version.toLowerCase().startsWith('http/1.');
 }

@@ -608,6 +713,5 @@

   for (let i = 0; i < values.length; ++i) {
-    if (values[i] >= 0)
-      return values[i];
+    if (values[i] >= 0) return values[i];
   }
   return -1;
 }
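
One detail worth pulling out of the index.js diff: entry.time sums six timing phases, and the optional ones are reported as -1 when they never happened, so they are clamped with max(0, ...) before summing. A small worked sketch with hypothetical values:

// Hypothetical per-phase timings in milliseconds; -1 marks a phase that
// did not occur (e.g. a reused connection needs no dns or connect phase).
const max = Math.max;
const timings = { blocked: 1.5, dns: -1, connect: -1, send: 0.2, wait: 42.1, receive: 3.4 };
const time =
  max(0, timings.blocked) + max(0, timings.dns) + max(0, timings.connect) +
  timings.send + timings.wait + timings.receive;
// 1.5 + 0 + 0 + 0.2 + 42.1 + 3.4 === 47.2, so the -1 phases add nothing.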

lib/cookies.js

@@ -11,9 +11,11 @@ const Cookie = require('tough-cookie').Cookie;

   return {
-    'name': cookie.key,
-    'value': cookie.value,
-    'path': cookie.path || undefined, // must be undefined, not null, to exclude empty path
-    'domain': cookie.domain || undefined, // must be undefined, not null, to exclude empty domain
-    'expires': cookie.expires === 'Infinity' ? undefined : moment(cookie.expires).toISOString(),
-    'httpOnly': cookie.httpOnly,
-    'secure': cookie.secure
+    name: cookie.key,
+    value: cookie.value,
+    path: cookie.path || undefined, // must be undefined, not null, to exclude empty path
+    domain: cookie.domain || undefined, // must be undefined, not null, to exclude empty domain
+    expires: cookie.expires === 'Infinity'
+      ? undefined
+      : moment(cookie.expires).toISOString(),
+    httpOnly: cookie.httpOnly,
+    secure: cookie.secure
   };

@@ -23,6 +25,3 @@ }

 function splitAndParse(header, divider) {
-  return header.split(divider)
-    .filter(Boolean)
-    .map(parseCookie)
-    .filter(Boolean);
+  return header.split(divider).filter(Boolean).map(parseCookie).filter(Boolean);
 }
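
The lib/cookies.js hunks only change formatting and key quoting; behavior is unchanged. For orientation, a rough sketch of the request-cookie path (index.js calls parseRequestCookies with the raw Cookie header, as shown above; the output fields follow the return object in the first hunk, and the values here are hypothetical):

const { parseRequestCookies } = require('./lib/cookies');

parseRequestCookies('name=value; other=2');
// -> roughly [ { name: 'name', value: 'value', httpOnly: false, secure: false },
//              { name: 'other', value: '2', httpOnly: false, secure: false } ]
// path, domain and expires are left undefined (not null) when absent,
// so they are excluded from the serialized HAR.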

lib/headers.js

@@ -5,5 +5,12 @@ const util = require('util');

   calculateRequestHeaderSize(harRequest) {
-    let buffer = util.format('%s %s %s\r\n', harRequest.method, harRequest.url, harRequest.httpVersion);
+    let buffer = util.format(
+      '%s %s %s\r\n',
+      harRequest.method,
+      harRequest.url,
+      harRequest.httpVersion
+    );

-    const headerLines = harRequest.headers.map((header) => util.format('%s: %s\r\n', header.name, header.value));
+    const headerLines = harRequest.headers.map(header =>
+      util.format('%s: %s\r\n', header.name, header.value)
+    );
     buffer = buffer.concat(headerLines.join(''));

@@ -15,6 +22,12 @@ buffer = buffer.concat('\r\n');

   calculateResponseHeaderSize(perflogResponse) {
-    let buffer = util.format('%s %d %s\r\n', perflogResponse.protocol,
-      perflogResponse.status, perflogResponse.statusText);
-    Object.keys(perflogResponse.headers).forEach((key) => {
-      buffer = buffer.concat(util.format('%s: %s\r\n', key, perflogResponse.headers[key]));
+    let buffer = util.format(
+      '%s %d %s\r\n',
+      perflogResponse.protocol,
+      perflogResponse.status,
+      perflogResponse.statusText
+    );
+    Object.keys(perflogResponse.headers).forEach(key => {
+      buffer = buffer.concat(
+        util.format('%s: %s\r\n', key, perflogResponse.headers[key])
+      );
     });

@@ -29,7 +42,7 @@ buffer = buffer.concat('\r\n');

     }
-    return Object.keys(headers).map((key) => {
+    return Object.keys(headers).map(key => {
       return {
         name: key,
         value: headers[key]
-      }
+      };
     });

@@ -44,7 +57,9 @@ },

   const headerNames = Object.keys(headers);
-  return headerNames
-    .filter((key) => key.toLowerCase() === lowerCaseHeader)
-    .map((key) => headers[key])
-    .shift() || '';
+  return (
+    headerNames
+      .filter(key => key.toLowerCase() === lowerCaseHeader)
+      .map(key => headers[key])
+      .shift() || ''
+  );
 }
};
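
The lib/headers.js changes are likewise pure reformatting, but the last hunk makes getHeaderValue's contract easy to read: header lookup is case-insensitive, and a missing header falls back to the empty string. A quick sketch:

const { getHeaderValue } = require('./lib/headers');

getHeaderValue({ 'Content-Type': 'text/html' }, 'content-type'); // -> 'text/html'
getHeaderValue({ 'Content-Type': 'text/html' }, 'Cookie');       // -> ''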
package.json

 {
   "name": "chrome-har",
-  "version": "0.1.0",
+  "version": "0.2.0",
   "description": "Create HAR files from Chrome Debugging Protocol data.",

@@ -8,2 +8,4 @@ "main": "index.js",

     "lint": "eslint .",
+    "lint:fix": "eslint . --fix",
+    "eslint-check": "eslint --print-config .eslintrc.js | eslint-config-prettier-check",
     "test": "ava",

@@ -25,12 +27,15 @@ "test:watch": "ava --watch"

   "devDependencies": {
-    "ava": "^0.18.2",
-    "bluebird": "^3.4.7",
-    "eslint": "^3.16.0",
-    "har-validator": "^4.2.0"
+    "ava": "^0.19.1",
+    "bluebird": "^3.5.0",
+    "eslint": "^3.19.0",
+    "eslint-config-prettier": "^2.1.1",
+    "eslint-plugin-prettier": "^2.1.1",
+    "har-validator": "^5.0.3",
+    "prettier": "^1.3.1"
   },
   "dependencies": {
-    "debug": "^2.6.1",
-    "moment": "^2.17.1",
+    "debug": "2.6.8",
+    "moment": "2.18.1",
     "tough-cookie": "^2.3.2"
   }
 }

Tests

@@ -1,10 +1,12 @@

-import test from "ava";
-import * as validator from "har-validator";
-import * as Promise from "bluebird";
-import * as fs from "fs";
-import * as path from "path";
-import parser from "../";
+import test from 'ava';
+import * as validator from 'har-validator';
+import * as Promise from 'bluebird';
+import * as fs from 'fs';
+import * as path from 'path';
+import parser from '../';

 Promise.promisifyAll(fs);
+
+const PERFLOGSPATH = path.resolve(__dirname, 'perflogs');

 /**

@@ -15,3 +17,3 @@ * Validate that, for each tcp connection, the previous request is fully completed before then next starts.

   const entriesByConnection = entries
-    .filter((entry) => entry.response.httpVersion !== 'h2')
+    .filter(entry => !['h2', 'spdy/3.1'].includes(entry.response.httpVersion))
     .reduce((entries, entry) => {

@@ -27,6 +29,9 @@ const e = entries.get(entry.connection) || [];

   for (let entry of entries) {
-    const previousEnd = previousEntry._requestTime + (previousEntry.time / 1000);
+    const previousEnd =
+      previousEntry._requestTime + previousEntry.time / 1000;
     const timings = entry.timings;
-    t.true((entry._requestTime + Math.max(0, timings.blocked) / 1000 > previousEnd),
-      `Two entries too close on connection ${connection}`);
+    t.true(
+      entry._requestTime + Math.max(0, timings.blocked) / 1000 > previousEnd,
+      `Two entries too close on connection ${connection}`
+    );
     previousEntry = entry;

@@ -37,37 +42,46 @@ }

-function testdata(filename) {
-  return path.resolve(__dirname, 'testdata', filename);
-}
+function perflog(filename) {
+  return path.resolve(PERFLOGSPATH, filename);
+}
+
+function perflogs() {
+  return fs
+    .readdirAsync(PERFLOGSPATH)
+    .filter(filename => path.extname(filename) === '.json');
+}

 function parsePerflog(perflogPath) {
-  return fs.readFileAsync(perflogPath)
+  return fs
+    .readFileAsync(perflogPath)
     .then(JSON.parse)
-    .then((messages) => parser.harFromMessages(messages))
-    .tap((har) => validator.har(har));
+    .then(messages => parser.harFromMessages(messages))
+    .tap(har => validator.har(har));
 }

 function sortedByRequestTime(entries) {
-  return entries.sort((e1, e2) => e1._requestTime - e2._requestTime)
+  return entries.sort((e1, e2) => e1._requestTime - e2._requestTime);
 }

-test('h1', t => {
-  const perflogPath = testdata('h1.json');
-  return parsePerflog(perflogPath)
-    .then(() => t.pass('Valid HAR'));
-});
-
-test('h2', t => {
-  const perflogPath = testdata('h2.json');
-  return parsePerflog(perflogPath)
-    .then(() => t.pass('Valid HAR'));
-});
+test('Generates valid HARs', t => {
+  return perflogs().each(filename => {
+    return parsePerflog(perflog(filename))
+      .tap(har =>
+        t.deepEqual(sortedByRequestTime(har.log.entries), har.log.entries)
+      )
+      .tap(har => validateConnectionOverlap(t, har.log.entries))
+      .then(() => t.pass('Valid HAR'));
+  });
+});

 test('zdnet', t => {
-  const perflogPath = testdata('www.zdnet.com.json');
+  const perflogPath = perflog('www.zdnet.com.json');
   return parsePerflog(perflogPath)
     .then(har => har.log)
     .tap(log => t.is(log.pages.length, 1))
-    .tap(log => t.is(log.entries.length, 343))
-    .tap(log => t.deepEqual(sortedByRequestTime(log.entries), log.entries))
-    .tap(log => validateConnectionOverlap(t, log.entries));
+    .tap(log => t.is(log.entries.length, 343));
+});
+
+test('Generates multiple pages', t => {
+  const perflogPath = perflog('www.wikipedia.org.json');
+  return parsePerflog(perflogPath).tap(har => t.is(har.log.pages.length, 2));
+});

Command-line script

@@ -19,6 +19,9 @@ #!/usr/bin/env node

-fs.readFileAsync(path.resolve(perflogPath), 'utf8')
+fs
+  .readFileAsync(path.resolve(perflogPath), 'utf8')
   .then(JSON.parse)
-  .then((messages) => parser.harFromMessages(messages))
-  .then((har) => JSON.stringify(har, null, 2))
-  .then((har) => fs.writeFileAsync(path.basename(perflogPath, '.json') + '.har', har, 'utf8'));
+  .then(messages => parser.harFromMessages(messages))
+  .then(har => JSON.stringify(har, null, 2))
+  .then(har =>
+    fs.writeFileAsync(path.basename(perflogPath, '.json') + '.har', har, 'utf8')
+  );
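
The tests and the command-line script both funnel into the same entry point, parser.harFromMessages. A minimal, synchronous usage sketch of the public API (the perflog.json file name is hypothetical):

const fs = require('fs');
const parser = require('chrome-har');

// 'perflog.json' is a hypothetical capture of Chrome Debugging Protocol
// messages (the Network.* and Page.* events handled in index.js above).
const messages = JSON.parse(fs.readFileSync('perflog.json', 'utf8'));
const har = parser.harFromMessages(messages);
fs.writeFileSync('perflog.har', JSON.stringify(har, null, 2), 'utf8');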

