
mockttp - npm Package Compare versions

Comparing version 1.2.2 to 2.0.0

custom-typings/proxy-agent.d.ts


dist/client/introspection-query.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.introspectionQuery = void 0;
// Taken from src/utilities/introspectionQuery.js in GraphQL-js

@@ -4,0 +5,0 @@ // Copied directly, to avoid bundling the whole thing into frontend code.

@@ -15,2 +15,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.MockedEndpointClient = void 0;
class MockedEndpointClient {

@@ -17,0 +18,0 @@ constructor(id, explanation, endpointDataGetter) {

@@ -52,5 +52,7 @@ /**

private mockServerSchema;
private subscriptionClient;
constructor(options?: MockttpClientOptions);
private requestFromStandalone;
private openStreamToMockServer;
private prepareSubscriptionClientToMockServer;
private requestFromMockServer;

@@ -57,0 +59,0 @@ private queryMockServer;


dist/client/mockttp-client.js

@@ -15,2 +15,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.GraphQLError = exports.RequestError = exports.ConnectionError = void 0;
const typed_error_1 = require("typed-error");

@@ -45,3 +46,8 @@ const getFetchPonyfill = require("fetch-ponyfill");

constructor(response, errors) {
super(`GraphQL request failed, with errors:\n${errors.map((e) => e.message).join('\n')}`, response);
super(errors.length === 0
? `GraphQL request failed with ${response.status} response`
: errors.length === 1
? `GraphQL request failed with: ${errors[0].message}`
: // >1
`GraphQL request failed, with errors:\n${errors.map((e) => e.message).join('\n')}`, response);
this.errors = errors;

@@ -83,3 +89,3 @@ }

}
if (message.body) {
if (message.body !== undefined) {
// Body is serialized as the raw encoded buffer in base64

@@ -267,2 +273,17 @@ message.body = request_utils_1.buildBodyReader(Buffer.from(message.body, 'base64'), message.headers);

}
prepareSubscriptionClientToMockServer(config) {
const standaloneWsServer = this.mockServerOptions.standaloneServerUrl.replace(/^http/, 'ws');
const subscriptionUrl = `${standaloneWsServer}/server/${config.port}/subscription`;
this.subscriptionClient = new subscriptions_transport_ws_1.SubscriptionClient(subscriptionUrl, {
lazy: true,
reconnect: true,
reconnectionAttempts: 8,
wsOptionArguments: [this.mockClientOptions]
}, WebSocket);
this.subscriptionClient.onError((e) => {
if (this.debug)
console.error("Subscription error", e);
});
this.subscriptionClient.onReconnecting(() => console.warn(`Reconnecting Mockttp subscription client`));
}
requestFromMockServer(path, options) {

@@ -301,9 +322,11 @@ return __awaiter(this, void 0, void 0, function* () {

catch (e) {
let graphQLErrors = undefined;
try {
let graphQLErrors = (yield e.response.json()).errors;
graphQLErrors = (yield e.response.json()).errors;
}
catch (e2) { }
if (graphQLErrors) {
throw new GraphQLError(e, graphQLErrors);
}
catch (e2) {
// If we fail to get a proper JSON graphql error, just throw the
// underlying exception without decoration
else {
throw e;

@@ -328,2 +351,4 @@ }

this.mockServerStream = yield this.openStreamToMockServer(mockServerConfig);
// Create a subscription client, preconfigured & ready to connect if on() is called later:
this.prepareSubscriptionClientToMockServer(mockServerConfig);
// We don't persist the config or resolve this promise until everything is set up

@@ -340,2 +365,3 @@ this.mockServerConfig = mockServerConfig;

this.mockServerStream.end();
this.subscriptionClient.close();
yield this.requestFromMockServer('/stop', {

@@ -416,9 +442,2 @@ method: 'POST'

return Promise.resolve();
const standaloneStreamServer = this.mockServerOptions.standaloneServerUrl.replace(/^http/, 'ws');
const url = `${standaloneStreamServer}/server/${this.port}/subscription`;
const client = new subscriptions_transport_ws_1.SubscriptionClient(url, {
reconnect: true,
reconnectionAttempts: 8,
wsOptionArguments: [this.mockClientOptions]
}, WebSocket);
// Note the typeHasField checks - these are a quick hack for backward compatibility,

@@ -539,3 +558,6 @@ // introspecting the server schema to avoid requesting fields that don't exist on old servers.

}[event];
client.request(query).subscribe({
// This isn't 100% correct (you can be WS-connected, but still negotiating some GQL
// setup) but it's good enough for our purposes (knowing-ish if the connection worked).
let isConnected = !!this.subscriptionClient.client;
this.subscriptionClient.request(query).subscribe({
next: (value) => {

@@ -568,17 +590,9 @@ if (value.data) {

return new Promise((resolve, reject) => {
client.onConnected(() => {
if (this.debug)
console.log("Subscription connected");
if (isConnected)
resolve();
});
client.onDisconnected(() => {
if (this.debug)
console.warn("Subscription disconnected");
reject();
});
client.onError((e) => {
if (this.debug)
console.error("Subscription error", e);
});
client.onReconnecting(() => console.warn(`Reconnecting ${event} subscription`));
else {
this.subscriptionClient.onConnected(resolve);
this.subscriptionClient.onDisconnected(reject);
this.subscriptionClient.onError(reject);
}
});

@@ -585,0 +599,0 @@ }

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getStandalone = exports.getRemote = exports.getLocal = exports.handlers = exports.completionCheckers = exports.webSocketHandlers = exports.requestHandlers = exports.matchers = exports.Method = void 0;
const mockttp_client_1 = require("./client/mockttp-client");
var types_1 = require("./types");
exports.Method = types_1.Method;
Object.defineProperty(exports, "Method", { enumerable: true, get: function () { return types_1.Method; } });
// Export rule data builders:

@@ -7,0 +8,0 @@ const matchers = require("./rules/matchers");

@@ -15,2 +15,3 @@ /**

import { WebSocketRuleData } from './rules/websockets/websocket-rule';
export { ProxyConfig } from './util/http-agents';
export { RequestRuleData, WebSocketRuleData };

@@ -17,0 +18,0 @@ export { matchers, requestHandlers, webSocketHandlers, completionCheckers };

@@ -6,2 +6,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.getStandalone = exports.getRemote = exports.getLocal = exports.generateSPKIFingerprint = exports.generateCACertificate = exports.handlers = exports.completionCheckers = exports.webSocketHandlers = exports.requestHandlers = exports.matchers = exports.Method = void 0;
const mockttp_server_1 = require("./server/mockttp-server");

@@ -12,3 +13,3 @@ const mockttp_client_1 = require("./client/mockttp-client");

var types_1 = require("./types");
exports.Method = types_1.Method;
Object.defineProperty(exports, "Method", { enumerable: true, get: function () { return types_1.Method; } });
// Export rule data builders & type definitions:

@@ -26,4 +27,4 @@ const matchers = require("./rules/matchers");

var tls_1 = require("./util/tls");
exports.generateCACertificate = tls_1.generateCACertificate;
exports.generateSPKIFingerprint = tls_1.generateSPKIFingerprint;
Object.defineProperty(exports, "generateCACertificate", { enumerable: true, get: function () { return tls_1.generateCACertificate; } });
Object.defineProperty(exports, "generateSPKIFingerprint", { enumerable: true, get: function () { return tls_1.generateSPKIFingerprint; } });
// Export the core API:

@@ -30,0 +31,0 @@ /**

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AbstractMockttp = void 0;
/**

@@ -4,0 +5,0 @@ * @module Mockttp

@@ -6,2 +6,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.BaseRuleBuilder = void 0;
const lodash_1 = require("lodash");

@@ -8,0 +9,0 @@ const completion_checkers_1 = require("./completion-checkers");

@@ -38,7 +38,7 @@ /**

export declare const CompletionCheckerLookup: {
'always': typeof Always;
'once': typeof Once;
'twice': typeof Twice;
'thrice': typeof Thrice;
'times': typeof NTimes;
always: typeof Always;
once: typeof Once;
twice: typeof Twice;
thrice: typeof Thrice;
times: typeof NTimes;
};

@@ -6,2 +6,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.CompletionCheckerLookup = exports.NTimes = exports.Thrice = exports.Twice = exports.Once = exports.Always = void 0;
const serialization_1 = require("../util/serialization");

@@ -8,0 +9,0 @@ class Always extends serialization_1.Serializable {

@@ -137,9 +137,9 @@ /**

export declare const MatcherLookup: {
'wildcard': typeof WildcardMatcher;
'method': typeof MethodMatcher;
'host': typeof HostMatcher;
wildcard: typeof WildcardMatcher;
method: typeof MethodMatcher;
host: typeof HostMatcher;
'simple-path': typeof SimplePathMatcher;
'regex-path': typeof RegexPathMatcher;
'header': typeof HeaderMatcher;
'query': typeof QueryMatcher;
header: typeof HeaderMatcher;
query: typeof QueryMatcher;
'exact-query-string': typeof ExactQueryMatcher;

@@ -152,5 +152,5 @@ 'form-data': typeof FormDataMatcher;

'json-body-matching': typeof JsonBodyFlexibleMatcher;
'cookie': typeof CookieMatcher;
cookie: typeof CookieMatcher;
};
export declare function matchesAll(req: OngoingRequest, matchers: RequestMatcher[]): Promise<boolean>;
export declare function explainMatchers(matchers: RequestMatcher[]): string;

@@ -15,2 +15,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.explainMatchers = exports.matchesAll = exports.MatcherLookup = exports.CookieMatcher = exports.JsonBodyFlexibleMatcher = exports.JsonBodyMatcher = exports.RegexBodyMatcher = exports.RawBodyIncludesMatcher = exports.RawBodyMatcher = exports.FormDataMatcher = exports.QueryMatcher = exports.ExactQueryMatcher = exports.HeaderMatcher = exports.RegexPathMatcher = exports.SimplePathMatcher = exports.HostMatcher = exports.MethodMatcher = exports.WildcardMatcher = void 0;
const _ = require("lodash");

@@ -17,0 +18,0 @@ const url = require("url");

@@ -7,5 +7,6 @@ /**

import { TypedError } from 'typed-error';
import { Headers, OngoingRequest, CompletedRequest, OngoingResponse, CompletedBody, Explainable } from "../../types";
import { MaybePromise, Replace } from '../../util/type-utils';
import { Serializable, ClientServerChannel } from "../../util/serialization";
import { MaybePromise } from '../../util/type-utils';
import { Headers, OngoingRequest, CompletedRequest, OngoingResponse, CompletedBody, Explainable } from "../../types";
import { ProxyConfig } from '../../util/http-agents';
export declare class AbortError extends TypedError {

@@ -31,3 +32,3 @@ }

json?: any;
body?: string | Buffer;
body?: string | Buffer | Uint8Array;
}

@@ -41,6 +42,6 @@ export interface RequestHandler extends Explainable, Serializable {

statusMessage?: string | undefined;
data?: string | Buffer | SerializedBuffer | undefined;
data?: string | Uint8Array | Buffer | SerializedBuffer | undefined;
headers?: Headers | undefined;
readonly type = "simple";
constructor(status: number, statusMessage?: string | undefined, data?: string | Buffer | SerializedBuffer | undefined, headers?: Headers | undefined);
constructor(status: number, statusMessage?: string | undefined, data?: string | Uint8Array | Buffer | SerializedBuffer | undefined, headers?: Headers | undefined);
explain(): string;

@@ -52,2 +53,3 @@ handle(_request: OngoingRequest, response: OngoingResponse): Promise<void>;

name?: string;
version?: number;
}

@@ -61,3 +63,3 @@ export declare class CallbackHandler extends Serializable implements RequestHandler {

serialize(channel: ClientServerChannel): SerializedCallbackHandlerData;
static deserialize({ name }: SerializedCallbackHandlerData, channel: ClientServerChannel): CallbackHandler;
static deserialize({ name, version }: SerializedCallbackHandlerData, channel: ClientServerChannel): CallbackHandler;
}

@@ -156,2 +158,6 @@ export interface SerializedStreamHandlerData {

/**
* Upstream proxy configuration: pass through requests via this proxy
*/
proxyConfig?: ProxyConfig;
/**
* Custom DNS options, to allow configuration of the resolver used

@@ -164,2 +170,30 @@ * when forwarding requests upstream. Passing any option switches

/**
* A set of data to automatically transform a request. This includes properties
* to support many transformation common use cases.
*
* For advanced cases, a custom callback using beforeRequest can be used instead.
* Using this field however where possible is typically simpler, more declarative,
* and can be more performant. The two options are mutually exclusive: you cannot
* use both transformRequest and a beforeRequest callback.
*
* Only one transformation for each target (method, headers & body) can be
* specified. If more than one is specified then an error will be thrown when the
* rule is registered.
*/
transformRequest?: RequestTransform;
/**
* A set of data to automatically transform a response. This includes properties
* to support many transformation common use cases.
*
* For advanced cases, a custom callback using beforeResponse can be used instead.
* Using this field however where possible is typically simpler, more declarative,
* and can be more performant. The two options are mutually exclusive: you cannot
* use both transformResponse and a beforeResponse callback.
*
* Only one transformation for each target (status, headers & body) can be
* specified. If more than one is specified then an error will be thrown when the
* rule is registered.
*/
transformResponse?: ResponseTransform;
/**
* A callback that will be passed the full request before it is passed through,

@@ -201,2 +235,84 @@ * and which returns an object that defines how the request content should

}
export interface RequestTransform {
/**
* A replacement HTTP method. Case insensitive.
*/
replaceMethod?: string;
/**
* A headers object which will be merged with the real request headers to add or
* replace values. Headers with undefined values will be removed.
*/
updateHeaders?: Headers;
/**
* A headers object which will completely replace the real request headers.
*/
replaceHeaders?: Headers;
/**
* A string or buffer that replaces the request body entirely.
*
* If this is specified, the upstream request will not wait for the original request
* body, so this may make responses faster than they would be otherwise given large
* request bodies or slow/streaming clients.
*/
replaceBody?: string | Uint8Array | Buffer;
/**
* The path to a file, which will be used to replace the request body entirely. The
* file will be re-read for each request, so the body will always reflect the latest
* file contents.
*
* If this is specified, the upstream request will not wait for the original request
* body, so this may make responses faster than they would be otherwise given large
* request bodies or slow/streaming clients.
*/
replaceBodyFromFile?: string;
/**
* A JSON object which will be merged with the real request body. Undefined values
* will be removed. Any requests which are received with an invalid JSON body that
* match this rule will fail.
*/
updateJsonBody?: {
[key: string]: any;
};
}
export interface ResponseTransform {
/**
* A replacement response status code.
*/
replaceStatus?: number;
/**
* A headers object which will be merged with the real response headers to add or
* replace values. Headers with undefined values will be removed.
*/
updateHeaders?: Headers;
/**
* A headers object which will completely replace the real response headers.
*/
replaceHeaders?: Headers;
/**
* A string or buffer that replaces the response body entirely.
*
* If this is specified, the downstream response will not wait for the original response
* body, so this may make responses arrive faster than they would be otherwise given large
* response bodies or slow/streaming servers.
*/
replaceBody?: string | Uint8Array | Buffer;
/**
* The path to a file, which will be used to replace the response body entirely. The
* file will be re-read for each response, so the body will always reflect the latest
* file contents.
*
* If this is specified, the downstream response will not wait for the original response
* body, so this may make responses arrive faster than they would be otherwise given large
* response bodies or slow/streaming servers.
*/
replaceBodyFromFile?: string;
/**
* A JSON object which will be merged with the real response body. Undefined values
* will be removed. Any responses which are received with an invalid JSON body that
* match this rule will fail.
*/
updateJsonBody?: {
[key: string]: any;
};
}
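// A minimal usage sketch (not part of this diff) of the RequestTransform /
// ResponseTransform options declared above, wired into a passthrough rule.
// The rule-builder names used here (getLocal, anyRequest, thenPassThrough) and
// all concrete values are illustrative assumptions, not taken from the diff:
const { getLocal } = require("mockttp");

(async () => {
    const server = getLocal();
    await server.start();
    await server.anyRequest().thenPassThrough({
        transformRequest: {
            replaceMethod: 'POST',
            // Headers and JSON keys set to undefined are removed entirely:
            updateHeaders: { 'x-injected': 'yes', 'cookie': undefined },
            updateJsonBody: { injectedField: true, legacyField: undefined }
        },
        transformResponse: {
            replaceStatus: 418
        }
    });
})();
// Note that only one header transform and one body transform may be given per
// direction, and a transform cannot be combined with the matching before*
// callback, as described in the option docs above.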
interface SerializedPassThroughData {

@@ -206,2 +322,3 @@ type: 'passthrough';

forwarding?: ForwardingOptions;
proxyConfig?: ProxyConfig;
ignoreHostCertificateErrors?: string[];

@@ -215,2 +332,8 @@ clientCertificateHostMap?: {

lookupOptions?: PassThroughLookupOptions;
transformRequest?: Replace<RequestTransform, 'replaceBody' | 'updateHeaders' | 'updateJsonBody', // Serialized as a string to preserve undefined values
// Serialized as a string to preserve undefined values
string | undefined>;
transformResponse?: Replace<ResponseTransform, 'replaceBody' | 'updateHeaders' | 'updateJsonBody', // Serialized as a string to preserve undefined values
// Serialized as a string to preserve undefined values
string | undefined>;
hasBeforeRequestCallback?: boolean;

@@ -229,7 +352,11 @@ hasBeforeResponseCallback?: boolean;

};
readonly transformRequest?: RequestTransform;
readonly transformResponse?: ResponseTransform;
readonly beforeRequest?: (req: CompletedRequest) => MaybePromise<CallbackRequestResult>;
readonly beforeResponse?: (res: PassThroughResponse) => MaybePromise<CallbackResponseResult>;
readonly lookupOptions: PassThroughLookupOptions | undefined;
readonly lookupOptions?: PassThroughLookupOptions;
readonly proxyConfig?: ProxyConfig;
private _cacheableLookupInstance;
private lookup;
private outgoingSockets;
constructor(options?: PassThroughHandlerOptions);

@@ -252,10 +379,10 @@ explain(): string;

export declare const HandlerLookup: {
'simple': typeof SimpleHandler;
'callback': typeof CallbackHandler;
'stream': typeof StreamHandler;
'file': typeof FileHandler;
'passthrough': typeof PassThroughHandler;
simple: typeof SimpleHandler;
callback: typeof CallbackHandler;
stream: typeof StreamHandler;
file: typeof FileHandler;
passthrough: typeof PassThroughHandler;
'close-connection': typeof CloseConnectionHandler;
'timeout': typeof TimeoutHandler;
timeout: typeof TimeoutHandler;
};
export {};

@@ -15,2 +15,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.HandlerLookup = exports.TimeoutHandler = exports.CloseConnectionHandler = exports.PassThroughHandler = exports.FileHandler = exports.StreamHandler = exports.CallbackHandler = exports.SimpleHandler = exports.AbortError = void 0;
const _ = require("lodash");

@@ -28,7 +29,10 @@ const url = require("url");

const typed_error_1 = require("typed-error");
const http_encoding_1 = require("http-encoding");
const util_1 = require("../../util/util");
const fs_1 = require("../../util/fs");
const request_utils_1 = require("../../util/request-utils");
const buffer_utils_1 = require("../../util/buffer-utils");
const socket_util_1 = require("../../util/socket-util");
const serialization_1 = require("../../util/serialization");
const fs_1 = require("../../util/fs");
const util_1 = require("../../util/util");
const http_agents_1 = require("../../util/http-agents");
// An error that indicates that the handler is aborting the request.

@@ -113,9 +117,18 @@ // This could be intentional, or an upstream server aborting the request.

channel.onRequest((streamMsg) => __awaiter(this, void 0, void 0, function* () {
return serialization_1.withSerializedBodyBuffer(yield this.callback.apply(null, streamMsg.args));
const request = _.isString(streamMsg.args[0].body)
? serialization_1.withDeserializedBodyReader(// New format: body serialized as base64
streamMsg.args[0])
: Object.assign(Object.assign({}, streamMsg.args[0]), { body: request_utils_1.buildBodyReader(streamMsg.args[0].body.buffer, streamMsg.args[0].headers) });
const callbackResult = yield this.callback.call(null, request);
return serialization_1.withSerializedBodyBuffer(callbackResult);
}));
return { type: this.type, name: this.callback.name };
return { type: this.type, name: this.callback.name, version: 2 };
}
static deserialize({ name }, channel) {
static deserialize({ name, version }, channel) {
const rpcCallback = (request) => __awaiter(this, void 0, void 0, function* () {
return serialization_1.withDeserializedBodyBuffer(yield channel.request({ args: [request] }));
return serialization_1.withDeserializedBodyBuffer(yield channel.request({ args: [
(version || -1) >= 2
? serialization_1.withSerializedBodyReader(request)
: request // Backward compat: old handlers
] }));
});

@@ -264,3 +277,3 @@ // Pass across the name from the real callback, for explain()

}
else if (replacementBody.hasOwnProperty('decodedBuffer')) {
else if (request_utils_1.isMockttpBody(replacementBody)) {
// It's our own bodyReader instance. That's not supposed to happen, but

@@ -275,3 +288,3 @@ // it's ok, we just need to use the buffer data instead of the whole object

else {
return replacementBody;
return buffer_utils_1.asBuffer(replacementBody);
}

@@ -347,3 +360,3 @@ }

console.warn(common_tags_1.oneLine `
Passthrough callback overrode the body and the content-length header
Passthrough modifications overrode the body and the content-length header
with mismatched values, which may be a mistake. The body contains

@@ -372,11 +385,10 @@ ${util_1.byteLength(body)} bytes, whilst the header was set to ${lengthOverride}.

}
const KeepAliveAgents = util_1.isNode
? {
'http:': new http.Agent({
keepAlive: true
}),
'https:': new https.Agent({
keepAlive: true
})
} : {};
// Used in merging as a marker for values to omit, because lodash ignores undefineds.
const OMIT_SYMBOL = Symbol('omit-value');
const SERIALIZED_OMIT = "__mockttp__transform__omit__";
// We play some games to preserve undefined values during serialization, because we differentiate them
// in some transforms from null/not-present keys.
const mapOmitToUndefined = (input) => _.mapValues(input, (v) => v === SERIALIZED_OMIT || v === OMIT_SYMBOL
? undefined // Replace our omit placeholders with actual undefineds
: v);
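// A standalone sketch (not part of this diff) of the omit-placeholder trick
// that mapOmitToUndefined above reverses: lodash's mergeWith ignores customizer
// results of undefined, so the transform code returns an omit Symbol instead,
// and JSON.stringify later drops Symbol-valued keys when re-serializing the
// body. The names _demo and DEMO_OMIT are illustrative stand-ins:
const _demo = require("lodash");
const DEMO_OMIT = Symbol('omit-value');
const merged = _demo.mergeWith(
    { keep: 1, drop: 2 },
    { drop: undefined }, // intent: delete 'drop'
    (_oldValue, newValue) => newValue === undefined ? DEMO_OMIT : undefined
);
JSON.stringify(merged); // => '{"keep":1}' - 'drop' has been omitted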
class PassThroughHandler extends serialization_1.Serializable {

@@ -387,2 +399,3 @@ constructor(options = {}) {

this.ignoreHostHttpsErrors = [];
this.outgoingSockets = new Set();
// If a location is provided, and it's not a bare hostname, it must be parseable

@@ -409,4 +422,47 @@ const { forwarding } = options;

this.clientCertificateHostMap = options.clientCertificateHostMap || {};
this.beforeRequest = options.beforeRequest;
this.beforeResponse = options.beforeResponse;
this.proxyConfig = options.proxyConfig;
if (options.beforeRequest && options.transformRequest && !_.isEmpty(options.transformRequest)) {
throw new Error("BeforeRequest and transformRequest options are mutually exclusive");
}
else if (options.beforeRequest) {
this.beforeRequest = options.beforeRequest;
}
else if (options.transformRequest) {
if ([
options.transformRequest.updateHeaders,
options.transformRequest.replaceHeaders
].filter(o => !!o).length > 1) {
throw new Error("Only one request header transform can be specified at a time");
}
if ([
options.transformRequest.replaceBody,
options.transformRequest.replaceBodyFromFile,
options.transformRequest.updateJsonBody
].filter(o => !!o).length > 1) {
throw new Error("Only one request body transform can be specified at a time");
}
this.transformRequest = options.transformRequest;
}
if (options.beforeResponse && options.transformResponse && !_.isEmpty(options.transformResponse)) {
throw new Error("BeforeResponse and transformResponse options are mutually exclusive");
}
else if (options.beforeResponse) {
this.beforeResponse = options.beforeResponse;
}
else if (options.transformResponse) {
if ([
options.transformResponse.updateHeaders,
options.transformResponse.replaceHeaders
].filter(o => !!o).length > 1) {
throw new Error("Only one response header transform can be specified at a time");
}
if ([
options.transformResponse.replaceBody,
options.transformResponse.replaceBodyFromFile,
options.transformResponse.updateJsonBody
].filter(o => !!o).length > 1) {
throw new Error("Only one response body transform can be specified at a time");
}
this.transformResponse = options.transformResponse;
}
}

@@ -463,3 +519,3 @@ lookup() {

// Check if this request is a request loop:
if (isRequestLoop(clientReq.socket)) {
if (socket_util_1.isSocketLoop(this.outgoingSockets, clientReq.socket)) {
throw new Error(common_tags_1.oneLine `

@@ -475,6 +531,50 @@ Passthrough loop detected. This probably means you're sending a request directly

}
// Override the request details, if a callback is specified:
// Override the request details, if a transform or callback is specified:
let reqBodyOverride;
let headersManuallyModified = false;
if (this.beforeRequest) {
if (this.transformRequest) {
const { replaceMethod, updateHeaders, replaceHeaders, replaceBody, replaceBodyFromFile, updateJsonBody } = this.transformRequest;
if (replaceMethod) {
method = replaceMethod;
}
if (updateHeaders) {
headers = Object.assign(Object.assign({}, headers), updateHeaders);
headersManuallyModified = true;
}
else if (replaceHeaders) {
headers = Object.assign({}, replaceHeaders);
headersManuallyModified = true;
}
if (replaceBody) {
// Note that we're replacing the body without actually waiting for the real one, so
// this can result in sending a request much more quickly!
reqBodyOverride = buffer_utils_1.asBuffer(replaceBody);
}
else if (replaceBodyFromFile) {
reqBodyOverride = yield fs_1.readFile(replaceBodyFromFile, null);
}
else if (updateJsonBody) {
const { body: realBody } = yield request_utils_1.waitForCompletedRequest(clientReq);
if ((yield realBody.getJson()) === undefined) {
throw new Error("Can't transform non-JSON request body");
}
const updatedBody = _.mergeWith(yield realBody.getJson(), updateJsonBody, (_oldValue, newValue) => {
// We want to remove values with undefines, but Lodash ignores
// undefined return values here. Fortunately, JSON.stringify
// ignores Symbols, omitting them from the result.
if (newValue === undefined)
return OMIT_SYMBOL;
});
reqBodyOverride = buffer_utils_1.asBuffer(JSON.stringify(updatedBody));
}
if (reqBodyOverride) {
// We always re-encode the body to match the resulting content-encoding header:
reqBodyOverride = yield http_encoding_1.encodeBuffer(reqBodyOverride, (headers['content-encoding'] || ''));
headers['content-length'] = getCorrectContentLength(reqBodyOverride, clientReq.headers, (updateHeaders && updateHeaders['content-length'] !== undefined)
? headers // Iff you replaced the content length
: replaceHeaders);
}
headers = dropUndefinedValues(headers);
}
else if (this.beforeRequest) {
const completedRequest = yield request_utils_1.waitForCompletedRequest(clientReq);

@@ -498,3 +598,3 @@ const modifiedReq = yield this.beforeRequest(Object.assign(Object.assign({}, completedRequest), { headers: _.clone(completedRequest.headers) }));

headers['content-type'] = 'application/json';
reqBodyOverride = JSON.stringify(modifiedReq.json);
reqBodyOverride = buffer_utils_1.asBuffer(JSON.stringify(modifiedReq.json));
}

@@ -525,10 +625,6 @@ else {

// and we can't use ALPN to detect HTTP/2 support cleanly.
const shouldTryH2Upstream = isH2Downstream && protocol === 'https:';
let makeRequest = shouldTryH2Upstream
? h2Client.auto
// HTTP/1 + TLS
: protocol === 'https:'
? https.request
// HTTP/1 plaintext:
: http.request;
let shouldTryH2Upstream = isH2Downstream && protocol === 'https:';
const effectivePort = !!port
? parseInt(port, 10)
: (protocol === 'https:' ? 443 : 80);
let family;

@@ -539,6 +635,3 @@ if (hostname === 'localhost') {

// We need to work out which one family is, as Node sometimes makes bad choices.
const portToTest = !!port
? parseInt(port, 10)
: (protocol === 'https:' ? 443 : 80);
if (yield socket_util_1.isLocalPortActive('::1', portToTest))
if (yield socket_util_1.isLocalPortActive('::1', effectivePort))
family = 6;

@@ -549,10 +642,21 @@ else

// Mirror the keep-alive-ness of the incoming request in our outgoing request
const agent = shouldTryH2Upstream
// H2 client takes multiple agents, uses the appropriate one for the detected protocol
? { https: KeepAliveAgents['https:'], http2: undefined }
// HTTP/1 + KA:
: request_utils_1.shouldKeepAlive(clientReq)
? KeepAliveAgents[protocol || 'http:']
// HTTP/1 without KA:
: undefined;
const agent = http_agents_1.getAgent({
protocol: (protocol || undefined),
hostname: hostname,
port: effectivePort,
tryHttp2: shouldTryH2Upstream,
keepAlive: request_utils_1.shouldKeepAlive(clientReq),
proxyConfig: this.proxyConfig
});
if (agent && !('http2' in agent)) {
// I.e. only use HTTP/2 if we're using an HTTP/2-compatible agent
shouldTryH2Upstream = false;
}
let makeRequest = (shouldTryH2Upstream
? h2Client.auto
// HTTP/1 + TLS
: protocol === 'https:'
? https.request
// HTTP/1 plaintext:
: http.request);
if (isH2Downstream && shouldTryH2Upstream) {

@@ -575,3 +679,3 @@ // We drop all incoming pseudoheaders, and regenerate them (except legally modified ones)

path,
headers, lookup: this.lookup(), agent: agent, minVersion: strictHttpsChecks ? tls.DEFAULT_MIN_VERSION : 'TLSv1', rejectUnauthorized: strictHttpsChecks }, clientCert), (serverRes) => (() => __awaiter(this, void 0, void 0, function* () {
headers, lookup: this.lookup(), agent, minVersion: strictHttpsChecks ? tls.DEFAULT_MIN_VERSION : 'TLSv1', rejectUnauthorized: strictHttpsChecks }, clientCert), (serverRes) => (() => __awaiter(this, void 0, void 0, function* () {
serverRes.on('error', reject);

@@ -585,6 +689,51 @@ let serverStatusCode = serverRes.statusCode;

}
if (this.beforeResponse) {
if (this.transformResponse) {
const { replaceStatus, updateHeaders, replaceHeaders, replaceBody, replaceBodyFromFile, updateJsonBody } = this.transformResponse;
if (replaceStatus) {
serverStatusCode = replaceStatus;
serverStatusMessage = undefined; // Reset to default
}
if (updateHeaders) {
serverHeaders = Object.assign(Object.assign({}, serverHeaders), updateHeaders);
}
else if (replaceHeaders) {
serverHeaders = Object.assign({}, replaceHeaders);
}
if (replaceBody) {
// Note that we're replacing the body without actually waiting for the real one, so
// this can result in sending a response much more quickly!
resBodyOverride = buffer_utils_1.asBuffer(replaceBody);
}
else if (replaceBodyFromFile) {
resBodyOverride = yield fs_1.readFile(replaceBodyFromFile, null);
}
else if (updateJsonBody) {
const rawBody = yield buffer_utils_1.streamToBuffer(serverRes);
const realBody = request_utils_1.buildBodyReader(rawBody, serverRes.headers);
if ((yield realBody.getJson()) === undefined) {
throw new Error("Can't transform non-JSON response body");
}
const updatedBody = _.mergeWith(yield realBody.getJson(), updateJsonBody, (_oldValue, newValue) => {
// We want to remove values with undefines, but Lodash ignores
// undefined return values here. Fortunately, JSON.stringify
// ignores Symbols, omitting them from the result.
if (newValue === undefined)
return OMIT_SYMBOL;
});
resBodyOverride = buffer_utils_1.asBuffer(JSON.stringify(updatedBody));
}
if (resBodyOverride) {
// We always re-encode the body to match the resulting content-encoding header:
resBodyOverride = yield http_encoding_1.encodeBuffer(resBodyOverride, (serverHeaders['content-encoding'] || ''));
serverHeaders['content-length'] = getCorrectContentLength(resBodyOverride, serverRes.headers, (updateHeaders && updateHeaders['content-length'] !== undefined)
? serverHeaders // Iff you replaced the content length
: replaceHeaders, method === 'HEAD' // HEAD responses are allowed mismatched content-length
);
}
serverHeaders = dropUndefinedValues(serverHeaders);
}
else if (this.beforeResponse) {
let modifiedRes;
let body;
body = yield request_utils_1.streamToBuffer(serverRes);
body = yield buffer_utils_1.streamToBuffer(serverRes);
const cleanHeaders = request_utils_1.cleanUpHeaders(serverHeaders);

@@ -607,3 +756,3 @@ modifiedRes = yield this.beforeResponse({

serverHeaders['content-type'] = 'application/json';
resBodyOverride = JSON.stringify(modifiedRes.json);
resBodyOverride = buffer_utils_1.asBuffer(JSON.stringify(modifiedRes.json));
}

@@ -658,3 +807,3 @@ else {

// make multiple requests. If/when that happens, we don't need more event listeners.
if (currentlyForwardingSockets.has(socket))
if (this.outgoingSockets.has(socket))
return;

@@ -664,13 +813,14 @@ // Add this port to our list of active ports, once it's connected (before then it has no port)

socket.once('connect', () => {
currentlyForwardingSockets.add(socket);
this.outgoingSockets.add(socket);
});
}
else if (socket.localPort !== undefined) {
currentlyForwardingSockets.add(socket);
this.outgoingSockets.add(socket);
}
// Remove this port from our list of active ports when it's closed
// This is called for both clean closes & errors.
socket.once('close', () => currentlyForwardingSockets.delete(socket));
socket.once('close', () => this.outgoingSockets.delete(socket));
});
if (reqBodyOverride) {
clientReq.body.asStream().resume(); // Dump any remaining real request body
if (reqBodyOverride.length > 0)

@@ -730,2 +880,3 @@ serverReq.end(reqBodyOverride);

serialize(channel) {
var _a, _b, _c, _d, _e, _f;
if (this.beforeRequest) {

@@ -748,5 +899,26 @@ channel.onRequest('beforeRequest', (req) => __awaiter(this, void 0, void 0, function* () {

forwarding: this.forwarding
} : {}), { lookupOptions: this.lookupOptions, ignoreHostCertificateErrors: this.ignoreHostHttpsErrors, clientCertificateHostMap: _.mapValues(this.clientCertificateHostMap, ({ pfx, passphrase }) => ({ pfx: serialization_1.serializeBuffer(pfx), passphrase })), hasBeforeRequestCallback: !!this.beforeRequest, hasBeforeResponseCallback: !!this.beforeResponse });
} : {}), { proxyConfig: this.proxyConfig, lookupOptions: this.lookupOptions, ignoreHostCertificateErrors: this.ignoreHostHttpsErrors, clientCertificateHostMap: _.mapValues(this.clientCertificateHostMap, ({ pfx, passphrase }) => ({ pfx: serialization_1.serializeBuffer(pfx), passphrase })), transformRequest: Object.assign(Object.assign({}, this.transformRequest), {
// Body is always serialized as a base64 buffer:
replaceBody: !!((_a = this.transformRequest) === null || _a === void 0 ? void 0 : _a.replaceBody)
? serialization_1.serializeBuffer(buffer_utils_1.asBuffer(this.transformRequest.replaceBody))
: undefined,
// Update objects need to capture undefined & null as distinct values:
updateHeaders: !!((_b = this.transformRequest) === null || _b === void 0 ? void 0 : _b.updateHeaders)
? JSON.stringify(this.transformRequest.updateHeaders, (k, v) => v === undefined ? SERIALIZED_OMIT : v)
: undefined, updateJsonBody: !!((_c = this.transformRequest) === null || _c === void 0 ? void 0 : _c.updateJsonBody)
? JSON.stringify(this.transformRequest.updateJsonBody, (k, v) => v === undefined ? SERIALIZED_OMIT : v)
: undefined }), transformResponse: Object.assign(Object.assign({}, this.transformResponse), {
// Body is always serialized as a base64 buffer:
replaceBody: !!((_d = this.transformResponse) === null || _d === void 0 ? void 0 : _d.replaceBody)
? serialization_1.serializeBuffer(buffer_utils_1.asBuffer(this.transformResponse.replaceBody))
: undefined,
// Update objects need to capture undefined & null as distinct values:
updateHeaders: !!((_e = this.transformResponse) === null || _e === void 0 ? void 0 : _e.updateHeaders)
? JSON.stringify(this.transformResponse.updateHeaders, (k, v) => v === undefined ? SERIALIZED_OMIT : v)
: undefined, updateJsonBody: !!((_f = this.transformResponse) === null || _f === void 0 ? void 0 : _f.updateJsonBody)
? JSON.stringify(this.transformResponse.updateJsonBody, (k, v) => v === undefined ? SERIALIZED_OMIT : v)
: undefined }), hasBeforeRequestCallback: !!this.beforeRequest, hasBeforeResponseCallback: !!this.beforeResponse });
}
static deserialize(data, channel) {
var _a, _b, _c, _d, _e, _f;
let beforeRequest;

@@ -773,5 +945,17 @@ let beforeResponse;

return new PassThroughHandler(Object.assign(Object.assign({ beforeRequest,
beforeResponse }, data.forwardToLocation ? {
beforeResponse, transformRequest: Object.assign(Object.assign(Object.assign(Object.assign({}, data.transformRequest), (((_a = data.transformRequest) === null || _a === void 0 ? void 0 : _a.replaceBody) !== undefined ? {
replaceBody: serialization_1.deserializeBuffer(data.transformRequest.replaceBody)
} : {})), (((_b = data.transformRequest) === null || _b === void 0 ? void 0 : _b.updateHeaders) !== undefined ? {
updateHeaders: mapOmitToUndefined(JSON.parse(data.transformRequest.updateHeaders))
} : {})), (((_c = data.transformRequest) === null || _c === void 0 ? void 0 : _c.updateJsonBody) !== undefined ? {
updateJsonBody: mapOmitToUndefined(JSON.parse(data.transformRequest.updateJsonBody))
} : {})), transformResponse: Object.assign(Object.assign(Object.assign(Object.assign({}, data.transformResponse), (((_d = data.transformResponse) === null || _d === void 0 ? void 0 : _d.replaceBody) !== undefined ? {
replaceBody: serialization_1.deserializeBuffer(data.transformResponse.replaceBody)
} : {})), (((_e = data.transformResponse) === null || _e === void 0 ? void 0 : _e.updateHeaders) !== undefined ? {
updateHeaders: mapOmitToUndefined(JSON.parse(data.transformResponse.updateHeaders))
} : {})), (((_f = data.transformResponse) === null || _f === void 0 ? void 0 : _f.updateJsonBody) !== undefined ? {
updateJsonBody: mapOmitToUndefined(JSON.parse(data.transformResponse.updateJsonBody))
} : {})) }, data.forwardToLocation ? {
forwarding: { targetHost: data.forwardToLocation }
} : {}), { forwarding: data.forwarding, lookupOptions: data.lookupOptions, ignoreHostHttpsErrors: data.ignoreHostCertificateErrors, clientCertificateHostMap: _.mapValues(data.clientCertificateHostMap, ({ pfx, passphrase }) => ({ pfx: serialization_1.deserializeBuffer(pfx), passphrase })) }));
} : {}), { forwarding: data.forwarding, proxyConfig: data.proxyConfig, lookupOptions: data.lookupOptions, ignoreHostHttpsErrors: data.ignoreHostCertificateErrors, clientCertificateHostMap: _.mapValues(data.clientCertificateHostMap, ({ pfx, passphrase }) => ({ pfx: serialization_1.deserializeBuffer(pfx), passphrase })) }));
}

@@ -822,27 +1006,2 @@ }

};
// Passthrough handlers need to spot loops - tracking ongoing sockets lets us get pretty
// close to doing that (for 1 step loops, at least):
// We keep a list of all currently active outgoing sockets.
const currentlyForwardingSockets = new Set();
// We need to normalize ips for comparison, because the same ip may be reported as ::ffff:127.0.0.1
// and 127.0.0.1 on the two sides of the connection, for the same ip.
const normalizeIp = (ip) => (ip && ip.startsWith('::ffff:'))
? ip.slice('::ffff:'.length)
: ip;
// For incoming requests, compare the address & port: if they match, we've almost certainly got a loop.
// I don't think it's generally possible to see the same ip on different interfaces from one process (you need
// ip-netns network namespaces), but if it is, then there's a tiny chance of false positives here. If we have ip X,
// and on another interface somebody else has ip X, and they send a request with the same incoming port as an
// outgoing request we have on the other interface, we'll assume it's a loop. Extremely unlikely imo.
const isRequestLoop = (incomingSocket) => _.some([...currentlyForwardingSockets], (outgoingSocket) => {
if (!outgoingSocket.localAddress || !outgoingSocket.localPort) {
// It's possible for sockets in currentlyForwardingSockets to be closed, in which case these
// properties will be undefined. If so, we know they're not relevant to loops, so skip entirely.
return false;
}
else {
return normalizeIp(outgoingSocket.localAddress) === normalizeIp(incomingSocket.remoteAddress) &&
outgoingSocket.localPort === incomingSocket.remotePort;
}
});
//# sourceMappingURL=request-handlers.js.map

@@ -15,2 +15,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.RequestRuleBuilder = void 0;
const lodash_1 = require("lodash");

@@ -17,0 +18,0 @@ const request_handlers_1 = require("./request-handlers");

@@ -15,2 +15,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.RequestRule = void 0;
const uuid = require("uuid/v4");

@@ -17,0 +18,0 @@ const request_utils_1 = require("../../util/request-utils");

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.deserializeWebSocketRuleData = exports.deserializeRuleData = exports.serializeRuleData = exports.validateMockRuleData = void 0;
const serialization_1 = require("../util/serialization");

@@ -4,0 +5,0 @@ const matchers = require("./matchers");

@@ -9,2 +9,3 @@ /**

import { CloseConnectionHandler, TimeoutHandler, ForwardingOptions, PassThroughLookupOptions } from '../requests/request-handlers';
import { ProxyConfig } from '../../util/http-agents';
export interface WebSocketHandler extends Explainable, Serializable {

@@ -34,2 +35,6 @@ type: keyof typeof WsHandlerLookup;

/**
* Upstream proxy configuration: pass through requests via this proxy
*/
proxyConfig?: ProxyConfig;
/**
* Custom DNS options, to allow configuration of the resolver used

@@ -45,2 +50,3 @@ * when forwarding requests upstream. Passing any option switches

forwarding?: ForwardingOptions;
proxyConfig?: ProxyConfig;
ignoreHostCertificateErrors?: string[];

@@ -55,2 +61,3 @@ lookupOptions?: PassThroughLookupOptions;

readonly lookupOptions: PassThroughLookupOptions | undefined;
readonly proxyConfig?: ProxyConfig;
private _cacheableLookupInstance;

@@ -70,3 +77,3 @@ private lookup;

'close-connection': typeof CloseConnectionHandler;
'timeout': typeof TimeoutHandler;
timeout: typeof TimeoutHandler;
};

@@ -15,2 +15,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.WsHandlerLookup = exports.TimeoutHandler = exports.CloseConnectionHandler = exports.PassThroughWebSocketHandler = void 0;
const _ = require("lodash");

@@ -23,8 +24,21 @@ const url = require("url");

const request_handlers_1 = require("../requests/request-handlers");
exports.CloseConnectionHandler = request_handlers_1.CloseConnectionHandler;
exports.TimeoutHandler = request_handlers_1.TimeoutHandler;
Object.defineProperty(exports, "CloseConnectionHandler", { enumerable: true, get: function () { return request_handlers_1.CloseConnectionHandler; } });
Object.defineProperty(exports, "TimeoutHandler", { enumerable: true, get: function () { return request_handlers_1.TimeoutHandler; } });
const request_utils_1 = require("../../util/request-utils");
const buffer_utils_1 = require("../../util/buffer-utils");
const http_agents_1 = require("../../util/http-agents");
function isOpen(socket) {
return socket.readyState === WebSocket.OPEN;
}
// Based on ws's validation.js
function isValidStatusCode(code) {
return ( // Standard code:
code >= 1000 &&
code <= 1014 &&
code !== 1004 &&
code !== 1005 &&
code !== 1006) || ( // Application-specific code:
code >= 3000 && code <= 4999);
}
const INVALID_STATUS_REGEX = /Invalid WebSocket frame: invalid status code (\d+)/;
function pipeWebSocket(inSocket, outSocket) {

@@ -43,8 +57,12 @@ const onPipeFailed = (op) => (err) => {

inSocket.on('close', (num, reason) => {
if (num >= 1000 && num <= 1004) {
outSocket.close(num, reason);
if (isValidStatusCode(num)) {
try {
outSocket.close(num, reason);
}
catch (e) {
console.warn(e);
outSocket.close();
}
}
else {
console.log(`Unhappily closing websocket ${num}: ${reason}`);
// Unspecified or invalid error
outSocket.close();

@@ -61,2 +79,22 @@ }

});
// If either socket has an general error (connection failure, but also could be invalid WS
// frames) then we kill the raw connection upstream to simulate a generic connection error:
inSocket.on('error', (err) => {
console.log(`Error in proxied WebSocket:`, err);
const rawOutSocket = outSocket;
if (err.message.match(INVALID_STATUS_REGEX)) {
const status = parseInt(INVALID_STATUS_REGEX.exec(err.message)[1]);
// Simulate errors elsewhere by messing with ws internals. This may break things,
// that's effectively on purpose: we're simulating the client going wrong:
const buf = Buffer.allocUnsafe(2);
buf.writeUInt16BE(status); // status comes from readUInt16BE, so always fits
rawOutSocket._sender.doClose(buf, true, () => {
rawOutSocket._socket.destroy();
});
}
else {
// Unknown error, just kill the connection with no explanation
rawOutSocket._socket.destroy();
}
});
}

@@ -70,3 +108,3 @@ function mirrorRejection(socket, rejectionResponse) {

'\r\n\r\n');
const body = yield request_utils_1.streamToBuffer(rejectionResponse);
const body = yield buffer_utils_1.streamToBuffer(rejectionResponse);
if (socket.writable)

@@ -103,2 +141,3 @@ socket.write(body);

this.lookupOptions = options.lookupOptions;
this.proxyConfig = options.proxyConfig;
}

@@ -131,4 +170,4 @@ lookup() {

this.wsServer.on('connection', (ws) => {
pipeWebSocket(ws, ws.upstreamSocket);
pipeWebSocket(ws.upstreamSocket, ws);
pipeWebSocket(ws, ws.upstreamWebSocket);
pipeWebSocket(ws.upstreamWebSocket, ws);
});

@@ -191,3 +230,3 @@ }

// Initialize the server when we handle the first actual request. Mainly just so we
// don't try to initialize it in a browser when buiding rules initially.
// don't try to initialize it in a browser when building rules initially.
if (!this.wsServer)

@@ -199,5 +238,17 @@ this.wsServer = new WebSocket.Server({ noServer: true });

!_.includes(this.ignoreHostHttpsErrors, parsedUrl.host);
const upstreamSocket = new WebSocket(wsUrl, {
const effectivePort = !!parsedUrl.port
? parseInt(parsedUrl.port, 10)
: parsedUrl.protocol == 'wss:' ? 443 : 80;
const agent = http_agents_1.getAgent({
protocol: parsedUrl.protocol,
hostname: parsedUrl.hostname,
port: effectivePort,
proxyConfig: this.proxyConfig,
tryHttp2: false,
keepAlive: false // Not a thing for websockets: they take over the whole connection
});
const upstreamWebSocket = new WebSocket(wsUrl, {
rejectUnauthorized: checkServerCertificate,
maxPayload: 0,
agent,
lookup: this.lookup(),

@@ -207,6 +258,6 @@ headers: _.omitBy(headers, (_v, headerName) => headerName.toLowerCase().startsWith('sec-websocket') ||

});
upstreamSocket.once('open', () => {
upstreamWebSocket.once('open', () => {
// Presumably the below adds an error handler. But what about before we get here?
this.wsServer.handleUpgrade(req, incomingSocket, head, (ws) => {
ws.upstreamSocket = upstreamSocket;
ws.upstreamWebSocket = upstreamWebSocket;
this.wsServer.emit('connection', ws);

@@ -216,3 +267,3 @@ });

// If the upstream says no, we say no too.
upstreamSocket.on('unexpected-response', (req, res) => {
upstreamWebSocket.on('unexpected-response', (req, res) => {
console.log(`Unexpected websocket response from ${wsUrl}: ${res.statusCode}`);

@@ -222,7 +273,7 @@ mirrorRejection(incomingSocket, res);

// If there's some other error, we just kill the socket:
upstreamSocket.on('error', (e) => {
upstreamWebSocket.on('error', (e) => {
console.warn(e);
incomingSocket.end();
});
incomingSocket.on('error', () => upstreamSocket.close(1011)); // Internal error
incomingSocket.on('error', () => upstreamWebSocket.close(1011)); // Internal error
}

@@ -234,2 +285,3 @@ serialize() {

forwarding: this.forwarding,
proxyConfig: this.proxyConfig,
ignoreHostCertificateErrors: this.ignoreHostHttpsErrors,

@@ -236,0 +288,0 @@ lookupOptions: this.lookupOptions

@@ -15,2 +15,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.WebSocketRuleBuilder = void 0;
const websocket_handlers_1 = require("./websocket-handlers");

@@ -17,0 +18,0 @@ const base_rule_builder_1 = require("../base-rule-builder");

@@ -15,2 +15,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.WebSocketRule = void 0;
const uuid = require("uuid/v4");

@@ -17,0 +18,0 @@ const request_utils_1 = require("../../util/request-utils");

@@ -12,2 +12,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.createComboServer = void 0;
const _ = require("lodash");

@@ -33,9 +34,11 @@ const net = require("net");

_handle.oncertcb = function (info) {
var _a, _b, _c, _d, _e, _f, _g, _h, _j;
var _a, _b, _c, _d, _e, _f, _g, _h;
tlsSocket.servername = info.servername;
tlsSocket.initialRemoteAddress = tlsSocket.remoteAddress || // Normal case
((_a = tlsSocket._parent) === null || _a === void 0 ? void 0 : _a.remoteAddress) || // For early failing sockets
((_d = (_c = (_b = tlsSocket._handle) === null || _b === void 0 ? void 0 : _b._parentWrap) === null || _c === void 0 ? void 0 : _c.stream) === null || _d === void 0 ? void 0 : _d.remoteAddress); // For HTTP/2 CONNECT
tlsSocket.initialRemotePort = tlsSocket.remotePort || ((_e = tlsSocket._parent) === null || _e === void 0 ? void 0 : _e.remotePort) || ((_h = (_g = (_f = tlsSocket._handle) === null || _f === void 0 ? void 0 : _f._parentWrap) === null || _g === void 0 ? void 0 : _g.stream) === null || _h === void 0 ? void 0 : _h.remotePort);
return (_j = loadSNI) === null || _j === void 0 ? void 0 : _j.apply(this, arguments);
((_a = tlsSocket._parent) === null || _a === void 0 ? void 0 : _a.remoteAddress) || // For early failing sockets
((_d = (_c = (_b = tlsSocket._handle) === null || _b === void 0 ? void 0 : _b._parentWrap) === null || _c === void 0 ? void 0 : _c.stream) === null || _d === void 0 ? void 0 : _d.remoteAddress); // For HTTP/2 CONNECT
tlsSocket.initialRemotePort = tlsSocket.remotePort ||
((_e = tlsSocket._parent) === null || _e === void 0 ? void 0 : _e.remotePort) ||
((_h = (_g = (_f = tlsSocket._handle) === null || _f === void 0 ? void 0 : _f._parentWrap) === null || _g === void 0 ? void 0 : _g.stream) === null || _h === void 0 ? void 0 : _h.remotePort);
return loadSNI === null || loadSNI === void 0 ? void 0 : loadSNI.apply(this, arguments);
};

@@ -49,2 +52,3 @@ };

// If you silently close it very quickly, you probably don't trust us
socket.once('error', reject);
socket.once('close', reject);

@@ -104,3 +108,4 @@ socket.once('end', reject);

var _a, _b, _c;
const timingInfo = socket.__timingInfo || ((_a = socket._parent) === null || _a === void 0 ? void 0 : _a.__timingInfo) ||
const timingInfo = socket.__timingInfo ||
((_a = socket._parent) === null || _a === void 0 ? void 0 : _a.__timingInfo) ||
buildTimingInfo();

@@ -112,5 +117,6 @@ return {

remoteIpAddress: socket.remoteAddress || // Normal case
((_b = socket._parent) === null || _b === void 0 ? void 0 : _b.remoteAddress) || // Pre-certCB error, e.g. timeout
((_b = socket._parent) === null || _b === void 0 ? void 0 : _b.remoteAddress) || // Pre-certCB error, e.g. timeout
socket.initialRemoteAddress,
remotePort: socket.remotePort || ((_c = socket._parent) === null || _c === void 0 ? void 0 : _c.remotePort) ||
remotePort: socket.remotePort ||
((_c = socket._parent) === null || _c === void 0 ? void 0 : _c.remotePort) ||
socket.initialRemotePort,

@@ -117,0 +123,0 @@ tags: [],

@@ -15,2 +15,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.ServerMockedEndpoint = void 0;
const util = require("util");

@@ -17,0 +18,0 @@ class ServerMockedEndpoint {

@@ -8,3 +8,3 @@ /**

import http2 = require("http2");
import { InitiatedRequest, CompletedRequest, CompletedResponse, TlsRequest, ClientError, ParsedBody } from "../types";
import { InitiatedRequest, CompletedRequest, CompletedResponse, TlsRequest, ClientError, OngoingBody } from "../types";
import { Mockttp, AbstractMockttp, MockttpOptions, PortRange } from "../mockttp";

@@ -16,3 +16,3 @@ import { RequestRuleData } from "../rules/requests/request-rule";

protocol?: string;
body?: ParsedBody;
body?: OngoingBody;
path?: string;

@@ -19,0 +19,0 @@ };

@@ -71,4 +71,4 @@ "use strict";

this.httpsOptions = options.https;
this.isHttp2Enabled = (_a = options.http2, (_a !== null && _a !== void 0 ? _a : 'fallback'));
this.maxBodySize = (_b = options.maxBodySize, (_b !== null && _b !== void 0 ? _b : Infinity));
this.isHttp2Enabled = (_a = options.http2) !== null && _a !== void 0 ? _a : 'fallback';
this.maxBodySize = (_b = options.maxBodySize) !== null && _b !== void 0 ? _b : Infinity;
this.eventEmitter = new events_1.EventEmitter();

@@ -326,2 +326,6 @@ this.defaultWsHandler = new websocket_handlers_1.PassThroughWebSocketHandler({

request.once('aborted', abort);
// In Node 16+ we don't get an abort event in many cases, just closes, but we know
// it's aborted because the response is closed with no other result being set.
rawResponse.once('close', () => setImmediate(abort));
request.once('error', () => setImmediate(abort));
this.announceInitialRequestAsync(request);

@@ -328,0 +332,0 @@ const response = request_utils_1.trackResponse(rawResponse, request.timingEvents, request.tags, { maxSize: this.maxBodySize });

@@ -15,2 +15,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.MockttpStandalone = void 0;
/// <reference path="../../custom-typings/asynciterator.d.ts" />

@@ -155,3 +156,4 @@ const path = require("path");

let port = parseInt(isMatch[1], 10);
let wsServer = isSubscriptionRequest ? (_a = this.subscriptionServers[port]) === null || _a === void 0 ? void 0 : _a.wsServer :
let wsServer = isSubscriptionRequest ?
(_a = this.subscriptionServers[port]) === null || _a === void 0 ? void 0 : _a.wsServer :
this.streamServers[port];

@@ -187,6 +189,11 @@ if (wsServer) {

this.routers[mockPort] = mockServerRouter;
mockServerRouter.post('/stop', (req, res) => __awaiter(this, void 0, void 0, function* () {
let running = true;
const stopServer = () => __awaiter(this, void 0, void 0, function* () {
if (!running)
return;
running = false;
yield mockServer.stop();
this.mockServers = _.reject(this.mockServers, mockServer);
delete this.routers[mockPort];
this.subscriptionServers[mockPort].close();
delete this.subscriptionServers[mockPort];

@@ -196,2 +203,5 @@ this.streamServers[mockPort].close();

delete this.streamServers[mockPort];
});
mockServerRouter.post('/stop', (req, res) => __awaiter(this, void 0, void 0, function* () {
yield stopServer();
res.status(200).send(JSON.stringify({

@@ -222,2 +232,10 @@ success: true

});
// Handle errors by logging & stopping this server instance
const onStreamError = (e) => {
console.error("Error in server standalone stream, shutting down mock server");
console.error(e);
stopServer();
};
wsSocket.on('error', onStreamError);
serverSocket.on('error', onStreamError);
const schema = yield this.loadSchema('schema.gql', mockServer, serverSocket);

@@ -224,0 +242,0 @@ this.subscriptionServers[mockPort] = subscriptions_transport_ws_1.SubscriptionServer.create({

@@ -6,4 +6,4 @@ /**

import { Duplex } from "stream";
import { IResolvers } from "@graphql-tools/utils/Interfaces";
import type { IResolvers } from "@graphql-tools/utils/Interfaces";
import MockttpServer from "../server/mockttp-server";
export declare function buildStandaloneModel(mockServer: MockttpServer, stream: Duplex): IResolvers;

@@ -15,2 +15,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.buildStandaloneModel = void 0;
const _ = require("lodash");

@@ -17,0 +18,0 @@ const graphql_1 = require("graphql");

@@ -59,6 +59,6 @@ /**

export interface OngoingRequest extends Request, EventEmitter {
body: ParsedBody;
body: OngoingBody;
timingEvents: TimingEvents;
}
export interface ParsedBody {
export interface OngoingBody {
asStream: () => stream.Readable;

@@ -73,9 +73,54 @@ asBuffer: () => Promise<Buffer>;

export interface CompletedBody {
/**
* The raw bytes of the response. If a content encoding was used, this is
* the raw encoded data.
*/
buffer: Buffer;
/**
* @deprecated Use `getDecodedBuffer()` instead with promises, to support
* more encodings and improve performance.
*/
decodedBuffer: Buffer | undefined;
/**
* The decoded bytes of the response. If no encoding was used, this is the
* same as `.buffer`. The response is decoded and returned asynchronously
* as a Promise.
*/
getDecodedBuffer(): Promise<Buffer | undefined>;
/**
* @deprecated Use `getText()` instead with promises, to support
* more encodings and improve performance.
*/
text: string | undefined;
/**
* The contents of the response, decoded and parsed as a UTF-8 string.
* The response is decoded and returned asynchronously as a Promise.
*/
getText(): Promise<string | undefined>;
/**
* @deprecated Use `getJson()` instead with promises, to support
* more encodings and improve performance.
*/
json: object | undefined;
/**
* The contents of the response, decoded, parsed as UTF-8 string, and
     * then parsed as JSON. The response is decoded and returned asynchronously
* as a Promise.
*/
getJson(): Promise<object | undefined>;
/**
* @deprecated Use `getDecodedBuffer()` instead with promises, to support
* more encodings and improve performance.
*/
formData: {
[key: string]: string | string[] | undefined;
} | undefined;
/**
* The contents of the response, decoded, parsed as UTF-8 string, and
* then parsed form-encoded data. The response is decoded and returned
* asynchronously as a Promise.
*/
getFormData(): Promise<{
[key: string]: string | string[] | undefined;
} | undefined>;
}
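
For illustration, here is a minimal sketch of reading a body through the new promise-based accessors described above. It assumes `CompletedBody` is importable from the package root alongside the other exported types; the `describeBody` helper is hypothetical, not part of the package.

import { CompletedBody } from "mockttp"; // import path assumed

// Sketch only: prefer the async accessors, which handle all supported encodings.
async function describeBody(body: CompletedBody): Promise<string> {
    const json = await body.getJson();      // decoded & parsed, or undefined on failure
    if (json) return `JSON body: ${JSON.stringify(json)}`;

    const text = await body.getText();      // decoded UTF-8 text, or undefined
    if (text !== undefined) return `Text body (${text.length} chars)`;

    // Fall back to the raw (still encoded) bytes:
    return `Raw body: ${body.buffer.length} bytes`;
}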

@@ -99,3 +144,3 @@ export interface InitiatedRequest extends Request {

getHeaders(): Headers;
body: ParsedBody;
body: OngoingBody;
timingEvents: TimingEvents;

@@ -102,0 +147,0 @@ tags: string[];

@@ -6,2 +6,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.Method = exports.DEFAULT_STANDALONE_PORT = void 0;
exports.DEFAULT_STANDALONE_PORT = 45454;

@@ -8,0 +9,0 @@ var Method;

@@ -6,2 +6,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.destroyable = void 0;
// Mostly from https://github.com/isaacs/server-destroy (which seems to be unmaintained)

@@ -8,0 +9,0 @@ function destroyable(server) {

@@ -6,2 +6,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.readFile = void 0;
const fs = require("fs");

@@ -8,0 +9,0 @@ function readFile(filename, encoding) {

@@ -6,2 +6,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.normalizeUrl = exports.legacyNormalizeUrl = void 0;
const url = require("url");

@@ -8,0 +9,0 @@ const _ = require("lodash");

@@ -15,2 +15,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.filter = void 0;
function filter(array, test) {

@@ -17,0 +18,0 @@ return __awaiter(this, void 0, void 0, function* () {

@@ -8,3 +8,2 @@ /**

import * as http2 from 'http2';
import * as stream from 'stream';
import { Headers, OngoingRequest, CompletedRequest, OngoingResponse, CompletedResponse, CompletedBody, TimingEvents, InitiatedRequest } from "../types";

@@ -22,8 +21,3 @@ export declare const isAbsoluteUrl: (url: string) => boolean;

export declare function h1HeadersToH2(headers: Headers): Headers;
declare type BufferInProgress = Promise<Buffer> & {
currentChunks: Buffer[];
failedWith?: Error;
};
export declare const streamToBuffer: (input: stream.Readable, maxSize?: any) => BufferInProgress;
export declare const handleContentEncoding: (body: Buffer, encoding?: string | string[] | undefined) => Buffer;
export declare const isMockttpBody: (body: any) => body is CompletedBody;
export declare const buildBodyReader: (body: Buffer, headers: Headers) => CompletedBody;

@@ -30,0 +24,0 @@ export declare const parseRequestBody: (req: http.IncomingMessage | http2.Http2ServerRequest, options: {

@@ -15,2 +15,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.tryToParseHttp = exports.waitForCompletedResponse = exports.trackResponse = exports.waitForCompletedRequest = exports.buildAbortedRequest = exports.buildInitiatedRequest = exports.cleanUpHeaders = exports.parseRequestBody = exports.buildBodyReader = exports.isMockttpBody = exports.h1HeadersToH2 = exports.h2HeadersToH1 = exports.isHttp2 = exports.dropDefaultHeaders = exports.setHeaders = exports.shouldKeepAlive = exports.getPathFromAbsoluteUrl = exports.getUrlWithoutProtocol = exports.isAbsoluteProtocollessUrl = exports.isRelativeUrl = exports.isAbsoluteUrl = void 0;
const _ = require("lodash");

@@ -21,20 +22,21 @@ const tls_1 = require("tls");

const querystring = require("querystring");
const zlib = require("zlib");
const brotliDecompress = require("brotli/decompress");
const now = require("performance-now");
const url = require("url");
const util_1 = require("../util/util");
const MAX_BUFFER_SIZE = util_1.isNode
? require('buffer').constants.MAX_LENGTH
: Infinity;
const http_encoding_1 = require("http-encoding");
const util_1 = require("./util");
const buffer_utils_1 = require("./buffer-utils");
// Is this URL fully qualified?
// Note that this supports only HTTP - no websockets or anything else.
exports.isAbsoluteUrl = (url) => url.toLowerCase().startsWith('http://') ||
const isAbsoluteUrl = (url) => url.toLowerCase().startsWith('http://') ||
url.toLowerCase().startsWith('https://');
exports.isRelativeUrl = (url) => url.startsWith('/');
exports.isAbsoluteProtocollessUrl = (url) => !exports.isAbsoluteUrl(url) && !exports.isRelativeUrl(url);
exports.getUrlWithoutProtocol = (url) => {
exports.isAbsoluteUrl = isAbsoluteUrl;
const isRelativeUrl = (url) => url.startsWith('/');
exports.isRelativeUrl = isRelativeUrl;
const isAbsoluteProtocollessUrl = (url) => !exports.isAbsoluteUrl(url) && !exports.isRelativeUrl(url);
exports.isAbsoluteProtocollessUrl = isAbsoluteProtocollessUrl;
const getUrlWithoutProtocol = (url) => {
return url.split('://', 2).slice(-1).join('');
};
exports.getPathFromAbsoluteUrl = (url) => {
exports.getUrlWithoutProtocol = getUrlWithoutProtocol;
const getPathFromAbsoluteUrl = (url) => {
const pathIndex = util_1.nthIndexOf(url, '/', 3);

@@ -48,6 +50,8 @@ if (pathIndex !== -1) {

};
exports.shouldKeepAlive = (req) => req.httpVersion !== '1.0' &&
exports.getPathFromAbsoluteUrl = getPathFromAbsoluteUrl;
const shouldKeepAlive = (req) => req.httpVersion !== '1.0' &&
req.headers['connection'] !== 'close' &&
req.headers['proxy-connection'] !== 'close';
exports.setHeaders = (response, headers) => {
exports.shouldKeepAlive = shouldKeepAlive;
const setHeaders = (response, headers) => {
Object.keys(headers).forEach((header) => {

@@ -60,2 +64,3 @@ let value = headers[header];

};
exports.setHeaders = setHeaders;
// If the user explicitly specifies headers, we tell Node not to handle them,

@@ -108,56 +113,5 @@ // so the user-defined headers are the full set.

exports.h1HeadersToH2 = h1HeadersToH2;
// Takes a buffer and a stream, returns a simple stream that outputs the buffer then the stream.
const bufferThenStream = (buffer, inputStream) => {
const outputStream = new stream.PassThrough();
// Forward the buffered data so far
outputStream.write(Buffer.concat(buffer.currentChunks));
// After the data, forward errors from the buffer
if (buffer.failedWith) {
// Announce async, to ensure listeners have time to get set up
setTimeout(() => outputStream.emit('error', buffer.failedWith));
}
else {
// Forward future data as it arrives
inputStream.pipe(outputStream);
// Forward any future errors from the input stream
inputStream.on('error', (e) => outputStream.emit('error', e));
// Silence 'unhandled rejection' warnings here, since we'll handle them on the stream instead
buffer.catch(() => { });
}
return outputStream;
};
const bufferToStream = (buffer) => {
const outputStream = new stream.PassThrough();
outputStream.end(buffer);
return outputStream;
};
exports.streamToBuffer = (input, maxSize = MAX_BUFFER_SIZE) => {
let chunks = [];
const bufferPromise = new Promise((resolve, reject) => {
let currentSize = 0;
input.on('data', (d) => {
currentSize += d.length;
// If we go over maxSize, drop the whole stream, so the buffer
// resolves empty. MaxSize should be large, so this is rare,
// and only happens as an alternative to crashing the process.
if (currentSize > maxSize) {
chunks = []; // Drop all the data so far
return; // Don't save any more data
}
chunks.push(d);
});
input.once('end', () => resolve(Buffer.concat(chunks)));
input.once('aborted', () => {
bufferPromise.failedWith = new Error('Aborted');
reject(bufferPromise.failedWith);
});
input.on('error', (e) => {
bufferPromise.failedWith = bufferPromise.failedWith || e;
reject(e);
});
});
bufferPromise.currentChunks = chunks;
return bufferPromise;
};
const parseBodyStream = (bodyStream, maxSize) => {
// Parse an in-progress request or response stream, i.e. where the body or possibly even the headers have
// not been fully received/sent yet.
const parseBodyStream = (bodyStream, maxSize, getHeaders) => {
let bufferPromise = null;

@@ -172,8 +126,8 @@ let completedBuffer = null;

return completedBuffer
? bufferToStream(completedBuffer)
: bufferThenStream(body.asBuffer(), bodyStream);
? buffer_utils_1.bufferToStream(completedBuffer)
: buffer_utils_1.bufferThenStream(body.asBuffer(), bodyStream);
},
asBuffer() {
if (!bufferPromise) {
bufferPromise = exports.streamToBuffer(bodyStream, maxSize);
bufferPromise = buffer_utils_1.streamToBuffer(bodyStream, maxSize);
bufferPromise

@@ -185,4 +139,10 @@ .then((buffer) => completedBuffer = buffer)

},
asDecodedBuffer() {
return __awaiter(this, void 0, void 0, function* () {
const buffer = yield body.asBuffer();
return http_encoding_1.decodeBuffer(buffer, getHeaders()['content-encoding']);
});
},
asText(encoding = 'utf8') {
return body.asBuffer().then((b) => b.toString(encoding));
return body.asDecodedBuffer().then((b) => b.toString(encoding));
},

@@ -206,2 +166,12 @@ asJson() {

}
function runAsyncOrUndefined(func) {
return __awaiter(this, void 0, void 0, function* () {
try {
return yield func();
}
catch (_a) {
return undefined;
}
});
}
const waitForBody = (body, headers) => __awaiter(void 0, void 0, void 0, function* () {

@@ -211,44 +181,38 @@ const bufferBody = yield body.asBuffer();

});
exports.handleContentEncoding = (body, encoding) => {
if (_.isArray(encoding) || (typeof encoding === 'string' && encoding.indexOf(', ') >= 0)) {
const encodings = typeof encoding === 'string' ? encoding.split(', ').reverse() : encoding;
return encodings.reduce((content, nextEncoding) => {
return exports.handleContentEncoding(content, nextEncoding);
}, body);
}
if (encoding === 'gzip' || encoding === 'x-gzip') {
return zlib.gunzipSync(body);
}
else if (encoding === 'deflate' || encoding === 'x-deflate') {
// Deflate is ambiguous, and may or may not have a zlib wrapper.
// This checks the buffer header directly, based on
// https://stackoverflow.com/a/37528114/68051
const lowNibble = body[0] & 0xF;
if (lowNibble === 8) {
return zlib.inflateSync(body);
}
else {
return zlib.inflateRawSync(body);
}
}
else if (encoding === 'br') {
return Buffer.from(brotliDecompress(body));
}
else if (encoding === 'amz-1.0') {
// Weird encoding used by some AWS requests, actually just unencoded JSON:
// https://docs.aws.amazon.com/en_us/AmazonCloudWatch/latest/APIReference/making-api-requests.html
return body;
}
else if (!encoding || encoding === 'identity') {
return body;
}
else {
throw new Error(`Unknown encoding: ${encoding}`);
}
const isMockttpBody = (body) => {
return body.hasOwnProperty('getDecodedBuffer');
};
exports.buildBodyReader = (body, headers) => {
exports.isMockttpBody = isMockttpBody;
const buildBodyReader = (body, headers) => {
const completedBody = {
buffer: body,
getDecodedBuffer() {
return __awaiter(this, void 0, void 0, function* () {
return runAsyncOrUndefined(() => __awaiter(this, void 0, void 0, function* () {
return buffer_utils_1.asBuffer(yield http_encoding_1.decodeBuffer(this.buffer, headers['content-encoding']));
}));
});
},
getText() {
return __awaiter(this, void 0, void 0, function* () {
return runAsyncOrUndefined(() => __awaiter(this, void 0, void 0, function* () { return (yield this.getDecodedBuffer()).toString(); }));
});
},
getJson() {
return __awaiter(this, void 0, void 0, function* () {
return runAsyncOrUndefined(() => __awaiter(this, void 0, void 0, function* () { return JSON.parse((yield completedBody.getText())); }));
});
},
getFormData() {
return __awaiter(this, void 0, void 0, function* () {
return runAsyncOrUndefined(() => __awaiter(this, void 0, void 0, function* () {
const text = yield completedBody.getText();
return text ? querystring.parse(text) : undefined;
}));
});
},
// Deprecated sync properties, for backwards compat. Note that these do not
// support new encodings, e.g. Brotli/Zstandard.
get decodedBuffer() {
return runOrUndefined(() => exports.handleContentEncoding(this.buffer, headers['content-encoding']));
return runOrUndefined(() => http_encoding_1.decodeBufferSync(this.buffer, headers['content-encoding']));
},

@@ -267,6 +231,8 @@ get text() {

};
exports.parseRequestBody = (req, options) => {
exports.buildBodyReader = buildBodyReader;
const parseRequestBody = (req, options) => {
let transformedRequest = req;
transformedRequest.body = parseBodyStream(req, options.maxSize);
transformedRequest.body = parseBodyStream(req, options.maxSize, () => req.headers);
};
exports.parseRequestBody = parseRequestBody;
/**

@@ -354,3 +320,3 @@ * Translate from internal header representations (basically Node's header representations) to a

};
trackedResponse.body = parseBodyStream(trackingStream, options.maxSize);
trackedResponse.body = parseBodyStream(trackingStream, options.maxSize, () => trackedResponse.getHeaders());
// Proxy errors (e.g. write-after-end) to the response, so they can be

@@ -394,3 +360,3 @@ // handled elsewhere, rather than killing the process outright.

req.hostname = socket.servername;
const lines = splitBuffer(input, '\r\n');
const lines = buffer_utils_1.splitBuffer(input, '\r\n');
const requestLine = lines[0].slice(0, lines[0].length).toString('ascii');

@@ -405,3 +371,3 @@ const [method, rawUri, httpProtocol] = requestLine.split(" ");

const headers = headerLines
.map((line) => splitBuffer(line, ':', 2))
.map((line) => buffer_utils_1.splitBuffer(line, ':', 2))
.filter((line) => line.length > 1)

@@ -432,3 +398,3 @@ .map((headerParts) => headerParts.map(p => p.toString('utf8')))

const parsedUrl = url.parse(rawUri);
req.path = (_a = parsedUrl.path, (_a !== null && _a !== void 0 ? _a : undefined));
req.path = (_a = parsedUrl.path) !== null && _a !== void 0 ? _a : undefined;
const hostHeader = _.find(req.headers, (_value, key) => key.toLowerCase() === 'host');

@@ -462,18 +428,2 @@ if (hostHeader) {

exports.tryToParseHttp = tryToParseHttp;
function splitBuffer(input, splitter, maxParts = Infinity) {
const parts = [];
let remainingBuffer = input;
while (remainingBuffer.length) {
let endOfPart = remainingBuffer.indexOf(splitter);
if (endOfPart === -1)
endOfPart = remainingBuffer.length;
parts.push(remainingBuffer.slice(0, endOfPart));
remainingBuffer = remainingBuffer.slice(endOfPart + splitter.length);
if (parts.length === maxParts - 1) {
parts.push(remainingBuffer);
break;
}
}
return parts;
}
//# sourceMappingURL=request-utils.js.map

@@ -12,2 +12,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.withDeserializedBodyBuffer = exports.withSerializedBodyBuffer = exports.withDeserializedBodyReader = exports.withSerializedBodyReader = exports.deserializeBuffer = exports.serializeBuffer = exports.ClientServerChannel = exports.Serializable = exports.deserialize = exports.serialize = void 0;
const _ = require("lodash");

@@ -18,2 +19,3 @@ const stream_1 = require("stream");

const request_utils_1 = require("./request-utils");
const buffer_utils_1 = require("./buffer-utils");
function serialize(obj, stream) {

@@ -186,3 +188,5 @@ const channel = new ClientServerChannel(stream);

function withSerializedBodyReader(input) {
return Object.assign({}, input, { body: input.body.buffer.toString('base64') });
return Object.assign({}, input, {
body: buffer_utils_1.asBuffer(input.body.buffer).toString('base64')
});
}

@@ -202,3 +206,3 @@ exports.withSerializedBodyReader = withSerializedBodyReader;

else if (_.isString(input.body)) {
serializedBody = serializeBuffer(Buffer.from(input.body));
serializedBody = serializeBuffer(buffer_utils_1.asBuffer(input.body));
}

@@ -211,4 +215,4 @@ else if (_.isBuffer(input.body)) {

}
else if (input.body.hasOwnProperty('decodedBuffer')) {
serializedBody = serializeBuffer(input.body.buffer);
else if (request_utils_1.isMockttpBody(input.body)) {
serializedBody = serializeBuffer(buffer_utils_1.asBuffer(input.body.buffer));
}

@@ -215,0 +219,0 @@ return Object.assign({}, input, { body: serializedBody });

@@ -0,2 +1,4 @@

import * as net from 'net';
export declare function isLocalPortActive(interfaceIp: '::1' | '127.0.0.1', port: number): Promise<unknown>;
export declare const isLocalIPv6Available: boolean;
export declare const isSocketLoop: (outgoingSockets: net.Socket[] | Set<net.Socket>, incomingSocket: net.Socket) => boolean;

@@ -12,2 +12,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.isSocketLoop = exports.isLocalIPv6Available = exports.isLocalPortActive = void 0;
const _ = require("lodash");

@@ -45,2 +46,26 @@ const os = require("os");

: true;
// We need to normalize ips for comparison, because the same ip may be reported as ::ffff:127.0.0.1
// and 127.0.0.1 on the two sides of the connection, for the same ip.
const normalizeIp = (ip) => (ip && ip.startsWith('::ffff:'))
? ip.slice('::ffff:'.length)
: ip;
// Check whether an incoming socket is the other end of one of our outgoing sockets:
const isSocketLoop = (outgoingSockets, incomingSocket) =>
// We effectively just compare the address & port: if they match, we've almost certainly got a loop.
// I don't think it's generally possible to see the same ip on different interfaces from one process (you need
// ip-netns network namespaces), but if it is, then there's a tiny chance of false positives here. If we have ip X,
// and on another interface somebody else has ip X, and they send a request with the same incoming port as an
// outgoing request we have on the other interface, we'll assume it's a loop. Extremely unlikely imo.
_.some([...outgoingSockets], (outgoingSocket) => {
if (!outgoingSocket.localAddress || !outgoingSocket.localPort) {
// It's possible for sockets in outgoingSockets to be closed, in which case these properties
// will be undefined. If so, we know they're not relevant to loops, so skip entirely.
return false;
}
else {
return normalizeIp(outgoingSocket.localAddress) === normalizeIp(incomingSocket.remoteAddress) &&
outgoingSocket.localPort === incomingSocket.remotePort;
}
});
exports.isSocketLoop = isSocketLoop;
//# sourceMappingURL=socket-util.js.map

@@ -15,2 +15,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.CA = exports.getCA = exports.generateSPKIFingerprint = exports.generateCACertificate = void 0;
const _ = require("lodash");

@@ -17,0 +18,0 @@ const uuid = require("uuid/v4");

/// <reference types="node" />
export declare function nthIndexOf(input: string, matcher: string, n: number): number;
export declare function byteLength(input: string | Buffer): number;
export declare function byteLength(input: string | Uint8Array | Buffer): number;
export declare function delay(t: number): Promise<void>;

@@ -5,0 +5,0 @@ export declare const isWorker: boolean;

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isNode = exports.isWeb = exports.isWorker = exports.delay = exports.byteLength = exports.nthIndexOf = void 0;
function nthIndexOf(input, matcher, n) {

@@ -4,0 +5,0 @@ let index = -1;

{
"name": "mockttp",
"version": "1.2.2",
"version": "2.0.0",
"description": "Mock HTTP server for testing HTTP clients and stubbing webservices",

@@ -17,3 +17,4 @@ "main": "dist/main.js",

"http2-wrapper": false,
"cacheable-lookup": false
"cacheable-lookup": false,
"proxy-agent": false
},

@@ -38,6 +39,7 @@ "types": "dist/main.d.ts",

"test": "npm run build && npm run test:node && npm run test:browser",
"test:node": "NODE_EXTRA_CA_CERTS=./test/fixtures/test-ca.pem mocha -r ts-node/register 'test/**/*.spec.ts'",
"test:browser": "ts-node ./src/standalone/standalone-bin.ts -c karma start",
"test:browser:debug": "ts-node ./src/standalone/standalone-bin.ts -c karma start --single-run=false --browsers ChromeWithCert",
"test:node": "NODE_EXTRA_CA_CERTS=./test/fixtures/test-ca.pem TS_NODE_FILES=true mocha -r ts-node/register 'test/**/*.spec.ts'",
"test:browser": "npm run with-standalone -- karma start",
"test:browser:debug": "npm run with-standalone -- karma start --single-run=false --browsers ChromeWithCert",
"standalone": "npm run build && node -e 'require(\".\").getStandalone({ debug: true }).start()'",
"with-standalone": "TS_NODE_FILES=true ts-node ./src/standalone/standalone-bin.ts -c ",
"ci-tests": "npm run test && catch-uncommitted",

@@ -67,3 +69,3 @@ "prepack": "npm run build"

"devDependencies": {
"@graphql-tools/utils": "^6.0.18",
"@graphql-tools/utils": "^7.10.0",
"@types/base64-arraybuffer": "0.1.0",

@@ -88,2 +90,3 @@ "@types/body-parser": "^1.19.0",

"agent-base": "^4.2.1",
"brotli-wasm": "^1.0.0",
"catch-uncommitted": "^1.0.0",

@@ -98,10 +101,10 @@ "chai": "^3.5.0",

"isomorphic-ws": "^4.0.1",
"karma": "^1.7.1",
"karma": "^6.3.2",
"karma-chai": "^0.1.0",
"karma-chrome-launcher": "^2.2.0",
"karma-mocha": "^1.3.0",
"karma-mocha": "^2.0.1",
"karma-sourcemap-loader": "^0.3.7",
"karma-spec-reporter": "0.0.32",
"karma-webpack": "^4.0.2",
"mocha": "^3.0.2",
"mocha": "^8.4.0",
"null-loader": "^3.0.0",

@@ -116,14 +119,13 @@ "opn": "^5.1.0",

"ts-loader": "^8.0.2",
"ts-node": "^3.3.0",
"ts-node": "^9.1.1",
"typedoc": "^0.16.4",
"typedoc-plugin-external-module-name": "^3.0.0",
"typescript": "3.7.5",
"webpack": "^4.44.1"
"typescript": "4.3.2",
"webpack": "^4.46.0"
},
"engines": {
"node": ">=10.0.0"
"node": ">=12.0.0"
},
"dependencies": {
"@graphql-tools/schema": "^6.0.18",
"@graphql-tools/utils": "^6.0.18",
"@graphql-tools/schema": "^7.1.5",
"@httptoolkit/httpolyglot": "^1.0.0",

@@ -136,3 +138,2 @@ "@types/cors": "^2.8.6",

"body-parser": "^1.15.2",
"brotli": "^1.3.2",
"cacheable-lookup": "^6.0.0",

@@ -148,3 +149,4 @@ "common-tags": "^1.8.0",

"graphql-subscriptions": "^1.1.0",
"http2-wrapper": "^1.0.0-beta.5.2",
"http-encoding": "^1.2.0",
"http2-wrapper": "^2.0.5",
"lodash": "^4.16.4",

@@ -156,2 +158,3 @@ "native-duplexpair": "^1.0.0",

"portfinder": "^1.0.23",
"proxy-agent": "^4.0.1",
"subscriptions-transport-ws": "^0.9.4",

@@ -158,0 +161,0 @@ "typed-error": "^3.0.2",

@@ -5,16 +5,12 @@ # Mockttp [![Build Status](https://github.com/httptoolkit/mockttp/workflows/CI/badge.svg)](https://github.com/httptoolkit/mockttp/actions) [![Available on NPM](https://img.shields.io/npm/v/mockttp.svg)](https://npmjs.com/package/mockttp) [![Try Mockttp on RunKit](https://badge.runkitcdn.com/mockttp.svg)](https://npm.runkit.com/mockttp)

**Mockttp lets you quickly & reliably test HTTP requests & responses in JavaScript, in both Node and browsers.**
**Mockttp lets you intercept, transform or test HTTP requests & responses in JavaScript - quickly, reliably & anywhere.**
There's a lot of tools to do this, but typically by stubbing the HTTP functions in your
process at the JS level. That ties you to a specific environment, doesn't test the
real requests that'd be made, and only works for requests made in the same JS processs.
It's inflexible, limiting and inaccurate, and often unreliable & tricky to debug too.
You can use Mockttp for integration testing, by intercepting real requests as part of your test suite, or you can use Mockttp to build custom HTTP proxies that capture, inspect and/or rewrite HTTP in any other kind of way you like.
Mockttp is here to make this better.
HTTP testing is the most common and well supported use case. There's a lot of tools to test HTTP, but typically by stubbing the HTTP functions in-process at the JS level. That ties you to a specific environment, doesn't truly test the real requests that your code would send, and only works for requests made in the same JS process. It's inflexible, limiting and inaccurate, and often unreliable & tricky to debug too.
Mockttp allows you to do accurate true integration testing, writing one set of tests that
works out of the box in node or browsers, with support for transparent proxying & HTTPS,
strong typing & promises throughout, fast & safe parallel testing, and helpful
built-in debuggability support all the way down.
Mockttp meanwhile allows you to do accurate true integration testing, writing one set of tests that works out of the box in node or browsers, with support for transparent proxying & HTTPS, strong typing & promises throughout, fast & safe parallel testing, and with debuggability built-in at every stage.
Mockttp is also battle-tested as a scriptable rewriting proxy, powering all the HTTP internals of [HTTP Toolkit](https://httptoolkit.tech). Anything you can do with HTTP Toolkit, you can automate with Mockttp as a headless script.
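
As a rough sketch of that proxy use-case (names like getLocal(), anyRequest() and thenPassThrough() are assumptions about the v2 API here, not confirmed by this diff):

import * as mockttp from "mockttp";

(async () => {
    const proxy = mockttp.getLocal();
    await proxy.start(); // picks a free port automatically

    // Log every proxied request, then forward it upstream unchanged:
    await proxy.on("request", (req) => console.log(`${req.method} ${req.url}`));
    await proxy.anyRequest().thenPassThrough();

    console.log(`Proxy listening at ${proxy.url}`);
})();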
## Features

@@ -25,8 +21,8 @@

* Write **easy, fast & reliable node.js & browser HTTP integration tests**
* **Stub server responses** and **verify HTTP requests** made by your code
* **Stub server responses** and **verify HTTP requests**
* **Intercept HTTPS** too, with built-in self-signed certificate generation
* **Mock requests inside or outside your process/tab**, including subprocesses, native code, remote devices, and more
* **Test true real-world behaviour**, verifying the real requests made, and testing exactly how your whole stack will handle a response in reality
* Stub direct requests, or transparently stub requests elsewhere as an **HTTP mocking proxy**
* **Mock for node & browser tests with the same code** (universal/'isomorphic' HTTP mocking)
* Stub direct requests as a **mock server**, or transparently stub requests sent elsewhere as an **HTTP mocking proxy**
* **Mock HTTP in both node & browser tests with the same code** (universal/'isomorphic' HTTP mocking)
* **Safely mock HTTP in parallel**, with autoconfiguration of ports, mock URLs and proxy settings, for super-charged integration testing

@@ -58,16 +54,15 @@ * **Debug your tests easily**, with full explainability of all mock matches & misses, mock autosuggestions, and an extra detailed debug mode

describe("Mockttp", () => {
// Start your server
// Start your mock server
beforeEach(() => mockServer.start(8080));
afterEach(() => mockServer.stop());
it("lets you mock requests, and assert on the results", () =>
it("lets you mock requests, and assert on the results", async () =>
// Mock your endpoints
mockServer.get("/mocked-path").thenReply(200, "A mocked response")
.then(() => {
// Make a request
return superagent.get("http://localhost:8080/mocked-path");
}).then(response => {
// Assert on the results
expect(response.text).to.equal("A mocked response");
})
await mockServer.get("/mocked-path").thenReply(200, "A mocked response");
// Make a request
const response = await superagent.get("http://localhost:8080/mocked-path");
// Assert on the results
expect(response.text).to.equal("A mocked response");
);

@@ -92,3 +87,2 @@ });

it("lets you mock without specifying a port, allowing parallel testing", async () => {
// Simplify promises with async/await in supported environments (Chrome 55+/Node 8+/Babel/TypeScript)
await mockServer.get("/mocked-endpoint").thenReply(200, "Tip top testing")

@@ -95,0 +89,0 @@

@@ -58,3 +58,8 @@ /**

super(
`GraphQL request failed, with errors:\n${errors.map((e) => e.message).join('\n')}`,
errors.length === 0
? `GraphQL request failed with ${response.status} response`
: errors.length === 1
? `GraphQL request failed with: ${errors[0].message}`
: // >1
`GraphQL request failed, with errors:\n${errors.map((e) => e.message).join('\n')}`,
response

@@ -119,3 +124,3 @@ );

if (message.body) {
if (message.body !== undefined) {
// Body is serialized as the raw encoded buffer in base64

@@ -144,2 +149,4 @@ message.body = buildBodyReader(Buffer.from(message.body, 'base64'), message.headers);

private subscriptionClient: SubscriptionClient | undefined;
constructor(options: MockttpClientOptions = {}) {

@@ -210,2 +217,21 @@ super(_.defaults(options, {

private prepareSubscriptionClientToMockServer(config: MockServerConfig) {
const standaloneWsServer = this.mockServerOptions.standaloneServerUrl.replace(/^http/, 'ws');
const subscriptionUrl = `${standaloneWsServer}/server/${config.port}/subscription`;
this.subscriptionClient = new SubscriptionClient(subscriptionUrl, {
lazy: true, // Doesn't actually connect until you use subscriptions
reconnect: true,
reconnectionAttempts: 8,
wsOptionArguments: [this.mockClientOptions]
}, WebSocket);
this.subscriptionClient.onError((e) => {
if (this.debug) console.error("Subscription error", e)
});
this.subscriptionClient.onReconnecting(() =>
console.warn(`Reconnecting Mockttp subscription client`)
);
}
private async requestFromMockServer(path: string, options?: RequestInit): Promise<Response> {

@@ -245,8 +271,10 @@ if (!this.mockServerConfig) throw new Error('Not connected to mock server');

} catch (e) {
let graphQLErrors: Error[] | undefined = undefined;
try {
let graphQLErrors = (await e.response.json()).errors;
graphQLErrors = (await e.response.json()).errors;
} catch (e2) {}
if (graphQLErrors) {
throw new GraphQLError(e, graphQLErrors);
} catch (e2) {
// If we fail to get a proper JSON graphql error, just throw the
// underlying exception without decoration
} else {
throw e;

@@ -272,2 +300,5 @@ }

// Create a subscription client, preconfigured & ready to connect if on() is called later:
this.prepareSubscriptionClientToMockServer(mockServerConfig);
// We don't persist the config or resolve this promise until everything is set up

@@ -284,2 +315,3 @@ this.mockServerConfig = mockServerConfig;

this.mockServerStream!.end();
this.subscriptionClient!.close();
await this.requestFromMockServer('/stop', {

@@ -506,10 +538,2 @@ method: 'POST'

const standaloneStreamServer = this.mockServerOptions.standaloneServerUrl.replace(/^http/, 'ws');
const url = `${standaloneStreamServer}/server/${this.port}/subscription`;
const client = new SubscriptionClient(url, {
reconnect: true,
reconnectionAttempts: 8,
wsOptionArguments: [this.mockClientOptions]
}, WebSocket);
// Note the typeHasField checks - these are a quick hack for backward compatibility,

@@ -632,3 +656,7 @@ // introspecting the server schema to avoid requesting fields that don't exist on old servers.

client.request(query).subscribe({
// This isn't 100% correct (you can be WS-connected, but still negotiating some GQL
// setup) but it's good enough for our purposes (knowing-ish if the connection worked).
let isConnected = !!this.subscriptionClient!.client;
this.subscriptionClient!.request(query).subscribe({
next: (value) => {

@@ -663,14 +691,8 @@ if (value.data) {

return new Promise((resolve, reject) => {
client.onConnected(() => {
if (this.debug) console.log("Subscription connected");
resolve();
});
client.onDisconnected(() => {
if (this.debug) console.warn("Subscription disconnected");
reject();
});
client.onError((e) => {
if (this.debug) console.error("Subscription error", e)
});
client.onReconnecting(() => console.warn(`Reconnecting ${event} subscription`));
if (isConnected) resolve();
else {
this.subscriptionClient!.onConnected(resolve);
this.subscriptionClient!.onDisconnected(reject);
this.subscriptionClient!.onError(reject);
}
});

@@ -677,0 +699,0 @@ }

@@ -23,2 +23,3 @@ /**

import { WebSocketRuleData } from './rules/websockets/websocket-rule';
export { ProxyConfig } from './util/http-agents';

@@ -25,0 +26,0 @@ export { RequestRuleData, WebSocketRuleData };

@@ -28,3 +28,5 @@ /**

} from '../requests/request-handlers';
import { streamToBuffer, isHttp2 } from '../../util/request-utils';
import { isHttp2 } from '../../util/request-utils';
import { streamToBuffer } from '../../util/buffer-utils';
import { getAgent, ProxyConfig } from '../../util/http-agents';

@@ -44,3 +46,3 @@ export interface WebSocketHandler extends Explainable, Serializable {

interface InterceptedWebSocket extends WebSocket {
upstreamSocket: WebSocket;
upstreamWebSocket: WebSocket;
}

@@ -52,2 +54,17 @@

// Based on ws's validation.js
function isValidStatusCode(code: number) {
return ( // Standard code:
code >= 1000 &&
code <= 1014 &&
code !== 1004 &&
code !== 1005 &&
code !== 1006
) || ( // Application-specific code:
code >= 3000 && code <= 4999
);
}
const INVALID_STATUS_REGEX = /Invalid WebSocket frame: invalid status code (\d+)/;
function pipeWebSocket(inSocket: WebSocket, outSocket: WebSocket) {

@@ -68,7 +85,10 @@ const onPipeFailed = (op: string) => (err?: Error) => {

inSocket.on('close', (num, reason) => {
if (num >= 1000 && num <= 1004) {
outSocket.close(num, reason);
if (isValidStatusCode(num)) {
try {
outSocket.close(num, reason);
} catch (e) {
console.warn(e);
outSocket.close();
}
} else {
console.log(`Unhappily closing websocket ${num}: ${reason}`);
// Unspecified or invalid error
outSocket.close();

@@ -85,2 +105,24 @@ }

});
    // If either socket has a general error (connection failure, but also could be invalid WS
// frames) then we kill the raw connection upstream to simulate a generic connection error:
inSocket.on('error', (err) => {
console.log(`Error in proxied WebSocket:`, err);
const rawOutSocket = outSocket as any;
if (err.message.match(INVALID_STATUS_REGEX)) {
const status = parseInt(INVALID_STATUS_REGEX.exec(err.message)![1]);
// Simulate errors elsewhere by messing with ws internals. This may break things,
// that's effectively on purpose: we're simulating the client going wrong:
const buf = Buffer.allocUnsafe(2);
buf.writeUInt16BE(status); // status comes from readUInt16BE, so always fits
rawOutSocket._sender.doClose(buf, true, () => {
rawOutSocket._socket.destroy();
});
} else {
// Unknown error, just kill the connection with no explanation
rawOutSocket._socket.destroy();
}
});
}

@@ -130,2 +172,7 @@

/**
* Upstream proxy configuration: pass through requests via this proxy
*/
proxyConfig?: ProxyConfig;
/**
* Custom DNS options, to allow configuration of the resolver used

@@ -142,2 +189,3 @@ * when forwarding requests upstream. Passing any option switches

forwarding?: ForwardingOptions;
proxyConfig?: ProxyConfig;
ignoreHostCertificateErrors?: string[]; // Doesn't match option name, backward compat

@@ -158,2 +206,4 @@ lookupOptions?: PassThroughLookupOptions;

public readonly proxyConfig?: ProxyConfig;
private _cacheableLookupInstance: CacheableLookup | undefined;

@@ -205,2 +255,3 @@ private lookup() {

this.lookupOptions = options.lookupOptions;
this.proxyConfig = options.proxyConfig;
}

@@ -219,4 +270,4 @@

this.wsServer.on('connection', (ws: InterceptedWebSocket) => {
pipeWebSocket(ws, ws.upstreamSocket);
pipeWebSocket(ws.upstreamSocket, ws);
pipeWebSocket(ws, ws.upstreamWebSocket);
pipeWebSocket(ws.upstreamWebSocket, ws);
});

@@ -294,3 +345,3 @@ }

// Initialize the server when we handle the first actual request. Mainly just so we
// don't try to initialize it in a browser when buiding rules initially.
// don't try to initialize it in a browser when building rules initially.
if (!this.wsServer) this.wsServer = new WebSocket.Server({ noServer: true });

@@ -303,5 +354,19 @@

const upstreamSocket = new WebSocket(wsUrl, {
const effectivePort = !!parsedUrl.port
? parseInt(parsedUrl.port, 10)
: parsedUrl.protocol == 'wss:' ? 443 : 80;
const agent = getAgent({
protocol: parsedUrl.protocol as 'ws:' | 'wss:',
hostname: parsedUrl.hostname!,
port: effectivePort,
proxyConfig: this.proxyConfig,
tryHttp2: false, // We don't support websockets over H2 yet
keepAlive: false // Not a thing for websockets: they take over the whole connection
});
const upstreamWebSocket = new WebSocket(wsUrl, {
rejectUnauthorized: checkServerCertificate,
maxPayload: 0,
agent,
lookup: this.lookup(),

@@ -314,6 +379,6 @@ headers: _.omitBy(headers, (_v, headerName) =>

upstreamSocket.once('open', () => {
upstreamWebSocket.once('open', () => {
// Presumably the below adds an error handler. But what about before we get here?
this.wsServer!.handleUpgrade(req, incomingSocket, head, (ws) => {
(<InterceptedWebSocket> ws).upstreamSocket = upstreamSocket;
(<InterceptedWebSocket> ws).upstreamWebSocket = upstreamWebSocket;
this.wsServer!.emit('connection', ws);

@@ -324,3 +389,3 @@ });

// If the upstream says no, we say no too.
upstreamSocket.on('unexpected-response', (req, res) => {
upstreamWebSocket.on('unexpected-response', (req, res) => {
console.log(`Unexpected websocket response from ${wsUrl}: ${res.statusCode}`);

@@ -331,3 +396,3 @@ mirrorRejection(incomingSocket, res);

// If there's some other error, we just kill the socket:
upstreamSocket.on('error', (e) => {
upstreamWebSocket.on('error', (e) => {
console.warn(e);

@@ -337,3 +402,3 @@ incomingSocket.end();

incomingSocket.on('error', () => upstreamSocket.close(1011)); // Internal error
incomingSocket.on('error', () => upstreamWebSocket.close(1011)); // Internal error
}

@@ -346,2 +411,3 @@

forwarding: this.forwarding,
proxyConfig: this.proxyConfig,
ignoreHostCertificateErrors: this.ignoreHostHttpsErrors,

@@ -348,0 +414,0 @@ lookupOptions: this.lookupOptions

@@ -53,2 +53,3 @@ import _ = require('lodash');

// If you silently close it very quicky, you probably don't trust us
socket.once('error', reject);
socket.once('close', reject);

@@ -55,0 +56,0 @@ socket.once('end', reject);

@@ -27,3 +27,3 @@ /**

TimingEvents,
ParsedBody
OngoingBody
} from "../types";

@@ -57,3 +57,3 @@ import { CAOptions } from '../util/tls';

protocol?: string;
body?: ParsedBody;
body?: OngoingBody;
path?: string;

@@ -401,2 +401,7 @@ };

request.once('aborted', abort);
// In Node 16+ we don't get an abort event in many cases, just closes, but we know
// it's aborted because the response is closed with no other result being set.
rawResponse.once('close', () => setImmediate(abort));
request.once('error', () => setImmediate(abort));
this.announceInitialRequestAsync(request);

@@ -403,0 +408,0 @@

@@ -230,3 +230,7 @@ /**

mockServerRouter.post('/stop', async (req, res) => {
let running = true;
const stopServer = async () => {
if (!running) return;
running = false;
await mockServer.stop();

@@ -236,2 +240,4 @@

delete this.routers[mockPort];
this.subscriptionServers[mockPort].close();
delete this.subscriptionServers[mockPort];

@@ -242,3 +248,6 @@

delete this.streamServers[mockPort];
};
mockServerRouter.post('/stop', async (req, res) => {
await stopServer();
res.status(200).send(JSON.stringify({

@@ -273,2 +282,11 @@ success: true

// Handle errors by logging & stopping this server instance
const onStreamError = (e: Error) => {
console.error("Error in server standalone stream, shutting down mock server");
console.error(e);
stopServer();
};
wsSocket.on('error', onStreamError);
serverSocket.on('error', onStreamError);
const schema = await this.loadSchema('schema.gql', mockServer, serverSocket);

@@ -275,0 +293,0 @@

@@ -14,3 +14,3 @@ /**

} from "graphql";
import { IResolvers } from "@graphql-tools/utils/Interfaces";
import type { IResolvers } from "@graphql-tools/utils/Interfaces";
import { PubSub } from "graphql-subscriptions";

@@ -17,0 +17,0 @@

@@ -81,7 +81,7 @@ /**

export interface OngoingRequest extends Request, EventEmitter {
body: ParsedBody;
body: OngoingBody;
timingEvents: TimingEvents;
}
export interface ParsedBody {
export interface OngoingBody {
asStream: () => stream.Readable;

@@ -95,7 +95,58 @@ asBuffer: () => Promise<Buffer>;

export interface CompletedBody {
/**
* The raw bytes of the response. If a content encoding was used, this is
* the raw encoded data.
*/
buffer: Buffer;
/**
* @deprecated Use `getDecodedBuffer()` instead with promises, to support
* more encodings and improve performance.
*/
decodedBuffer: Buffer | undefined;
/**
* The decoded bytes of the response. If no encoding was used, this is the
* same as `.buffer`. The response is decoded and returned asynchronously
* as a Promise.
*/
getDecodedBuffer(): Promise<Buffer | undefined>;
/**
* @deprecated Use `getText()` instead with promises, to support
* more encodings and improve performance.
*/
text: string | undefined;
/**
* The contents of the response, decoded and parsed as a UTF-8 string.
* The response is decoded and returned asynchronously as a Promise.
*/
getText(): Promise<string | undefined>;
/**
* @deprecated Use `getJson()` instead with promises, to support
* more encodings and improve performance.
*/
json: object | undefined;
/**
* The contents of the response, decoded, parsed as UTF-8 string, and
     * then parsed as JSON. The response is decoded and returned asynchronously
* as a Promise.
*/
getJson(): Promise<object | undefined>;
/**
* @deprecated Use `getDecodedBuffer()` instead with promises, to support
* more encodings and improve performance.
*/
formData: { [key: string]: string | string[] | undefined } | undefined;
/**
* The contents of the response, decoded, parsed as UTF-8 string, and
* then parsed form-encoded data. The response is decoded and returned
* asynchronously as a Promise.
*/
getFormData(): Promise<{ [key: string]: string | string[] | undefined } | undefined>;
}

@@ -129,3 +180,3 @@

getHeaders(): Headers;
body: ParsedBody;
body: OngoingBody;
timingEvents: TimingEvents;

@@ -132,0 +183,0 @@ tags: string[];

@@ -32,3 +32,3 @@ /**

(urlInput: string): string => {
let parsedUrl: url.UrlWithStringQuery | undefined;
let parsedUrl: Partial<url.UrlWithStringQuery> | undefined;

@@ -35,0 +35,0 @@ let isProtocolless = false;

@@ -12,6 +12,5 @@ /**

import * as querystring from 'querystring';
import * as zlib from 'zlib';
import * as brotliDecompress from 'brotli/decompress';
import now = require("performance-now");
import * as url from 'url';
import { decodeBuffer, decodeBufferSync } from 'http-encoding';

@@ -24,3 +23,3 @@ import {

CompletedResponse,
ParsedBody,
OngoingBody,
CompletedBody,

@@ -30,7 +29,12 @@ TimingEvents,

} from "../types";
import { nthIndexOf, isNode } from '../util/util';
const MAX_BUFFER_SIZE = isNode
? require('buffer').constants.MAX_LENGTH
: Infinity;
import { nthIndexOf } from './util';
import {
bufferThenStream,
bufferToStream,
BufferInProgress,
splitBuffer,
streamToBuffer,
asBuffer
} from './buffer-utils';

@@ -134,70 +138,5 @@ // Is this URL fully qualified?

// Takes a buffer and a stream, returns a simple stream that outputs the buffer then the stream.
const bufferThenStream = (buffer: BufferInProgress, inputStream: stream.Readable): stream.Readable => {
const outputStream = new stream.PassThrough();
// Forward the buffered data so far
outputStream.write(Buffer.concat(buffer.currentChunks));
// After the data, forward errors from the buffer
if (buffer.failedWith) {
// Announce async, to ensure listeners have time to get set up
setTimeout(() => outputStream.emit('error', buffer.failedWith));
} else {
// Forward future data as it arrives
inputStream.pipe(outputStream);
// Forward any future errors from the input stream
inputStream.on('error', (e) => outputStream.emit('error', e));
// Silence 'unhandled rejection' warnings here, since we'll handle them on the stream instead
buffer.catch(() => {});
}
return outputStream;
};
const bufferToStream = (buffer: Buffer): stream.Readable => {
const outputStream = new stream.PassThrough();
outputStream.end(buffer);
return outputStream;
};
type BufferInProgress = Promise<Buffer> & {
currentChunks: Buffer[] // Stores the body chunks as they arrive
failedWith?: Error // Stores the error that killed the stream, if one did
};
export const streamToBuffer = (input: stream.Readable, maxSize = MAX_BUFFER_SIZE) => {
let chunks: Buffer[] = [];
const bufferPromise = <BufferInProgress> new Promise(
(resolve, reject) => {
let currentSize = 0;
input.on('data', (d: Buffer) => {
currentSize += d.length;
// If we go over maxSize, drop the whole stream, so the buffer
// resolves empty. MaxSize should be large, so this is rare,
// and only happens as an alternative to crashing the process.
if (currentSize > maxSize) {
chunks = []; // Drop all the data so far
return; // Don't save any more data
}
chunks.push(d);
});
input.once('end', () => resolve(Buffer.concat(chunks)));
input.once('aborted', () => {
bufferPromise.failedWith = new Error('Aborted');
reject(bufferPromise.failedWith);
});
input.on('error', (e) => {
bufferPromise.failedWith = bufferPromise.failedWith || e;
reject(e);
});
}
);
bufferPromise.currentChunks = chunks;
return bufferPromise;
};
const parseBodyStream = (bodyStream: stream.Readable, maxSize: number): ParsedBody => {
// Parse an in-progress request or response stream, i.e. where the body or possibly even the headers have
// not been fully received/sent yet.
const parseBodyStream = (bodyStream: stream.Readable, maxSize: number, getHeaders: () => Headers): OngoingBody => {
let bufferPromise: BufferInProgress | null = null;

@@ -226,4 +165,8 @@ let completedBuffer: Buffer | null = null;

},
async asDecodedBuffer() {
const buffer = await body.asBuffer();
return decodeBuffer(buffer, getHeaders()['content-encoding']);
},
asText(encoding: BufferEncoding = 'utf8') {
return body.asBuffer().then((b) => b.toString(encoding));
return body.asDecodedBuffer().then((b) => b.toString(encoding));
},

@@ -249,3 +192,11 @@ asJson() {

const waitForBody = async (body: ParsedBody, headers: Headers): Promise<CompletedBody> => {
async function runAsyncOrUndefined<R>(func: () => Promise<R>): Promise<R | undefined> {
try {
return await func();
} catch {
return undefined;
}
}
const waitForBody = async (body: OngoingBody, headers: Headers): Promise<CompletedBody> => {
const bufferBody = await body.asBuffer();

@@ -255,41 +206,39 @@ return buildBodyReader(bufferBody, headers);

export const handleContentEncoding = (body: Buffer, encoding?: string | string[]): Buffer => {
if (_.isArray(encoding) || (typeof encoding === 'string' && encoding.indexOf(', ') >= 0)) {
const encodings = typeof encoding === 'string' ? encoding.split(', ').reverse() : encoding;
return encodings.reduce((content, nextEncoding) => {
return handleContentEncoding(content, nextEncoding);
}, body);
}
export const isMockttpBody = (body: any): body is CompletedBody => {
return body.hasOwnProperty('getDecodedBuffer');
}
if (encoding === 'gzip' || encoding === 'x-gzip') {
return zlib.gunzipSync(body);
} else if (encoding === 'deflate' || encoding === 'x-deflate') {
// Deflate is ambiguous, and may or may not have a zlib wrapper.
// This checks the buffer header directly, based on
// https://stackoverflow.com/a/37528114/68051
const lowNibble = body[0] & 0xF;
if (lowNibble === 8) {
return zlib.inflateSync(body);
} else {
return zlib.inflateRawSync(body);
}
} else if (encoding === 'br') {
return Buffer.from(brotliDecompress(body));
} else if (encoding === 'amz-1.0') {
// Weird encoding used by some AWS requests, actually just unencoded JSON:
// https://docs.aws.amazon.com/en_us/AmazonCloudWatch/latest/APIReference/making-api-requests.html
return body;
} else if (!encoding || encoding === 'identity') {
return body;
} else {
throw new Error(`Unknown encoding: ${encoding}`);
}
};
export const buildBodyReader = (body: Buffer, headers: Headers): CompletedBody => {
const completedBody = {
buffer: body,
async getDecodedBuffer() {
return runAsyncOrUndefined(async () =>
asBuffer(
await decodeBuffer(this.buffer, headers['content-encoding'])
)
);
},
async getText() {
return runAsyncOrUndefined(async () =>
(await this.getDecodedBuffer())!.toString()
);
},
async getJson() {
return runAsyncOrUndefined(async () =>
JSON.parse((await completedBody.getText())!)
)
},
async getFormData() {
return runAsyncOrUndefined(async () => {
const text = await completedBody.getText();
return text ? querystring.parse(text) : undefined;
});
},
// Deprecated sync properties, for backwards compat. Note that these do not
// support new encodings, e.g. Brotli/Zstandard.
get decodedBuffer() {
return runOrUndefined(() =>
handleContentEncoding(this.buffer, headers['content-encoding'])
decodeBufferSync(this.buffer, headers['content-encoding'])
);

@@ -322,3 +271,3 @@ },

let transformedRequest = <OngoingRequest> <any> req;
transformedRequest.body = parseBodyStream(req, options.maxSize);
transformedRequest.body = parseBodyStream(req, options.maxSize, () => req.headers);
};

@@ -445,3 +394,7 @@

trackedResponse.body = parseBodyStream(trackingStream, options.maxSize);
trackedResponse.body = parseBodyStream(
trackingStream,
options.maxSize,
() => trackedResponse.getHeaders()
);

@@ -562,22 +515,2 @@ // Proxy errors (e.g. write-after-end) to the response, so they can be

path?: string;
}
function splitBuffer(input: Buffer, splitter: string, maxParts = Infinity) {
const parts: Buffer[] = [];
let remainingBuffer = input;
while (remainingBuffer.length) {
let endOfPart = remainingBuffer.indexOf(splitter);
if (endOfPart === -1) endOfPart = remainingBuffer.length;
parts.push(remainingBuffer.slice(0, endOfPart));
remainingBuffer = remainingBuffer.slice(endOfPart + splitter.length);
if (parts.length === maxParts - 1) {
parts.push(remainingBuffer);
break;
}
}
return parts;
}

@@ -8,3 +8,4 @@ import * as _ from 'lodash';

import { CompletedBody, Headers } from '../types';
import { buildBodyReader } from './request-utils';
import { buildBodyReader, isMockttpBody } from './request-utils';
import { asBuffer } from './buffer-utils';

@@ -168,3 +169,3 @@ export function serialize<T extends Serializable>(

} else {
resolve(response.data);
resolve(response.data!);
}

@@ -249,3 +250,5 @@ this.removeListener('data', responseListener);

}>(input: T): Replace<T, 'body', string> {
return Object.assign({}, input, { body: input.body.buffer.toString('base64') });
return Object.assign({}, input, {
body: asBuffer(input.body.buffer).toString('base64')
});
}

@@ -269,3 +272,3 @@

} else if (_.isString(input.body)) {
serializedBody = serializeBuffer(Buffer.from(input.body));
serializedBody = serializeBuffer(asBuffer(input.body));
} else if (_.isBuffer(input.body)) {

@@ -275,4 +278,4 @@ serializedBody = serializeBuffer(input.body as Buffer);

serializedBody = encodeBase64(input.body as ArrayBuffer);
} else if (input.body.hasOwnProperty('decodedBuffer')) {
serializedBody = serializeBuffer(input.body.buffer);
} else if (isMockttpBody(input.body)) {
serializedBody = serializeBuffer(asBuffer(input.body.buffer));
}

@@ -279,0 +282,0 @@

@@ -34,2 +34,29 @@ import * as _ from 'lodash';

)
: true;
: true;
// We need to normalize ips for comparison, because the same ip may be reported as ::ffff:127.0.0.1
// and 127.0.0.1 on the two sides of the connection, for the same ip.
const normalizeIp = (ip: string | undefined) =>
(ip && ip.startsWith('::ffff:'))
? ip.slice('::ffff:'.length)
: ip;
// Check whether an incoming socket is the other end of one of our outgoing sockets:
export const isSocketLoop = (outgoingSockets: net.Socket[] | Set<net.Socket>, incomingSocket: net.Socket) =>
// We effectively just compare the address & port: if they match, we've almost certainly got a loop.
// I don't think it's generally possible to see the same ip on different interfaces from one process (you need
// ip-netns network namespaces), but if it is, then there's a tiny chance of false positives here. If we have ip X,
// and on another interface somebody else has ip X, and they send a request with the same incoming port as an
// outgoing request we have on the other interface, we'll assume it's a loop. Extremely unlikely imo.
_.some([...outgoingSockets], (outgoingSocket) => {
if (!outgoingSocket.localAddress || !outgoingSocket.localPort) {
// It's possible for sockets in outgoingSockets to be closed, in which case these properties
// will be undefined. If so, we know they're not relevant to loops, so skip entirely.
return false;
} else {
return normalizeIp(outgoingSocket.localAddress) === normalizeIp(incomingSocket.remoteAddress) &&
outgoingSocket.localPort === incomingSocket.remotePort;
}
});
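
To make that comparison concrete, here's a tiny standalone sketch: plain objects stand in for real net.Socket instances, and the addresses & ports are made up.

// Illustration only - hypothetical values, not real sockets:
const normalizeIp = (ip: string | undefined) =>
    (ip && ip.startsWith('::ffff:')) ? ip.slice('::ffff:'.length) : ip;

const outgoing = { localAddress: '::ffff:127.0.0.1', localPort: 54321 };
const incoming = { remoteAddress: '127.0.0.1', remotePort: 54321 };

const looksLikeLoop =
    normalizeIp(outgoing.localAddress) === normalizeIp(incoming.remoteAddress) &&
    outgoing.localPort === incoming.remotePort;

console.log(looksLikeLoop); // true: the incoming socket is the far end of our own outgoing one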

@@ -16,3 +16,3 @@ export function nthIndexOf(input: string, matcher: string, n: number) {

// it returns the length when encoded as UTF8.
export function byteLength(input: string | Buffer) {
export function byteLength(input: string | Uint8Array | Buffer) {
if (typeof input === 'string') {

@@ -19,0 +19,0 @@ return isNode
