fetch-har
Comparing version 10.0.0 to 11.0.0
@@ -1,21 +0,6 @@
/// <reference types="node" />
import type { Har } from 'har-format';
interface RequestInitWithDuplex extends RequestInit {
    /**
     * `RequestInit#duplex` does not yet exist in the TS `lib.dom.d.ts` definition yet the native
     * fetch implementation in Node 18+, `undici`, requires it for certain POST payloads.
     *
     * @see {@link https://github.com/microsoft/TypeScript-DOM-lib-generator/issues/1483}
     * @see {@link https://github.com/nodejs/node/issues/46221}
     * @see {@link https://fetch.spec.whatwg.org/#request-class}
     * @see {@link https://github.com/microsoft/TypeScript/blob/main/lib/lib.dom.d.ts}
     */
    duplex?: 'half';
}
export interface FetchHAROptions {
    files?: Record<string, Blob | Buffer>;
    init?: RequestInitWithDuplex;
    userAgent?: string;
}
export default function fetchHAR(har: Har, opts?: FetchHAROptions): Promise<Response>;
export {};
import { FetchHAROptions } from './types.js';
import { Har } from 'har-format';
declare function fetchHAR(har: Har, opts?: FetchHAROptions): Promise<Response>;
export = fetchHAR;
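For orientation, here is a rough TypeScript sketch of calling the signature declared above. The HAR is trimmed to just the request fields fetch-har reads (so it is cast rather than written out as a complete `har-format` entry), and the httpbin.org URL is only a placeholder target.

```ts
import fetchHAR from 'fetch-har';
import type { Har } from 'har-format';

// Trimmed HAR: only the request fields fetchHAR inspects are filled in, so we
// cast rather than satisfying the full har-format Entry type.
const har = {
  log: {
    entries: [
      {
        request: {
          method: 'GET',
          url: 'https://httpbin.org/get', // placeholder endpoint
          httpVersion: 'HTTP/1.1',
          headers: [{ name: 'accept', value: 'application/json' }],
          queryString: [],
          cookies: [],
          headersSize: -1,
          bodySize: -1,
        },
      },
    ],
  },
} as unknown as Har;

const res = await fetchHAR(har);
console.log(res.status, await res.json());
```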
@@ -1,291 +0,240 @@
"use strict";
var __assign = (this && this.__assign) || function () {
    __assign = Object.assign || function(t) {
        for (var s, i = 1, n = arguments.length; i < n; i++) {
            s = arguments[i];
            for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
                t[p] = s[p];
        }
        return t;
    };
    return __assign.apply(this, arguments);
"use strict";Object.defineProperty(exports, "__esModule", {value: true});var __defProp = Object.defineProperty;
var __defProps = Object.defineProperties;
var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
var __getOwnPropSymbols = Object.getOwnPropertySymbols;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __propIsEnum = Object.prototype.propertyIsEnumerable;
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __spreadValues = (a, b) => {
  for (var prop in b || (b = {}))
    if (__hasOwnProp.call(b, prop))
      __defNormalProp(a, prop, b[prop]);
  if (__getOwnPropSymbols)
    for (var prop of __getOwnPropSymbols(b)) {
      if (__propIsEnum.call(b, prop))
        __defNormalProp(a, prop, b[prop]);
    }
  return a;
};
Object.defineProperty(exports, "__esModule", { value: true });
var data_urls_1 = require("@readme/data-urls");
var readable_stream_1 = require("readable-stream");
var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
}) : x)(function(x) {
  if (typeof require !== "undefined")
    return require.apply(this, arguments);
  throw Error('Dynamic require of "' + x + '" is not supported');
});
// src/index.ts
var _dataurls = require('@readme/data-urls');
if (!globalThis.Blob) {
    try {
        // eslint-disable-next-line @typescript-eslint/no-var-requires
        globalThis.Blob = require('node:buffer').Blob;
    }
    catch (e) {
        throw new Error('The Blob API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/Blob');
    }
  try {
    globalThis.Blob = __require("buffer").Blob;
  } catch (e) {
    throw new Error("The Blob API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/Blob");
  }
}
if (!globalThis.File) {
    try {
        // Node's native `fetch` implementation unfortunately does not make this API global so we need
        // to pull it in if we don't have it.
        // eslint-disable-next-line @typescript-eslint/no-var-requires
        globalThis.File = require('undici').File;
    }
    catch (e) {
        throw new Error('The File API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/File');
    }
  try {
    globalThis.File = __require("undici").File;
  } catch (e) {
    throw new Error("The File API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/File");
  }
}
function isBrowser() {
    return typeof window !== 'undefined' && typeof document !== 'undefined';
  return typeof window !== "undefined" && typeof document !== "undefined";
}
function isBuffer(value) {
    return typeof Buffer !== 'undefined' && Buffer.isBuffer(value);
  return typeof Buffer !== "undefined" && Buffer.isBuffer(value);
}
function isFile(value) {
    if (value instanceof File) {
        /**
         * The `Blob` polyfill on Node comes back as being an instanceof `File`. Because passing a Blob
         * into a File will end up with a corrupted file we want to prevent this.
         *
         * This object identity crisis does not happen in the browser.
         */
        return value.constructor.name === 'File';
    }
    return false;
  if (value instanceof File) {
    return value.constructor.name === "File";
  }
  return false;
}
function getFileFromSuppliedFiles(filename, files) {
    if (filename in files) {
        return files[filename];
    }
    else if (decodeURIComponent(filename) in files) {
        return files[decodeURIComponent(filename)];
    }
    return false;
  if (files && filename in files) {
    return files[filename];
  } else if (files && decodeURIComponent(filename) in files) {
    return files[decodeURIComponent(filename)];
  }
  return false;
}
function fetchHAR(har, opts) {
    var _a;
    if (opts === void 0) { opts = {}; }
    if (!har)
        throw new Error('Missing HAR definition');
    if (!har.log || !har.log.entries || !har.log.entries.length)
        throw new Error('Missing log.entries array');
    var request = har.log.entries[0].request;
    var url = request.url;
    var querystring = '';
    var shouldSetDuplex = false;
    var options = __assign(__assign({}, (opts.init ? opts.init : {})), { method: request.method });
    if (!options.headers) {
        options.headers = new Headers();
function fetchHAR(har, opts = {}) {
  var _a, _b, _c, _d, _e;
  if (!har)
    throw new Error("Missing HAR definition");
  if (!har.log || !har.log.entries || !har.log.entries.length)
    throw new Error("Missing log.entries array");
  const { request } = har.log.entries[0];
  const { url } = request;
  let querystring = "";
  let shouldSetDuplex = false;
  const options = __spreadProps(__spreadValues({}, opts.init ? opts.init : {}), {
    method: request.method
  });
  if (!options.headers) {
    options.headers = new Headers();
  } else if (typeof options.headers === "object" && !(options.headers instanceof Headers) && options.headers !== null) {
    options.headers = new Headers(options.headers);
  }
  const headers = options.headers;
  if ("headers" in request && request.headers.length) {
    request.headers.forEach((header) => {
      try {
        return headers.append(header.name, header.value);
      } catch (err) {
      }
    });
  }
  if ("cookies" in request && request.cookies.length) {
    if (isBrowser()) {
      request.cookies.forEach((cookie) => {
        document.cookie = `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`;
      });
      options.credentials = "include";
    } else {
      headers.append(
        "cookie",
        request.cookies.map((cookie) => `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`).join("; ")
      );
    }
    else if (typeof options.headers === 'object' && !(options.headers instanceof Headers) && options.headers !== null) {
        options.headers = new Headers(options.headers);
    }
    var headers = options.headers;
    if ('headers' in request && request.headers.length) {
        // eslint-disable-next-line consistent-return
        request.headers.forEach(function (header) {
  }
  if ("postData" in request) {
    if (request.postData && "params" in request.postData) {
      if (!("mimeType" in request.postData)) {
        request.postData.mimeType = "application/octet-stream";
      }
      switch (request.postData.mimeType) {
        case "application/x-www-form-urlencoded":
          headers.set("Content-Type", request.postData.mimeType);
          const encodedParams = new URLSearchParams();
          (_a = request.postData.params) == null ? void 0 : _a.forEach((param) => {
            if (param.value)
              encodedParams.set(param.name, param.value);
          });
          options.body = encodedParams.toString();
          break;
        case "multipart/alternative":
        case "multipart/form-data":
        case "multipart/mixed":
        case "multipart/related":
          if (headers.has("Content-Type")) {
            headers.delete("Content-Type");
          }
          const form = new FormData();
          (_b = request.postData.params) == null ? void 0 : _b.forEach((param) => {
            if ("fileName" in param && param.fileName) {
              if (opts.files) {
                const fileContents = getFileFromSuppliedFiles(param.fileName, opts.files);
                if (fileContents) {
                  if (isBuffer(fileContents)) {
                    form.append(
                      param.name,
                      new File([fileContents], param.fileName, {
                        type: param.contentType || void 0
                      }),
                      param.fileName
                    );
                    return;
                  } else if (isFile(fileContents)) {
                    form.append(param.name, fileContents, param.fileName);
                    return;
                  }
                  throw new TypeError(
                    "An unknown object has been supplied into the `files` config for use. We only support instances of the File API and Node Buffer objects."
                  );
                }
              }
              if ("value" in param && param.value) {
                let paramBlob;
                const parsed = _dataurls.parse.call(void 0, param.value);
                if (parsed) {
                  paramBlob = new Blob([param.value], { type: parsed.contentType || param.contentType || void 0 });
                } else {
                  paramBlob = new Blob([param.value], { type: param.contentType || void 0 });
                }
                form.append(param.name, paramBlob, param.fileName);
                return;
              }
              throw new Error(
                "The supplied HAR has a postData parameter with `fileName`, but neither `value` content within the HAR or any file buffers were supplied with the `files` option. Since this library doesn't have access to the filesystem, it can't fetch that file."
              );
            }
            if (param.value)
              form.append(param.name, param.value);
          });
          options.body = form;
          break;
        default:
          const formBody = {};
          (_c = request.postData.params) == null ? void 0 : _c.map((param) => {
            try {
                return headers.append(header.name, header.value);
              formBody[param.name] = JSON.parse(param.value || "");
            } catch (e) {
              formBody[param.name] = param.value;
            }
            catch (err) {
                /**
                 * `Headers.append()` will throw errors if the header name is not a legal HTTP header name,
                 * like `X-API-KEY (Header)`. If that happens instead of tossing the error back out, we
                 * should silently just ignore it.
                 */
            }
        });
    }
    if ('cookies' in request && request.cookies.length) {
        /**
         * As the browser fetch API can't set custom cookies for requests, they instead need to be
         * defined on the document and passed into the request via `credentials: include`. Since this
         * is a browser-specific quirk, that should only happen in browsers!
         */
        if (isBrowser()) {
            request.cookies.forEach(function (cookie) {
                document.cookie = "".concat(encodeURIComponent(cookie.name), "=").concat(encodeURIComponent(cookie.value));
            });
            options.credentials = 'include';
        }
        else {
            headers.append('cookie', request.cookies
                .map(function (cookie) { return "".concat(encodeURIComponent(cookie.name), "=").concat(encodeURIComponent(cookie.value)); })
                .join('; '));
        }
    }
    if ('postData' in request) {
        if ('params' in request.postData) {
            if (!('mimeType' in request.postData)) {
                // @ts-expect-error HAR spec requires that `mimeType` is always present but it might not be.
                request.postData.mimeType = 'application/octet-stream';
            }
            switch (request.postData.mimeType) {
                case 'application/x-www-form-urlencoded':
                    /**
                     * Since the content we're handling here is to be encoded as
                     * `application/x-www-form-urlencoded`, this should override any other `Content-Type`
                     * headers that are present in the HAR. This is how Postman handles this case when
                     * building code snippets!
                     *
                     * @see {@link https://github.com/github/fetch/issues/263#issuecomment-209530977}
                     */
                    headers.set('Content-Type', request.postData.mimeType);
                    var encodedParams_1 = new URLSearchParams();
                    request.postData.params.forEach(function (param) { return encodedParams_1.set(param.name, param.value); });
                    options.body = encodedParams_1.toString();
                    break;
                case 'multipart/alternative':
                case 'multipart/form-data':
                case 'multipart/mixed':
                case 'multipart/related':
                    /**
                     * If there's a `Content-Type` header set we need to remove it. We're doing this because
                     * when we pass the form data object into `fetch` that'll set a proper `Content-Type`
                     * header for this request that also includes the boundary used on the content.
                     *
                     * If we don't do this, then consumers won't be able to parse out the payload because
                     * they won't know what the boundary to split on it.
                     */
                    if (headers.has('Content-Type')) {
                        headers.delete('Content-Type');
                    }
                    var form_1 = new FormData();
                    request.postData.params.forEach(function (param) {
                        if ('fileName' in param) {
                            if (opts.files) {
                                var fileContents = getFileFromSuppliedFiles(param.fileName, opts.files);
                                if (fileContents) {
                                    // If the file we've got available to us is a Buffer then we need to convert it so
                                    // that the FormData API can use it.
                                    if (isBuffer(fileContents)) {
                                        form_1.append(param.name, new File([fileContents], param.fileName, {
                                            type: param.contentType || null,
                                        }), param.fileName);
                                        return;
                                    }
                                    else if (isFile(fileContents)) {
                                        form_1.append(param.name, fileContents, param.fileName);
                                        return;
                                    }
                                    throw new TypeError('An unknown object has been supplied into the `files` config for use. We only support instances of the File API and Node Buffer objects.');
                                }
                            }
                            if ('value' in param) {
                                var paramBlob = void 0;
                                var parsed = (0, data_urls_1.parse)(param.value);
                                if (parsed) {
                                    // If we were able to parse out this data URL we don't need to transform its data
                                    // into a buffer for `Blob` because that supports data URLs already.
                                    paramBlob = new Blob([param.value], { type: parsed.contentType || param.contentType || null });
                                }
                                else {
                                    paramBlob = new Blob([param.value], { type: param.contentType || null });
                                }
                                form_1.append(param.name, paramBlob, param.fileName);
                                return;
                            }
                            throw new Error("The supplied HAR has a postData parameter with `fileName`, but neither `value` content within the HAR or any file buffers were supplied with the `files` option. Since this library doesn't have access to the filesystem, it can't fetch that file.");
                        }
                        form_1.append(param.name, param.value);
                    });
                    options.body = form_1;
                    break;
                default:
                    var formBody_1 = {};
                    request.postData.params.map(function (param) {
                        try {
                            formBody_1[param.name] = JSON.parse(param.value);
                        }
                        catch (e) {
                            formBody_1[param.name] = param.value;
                        }
                        return true;
                    });
                    options.body = JSON.stringify(formBody_1);
            }
        }
        else if ((_a = request.postData.text) === null || _a === void 0 ? void 0 : _a.length) {
            // If we've got `files` map content present, and this post data content contains a valid data
            // URL then we can substitute the payload with that file instead of the using data URL.
            if (opts.files) {
                var parsed = (0, data_urls_1.parse)(request.postData.text);
                if (parsed) {
                    if ((parsed === null || parsed === void 0 ? void 0 : parsed.name) && parsed.name in opts.files) {
                        var fileContents = getFileFromSuppliedFiles(parsed.name, opts.files);
                        if (fileContents) {
                            if (isBuffer(fileContents)) {
                                options.body = fileContents;
                            }
                            else if (isFile(fileContents)) {
                                // `Readable.from` isn't available in browsers but the browser `Request` object can
                                // handle `File` objects just fine without us having to mold it into shape.
                                if (isBrowser()) {
                                    options.body = fileContents;
                                }
                                else {
                                    // @ts-expect-error "Property 'from' does not exist on type 'typeof Readable'." but it does!
                                    options.body = readable_stream_1.Readable.from(fileContents.stream());
                                    shouldSetDuplex = true;
                                    // Supplying a polyfilled `File` stream into `Request.body` doesn't automatically
                                    // add `Content-Length`.
                                    if (!headers.has('content-length')) {
                                        headers.set('content-length', String(fileContents.size));
                                    }
                                }
                            }
                        }
                    }
            return true;
          });
          options.body = JSON.stringify(formBody);
      }
    } else if ((_e = (_d = request.postData) == null ? void 0 : _d.text) == null ? void 0 : _e.length) {
      if (opts.files) {
        const parsed = _dataurls.parse.call(void 0, request.postData.text);
        if (parsed) {
          if ((parsed == null ? void 0 : parsed.name) && parsed.name in opts.files) {
            const fileContents = getFileFromSuppliedFiles(parsed.name, opts.files);
            if (fileContents) {
              if (isBuffer(fileContents)) {
                options.body = fileContents;
              } else if (isFile(fileContents)) {
                if (isBrowser()) {
                  options.body = fileContents;
                } else {
                  options.body = fileContents.stream();
                  shouldSetDuplex = true;
                  if (!headers.has("content-length")) {
                    headers.set("content-length", String(fileContents.size));
                  }
                }
              }
            }
            if (typeof options.body === 'undefined') {
                options.body = request.postData.text;
            }
        }
    }
    /**
     * The fetch spec, which Node 18+ strictly abides by, now requires that `duplex` be sent with
     * requests that have payloads.
     *
     * As `RequestInit#duplex` isn't supported by any browsers, or even mentioned on MDN, we aren't
     * sending it in browser environments. This work is purely to support Node 18+ and `undici`
     * environments.
     *
     * @see {@link https://github.com/nodejs/node/issues/46221}
     * @see {@link https://github.com/whatwg/fetch/pull/1457}
     * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Request/Request}
     */
    if (shouldSetDuplex && !isBrowser()) {
        options.duplex = 'half';
    }
    }
    if (typeof options.body === "undefined") {
      options.body = request.postData.text;
    }
  }
    // We automaticaly assume that the HAR that we have already has query parameters encoded within
    // it so we do **not** use the `URLSearchParams` API here for composing the query string.
    var requestURL = url;
    if ('queryString' in request && request.queryString.length) {
        var urlObj = new URL(requestURL);
        var queryParams_1 = Array.from(urlObj.searchParams).map(function (_a) {
            var k = _a[0], v = _a[1];
            return "".concat(k, "=").concat(v);
        });
        request.queryString.forEach(function (q) {
            queryParams_1.push("".concat(q.name, "=").concat(q.value));
        });
        querystring = queryParams_1.join('&');
        // Because anchor hashes before query strings will prevent query strings from being delivered
        // we need to pop them off and re-add them after.
        if (urlObj.hash) {
            var urlWithoutHashes = requestURL.replace(urlObj.hash, '');
            requestURL = "".concat(urlWithoutHashes.split('?')[0]).concat(querystring ? "?".concat(querystring) : '');
            requestURL += urlObj.hash;
        }
        else {
            requestURL = "".concat(requestURL.split('?')[0]).concat(querystring ? "?".concat(querystring) : '');
        }
  if (shouldSetDuplex && !isBrowser()) {
    options.duplex = "half";
  }
    if (opts.userAgent) {
        headers.append('User-Agent', opts.userAgent);
    }
  let requestURL = url;
  if ("queryString" in request && request.queryString.length) {
    const urlObj = new URL(requestURL);
    const queryParams = Array.from(urlObj.searchParams).map(([k, v]) => `${k}=${v}`);
    request.queryString.forEach((q) => {
      queryParams.push(`${q.name}=${q.value}`);
    });
    querystring = queryParams.join("&");
    if (urlObj.hash) {
      const urlWithoutHashes = requestURL.replace(urlObj.hash, "");
      requestURL = `${urlWithoutHashes.split("?")[0]}${querystring ? `?${querystring}` : ""}`;
      requestURL += urlObj.hash;
    } else {
      requestURL = `${requestURL.split("?")[0]}${querystring ? `?${querystring}` : ""}`;
    }
    options.headers = headers;
    return fetch(requestURL, options);
}
  if (opts.userAgent) {
    headers.append("User-Agent", opts.userAgent);
  }
  options.headers = headers;
  return fetch(requestURL, options);
}
exports.default = fetchHAR;
module.exports = exports.default
//# sourceMappingURL=index.js.map
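Behaviorally, the most notable change buried in the rebuilt bundle above is the removal of `readable-stream`: when a supplied `File` stands in for a data-URL payload in Node, 11.0.0 now hands the web `ReadableStream` from `File#stream()` straight to `fetch`, sets `duplex: 'half'`, and fills in `content-length` itself. Below is a minimal sketch of that pattern, assuming a runtime where `File` and `fetch` exist (Node 20+, or Node 18 with `File` pulled from `undici`); the httpbin.org URL is only a placeholder.

```ts
// Sketch of the streaming-upload pattern the 11.0.0 build leans on. `duplex`
// is typed locally because lib.dom.d.ts does not declare it yet.
const file = new File(['hello world'], 'hello.txt', { type: 'text/plain' });

const init: RequestInit & { duplex?: 'half' } = {
  method: 'POST',
  body: file.stream(), // a web ReadableStream; no Readable.from() wrapper needed
  duplex: 'half', // required by Node 18+/undici for stream bodies
  headers: { 'content-length': String(file.size) }, // not added automatically for streams
};

const res = await fetch('https://httpbin.org/post', init);
console.log(res.status);
```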
{
  "name": "fetch-har",
  "version": "10.0.0",
  "version": "11.0.0",
  "description": "Make a fetch request from a HAR definition",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "module": "dist/index.mjs",
  "sideEffects": false,
  "exports": {
    ".": {
      "import": "./dist/index.mjs",
      "require": "./dist/index.js"
    },
    "./types": {
      "import": "./dist/types.mjs",
      "require": "./dist/types.js"
    },
    "./package.json": "./package.json"
  },
  "engines": {
    "node": ">=18"
  },
  "files": [
    "dist"
  ],
  "scripts": {
    "build": "tsc",
    "lint": "eslint . --ext .js,.ts",
    "build": "tsup",
    "lint": "npm run lint:types && npm run lint:ts && npm run prettier",
    "lint:ts": "eslint . --ext .js,.ts",
    "lint:types": "tsc",
    "prebuild": "rm -rf dist/",
    "prepack": "npm run build",
    "pretest": "npm run lint",
    "prettier": "prettier --list-different --write \"./**/**.{js,ts}\"",
    "prettier": "prettier --check .",
    "prettier:write": "prettier --check --write .",
    "test": "vitest --coverage",
@@ -30,16 +48,14 @@ "test:browser": "vitest --browser.name=chrome --browser.headless"
  "dependencies": {
    "@readme/data-urls": "^1.0.1",
    "@types/har-format": "^1.2.12",
    "readable-stream": "^3.6.0",
    "undici": "^5.24.0"
    "@readme/data-urls": "^3.0.0",
    "@types/har-format": "^1.2.13",
    "undici": "^5.25.1"
  },
  "devDependencies": {
    "@jsdevtools/host-environment": "^2.1.2",
    "@readme/eslint-config": "^12.2.0",
    "@readme/eslint-config": "^13.0.1",
    "@types/express": "^4.17.17",
    "@types/multer": "^1.4.7",
    "@types/node": "^20.5.7",
    "@types/readable-stream": "^2.3.15",
    "@types/node": "^20.6.3",
    "@vitest/browser": "^0.34.3",
    "@vitest/coverage-v8": "^0.34.3",
    "@vitest/coverage-v8": "^0.34.5",
    "datauri": "^4.1.0",
@@ -50,8 +66,10 @@ "eslint": "^8.48.0",
    "har-examples": "^3.1.1",
    "msw": "^1.3.1",
    "multer": "^1.4.5-lts.1",
    "prettier": "^3.0.3",
    "temp-dir": "^2.0.0",
    "temp-dir": "^3.0.0",
    "tsup": "^7.2.0",
    "typescript": "^5.2.2",
    "vitest": "^0.34.3",
    "webdriverio": "^8.15.10"
    "vitest": "^0.34.5",
    "webdriverio": "^8.16.15"
  },
@@ -58,0 +76,0 @@ "browserslist": [
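The reworked `exports` map above is what backs the README change that follows: both module systems now resolve to a prebuilt bundle, and CommonJS consumers no longer need `.default`. A small sketch of the two entry points (the file paths in the comments come from the `exports` block above):

```ts
// ESM resolves the "import" condition → ./dist/index.mjs
import fetchHAR from 'fetch-har';

// CommonJS resolves the "require" condition → ./dist/index.js, whose build
// ends with `module.exports = exports.default`, so no `.default` is needed:
//   const fetchHAR = require('fetch-har');
```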
# fetch-har

Make a [fetch](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) request from a HAR definition.

@@ -15,3 +16,3 @@
- [Tested](https://github.com/readmeio/fetch-har/actions) across Chrome, Safari, Firefox on Mac, Windows, and Linux.
- Requests can be mocked with [fetch-mock](https://npm.im/fetch-mock) or [msw](https://npm.im/msw).
- Requests can be mocked with [`msw`](https://npm.im/msw) or [`fetch-mock`](https://npm.im/fetch-mock) (though the latter does not appear to be maintained).

@@ -25,5 +26,6 @@ ## Installation

## Usage

```js
import fetchHAR from 'fetch-har';
// const fetchHAR = require('fetch-har').default;
// const fetchHAR = require('fetch-har');
```

@@ -67,4 +69,7 @@ const har = {

### API

#### Options

##### userAgent

A custom `User-Agent` header to apply to your request. Please note that browsers have their own handling for these headers in `fetch()` calls, so it may not work everywhere; it will always be sent in Node, however.
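For example (the version string here is a made-up stand-in, and `har` is the HAR object from the Usage section above):

```ts
// Adds `User-Agent: my-integration/2.3.1` to the outgoing request; reliable in
// Node, while browsers may ignore or rewrite the header.
const res = await fetchHAR(har, { userAgent: 'my-integration/2.3.1' });
```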
@@ -77,9 +82,12 @@

##### files

An optional object map you can supply to use for `multipart/form-data` file uploads in lieu of relying on whether the HAR you have has [data URLs](https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs). It supports Node file buffers and the [File](https://developer.mozilla.org/en-US/docs/Web/API/File) API.

```js
await fetchHAR(har, { files: {
  'owlbert.png': await fs.readFile('./owlbert.png'),
  'file.txt': document.querySelector('#some-file-input').files[0],
} });
await fetchHAR(har, {
  files: {
    'owlbert.png': await fs.readFile('./owlbert.png'),
    'file.txt': document.querySelector('#some-file-input').files[0],
  },
});
```
@@ -90,2 +98,3 @@

##### init

This optional argument lets you supply any option that can be supplied to the [Request constructor](https://developer.mozilla.org/en-US/docs/Web/API/Request/Request).

@@ -100,5 +109,5 @@
  },
})
});
```

> ❗ Note that if you supply `body` or `credentials` to this option they may be overridden by what your HAR requires.
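Putting the pieces together, a hedged sketch of passing `init` alongside the HAR-driven request; the `x-trace-id` header is hypothetical, and `har`/`fetchHAR` are as in the Usage section above:

```ts
const res = await fetchHAR(har, {
  init: {
    headers: new Headers({ 'x-trace-id': 'abc123' }), // hypothetical example header
    redirect: 'follow',
    // Any `body` or `credentials` set here may be overridden by what the HAR needs.
  },
});
```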
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package

Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package

New author
Supply chain risk: A new npm collaborator published a version of the package for the first time. New collaborators are usually benign additions to a project, but do indicate a change to the security surface area of a package.
Found 1 instance in 1 package

Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
+ Added @readme/data-urls@3.0.0 (transitive)
- Removed readable-stream@^3.6.0
- Removed @readme/data-urls@1.0.1 (transitive)
- Removed inherits@2.0.4 (transitive)
- Removed readable-stream@3.6.2 (transitive)
- Removed safe-buffer@5.2.1 (transitive)
- Removed string_decoder@1.3.0 (transitive)
- Removed util-deprecate@1.0.2 (transitive)
Updated @readme/data-urls@^3.0.0
Updated @types/har-format@^1.2.13
Updated undici@^5.25.1