fetch-har
Advanced tools
+261
| "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } }var __defProp = Object.defineProperty; | ||
| var __defProps = Object.defineProperties; | ||
| var __getOwnPropDescs = Object.getOwnPropertyDescriptors; | ||
| var __getOwnPropSymbols = Object.getOwnPropertySymbols; | ||
| var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
| var __propIsEnum = Object.prototype.propertyIsEnumerable; | ||
| var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; | ||
| var __spreadValues = (a, b) => { | ||
| for (var prop in b || (b = {})) | ||
| if (__hasOwnProp.call(b, prop)) | ||
| __defNormalProp(a, prop, b[prop]); | ||
| if (__getOwnPropSymbols) | ||
| for (var prop of __getOwnPropSymbols(b)) { | ||
| if (__propIsEnum.call(b, prop)) | ||
| __defNormalProp(a, prop, b[prop]); | ||
| } | ||
| return a; | ||
| }; | ||
| var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b)); | ||
| var __async = (__this, __arguments, generator) => { | ||
| return new Promise((resolve, reject) => { | ||
| var fulfilled = (value) => { | ||
| try { | ||
| step(generator.next(value)); | ||
| } catch (e) { | ||
| reject(e); | ||
| } | ||
| }; | ||
| var rejected = (value) => { | ||
| try { | ||
| step(generator.throw(value)); | ||
| } catch (e) { | ||
| reject(e); | ||
| } | ||
| }; | ||
| var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); | ||
| step((generator = generator.apply(__this, __arguments)).next()); | ||
| }); | ||
| }; | ||
| // src/index.ts | ||
| var _dataurls = require('@readme/data-urls'); | ||
| function isBrowser() { | ||
| return typeof window !== "undefined" && typeof document !== "undefined"; | ||
| } | ||
| function isBuffer(value) { | ||
| return typeof Buffer !== "undefined" && Buffer.isBuffer(value); | ||
| } | ||
| function isFile(value) { | ||
| if (value instanceof File) { | ||
| return value.constructor.name === "File"; | ||
| } | ||
| return false; | ||
| } | ||
| function getFileFromSuppliedFiles(filename, files) { | ||
| if (files && filename in files) { | ||
| return files[filename]; | ||
| } else if (files && decodeURIComponent(filename) in files) { | ||
| return files[decodeURIComponent(filename)]; | ||
| } | ||
| return false; | ||
| } | ||
| function fetchHAR(_0) { | ||
| return __async(this, arguments, function* (har, opts = {}) { | ||
| var _a, _b, _c, _d, _e; | ||
| if (!har) | ||
| throw new Error("Missing HAR definition"); | ||
| if (!har.log || !har.log.entries || !har.log.entries.length) | ||
| throw new Error("Missing log.entries array"); | ||
| if (!globalThis.Blob) { | ||
| try { | ||
| const NodeBlob = (yield Promise.resolve().then(() => _interopRequireWildcard(require("buffer")))).Blob; | ||
| globalThis.Blob = NodeBlob; | ||
| } catch (e) { | ||
| throw new Error( | ||
| "The Blob API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/Blob" | ||
| ); | ||
| } | ||
| } | ||
| if (!globalThis.File) { | ||
| try { | ||
| const UndiciFile = (yield Promise.resolve().then(() => _interopRequireWildcard(require("undici")))).File; | ||
| globalThis.File = UndiciFile; | ||
| } catch (e) { | ||
| throw new Error( | ||
| "The File API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/File" | ||
| ); | ||
| } | ||
| } | ||
| const { request } = har.log.entries[0]; | ||
| const { url } = request; | ||
| let querystring = ""; | ||
| let shouldSetDuplex = false; | ||
| const options = __spreadProps(__spreadValues({}, opts.init ? opts.init : {}), { | ||
| method: request.method | ||
| }); | ||
| if (!options.headers) { | ||
| options.headers = new Headers(); | ||
| } else if (typeof options.headers === "object" && !(options.headers instanceof Headers) && options.headers !== null) { | ||
| options.headers = new Headers(options.headers); | ||
| } | ||
| const headers = options.headers; | ||
| if ("headers" in request && request.headers.length) { | ||
| request.headers.forEach((header) => { | ||
| try { | ||
| return headers.append(header.name, header.value); | ||
| } catch (err) { | ||
| } | ||
| }); | ||
| } | ||
| if ("cookies" in request && request.cookies.length) { | ||
| if (isBrowser()) { | ||
| request.cookies.forEach((cookie) => { | ||
| document.cookie = `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`; | ||
| }); | ||
| options.credentials = "include"; | ||
| } else { | ||
| headers.append( | ||
| "cookie", | ||
| request.cookies.map((cookie) => `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`).join("; ") | ||
| ); | ||
| } | ||
| } | ||
| if ("postData" in request) { | ||
| if (request.postData && "params" in request.postData) { | ||
| if (!("mimeType" in request.postData)) { | ||
| request.postData.mimeType = "application/octet-stream"; | ||
| } | ||
| switch (request.postData.mimeType) { | ||
| case "application/x-www-form-urlencoded": | ||
| headers.set("Content-Type", request.postData.mimeType); | ||
| const encodedParams = new URLSearchParams(); | ||
| (_a = request.postData.params) == null ? void 0 : _a.forEach((param) => { | ||
| if (param.value) | ||
| encodedParams.set(param.name, param.value); | ||
| }); | ||
| options.body = encodedParams.toString(); | ||
| break; | ||
| case "multipart/alternative": | ||
| case "multipart/form-data": | ||
| case "multipart/mixed": | ||
| case "multipart/related": | ||
| if (headers.has("Content-Type")) { | ||
| headers.delete("Content-Type"); | ||
| } | ||
| const form = new FormData(); | ||
| (_b = request.postData.params) == null ? void 0 : _b.forEach((param) => { | ||
| if ("fileName" in param && param.fileName) { | ||
| if (opts.files) { | ||
| const fileContents = getFileFromSuppliedFiles(param.fileName, opts.files); | ||
| if (fileContents) { | ||
| if (isBuffer(fileContents)) { | ||
| form.append( | ||
| param.name, | ||
| new File([fileContents], param.fileName, { | ||
| type: param.contentType || void 0 | ||
| }), | ||
| param.fileName | ||
| ); | ||
| return; | ||
| } else if (isFile(fileContents)) { | ||
| form.append(param.name, fileContents, param.fileName); | ||
| return; | ||
| } | ||
| throw new TypeError( | ||
| "An unknown object has been supplied into the `files` config for use. We only support instances of the File API and Node Buffer objects." | ||
| ); | ||
| } | ||
| } | ||
| if ("value" in param && param.value) { | ||
| let paramBlob; | ||
| const parsed = _dataurls.parse.call(void 0, param.value); | ||
| if (parsed) { | ||
| paramBlob = new Blob([param.value], { type: parsed.contentType || param.contentType || void 0 }); | ||
| } else { | ||
| paramBlob = new Blob([param.value], { type: param.contentType || void 0 }); | ||
| } | ||
| form.append(param.name, paramBlob, param.fileName); | ||
| return; | ||
| } | ||
| throw new Error( | ||
| "The supplied HAR has a postData parameter with `fileName`, but neither `value` content within the HAR or any file buffers were supplied with the `files` option. Since this library doesn't have access to the filesystem, it can't fetch that file." | ||
| ); | ||
| } | ||
| if (param.value) | ||
| form.append(param.name, param.value); | ||
| }); | ||
| options.body = form; | ||
| break; | ||
| default: | ||
| const formBody = {}; | ||
| (_c = request.postData.params) == null ? void 0 : _c.map((param) => { | ||
| try { | ||
| formBody[param.name] = JSON.parse(param.value || ""); | ||
| } catch (e) { | ||
| formBody[param.name] = param.value; | ||
| } | ||
| return true; | ||
| }); | ||
| options.body = JSON.stringify(formBody); | ||
| } | ||
| } else if ((_e = (_d = request.postData) == null ? void 0 : _d.text) == null ? void 0 : _e.length) { | ||
| if (opts.files) { | ||
| const parsed = _dataurls.parse.call(void 0, request.postData.text); | ||
| if (parsed) { | ||
| if ((parsed == null ? void 0 : parsed.name) && parsed.name in opts.files) { | ||
| const fileContents = getFileFromSuppliedFiles(parsed.name, opts.files); | ||
| if (fileContents) { | ||
| if (isBuffer(fileContents)) { | ||
| options.body = fileContents; | ||
| } else if (isFile(fileContents)) { | ||
| if (isBrowser()) { | ||
| options.body = fileContents; | ||
| } else { | ||
| options.body = fileContents.stream(); | ||
| shouldSetDuplex = true; | ||
| if (!headers.has("content-length")) { | ||
| headers.set("content-length", String(fileContents.size)); | ||
| } | ||
| } | ||
| } | ||
| } | ||
| } | ||
| } | ||
| } | ||
| if (typeof options.body === "undefined") { | ||
| options.body = request.postData.text; | ||
| } | ||
| } | ||
| if (shouldSetDuplex && !isBrowser()) { | ||
| options.duplex = "half"; | ||
| } | ||
| } | ||
| let requestURL = url; | ||
| if ("queryString" in request && request.queryString.length) { | ||
| const urlObj = new URL(requestURL); | ||
| const queryParams = Array.from(urlObj.searchParams).map(([k, v]) => `${k}=${v}`); | ||
| request.queryString.forEach((q) => { | ||
| queryParams.push(`${q.name}=${q.value}`); | ||
| }); | ||
| querystring = queryParams.join("&"); | ||
| if (urlObj.hash) { | ||
| const urlWithoutHashes = requestURL.replace(urlObj.hash, ""); | ||
| requestURL = `${urlWithoutHashes.split("?")[0]}${querystring ? `?${querystring}` : ""}`; | ||
| requestURL += urlObj.hash; | ||
| } else { | ||
| requestURL = `${requestURL.split("?")[0]}${querystring ? `?${querystring}` : ""}`; | ||
| } | ||
| } | ||
| if (opts.userAgent) { | ||
| headers.append("User-Agent", opts.userAgent); | ||
| } | ||
| options.headers = headers; | ||
| return fetch(requestURL, options); | ||
| }); | ||
| } | ||
| exports.default = fetchHAR; | ||
| module.exports = exports.default//# sourceMappingURL=index.cjs.map |
| {"version":3,"sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAIA,SAAS,SAAS,oBAAoB;AAQtC,SAAS,YAAY;AACnB,SAAO,OAAO,WAAW,eAAe,OAAO,aAAa;AAC9D;AAEA,SAAS,SAAS,OAAY;AAC5B,SAAO,OAAO,WAAW,eAAe,OAAO,SAAS,KAAK;AAC/D;AAEA,SAAS,OAAO,OAAY;AAC1B,MAAI,iBAAiB,MAAM;AAOzB,WAAO,MAAM,YAAY,SAAS;AAAA,EACpC;AAEA,SAAO;AACT;AAEA,SAAS,yBAAyB,UAAkB,OAAiC;AACnF,MAAI,SAAS,YAAY,OAAO;AAC9B,WAAO,MAAM,QAAQ;AAAA,EACvB,WAAW,SAAS,mBAAmB,QAAQ,KAAK,OAAO;AACzD,WAAO,MAAM,mBAAmB,QAAQ,CAAC;AAAA,EAC3C;AAEA,SAAO;AACT;AAEA,SAAO,SAAgC,IAAyD;AAAA,6CAAzD,KAAU,OAAwB,CAAC,GAAsB;AA5ChG;AA6CE,QAAI,CAAC;AAAK,YAAM,IAAI,MAAM,wBAAwB;AAClD,QAAI,CAAC,IAAI,OAAO,CAAC,IAAI,IAAI,WAAW,CAAC,IAAI,IAAI,QAAQ;AAAQ,YAAM,IAAI,MAAM,2BAA2B;AAExG,QAAI,CAAC,WAAW,MAAM;AACpB,UAAI;AACF,cAAM,YAAY,MAAM,OAAO,QAAa,GAAG;AAE/C,mBAAW,OAAO;AAAA,MACpB,SAAS,GAAG;AACV,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,QAAI,CAAC,WAAW,MAAM;AACpB,UAAI;AAGF,cAAM,cAAc,MAAM,OAAO,QAAQ,GAAG;AAE5C,mBAAW,OAAO;AAAA,MACpB,SAAS,GAAG;AACV,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,UAAM,EAAE,QAAQ,IAAI,IAAI,IAAI,QAAQ,CAAC;AACrC,UAAM,EAAE,IAAI,IAAI;AAChB,QAAI,cAAc;AAClB,QAAI,kBAAkB;AAEtB,UAAM,UAAiC,iCAGjC,KAAK,OAAO,KAAK,OAAO,CAAC,IAHQ;AAAA,MAIrC,QAAQ,QAAQ;AAAA,IAClB;AAEA,QAAI,CAAC,QAAQ,SAAS;AACpB,cAAQ,UAAU,IAAI,QAAQ;AAAA,IAChC,WAAW,OAAO,QAAQ,YAAY,YAAY,EAAE,QAAQ,mBAAmB,YAAY,QAAQ,YAAY,MAAM;AACnH,cAAQ,UAAU,IAAI,QAAQ,QAAQ,OAAO;AAAA,IAC/C;AAEA,UAAM,UAAU,QAAQ;AACxB,QAAI,aAAa,WAAW,QAAQ,QAAQ,QAAQ;AAElD,cAAQ,QAAQ,QAAQ,YAAU;AAChC,YAAI;AACF,iBAAO,QAAQ,OAAO,OAAO,MAAM,OAAO,KAAK;AAAA,QACjD,SAAS,KAAK;AAAA,QAOd;AAAA,MACF,CAAC;AAAA,IACH;AAEA,QAAI,aAAa,WAAW,QAAQ,QAAQ,QAAQ;AAOlD,UAAI,UAAU,GAAG;AACf,gBAAQ,QAAQ,QAAQ,YAAU;AAChC,mBAAS,SAAS,GAAG,mBAAmB,OAAO,IAAI,CAAC,IAAI,mBAAmB,OAAO,KAAK,CAAC;AAAA,QAC1F,CAAC;AAED,gBAAQ,cAAc;AAAA,MACxB,OAAO;AACL,gBAAQ;AAAA,UACN;AAAA,UACA,QAAQ,QACL,IAAI,YAAU,GAAG,mBAAmB,OAAO,IAAI,CAAC,IAAI,mBAAmB,OAAO,KAAK,CAAC,EAAE,EACtF,KAAK,IAAI;AAAA,QACd;AAAA,MACF;AAAA,IACF;AAEA,QAAI,cAAc,SAAS;AACzB,UAAI,QAA
Q,YAAY,YAAY,QAAQ,UAAU;AACpD,YAAI,EAAE,cAAc,QAAQ,WAAW;AAErC,kBAAQ,SAAS,WAAW;AAAA,QAC9B;AAEA,gBAAQ,QAAQ,SAAS,UAAU;AAAA,UACjC,KAAK;AASH,oBAAQ,IAAI,gBAAgB,QAAQ,SAAS,QAAQ;AAErD,kBAAM,gBAAgB,IAAI,gBAAgB;AAC1C,0BAAQ,SAAS,WAAjB,mBAAyB,QAAQ,WAAS;AACxC,kBAAI,MAAM;AAAO,8BAAc,IAAI,MAAM,MAAM,MAAM,KAAK;AAAA,YAC5D;AAEA,oBAAQ,OAAO,cAAc,SAAS;AACtC;AAAA,UAEF,KAAK;AAAA,UACL,KAAK;AAAA,UACL,KAAK;AAAA,UACL,KAAK;AASH,gBAAI,QAAQ,IAAI,cAAc,GAAG;AAC/B,sBAAQ,OAAO,cAAc;AAAA,YAC/B;AAEA,kBAAM,OAAO,IAAI,SAAS;AAE1B,0BAAQ,SAAS,WAAjB,mBAAyB,QAAQ,WAAS;AACxC,kBAAI,cAAc,SAAS,MAAM,UAAU;AACzC,oBAAI,KAAK,OAAO;AACd,wBAAM,eAAe,yBAAyB,MAAM,UAAU,KAAK,KAAK;AACxE,sBAAI,cAAc;AAGhB,wBAAI,SAAS,YAAY,GAAG;AAC1B,2BAAK;AAAA,wBACH,MAAM;AAAA,wBACN,IAAI,KAAK,CAAC,YAAY,GAAG,MAAM,UAAU;AAAA,0BACvC,MAAM,MAAM,eAAe;AAAA,wBAC7B,CAAC;AAAA,wBACD,MAAM;AAAA,sBACR;AAEA;AAAA,oBACF,WAAW,OAAO,YAAY,GAAG;AAC/B,2BAAK,OAAO,MAAM,MAAM,cAAsB,MAAM,QAAQ;AAC5D;AAAA,oBACF;AAEA,0BAAM,IAAI;AAAA,sBACR;AAAA,oBACF;AAAA,kBACF;AAAA,gBACF;AAEA,oBAAI,WAAW,SAAS,MAAM,OAAO;AACnC,sBAAI;AACJ,wBAAM,SAAS,aAAa,MAAM,KAAK;AACvC,sBAAI,QAAQ;AAGV,gCAAY,IAAI,KAAK,CAAC,MAAM,KAAK,GAAG,EAAE,MAAM,OAAO,eAAe,MAAM,eAAe,OAAU,CAAC;AAAA,kBACpG,OAAO;AACL,gCAAY,IAAI,KAAK,CAAC,MAAM,KAAK,GAAG,EAAE,MAAM,MAAM,eAAe,OAAU,CAAC;AAAA,kBAC9E;AAEA,uBAAK,OAAO,MAAM,MAAM,WAAW,MAAM,QAAQ;AACjD;AAAA,gBACF;AAEA,sBAAM,IAAI;AAAA,kBACR;AAAA,gBACF;AAAA,cACF;AAEA,kBAAI,MAAM;AAAO,qBAAK,OAAO,MAAM,MAAM,MAAM,KAAK;AAAA,YACtD;AAEA,oBAAQ,OAAO;AACf;AAAA,UAEF;AACE,kBAAM,WAAoC,CAAC;AAC3C,0BAAQ,SAAS,WAAjB,mBAAyB,IAAI,WAAS;AACpC,kBAAI;AACF,yBAAS,MAAM,IAAI,IAAI,KAAK,MAAM,MAAM,SAAS,EAAE;AAAA,cACrD,SAAS,GAAG;AACV,yBAAS,MAAM,IAAI,IAAI,MAAM;AAAA,cAC/B;AAEA,qBAAO;AAAA,YACT;AAEA,oBAAQ,OAAO,KAAK,UAAU,QAAQ;AAAA,QAC1C;AAAA,MACF,YAAW,mBAAQ,aAAR,mBAAkB,SAAlB,mBAAwB,QAAQ;AAGzC,YAAI,KAAK,OAAO;AACd,gBAAM,SAAS,aAAa,QAAQ,SAAS,IAAI;AACjD,cAAI,QAAQ;AACV,iBAAI,iCAAQ,SAAQ,OAAO,QAAQ,KAAK,OAAO;AAC7C,oBAAM,eAAe,yBAAyB,OAAO,MAAM,KAAK,KAAK;AACrE,kBAAI,cAAc;AAChB,oBAAI,SAAS,YAAY,GAAG;AAC1B,0BAAQ,OAAO;AAAA,gBACjB,WAAW,OAAO,YAAY,GAAG;AAG/B,sBA
AI,UAAU,GAAG;AACf,4BAAQ,OAAO;AAAA,kBACjB,OAAO;AACL,4BAAQ,OAAQ,aAAsB,OAAO;AAC7C,sCAAkB;AAIlB,wBAAI,CAAC,QAAQ,IAAI,gBAAgB,GAAG;AAClC,8BAAQ,IAAI,kBAAkB,OAAQ,aAAsB,IAAI,CAAC;AAAA,oBACnE;AAAA,kBACF;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,YAAI,OAAO,QAAQ,SAAS,aAAa;AACvC,kBAAQ,OAAO,QAAQ,SAAS;AAAA,QAClC;AAAA,MACF;AAcA,UAAI,mBAAmB,CAAC,UAAU,GAAG;AACnC,gBAAQ,SAAS;AAAA,MACnB;AAAA,IACF;AAIA,QAAI,aAAa;AACjB,QAAI,iBAAiB,WAAW,QAAQ,YAAY,QAAQ;AAC1D,YAAM,SAAS,IAAI,IAAI,UAAU;AAEjC,YAAM,cAAc,MAAM,KAAK,OAAO,YAAY,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,EAAE;AAC/E,cAAQ,YAAY,QAAQ,OAAK;AAC/B,oBAAY,KAAK,GAAG,EAAE,IAAI,IAAI,EAAE,KAAK,EAAE;AAAA,MACzC,CAAC;AAED,oBAAc,YAAY,KAAK,GAAG;AAIlC,UAAI,OAAO,MAAM;AACf,cAAM,mBAAmB,WAAW,QAAQ,OAAO,MAAM,EAAE;AAC3D,qBAAa,GAAG,iBAAiB,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,cAAc,IAAI,WAAW,KAAK,EAAE;AACrF,sBAAc,OAAO;AAAA,MACvB,OAAO;AACL,qBAAa,GAAG,WAAW,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,cAAc,IAAI,WAAW,KAAK,EAAE;AAAA,MACjF;AAAA,IACF;AAEA,QAAI,KAAK,WAAW;AAClB,cAAQ,OAAO,cAAc,KAAK,SAAS;AAAA,IAC7C;AAEA,YAAQ,UAAU;AAElB,WAAO,MAAM,YAAY,OAAO;AAAA,EAClC;AAAA","sourcesContent":["import type { FetchHAROptions, RequestInitWithDuplex } from './types.js';\nimport type { DataURL as npmDataURL } from '@readme/data-urls';\nimport type { Har } from 'har-format';\n\nimport { parse as parseDataUrl } from '@readme/data-urls';\n\ntype DataURL = npmDataURL & {\n // `parse-data-url` doesn't explicitly support `name` in data URLs but if it's there it'll be\n // returned back to us.\n name?: string;\n};\n\nfunction isBrowser() {\n return typeof window !== 'undefined' && typeof document !== 'undefined';\n}\n\nfunction isBuffer(value: any) {\n return typeof Buffer !== 'undefined' && Buffer.isBuffer(value);\n}\n\nfunction isFile(value: any) {\n if (value instanceof File) {\n /**\n * The `Blob` polyfill on Node comes back as being an instanceof `File`. 
Because passing a Blob\n * into a File will end up with a corrupted file we want to prevent this.\n *\n * This object identity crisis does not happen in the browser.\n */\n return value.constructor.name === 'File';\n }\n\n return false;\n}\n\nfunction getFileFromSuppliedFiles(filename: string, files: FetchHAROptions['files']) {\n if (files && filename in files) {\n return files[filename];\n } else if (files && decodeURIComponent(filename) in files) {\n return files[decodeURIComponent(filename)];\n }\n\n return false;\n}\n\nexport default async function fetchHAR(har: Har, opts: FetchHAROptions = {}): Promise<Response> {\n if (!har) throw new Error('Missing HAR definition');\n if (!har.log || !har.log.entries || !har.log.entries.length) throw new Error('Missing log.entries array');\n\n if (!globalThis.Blob) {\n try {\n const NodeBlob = (await import('node:buffer')).Blob;\n // @ts-expect-error the types don't match exactly, which is expected!\n globalThis.Blob = NodeBlob;\n } catch (e) {\n throw new Error(\n 'The Blob API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/Blob',\n );\n }\n }\n\n if (!globalThis.File) {\n try {\n // Node's native `fetch` implementation unfortunately does not make this API global so we need\n // to pull it in if we don't have it.\n const UndiciFile = (await import('undici')).File;\n // @ts-expect-error the types don't match exactly, which is expected!\n globalThis.File = UndiciFile;\n } catch (e) {\n throw new Error(\n 'The File API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/File',\n );\n }\n }\n\n const { request } = har.log.entries[0];\n const { url } = request;\n let querystring = '';\n let shouldSetDuplex = false;\n\n const options: RequestInitWithDuplex = {\n // If we have custom options for the `Request` API we need to add them in here now before we\n // fill it in with everything we need from the HAR.\n ...(opts.init ? 
opts.init : {}),\n method: request.method,\n };\n\n if (!options.headers) {\n options.headers = new Headers();\n } else if (typeof options.headers === 'object' && !(options.headers instanceof Headers) && options.headers !== null) {\n options.headers = new Headers(options.headers);\n }\n\n const headers = options.headers as Headers;\n if ('headers' in request && request.headers.length) {\n // eslint-disable-next-line consistent-return\n request.headers.forEach(header => {\n try {\n return headers.append(header.name, header.value);\n } catch (err) {\n /**\n * `Headers.append()` will throw errors if the header name is not a legal HTTP header name,\n * like `X-API-KEY (Header)`. If that happens instead of tossing the error back out, we\n * should silently just ignore\n * it.\n */\n }\n });\n }\n\n if ('cookies' in request && request.cookies.length) {\n /**\n * As the browser fetch API can't set custom cookies for requests, they instead need to be\n * defined on the document and passed into the request via `credentials: include`. 
Since this\n * is a browser-specific quirk, that should only\n * happen in browsers!\n */\n if (isBrowser()) {\n request.cookies.forEach(cookie => {\n document.cookie = `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`;\n });\n\n options.credentials = 'include';\n } else {\n headers.append(\n 'cookie',\n request.cookies\n .map(cookie => `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`)\n .join('; '),\n );\n }\n }\n\n if ('postData' in request) {\n if (request.postData && 'params' in request.postData) {\n if (!('mimeType' in request.postData)) {\n // @ts-expect-error HAR spec requires that `mimeType` is always present but it might not be.\n request.postData.mimeType = 'application/octet-stream';\n }\n\n switch (request.postData.mimeType) {\n case 'application/x-www-form-urlencoded':\n /**\n * Since the content we're handling here is to be encoded as\n * `application/x-www-form-urlencoded`, this should override any other `Content-Type`\n * headers that are present in the HAR. This is how Postman handles this case when\n * building code snippets!\n *\n * @see {@link https://github.com/github/fetch/issues/263#issuecomment-209530977}\n */\n headers.set('Content-Type', request.postData.mimeType);\n\n const encodedParams = new URLSearchParams();\n request.postData.params?.forEach(param => {\n if (param.value) encodedParams.set(param.name, param.value);\n });\n\n options.body = encodedParams.toString();\n break;\n\n case 'multipart/alternative':\n case 'multipart/form-data':\n case 'multipart/mixed':\n case 'multipart/related':\n /**\n * If there's a `Content-Type` header set we need to remove it. 
We're doing this because\n * when we pass the form data object into `fetch` that'll set a proper `Content-Type`\n * header for this request that also includes the boundary used on the content.\n *\n * If we don't do this, then consumers won't be able to parse out the payload because\n * they won't know what the boundary to split on it.\n */\n if (headers.has('Content-Type')) {\n headers.delete('Content-Type');\n }\n\n const form = new FormData();\n\n request.postData.params?.forEach(param => {\n if ('fileName' in param && param.fileName) {\n if (opts.files) {\n const fileContents = getFileFromSuppliedFiles(param.fileName, opts.files);\n if (fileContents) {\n // If the file we've got available to us is a Buffer then we need to convert it so\n // that the FormData API can use it.\n if (isBuffer(fileContents)) {\n form.append(\n param.name,\n new File([fileContents], param.fileName, {\n type: param.contentType || undefined,\n }),\n param.fileName,\n );\n\n return;\n } else if (isFile(fileContents)) {\n form.append(param.name, fileContents as Blob, param.fileName);\n return;\n }\n\n throw new TypeError(\n 'An unknown object has been supplied into the `files` config for use. 
We only support instances of the File API and Node Buffer objects.',\n );\n }\n }\n\n if ('value' in param && param.value) {\n let paramBlob;\n const parsed = parseDataUrl(param.value);\n if (parsed) {\n // If we were able to parse out this data URL we don't need to transform its data\n // into a buffer for `Blob` because that supports data URLs already.\n paramBlob = new Blob([param.value], { type: parsed.contentType || param.contentType || undefined });\n } else {\n paramBlob = new Blob([param.value], { type: param.contentType || undefined });\n }\n\n form.append(param.name, paramBlob, param.fileName);\n return;\n }\n\n throw new Error(\n \"The supplied HAR has a postData parameter with `fileName`, but neither `value` content within the HAR or any file buffers were supplied with the `files` option. Since this library doesn't have access to the filesystem, it can't fetch that file.\",\n );\n }\n\n if (param.value) form.append(param.name, param.value);\n });\n\n options.body = form;\n break;\n\n default:\n const formBody: Record<string, unknown> = {};\n request.postData.params?.map(param => {\n try {\n formBody[param.name] = JSON.parse(param.value || '');\n } catch (e) {\n formBody[param.name] = param.value;\n }\n\n return true;\n });\n\n options.body = JSON.stringify(formBody);\n }\n } else if (request.postData?.text?.length) {\n // If we've got `files` map content present, and this post data content contains a valid data\n // URL then we can substitute the payload with that file instead of the using data URL.\n if (opts.files) {\n const parsed = parseDataUrl(request.postData.text) as DataURL;\n if (parsed) {\n if (parsed?.name && parsed.name in opts.files) {\n const fileContents = getFileFromSuppliedFiles(parsed.name, opts.files);\n if (fileContents) {\n if (isBuffer(fileContents)) {\n options.body = fileContents;\n } else if (isFile(fileContents)) {\n // `Readable.from` isn't available in browsers but the browser `Request` object can\n // handle `File` objects 
just fine without us having to mold it into shape.\n if (isBrowser()) {\n options.body = fileContents;\n } else {\n options.body = (fileContents as File).stream();\n shouldSetDuplex = true;\n\n // Supplying a polyfilled `File` stream into `Request.body` doesn't automatically\n // add `Content-Length`.\n if (!headers.has('content-length')) {\n headers.set('content-length', String((fileContents as File).size));\n }\n }\n }\n }\n }\n }\n }\n\n if (typeof options.body === 'undefined') {\n options.body = request.postData.text;\n }\n }\n\n /**\n * The fetch spec, which Node 18+ strictly abides by, now requires that `duplex` be sent with\n * requests that have payloads.\n *\n * As `RequestInit#duplex` isn't supported by any browsers, or even mentioned on MDN, we aren't\n * sending it in browser environments. This work is purely to support Node 18+ and `undici`\n * environments.\n *\n * @see {@link https://github.com/nodejs/node/issues/46221}\n * @see {@link https://github.com/whatwg/fetch/pull/1457}\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Request/Request}\n */\n if (shouldSetDuplex && !isBrowser()) {\n options.duplex = 'half';\n }\n }\n\n // We automaticaly assume that the HAR that we have already has query parameters encoded within\n // it so we do **not** use the `URLSearchParams` API here for composing the query string.\n let requestURL = url;\n if ('queryString' in request && request.queryString.length) {\n const urlObj = new URL(requestURL);\n\n const queryParams = Array.from(urlObj.searchParams).map(([k, v]) => `${k}=${v}`);\n request.queryString.forEach(q => {\n queryParams.push(`${q.name}=${q.value}`);\n });\n\n querystring = queryParams.join('&');\n\n // Because anchor hashes before query strings will prevent query strings from being delivered\n // we need to pop them off and re-add them after.\n if (urlObj.hash) {\n const urlWithoutHashes = requestURL.replace(urlObj.hash, '');\n requestURL = 
`${urlWithoutHashes.split('?')[0]}${querystring ? `?${querystring}` : ''}`;\n requestURL += urlObj.hash;\n } else {\n requestURL = `${requestURL.split('?')[0]}${querystring ? `?${querystring}` : ''}`;\n }\n }\n\n if (opts.userAgent) {\n headers.append('User-Agent', opts.userAgent);\n }\n\n options.headers = headers;\n\n return fetch(requestURL, options);\n}\n"]} |
| import { FetchHAROptions } from './types.cjs'; | ||
| import { Har } from 'har-format'; | ||
| declare function fetchHAR(har: Har, opts?: FetchHAROptions): Promise<Response>; | ||
| export = fetchHAR; |
| "use strict";//# sourceMappingURL=types.cjs.map |
| {"version":3,"sources":[],"names":[],"mappings":""} |
| interface RequestInitWithDuplex extends RequestInit { | ||
| /** | ||
| * `RequestInit#duplex` does not yet exist in the TS `lib.dom.d.ts` definition yet the native | ||
| * fetch implementation in Node 18+, `undici`, requires it for certain POST payloads. | ||
| * | ||
| * @see {@link https://github.com/microsoft/TypeScript-DOM-lib-generator/issues/1483} | ||
| * @see {@link https://github.com/nodejs/node/issues/46221} | ||
| * @see {@link https://fetch.spec.whatwg.org/#request-class} | ||
| * @see {@link https://github.com/microsoft/TypeScript/blob/main/lib/lib.dom.d.ts} | ||
| */ | ||
| duplex?: 'half'; | ||
| } | ||
| interface FetchHAROptions { | ||
| files?: Record<string, Blob | Buffer>; | ||
| init?: RequestInitWithDuplex; | ||
| userAgent?: string; | ||
| } | ||
| export { FetchHAROptions, RequestInitWithDuplex }; |
+1
-1
@@ -6,2 +6,2 @@ import { FetchHAROptions } from './types.js'; | ||
| export = fetchHAR; | ||
| export { fetchHAR as default }; |
+196
-176
@@ -1,2 +0,2 @@ | ||
| "use strict";Object.defineProperty(exports, "__esModule", {value: true});var __defProp = Object.defineProperty; | ||
| var __defProp = Object.defineProperty; | ||
| var __defProps = Object.defineProperties; | ||
@@ -20,26 +20,25 @@ var __getOwnPropDescs = Object.getOwnPropertyDescriptors; | ||
| var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b)); | ||
| var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, { | ||
| get: (a, b) => (typeof require !== "undefined" ? require : a)[b] | ||
| }) : x)(function(x) { | ||
| if (typeof require !== "undefined") | ||
| return require.apply(this, arguments); | ||
| throw Error('Dynamic require of "' + x + '" is not supported'); | ||
| }); | ||
| var __async = (__this, __arguments, generator) => { | ||
| return new Promise((resolve, reject) => { | ||
| var fulfilled = (value) => { | ||
| try { | ||
| step(generator.next(value)); | ||
| } catch (e) { | ||
| reject(e); | ||
| } | ||
| }; | ||
| var rejected = (value) => { | ||
| try { | ||
| step(generator.throw(value)); | ||
| } catch (e) { | ||
| reject(e); | ||
| } | ||
| }; | ||
| var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); | ||
| step((generator = generator.apply(__this, __arguments)).next()); | ||
| }); | ||
| }; | ||
| // src/index.ts | ||
| var _dataurls = require('@readme/data-urls'); | ||
| if (!globalThis.Blob) { | ||
| try { | ||
| globalThis.Blob = __require("buffer").Blob; | ||
| } catch (e) { | ||
| throw new Error("The Blob API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/Blob"); | ||
| } | ||
| } | ||
| if (!globalThis.File) { | ||
| try { | ||
| globalThis.File = __require("undici").File; | ||
| } catch (e) { | ||
| throw new Error("The File API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/File"); | ||
| } | ||
| } | ||
| import { parse as parseDataUrl } from "@readme/data-urls"; | ||
| function isBrowser() { | ||
@@ -65,137 +64,159 @@ return typeof window !== "undefined" && typeof document !== "undefined"; | ||
| } | ||
| function fetchHAR(har, opts = {}) { | ||
| var _a, _b, _c, _d, _e; | ||
| if (!har) | ||
| throw new Error("Missing HAR definition"); | ||
| if (!har.log || !har.log.entries || !har.log.entries.length) | ||
| throw new Error("Missing log.entries array"); | ||
| const { request } = har.log.entries[0]; | ||
| const { url } = request; | ||
| let querystring = ""; | ||
| let shouldSetDuplex = false; | ||
| const options = __spreadProps(__spreadValues({}, opts.init ? opts.init : {}), { | ||
| method: request.method | ||
| }); | ||
| if (!options.headers) { | ||
| options.headers = new Headers(); | ||
| } else if (typeof options.headers === "object" && !(options.headers instanceof Headers) && options.headers !== null) { | ||
| options.headers = new Headers(options.headers); | ||
| } | ||
| const headers = options.headers; | ||
| if ("headers" in request && request.headers.length) { | ||
| request.headers.forEach((header) => { | ||
| function fetchHAR(_0) { | ||
| return __async(this, arguments, function* (har, opts = {}) { | ||
| var _a, _b, _c, _d, _e; | ||
| if (!har) | ||
| throw new Error("Missing HAR definition"); | ||
| if (!har.log || !har.log.entries || !har.log.entries.length) | ||
| throw new Error("Missing log.entries array"); | ||
| if (!globalThis.Blob) { | ||
| try { | ||
| return headers.append(header.name, header.value); | ||
| } catch (err) { | ||
| const NodeBlob = (yield import("buffer")).Blob; | ||
| globalThis.Blob = NodeBlob; | ||
| } catch (e) { | ||
| throw new Error( | ||
| "The Blob API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/Blob" | ||
| ); | ||
| } | ||
| } | ||
| if (!globalThis.File) { | ||
| try { | ||
| const UndiciFile = (yield import("undici")).File; | ||
| globalThis.File = UndiciFile; | ||
| } catch (e) { | ||
| throw new Error( | ||
| "The File API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/File" | ||
| ); | ||
| } | ||
| } | ||
| const { request } = har.log.entries[0]; | ||
| const { url } = request; | ||
| let querystring = ""; | ||
| let shouldSetDuplex = false; | ||
| const options = __spreadProps(__spreadValues({}, opts.init ? opts.init : {}), { | ||
| method: request.method | ||
| }); | ||
| } | ||
| if ("cookies" in request && request.cookies.length) { | ||
| if (isBrowser()) { | ||
| request.cookies.forEach((cookie) => { | ||
| document.cookie = `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`; | ||
| if (!options.headers) { | ||
| options.headers = new Headers(); | ||
| } else if (typeof options.headers === "object" && !(options.headers instanceof Headers) && options.headers !== null) { | ||
| options.headers = new Headers(options.headers); | ||
| } | ||
| const headers = options.headers; | ||
| if ("headers" in request && request.headers.length) { | ||
| request.headers.forEach((header) => { | ||
| try { | ||
| return headers.append(header.name, header.value); | ||
| } catch (err) { | ||
| } | ||
| }); | ||
| options.credentials = "include"; | ||
| } else { | ||
| headers.append( | ||
| "cookie", | ||
| request.cookies.map((cookie) => `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`).join("; ") | ||
| ); | ||
| } | ||
| } | ||
| if ("postData" in request) { | ||
| if (request.postData && "params" in request.postData) { | ||
| if (!("mimeType" in request.postData)) { | ||
| request.postData.mimeType = "application/octet-stream"; | ||
| if ("cookies" in request && request.cookies.length) { | ||
| if (isBrowser()) { | ||
| request.cookies.forEach((cookie) => { | ||
| document.cookie = `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`; | ||
| }); | ||
| options.credentials = "include"; | ||
| } else { | ||
| headers.append( | ||
| "cookie", | ||
| request.cookies.map((cookie) => `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`).join("; ") | ||
| ); | ||
| } | ||
| switch (request.postData.mimeType) { | ||
| case "application/x-www-form-urlencoded": | ||
| headers.set("Content-Type", request.postData.mimeType); | ||
| const encodedParams = new URLSearchParams(); | ||
| (_a = request.postData.params) == null ? void 0 : _a.forEach((param) => { | ||
| if (param.value) | ||
| encodedParams.set(param.name, param.value); | ||
| }); | ||
| options.body = encodedParams.toString(); | ||
| break; | ||
| case "multipart/alternative": | ||
| case "multipart/form-data": | ||
| case "multipart/mixed": | ||
| case "multipart/related": | ||
| if (headers.has("Content-Type")) { | ||
| headers.delete("Content-Type"); | ||
| } | ||
| const form = new FormData(); | ||
| (_b = request.postData.params) == null ? void 0 : _b.forEach((param) => { | ||
| if ("fileName" in param && param.fileName) { | ||
| if (opts.files) { | ||
| const fileContents = getFileFromSuppliedFiles(param.fileName, opts.files); | ||
| if (fileContents) { | ||
| if (isBuffer(fileContents)) { | ||
| form.append( | ||
| param.name, | ||
| new File([fileContents], param.fileName, { | ||
| type: param.contentType || void 0 | ||
| }), | ||
| param.fileName | ||
| } | ||
| if ("postData" in request) { | ||
| if (request.postData && "params" in request.postData) { | ||
| if (!("mimeType" in request.postData)) { | ||
| request.postData.mimeType = "application/octet-stream"; | ||
| } | ||
| switch (request.postData.mimeType) { | ||
| case "application/x-www-form-urlencoded": | ||
| headers.set("Content-Type", request.postData.mimeType); | ||
| const encodedParams = new URLSearchParams(); | ||
| (_a = request.postData.params) == null ? void 0 : _a.forEach((param) => { | ||
| if (param.value) | ||
| encodedParams.set(param.name, param.value); | ||
| }); | ||
| options.body = encodedParams.toString(); | ||
| break; | ||
| case "multipart/alternative": | ||
| case "multipart/form-data": | ||
| case "multipart/mixed": | ||
| case "multipart/related": | ||
| if (headers.has("Content-Type")) { | ||
| headers.delete("Content-Type"); | ||
| } | ||
| const form = new FormData(); | ||
| (_b = request.postData.params) == null ? void 0 : _b.forEach((param) => { | ||
| if ("fileName" in param && param.fileName) { | ||
| if (opts.files) { | ||
| const fileContents = getFileFromSuppliedFiles(param.fileName, opts.files); | ||
| if (fileContents) { | ||
| if (isBuffer(fileContents)) { | ||
| form.append( | ||
| param.name, | ||
| new File([fileContents], param.fileName, { | ||
| type: param.contentType || void 0 | ||
| }), | ||
| param.fileName | ||
| ); | ||
| return; | ||
| } else if (isFile(fileContents)) { | ||
| form.append(param.name, fileContents, param.fileName); | ||
| return; | ||
| } | ||
| throw new TypeError( | ||
| "An unknown object has been supplied into the `files` config for use. We only support instances of the File API and Node Buffer objects." | ||
| ); | ||
| return; | ||
| } else if (isFile(fileContents)) { | ||
| form.append(param.name, fileContents, param.fileName); | ||
| return; | ||
| } | ||
| throw new TypeError( | ||
| "An unknown object has been supplied into the `files` config for use. We only support instances of the File API and Node Buffer objects." | ||
| ); | ||
| } | ||
| } | ||
| if ("value" in param && param.value) { | ||
| let paramBlob; | ||
| const parsed = _dataurls.parse.call(void 0, param.value); | ||
| if (parsed) { | ||
| paramBlob = new Blob([param.value], { type: parsed.contentType || param.contentType || void 0 }); | ||
| } else { | ||
| paramBlob = new Blob([param.value], { type: param.contentType || void 0 }); | ||
| if ("value" in param && param.value) { | ||
| let paramBlob; | ||
| const parsed = parseDataUrl(param.value); | ||
| if (parsed) { | ||
| paramBlob = new Blob([param.value], { type: parsed.contentType || param.contentType || void 0 }); | ||
| } else { | ||
| paramBlob = new Blob([param.value], { type: param.contentType || void 0 }); | ||
| } | ||
| form.append(param.name, paramBlob, param.fileName); | ||
| return; | ||
| } | ||
| form.append(param.name, paramBlob, param.fileName); | ||
| return; | ||
| throw new Error( | ||
| "The supplied HAR has a postData parameter with `fileName`, but neither `value` content within the HAR or any file buffers were supplied with the `files` option. Since this library doesn't have access to the filesystem, it can't fetch that file." | ||
| ); | ||
| } | ||
| throw new Error( | ||
| "The supplied HAR has a postData parameter with `fileName`, but neither `value` content within the HAR or any file buffers were supplied with the `files` option. Since this library doesn't have access to the filesystem, it can't fetch that file." | ||
| ); | ||
| } | ||
| if (param.value) | ||
| form.append(param.name, param.value); | ||
| }); | ||
| options.body = form; | ||
| break; | ||
| default: | ||
| const formBody = {}; | ||
| (_c = request.postData.params) == null ? void 0 : _c.map((param) => { | ||
| try { | ||
| formBody[param.name] = JSON.parse(param.value || ""); | ||
| } catch (e) { | ||
| formBody[param.name] = param.value; | ||
| } | ||
| return true; | ||
| }); | ||
| options.body = JSON.stringify(formBody); | ||
| } | ||
| } else if ((_e = (_d = request.postData) == null ? void 0 : _d.text) == null ? void 0 : _e.length) { | ||
| if (opts.files) { | ||
| const parsed = _dataurls.parse.call(void 0, request.postData.text); | ||
| if (parsed) { | ||
| if ((parsed == null ? void 0 : parsed.name) && parsed.name in opts.files) { | ||
| const fileContents = getFileFromSuppliedFiles(parsed.name, opts.files); | ||
| if (fileContents) { | ||
| if (isBuffer(fileContents)) { | ||
| options.body = fileContents; | ||
| } else if (isFile(fileContents)) { | ||
| if (isBrowser()) { | ||
| if (param.value) | ||
| form.append(param.name, param.value); | ||
| }); | ||
| options.body = form; | ||
| break; | ||
| default: | ||
| const formBody = {}; | ||
| (_c = request.postData.params) == null ? void 0 : _c.map((param) => { | ||
| try { | ||
| formBody[param.name] = JSON.parse(param.value || ""); | ||
| } catch (e) { | ||
| formBody[param.name] = param.value; | ||
| } | ||
| return true; | ||
| }); | ||
| options.body = JSON.stringify(formBody); | ||
| } | ||
| } else if ((_e = (_d = request.postData) == null ? void 0 : _d.text) == null ? void 0 : _e.length) { | ||
| if (opts.files) { | ||
| const parsed = parseDataUrl(request.postData.text); | ||
| if (parsed) { | ||
| if ((parsed == null ? void 0 : parsed.name) && parsed.name in opts.files) { | ||
| const fileContents = getFileFromSuppliedFiles(parsed.name, opts.files); | ||
| if (fileContents) { | ||
| if (isBuffer(fileContents)) { | ||
| options.body = fileContents; | ||
| } else { | ||
| options.body = fileContents.stream(); | ||
| shouldSetDuplex = true; | ||
| if (!headers.has("content-length")) { | ||
| headers.set("content-length", String(fileContents.size)); | ||
| } else if (isFile(fileContents)) { | ||
| if (isBrowser()) { | ||
| options.body = fileContents; | ||
| } else { | ||
| options.body = fileContents.stream(); | ||
| shouldSetDuplex = true; | ||
| if (!headers.has("content-length")) { | ||
| headers.set("content-length", String(fileContents.size)); | ||
| } | ||
| } | ||
@@ -207,37 +228,36 @@ } | ||
| } | ||
| if (typeof options.body === "undefined") { | ||
| options.body = request.postData.text; | ||
| } | ||
| } | ||
| if (typeof options.body === "undefined") { | ||
| options.body = request.postData.text; | ||
| if (shouldSetDuplex && !isBrowser()) { | ||
| options.duplex = "half"; | ||
| } | ||
| } | ||
| if (shouldSetDuplex && !isBrowser()) { | ||
| options.duplex = "half"; | ||
| let requestURL = url; | ||
| if ("queryString" in request && request.queryString.length) { | ||
| const urlObj = new URL(requestURL); | ||
| const queryParams = Array.from(urlObj.searchParams).map(([k, v]) => `${k}=${v}`); | ||
| request.queryString.forEach((q) => { | ||
| queryParams.push(`${q.name}=${q.value}`); | ||
| }); | ||
| querystring = queryParams.join("&"); | ||
| if (urlObj.hash) { | ||
| const urlWithoutHashes = requestURL.replace(urlObj.hash, ""); | ||
| requestURL = `${urlWithoutHashes.split("?")[0]}${querystring ? `?${querystring}` : ""}`; | ||
| requestURL += urlObj.hash; | ||
| } else { | ||
| requestURL = `${requestURL.split("?")[0]}${querystring ? `?${querystring}` : ""}`; | ||
| } | ||
| } | ||
| } | ||
| let requestURL = url; | ||
| if ("queryString" in request && request.queryString.length) { | ||
| const urlObj = new URL(requestURL); | ||
| const queryParams = Array.from(urlObj.searchParams).map(([k, v]) => `${k}=${v}`); | ||
| request.queryString.forEach((q) => { | ||
| queryParams.push(`${q.name}=${q.value}`); | ||
| }); | ||
| querystring = queryParams.join("&"); | ||
| if (urlObj.hash) { | ||
| const urlWithoutHashes = requestURL.replace(urlObj.hash, ""); | ||
| requestURL = `${urlWithoutHashes.split("?")[0]}${querystring ? `?${querystring}` : ""}`; | ||
| requestURL += urlObj.hash; | ||
| } else { | ||
| requestURL = `${requestURL.split("?")[0]}${querystring ? `?${querystring}` : ""}`; | ||
| if (opts.userAgent) { | ||
| headers.append("User-Agent", opts.userAgent); | ||
| } | ||
| } | ||
| if (opts.userAgent) { | ||
| headers.append("User-Agent", opts.userAgent); | ||
| } | ||
| options.headers = headers; | ||
| return fetch(requestURL, options); | ||
| options.headers = headers; | ||
| return fetch(requestURL, options); | ||
| }); | ||
| } | ||
| exports.default = fetchHAR; | ||
| module.exports = exports.default//# sourceMappingURL=index.js.map | ||
| export { | ||
| fetchHAR as default | ||
| }; | ||
| //# sourceMappingURL=index.js.map |
@@ -1,1 +0,1 @@ | ||
| {"version":3,"sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAIA,SAAS,SAAS,oBAAoB;AAEtC,IAAI,CAAC,WAAW,MAAM;AACpB,MAAI;AAEF,eAAW,OAAO,UAAQ,QAAa,EAAE;AAAA,EAC3C,SAAS,GAAG;AACV,UAAM,IAAI,MAAM,kGAAkG;AAAA,EACpH;AACF;AAEA,IAAI,CAAC,WAAW,MAAM;AACpB,MAAI;AAIF,eAAW,OAAO,UAAQ,QAAQ,EAAE;AAAA,EACtC,SAAS,GAAG;AACV,UAAM,IAAI,MAAM,kGAAkG;AAAA,EACpH;AACF;AAQA,SAAS,YAAY;AACnB,SAAO,OAAO,WAAW,eAAe,OAAO,aAAa;AAC9D;AAEA,SAAS,SAAS,OAAY;AAC5B,SAAO,OAAO,WAAW,eAAe,OAAO,SAAS,KAAK;AAC/D;AAEA,SAAS,OAAO,OAAY;AAC1B,MAAI,iBAAiB,MAAM;AAOzB,WAAO,MAAM,YAAY,SAAS;AAAA,EACpC;AAEA,SAAO;AACT;AAEA,SAAS,yBAAyB,UAAkB,OAAiC;AACnF,MAAI,SAAS,YAAY,OAAO;AAC9B,WAAO,MAAM,QAAQ;AAAA,EACvB,WAAW,SAAS,mBAAmB,QAAQ,KAAK,OAAO;AACzD,WAAO,MAAM,mBAAmB,QAAQ,CAAC;AAAA,EAC3C;AAEA,SAAO;AACT;AAEe,SAAR,SAA0B,KAAU,OAAwB,CAAC,GAAG;AAhEvE;AAiEE,MAAI,CAAC;AAAK,UAAM,IAAI,MAAM,wBAAwB;AAClD,MAAI,CAAC,IAAI,OAAO,CAAC,IAAI,IAAI,WAAW,CAAC,IAAI,IAAI,QAAQ;AAAQ,UAAM,IAAI,MAAM,2BAA2B;AAExG,QAAM,EAAE,QAAQ,IAAI,IAAI,IAAI,QAAQ,CAAC;AACrC,QAAM,EAAE,IAAI,IAAI;AAChB,MAAI,cAAc;AAClB,MAAI,kBAAkB;AAEtB,QAAM,UAAiC,iCAGjC,KAAK,OAAO,KAAK,OAAO,CAAC,IAHQ;AAAA,IAIrC,QAAQ,QAAQ;AAAA,EAClB;AAEA,MAAI,CAAC,QAAQ,SAAS;AACpB,YAAQ,UAAU,IAAI,QAAQ;AAAA,EAChC,WAAW,OAAO,QAAQ,YAAY,YAAY,EAAE,QAAQ,mBAAmB,YAAY,QAAQ,YAAY,MAAM;AACnH,YAAQ,UAAU,IAAI,QAAQ,QAAQ,OAAO;AAAA,EAC/C;AAEA,QAAM,UAAU,QAAQ;AACxB,MAAI,aAAa,WAAW,QAAQ,QAAQ,QAAQ;AAElD,YAAQ,QAAQ,QAAQ,YAAU;AAChC,UAAI;AACF,eAAO,QAAQ,OAAO,OAAO,MAAM,OAAO,KAAK;AAAA,MACjD,SAAS,KAAK;AAAA,MAOd;AAAA,IACF,CAAC;AAAA,EACH;AAEA,MAAI,aAAa,WAAW,QAAQ,QAAQ,QAAQ;AAOlD,QAAI,UAAU,GAAG;AACf,cAAQ,QAAQ,QAAQ,YAAU;AAChC,iBAAS,SAAS,GAAG,mBAAmB,OAAO,IAAI,CAAC,IAAI,mBAAmB,OAAO,KAAK,CAAC;AAAA,MAC1F,CAAC;AAED,cAAQ,cAAc;AAAA,IACxB,OAAO;AACL,cAAQ;AAAA,QACN;AAAA,QACA,QAAQ,QACL,IAAI,YAAU,GAAG,mBAAmB,OAAO,IAAI,CAAC,IAAI,mBAAmB,OAAO,KAAK,CAAC,EAAE,EACtF,KAAK,IAAI;AAAA,MACd;AAAA,IACF;AAAA,EACF;AAEA,MAAI,cAAc,SAAS;AACzB,QAAI,QAAQ,YAAY,YAAY,QAAQ,UAAU;AACpD,UAAI,EAAE,cAAc,QAAQ,WAAW;AAErC,gBAAQ,SAAS,WAAW;AAAA,MAC9B;AAEA,cAAQ,QAAQ,SAA
S,UAAU;AAAA,QACjC,KAAK;AASH,kBAAQ,IAAI,gBAAgB,QAAQ,SAAS,QAAQ;AAErD,gBAAM,gBAAgB,IAAI,gBAAgB;AAC1C,wBAAQ,SAAS,WAAjB,mBAAyB,QAAQ,WAAS;AACxC,gBAAI,MAAM;AAAO,4BAAc,IAAI,MAAM,MAAM,MAAM,KAAK;AAAA,UAC5D;AAEA,kBAAQ,OAAO,cAAc,SAAS;AACtC;AAAA,QAEF,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AASH,cAAI,QAAQ,IAAI,cAAc,GAAG;AAC/B,oBAAQ,OAAO,cAAc;AAAA,UAC/B;AAEA,gBAAM,OAAO,IAAI,SAAS;AAE1B,wBAAQ,SAAS,WAAjB,mBAAyB,QAAQ,WAAS;AACxC,gBAAI,cAAc,SAAS,MAAM,UAAU;AACzC,kBAAI,KAAK,OAAO;AACd,sBAAM,eAAe,yBAAyB,MAAM,UAAU,KAAK,KAAK;AACxE,oBAAI,cAAc;AAGhB,sBAAI,SAAS,YAAY,GAAG;AAC1B,yBAAK;AAAA,sBACH,MAAM;AAAA,sBACN,IAAI,KAAK,CAAC,YAAY,GAAG,MAAM,UAAU;AAAA,wBACvC,MAAM,MAAM,eAAe;AAAA,sBAC7B,CAAC;AAAA,sBACD,MAAM;AAAA,oBACR;AAEA;AAAA,kBACF,WAAW,OAAO,YAAY,GAAG;AAC/B,yBAAK,OAAO,MAAM,MAAM,cAAsB,MAAM,QAAQ;AAC5D;AAAA,kBACF;AAEA,wBAAM,IAAI;AAAA,oBACR;AAAA,kBACF;AAAA,gBACF;AAAA,cACF;AAEA,kBAAI,WAAW,SAAS,MAAM,OAAO;AACnC,oBAAI;AACJ,sBAAM,SAAS,aAAa,MAAM,KAAK;AACvC,oBAAI,QAAQ;AAGV,8BAAY,IAAI,KAAK,CAAC,MAAM,KAAK,GAAG,EAAE,MAAM,OAAO,eAAe,MAAM,eAAe,OAAU,CAAC;AAAA,gBACpG,OAAO;AACL,8BAAY,IAAI,KAAK,CAAC,MAAM,KAAK,GAAG,EAAE,MAAM,MAAM,eAAe,OAAU,CAAC;AAAA,gBAC9E;AAEA,qBAAK,OAAO,MAAM,MAAM,WAAW,MAAM,QAAQ;AACjD;AAAA,cACF;AAEA,oBAAM,IAAI;AAAA,gBACR;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,MAAM;AAAO,mBAAK,OAAO,MAAM,MAAM,MAAM,KAAK;AAAA,UACtD;AAEA,kBAAQ,OAAO;AACf;AAAA,QAEF;AACE,gBAAM,WAAoC,CAAC;AAC3C,wBAAQ,SAAS,WAAjB,mBAAyB,IAAI,WAAS;AACpC,gBAAI;AACF,uBAAS,MAAM,IAAI,IAAI,KAAK,MAAM,MAAM,SAAS,EAAE;AAAA,YACrD,SAAS,GAAG;AACV,uBAAS,MAAM,IAAI,IAAI,MAAM;AAAA,YAC/B;AAEA,mBAAO;AAAA,UACT;AAEA,kBAAQ,OAAO,KAAK,UAAU,QAAQ;AAAA,MAC1C;AAAA,IACF,YAAW,mBAAQ,aAAR,mBAAkB,SAAlB,mBAAwB,QAAQ;AAGzC,UAAI,KAAK,OAAO;AACd,cAAM,SAAS,aAAa,QAAQ,SAAS,IAAI;AACjD,YAAI,QAAQ;AACV,eAAI,iCAAQ,SAAQ,OAAO,QAAQ,KAAK,OAAO;AAC7C,kBAAM,eAAe,yBAAyB,OAAO,MAAM,KAAK,KAAK;AACrE,gBAAI,cAAc;AAChB,kBAAI,SAAS,YAAY,GAAG;AAC1B,wBAAQ,OAAO;AAAA,cACjB,WAAW,OAAO,YAAY,GAAG;AAG/B,oBAAI,UAAU,GAAG;AACf,0BAAQ,OAAO;AAAA,gBACjB,OAAO;AACL,0BAAQ,OAAQ,aAAsB,OAAO;AAC7C,oCAAkB;AAIlB,sBAAI,CAAC,QAAQ,IAAI
,gBAAgB,GAAG;AAClC,4BAAQ,IAAI,kBAAkB,OAAQ,aAAsB,IAAI,CAAC;AAAA,kBACnE;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,UAAI,OAAO,QAAQ,SAAS,aAAa;AACvC,gBAAQ,OAAO,QAAQ,SAAS;AAAA,MAClC;AAAA,IACF;AAcA,QAAI,mBAAmB,CAAC,UAAU,GAAG;AACnC,cAAQ,SAAS;AAAA,IACnB;AAAA,EACF;AAIA,MAAI,aAAa;AACjB,MAAI,iBAAiB,WAAW,QAAQ,YAAY,QAAQ;AAC1D,UAAM,SAAS,IAAI,IAAI,UAAU;AAEjC,UAAM,cAAc,MAAM,KAAK,OAAO,YAAY,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,EAAE;AAC/E,YAAQ,YAAY,QAAQ,OAAK;AAC/B,kBAAY,KAAK,GAAG,EAAE,IAAI,IAAI,EAAE,KAAK,EAAE;AAAA,IACzC,CAAC;AAED,kBAAc,YAAY,KAAK,GAAG;AAIlC,QAAI,OAAO,MAAM;AACf,YAAM,mBAAmB,WAAW,QAAQ,OAAO,MAAM,EAAE;AAC3D,mBAAa,GAAG,iBAAiB,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,cAAc,IAAI,WAAW,KAAK,EAAE;AACrF,oBAAc,OAAO;AAAA,IACvB,OAAO;AACL,mBAAa,GAAG,WAAW,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,cAAc,IAAI,WAAW,KAAK,EAAE;AAAA,IACjF;AAAA,EACF;AAEA,MAAI,KAAK,WAAW;AAClB,YAAQ,OAAO,cAAc,KAAK,SAAS;AAAA,EAC7C;AAEA,UAAQ,UAAU;AAElB,SAAO,MAAM,YAAY,OAAO;AAClC","sourcesContent":["import type { FetchHAROptions, RequestInitWithDuplex } from './types';\nimport type { DataURL as npmDataURL } from '@readme/data-urls';\nimport type { Har } from 'har-format';\n\nimport { parse as parseDataUrl } from '@readme/data-urls';\n\nif (!globalThis.Blob) {\n try {\n // eslint-disable-next-line @typescript-eslint/no-var-requires\n globalThis.Blob = require('node:buffer').Blob;\n } catch (e) {\n throw new Error('The Blob API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/Blob');\n }\n}\n\nif (!globalThis.File) {\n try {\n // Node's native `fetch` implementation unfortunately does not make this API global so we need\n // to pull it in if we don't have it.\n // eslint-disable-next-line @typescript-eslint/no-var-requires\n globalThis.File = require('undici').File;\n } catch (e) {\n throw new Error('The File API is required for this library. 
https://developer.mozilla.org/en-US/docs/Web/API/File');\n }\n}\n\ntype DataURL = npmDataURL & {\n // `parse-data-url` doesn't explicitly support `name` in data URLs but if it's there it'll be\n // returned back to us.\n name?: string;\n};\n\nfunction isBrowser() {\n return typeof window !== 'undefined' && typeof document !== 'undefined';\n}\n\nfunction isBuffer(value: any) {\n return typeof Buffer !== 'undefined' && Buffer.isBuffer(value);\n}\n\nfunction isFile(value: any) {\n if (value instanceof File) {\n /**\n * The `Blob` polyfill on Node comes back as being an instanceof `File`. Because passing a Blob\n * into a File will end up with a corrupted file we want to prevent this.\n *\n * This object identity crisis does not happen in the browser.\n */\n return value.constructor.name === 'File';\n }\n\n return false;\n}\n\nfunction getFileFromSuppliedFiles(filename: string, files: FetchHAROptions['files']) {\n if (files && filename in files) {\n return files[filename];\n } else if (files && decodeURIComponent(filename) in files) {\n return files[decodeURIComponent(filename)];\n }\n\n return false;\n}\n\nexport default function fetchHAR(har: Har, opts: FetchHAROptions = {}) {\n if (!har) throw new Error('Missing HAR definition');\n if (!har.log || !har.log.entries || !har.log.entries.length) throw new Error('Missing log.entries array');\n\n const { request } = har.log.entries[0];\n const { url } = request;\n let querystring = '';\n let shouldSetDuplex = false;\n\n const options: RequestInitWithDuplex = {\n // If we have custom options for the `Request` API we need to add them in here now before we\n // fill it in with everything we need from the HAR.\n ...(opts.init ? 
opts.init : {}),\n method: request.method,\n };\n\n if (!options.headers) {\n options.headers = new Headers();\n } else if (typeof options.headers === 'object' && !(options.headers instanceof Headers) && options.headers !== null) {\n options.headers = new Headers(options.headers);\n }\n\n const headers = options.headers as Headers;\n if ('headers' in request && request.headers.length) {\n // eslint-disable-next-line consistent-return\n request.headers.forEach(header => {\n try {\n return headers.append(header.name, header.value);\n } catch (err) {\n /**\n * `Headers.append()` will throw errors if the header name is not a legal HTTP header name,\n * like `X-API-KEY (Header)`. If that happens instead of tossing the error back out, we\n * should silently just ignore\n * it.\n */\n }\n });\n }\n\n if ('cookies' in request && request.cookies.length) {\n /**\n * As the browser fetch API can't set custom cookies for requests, they instead need to be\n * defined on the document and passed into the request via `credentials: include`. 
Since this\n * is a browser-specific quirk, that should only\n * happen in browsers!\n */\n if (isBrowser()) {\n request.cookies.forEach(cookie => {\n document.cookie = `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`;\n });\n\n options.credentials = 'include';\n } else {\n headers.append(\n 'cookie',\n request.cookies\n .map(cookie => `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`)\n .join('; '),\n );\n }\n }\n\n if ('postData' in request) {\n if (request.postData && 'params' in request.postData) {\n if (!('mimeType' in request.postData)) {\n // @ts-expect-error HAR spec requires that `mimeType` is always present but it might not be.\n request.postData.mimeType = 'application/octet-stream';\n }\n\n switch (request.postData.mimeType) {\n case 'application/x-www-form-urlencoded':\n /**\n * Since the content we're handling here is to be encoded as\n * `application/x-www-form-urlencoded`, this should override any other `Content-Type`\n * headers that are present in the HAR. This is how Postman handles this case when\n * building code snippets!\n *\n * @see {@link https://github.com/github/fetch/issues/263#issuecomment-209530977}\n */\n headers.set('Content-Type', request.postData.mimeType);\n\n const encodedParams = new URLSearchParams();\n request.postData.params?.forEach(param => {\n if (param.value) encodedParams.set(param.name, param.value);\n });\n\n options.body = encodedParams.toString();\n break;\n\n case 'multipart/alternative':\n case 'multipart/form-data':\n case 'multipart/mixed':\n case 'multipart/related':\n /**\n * If there's a `Content-Type` header set we need to remove it. 
We're doing this because\n * when we pass the form data object into `fetch` that'll set a proper `Content-Type`\n * header for this request that also includes the boundary used on the content.\n *\n * If we don't do this, then consumers won't be able to parse out the payload because\n * they won't know what the boundary to split on it.\n */\n if (headers.has('Content-Type')) {\n headers.delete('Content-Type');\n }\n\n const form = new FormData();\n\n request.postData.params?.forEach(param => {\n if ('fileName' in param && param.fileName) {\n if (opts.files) {\n const fileContents = getFileFromSuppliedFiles(param.fileName, opts.files);\n if (fileContents) {\n // If the file we've got available to us is a Buffer then we need to convert it so\n // that the FormData API can use it.\n if (isBuffer(fileContents)) {\n form.append(\n param.name,\n new File([fileContents], param.fileName, {\n type: param.contentType || undefined,\n }),\n param.fileName,\n );\n\n return;\n } else if (isFile(fileContents)) {\n form.append(param.name, fileContents as Blob, param.fileName);\n return;\n }\n\n throw new TypeError(\n 'An unknown object has been supplied into the `files` config for use. 
We only support instances of the File API and Node Buffer objects.',\n );\n }\n }\n\n if ('value' in param && param.value) {\n let paramBlob;\n const parsed = parseDataUrl(param.value);\n if (parsed) {\n // If we were able to parse out this data URL we don't need to transform its data\n // into a buffer for `Blob` because that supports data URLs already.\n paramBlob = new Blob([param.value], { type: parsed.contentType || param.contentType || undefined });\n } else {\n paramBlob = new Blob([param.value], { type: param.contentType || undefined });\n }\n\n form.append(param.name, paramBlob, param.fileName);\n return;\n }\n\n throw new Error(\n \"The supplied HAR has a postData parameter with `fileName`, but neither `value` content within the HAR or any file buffers were supplied with the `files` option. Since this library doesn't have access to the filesystem, it can't fetch that file.\",\n );\n }\n\n if (param.value) form.append(param.name, param.value);\n });\n\n options.body = form;\n break;\n\n default:\n const formBody: Record<string, unknown> = {};\n request.postData.params?.map(param => {\n try {\n formBody[param.name] = JSON.parse(param.value || '');\n } catch (e) {\n formBody[param.name] = param.value;\n }\n\n return true;\n });\n\n options.body = JSON.stringify(formBody);\n }\n } else if (request.postData?.text?.length) {\n // If we've got `files` map content present, and this post data content contains a valid data\n // URL then we can substitute the payload with that file instead of the using data URL.\n if (opts.files) {\n const parsed = parseDataUrl(request.postData.text) as DataURL;\n if (parsed) {\n if (parsed?.name && parsed.name in opts.files) {\n const fileContents = getFileFromSuppliedFiles(parsed.name, opts.files);\n if (fileContents) {\n if (isBuffer(fileContents)) {\n options.body = fileContents;\n } else if (isFile(fileContents)) {\n // `Readable.from` isn't available in browsers but the browser `Request` object can\n // handle `File` objects 
just fine without us having to mold it into shape.\n if (isBrowser()) {\n options.body = fileContents;\n } else {\n options.body = (fileContents as File).stream();\n shouldSetDuplex = true;\n\n // Supplying a polyfilled `File` stream into `Request.body` doesn't automatically\n // add `Content-Length`.\n if (!headers.has('content-length')) {\n headers.set('content-length', String((fileContents as File).size));\n }\n }\n }\n }\n }\n }\n }\n\n if (typeof options.body === 'undefined') {\n options.body = request.postData.text;\n }\n }\n\n /**\n * The fetch spec, which Node 18+ strictly abides by, now requires that `duplex` be sent with\n * requests that have payloads.\n *\n * As `RequestInit#duplex` isn't supported by any browsers, or even mentioned on MDN, we aren't\n * sending it in browser environments. This work is purely to support Node 18+ and `undici`\n * environments.\n *\n * @see {@link https://github.com/nodejs/node/issues/46221}\n * @see {@link https://github.com/whatwg/fetch/pull/1457}\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Request/Request}\n */\n if (shouldSetDuplex && !isBrowser()) {\n options.duplex = 'half';\n }\n }\n\n // We automaticaly assume that the HAR that we have already has query parameters encoded within\n // it so we do **not** use the `URLSearchParams` API here for composing the query string.\n let requestURL = url;\n if ('queryString' in request && request.queryString.length) {\n const urlObj = new URL(requestURL);\n\n const queryParams = Array.from(urlObj.searchParams).map(([k, v]) => `${k}=${v}`);\n request.queryString.forEach(q => {\n queryParams.push(`${q.name}=${q.value}`);\n });\n\n querystring = queryParams.join('&');\n\n // Because anchor hashes before query strings will prevent query strings from being delivered\n // we need to pop them off and re-add them after.\n if (urlObj.hash) {\n const urlWithoutHashes = requestURL.replace(urlObj.hash, '');\n requestURL = 
`${urlWithoutHashes.split('?')[0]}${querystring ? `?${querystring}` : ''}`;\n requestURL += urlObj.hash;\n } else {\n requestURL = `${requestURL.split('?')[0]}${querystring ? `?${querystring}` : ''}`;\n }\n }\n\n if (opts.userAgent) {\n headers.append('User-Agent', opts.userAgent);\n }\n\n options.headers = headers;\n\n return fetch(requestURL, options);\n}\n"]} | ||
| {"version":3,"sources":["../src/index.ts"],"sourcesContent":["import type { FetchHAROptions, RequestInitWithDuplex } from './types.js';\nimport type { DataURL as npmDataURL } from '@readme/data-urls';\nimport type { Har } from 'har-format';\n\nimport { parse as parseDataUrl } from '@readme/data-urls';\n\ntype DataURL = npmDataURL & {\n // `parse-data-url` doesn't explicitly support `name` in data URLs but if it's there it'll be\n // returned back to us.\n name?: string;\n};\n\nfunction isBrowser() {\n return typeof window !== 'undefined' && typeof document !== 'undefined';\n}\n\nfunction isBuffer(value: any) {\n return typeof Buffer !== 'undefined' && Buffer.isBuffer(value);\n}\n\nfunction isFile(value: any) {\n if (value instanceof File) {\n /**\n * The `Blob` polyfill on Node comes back as being an instanceof `File`. Because passing a Blob\n * into a File will end up with a corrupted file we want to prevent this.\n *\n * This object identity crisis does not happen in the browser.\n */\n return value.constructor.name === 'File';\n }\n\n return false;\n}\n\nfunction getFileFromSuppliedFiles(filename: string, files: FetchHAROptions['files']) {\n if (files && filename in files) {\n return files[filename];\n } else if (files && decodeURIComponent(filename) in files) {\n return files[decodeURIComponent(filename)];\n }\n\n return false;\n}\n\nexport default async function fetchHAR(har: Har, opts: FetchHAROptions = {}): Promise<Response> {\n if (!har) throw new Error('Missing HAR definition');\n if (!har.log || !har.log.entries || !har.log.entries.length) throw new Error('Missing log.entries array');\n\n if (!globalThis.Blob) {\n try {\n const NodeBlob = (await import('node:buffer')).Blob;\n // @ts-expect-error the types don't match exactly, which is expected!\n globalThis.Blob = NodeBlob;\n } catch (e) {\n throw new Error(\n 'The Blob API is required for this library. 
https://developer.mozilla.org/en-US/docs/Web/API/Blob',\n );\n }\n }\n\n if (!globalThis.File) {\n try {\n // Node's native `fetch` implementation unfortunately does not make this API global so we need\n // to pull it in if we don't have it.\n const UndiciFile = (await import('undici')).File;\n // @ts-expect-error the types don't match exactly, which is expected!\n globalThis.File = UndiciFile;\n } catch (e) {\n throw new Error(\n 'The File API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/File',\n );\n }\n }\n\n const { request } = har.log.entries[0];\n const { url } = request;\n let querystring = '';\n let shouldSetDuplex = false;\n\n const options: RequestInitWithDuplex = {\n // If we have custom options for the `Request` API we need to add them in here now before we\n // fill it in with everything we need from the HAR.\n ...(opts.init ? opts.init : {}),\n method: request.method,\n };\n\n if (!options.headers) {\n options.headers = new Headers();\n } else if (typeof options.headers === 'object' && !(options.headers instanceof Headers) && options.headers !== null) {\n options.headers = new Headers(options.headers);\n }\n\n const headers = options.headers as Headers;\n if ('headers' in request && request.headers.length) {\n // eslint-disable-next-line consistent-return\n request.headers.forEach(header => {\n try {\n return headers.append(header.name, header.value);\n } catch (err) {\n /**\n * `Headers.append()` will throw errors if the header name is not a legal HTTP header name,\n * like `X-API-KEY (Header)`. If that happens instead of tossing the error back out, we\n * should silently just ignore\n * it.\n */\n }\n });\n }\n\n if ('cookies' in request && request.cookies.length) {\n /**\n * As the browser fetch API can't set custom cookies for requests, they instead need to be\n * defined on the document and passed into the request via `credentials: include`. 
Since this\n * is a browser-specific quirk, that should only\n * happen in browsers!\n */\n if (isBrowser()) {\n request.cookies.forEach(cookie => {\n document.cookie = `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`;\n });\n\n options.credentials = 'include';\n } else {\n headers.append(\n 'cookie',\n request.cookies\n .map(cookie => `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`)\n .join('; '),\n );\n }\n }\n\n if ('postData' in request) {\n if (request.postData && 'params' in request.postData) {\n if (!('mimeType' in request.postData)) {\n // @ts-expect-error HAR spec requires that `mimeType` is always present but it might not be.\n request.postData.mimeType = 'application/octet-stream';\n }\n\n switch (request.postData.mimeType) {\n case 'application/x-www-form-urlencoded':\n /**\n * Since the content we're handling here is to be encoded as\n * `application/x-www-form-urlencoded`, this should override any other `Content-Type`\n * headers that are present in the HAR. This is how Postman handles this case when\n * building code snippets!\n *\n * @see {@link https://github.com/github/fetch/issues/263#issuecomment-209530977}\n */\n headers.set('Content-Type', request.postData.mimeType);\n\n const encodedParams = new URLSearchParams();\n request.postData.params?.forEach(param => {\n if (param.value) encodedParams.set(param.name, param.value);\n });\n\n options.body = encodedParams.toString();\n break;\n\n case 'multipart/alternative':\n case 'multipart/form-data':\n case 'multipart/mixed':\n case 'multipart/related':\n /**\n * If there's a `Content-Type` header set we need to remove it. 
We're doing this because\n * when we pass the form data object into `fetch` that'll set a proper `Content-Type`\n * header for this request that also includes the boundary used on the content.\n *\n * If we don't do this, then consumers won't be able to parse out the payload because\n * they won't know what the boundary to split on it.\n */\n if (headers.has('Content-Type')) {\n headers.delete('Content-Type');\n }\n\n const form = new FormData();\n\n request.postData.params?.forEach(param => {\n if ('fileName' in param && param.fileName) {\n if (opts.files) {\n const fileContents = getFileFromSuppliedFiles(param.fileName, opts.files);\n if (fileContents) {\n // If the file we've got available to us is a Buffer then we need to convert it so\n // that the FormData API can use it.\n if (isBuffer(fileContents)) {\n form.append(\n param.name,\n new File([fileContents], param.fileName, {\n type: param.contentType || undefined,\n }),\n param.fileName,\n );\n\n return;\n } else if (isFile(fileContents)) {\n form.append(param.name, fileContents as Blob, param.fileName);\n return;\n }\n\n throw new TypeError(\n 'An unknown object has been supplied into the `files` config for use. 
We only support instances of the File API and Node Buffer objects.',\n );\n }\n }\n\n if ('value' in param && param.value) {\n let paramBlob;\n const parsed = parseDataUrl(param.value);\n if (parsed) {\n // If we were able to parse out this data URL we don't need to transform its data\n // into a buffer for `Blob` because that supports data URLs already.\n paramBlob = new Blob([param.value], { type: parsed.contentType || param.contentType || undefined });\n } else {\n paramBlob = new Blob([param.value], { type: param.contentType || undefined });\n }\n\n form.append(param.name, paramBlob, param.fileName);\n return;\n }\n\n throw new Error(\n \"The supplied HAR has a postData parameter with `fileName`, but neither `value` content within the HAR or any file buffers were supplied with the `files` option. Since this library doesn't have access to the filesystem, it can't fetch that file.\",\n );\n }\n\n if (param.value) form.append(param.name, param.value);\n });\n\n options.body = form;\n break;\n\n default:\n const formBody: Record<string, unknown> = {};\n request.postData.params?.map(param => {\n try {\n formBody[param.name] = JSON.parse(param.value || '');\n } catch (e) {\n formBody[param.name] = param.value;\n }\n\n return true;\n });\n\n options.body = JSON.stringify(formBody);\n }\n } else if (request.postData?.text?.length) {\n // If we've got `files` map content present, and this post data content contains a valid data\n // URL then we can substitute the payload with that file instead of the using data URL.\n if (opts.files) {\n const parsed = parseDataUrl(request.postData.text) as DataURL;\n if (parsed) {\n if (parsed?.name && parsed.name in opts.files) {\n const fileContents = getFileFromSuppliedFiles(parsed.name, opts.files);\n if (fileContents) {\n if (isBuffer(fileContents)) {\n options.body = fileContents;\n } else if (isFile(fileContents)) {\n // `Readable.from` isn't available in browsers but the browser `Request` object can\n // handle `File` objects 
just fine without us having to mold it into shape.\n if (isBrowser()) {\n options.body = fileContents;\n } else {\n options.body = (fileContents as File).stream();\n shouldSetDuplex = true;\n\n // Supplying a polyfilled `File` stream into `Request.body` doesn't automatically\n // add `Content-Length`.\n if (!headers.has('content-length')) {\n headers.set('content-length', String((fileContents as File).size));\n }\n }\n }\n }\n }\n }\n }\n\n if (typeof options.body === 'undefined') {\n options.body = request.postData.text;\n }\n }\n\n /**\n * The fetch spec, which Node 18+ strictly abides by, now requires that `duplex` be sent with\n * requests that have payloads.\n *\n * As `RequestInit#duplex` isn't supported by any browsers, or even mentioned on MDN, we aren't\n * sending it in browser environments. This work is purely to support Node 18+ and `undici`\n * environments.\n *\n * @see {@link https://github.com/nodejs/node/issues/46221}\n * @see {@link https://github.com/whatwg/fetch/pull/1457}\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Request/Request}\n */\n if (shouldSetDuplex && !isBrowser()) {\n options.duplex = 'half';\n }\n }\n\n // We automaticaly assume that the HAR that we have already has query parameters encoded within\n // it so we do **not** use the `URLSearchParams` API here for composing the query string.\n let requestURL = url;\n if ('queryString' in request && request.queryString.length) {\n const urlObj = new URL(requestURL);\n\n const queryParams = Array.from(urlObj.searchParams).map(([k, v]) => `${k}=${v}`);\n request.queryString.forEach(q => {\n queryParams.push(`${q.name}=${q.value}`);\n });\n\n querystring = queryParams.join('&');\n\n // Because anchor hashes before query strings will prevent query strings from being delivered\n // we need to pop them off and re-add them after.\n if (urlObj.hash) {\n const urlWithoutHashes = requestURL.replace(urlObj.hash, '');\n requestURL = 
`${urlWithoutHashes.split('?')[0]}${querystring ? `?${querystring}` : ''}`;\n requestURL += urlObj.hash;\n } else {\n requestURL = `${requestURL.split('?')[0]}${querystring ? `?${querystring}` : ''}`;\n }\n }\n\n if (opts.userAgent) {\n headers.append('User-Agent', opts.userAgent);\n }\n\n options.headers = headers;\n\n return fetch(requestURL, options);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAIA,SAAS,SAAS,oBAAoB;AAQtC,SAAS,YAAY;AACnB,SAAO,OAAO,WAAW,eAAe,OAAO,aAAa;AAC9D;AAEA,SAAS,SAAS,OAAY;AAC5B,SAAO,OAAO,WAAW,eAAe,OAAO,SAAS,KAAK;AAC/D;AAEA,SAAS,OAAO,OAAY;AAC1B,MAAI,iBAAiB,MAAM;AAOzB,WAAO,MAAM,YAAY,SAAS;AAAA,EACpC;AAEA,SAAO;AACT;AAEA,SAAS,yBAAyB,UAAkB,OAAiC;AACnF,MAAI,SAAS,YAAY,OAAO;AAC9B,WAAO,MAAM,QAAQ;AAAA,EACvB,WAAW,SAAS,mBAAmB,QAAQ,KAAK,OAAO;AACzD,WAAO,MAAM,mBAAmB,QAAQ,CAAC;AAAA,EAC3C;AAEA,SAAO;AACT;AAEA,SAAO,SAAgC,IAAyD;AAAA,6CAAzD,KAAU,OAAwB,CAAC,GAAsB;AA5ChG;AA6CE,QAAI,CAAC;AAAK,YAAM,IAAI,MAAM,wBAAwB;AAClD,QAAI,CAAC,IAAI,OAAO,CAAC,IAAI,IAAI,WAAW,CAAC,IAAI,IAAI,QAAQ;AAAQ,YAAM,IAAI,MAAM,2BAA2B;AAExG,QAAI,CAAC,WAAW,MAAM;AACpB,UAAI;AACF,cAAM,YAAY,MAAM,OAAO,QAAa,GAAG;AAE/C,mBAAW,OAAO;AAAA,MACpB,SAAS,GAAG;AACV,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,QAAI,CAAC,WAAW,MAAM;AACpB,UAAI;AAGF,cAAM,cAAc,MAAM,OAAO,QAAQ,GAAG;AAE5C,mBAAW,OAAO;AAAA,MACpB,SAAS,GAAG;AACV,cAAM,IAAI;AAAA,UACR;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,UAAM,EAAE,QAAQ,IAAI,IAAI,IAAI,QAAQ,CAAC;AACrC,UAAM,EAAE,IAAI,IAAI;AAChB,QAAI,cAAc;AAClB,QAAI,kBAAkB;AAEtB,UAAM,UAAiC,iCAGjC,KAAK,OAAO,KAAK,OAAO,CAAC,IAHQ;AAAA,MAIrC,QAAQ,QAAQ;AAAA,IAClB;AAEA,QAAI,CAAC,QAAQ,SAAS;AACpB,cAAQ,UAAU,IAAI,QAAQ;AAAA,IAChC,WAAW,OAAO,QAAQ,YAAY,YAAY,EAAE,QAAQ,mBAAmB,YAAY,QAAQ,YAAY,MAAM;AACnH,cAAQ,UAAU,IAAI,QAAQ,QAAQ,OAAO;AAAA,IAC/C;AAEA,UAAM,UAAU,QAAQ;AACxB,QAAI,aAAa,WAAW,QAAQ,QAAQ,QAAQ;AAElD,cAAQ,QAAQ,QAAQ,YAAU;AAChC,YAAI;AACF,iBAAO,QAAQ,OAAO,OAAO,MAAM,OAAO,KAAK;AAAA,QACjD,SAAS,KAAK;AAAA,QAOd;AAAA,MACF,CAAC;AAAA,IACH;AAEA,QAAI,aAAa,WAAW,QAAQ,QAAQ,QAAQ;AAOlD,UAAI,UAAU,GAAG;AACf,gBAAQ,QAAQ,QAAQ,YAAU;A
AChC,mBAAS,SAAS,GAAG,mBAAmB,OAAO,IAAI,CAAC,IAAI,mBAAmB,OAAO,KAAK,CAAC;AAAA,QAC1F,CAAC;AAED,gBAAQ,cAAc;AAAA,MACxB,OAAO;AACL,gBAAQ;AAAA,UACN;AAAA,UACA,QAAQ,QACL,IAAI,YAAU,GAAG,mBAAmB,OAAO,IAAI,CAAC,IAAI,mBAAmB,OAAO,KAAK,CAAC,EAAE,EACtF,KAAK,IAAI;AAAA,QACd;AAAA,MACF;AAAA,IACF;AAEA,QAAI,cAAc,SAAS;AACzB,UAAI,QAAQ,YAAY,YAAY,QAAQ,UAAU;AACpD,YAAI,EAAE,cAAc,QAAQ,WAAW;AAErC,kBAAQ,SAAS,WAAW;AAAA,QAC9B;AAEA,gBAAQ,QAAQ,SAAS,UAAU;AAAA,UACjC,KAAK;AASH,oBAAQ,IAAI,gBAAgB,QAAQ,SAAS,QAAQ;AAErD,kBAAM,gBAAgB,IAAI,gBAAgB;AAC1C,0BAAQ,SAAS,WAAjB,mBAAyB,QAAQ,WAAS;AACxC,kBAAI,MAAM;AAAO,8BAAc,IAAI,MAAM,MAAM,MAAM,KAAK;AAAA,YAC5D;AAEA,oBAAQ,OAAO,cAAc,SAAS;AACtC;AAAA,UAEF,KAAK;AAAA,UACL,KAAK;AAAA,UACL,KAAK;AAAA,UACL,KAAK;AASH,gBAAI,QAAQ,IAAI,cAAc,GAAG;AAC/B,sBAAQ,OAAO,cAAc;AAAA,YAC/B;AAEA,kBAAM,OAAO,IAAI,SAAS;AAE1B,0BAAQ,SAAS,WAAjB,mBAAyB,QAAQ,WAAS;AACxC,kBAAI,cAAc,SAAS,MAAM,UAAU;AACzC,oBAAI,KAAK,OAAO;AACd,wBAAM,eAAe,yBAAyB,MAAM,UAAU,KAAK,KAAK;AACxE,sBAAI,cAAc;AAGhB,wBAAI,SAAS,YAAY,GAAG;AAC1B,2BAAK;AAAA,wBACH,MAAM;AAAA,wBACN,IAAI,KAAK,CAAC,YAAY,GAAG,MAAM,UAAU;AAAA,0BACvC,MAAM,MAAM,eAAe;AAAA,wBAC7B,CAAC;AAAA,wBACD,MAAM;AAAA,sBACR;AAEA;AAAA,oBACF,WAAW,OAAO,YAAY,GAAG;AAC/B,2BAAK,OAAO,MAAM,MAAM,cAAsB,MAAM,QAAQ;AAC5D;AAAA,oBACF;AAEA,0BAAM,IAAI;AAAA,sBACR;AAAA,oBACF;AAAA,kBACF;AAAA,gBACF;AAEA,oBAAI,WAAW,SAAS,MAAM,OAAO;AACnC,sBAAI;AACJ,wBAAM,SAAS,aAAa,MAAM,KAAK;AACvC,sBAAI,QAAQ;AAGV,gCAAY,IAAI,KAAK,CAAC,MAAM,KAAK,GAAG,EAAE,MAAM,OAAO,eAAe,MAAM,eAAe,OAAU,CAAC;AAAA,kBACpG,OAAO;AACL,gCAAY,IAAI,KAAK,CAAC,MAAM,KAAK,GAAG,EAAE,MAAM,MAAM,eAAe,OAAU,CAAC;AAAA,kBAC9E;AAEA,uBAAK,OAAO,MAAM,MAAM,WAAW,MAAM,QAAQ;AACjD;AAAA,gBACF;AAEA,sBAAM,IAAI;AAAA,kBACR;AAAA,gBACF;AAAA,cACF;AAEA,kBAAI,MAAM;AAAO,qBAAK,OAAO,MAAM,MAAM,MAAM,KAAK;AAAA,YACtD;AAEA,oBAAQ,OAAO;AACf;AAAA,UAEF;AACE,kBAAM,WAAoC,CAAC;AAC3C,0BAAQ,SAAS,WAAjB,mBAAyB,IAAI,WAAS;AACpC,kBAAI;AACF,yBAAS,MAAM,IAAI,IAAI,KAAK,MAAM,MAAM,SAAS,EAAE;AAAA,cACrD,SAAS,GAAG;AACV,yBAAS,MAAM,IAAI,IAAI,MAAM;AAAA,cAC/B;AAEA,qBAAO;AAAA,YACT;AAEA,oBAAQ,OAAO,KAAK,UAAU,QAAQ;AAAA,QAC1C;AAAA
,MACF,YAAW,mBAAQ,aAAR,mBAAkB,SAAlB,mBAAwB,QAAQ;AAGzC,YAAI,KAAK,OAAO;AACd,gBAAM,SAAS,aAAa,QAAQ,SAAS,IAAI;AACjD,cAAI,QAAQ;AACV,iBAAI,iCAAQ,SAAQ,OAAO,QAAQ,KAAK,OAAO;AAC7C,oBAAM,eAAe,yBAAyB,OAAO,MAAM,KAAK,KAAK;AACrE,kBAAI,cAAc;AAChB,oBAAI,SAAS,YAAY,GAAG;AAC1B,0BAAQ,OAAO;AAAA,gBACjB,WAAW,OAAO,YAAY,GAAG;AAG/B,sBAAI,UAAU,GAAG;AACf,4BAAQ,OAAO;AAAA,kBACjB,OAAO;AACL,4BAAQ,OAAQ,aAAsB,OAAO;AAC7C,sCAAkB;AAIlB,wBAAI,CAAC,QAAQ,IAAI,gBAAgB,GAAG;AAClC,8BAAQ,IAAI,kBAAkB,OAAQ,aAAsB,IAAI,CAAC;AAAA,oBACnE;AAAA,kBACF;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,YAAI,OAAO,QAAQ,SAAS,aAAa;AACvC,kBAAQ,OAAO,QAAQ,SAAS;AAAA,QAClC;AAAA,MACF;AAcA,UAAI,mBAAmB,CAAC,UAAU,GAAG;AACnC,gBAAQ,SAAS;AAAA,MACnB;AAAA,IACF;AAIA,QAAI,aAAa;AACjB,QAAI,iBAAiB,WAAW,QAAQ,YAAY,QAAQ;AAC1D,YAAM,SAAS,IAAI,IAAI,UAAU;AAEjC,YAAM,cAAc,MAAM,KAAK,OAAO,YAAY,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,EAAE;AAC/E,cAAQ,YAAY,QAAQ,OAAK;AAC/B,oBAAY,KAAK,GAAG,EAAE,IAAI,IAAI,EAAE,KAAK,EAAE;AAAA,MACzC,CAAC;AAED,oBAAc,YAAY,KAAK,GAAG;AAIlC,UAAI,OAAO,MAAM;AACf,cAAM,mBAAmB,WAAW,QAAQ,OAAO,MAAM,EAAE;AAC3D,qBAAa,GAAG,iBAAiB,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,cAAc,IAAI,WAAW,KAAK,EAAE;AACrF,sBAAc,OAAO;AAAA,MACvB,OAAO;AACL,qBAAa,GAAG,WAAW,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,cAAc,IAAI,WAAW,KAAK,EAAE;AAAA,MACjF;AAAA,IACF;AAEA,QAAI,KAAK,WAAW;AAClB,cAAQ,OAAO,cAAc,KAAK,SAAS;AAAA,IAC7C;AAEA,YAAQ,UAAU;AAElB,WAAO,MAAM,YAAY,OAAO;AAAA,EAClC;AAAA;","names":[]} |
+1
-1
@@ -1,1 +0,1 @@ | ||
| "use strict";//# sourceMappingURL=types.js.map | ||
| //# sourceMappingURL=types.js.map |
@@ -1,1 +0,1 @@ | ||
| {"version":3,"sources":[],"names":[],"mappings":""} | ||
| {"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]} |
+10
-9
| { | ||
| "name": "fetch-har", | ||
| "version": "11.0.0", | ||
| "version": "11.0.1", | ||
| "description": "Make a fetch request from a HAR definition", | ||
| "main": "dist/index.js", | ||
| "types": "dist/index.d.ts", | ||
| "module": "dist/index.mjs", | ||
| "main": "dist/index.cjs", | ||
| "types": "dist/index.d.cts", | ||
| "module": "dist/index.js", | ||
| "type": "module", | ||
| "sideEffects": false, | ||
| "exports": { | ||
| ".": { | ||
| "import": "./dist/index.mjs", | ||
| "require": "./dist/index.js" | ||
| "import": "./dist/index.js", | ||
| "require": "./dist/index.cjs" | ||
| }, | ||
| "./types": { | ||
| "import": "./dist/types.mjs", | ||
| "require": "./dist/types.js" | ||
| "import": "./dist/types.js", | ||
| "require": "./dist/types.cjs" | ||
| }, | ||
@@ -40,3 +41,3 @@ "./package.json": "./package.json" | ||
| "type": "git", | ||
| "url": "git@github.com:readmeio/fetch-har.git" | ||
| "url": "git+ssh://git@github.com/readmeio/fetch-har.git" | ||
| }, | ||
@@ -43,0 +44,0 @@ "license": "ISC", |
| import { FetchHAROptions } from './types.mjs'; | ||
| import { Har } from 'har-format'; | ||
| declare function fetchHAR(har: Har, opts?: FetchHAROptions): Promise<Response>; | ||
| export { fetchHAR as default }; |
-239
| var __defProp = Object.defineProperty; | ||
| var __defProps = Object.defineProperties; | ||
| var __getOwnPropDescs = Object.getOwnPropertyDescriptors; | ||
| var __getOwnPropSymbols = Object.getOwnPropertySymbols; | ||
| var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
| var __propIsEnum = Object.prototype.propertyIsEnumerable; | ||
| var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; | ||
| var __spreadValues = (a, b) => { | ||
| for (var prop in b || (b = {})) | ||
| if (__hasOwnProp.call(b, prop)) | ||
| __defNormalProp(a, prop, b[prop]); | ||
| if (__getOwnPropSymbols) | ||
| for (var prop of __getOwnPropSymbols(b)) { | ||
| if (__propIsEnum.call(b, prop)) | ||
| __defNormalProp(a, prop, b[prop]); | ||
| } | ||
| return a; | ||
| }; | ||
| var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b)); | ||
| var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, { | ||
| get: (a, b) => (typeof require !== "undefined" ? require : a)[b] | ||
| }) : x)(function(x) { | ||
| if (typeof require !== "undefined") | ||
| return require.apply(this, arguments); | ||
| throw Error('Dynamic require of "' + x + '" is not supported'); | ||
| }); | ||
| // src/index.ts | ||
| import { parse as parseDataUrl } from "@readme/data-urls"; | ||
| if (!globalThis.Blob) { | ||
| try { | ||
| globalThis.Blob = __require("buffer").Blob; | ||
| } catch (e) { | ||
| throw new Error("The Blob API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/Blob"); | ||
| } | ||
| } | ||
| if (!globalThis.File) { | ||
| try { | ||
| globalThis.File = __require("undici").File; | ||
| } catch (e) { | ||
| throw new Error("The File API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/File"); | ||
| } | ||
| } | ||
| function isBrowser() { | ||
| return typeof window !== "undefined" && typeof document !== "undefined"; | ||
| } | ||
| function isBuffer(value) { | ||
| return typeof Buffer !== "undefined" && Buffer.isBuffer(value); | ||
| } | ||
| function isFile(value) { | ||
| if (value instanceof File) { | ||
| return value.constructor.name === "File"; | ||
| } | ||
| return false; | ||
| } | ||
| function getFileFromSuppliedFiles(filename, files) { | ||
| if (files && filename in files) { | ||
| return files[filename]; | ||
| } else if (files && decodeURIComponent(filename) in files) { | ||
| return files[decodeURIComponent(filename)]; | ||
| } | ||
| return false; | ||
| } | ||
| function fetchHAR(har, opts = {}) { | ||
| var _a, _b, _c, _d, _e; | ||
| if (!har) | ||
| throw new Error("Missing HAR definition"); | ||
| if (!har.log || !har.log.entries || !har.log.entries.length) | ||
| throw new Error("Missing log.entries array"); | ||
| const { request } = har.log.entries[0]; | ||
| const { url } = request; | ||
| let querystring = ""; | ||
| let shouldSetDuplex = false; | ||
| const options = __spreadProps(__spreadValues({}, opts.init ? opts.init : {}), { | ||
| method: request.method | ||
| }); | ||
| if (!options.headers) { | ||
| options.headers = new Headers(); | ||
| } else if (typeof options.headers === "object" && !(options.headers instanceof Headers) && options.headers !== null) { | ||
| options.headers = new Headers(options.headers); | ||
| } | ||
| const headers = options.headers; | ||
| if ("headers" in request && request.headers.length) { | ||
| request.headers.forEach((header) => { | ||
| try { | ||
| return headers.append(header.name, header.value); | ||
| } catch (err) { | ||
| } | ||
| }); | ||
| } | ||
| if ("cookies" in request && request.cookies.length) { | ||
| if (isBrowser()) { | ||
| request.cookies.forEach((cookie) => { | ||
| document.cookie = `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`; | ||
| }); | ||
| options.credentials = "include"; | ||
| } else { | ||
| headers.append( | ||
| "cookie", | ||
| request.cookies.map((cookie) => `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`).join("; ") | ||
| ); | ||
| } | ||
| } | ||
| if ("postData" in request) { | ||
| if (request.postData && "params" in request.postData) { | ||
| if (!("mimeType" in request.postData)) { | ||
| request.postData.mimeType = "application/octet-stream"; | ||
| } | ||
| switch (request.postData.mimeType) { | ||
| case "application/x-www-form-urlencoded": | ||
| headers.set("Content-Type", request.postData.mimeType); | ||
| const encodedParams = new URLSearchParams(); | ||
| (_a = request.postData.params) == null ? void 0 : _a.forEach((param) => { | ||
| if (param.value) | ||
| encodedParams.set(param.name, param.value); | ||
| }); | ||
| options.body = encodedParams.toString(); | ||
| break; | ||
| case "multipart/alternative": | ||
| case "multipart/form-data": | ||
| case "multipart/mixed": | ||
| case "multipart/related": | ||
| if (headers.has("Content-Type")) { | ||
| headers.delete("Content-Type"); | ||
| } | ||
| const form = new FormData(); | ||
| (_b = request.postData.params) == null ? void 0 : _b.forEach((param) => { | ||
| if ("fileName" in param && param.fileName) { | ||
| if (opts.files) { | ||
| const fileContents = getFileFromSuppliedFiles(param.fileName, opts.files); | ||
| if (fileContents) { | ||
| if (isBuffer(fileContents)) { | ||
| form.append( | ||
| param.name, | ||
| new File([fileContents], param.fileName, { | ||
| type: param.contentType || void 0 | ||
| }), | ||
| param.fileName | ||
| ); | ||
| return; | ||
| } else if (isFile(fileContents)) { | ||
| form.append(param.name, fileContents, param.fileName); | ||
| return; | ||
| } | ||
| throw new TypeError( | ||
| "An unknown object has been supplied into the `files` config for use. We only support instances of the File API and Node Buffer objects." | ||
| ); | ||
| } | ||
| } | ||
| if ("value" in param && param.value) { | ||
| let paramBlob; | ||
| const parsed = parseDataUrl(param.value); | ||
| if (parsed) { | ||
| paramBlob = new Blob([param.value], { type: parsed.contentType || param.contentType || void 0 }); | ||
| } else { | ||
| paramBlob = new Blob([param.value], { type: param.contentType || void 0 }); | ||
| } | ||
| form.append(param.name, paramBlob, param.fileName); | ||
| return; | ||
| } | ||
| throw new Error( | ||
| "The supplied HAR has a postData parameter with `fileName`, but neither `value` content within the HAR or any file buffers were supplied with the `files` option. Since this library doesn't have access to the filesystem, it can't fetch that file." | ||
| ); | ||
| } | ||
| if (param.value) | ||
| form.append(param.name, param.value); | ||
| }); | ||
| options.body = form; | ||
| break; | ||
| default: | ||
| const formBody = {}; | ||
| (_c = request.postData.params) == null ? void 0 : _c.map((param) => { | ||
| try { | ||
| formBody[param.name] = JSON.parse(param.value || ""); | ||
| } catch (e) { | ||
| formBody[param.name] = param.value; | ||
| } | ||
| return true; | ||
| }); | ||
| options.body = JSON.stringify(formBody); | ||
| } | ||
| } else if ((_e = (_d = request.postData) == null ? void 0 : _d.text) == null ? void 0 : _e.length) { | ||
| if (opts.files) { | ||
| const parsed = parseDataUrl(request.postData.text); | ||
| if (parsed) { | ||
| if ((parsed == null ? void 0 : parsed.name) && parsed.name in opts.files) { | ||
| const fileContents = getFileFromSuppliedFiles(parsed.name, opts.files); | ||
| if (fileContents) { | ||
| if (isBuffer(fileContents)) { | ||
| options.body = fileContents; | ||
| } else if (isFile(fileContents)) { | ||
| if (isBrowser()) { | ||
| options.body = fileContents; | ||
| } else { | ||
| options.body = fileContents.stream(); | ||
| shouldSetDuplex = true; | ||
| if (!headers.has("content-length")) { | ||
| headers.set("content-length", String(fileContents.size)); | ||
| } | ||
| } | ||
| } | ||
| } | ||
| } | ||
| } | ||
| } | ||
| if (typeof options.body === "undefined") { | ||
| options.body = request.postData.text; | ||
| } | ||
| } | ||
| if (shouldSetDuplex && !isBrowser()) { | ||
| options.duplex = "half"; | ||
| } | ||
| } | ||
| let requestURL = url; | ||
| if ("queryString" in request && request.queryString.length) { | ||
| const urlObj = new URL(requestURL); | ||
| const queryParams = Array.from(urlObj.searchParams).map(([k, v]) => `${k}=${v}`); | ||
| request.queryString.forEach((q) => { | ||
| queryParams.push(`${q.name}=${q.value}`); | ||
| }); | ||
| querystring = queryParams.join("&"); | ||
| if (urlObj.hash) { | ||
| const urlWithoutHashes = requestURL.replace(urlObj.hash, ""); | ||
| requestURL = `${urlWithoutHashes.split("?")[0]}${querystring ? `?${querystring}` : ""}`; | ||
| requestURL += urlObj.hash; | ||
| } else { | ||
| requestURL = `${requestURL.split("?")[0]}${querystring ? `?${querystring}` : ""}`; | ||
| } | ||
| } | ||
| if (opts.userAgent) { | ||
| headers.append("User-Agent", opts.userAgent); | ||
| } | ||
| options.headers = headers; | ||
| return fetch(requestURL, options); | ||
| } | ||
| export { | ||
| fetchHAR as default | ||
| }; | ||
| //# sourceMappingURL=index.mjs.map |
| {"version":3,"sources":["../src/index.ts"],"sourcesContent":["import type { FetchHAROptions, RequestInitWithDuplex } from './types';\nimport type { DataURL as npmDataURL } from '@readme/data-urls';\nimport type { Har } from 'har-format';\n\nimport { parse as parseDataUrl } from '@readme/data-urls';\n\nif (!globalThis.Blob) {\n try {\n // eslint-disable-next-line @typescript-eslint/no-var-requires\n globalThis.Blob = require('node:buffer').Blob;\n } catch (e) {\n throw new Error('The Blob API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/Blob');\n }\n}\n\nif (!globalThis.File) {\n try {\n // Node's native `fetch` implementation unfortunately does not make this API global so we need\n // to pull it in if we don't have it.\n // eslint-disable-next-line @typescript-eslint/no-var-requires\n globalThis.File = require('undici').File;\n } catch (e) {\n throw new Error('The File API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/File');\n }\n}\n\ntype DataURL = npmDataURL & {\n // `parse-data-url` doesn't explicitly support `name` in data URLs but if it's there it'll be\n // returned back to us.\n name?: string;\n};\n\nfunction isBrowser() {\n return typeof window !== 'undefined' && typeof document !== 'undefined';\n}\n\nfunction isBuffer(value: any) {\n return typeof Buffer !== 'undefined' && Buffer.isBuffer(value);\n}\n\nfunction isFile(value: any) {\n if (value instanceof File) {\n /**\n * The `Blob` polyfill on Node comes back as being an instanceof `File`. 
Because passing a Blob\n * into a File will end up with a corrupted file we want to prevent this.\n *\n * This object identity crisis does not happen in the browser.\n */\n return value.constructor.name === 'File';\n }\n\n return false;\n}\n\nfunction getFileFromSuppliedFiles(filename: string, files: FetchHAROptions['files']) {\n if (files && filename in files) {\n return files[filename];\n } else if (files && decodeURIComponent(filename) in files) {\n return files[decodeURIComponent(filename)];\n }\n\n return false;\n}\n\nexport default function fetchHAR(har: Har, opts: FetchHAROptions = {}) {\n if (!har) throw new Error('Missing HAR definition');\n if (!har.log || !har.log.entries || !har.log.entries.length) throw new Error('Missing log.entries array');\n\n const { request } = har.log.entries[0];\n const { url } = request;\n let querystring = '';\n let shouldSetDuplex = false;\n\n const options: RequestInitWithDuplex = {\n // If we have custom options for the `Request` API we need to add them in here now before we\n // fill it in with everything we need from the HAR.\n ...(opts.init ? opts.init : {}),\n method: request.method,\n };\n\n if (!options.headers) {\n options.headers = new Headers();\n } else if (typeof options.headers === 'object' && !(options.headers instanceof Headers) && options.headers !== null) {\n options.headers = new Headers(options.headers);\n }\n\n const headers = options.headers as Headers;\n if ('headers' in request && request.headers.length) {\n // eslint-disable-next-line consistent-return\n request.headers.forEach(header => {\n try {\n return headers.append(header.name, header.value);\n } catch (err) {\n /**\n * `Headers.append()` will throw errors if the header name is not a legal HTTP header name,\n * like `X-API-KEY (Header)`. 
If that happens instead of tossing the error back out, we\n * should silently just ignore\n * it.\n */\n }\n });\n }\n\n if ('cookies' in request && request.cookies.length) {\n /**\n * As the browser fetch API can't set custom cookies for requests, they instead need to be\n * defined on the document and passed into the request via `credentials: include`. Since this\n * is a browser-specific quirk, that should only\n * happen in browsers!\n */\n if (isBrowser()) {\n request.cookies.forEach(cookie => {\n document.cookie = `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`;\n });\n\n options.credentials = 'include';\n } else {\n headers.append(\n 'cookie',\n request.cookies\n .map(cookie => `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`)\n .join('; '),\n );\n }\n }\n\n if ('postData' in request) {\n if (request.postData && 'params' in request.postData) {\n if (!('mimeType' in request.postData)) {\n // @ts-expect-error HAR spec requires that `mimeType` is always present but it might not be.\n request.postData.mimeType = 'application/octet-stream';\n }\n\n switch (request.postData.mimeType) {\n case 'application/x-www-form-urlencoded':\n /**\n * Since the content we're handling here is to be encoded as\n * `application/x-www-form-urlencoded`, this should override any other `Content-Type`\n * headers that are present in the HAR. 
This is how Postman handles this case when\n * building code snippets!\n *\n * @see {@link https://github.com/github/fetch/issues/263#issuecomment-209530977}\n */\n headers.set('Content-Type', request.postData.mimeType);\n\n const encodedParams = new URLSearchParams();\n request.postData.params?.forEach(param => {\n if (param.value) encodedParams.set(param.name, param.value);\n });\n\n options.body = encodedParams.toString();\n break;\n\n case 'multipart/alternative':\n case 'multipart/form-data':\n case 'multipart/mixed':\n case 'multipart/related':\n /**\n * If there's a `Content-Type` header set we need to remove it. We're doing this because\n * when we pass the form data object into `fetch` that'll set a proper `Content-Type`\n * header for this request that also includes the boundary used on the content.\n *\n * If we don't do this, then consumers won't be able to parse out the payload because\n * they won't know what the boundary to split on it.\n */\n if (headers.has('Content-Type')) {\n headers.delete('Content-Type');\n }\n\n const form = new FormData();\n\n request.postData.params?.forEach(param => {\n if ('fileName' in param && param.fileName) {\n if (opts.files) {\n const fileContents = getFileFromSuppliedFiles(param.fileName, opts.files);\n if (fileContents) {\n // If the file we've got available to us is a Buffer then we need to convert it so\n // that the FormData API can use it.\n if (isBuffer(fileContents)) {\n form.append(\n param.name,\n new File([fileContents], param.fileName, {\n type: param.contentType || undefined,\n }),\n param.fileName,\n );\n\n return;\n } else if (isFile(fileContents)) {\n form.append(param.name, fileContents as Blob, param.fileName);\n return;\n }\n\n throw new TypeError(\n 'An unknown object has been supplied into the `files` config for use. 
We only support instances of the File API and Node Buffer objects.',\n );\n }\n }\n\n if ('value' in param && param.value) {\n let paramBlob;\n const parsed = parseDataUrl(param.value);\n if (parsed) {\n // If we were able to parse out this data URL we don't need to transform its data\n // into a buffer for `Blob` because that supports data URLs already.\n paramBlob = new Blob([param.value], { type: parsed.contentType || param.contentType || undefined });\n } else {\n paramBlob = new Blob([param.value], { type: param.contentType || undefined });\n }\n\n form.append(param.name, paramBlob, param.fileName);\n return;\n }\n\n throw new Error(\n \"The supplied HAR has a postData parameter with `fileName`, but neither `value` content within the HAR or any file buffers were supplied with the `files` option. Since this library doesn't have access to the filesystem, it can't fetch that file.\",\n );\n }\n\n if (param.value) form.append(param.name, param.value);\n });\n\n options.body = form;\n break;\n\n default:\n const formBody: Record<string, unknown> = {};\n request.postData.params?.map(param => {\n try {\n formBody[param.name] = JSON.parse(param.value || '');\n } catch (e) {\n formBody[param.name] = param.value;\n }\n\n return true;\n });\n\n options.body = JSON.stringify(formBody);\n }\n } else if (request.postData?.text?.length) {\n // If we've got `files` map content present, and this post data content contains a valid data\n // URL then we can substitute the payload with that file instead of the using data URL.\n if (opts.files) {\n const parsed = parseDataUrl(request.postData.text) as DataURL;\n if (parsed) {\n if (parsed?.name && parsed.name in opts.files) {\n const fileContents = getFileFromSuppliedFiles(parsed.name, opts.files);\n if (fileContents) {\n if (isBuffer(fileContents)) {\n options.body = fileContents;\n } else if (isFile(fileContents)) {\n // `Readable.from` isn't available in browsers but the browser `Request` object can\n // handle `File` objects 
just fine without us having to mold it into shape.\n if (isBrowser()) {\n options.body = fileContents;\n } else {\n options.body = (fileContents as File).stream();\n shouldSetDuplex = true;\n\n // Supplying a polyfilled `File` stream into `Request.body` doesn't automatically\n // add `Content-Length`.\n if (!headers.has('content-length')) {\n headers.set('content-length', String((fileContents as File).size));\n }\n }\n }\n }\n }\n }\n }\n\n if (typeof options.body === 'undefined') {\n options.body = request.postData.text;\n }\n }\n\n /**\n * The fetch spec, which Node 18+ strictly abides by, now requires that `duplex` be sent with\n * requests that have payloads.\n *\n * As `RequestInit#duplex` isn't supported by any browsers, or even mentioned on MDN, we aren't\n * sending it in browser environments. This work is purely to support Node 18+ and `undici`\n * environments.\n *\n * @see {@link https://github.com/nodejs/node/issues/46221}\n * @see {@link https://github.com/whatwg/fetch/pull/1457}\n * @see {@link https://developer.mozilla.org/en-US/docs/Web/API/Request/Request}\n */\n if (shouldSetDuplex && !isBrowser()) {\n options.duplex = 'half';\n }\n }\n\n // We automaticaly assume that the HAR that we have already has query parameters encoded within\n // it so we do **not** use the `URLSearchParams` API here for composing the query string.\n let requestURL = url;\n if ('queryString' in request && request.queryString.length) {\n const urlObj = new URL(requestURL);\n\n const queryParams = Array.from(urlObj.searchParams).map(([k, v]) => `${k}=${v}`);\n request.queryString.forEach(q => {\n queryParams.push(`${q.name}=${q.value}`);\n });\n\n querystring = queryParams.join('&');\n\n // Because anchor hashes before query strings will prevent query strings from being delivered\n // we need to pop them off and re-add them after.\n if (urlObj.hash) {\n const urlWithoutHashes = requestURL.replace(urlObj.hash, '');\n requestURL = 
`${urlWithoutHashes.split('?')[0]}${querystring ? `?${querystring}` : ''}`;\n requestURL += urlObj.hash;\n } else {\n requestURL = `${requestURL.split('?')[0]}${querystring ? `?${querystring}` : ''}`;\n }\n }\n\n if (opts.userAgent) {\n headers.append('User-Agent', opts.userAgent);\n }\n\n options.headers = headers;\n\n return fetch(requestURL, options);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAIA,SAAS,SAAS,oBAAoB;AAEtC,IAAI,CAAC,WAAW,MAAM;AACpB,MAAI;AAEF,eAAW,OAAO,UAAQ,QAAa,EAAE;AAAA,EAC3C,SAAS,GAAG;AACV,UAAM,IAAI,MAAM,kGAAkG;AAAA,EACpH;AACF;AAEA,IAAI,CAAC,WAAW,MAAM;AACpB,MAAI;AAIF,eAAW,OAAO,UAAQ,QAAQ,EAAE;AAAA,EACtC,SAAS,GAAG;AACV,UAAM,IAAI,MAAM,kGAAkG;AAAA,EACpH;AACF;AAQA,SAAS,YAAY;AACnB,SAAO,OAAO,WAAW,eAAe,OAAO,aAAa;AAC9D;AAEA,SAAS,SAAS,OAAY;AAC5B,SAAO,OAAO,WAAW,eAAe,OAAO,SAAS,KAAK;AAC/D;AAEA,SAAS,OAAO,OAAY;AAC1B,MAAI,iBAAiB,MAAM;AAOzB,WAAO,MAAM,YAAY,SAAS;AAAA,EACpC;AAEA,SAAO;AACT;AAEA,SAAS,yBAAyB,UAAkB,OAAiC;AACnF,MAAI,SAAS,YAAY,OAAO;AAC9B,WAAO,MAAM,QAAQ;AAAA,EACvB,WAAW,SAAS,mBAAmB,QAAQ,KAAK,OAAO;AACzD,WAAO,MAAM,mBAAmB,QAAQ,CAAC;AAAA,EAC3C;AAEA,SAAO;AACT;AAEe,SAAR,SAA0B,KAAU,OAAwB,CAAC,GAAG;AAhEvE;AAiEE,MAAI,CAAC;AAAK,UAAM,IAAI,MAAM,wBAAwB;AAClD,MAAI,CAAC,IAAI,OAAO,CAAC,IAAI,IAAI,WAAW,CAAC,IAAI,IAAI,QAAQ;AAAQ,UAAM,IAAI,MAAM,2BAA2B;AAExG,QAAM,EAAE,QAAQ,IAAI,IAAI,IAAI,QAAQ,CAAC;AACrC,QAAM,EAAE,IAAI,IAAI;AAChB,MAAI,cAAc;AAClB,MAAI,kBAAkB;AAEtB,QAAM,UAAiC,iCAGjC,KAAK,OAAO,KAAK,OAAO,CAAC,IAHQ;AAAA,IAIrC,QAAQ,QAAQ;AAAA,EAClB;AAEA,MAAI,CAAC,QAAQ,SAAS;AACpB,YAAQ,UAAU,IAAI,QAAQ;AAAA,EAChC,WAAW,OAAO,QAAQ,YAAY,YAAY,EAAE,QAAQ,mBAAmB,YAAY,QAAQ,YAAY,MAAM;AACnH,YAAQ,UAAU,IAAI,QAAQ,QAAQ,OAAO;AAAA,EAC/C;AAEA,QAAM,UAAU,QAAQ;AACxB,MAAI,aAAa,WAAW,QAAQ,QAAQ,QAAQ;AAElD,YAAQ,QAAQ,QAAQ,YAAU;AAChC,UAAI;AACF,eAAO,QAAQ,OAAO,OAAO,MAAM,OAAO,KAAK;AAAA,MACjD,SAAS,KAAK;AAAA,MAOd;AAAA,IACF,CAAC;AAAA,EACH;AAEA,MAAI,aAAa,WAAW,QAAQ,QAAQ,QAAQ;AAOlD,QAAI,UAAU,GAAG;AACf,cAAQ,QAAQ,QAAQ,YAAU;AAChC,iBAAS,SAAS,GAAG,mBAAmB,OAAO,IAAI,CAAC,IAAI,mBAAmB,OAAO,KAAK,CAAC;AAAA,MAC1F,CAAC;AAED,cAAQ,cAAc;A
AAA,IACxB,OAAO;AACL,cAAQ;AAAA,QACN;AAAA,QACA,QAAQ,QACL,IAAI,YAAU,GAAG,mBAAmB,OAAO,IAAI,CAAC,IAAI,mBAAmB,OAAO,KAAK,CAAC,EAAE,EACtF,KAAK,IAAI;AAAA,MACd;AAAA,IACF;AAAA,EACF;AAEA,MAAI,cAAc,SAAS;AACzB,QAAI,QAAQ,YAAY,YAAY,QAAQ,UAAU;AACpD,UAAI,EAAE,cAAc,QAAQ,WAAW;AAErC,gBAAQ,SAAS,WAAW;AAAA,MAC9B;AAEA,cAAQ,QAAQ,SAAS,UAAU;AAAA,QACjC,KAAK;AASH,kBAAQ,IAAI,gBAAgB,QAAQ,SAAS,QAAQ;AAErD,gBAAM,gBAAgB,IAAI,gBAAgB;AAC1C,wBAAQ,SAAS,WAAjB,mBAAyB,QAAQ,WAAS;AACxC,gBAAI,MAAM;AAAO,4BAAc,IAAI,MAAM,MAAM,MAAM,KAAK;AAAA,UAC5D;AAEA,kBAAQ,OAAO,cAAc,SAAS;AACtC;AAAA,QAEF,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AASH,cAAI,QAAQ,IAAI,cAAc,GAAG;AAC/B,oBAAQ,OAAO,cAAc;AAAA,UAC/B;AAEA,gBAAM,OAAO,IAAI,SAAS;AAE1B,wBAAQ,SAAS,WAAjB,mBAAyB,QAAQ,WAAS;AACxC,gBAAI,cAAc,SAAS,MAAM,UAAU;AACzC,kBAAI,KAAK,OAAO;AACd,sBAAM,eAAe,yBAAyB,MAAM,UAAU,KAAK,KAAK;AACxE,oBAAI,cAAc;AAGhB,sBAAI,SAAS,YAAY,GAAG;AAC1B,yBAAK;AAAA,sBACH,MAAM;AAAA,sBACN,IAAI,KAAK,CAAC,YAAY,GAAG,MAAM,UAAU;AAAA,wBACvC,MAAM,MAAM,eAAe;AAAA,sBAC7B,CAAC;AAAA,sBACD,MAAM;AAAA,oBACR;AAEA;AAAA,kBACF,WAAW,OAAO,YAAY,GAAG;AAC/B,yBAAK,OAAO,MAAM,MAAM,cAAsB,MAAM,QAAQ;AAC5D;AAAA,kBACF;AAEA,wBAAM,IAAI;AAAA,oBACR;AAAA,kBACF;AAAA,gBACF;AAAA,cACF;AAEA,kBAAI,WAAW,SAAS,MAAM,OAAO;AACnC,oBAAI;AACJ,sBAAM,SAAS,aAAa,MAAM,KAAK;AACvC,oBAAI,QAAQ;AAGV,8BAAY,IAAI,KAAK,CAAC,MAAM,KAAK,GAAG,EAAE,MAAM,OAAO,eAAe,MAAM,eAAe,OAAU,CAAC;AAAA,gBACpG,OAAO;AACL,8BAAY,IAAI,KAAK,CAAC,MAAM,KAAK,GAAG,EAAE,MAAM,MAAM,eAAe,OAAU,CAAC;AAAA,gBAC9E;AAEA,qBAAK,OAAO,MAAM,MAAM,WAAW,MAAM,QAAQ;AACjD;AAAA,cACF;AAEA,oBAAM,IAAI;AAAA,gBACR;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,MAAM;AAAO,mBAAK,OAAO,MAAM,MAAM,MAAM,KAAK;AAAA,UACtD;AAEA,kBAAQ,OAAO;AACf;AAAA,QAEF;AACE,gBAAM,WAAoC,CAAC;AAC3C,wBAAQ,SAAS,WAAjB,mBAAyB,IAAI,WAAS;AACpC,gBAAI;AACF,uBAAS,MAAM,IAAI,IAAI,KAAK,MAAM,MAAM,SAAS,EAAE;AAAA,YACrD,SAAS,GAAG;AACV,uBAAS,MAAM,IAAI,IAAI,MAAM;AAAA,YAC/B;AAEA,mBAAO;AAAA,UACT;AAEA,kBAAQ,OAAO,KAAK,UAAU,QAAQ;AAAA,MAC1C;AAAA,IACF,YAAW,mBAAQ,aAAR,mBAAkB,SAAlB,mBAAwB,QAAQ;AAGzC,UAAI,KAAK,OAAO;AACd,cAAM,SAAS,aAAa,QAAQ,SAAS,IAAI;AACjD,
YAAI,QAAQ;AACV,eAAI,iCAAQ,SAAQ,OAAO,QAAQ,KAAK,OAAO;AAC7C,kBAAM,eAAe,yBAAyB,OAAO,MAAM,KAAK,KAAK;AACrE,gBAAI,cAAc;AAChB,kBAAI,SAAS,YAAY,GAAG;AAC1B,wBAAQ,OAAO;AAAA,cACjB,WAAW,OAAO,YAAY,GAAG;AAG/B,oBAAI,UAAU,GAAG;AACf,0BAAQ,OAAO;AAAA,gBACjB,OAAO;AACL,0BAAQ,OAAQ,aAAsB,OAAO;AAC7C,oCAAkB;AAIlB,sBAAI,CAAC,QAAQ,IAAI,gBAAgB,GAAG;AAClC,4BAAQ,IAAI,kBAAkB,OAAQ,aAAsB,IAAI,CAAC;AAAA,kBACnE;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,UAAI,OAAO,QAAQ,SAAS,aAAa;AACvC,gBAAQ,OAAO,QAAQ,SAAS;AAAA,MAClC;AAAA,IACF;AAcA,QAAI,mBAAmB,CAAC,UAAU,GAAG;AACnC,cAAQ,SAAS;AAAA,IACnB;AAAA,EACF;AAIA,MAAI,aAAa;AACjB,MAAI,iBAAiB,WAAW,QAAQ,YAAY,QAAQ;AAC1D,UAAM,SAAS,IAAI,IAAI,UAAU;AAEjC,UAAM,cAAc,MAAM,KAAK,OAAO,YAAY,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,EAAE;AAC/E,YAAQ,YAAY,QAAQ,OAAK;AAC/B,kBAAY,KAAK,GAAG,EAAE,IAAI,IAAI,EAAE,KAAK,EAAE;AAAA,IACzC,CAAC;AAED,kBAAc,YAAY,KAAK,GAAG;AAIlC,QAAI,OAAO,MAAM;AACf,YAAM,mBAAmB,WAAW,QAAQ,OAAO,MAAM,EAAE;AAC3D,mBAAa,GAAG,iBAAiB,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,cAAc,IAAI,WAAW,KAAK,EAAE;AACrF,oBAAc,OAAO;AAAA,IACvB,OAAO;AACL,mBAAa,GAAG,WAAW,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,cAAc,IAAI,WAAW,KAAK,EAAE;AAAA,IACjF;AAAA,EACF;AAEA,MAAI,KAAK,WAAW;AAClB,YAAQ,OAAO,cAAc,KAAK,SAAS;AAAA,EAC7C;AAEA,UAAQ,UAAU;AAElB,SAAO,MAAM,YAAY,OAAO;AAClC;","names":[]} |
interface RequestInitWithDuplex extends RequestInit {
  /**
   * `RequestInit#duplex` does not yet exist in the TS `lib.dom.d.ts` definition yet the native
   * fetch implementation in Node 18+, `undici`, requires it for certain POST payloads.
   *
   * @see {@link https://github.com/microsoft/TypeScript-DOM-lib-generator/issues/1483}
   * @see {@link https://github.com/nodejs/node/issues/46221}
   * @see {@link https://fetch.spec.whatwg.org/#request-class}
   * @see {@link https://github.com/microsoft/TypeScript/blob/main/lib/lib.dom.d.ts}
   */
  duplex?: 'half';
}
/** Options accepted by the `fetchHAR` entry point. */
interface FetchHAROptions {
  /**
   * Map of file name → file contents (`Blob`/`File` or Node `Buffer`). When a HAR `postData`
   * entry references a file by name (directly or via a parsed data URL), its payload is
   * substituted from this map instead of the inline HAR content.
   */
  files?: Record<string, Blob | Buffer>;
  /**
   * Additional `RequestInit` options merged into the generated fetch request.
   * NOTE(review): presumably spread over the library-generated options — confirm precedence
   * against the implementation before relying on it to override headers.
   */
  init?: RequestInitWithDuplex;
  /** If supplied, appended to the outgoing request as a `User-Agent` header. */
  userAgent?: string;
}
export { FetchHAROptions, RequestInitWithDuplex };
| //# sourceMappingURL=types.mjs.map |
| {"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]} |
Major refactor
Supply chain risk: this package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Network access
Supply chain risk: this module accesses the network.
Found 1 instance in 1 package
Major refactor
Supply chain risk: this package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Network access
Supply chain risk: this module accesses the network.
Found 1 instance in 1 package
New author
Supply chain risk: a new npm collaborator published a version of the package for the first time. New collaborators are usually benign additions to a project, but they do indicate a change to the security surface area of a package.
Found 1 instance in 1 package
64913
4.14%537
8.7%9
-10%Yes
NaN