fetch-har
Comparing version 11.0.0 to 11.0.1
@@ -6,2 +6,2 @@ import { FetchHAROptions } from './types.js';
-export = fetchHAR;
+export { fetchHAR as default };
@@ -1,2 +0,2 @@ | ||
"use strict";Object.defineProperty(exports, "__esModule", {value: true});var __defProp = Object.defineProperty; | ||
var __defProp = Object.defineProperty; | ||
var __defProps = Object.defineProperties; | ||
@@ -20,26 +20,25 @@ var __getOwnPropDescs = Object.getOwnPropertyDescriptors;
 var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
-var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
-  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
-}) : x)(function(x) {
-  if (typeof require !== "undefined")
-    return require.apply(this, arguments);
-  throw Error('Dynamic require of "' + x + '" is not supported');
-});
+var __async = (__this, __arguments, generator) => {
+  return new Promise((resolve, reject) => {
+    var fulfilled = (value) => {
+      try {
+        step(generator.next(value));
+      } catch (e) {
+        reject(e);
+      }
+    };
+    var rejected = (value) => {
+      try {
+        step(generator.throw(value));
+      } catch (e) {
+        reject(e);
+      }
+    };
+    var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
+    step((generator = generator.apply(__this, __arguments)).next());
+  });
+};
 // src/index.ts
-var _dataurls = require('@readme/data-urls');
-if (!globalThis.Blob) {
-  try {
-    globalThis.Blob = __require("buffer").Blob;
-  } catch (e) {
-    throw new Error("The Blob API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/Blob");
-  }
-}
-if (!globalThis.File) {
-  try {
-    globalThis.File = __require("undici").File;
-  } catch (e) {
-    throw new Error("The File API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/File");
-  }
-}
+import { parse as parseDataUrl } from "@readme/data-urls";
 function isBrowser() {
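The Blob and File shims removed above do not disappear: later in this diff they reappear inside the fetchHAR function body, loaded with a dynamic import() instead of require(), which is why the ESM build can drop the __require helper. A rough sketch of that pattern in plain JavaScript; the helper name here is illustrative and not part of the package:

// Illustrative only: lazily fill in Blob/File on Node versions that lack them.
async function ensureBlobAndFile() {
  if (!globalThis.Blob) {
    // Node's built-in `buffer` module has shipped a Blob implementation since v15.7.
    const { Blob } = await import('buffer');
    globalThis.Blob = Blob;
  }
  if (!globalThis.File) {
    // undici provides a spec-compliant File implementation for Node.
    const { File } = await import('undici');
    globalThis.File = File;
  }
}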
@@ -65,137 +64,159 @@ return typeof window !== "undefined" && typeof document !== "undefined";
 }
-function fetchHAR(har, opts = {}) {
-  var _a, _b, _c, _d, _e;
-  if (!har)
-    throw new Error("Missing HAR definition");
-  if (!har.log || !har.log.entries || !har.log.entries.length)
-    throw new Error("Missing log.entries array");
-  const { request } = har.log.entries[0];
-  const { url } = request;
-  let querystring = "";
-  let shouldSetDuplex = false;
-  const options = __spreadProps(__spreadValues({}, opts.init ? opts.init : {}), {
-    method: request.method
-  });
-  if (!options.headers) {
-    options.headers = new Headers();
-  } else if (typeof options.headers === "object" && !(options.headers instanceof Headers) && options.headers !== null) {
-    options.headers = new Headers(options.headers);
-  }
-  const headers = options.headers;
-  if ("headers" in request && request.headers.length) {
-    request.headers.forEach((header) => {
-      try {
-        return headers.append(header.name, header.value);
-      } catch (err) {
-      }
-    });
-  }
-  if ("cookies" in request && request.cookies.length) {
-    if (isBrowser()) {
-      request.cookies.forEach((cookie) => {
-        document.cookie = `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`;
-      });
-      options.credentials = "include";
-    } else {
-      headers.append(
-        "cookie",
-        request.cookies.map((cookie) => `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`).join("; ")
-      );
-    }
-  }
-  if ("postData" in request) {
-    if (request.postData && "params" in request.postData) {
-      if (!("mimeType" in request.postData)) {
-        request.postData.mimeType = "application/octet-stream";
-      }
-      switch (request.postData.mimeType) {
-        case "application/x-www-form-urlencoded":
-          headers.set("Content-Type", request.postData.mimeType);
-          const encodedParams = new URLSearchParams();
-          (_a = request.postData.params) == null ? void 0 : _a.forEach((param) => {
-            if (param.value)
-              encodedParams.set(param.name, param.value);
-          });
-          options.body = encodedParams.toString();
-          break;
-        case "multipart/alternative":
-        case "multipart/form-data":
-        case "multipart/mixed":
-        case "multipart/related":
-          if (headers.has("Content-Type")) {
-            headers.delete("Content-Type");
-          }
-          const form = new FormData();
-          (_b = request.postData.params) == null ? void 0 : _b.forEach((param) => {
-            if ("fileName" in param && param.fileName) {
-              if (opts.files) {
-                const fileContents = getFileFromSuppliedFiles(param.fileName, opts.files);
-                if (fileContents) {
-                  if (isBuffer(fileContents)) {
-                    form.append(
-                      param.name,
-                      new File([fileContents], param.fileName, {
-                        type: param.contentType || void 0
-                      }),
-                      param.fileName
-                    );
-                    return;
-                  } else if (isFile(fileContents)) {
-                    form.append(param.name, fileContents, param.fileName);
-                    return;
-                  }
-                  throw new TypeError(
-                    "An unknown object has been supplied into the `files` config for use. We only support instances of the File API and Node Buffer objects."
-                  );
-                }
-              }
-              if ("value" in param && param.value) {
-                let paramBlob;
-                const parsed = _dataurls.parse.call(void 0, param.value);
-                if (parsed) {
-                  paramBlob = new Blob([param.value], { type: parsed.contentType || param.contentType || void 0 });
-                } else {
-                  paramBlob = new Blob([param.value], { type: param.contentType || void 0 });
-                }
-                form.append(param.name, paramBlob, param.fileName);
-                return;
-              }
-              throw new Error(
-                "The supplied HAR has a postData parameter with `fileName`, but neither `value` content within the HAR or any file buffers were supplied with the `files` option. Since this library doesn't have access to the filesystem, it can't fetch that file."
-              );
-            }
-            if (param.value)
-              form.append(param.name, param.value);
-          });
-          options.body = form;
-          break;
-        default:
-          const formBody = {};
-          (_c = request.postData.params) == null ? void 0 : _c.map((param) => {
-            try {
-              formBody[param.name] = JSON.parse(param.value || "");
-            } catch (e) {
-              formBody[param.name] = param.value;
-            }
-            return true;
-          });
-          options.body = JSON.stringify(formBody);
-      }
-    } else if ((_e = (_d = request.postData) == null ? void 0 : _d.text) == null ? void 0 : _e.length) {
-      if (opts.files) {
-        const parsed = _dataurls.parse.call(void 0, request.postData.text);
-        if (parsed) {
-          if ((parsed == null ? void 0 : parsed.name) && parsed.name in opts.files) {
-            const fileContents = getFileFromSuppliedFiles(parsed.name, opts.files);
-            if (fileContents) {
-              if (isBuffer(fileContents)) {
-                options.body = fileContents;
-              } else if (isFile(fileContents)) {
-                if (isBrowser()) {
-                  options.body = fileContents;
-                } else {
-                  options.body = fileContents.stream();
-                  shouldSetDuplex = true;
-                  if (!headers.has("content-length")) {
-                    headers.set("content-length", String(fileContents.size));
+function fetchHAR(_0) {
+  return __async(this, arguments, function* (har, opts = {}) {
+    var _a, _b, _c, _d, _e;
+    if (!har)
+      throw new Error("Missing HAR definition");
+    if (!har.log || !har.log.entries || !har.log.entries.length)
+      throw new Error("Missing log.entries array");
+    if (!globalThis.Blob) {
+      try {
+        const NodeBlob = (yield import("buffer")).Blob;
+        globalThis.Blob = NodeBlob;
+      } catch (e) {
+        throw new Error(
+          "The Blob API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/Blob"
+        );
+      }
+    }
+    if (!globalThis.File) {
+      try {
+        const UndiciFile = (yield import("undici")).File;
+        globalThis.File = UndiciFile;
+      } catch (e) {
+        throw new Error(
+          "The File API is required for this library. https://developer.mozilla.org/en-US/docs/Web/API/File"
+        );
+      }
+    }
+    const { request } = har.log.entries[0];
+    const { url } = request;
+    let querystring = "";
+    let shouldSetDuplex = false;
+    const options = __spreadProps(__spreadValues({}, opts.init ? opts.init : {}), {
+      method: request.method
+    });
+    if (!options.headers) {
+      options.headers = new Headers();
+    } else if (typeof options.headers === "object" && !(options.headers instanceof Headers) && options.headers !== null) {
+      options.headers = new Headers(options.headers);
+    }
+    const headers = options.headers;
+    if ("headers" in request && request.headers.length) {
+      request.headers.forEach((header) => {
+        try {
+          return headers.append(header.name, header.value);
+        } catch (err) {
+        }
+      });
+    }
+    if ("cookies" in request && request.cookies.length) {
+      if (isBrowser()) {
+        request.cookies.forEach((cookie) => {
+          document.cookie = `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`;
+        });
+        options.credentials = "include";
+      } else {
+        headers.append(
+          "cookie",
+          request.cookies.map((cookie) => `${encodeURIComponent(cookie.name)}=${encodeURIComponent(cookie.value)}`).join("; ")
+        );
+      }
+    }
+    if ("postData" in request) {
+      if (request.postData && "params" in request.postData) {
+        if (!("mimeType" in request.postData)) {
+          request.postData.mimeType = "application/octet-stream";
+        }
+        switch (request.postData.mimeType) {
+          case "application/x-www-form-urlencoded":
+            headers.set("Content-Type", request.postData.mimeType);
+            const encodedParams = new URLSearchParams();
+            (_a = request.postData.params) == null ? void 0 : _a.forEach((param) => {
+              if (param.value)
+                encodedParams.set(param.name, param.value);
+            });
+            options.body = encodedParams.toString();
+            break;
+          case "multipart/alternative":
+          case "multipart/form-data":
+          case "multipart/mixed":
+          case "multipart/related":
+            if (headers.has("Content-Type")) {
+              headers.delete("Content-Type");
+            }
+            const form = new FormData();
+            (_b = request.postData.params) == null ? void 0 : _b.forEach((param) => {
+              if ("fileName" in param && param.fileName) {
+                if (opts.files) {
+                  const fileContents = getFileFromSuppliedFiles(param.fileName, opts.files);
+                  if (fileContents) {
+                    if (isBuffer(fileContents)) {
+                      form.append(
+                        param.name,
+                        new File([fileContents], param.fileName, {
+                          type: param.contentType || void 0
+                        }),
+                        param.fileName
+                      );
+                      return;
+                    } else if (isFile(fileContents)) {
+                      form.append(param.name, fileContents, param.fileName);
+                      return;
+                    }
+                    throw new TypeError(
+                      "An unknown object has been supplied into the `files` config for use. We only support instances of the File API and Node Buffer objects."
+                    );
+                  }
+                }
+                if ("value" in param && param.value) {
+                  let paramBlob;
+                  const parsed = parseDataUrl(param.value);
+                  if (parsed) {
+                    paramBlob = new Blob([param.value], { type: parsed.contentType || param.contentType || void 0 });
+                  } else {
+                    paramBlob = new Blob([param.value], { type: param.contentType || void 0 });
+                  }
+                  form.append(param.name, paramBlob, param.fileName);
+                  return;
+                }
+                throw new Error(
+                  "The supplied HAR has a postData parameter with `fileName`, but neither `value` content within the HAR or any file buffers were supplied with the `files` option. Since this library doesn't have access to the filesystem, it can't fetch that file."
+                );
+              }
+              if (param.value)
+                form.append(param.name, param.value);
+            });
+            options.body = form;
+            break;
+          default:
+            const formBody = {};
+            (_c = request.postData.params) == null ? void 0 : _c.map((param) => {
+              try {
+                formBody[param.name] = JSON.parse(param.value || "");
+              } catch (e) {
+                formBody[param.name] = param.value;
+              }
+              return true;
+            });
+            options.body = JSON.stringify(formBody);
+        }
+      } else if ((_e = (_d = request.postData) == null ? void 0 : _d.text) == null ? void 0 : _e.length) {
+        if (opts.files) {
+          const parsed = parseDataUrl(request.postData.text);
+          if (parsed) {
+            if ((parsed == null ? void 0 : parsed.name) && parsed.name in opts.files) {
+              const fileContents = getFileFromSuppliedFiles(parsed.name, opts.files);
+              if (fileContents) {
+                if (isBuffer(fileContents)) {
+                  options.body = fileContents;
+                } else if (isFile(fileContents)) {
+                  if (isBrowser()) {
+                    options.body = fileContents;
+                  } else {
+                    options.body = fileContents.stream();
+                    shouldSetDuplex = true;
+                    if (!headers.has("content-length")) {
+                      headers.set("content-length", String(fileContents.size));
+                    }
+                  }
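The multipart branch above resolves each `fileName` parameter through getFileFromSuppliedFiles, so callers supply file contents up front via the `files` option as Node Buffers or File instances keyed by file name. A minimal sketch of that option, assuming a HAR on disk whose multipart postData references owlbert.png:

import fs from 'node:fs/promises';
import fetchHAR from 'fetch-har';

// Assumes ./request.har contains a multipart/form-data request whose postData
// params include a { name: 'image', fileName: 'owlbert.png' } entry.
const har = JSON.parse(await fs.readFile('./request.har', 'utf8'));

const res = await fetchHAR(har, {
  files: {
    // Keys must match the `fileName` values referenced in the HAR.
    'owlbert.png': await fs.readFile('./owlbert.png'),
  },
});
console.log(res.status);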
@@ -207,37 +228,36 @@ }
 }
-      if (typeof options.body === "undefined") {
-        options.body = request.postData.text;
-      }
-    }
-  }
-  if (shouldSetDuplex && !isBrowser()) {
-    options.duplex = "half";
-  }
-  let requestURL = url;
-  if ("queryString" in request && request.queryString.length) {
-    const urlObj = new URL(requestURL);
-    const queryParams = Array.from(urlObj.searchParams).map(([k, v]) => `${k}=${v}`);
-    request.queryString.forEach((q) => {
-      queryParams.push(`${q.name}=${q.value}`);
-    });
-    querystring = queryParams.join("&");
-    if (urlObj.hash) {
-      const urlWithoutHashes = requestURL.replace(urlObj.hash, "");
-      requestURL = `${urlWithoutHashes.split("?")[0]}${querystring ? `?${querystring}` : ""}`;
-      requestURL += urlObj.hash;
-    } else {
-      requestURL = `${requestURL.split("?")[0]}${querystring ? `?${querystring}` : ""}`;
-    }
-  }
-  if (opts.userAgent) {
-    headers.append("User-Agent", opts.userAgent);
-  }
-  options.headers = headers;
-  return fetch(requestURL, options);
-}
-exports.default = fetchHAR;
-module.exports = exports.default//# sourceMappingURL=index.js.map
+        if (typeof options.body === "undefined") {
+          options.body = request.postData.text;
+        }
+      }
+    }
+    if (shouldSetDuplex && !isBrowser()) {
+      options.duplex = "half";
+    }
+    let requestURL = url;
+    if ("queryString" in request && request.queryString.length) {
+      const urlObj = new URL(requestURL);
+      const queryParams = Array.from(urlObj.searchParams).map(([k, v]) => `${k}=${v}`);
+      request.queryString.forEach((q) => {
+        queryParams.push(`${q.name}=${q.value}`);
+      });
+      querystring = queryParams.join("&");
+      if (urlObj.hash) {
+        const urlWithoutHashes = requestURL.replace(urlObj.hash, "");
+        requestURL = `${urlWithoutHashes.split("?")[0]}${querystring ? `?${querystring}` : ""}`;
+        requestURL += urlObj.hash;
+      } else {
+        requestURL = `${requestURL.split("?")[0]}${querystring ? `?${querystring}` : ""}`;
+      }
+    }
+    if (opts.userAgent) {
+      headers.append("User-Agent", opts.userAgent);
+    }
+    options.headers = headers;
+    return fetch(requestURL, options);
+  });
+}
+export {
+  fetchHAR as default
+};
+//# sourceMappingURL=index.js.map
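Taken together, the index.js changes compile fetchHAR as an async function (the __async/yield wrapper above) and leave the ESM build with no top-level require. The call signature is unchanged and it still resolves to a fetch Response, so a minimal consumer looks the same as before. A sketch assuming Node 18+, where global fetch is available:

import fetchHAR from 'fetch-har';

// The smallest shape fetch-har needs: log.entries[0].request with a method and URL.
// A real HAR entry carries many more fields than this sketch shows.
const har = {
  log: {
    entries: [
      {
        request: {
          method: 'GET',
          url: 'https://httpbin.org/get',
          headers: [{ name: 'x-demo', value: 'fetch-har' }],
          queryString: [],
          cookies: [],
        },
      },
    ],
  },
};

const res = await fetchHAR(har);
console.log(res.status, await res.json());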
@@ -1,1 +0,1 @@
-"use strict";//# sourceMappingURL=types.js.map
+//# sourceMappingURL=types.js.map
 {
   "name": "fetch-har",
-  "version": "11.0.0",
+  "version": "11.0.1",
   "description": "Make a fetch request from a HAR definition",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "module": "dist/index.mjs",
+  "main": "dist/index.cjs",
+  "types": "dist/index.d.cts",
+  "module": "dist/index.js",
+  "type": "module",
   "sideEffects": false,
   "exports": {
     ".": {
-      "import": "./dist/index.mjs",
-      "require": "./dist/index.js"
+      "import": "./dist/index.js",
+      "require": "./dist/index.cjs"
     },
     "./types": {
-      "import": "./dist/types.mjs",
-      "require": "./dist/types.js"
+      "import": "./dist/types.js",
+      "require": "./dist/types.cjs"
     },
@@ -40,3 +41,3 @@ "./package.json": "./package.json"
     "type": "git",
-    "url": "git@github.com:readmeio/fetch-har.git"
+    "url": "git+ssh://git@github.com/readmeio/fetch-har.git"
   },
@@ -43,0 +44,0 @@ "license": "ISC",
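With "type": "module" added, the build artifacts swap roles: dist/index.js is now the ESM entry and dist/index.cjs the CommonJS one, and the exports map routes each consumer to the matching file. Roughly, under Node's conditional exports resolution (a sketch; how the default export surfaces from the CJS bundle depends on the bundler's interop shim):

// In an ESM file (resolves the "import" condition to ./dist/index.js):
import fetchHAR from 'fetch-har';

// In a CommonJS file (resolves the "require" condition to ./dist/index.cjs);
// depending on the interop shim the function may be exposed on `.default`:
const fetchHARFromCjs = require('fetch-har');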
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
New author
Supply chain risk: A new npm collaborator published a version of the package for the first time. New collaborators are usually benign additions to a project, but do indicate a change to the security surface area of a package.
Found 1 instance in 1 package
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
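This alert flags require calls whose target cannot be determined statically. The __require helper visible earlier in this diff is the sort of shim that carries such a call path: it wraps require behind a Proxy and throws 'Dynamic require of ... is not supported' when no CommonJS require exists, and the 11.0.1 ESM build replaces those call sites with dynamic import(). A generic, hypothetical illustration of the pattern such scanners flag (not code from fetch-har):

// Hypothetical: the module path is computed at runtime, so static analysis
// cannot tell ahead of time which file will be loaded.
function loadPlugin(name) {
  return require(`./plugins/${name}.cjs`);
}

module.exports = { loadPlugin };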