| "use strict"; | ||
| // Adapted from undici's lib/interceptor/decompress.js | ||
| // https://github.com/nodejs/undici/blob/main/lib/interceptor/decompress.js | ||
| // Changes: | ||
| // - Removed zstd support (requires runtimeFeatures check) | ||
| // - Removed experimental warning | ||
| // - Use undici's exported DecoratorHandler | ||
| const { createInflate, createGunzip, createBrotliDecompress } = require("zlib"); | ||
| const { pipeline } = require("stream"); | ||
| const { DecoratorHandler } = require("undici"); | ||
// Maps supported `content-encoding` tokens to their zlib decompressor factories.
// Note: "compress"/"x-compress" (LZW) are mapped to inflate, mirroring the upstream
// undici interceptor this file was adapted from rather than implementing LZW.
const supportedEncodings = {
"gzip": createGunzip,
"x-gzip": createGunzip,
"br": createBrotliDecompress,
"deflate": createInflate,
"compress": createInflate,
"x-compress": createInflate
};
// Statuses that never carry a body (No Content, Not Modified), so decompression is skipped.
const defaultSkipStatusCodes = [204, 304];
/**
 * Decompresses HTTP response bodies based on the `content-encoding` response
 * header, forwarding the decoded bytes to the wrapped handler. Supports gzip,
 * brotli, and deflate (plus their legacy aliases); unsupported or ambiguous
 * responses are passed through untouched.
 */
class DecompressHandler extends DecoratorHandler {
  // Active zlib decompression streams; index 0 receives the raw wire bytes.
  #decompressors = [];
  #skipStatusCodes;
  #skipErrorResponses;

  /**
   * @param {object} handler - Downstream undici handler to decorate
   * @param {object} [options]
   * @param {number[]} [options.skipStatusCodes=[204, 304]] - Statuses whose bodies are never decompressed
   * @param {boolean} [options.skipErrorResponses=true] - When true, >= 400 responses are passed through raw
   */
  constructor(handler, { skipStatusCodes = defaultSkipStatusCodes, skipErrorResponses = true } = {}) {
    super(handler);
    this.#skipStatusCodes = skipStatusCodes;
    this.#skipErrorResponses = skipErrorResponses;
  }

  // Returns true when the response should be forwarded without decompression.
  #shouldSkipDecompression(contentEncoding, statusCode) {
    if (!contentEncoding || statusCode < 200) {
      return true;
    }
    if (this.#skipStatusCodes.includes(statusCode)) {
      return true;
    }
    if (this.#skipErrorResponses && statusCode >= 400) {
      return true;
    }
    return false;
  }

  /**
   * Builds the chain of zlib streams needed to decode a (possibly multi-valued)
   * `content-encoding` header. Returns an empty array if any encoding in the
   * chain is unsupported, in which case the body is passed through raw.
   * @throws {Error} If the header lists more than 5 encodings.
   */
  #createDecompressionChain(encodings) {
    const parts = encodings.split(",");
    const maxContentEncodings = 5;
    if (parts.length > maxContentEncodings) {
      throw new Error(`too many content-encodings in response: ${parts.length}, max is ${maxContentEncodings}`);
    }
    const decompressors = [];
    // Encodings are applied left-to-right by the server, so decode right-to-left.
    for (let i = parts.length - 1; i >= 0; i--) {
      const encoding = parts[i].trim();
      if (!encoding) {
        continue;
      }
      // Use Object.hasOwn rather than a bare property read: a hostile server could
      // send e.g. `content-encoding: constructor`, and a plain lookup would find
      // inherited Object.prototype members instead of a decompressor factory.
      if (!Object.hasOwn(supportedEncodings, encoding)) {
        return [];
      }
      decompressors.push(supportedEncodings[encoding]());
    }
    return decompressors;
  }

  // Wires a decompressor's decoded output and errors to the downstream handler.
  #setupDecompressorEvents(decompressor, controller) {
    decompressor.on("readable", () => {
      let chunk;
      while ((chunk = decompressor.read()) !== null) {
        const result = super.onResponseData(controller, chunk);
        if (result === false) {
          // Downstream signaled backpressure; remaining data stays buffered in the stream.
          break;
        }
      }
    });
    decompressor.on("error", error => {
      super.onResponseError(controller, error);
    });
  }

  #setupSingleDecompressor(controller) {
    const decompressor = this.#decompressors[0];
    this.#setupDecompressorEvents(decompressor, controller);
    decompressor.on("end", () => {
      super.onResponseEnd(controller, {});
    });
  }

  #setupMultipleDecompressors(controller) {
    // Only the final stream in the pipeline produces fully-decoded output.
    const lastDecompressor = this.#decompressors[this.#decompressors.length - 1];
    this.#setupDecompressorEvents(lastDecompressor, controller);
    pipeline(this.#decompressors, err => {
      if (err) {
        super.onResponseError(controller, err);
        return;
      }
      super.onResponseEnd(controller, {});
    });
  }

  #cleanupDecompressors() {
    this.#decompressors.length = 0;
  }

  onResponseStart(controller, statusCode, headers, statusMessage) {
    const contentEncoding = headers["content-encoding"];
    if (this.#shouldSkipDecompression(contentEncoding, statusCode)) {
      return super.onResponseStart(controller, statusCode, headers, statusMessage);
    }
    const decompressors = this.#createDecompressionChain(contentEncoding.toLowerCase());
    if (decompressors.length === 0) {
      this.#cleanupDecompressors();
      return super.onResponseStart(controller, statusCode, headers, statusMessage);
    }
    this.#decompressors = decompressors;
    // Keep content-encoding and content-length headers as-is
    // XHR spec requires these to reflect the wire format, not the decoded body
    const newHeaders = { ...headers };
    if (this.#decompressors.length === 1) {
      this.#setupSingleDecompressor(controller);
    } else {
      this.#setupMultipleDecompressors(controller);
    }
    return super.onResponseStart(controller, statusCode, newHeaders, statusMessage);
  }

  onResponseData(controller, chunk) {
    if (this.#decompressors.length > 0) {
      // Feed raw wire bytes into the first decoder; decoded output reaches the
      // downstream handler via the event wiring set up in onResponseStart.
      this.#decompressors[0].write(chunk);
      return;
    }
    super.onResponseData(controller, chunk);
  }

  onResponseEnd(controller, trailers) {
    if (this.#decompressors.length > 0) {
      this.#decompressors[0].end();
      this.#cleanupDecompressors();
      return;
    }
    super.onResponseEnd(controller, trailers);
  }

  onResponseError(controller, err) {
    if (this.#decompressors.length > 0) {
      for (const decompressor of this.#decompressors) {
        decompressor.destroy(err);
      }
      this.#cleanupDecompressors();
    }
    super.onResponseError(controller, err);
  }
}
/**
 * Creates an undici interceptor that transparently decompresses response bodies.
 *
 * @param {object} [options] - Options forwarded to DecompressHandler
 * @returns {function} An undici interceptor (dispatch => dispatch)
 */
function createDecompressInterceptor(options = {}) {
  return dispatch => (opts, handler) =>
    dispatch(opts, new DecompressHandler(handler, options));
}
| module.exports = createDecompressInterceptor; |
| "use strict"; | ||
| const fs = require("fs"); | ||
| const { Readable } = require("stream"); | ||
| const { fileURLToPath } = require("url"); | ||
| const { parseURL, serializeURL, serializeURLOrigin, serializePath } = require("whatwg-url"); | ||
| const dataURLFromRecord = require("data-urls").fromURLRecord; | ||
| const { Dispatcher } = require("undici"); | ||
| const WrapHandler = require("undici/lib/handler/wrap-handler.js"); | ||
| const { toBase64 } = require("@exodus/bytes/base64.js"); | ||
| const { utf8Encode } = require("../../living/helpers/encoding"); | ||
| const { sendStreamResponse } = require("./stream-handler"); | ||
| const packageVersion = require("../../../../package.json").version; | ||
// Browser-like User-Agent with the jsdom version appended, used when the embedder
// does not supply one.
const DEFAULT_USER_AGENT = `Mozilla/5.0 (${process.platform || "unknown OS"}) AppleWebKit/537.36 ` +
  `(KHTML, like Gecko) jsdom/${packageVersion}`;
// Redirect cap; 20 matches the Fetch standard's redirect limit.
const MAX_REDIRECTS = 20;
| /** | ||
| * JSDOMDispatcher - Full undici Dispatcher implementation for jsdom. | ||
| * | ||
| * Handles: | ||
| * - data: URLs (decode and return) | ||
| * - file: URLs (read from filesystem) | ||
| * - HTTP(S) and web sockets: follows redirects manually, capturing cookies at each hop | ||
| * | ||
| * Callers should provide the expected opaque fields when possible, to ensure that various parts of the jsdom pipeline | ||
| * have enough information. See the `dispatch()` JSDoc for details. | ||
| */ | ||
class JSDOMDispatcher extends Dispatcher {
  #baseDispatcher;
  #cookieJar;
  #userAgent;
  #userInterceptors;

  /**
   * @param {object} options
   * @param {Dispatcher} options.baseDispatcher - Underlying undici dispatcher that performs the actual network I/O
   * @param {object} options.cookieJar - Cookie jar exposing getCookieStringSync/setCookieSync
   *   (presumably a tough-cookie jar — confirm against callers)
   * @param {string} [options.userAgent] - User-Agent header value; falls back to DEFAULT_USER_AGENT
   * @param {function[]} [options.userInterceptors] - undici interceptors wrapped around the base dispatcher
   */
  constructor({ baseDispatcher, cookieJar, userAgent, userInterceptors = [] }) {
    super();
    this.#baseDispatcher = baseDispatcher;
    this.#cookieJar = cookieJar;
    this.#userAgent = userAgent || DEFAULT_USER_AGENT;
    this.#userInterceptors = userInterceptors;
  }

  /**
   * Dispatch a request through the jsdom resource loading pipeline.
   *
   * Vaguely corresponds to:
   * - https://fetch.spec.whatwg.org/#concept-fetch: in theory, all jsdom fetches should go through here, like all web
   *   platform fetches go through #concept-fetch.
   * - https://fetch.spec.whatwg.org/#concept-scheme-fetch: the code is more like skipping straight to scheme fetch.
   *
   * @param {object} opts - undici dispatch options
   * @param {object} [opts.opaque] - jsdom-specific request context (may be undefined for WebSocket upgrades)
   * @param {Element|null} opts.opaque.element - DOM element that triggered the request
   * @param {string} opts.opaque.url - Full request URL (since we cannot reconstruct it from `opts.origin + opts.path`
   *   for `file:` URLs). If given, `opts.origin`, `opts.path`, and `opts.query` are ignored.
   * @param {string} [opts.opaque.origin] - Request origin for CORS (used by XHR)
   * @param {boolean} [opts.opaque.corsMode] - Enable CORS validation during redirects (used by XHR)
   * @param {boolean} [opts.opaque.withCredentials] - Include cookies cross-origin (used by XHR)
   * @param {Object} [opts.opaque.auth] - Auth credentials {user, pass} for 401 Basic auth handling
   * @param {Object} [opts.opaque.preflight] - If present, do CORS preflight before main request
   * @param {string[]} [opts.opaque.preflight.unsafeHeaders] - Non-simple headers that need to be allowed
   * @param {object} handler - undici handler
   * @returns {boolean} false only when the URL cannot be parsed; true otherwise
   */
  dispatch(opts, handler) {
    // Wrap handler to normalize OLD API (onConnect/onHeaders/onData/onComplete/onError) to NEW API
    // (onRequestStart/onResponseStart/onResponseData/onResponseEnd/onResponseError). This is necessary because undici's
    // internals call the old API a lot, despite it being undocumented:
    // * https://github.com/nodejs/undici/issues/4771
    // * https://github.com/nodejs/undici/issues/4780
    const wrappedHandler = WrapHandler.wrap(handler);
    // Get URL from opaque if present (required for file: URLs since they have origin "null"),
    // otherwise reconstruct from opts.origin + opts.path (works for http/https/ws/wss)
    const urlString = opts.opaque?.url || (opts.origin + opts.path);
    const urlRecord = parseURL(urlString);
    if (urlRecord === null) {
      wrappedHandler.onResponseError?.(null, new TypeError(`Invalid URL: ${urlString}`));
      return false;
    }
    if (urlRecord.scheme === "data") {
      return this.#dispatchDataURL(urlRecord, wrappedHandler);
    }
    if (urlRecord.scheme === "file") {
      return this.#dispatchFileURL(urlRecord, wrappedHandler);
    }
    // HTTP(S) - handles redirects, CORS, preflight, and WebSocket upgrades.
    // Intentionally not awaited: outcomes are delivered via the handler callbacks.
    this.#dispatchHTTP(urlRecord, wrappedHandler, opts);
    return true;
  }

  /**
   * Handle `data:` URLs by decoding them and returning the body.
   *
   * Corresponds fairly directly to https://fetch.spec.whatwg.org/#concept-scheme-fetch's "data" scheme case.
   */
  #dispatchDataURL(urlRecord, handler) {
    const dataURL = dataURLFromRecord(urlRecord);
    if (dataURL === null) {
      const error = new TypeError("Invalid data: URL");
      handler.onResponseError?.(null, error);
      return false;
    }
    // Wrap the decoded bytes in a one-chunk stream so the normal streaming path applies.
    const stream = Readable.from([dataURL.body]);
    sendStreamResponse(handler, stream, {
      status: 200,
      statusText: "OK",
      headers: { "content-type": dataURL.mimeType.toString() },
      context: { finalURL: urlRecord }
    });
    return true;
  }

  /**
   * Handle `file:` URLs by reading from the filesystem.
   *
   * Corresponds fairly directly to https://fetch.spec.whatwg.org/#concept-scheme-fetch's "file" scheme case.
   */
  #dispatchFileURL(urlRecord, handler) {
    const filePath = fileURLToPath(serializeURL(urlRecord));
    const stream = fs.createReadStream(filePath);
    // NOTE(review): read errors (e.g. missing file) presumably surface through the
    // stream and are forwarded by sendStreamResponse — confirm against that helper.
    sendStreamResponse(handler, stream, {
      status: 200,
      statusText: "OK",
      context: { finalURL: urlRecord }
    });
    return true;
  }

  /**
   * High-level HTTP(S) fetch with redirect handling, CORS validation, and preflight.
   *
   * Corresponds roughly to https://fetch.spec.whatwg.org/#concept-http-fetch, although some parts of
   * https://fetch.spec.whatwg.org/#concept-fetch also live here.
   *
   * Fire-and-forget: the returned promise is never awaited by `dispatch()`; every
   * outcome (response, upgrade, error, redirect-limit) is reported via `handler`.
   */
  async #dispatchHTTP(urlRecord, handler, opts) {
    const { corsMode, origin, withCredentials, auth, preflight } = opts.opaque || {};
    const requestFragment = urlRecord.fragment;
    let currentURL = urlRecord;
    let currentMethod = opts.method || "GET";
    let currentBody = opts.body ?? null;
    const currentHeaders = { ...this.#normalizeHeadersToObject(opts.headers) };
    let effectiveOrigin = origin; // CORS tracking - may become "null" after cross-origin redirects
    let receivedAuthChallenge = false;
    const ctx = { finalURL: null };
    // Create a proxy controller that forwards to the current underlying controller.
    // This provides a stable reference across redirect hops.
    let currentController;
    let onRequestStartCalled = false;
    const proxyController = {
      abort(reason) {
        currentController.abort(reason);
      },
      pause() {
        currentController.pause();
      },
      resume() {
        currentController.resume();
      },
      get paused() {
        return currentController.paused;
      },
      get aborted() {
        return currentController.aborted;
      },
      get reason() {
        return currentController.reason;
      }
    };
    // Callback for #doSingleRequest to invoke when a controller becomes available.
    // onRequestStart is only surfaced to the outer handler once, on the first hop.
    function onControllerReady(controller) {
      currentController = controller;
      if (!onRequestStartCalled) {
        onRequestStartCalled = true;
        handler.onRequestStart?.(proxyController, ctx);
      }
    }
    // Handle CORS preflight if needed
    if (preflight) {
      const preflightHeaders = {
        Origin: origin
      };
      preflightHeaders["Access-Control-Request-Method"] = currentMethod;
      if (preflight.unsafeHeaders?.length > 0) {
        preflightHeaders["Access-Control-Request-Headers"] = preflight.unsafeHeaders.join(", ");
      }
      const preflightResult = await this.#doSingleRequest(
        currentURL,
        "OPTIONS",
        preflightHeaders,
        null,
        { ...opts.opaque, origin, withCredentials },
        undefined, // no upgrade for preflight
        opts,
        onControllerReady
      );
      if (preflightResult.error) {
        handler.onResponseError?.(null, preflightResult.error);
        return;
      }
      // Validate preflight response status
      if (preflightResult.status < 200 || preflightResult.status > 299) {
        handler.onResponseError?.(null, new Error(
          "Response for preflight has invalid HTTP status code " + preflightResult.status
        ));
        return;
      }
      // CORS validation on preflight response
      const acao = preflightResult.headers["access-control-allow-origin"];
      if (acao !== "*" && acao !== origin) {
        handler.onResponseError?.(null, new Error("Cross origin " + origin + " forbidden"));
        return;
      }
      if (withCredentials) {
        const acac = preflightResult.headers["access-control-allow-credentials"];
        if (acac !== "true") {
          handler.onResponseError?.(null, new Error("Credentials forbidden"));
          return;
        }
      }
      // Validate allowed headers
      const acahStr = preflightResult.headers["access-control-allow-headers"];
      const acah = new Set(acahStr ? acahStr.toLowerCase().split(/,\s*/) : []);
      if (!acah.has("*")) {
        for (const unsafeHeader of preflight.unsafeHeaders || []) {
          if (!acah.has(unsafeHeader.toLowerCase())) {
            handler.onResponseError?.(null, new Error("Header " + unsafeHeader + " forbidden"));
            return;
          }
        }
      }
    }
    // Redirect loop
    for (let redirectCount = 0; redirectCount <= MAX_REDIRECTS; redirectCount++) {
      ctx.finalURL = currentURL;
      const currentOrigin = serializeURLOrigin(currentURL);
      // Clone headers for this request
      const requestHeaders = { ...currentHeaders };
      // Add auth header if needed
      if (receivedAuthChallenge && auth) {
        const authString = `${auth.user || ""}:${auth.pass || ""}`;
        requestHeaders.Authorization = "Basic " + toBase64(utf8Encode(authString));
      }
      const result = await this.#doSingleRequest(
        currentURL,
        currentMethod,
        requestHeaders,
        currentBody,
        { ...opts.opaque, origin, withCredentials },
        opts.upgrade,
        opts,
        onControllerReady
      );
      // WebSocket upgrade
      if (result.upgraded) {
        handler.onRequestUpgrade?.(proxyController, result.statusCode, result.headers, result.socket);
        return;
      }
      if (result.error) {
        handler.onResponseError?.(null, result.error);
        return;
      }
      // Handle 401 auth challenge (the retry consumes one slot of the redirect budget)
      if (result.status === 401 && auth && !receivedAuthChallenge) {
        const wwwAuth = result.headers["www-authenticate"] || "";
        if (/^Basic /i.test(wwwAuth)) {
          receivedAuthChallenge = true;
          continue;
        }
      }
      // Handle redirect
      const { location } = result.headers;
      if (result.status >= 300 && result.status < 400 && location) {
        const targetURL = parseURL(location, { baseURL: currentURL });
        if (!targetURL) {
          handler.onResponseError?.(null, new TypeError("Invalid redirect URL"));
          return;
        }
        // Per fetch spec: if location's fragment is null, inherit from request
        if (targetURL.fragment === null) {
          targetURL.fragment = requestFragment;
        }
        // Per fetch spec: if locationURL's scheme is not HTTP(S), return a network error
        if (targetURL.scheme !== "http" && targetURL.scheme !== "https") {
          handler.onResponseError?.(null, new Error("Cannot redirect to non-HTTP(S) URL"));
          return;
        }
        // Method change per fetch spec "HTTP-redirect fetch"
        // 301/302 + POST → GET, 303 + non-GET/HEAD → GET
        if (((result.status === 301 || result.status === 302) && currentMethod === "POST") ||
            (result.status === 303 && !["GET", "HEAD"].includes(currentMethod))) {
          currentMethod = "GET";
          currentBody = null;
          // The body is dropped, so body-describing headers must be dropped too
          this.#deleteRequestHeader(currentHeaders, "content-encoding");
          this.#deleteRequestHeader(currentHeaders, "content-language");
          this.#deleteRequestHeader(currentHeaders, "content-location");
          this.#deleteRequestHeader(currentHeaders, "content-type");
        }
        const targetOrigin = serializeURLOrigin(targetURL);
        // Authorization header removal on cross-origin redirect
        if (currentOrigin !== targetOrigin) {
          this.#deleteRequestHeader(currentHeaders, "authorization");
        }
        // CORS handling for cross-origin redirects (only if origin is set, indicating XHR/fetch)
        const targetIsCrossOrigin = origin !== undefined && origin !== targetOrigin;
        if (corsMode || targetIsCrossOrigin) {
          // CORS validation on redirect response (if source was cross-origin)
          if (origin !== currentOrigin) {
            const acao = result.headers["access-control-allow-origin"];
            if (acao !== "*" && acao !== origin) {
              handler.onResponseError?.(null, new Error("Cross origin " + origin + " forbidden"));
              return;
            }
            if (withCredentials) {
              const acac = result.headers["access-control-allow-credentials"];
              if (acac !== "true") {
                handler.onResponseError?.(null, new Error("Credentials forbidden"));
                return;
              }
            }
            // Userinfo check - forbid redirects to URLs with username/password
            if (targetURL.username || targetURL.password) {
              handler.onResponseError?.(null, new Error("Userinfo forbidden in cors redirect"));
              return;
            }
            // Update effective origin - becomes "null" after cross-origin→cross-origin redirect
            if (currentOrigin !== targetOrigin) {
              effectiveOrigin = "null";
            }
          }
          // Add Origin header for cross-origin target or if effective origin became "null"
          if (targetIsCrossOrigin || effectiveOrigin === "null") {
            currentHeaders.Origin = effectiveOrigin;
          }
        }
        currentURL = targetURL;
        continue;
      }
      // Final response - CORS validation (if destination is cross-origin or effective origin is "null")
      if (origin !== undefined && (origin !== currentOrigin || effectiveOrigin === "null")) {
        const acao = result.headers["access-control-allow-origin"];
        if (acao !== "*" && acao !== effectiveOrigin) {
          handler.onResponseError?.(null, new Error("Cross origin " + effectiveOrigin + " forbidden"));
          return;
        }
        if (withCredentials) {
          const acac = result.headers["access-control-allow-credentials"];
          if (acac !== "true") {
            handler.onResponseError?.(null, new Error("Credentials forbidden"));
            return;
          }
        }
      }
      // Stream response to handler
      handler.onResponseStart?.(proxyController, result.status, result.headers, result.statusText);
      // Forward body chunks to handler
      result.forwardBodyTo(handler);
      return;
    }
    handler.onResponseError?.(null, new Error(`Too many redirects (max ${MAX_REDIRECTS})`));
  }

  /**
   * Perform a single HTTP request (no redirects).
   * Handles cookies based on cross-origin/credentials settings.
   * Returns response metadata immediately, with a forwardBodyTo() method to stream the body later.
   *
   * For WebSocket upgrades, returns { upgraded: true, controller, statusCode, headers, socket }.
   *
   * Mostly corresponds to https://fetch.spec.whatwg.org/#concept-http-network-fetch.
   *
   * The returned promise never rejects: all failures resolve as { error }.
   *
   * @param {object} url - URL record to request
   * @param {string} method - HTTP method
   * @param {object} headers - Request headers
   * @param {*} body - Request body
   * @param {object} opaque - jsdom opaque options
   * @param {string} upgrade - Upgrade protocol (e.g., "websocket")
   * @param {object} originalOpts - Original dispatch options to preserve extra undici options
   * @param {function} onControllerReady - Callback invoked when controller is available
   */
  async #doSingleRequest(url, method, headers, body, opaque, upgrade, originalOpts, onControllerReady) {
    const { origin: requestOrigin, withCredentials } = opaque || {};
    // Build headers with defaults
    const requestHeaders = { ...headers };
    this.#setDefaultHeaders(requestHeaders);
    if (body === null && (method === "POST" || method === "PUT")) {
      requestHeaders["Content-Length"] = "0";
    } else if (body !== null && body.byteLength !== undefined) {
      // The `body.byteLength !== undefined` check is equivalent to the spec case where httpRequest's body's length is
      // null, because body is a stream.
      requestHeaders["Content-Length"] = String(body.byteLength);
    }
    // Determine if this is cross-origin (for cookie handling)
    const urlOrigin = serializeURLOrigin(url);
    const crossOrigin = requestOrigin !== undefined && requestOrigin !== urlOrigin;
    // Only handle cookies for same-origin requests, or cross-origin with credentials
    // Don't send cookies for preflight requests
    const isPreflight = method === "OPTIONS" &&
      this.#hasRequestHeader(headers, "Access-Control-Request-Method");
    const shouldHandleCookies = (!crossOrigin || withCredentials) && !isPreflight;
    const urlSerialized = serializeURL(url);
    if (shouldHandleCookies) {
      const cookieString = this.#cookieJar.getCookieStringSync(urlSerialized);
      if (cookieString) {
        requestHeaders.Cookie = cookieString;
      }
    }
    // Spread original opts to preserve extra undici options (e.g., idempotent, bodyTimeout),
    // then override with our specific values.
    // If opaque.url was provided, derive origin/path from it and null out query.
    // Otherwise, pass through origin/path/query unchanged.
    const hasOpaqueURL = opaque?.url !== undefined;
    const dispatchOpts = {
      ...originalOpts,
      origin: hasOpaqueURL ? urlOrigin : originalOpts.origin,
      path: hasOpaqueURL ? serializePathForUndici(url) : originalOpts.path,
      query: hasOpaqueURL ? null : originalOpts.query,
      method,
      headers: requestHeaders,
      body,
      upgrade,
      opaque: { ...opaque, url: urlSerialized }
    };
    const innerDispatch = this.#buildDispatchChain();
    return new Promise(resolve => {
      let responseHeaders, streamError;
      let bodyHandler = null;
      // Chunks received before the caller attaches a body handler are buffered here.
      let pendingChunks = [];
      let ended = false;
      let responseStarted = false;
      innerDispatch(dispatchOpts, {
        onRequestStart: controller => {
          onControllerReady(controller);
        },
        onRequestUpgrade: (controller, statusCode, headersObj, socket) => {
          if (controller.aborted) {
            resolve({ error: controller.reason });
            return;
          }
          if (shouldHandleCookies) {
            this.#storeCookiesFromHeaders(headersObj, urlSerialized);
          }
          resolve({ upgraded: true, controller, statusCode, headers: headersObj, socket });
        },
        onResponseStart: (controller, statusCode, headersObj, statusText) => {
          if (controller.aborted) {
            resolve({ error: controller.reason });
            return;
          }
          responseHeaders = headersObj;
          responseStarted = true;
          // Create a mechanism to forward body to handler later
          function forwardBodyTo(fwdHandler) {
            bodyHandler = fwdHandler;
            // Forward any chunks that arrived before forwardBodyTo was called
            for (const chunk of pendingChunks) {
              fwdHandler.onResponseData?.(controller, chunk);
            }
            pendingChunks = null;
            // Replay a terminal event (error or end) if the stream already finished
            if (streamError) {
              fwdHandler.onResponseError?.(controller, streamError);
            } else if (ended) {
              fwdHandler.onResponseEnd?.(controller, {});
            }
          }
          resolve({
            status: statusCode,
            statusText: statusText || "",
            headers: responseHeaders,
            url,
            forwardBodyTo
          });
        },
        onResponseData: (controller, chunk) => {
          if (controller.aborted) {
            return;
          }
          if (bodyHandler) {
            bodyHandler.onResponseData?.(controller, chunk);
          } else {
            pendingChunks.push(chunk);
          }
        },
        onResponseEnd: (controller, trailers) => {
          if (controller.aborted) {
            // An aborted end is surfaced as an error, immediately or on attach
            if (bodyHandler) {
              bodyHandler.onResponseError?.(controller, controller.reason);
            } else {
              streamError = controller.reason;
            }
            return;
          }
          if (shouldHandleCookies) {
            this.#storeCookiesFromHeaders(responseHeaders, urlSerialized);
          }
          if (bodyHandler) {
            bodyHandler.onResponseEnd?.(controller, trailers);
          } else {
            ended = true;
          }
        },
        onResponseError: (controller, err) => {
          if (responseStarted) {
            // Error occurred mid-stream - forward to body handler
            if (bodyHandler) {
              bodyHandler.onResponseError?.(controller, err);
            } else {
              streamError = err;
            }
          } else {
            resolve({ error: err });
          }
        }
      });
    });
  }

  /**
   * Build the dispatch chain with user interceptors applied.
   */
  #buildDispatchChain() {
    let innerDispatch = (opts, h) => {
      return this.#baseDispatcher.dispatch(opts, h);
    };
    // Apply user interceptors from innermost to outermost
    for (let i = this.#userInterceptors.length - 1; i >= 0; i--) {
      const interceptor = this.#userInterceptors[i];
      const nextDispatch = innerDispatch;
      innerDispatch = (opts, h) => interceptor(nextDispatch)(opts, h);
    }
    return innerDispatch;
  }

  /**
   * Normalize headers to an object format.
   * Callers pass either HeaderList (iterable) or plain objects.
   */
  #normalizeHeadersToObject(headers) {
    if (!headers) {
      return {};
    }
    // HeaderList has Symbol.iterator; plain objects don't
    if (typeof headers[Symbol.iterator] === "function") {
      const obj = {};
      for (const [name, value] of headers) {
        obj[name] = value;
      }
      return obj;
    }
    return { ...headers };
  }

  /**
   * Check if a request header exists (case-insensitive).
   * Request headers may have user-controlled casing.
   */
  #hasRequestHeader(requestHeaders, name) {
    const lowerName = name.toLowerCase();
    return Object.keys(requestHeaders).some(key => key.toLowerCase() === lowerName);
  }

  /**
   * Delete a request header (case-insensitive).
   * Request headers may have user-controlled casing. Mutates the object in place.
   */
  #deleteRequestHeader(requestHeaders, name) {
    const lowerName = name.toLowerCase();
    for (const key of Object.keys(requestHeaders)) {
      if (key.toLowerCase() === lowerName) {
        delete requestHeaders[key];
      }
    }
  }

  /**
   * Set default request headers if not already present.
   * Mutates the headers object in place.
   */
  #setDefaultHeaders(requestHeaders) {
    if (!this.#hasRequestHeader(requestHeaders, "User-Agent")) {
      requestHeaders["User-Agent"] = this.#userAgent;
    }
    if (!this.#hasRequestHeader(requestHeaders, "Accept-Language")) {
      requestHeaders["Accept-Language"] = "en";
    }
    if (!this.#hasRequestHeader(requestHeaders, "Accept")) {
      requestHeaders.Accept = "*/*";
    }
    if (!this.#hasRequestHeader(requestHeaders, "Accept-Encoding")) {
      requestHeaders["Accept-Encoding"] = "gzip, deflate";
    }
  }

  /**
   * Extract and store cookies from response headers.
   * Errors from individual cookies are deliberately ignored (best-effort storage).
   */
  #storeCookiesFromHeaders(headers, url) {
    if (!headers["set-cookie"]) {
      return;
    }
    const cookies = Array.isArray(headers["set-cookie"]) ?
      headers["set-cookie"] :
      [headers["set-cookie"]];
    for (const cookie of cookies) {
      this.#cookieJar.setCookieSync(cookie, url, { ignoreError: true });
    }
  }

  // Dispatcher API methods - forward close/destroy to base dispatcher
  close(...args) {
    return this.#baseDispatcher.close(...args);
  }

  destroy(...args) {
    return this.#baseDispatcher.destroy(...args);
  }

  /**
   * Create a new JSDOMDispatcher with additional interceptors.
   * The new interceptors are added as the outermost (first to see requests, last to see responses).
   */
  compose(...additionalInterceptors) {
    return new JSDOMDispatcher({
      baseDispatcher: this.#baseDispatcher,
      cookieJar: this.#cookieJar,
      userAgent: this.#userAgent,
      userInterceptors: [...additionalInterceptors, ...this.#userInterceptors]
    });
  }

  get closed() {
    return this.#baseDispatcher.closed;
  }

  get destroyed() {
    return this.#baseDispatcher.destroyed;
  }
}
/**
 * Fetch a URL with GET and buffer the entire response body. Used for subresources and `JSDOM.fromURL()`.
 *
 * @param {Dispatcher} dispatcher - The undici dispatcher to use
 * @param {object} opts - Request options
 * @param {string} opts.url - The URL to fetch
 * @param {object} [opts.headers] - Request headers (include Referer if needed)
 * @param {AbortSignal} [opts.signal] - Abort signal
 * @param {Element} [opts.element] - The element initiating the request (default: null)
 * @returns {Promise<{status: number, headers: object, body: Uint8Array, url: string, ok: boolean}>}
 * @throws {TypeError} If `url` cannot be parsed
 */
async function fetchCollected(dispatcher, { url, headers, signal, element = null }) {
  const urlRecord = parseURL(url);
  if (urlRecord === null) {
    throw new TypeError(`Invalid URL: ${url}`);
  }

  const response = await dispatcher.request({
    origin: serializeURLOrigin(urlRecord),
    path: serializePathForUndici(urlRecord),
    method: "GET",
    headers,
    signal,
    opaque: { element, url }
  });

  const { statusCode } = response;
  const body = await response.body.bytes();

  // The dispatcher records the post-redirect URL on the request context.
  return {
    status: statusCode,
    headers: response.headers,
    body,
    url: serializeURL(response.context.finalURL),
    ok: statusCode >= 200 && statusCode < 300
  };
}
| /** | ||
| * Serialize a URL record's path and query for undici's `path` option. | ||
| */ | ||
| function serializePathForUndici(urlRecord) { | ||
| return serializePath(urlRecord) + (urlRecord.query ? "?" + urlRecord.query : ""); | ||
| } | ||
// Public API: the dispatcher class, its default user agent string, and the collected-fetch helper.
module.exports = {
  JSDOMDispatcher,
  DEFAULT_USER_AGENT,
  fetchCollected
};
| "use strict"; | ||
| const { Readable } = require("stream"); | ||
| const { sendStreamResponse } = require("./stream-handler"); | ||
| /** | ||
| * Creates a user-friendly `undici` interceptor for jsdom. | ||
| * | ||
| * This helper allows users to intercept requests using a callback that receives a `Request` object and can return a | ||
| * promise for a `Response` to provide a synthetic response, or a promise for `undefined` to pass through. | ||
| * | ||
| * @param {function} fn - Async callback function that receives (request, context) and can return a Response | ||
| * @returns {function} An undici interceptor | ||
| * | ||
| * @example | ||
| * const dom = new JSDOM(html, { | ||
| * interceptors: [ | ||
| * requestInterceptor(async (request, { element }) => { | ||
| * console.log(`${element?.localName || 'XHR'} requested ${request.url}`); | ||
| * if (request.url.endsWith('/test.js')) { | ||
| * return new Response('window.mocked = true;', { | ||
| * headers: { 'Content-Type': 'application/javascript' } | ||
| * }); | ||
| * } | ||
| * // Return undefined to let the request pass through | ||
| * }) | ||
| * ] | ||
| * }); | ||
| * | ||
| * ## Why this doesn't use undici's DecoratorHandler pattern | ||
| * | ||
| * The standard undici interceptor pattern (see e.g. undici/lib/interceptor/dump.js) uses DecoratorHandler | ||
| * to wrap handler callbacks. That pattern calls `dispatch()` synchronously, then observes/modifies the | ||
| * request and response as they flow through the handler callbacks. | ||
| * | ||
| * This interceptor needs to support async user functions that can BLOCK the request and potentially | ||
| * REPLACE it with a synthetic response. This requires: | ||
| * | ||
| * 1. Waiting for the async user function BEFORE deciding whether to dispatch | ||
| * 2. For synthetic responses, NEVER calling dispatch() at all | ||
| * | ||
| * Since synthetic responses bypass the normal undici dispatch flow entirely, there's no underlying | ||
| * dispatcher (Agent/Pool) to create a real controller for us. We provide our own controller object | ||
| * that delegates to a Node.js Readable stream for pause/resume/abort support, following the pattern | ||
| * from undici/lib/interceptor/cache.js. | ||
| */ | ||
| module.exports = function requestInterceptor(fn) { | ||
| return dispatch => (options, handler) => { | ||
| const { element = null, url } = options.opaque || {}; | ||
| const abortController = new AbortController(); | ||
| // Create undici controller and wrapped handler using the signal handler helper. | ||
| // The undici controller reflects abortController.signal's state and forwards to inner undici controller. | ||
| const { undiciController, wrappedHandler } = createSignalHandler(handler, abortController); | ||
| // Call onRequestStart immediately to wire into the abort chain. | ||
| handler.onRequestStart?.(undiciController, {}); | ||
| // Build Request object with our signal | ||
| const requestInit = { | ||
| method: options.method || "GET", | ||
| headers: options.headers, | ||
| signal: abortController.signal | ||
| }; | ||
| if (options.body !== undefined && options.body !== null) { | ||
| requestInit.body = options.body; | ||
| requestInit.duplex = "half"; | ||
| } | ||
| if (options.referrer) { | ||
| requestInit.referrer = options.referrer; | ||
| } | ||
| const request = new Request(url, requestInit); | ||
| new Promise(resolve => { | ||
| resolve(fn(request, { element })); | ||
| }) | ||
| .then(response => { | ||
| if (response instanceof Response) { | ||
| // Send synthetic response without ever starting real request | ||
| // response.body can be null for responses with no body | ||
| const stream = response.body ? Readable.fromWeb(response.body) : Readable.from([]); | ||
| sendStreamResponse(wrappedHandler, stream, { | ||
| status: response.status, | ||
| statusText: response.statusText, | ||
| headers: headersToUndici(response.headers) | ||
| }); | ||
| } else if (response !== undefined) { | ||
| throw new TypeError("requestInterceptor callback must return undefined or a Response"); | ||
| } else if (!abortController.signal.aborted) { | ||
| // Pass through to real request | ||
| dispatch(options, wrappedHandler); | ||
| } | ||
| }) | ||
| .catch(error => { | ||
| handler.onResponseError?.(undiciController, error); | ||
| }); | ||
| // Return true to indicate request is being handled | ||
| return true; | ||
| }; | ||
| }; | ||
| /** | ||
| * Creates an undici controller and wrapped handler that bridge an AbortController to undici's dispatch protocol. | ||
| * | ||
| * The undici controller reflects the AbortController's signal state and captures an inner undici controller | ||
| * (from pass-through dispatch or synthetic stream) for forwarding pause/resume/abort. | ||
| */ | ||
| function createSignalHandler(handler, abortController) { | ||
| let innerUndiciController = null; | ||
| const undiciController = { | ||
| abort(reason) { | ||
| abortController.abort(reason); | ||
| innerUndiciController?.abort(reason); | ||
| }, | ||
| pause() { | ||
| innerUndiciController?.pause(); | ||
| }, | ||
| resume() { | ||
| innerUndiciController?.resume(); | ||
| }, | ||
| get paused() { | ||
| return innerUndiciController?.paused ?? false; | ||
| }, | ||
| get aborted() { | ||
| return abortController.signal.aborted; | ||
| }, | ||
| get reason() { | ||
| return abortController.signal.reason; | ||
| } | ||
| }; | ||
| const wrappedHandler = { | ||
| onRequestStart(controller) { | ||
| innerUndiciController = controller; | ||
| }, | ||
| onRequestUpgrade(...args) { | ||
| handler.onRequestUpgrade?.(...args); | ||
| }, | ||
| onResponseStart(...args) { | ||
| handler.onResponseStart?.(...args); | ||
| }, | ||
| onResponseData(...args) { | ||
| handler.onResponseData?.(...args); | ||
| }, | ||
| onResponseEnd(...args) { | ||
| handler.onResponseEnd?.(...args); | ||
| }, | ||
| onResponseError(...args) { | ||
| handler.onResponseError?.(...args); | ||
| } | ||
| }; | ||
| return { undiciController, wrappedHandler }; | ||
| } | ||
| /** | ||
| * Converts a Headers object to the format undici expects. | ||
| * Handles multiple Set-Cookie headers via getSetCookie(). | ||
| */ | ||
| function headersToUndici(headers) { | ||
| const result = {}; | ||
| for (const [key, value] of headers) { | ||
| result[key] = value; | ||
| } | ||
| const cookies = headers.getSetCookie(); | ||
| if (cookies.length > 0) { | ||
| result["set-cookie"] = cookies; | ||
| } | ||
| return result; | ||
| } |
| "use strict"; | ||
| /** | ||
| * Sends a response to an undici handler by streaming data from a Node.js Readable stream. | ||
| * | ||
| * ## Why this exists | ||
| * | ||
| * undici's new handler API (onRequestStart, onResponseStart, onResponseData, onResponseEnd, onResponseError) | ||
| * requires a "controller" object with pause/resume/abort methods. When responses flow through undici's | ||
| * normal network stack, undici provides this controller. But when we generate responses ourselves | ||
| * (e.g., for file: URLs, data: URLs, or synthetic responses from interceptors), we need to provide | ||
| * our own controller. | ||
| * | ||
| * This helper creates a controller backed by a Node.js Readable stream, giving callers proper | ||
| * pause/resume and abort support. It follows the pattern from undici/lib/interceptor/cache.js. | ||
| * | ||
| * ## Usage | ||
| * | ||
| * ```javascript | ||
| * const stream = fs.createReadStream(filePath); | ||
| * // or: const stream = Readable.from(buffer); | ||
| * // or: const stream = Readable.fromWeb(response.body); | ||
| * | ||
| * sendStreamResponse(handler, stream, { | ||
| * status: 200, | ||
| * statusText: "OK", | ||
| * headers: { "content-type": "text/plain" }, | ||
| * context: { history: ["file:///path/to/file"] } | ||
| * }); | ||
| * ``` | ||
| * | ||
| * The function handles all stream events and translates them to handler callbacks. | ||
| * If the stream is destroyed (e.g., via controller.abort()), pending operations are cancelled. | ||
| * | ||
| * @param {object} handler - undici handler with new API methods (onRequestStart, onResponseStart, etc.) | ||
| * @param {Readable} stream - Node.js Readable stream containing the response body | ||
| * @param {object} options - Response metadata | ||
| * @param {number} options.status - HTTP status code | ||
| * @param {string} [options.statusText] - HTTP status text (default: "") | ||
| * @param {object} [options.headers] - Response headers as {name: value} object (default: {}) | ||
| * @param {object} [options.context] - Context object passed to onRequestStart (default: {}) | ||
| */ | ||
function sendStreamResponse(handler, stream, { status, statusText = "", headers = {}, context = {} }) {
  // Controller backed by the Readable: pause/resume map to stream flow control, abort destroys it,
  // and the aborted/reason getters are derived from the stream's errored state.
  const controller = {
    pause() {
      stream.pause();
    },
    resume() {
      stream.resume();
    },
    abort(reason) {
      stream.destroy(reason);
    },
    get paused() {
      return stream.isPaused();
    },
    get aborted() {
      return stream.errored !== null;
    },
    get reason() {
      return stream.errored;
    }
  };

  stream.on("data", chunk => handler.onResponseData?.(controller, chunk));
  stream.on("error", err => {
    // Errors arriving after a complete read are not reported; the response already ended.
    if (!stream.readableEnded) {
      handler.onResponseError?.(controller, err);
    }
  });
  stream.on("close", () => {
    if (!stream.errored) {
      handler.onResponseEnd?.(controller, {});
    }
  });

  handler.onRequestStart?.(controller, context);
  // onRequestStart may have aborted synchronously (destroying the stream); if so, stop here.
  if (!stream.destroyed) {
    handler.onResponseStart?.(controller, status, headers, statusText);
  }
}
| module.exports = { sendStreamResponse }; |
| "use strict"; | ||
// https://fetch.spec.whatwg.org/#header-name
// One or more HTTP token code points; `\w` covers the ASCII alphanumerics and underscore.
const HEADER_NAME_PATTERN = /^[!#$%&'*+\-.^`|~\w]+$/;
function isHeaderName(name) {
  return HEADER_NAME_PATTERN.test(name);
}
// https://fetch.spec.whatwg.org/#header-value
// Valid when it contains no NUL/CR/LF and has no leading or trailing tab/space.
// The empty string is a valid header value.
function isHeaderValue(value) {
  if (/[\0\r\n]/.test(value)) {
    return false;
  }
  const first = value[0];
  const last = value[value.length - 1];
  return first !== "\t" && first !== " " && last !== "\t" && last !== " ";
}
// https://fetch.spec.whatwg.org/#concept-header-value-normalize
// Strips leading, then trailing, HTTP whitespace (newline, carriage return, tab, space).
function normalizeHeaderValue(potentialValue) {
  return potentialValue.replace(/^[\n\r\t ]+/, "").replace(/[\n\r\t ]+$/, "");
}
// Header-name/value validation and normalization helpers, per the Fetch Standard.
module.exports = {
  isHeaderName,
  isHeaderValue,
  normalizeHeaderValue
};
| "use strict"; | ||
// Small wrapper around Node.js's native `TextEncoder`/`TextDecoder` to expose functions with names matching the
| // Encoding Standard, so it's more obvious what to use when implementing specs. See also discussion at | ||
| // https://github.com/ExodusOSS/bytes/issues/17. | ||
| // | ||
| // Note that we could also use `@exodus/bytes` instead of Node.js's native `TextEncoder`/`TextDecoder`. This could give | ||
| // benefits on non-Node.js environments. But, on Node.js, `@exodus/bytes` just delegates to the native classes, plus | ||
| // adds some extra type checks that we don't want. Per the discussion above, it fixes some bugs, but they are not | ||
| // relevant for our use cases. | ||
const sharedEncoder = new TextEncoder();
const bomStrippingDecoder = new TextDecoder("utf-8", { ignoreBOM: false, fatal: false });
const bomKeepingDecoder = new TextDecoder("utf-8", { ignoreBOM: true, fatal: false });
const bomKeepingFatalDecoder = new TextDecoder("utf-8", { ignoreBOM: true, fatal: true });

/**
 * Implements the <https://encoding.spec.whatwg.org/#utf-8-decode> algorithm. If there are three preceding BOM bytes,
 * they are discarded, and any lone surrogates become U+FFFD.
 *
 * @param {Uint8Array} bytes - The bytes to decode.
 * @returns {string} - The decoded string.
 */
exports.utf8Decode = bytes => bomStrippingDecoder.decode(bytes);

/**
 * Implements the <https://encoding.spec.whatwg.org/#utf-8-decode-without-bom> algorithm. If there are three preceding
 * BOM bytes, they become U+FEFF, and any lone surrogates become U+FFFD.
 *
 * @param {Uint8Array} bytes - The bytes to decode.
 * @returns {string} - The decoded string.
 */
exports.utf8DecodeWithoutBOM = bytes => bomKeepingDecoder.decode(bytes);

/**
 * Implements the <https://encoding.spec.whatwg.org/#utf-8-decode-without-bom-or-fail> algorithm. If there are three
 * preceding BOM bytes, they become U+FEFF, and any lone surrogates cause an exception.
 *
 * @param {Uint8Array} bytes - The bytes to decode.
 * @returns {string} - The decoded string.
 */
exports.utf8DecodeWithoutBOMOrFail = bytes => bomKeepingFatalDecoder.decode(bytes);

/**
 * Implements the <https://encoding.spec.whatwg.org/#utf-8-decode> algorithm, but also bundles in `USVString` conversion
 * (i.e. lone surrogates become 0xEF 0xBF 0xBD).
 *
 * @param {string} string - A string, possibly containing lone surrogates. If the string contains no lone surrogates,
 *   then this behaves as the spec algorithm. Otherwise, it behaves as the composition of the spec algorithm and
 *   `USVString` conversion.
 * @returns {Uint8Array} - The UTF-8 encoded bytes of the input string.
 */
exports.utf8Encode = string => sharedEncoder.encode(string);
+96
-58
| "use strict"; | ||
| const path = require("path"); | ||
| const { pathToFileURL } = require("url"); | ||
| const fs = require("fs").promises; | ||
@@ -10,3 +11,4 @@ const vm = require("vm"); | ||
| const { URL } = require("whatwg-url"); | ||
| const MIMEType = require("whatwg-mimetype"); | ||
| const { MIMEType } = require("whatwg-mimetype"); | ||
| const { getGlobalDispatcher } = require("undici"); | ||
| const idlUtils = require("./jsdom/living/generated/utils.js"); | ||
@@ -17,4 +19,7 @@ const VirtualConsole = require("./jsdom/virtual-console.js"); | ||
| const { fragmentSerialization } = require("./jsdom/living/domparsing/serialization.js"); | ||
| const ResourceLoader = require("./jsdom/browser/resources/resource-loader.js"); | ||
| const NoOpResourceLoader = require("./jsdom/browser/resources/no-op-resource-loader.js"); | ||
| const createDecompressInterceptor = require("./jsdom/browser/resources/decompress-interceptor.js"); | ||
| const { | ||
| JSDOMDispatcher, DEFAULT_USER_AGENT, fetchCollected | ||
| } = require("./jsdom/browser/resources/jsdom-dispatcher.js"); | ||
| const requestInterceptor = require("./jsdom/browser/resources/request-interceptor.js"); | ||
@@ -115,36 +120,33 @@ class CookieJar extends toughCookie.CookieJar { | ||
| static fromURL(url, options = {}) { | ||
| return Promise.resolve().then(() => { | ||
| // Remove the hash while sending this through the research loader fetch(). | ||
| // It gets added back a few lines down when constructing the JSDOM object. | ||
| const parsedURL = new URL(url); | ||
| const originalHash = parsedURL.hash; | ||
| parsedURL.hash = ""; | ||
| url = parsedURL.href; | ||
| static async fromURL(url, options = {}) { | ||
| options = normalizeFromURLOptions(options); | ||
| options = normalizeFromURLOptions(options); | ||
| // Build the dispatcher for the initial request | ||
| // For the initial fetch, we default to "usable" instead of no resource loading, since fromURL() implicitly requests | ||
| // fetching the initial resource. This does not impact further resource fetching, which uses options.resources. | ||
| const resourcesForInitialFetch = options.resources !== undefined ? options.resources : "usable"; | ||
| const { effectiveDispatcher } = extractResourcesOptions(resourcesForInitialFetch, options.cookieJar); | ||
| const resourceLoader = resourcesToResourceLoader(options.resources); | ||
| const resourceLoaderForInitialRequest = resourceLoader.constructor === NoOpResourceLoader ? | ||
| new ResourceLoader() : | ||
| resourceLoader; | ||
| const headers = { Accept: "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" }; | ||
| if (options.referrer) { | ||
| headers.Referer = options.referrer; | ||
| } | ||
| const req = resourceLoaderForInitialRequest.fetch(url, { | ||
| accept: "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", | ||
| cookieJar: options.cookieJar, | ||
| referrer: options.referrer | ||
| }); | ||
| const response = await fetchCollected(effectiveDispatcher, { | ||
| url, | ||
| headers | ||
| }); | ||
| return req.then(body => { | ||
| const res = req.response; | ||
| if (!response.ok) { | ||
| throw new Error(`Resource was not loaded. Status: ${response.status}`); | ||
| } | ||
| options = Object.assign(options, { | ||
| url: req.href + originalHash, | ||
| contentType: res.headers["content-type"], | ||
| referrer: req.getHeader("referer") ?? undefined | ||
| }); | ||
| options = Object.assign(options, { | ||
| url: response.url, | ||
| contentType: response.headers["content-type"] || undefined, | ||
| referrer: options.referrer, | ||
| resources: options.resources | ||
| }); | ||
| return new JSDOM(body, options); | ||
| }); | ||
| }); | ||
| return new JSDOM(response.body, options); | ||
| } | ||
@@ -154,5 +156,5 @@ | ||
| options = normalizeFromFileOptions(filename, options); | ||
| const buffer = await fs.readFile(filename); | ||
| const nodeBuffer = await fs.readFile(filename); | ||
| return new JSDOM(buffer, options); | ||
| return new JSDOM(nodeBuffer, options); | ||
| } | ||
@@ -188,2 +190,47 @@ } | ||
| function extractResourcesOptions(resources, cookieJar) { | ||
| // loadSubresources controls whether PerDocumentResourceLoader fetches scripts, stylesheets, etc. | ||
| // XHR always works regardless of this flag. | ||
| let userAgent, baseDispatcher, userInterceptors, loadSubresources; | ||
| if (resources === undefined) { | ||
| // resources: undefined means no automatic subresource fetching, but XHR still works | ||
| userAgent = DEFAULT_USER_AGENT; | ||
| baseDispatcher = getGlobalDispatcher(); | ||
| userInterceptors = []; | ||
| loadSubresources = false; | ||
| } else if (resources === "usable") { | ||
| // resources: "usable" means use all defaults | ||
| userAgent = DEFAULT_USER_AGENT; | ||
| baseDispatcher = getGlobalDispatcher(); | ||
| userInterceptors = []; | ||
| loadSubresources = true; | ||
| } else if (typeof resources === "object" && resources !== null) { | ||
| // resources: { userAgent?, dispatcher?, interceptors? } | ||
| userAgent = resources.userAgent !== undefined ? resources.userAgent : DEFAULT_USER_AGENT; | ||
| baseDispatcher = resources.dispatcher !== undefined ? resources.dispatcher : getGlobalDispatcher(); | ||
| userInterceptors = resources.interceptors !== undefined ? resources.interceptors : []; | ||
| loadSubresources = true; | ||
| } else { | ||
| throw new TypeError(`resources must be undefined, "usable", or an object`); | ||
| } | ||
| // User interceptors come first (outermost), then decompress interceptor | ||
| const allUserInterceptors = [ | ||
| ...userInterceptors, | ||
| createDecompressInterceptor() | ||
| ]; | ||
| return { | ||
| userAgent, | ||
| effectiveDispatcher: new JSDOMDispatcher({ | ||
| baseDispatcher, | ||
| cookieJar, | ||
| userAgent, | ||
| userInterceptors: allUserInterceptors | ||
| }), | ||
| loadSubresources | ||
| }; | ||
| } | ||
| function normalizeFromFileOptions(filename, options) { | ||
@@ -200,3 +247,3 @@ const normalized = { ...options }; | ||
| if (normalized.url === undefined) { | ||
| normalized.url = new URL("file:" + path.resolve(filename)); | ||
| normalized.url = pathToFileURL(path.resolve(filename)).href; | ||
| } | ||
@@ -225,3 +272,5 @@ | ||
| // Defaults filled in later | ||
| resourceLoader: undefined, | ||
| dispatcher: undefined, | ||
| loadSubresources: undefined, | ||
| userAgent: undefined, | ||
| virtualConsole: undefined, | ||
@@ -271,3 +320,7 @@ cookieJar: undefined | ||
| transformed.windowOptions.resourceLoader = resourcesToResourceLoader(options.resources); | ||
| const { userAgent, effectiveDispatcher, loadSubresources } = | ||
| extractResourcesOptions(options.resources, transformed.windowOptions.cookieJar); | ||
| transformed.windowOptions.userAgent = userAgent; | ||
| transformed.windowOptions.dispatcher = effectiveDispatcher; | ||
| transformed.windowOptions.loadSubresources = loadSubresources; | ||
@@ -301,9 +354,11 @@ if (options.runScripts !== undefined) { | ||
| if (ArrayBuffer.isView(html)) { | ||
| html = Buffer.from(html.buffer, html.byteOffset, html.byteLength); | ||
| if (html instanceof Uint8Array) { | ||
| // leave as-is | ||
| } else if (ArrayBuffer.isView(html)) { | ||
| html = new Uint8Array(html.buffer, html.byteOffset, html.byteLength); | ||
| } else if (html instanceof ArrayBuffer) { | ||
| html = Buffer.from(html); | ||
| html = new Uint8Array(html); | ||
| } | ||
| if (Buffer.isBuffer(html)) { | ||
| if (html instanceof Uint8Array) { | ||
| encoding = sniffHTMLEncoding(html, { | ||
@@ -321,19 +376,2 @@ xml: mimeType.isXML(), | ||
| function resourcesToResourceLoader(resources) { | ||
| switch (resources) { | ||
| case undefined: { | ||
| return new NoOpResourceLoader(); | ||
| } | ||
| case "usable": { | ||
| return new ResourceLoader(); | ||
| } | ||
| default: { | ||
| if (!(resources instanceof ResourceLoader)) { | ||
| throw new TypeError("resources must be an instance of ResourceLoader"); | ||
| } | ||
| return resources; | ||
| } | ||
| } | ||
| } | ||
| exports.JSDOM = JSDOM; | ||
@@ -343,4 +381,4 @@ | ||
| exports.CookieJar = CookieJar; | ||
| exports.ResourceLoader = ResourceLoader; | ||
| exports.requestInterceptor = requestInterceptor; | ||
| exports.toughCookie = toughCookie; |
| "use strict"; | ||
| const idlUtils = require("../../living/generated/utils"); | ||
| const { fireAnEvent } = require("../../living/helpers/events"); | ||
| const { fetchCollected } = require("./jsdom-dispatcher"); | ||
@@ -9,3 +10,5 @@ module.exports = class PerDocumentResourceLoader { | ||
| this._defaultEncoding = document._encoding; | ||
| this._resourceLoader = document._defaultView ? document._defaultView._resourceLoader : null; | ||
| const defaultView = document._defaultView; | ||
| this._dispatcher = defaultView ? defaultView._dispatcher : null; | ||
| this._loadSubresources = defaultView ? defaultView._loadSubresources : false; | ||
| this._requestManager = document._requestManager; | ||
@@ -18,17 +21,31 @@ this._queue = document._queue; | ||
| fetch(url, { element, onLoad, onError }) { | ||
| const request = this._resourceLoader.fetch(url, { | ||
| cookieJar: this._document._cookieJar, | ||
| element: idlUtils.wrapperForImpl(element), | ||
| referrer: this._document.URL | ||
| }); | ||
| if (request === null) { | ||
| if (!this._loadSubresources) { | ||
| return null; | ||
| } | ||
| this._requestManager.add(request); | ||
| const abortController = new AbortController(); | ||
| // Add it to the request manager. The request manager is very old code, but it works with the contract of "has an | ||
| // `abort()` method". One day this whole subsystem will be refactored to use `AbortController`s natively, but for | ||
| // now this just happens to work. | ||
| // | ||
| // Note that we add the controller now, before calling `fetchCollected()`, so that if any interceptors or other code | ||
| // calls `window.close()`, the abort controller is already registered and will see the abort. | ||
| this._requestManager.add(abortController); | ||
| const fetchPromise = fetchCollected(this._dispatcher, { | ||
| url, | ||
| headers: { Referer: this._document.URL }, | ||
| signal: abortController.signal, | ||
| element: idlUtils.wrapperForImpl(element) | ||
| }); | ||
| const onErrorWrapped = cause => { | ||
| this._requestManager.remove(request); | ||
| this._requestManager.remove(abortController); | ||
| // If the request was aborted, don't fire error events | ||
| if (cause && cause.name === "AbortError") { | ||
| return Promise.resolve(); | ||
| } | ||
| if (onError) { | ||
@@ -49,9 +66,20 @@ onError(cause); | ||
| const onLoadWrapped = data => { | ||
| this._requestManager.remove(request); | ||
| const onLoadWrapped = response => { | ||
| this._requestManager.remove(abortController); | ||
| this._addCookies(url, request.response ? request.response.headers : {}); | ||
| // Extract data and create a response-like object for compatibility | ||
| const { body: data, status, headers, url: responseURL } = response; | ||
| const responseObj = { | ||
| ok: status >= 200 && status < 300, | ||
| status, | ||
| headers: { | ||
| get(name) { | ||
| return headers[name.toLowerCase()] ?? null; | ||
| } | ||
| }, | ||
| url: responseURL | ||
| }; | ||
| try { | ||
| const result = onLoad ? onLoad(data) : undefined; | ||
| const result = onLoad ? onLoad(data, responseObj) : undefined; | ||
@@ -72,2 +100,8 @@ return Promise.resolve(result) | ||
| // Create a wrapper object that can be used by the queue system | ||
| const request = { | ||
| then: (onFulfilled, onRejected) => fetchPromise.then(onFulfilled, onRejected), | ||
| catch: onRejected => fetchPromise.catch(onRejected) | ||
| }; | ||
| if (element.localName === "script" && element.hasAttributeNS(null, "async")) { | ||
@@ -86,18 +120,2 @@ this._asyncQueue.push(request, onLoadWrapped, onErrorWrapped, this._queue.getLastScript()); | ||
| } | ||
| _addCookies(url, headers) { | ||
| let cookies = headers["set-cookie"]; | ||
| if (!cookies) { | ||
| return; | ||
| } | ||
| if (!Array.isArray(cookies)) { | ||
| cookies = [cookies]; | ||
| } | ||
| cookies.forEach(cookie => { | ||
| this._document._cookieJar.setCookieSync(cookie, url, { http: true, ignoreError: true }); | ||
| }); | ||
| } | ||
| }; |
@@ -247,3 +247,5 @@ "use strict"; | ||
| window._resourceLoader = options.resourceLoader; | ||
| window._dispatcher = options.dispatcher; | ||
| window._loadSubresources = options.loadSubresources; | ||
| window._userAgent = options.userAgent; | ||
@@ -348,3 +350,3 @@ // List options explicitly to be clear which are passed through | ||
| const external = External.create(window); | ||
| const navigator = Navigator.create(window, [], { userAgent: window._resourceLoader._userAgent }); | ||
| const navigator = Navigator.create(window); | ||
| const performanceImpl = Performance.create(window, [], { | ||
@@ -351,0 +353,0 @@ timeOrigin: performance.timeOrigin + windowInitialized, |
| "use strict"; | ||
| /** | ||
| * Provides some utility functions for somewhat efficiently modifying a | ||
| * collection of headers. | ||
| * Provides utility functions for efficiently managing a collection of headers. Corresponds to | ||
| * https://fetch.spec.whatwg.org/#concept-header-list. | ||
| * | ||
| * Note that this class only operates on ByteStrings (which is also why we use | ||
| * toLowerCase internally). | ||
| * Notably, unlike `Headers`, this allows retrieving the original casing of header names and does no normalization of | ||
| * inputs, which is important for implementing `XMLHttpRequest`. See discussions on, e.g., | ||
| * https://github.com/whatwg/fetch/pull/476. | ||
| * | ||
| */ | ||
| class HeaderList { | ||
| // Note: we can use normal `toLowerCase()` in this class, instead of `asciiLowercase()`, because we assume all inputs | ||
| // are byte strings. | ||
| constructor() { | ||
| this.headers = new Map(); | ||
| // Internal storage: Map<lowercaseName, {name: originalName, values: Array<value>}> | ||
| this._headers = new Map(); | ||
| } | ||
| // https://fetch.spec.whatwg.org/#concept-header-list-append | ||
| append(name, value) { | ||
| const existing = this.headers.get(name.toLowerCase()); | ||
| const lower = name.toLowerCase(); | ||
| const existing = this._headers.get(lower); | ||
| if (existing) { | ||
| existing.push(value); | ||
| existing.values.push(value); | ||
| } else { | ||
| this.headers.set(name.toLowerCase(), [value]); | ||
| this._headers.set(lower, { name, values: [value] }); | ||
| } | ||
| } | ||
| // https://fetch.spec.whatwg.org/#header-list-contains | ||
| contains(name) { | ||
| return this.headers.has(name.toLowerCase()); | ||
| return this._headers.has(name.toLowerCase()); | ||
| } | ||
| // https://fetch.spec.whatwg.org/#concept-header-list-get | ||
| get(name) { | ||
| name = name.toLowerCase(); | ||
| const values = this.headers.get(name); | ||
| if (!values) { | ||
| const entry = this._headers.get(name.toLowerCase()); | ||
| if (!entry) { | ||
| return null; | ||
| } | ||
| return values; | ||
| return entry.values.join(", "); | ||
| } | ||
| // No corresponding spec algorithm, but equivalent to e.g. the steps used in | ||
| // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie. | ||
| getAll(name) { | ||
| const entry = this._headers.get(name.toLowerCase()); | ||
| if (!entry) { | ||
| return null; | ||
| } | ||
| return entry.values; | ||
| } | ||
| // https://fetch.spec.whatwg.org/#concept-header-list-delete | ||
| delete(name) { | ||
| this.headers.delete(name.toLowerCase()); | ||
| this._headers.delete(name.toLowerCase()); | ||
| } | ||
| // https://fetch.spec.whatwg.org/#concept-header-list-set | ||
| set(name, value) { | ||
| const lowerName = name.toLowerCase(); | ||
| this.headers.delete(lowerName); | ||
| this.headers.set(lowerName, [value]); | ||
| const lower = name.toLowerCase(); | ||
| this._headers.set(lower, { name, values: [value] }); | ||
| } | ||
| // https://fetch.spec.whatwg.org/#concept-header-list-combine | ||
| combine(name, value) { | ||
| const lower = name.toLowerCase(); | ||
| const existing = this._headers.get(lower); | ||
| if (existing) { | ||
| existing.values[0] += ", " + value; | ||
| } else { | ||
| this._headers.set(lower, { name, values: [value] }); | ||
| } | ||
| } | ||
| // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine | ||
| sortAndCombine() { | ||
| const names = [...this.headers.keys()].sort(); | ||
| const names = [...this._headers.keys()].sort(); | ||
| const headers = []; | ||
| for (const name of names) { | ||
| const { values } = this._headers.get(name); | ||
| if (name === "set-cookie") { | ||
| for (const value of this.get(name)) { | ||
| for (const value of values) { | ||
| headers.push([name, value]); | ||
| } | ||
| } else { | ||
| headers.push([name, this.get(name).join(", ")]); | ||
| headers.push([name, values.join(", ")]); | ||
| } | ||
@@ -63,4 +96,64 @@ } | ||
| } | ||
| /** | ||
| * Yields [name, value] pairs for iteration. | ||
| * Each header with multiple values yields multiple pairs. | ||
| * Preserves original casing. | ||
| */ | ||
| * [Symbol.iterator]() { | ||
| for (const { name, values } of this._headers.values()) { | ||
| for (const value of values) { | ||
| yield [name, value]; | ||
| } | ||
| } | ||
| } | ||
| /** | ||
| * Yields unique header names (with original casing). | ||
| */ | ||
| * names() { | ||
| for (const { name } of this._headers.values()) { | ||
| yield name; | ||
| } | ||
| } | ||
| /** | ||
| * Serializes the header list to an object. | ||
| * Format matches undici's headers object: {name: value} or {name: [values]} for multiple values. | ||
| */ | ||
| toJSON() { | ||
| const result = {}; | ||
| for (const { name, values } of this._headers.values()) { | ||
| result[name] = values.length === 1 ? values[0] : values; | ||
| } | ||
| return result; | ||
| } | ||
| /** | ||
| * Creates a HeaderList from a headers object. | ||
| * Format: {name: value} or {name: [values]} for multiple values. | ||
| * @param {object} obj - Headers object | ||
| */ | ||
| static fromJSON(obj) { | ||
| const list = new HeaderList(); | ||
| for (const [name, value] of Object.entries(obj)) { | ||
| if (Array.isArray(value)) { | ||
| for (const v of value) { | ||
| list.append(name, v); | ||
| } | ||
| } else { | ||
| list.append(name, value); | ||
| } | ||
| } | ||
| return list; | ||
| } | ||
| /** | ||
| * Creates a copy of this HeaderList. | ||
| */ | ||
| clone() { | ||
| return HeaderList.fromJSON(this.toJSON()); | ||
| } | ||
| } | ||
| module.exports = HeaderList; |
| "use strict"; | ||
| const MIMEType = require("whatwg-mimetype"); | ||
| const { MIMEType } = require("whatwg-mimetype"); | ||
| // https://fetch.spec.whatwg.org/#privileged-no-cors-request-header-name | ||
| const PRIVILEGED_NO_CORS_REQUEST = new Set(["range"]); | ||
| function isPrivilegedNoCORSRequest(name) { | ||
| function isPrivilegedNoCORSRequestHeaderName(name) { | ||
| return PRIVILEGED_NO_CORS_REQUEST.has(name.toLowerCase()); | ||
| } | ||
| // https://fetch.spec.whatwg.org/#no-cors-safelisted-request-header-name | ||
| const NO_CORS_SAFELISTED_REQUEST = new Set([ | ||
@@ -16,42 +18,16 @@ `accept`, | ||
| ]); | ||
| function isNoCORSSafelistedRequest(name) { | ||
| function isNoCORSSafelistedRequestHeaderName(name) { | ||
| return NO_CORS_SAFELISTED_REQUEST.has(name.toLowerCase()); | ||
| } | ||
| const FORBIDDEN = new Set([ | ||
| `accept-charset`, | ||
| `accept-encoding`, | ||
| `access-control-request-headers`, | ||
| `access-control-request-method`, | ||
| `connection`, | ||
| `content-length`, | ||
| `cookie`, | ||
| `cookie2`, | ||
| `date`, | ||
| `dnt`, | ||
| `expect`, | ||
| `host`, | ||
| `keep-alive`, | ||
| `origin`, | ||
| `referer`, | ||
| `te`, | ||
| `trailer`, | ||
| `transfer-encoding`, | ||
| `upgrade`, | ||
| `via` | ||
| ]); | ||
| function isForbidden(name) { | ||
| name = name.toLowerCase(); | ||
| return ( | ||
| FORBIDDEN.has(name) || name.startsWith("proxy-") || name.startsWith("sec-") | ||
| ); | ||
| } | ||
| // https://fetch.spec.whatwg.org/#forbidden-response-header-name | ||
| const FORBIDDEN_RESPONSE = new Set(["set-cookie", "set-cookie2"]); | ||
| function isForbiddenResponse(name) { | ||
| function isForbiddenResponseHeaderName(name) { | ||
| return FORBIDDEN_RESPONSE.has(name.toLowerCase()); | ||
| } | ||
| // https://fetch.spec.whatwg.org/#cors-safelisted-request-header | ||
| // Note: name and value are already ensured by the IDL layer to be byte strings. | ||
| const CORS_UNSAFE_BYTE = /[\x00-\x08\x0A-\x1F"():<>?@[\\\]{}\x7F]/; | ||
| function isCORSWhitelisted(name, value) { | ||
| function isCORSSafelistedRequestHeader(name, value) { | ||
| name = name.toLowerCase(); | ||
@@ -92,3 +68,3 @@ switch (name) { | ||
| } | ||
| if (Buffer.from(value).length > 128) { | ||
| if (value.length > 128) { | ||
| return false; | ||
@@ -99,8 +75,134 @@ } | ||
| // https://fetch.spec.whatwg.org/#no-cors-safelisted-request-header | ||
| function isNoCORSSafelistedRequestHeader(name, value) { | ||
| if (!isNoCORSSafelistedRequestHeaderName(name)) { | ||
| return false; | ||
| } | ||
| return isCORSSafelistedRequestHeader(name, value); | ||
| } | ||
| const BASIC_FORBIDDEN_REQUEST_HEADERS = new Set([ | ||
| "accept-charset", | ||
| "accept-encoding", | ||
| "access-control-request-headers", | ||
| "access-control-request-method", | ||
| "connection", | ||
| "content-length", | ||
| "cookie", | ||
| "cookie2", | ||
| "date", | ||
| "dnt", | ||
| "expect", | ||
| "host", | ||
| "keep-alive", | ||
| "origin", | ||
| "referer", | ||
| "te", | ||
| "trailer", | ||
| "transfer-encoding", | ||
| "upgrade", | ||
| "via" | ||
| ]); | ||
| const METHOD_CHECKING_FORBIDDEN_REQUEST_HEADERS = new Set([ | ||
| "x-http-method", | ||
| "x-http-method-override", | ||
| "x-method-override" | ||
| ]); | ||
| const FORBIDDEN_METHODS = new Set([ | ||
| "connect", | ||
| "trace", | ||
| "track" | ||
| ]); | ||
| // https://fetch.spec.whatwg.org/#forbidden-method | ||
| function isForbiddenMethod(value) { | ||
| return FORBIDDEN_METHODS.has(value.toLowerCase()); | ||
| } | ||
| // https://fetch.spec.whatwg.org/#forbidden-request-header | ||
| function isForbiddenRequestHeader(name, value) { | ||
| const lowercaseName = name.toLowerCase(); | ||
| if (BASIC_FORBIDDEN_REQUEST_HEADERS.has(lowercaseName)) { | ||
| return true; | ||
| } | ||
| if (lowercaseName.startsWith("proxy-") || lowercaseName.startsWith("sec-")) { | ||
| return true; | ||
| } | ||
| if (METHOD_CHECKING_FORBIDDEN_REQUEST_HEADERS.has(lowercaseName)) { | ||
| const parsedValues = getDecodeAndSplit(value); | ||
| return parsedValues.some(isForbiddenMethod); | ||
| } | ||
| return false; | ||
| } | ||
| // https://fetch.spec.whatwg.org/#header-value-get-decode-and-split | ||
| function getDecodeAndSplit(input) { | ||
| const values = []; | ||
| let temporaryValue = ""; | ||
| let position = 0; | ||
| while (true) { | ||
| // Collect sequence of code points that are not " or , | ||
| while (position < input.length && input[position] !== "\"" && input[position] !== ",") { | ||
| temporaryValue += input[position++]; | ||
| } | ||
| // If position is not past end and code point is " | ||
| if (position < input.length && input[position] === '"') { | ||
| // Inlined: collect HTTP quoted string (extract-value = false) | ||
| const positionStart = position++; | ||
| while (true) { | ||
| while (position < input.length && input[position] !== "\"" && input[position] !== "\\") { | ||
| position++; | ||
| } | ||
| if (position >= input.length) { | ||
| break; | ||
| } | ||
| if (input[position++] === "\\") { | ||
| if (position >= input.length) { | ||
| break; | ||
| } | ||
| position++; | ||
| } else { | ||
| break; // It was " | ||
| } | ||
| } | ||
| temporaryValue += input.slice(positionStart, position); | ||
| if (position < input.length) { | ||
| continue; | ||
| } | ||
| } | ||
| // Remove HTTP tab or space from start and end | ||
| let start = 0; | ||
| let end = temporaryValue.length; | ||
| while (start < end && (temporaryValue[start] === "\t" || temporaryValue[start] === " ")) { | ||
| start++; | ||
| } | ||
| while (end > start && (temporaryValue[end - 1] === "\t" || temporaryValue[end - 1] === " ")) { | ||
| end--; | ||
| } | ||
| values.push(temporaryValue.slice(start, end)); | ||
| temporaryValue = ""; | ||
| if (position >= input.length) { | ||
| return values; | ||
| } | ||
| // Assert: code point at position is , | ||
| position++; | ||
| } | ||
| } | ||
| module.exports = { | ||
| isPrivilegedNoCORSRequest, | ||
| isNoCORSSafelistedRequest, | ||
| isForbidden, | ||
| isForbiddenResponse, | ||
| isCORSWhitelisted | ||
| isPrivilegedNoCORSRequestHeaderName, | ||
| isNoCORSSafelistedRequestHeaderName, | ||
| isNoCORSSafelistedRequestHeader, | ||
| isForbiddenRequestHeader, | ||
| isForbiddenResponseHeaderName, | ||
| isCORSSafelistedRequestHeader | ||
| }; |
| "use strict"; | ||
| const { | ||
| isForbidden, | ||
| isForbiddenResponse, | ||
| isPrivilegedNoCORSRequest, | ||
| isNoCORSSafelistedRequest, | ||
| isCORSWhitelisted | ||
| isForbiddenRequestHeader, | ||
| isForbiddenResponseHeaderName, | ||
| isPrivilegedNoCORSRequestHeaderName, | ||
| isNoCORSSafelistedRequestHeaderName, | ||
| isNoCORSSafelistedRequestHeader | ||
| } = require("./header-types"); | ||
| const { isHeaderName, isHeaderValue, normalizeHeaderValue } = require("./header-utils"); | ||
| const HeaderList = require("./header-list"); | ||
| function assertName(name) { | ||
| if (!name.match(/^[!#$%&'*+\-.^`|~\w]+$/)) { | ||
| if (!isHeaderName(name)) { | ||
| throw new TypeError("name is invalid"); | ||
@@ -19,3 +20,3 @@ } | ||
| function assertValue(value) { | ||
| if (value.match(/[\0\r\n]/)) { | ||
| if (!isHeaderValue(value)) { | ||
| throw new TypeError("value is invalid"); | ||
@@ -25,18 +26,40 @@ } | ||
| // https://fetch.spec.whatwg.org/#concept-header-value-normalize | ||
| function normalizeValue(potentialValue) { | ||
| return potentialValue.replace(/^[\n\r\t ]+|[\n\r\t ]+$/g, ""); | ||
| } | ||
| class HeadersImpl { | ||
| constructor(globalObject, args) { | ||
| this.guard = "none"; | ||
| this.headersList = new HeaderList(); | ||
| this.headerList = new HeaderList(); | ||
| if (args[0]) { | ||
| this._fill(args[0]); | ||
| this.#fill(args[0]); | ||
| } | ||
| } | ||
| _fill(init) { | ||
| // https://fetch.spec.whatwg.org/#headers-validate | ||
| #validate(name, value) { | ||
| assertName(name); | ||
| assertValue(value); | ||
| switch (this.guard) { | ||
| case "immutable": { | ||
| throw new TypeError("Headers is immutable"); | ||
| } | ||
| case "request": { | ||
| if (isForbiddenRequestHeader(name, value)) { | ||
| return false; | ||
| } | ||
| break; | ||
| } | ||
| case "response": { | ||
| if (isForbiddenResponseHeaderName(name)) { | ||
| return false; | ||
| } | ||
| break; | ||
| } | ||
| } | ||
| return true; | ||
| } | ||
| // https://fetch.spec.whatwg.org/#concept-headers-fill | ||
| #fill(init) { | ||
| if (Array.isArray(init)) { | ||
@@ -50,4 +73,4 @@ for (const header of init) { | ||
| } else { | ||
| for (const key of Object.keys(init)) { | ||
| this.append(key, init[key]); | ||
| for (const [key, value] of Object.entries(init)) { | ||
| this.append(key, value); | ||
| } | ||
@@ -57,115 +80,93 @@ } | ||
| has(name) { | ||
| assertName(name); | ||
| return this.headersList.contains(name); | ||
| // https://fetch.spec.whatwg.org/#concept-headers-remove-privileged-no-cors-request-headers | ||
| #removePrivilegedNoCORSHeaders() { | ||
| this.headerList.delete("range"); | ||
| } | ||
| getSetCookie() { | ||
| return this.headersList.get("Set-Cookie") || []; | ||
| } | ||
| // https://fetch.spec.whatwg.org/#dom-headers-append | ||
| append(name, value) { | ||
| value = normalizeHeaderValue(value); | ||
| if (!this.#validate(name, value)) { | ||
| return; | ||
| } | ||
| get(name) { | ||
| assertName(name); | ||
| const r = this.headersList.get(name); | ||
| if (!r) { | ||
| return null; | ||
| if (this.guard === "request-no-cors") { | ||
| let temporaryValue = this.headerList.get(name); | ||
| if (temporaryValue === null) { | ||
| temporaryValue = value; | ||
| } else { | ||
| temporaryValue += ", " + value; | ||
| } | ||
| if (!isNoCORSSafelistedRequestHeader(name, temporaryValue)) { | ||
| return; | ||
| } | ||
| } | ||
| return r.join(", "); | ||
| } | ||
| _removePrivilegedNoCORSHeaders() { | ||
| this.headersList.delete("range"); | ||
| this.headerList.append(name, value); | ||
| if (this.guard === "request-no-cors") { | ||
| this.#removePrivilegedNoCORSHeaders(); | ||
| } | ||
| } | ||
| append(name, value) { | ||
| value = normalizeValue(value); | ||
| assertName(name); | ||
| assertValue(value); | ||
| // https://fetch.spec.whatwg.org/#dom-headers-delete | ||
| delete(name) { | ||
| if (!this.#validate(name, "")) { | ||
| return; | ||
| } | ||
| switch (this.guard) { | ||
| case "immutable": | ||
| throw new TypeError("Headers is immutable"); | ||
| case "request": | ||
| if (isForbidden(name)) { | ||
| return; | ||
| } | ||
| break; | ||
| case "request-no-cors": { | ||
| if (!isCORSWhitelisted(name, value)) { | ||
| return; | ||
| } | ||
| break; | ||
| } | ||
| case "response": | ||
| if (isForbiddenResponse(name)) { | ||
| return; | ||
| } | ||
| break; | ||
| if (this.guard === "request-no-cors" && | ||
| !isNoCORSSafelistedRequestHeaderName(name) && | ||
| !isPrivilegedNoCORSRequestHeaderName(name)) { | ||
| return; | ||
| } | ||
| this.headersList.append(name, value); | ||
| this._removePrivilegedNoCORSHeaders(); | ||
| if (!this.headerList.contains(name)) { | ||
| return; | ||
| } | ||
| this.headerList.delete(name); | ||
| if (this.guard === "request-no-cors") { | ||
| this.#removePrivilegedNoCORSHeaders(); | ||
| } | ||
| } | ||
| set(name, value) { | ||
| value = normalizeValue(value); | ||
| // https://fetch.spec.whatwg.org/#dom-headers-get | ||
| get(name) { | ||
| assertName(name); | ||
| assertValue(value); | ||
| return this.headerList.get(name); | ||
| } | ||
| switch (this.guard) { | ||
| case "immutable": | ||
| throw new TypeError("Headers is immutable"); | ||
| case "request": | ||
| if (isForbidden(name)) { | ||
| return; | ||
| } | ||
| break; | ||
| case "request-no-cors": { | ||
| if (!isCORSWhitelisted(name, value)) { | ||
| return; | ||
| } | ||
| break; | ||
| } | ||
| case "response": | ||
| if (isForbiddenResponse(name)) { | ||
| return; | ||
| } | ||
| break; | ||
| } | ||
| this.headersList.set(name, value); | ||
| this._removePrivilegedNoCORSHeaders(); | ||
| // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie | ||
| getSetCookie() { | ||
| return this.headerList.getAll("Set-Cookie") || []; | ||
| } | ||
| delete(name) { | ||
| // https://fetch.spec.whatwg.org/#dom-headers-has | ||
| has(name) { | ||
| assertName(name); | ||
| return this.headerList.contains(name); | ||
| } | ||
| switch (this.guard) { | ||
| case "immutable": | ||
| throw new TypeError("Headers is immutable"); | ||
| case "request": | ||
| if (isForbidden(name)) { | ||
| return; | ||
| } | ||
| break; | ||
| case "request-no-cors": { | ||
| if ( | ||
| !isNoCORSSafelistedRequest(name) && | ||
| !isPrivilegedNoCORSRequest(name) | ||
| ) { | ||
| return; | ||
| } | ||
| break; | ||
| } | ||
| case "response": | ||
| if (isForbiddenResponse(name)) { | ||
| return; | ||
| } | ||
| break; | ||
| // https://fetch.spec.whatwg.org/#dom-headers-set | ||
| set(name, value) { | ||
| value = normalizeHeaderValue(value); | ||
| if (!this.#validate(name, value)) { | ||
| return; | ||
| } | ||
| this.headersList.delete(name); | ||
| this._removePrivilegedNoCORSHeaders(); | ||
| if (this.guard === "request-no-cors" && !isNoCORSSafelistedRequestHeader(name, value)) { | ||
| return; | ||
| } | ||
| this.headerList.set(name, value); | ||
| if (this.guard === "request-no-cors") { | ||
| this.#removePrivilegedNoCORSHeaders(); | ||
| } | ||
| } | ||
| * [Symbol.iterator]() { | ||
| for (const header of this.headersList.sortAndCombine()) { | ||
| for (const header of this.headerList.sortAndCombine()) { | ||
| yield header; | ||
@@ -172,0 +173,0 @@ } |
| "use strict"; | ||
| const { utf8Encode } = require("../helpers/encoding"); | ||
| const Blob = require("../generated/Blob"); | ||
| const { isArrayBuffer } = require("../generated/utils"); | ||
| const { concatTypedArrays } = require("../helpers/binary-data"); | ||
@@ -12,41 +14,61 @@ function convertLineEndingsToNative(s) { | ||
| exports.implementation = class BlobImpl { | ||
| constructor(globalObject, args) { | ||
| const parts = args[0]; | ||
| const properties = args[1]; | ||
| constructor(globalObject, [parts, properties], { fastPathArrayBufferToWrap } = {}) { | ||
| this._globalObject = globalObject; | ||
| const buffers = []; | ||
| this.type = properties.type; | ||
| if (/[^\u0020-\u007E]/.test(this.type)) { | ||
| this.type = ""; | ||
| } else { | ||
| this.type = this.type.toLowerCase(); | ||
| } | ||
| // A word about `this._bytes`: | ||
| // | ||
| // It is a `Uint8Array`. The realm of that `Uint8Array`, and/or the realm of its underlying `ArrayBuffer`, may be | ||
| // arbitrary. In particular, they likely do *not* match `this._globalObject`. The underlying `ArrayBuffer` may have | ||
| // been acquired from some other part of the system, e.g., the `ws` library, or aliased to another `BlobImpl`'s | ||
| // `_bytes`. | ||
| // | ||
| // This is fine, and indeed desirable, for efficiency. The key is that `Blob` is conceptually immutable, so users | ||
| // will never mutate the underlying `ArrayBuffer`. And, we never expose `this._bytes` or the underlying | ||
| // `ArrayBuffer` directly to the user: we always use something like `copyToArrayBufferInTargetRealm()` to ensure the | ||
| // result is in the realm appropriate for the user's request, and that if the user mutates the exposed bytes, this | ||
| // doesn't impact `this._bytes`. | ||
| // Used internally in jsdom when we receive an `ArrayBuffer` from elsewhere in the system and know we don't need to | ||
| // copy it because the user doesn't have any references to it. It's OK if `fastPathArrayBufferToWrap` is in the | ||
| // wrong realm even, because it never directly escapes the `BlobImpl` without a copy. | ||
| if (fastPathArrayBufferToWrap) { | ||
| this._bytes = new Uint8Array(fastPathArrayBufferToWrap); | ||
| return; | ||
| } | ||
| const chunks = []; | ||
| if (parts !== undefined) { | ||
| for (const part of parts) { | ||
| let buffer; | ||
| let chunk; | ||
| if (isArrayBuffer(part)) { | ||
| buffer = Buffer.from(part); | ||
| // Create a wrapper. The copying will happen in `concatTypedArrays()`. | ||
| chunk = new Uint8Array(part); | ||
| } else if (ArrayBuffer.isView(part)) { | ||
| buffer = Buffer.from(part.buffer, part.byteOffset, part.byteLength); | ||
| // Use the part as-is. The copying will happen in `concatTypedArrays()`. | ||
| chunk = part; | ||
| } else if (Blob.isImpl(part)) { | ||
| buffer = part._buffer; | ||
| // Use the existing `Uint8Array` as-is. The copying will happen in `concatTypedArrays()`. | ||
| chunk = part._bytes; | ||
| } else { | ||
| let s = part; | ||
| if (properties.endings === "native") { | ||
| s = convertLineEndingsToNative(part); | ||
| s = convertLineEndingsToNative(s); | ||
| } | ||
| buffer = Buffer.from(s); | ||
| chunk = utf8Encode(s); | ||
| } | ||
| buffers.push(buffer); | ||
| chunks.push(chunk); | ||
| } | ||
| } | ||
| this._buffer = Buffer.concat(buffers); | ||
| this._globalObject = globalObject; | ||
| this.type = properties.type; | ||
| if (/[^\u0020-\u007E]/.test(this.type)) { | ||
| this.type = ""; | ||
| } else { | ||
| this.type = this.type.toLowerCase(); | ||
| } | ||
| this._bytes = concatTypedArrays(chunks); | ||
| } | ||
| get size() { | ||
| return this._buffer.length; | ||
| return this._bytes.length; | ||
| } | ||
@@ -84,12 +106,8 @@ | ||
| const buffer = this._buffer; | ||
| const slicedBuffer = buffer.slice( | ||
| relativeStart, | ||
| relativeStart + span | ||
| ); | ||
| const slicedBuffer = this._bytes.slice(relativeStart, relativeStart + span); | ||
| const blob = Blob.createImpl(this._globalObject, [[], { type: relativeContentType }], {}); | ||
| blob._buffer = slicedBuffer; | ||
| blob._bytes = slicedBuffer; | ||
| return blob; | ||
| } | ||
| }; |
| "use strict"; | ||
| const { labelToName, legacyHookDecode } = require("@exodus/bytes/encoding.js"); | ||
| const MIMEType = require("whatwg-mimetype"); | ||
| const { toBase64 } = require("@exodus/bytes/base64.js"); | ||
| const { MIMEType } = require("whatwg-mimetype"); | ||
| const DOMException = require("../generated/DOMException"); | ||
@@ -10,3 +11,3 @@ const EventTargetImpl = require("../events/EventTarget-impl").implementation; | ||
| const { fireAnEvent } = require("../helpers/events"); | ||
| const { copyToArrayBufferInNewRealm } = require("../helpers/binary-data"); | ||
| const { copyToArrayBufferInTargetRealm } = require("../helpers/binary-data"); | ||
@@ -85,10 +86,6 @@ const READY_STATES = Object.freeze({ | ||
| let data = file._buffer; | ||
| if (!data) { | ||
| data = Buffer.alloc(0); | ||
| } | ||
| this._fireProgressEvent("progress", { | ||
| lengthComputable: !isNaN(file.size), | ||
| total: file.size, | ||
| loaded: data.length | ||
| loaded: file._bytes.length | ||
| }); | ||
@@ -104,3 +101,8 @@ | ||
| case "binaryString": { | ||
| this.result = data.toString("binary"); | ||
| // Convert Uint8Array to binary string (each byte as a code point) | ||
| let binaryString = ""; | ||
| for (let i = 0; i < file._bytes.length; i++) { | ||
| binaryString += String.fromCharCode(file._bytes[i]); | ||
| } | ||
| this.result = binaryString; | ||
| break; | ||
@@ -111,7 +113,7 @@ } | ||
| const contentType = MIMEType.parse(file.type) || "application/octet-stream"; | ||
| this.result = `data:${contentType};base64,${data.toString("base64")}`; | ||
| this.result = `data:${contentType};base64,${toBase64(file._bytes)}`; | ||
| break; | ||
| } | ||
| case "text": { | ||
| this.result = legacyHookDecode(data, encodingLabel); | ||
| this.result = legacyHookDecode(file._bytes, encodingLabel); | ||
| break; | ||
@@ -121,3 +123,3 @@ } | ||
| default: { | ||
| this.result = copyToArrayBufferInNewRealm(data, this._globalObject); | ||
| this.result = copyToArrayBufferInTargetRealm(file._bytes.buffer, this._globalObject); | ||
| break; | ||
@@ -124,0 +126,0 @@ } |
| "use strict"; | ||
| // See https://github.com/jsdom/jsdom/pull/2743#issuecomment-562991955 for background. | ||
| exports.copyToArrayBufferInNewRealm = (nodejsBuffer, newRealm) => { | ||
| const newAB = new newRealm.ArrayBuffer(nodejsBuffer.byteLength); | ||
| const view = new Uint8Array(newAB); | ||
| view.set(nodejsBuffer); | ||
| /** | ||
| * Concatenate the given typed arrays into a single `Uint8Array`. | ||
| * | ||
| * @param {Array<TypedArray>} arrays - An array of typed arrays. They can be of any type (but not `DataView` or | ||
| * `ArrayBuffer`). | ||
| * @returns {Uint8Array} - A new `Uint8Array` containing a copy of the concatenated data. | ||
| * @see {@link https://github.com/tc39/proposal-typedarray-concat} | ||
| */ | ||
| exports.concatTypedArrays = arrays => { | ||
| const totalLength = arrays.reduce((sum, arr) => sum + arr.byteLength, 0); | ||
| const result = new Uint8Array(totalLength); | ||
| let offset = 0; | ||
| for (const arr of arrays) { | ||
| const toSet = arr instanceof Uint8Array ? arr : new Uint8Array(arr.buffer, arr.byteOffset, arr.byteLength); | ||
| result.set(toSet, offset); | ||
| offset += arr.byteLength; | ||
| } | ||
| return result; | ||
| }; | ||
| /** | ||
| * Create a copy of the data in a given `ArrayBuffer`, in a new `ArrayBuffer` created in the target realm. | ||
| * | ||
| * Used when we have some internal data, usually in the Node.js realm, and we need to copy it to the target realm. | ||
| * Compared to {@link copyToArrayBufferInTargetRealmDestructively}, use this when we need to keep the original data | ||
| * around, such as when exposing the data from inside the `Blob` implementation to the user. | ||
| * | ||
| * @param {ArrayBuffer} arrayBuffer - The `ArrayBuffer` to copy the data from. | ||
| * @param {object} newRealm - The target realm's global object, which has `ArrayBuffer` and `Uint8Array` constructor | ||
| * properties. | ||
| * @returns {ArrayBuffer} - A new `ArrayBuffer` containing a copy of the data, using the `ArrayBuffer` constructor from | ||
| * `newRealm`. | ||
| */ | ||
| exports.copyToArrayBufferInTargetRealm = (arrayBuffer, newRealm) => { | ||
| const newAB = new newRealm.ArrayBuffer(arrayBuffer.byteLength); | ||
| const view = new newRealm.Uint8Array(newAB); | ||
| view.set(new newRealm.Uint8Array(arrayBuffer)); | ||
| return newAB; | ||
| }; | ||
| /** | ||
| * Create a copy of the data in a given `ArrayBuffer`, in a new `ArrayBuffer` created in the target realm. The original | ||
| * `ArrayBuffer` is transferred in the process, so its data is no longer usable. | ||
| * | ||
| * Used when we have some internal data, usually in the Node.js realm, and we need to copy it to the target realm. | ||
| * Compared to {@link copyToArrayBufferInTargetRealm}, use this when we don't need to keep the original data around, | ||
| * such as when copying data from the Node.js network or `WebSocket` stack to the jsdom user. | ||
| * | ||
| * @param {ArrayBuffer} arrayBuffer - The `ArrayBuffer` to extract the data from. After calling this function, | ||
| * `arrayBuffer` is no longer usable. | ||
| * @param {object} newRealm - The target realm's global object, which has an `ArrayBuffer` constructor property. | ||
| * @returns {ArrayBuffer} - A new `ArrayBuffer` containing the original data data, using the `ArrayBuffer` constructor | ||
| * from `newRealm`. | ||
| */ | ||
| exports.copyToArrayBufferInTargetRealmDestructively = (arrayBuffer, newRealm) => { | ||
| if (!newRealm.ArrayBuffer.prototype.transfer) { | ||
| return exports.copyToArrayBufferInTargetRealm(arrayBuffer, newRealm); | ||
| } | ||
| return newRealm.ArrayBuffer.prototype.transfer.call(arrayBuffer); | ||
| }; |
| "use strict"; | ||
| const { utf8Decode } = require("./encoding"); | ||
| // https://infra.spec.whatwg.org/#parse-json-from-bytes | ||
| // https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value | ||
| exports.parseJSONFromBytes = bytes => { | ||
| // https://encoding.spec.whatwg.org/#utf-8-decode | ||
| if (bytes[0] === 0xEF && bytes[1] === 0xBB && bytes[2] === 0xBF) { | ||
| bytes = bytes.subarray(3); | ||
| } | ||
| const jsonText = bytes.toString("utf-8"); | ||
| return JSON.parse(jsonText); | ||
| return JSON.parse(utf8Decode(bytes)); | ||
| }; |
@@ -201,6 +201,3 @@ "use strict"; | ||
| const domSelector = elementImpl._ownerDocument._domSelector; | ||
| // Workaround for cssstyle issue. | ||
| // https://github.com/jsdom/cssstyle/issues/193 | ||
| const selector = rule.selectorText.split(";").pop().trim(); | ||
| const { match, pseudoElement } = domSelector.check(selector, elementImpl); | ||
| const { match, pseudoElement } = domSelector.check(rule.selectorText, elementImpl); | ||
| // `pseudoElement` is a pseudo-element selector (e.g. `::before`). | ||
@@ -207,0 +204,0 @@ // However, we do not support getComputedStyle(element, pseudoElement), so return `false`. |
@@ -18,3 +18,7 @@ "use strict"; | ||
| function onStylesheetLoad(data) { | ||
| function onStylesheetLoad(data, response) { | ||
| if (!response.ok) { | ||
| throw new Error("Status code: " + response.status); | ||
| } | ||
| // if the element was detached before the load could finish, don't process the data | ||
@@ -21,0 +25,0 @@ if (!elementImpl._attached) { |
@@ -13,5 +13,5 @@ "use strict"; | ||
| class NavigatorImpl { | ||
| constructor(globalObject, args, privateData) { | ||
| constructor(globalObject) { | ||
| this._globalObject = globalObject; | ||
| this.userAgent = privateData.userAgent; | ||
| this.userAgent = globalObject._userAgent; | ||
| this.languages = Object.freeze(["en-US", "en"]); | ||
@@ -18,0 +18,0 @@ this.plugins = PluginArray.create(this._globalObject); |
@@ -247,6 +247,5 @@ "use strict"; | ||
| // https://html.spec.whatwg.org/#resolve-a-url | ||
| // https://html.spec.whatwg.org/#encoding-parsing-a-url | ||
| encodingParseAURL(url) { | ||
| // TODO account for encoding (once whatwg-url supports that) | ||
| return whatwgURL.parseURL(url, { baseURL: this.baseURL() }); | ||
| return whatwgURL.parseURL(url, { baseURL: this.baseURL(), encoding: this._encoding }); | ||
| } | ||
@@ -253,0 +252,0 @@ |
| "use strict"; | ||
| const MIMEType = require("whatwg-mimetype"); | ||
| const { legacyHookDecode } = require("@exodus/bytes/encoding.js"); | ||
| const { parseURL, serializeURL } = require("whatwg-url"); | ||
| const sniffHTMLEncoding = require("html-encoding-sniffer"); | ||
| const { computedMIMEType } = require("whatwg-mimetype"); | ||
@@ -35,18 +35,13 @@ const window = require("../../browser/Window"); | ||
| let request; | ||
| function onFrameLoaded(data, response) { | ||
| const contentType = computedMIMEType(data, { | ||
| contentTypeHeader: response.headers.get("content-type") | ||
| }); | ||
| const xml = contentType.isXML(); | ||
| const transportLayerEncodingLabel = contentType.parameters.get("charset"); | ||
| function onFrameLoaded(data) { | ||
| let xml = false; | ||
| let transportLayerEncodingLabel; | ||
| if (request.response) { | ||
| const contentType = MIMEType.parse(request.response.headers["content-type"]) || new MIMEType("text/plain"); | ||
| xml = contentType.isXML(); | ||
| transportLayerEncodingLabel = contentType.parameters.get("charset"); | ||
| if (xml) { | ||
| contentDoc._parsingMode = "xml"; | ||
| } | ||
| contentDoc.contentType = contentType.essence; | ||
| if (xml) { | ||
| contentDoc._parsingMode = "xml"; | ||
| } | ||
| contentDoc.contentType = contentType.essence; | ||
@@ -95,3 +90,3 @@ contentDoc._encoding = sniffHTMLEncoding(data, { | ||
| request = resourceLoader.fetch(serializedURL, { | ||
| resourceLoader.fetch(serializedURL, { | ||
| element: frame, | ||
@@ -136,3 +131,5 @@ onLoad: onFrameLoaded | ||
| parentOrigin: parentDoc._origin, | ||
| resourceLoader: parentDoc._defaultView._resourceLoader, | ||
| dispatcher: parentDoc._defaultView._dispatcher, | ||
| loadSubresources: parentDoc._defaultView._loadSubresources, | ||
| userAgent: parentDoc._defaultView._userAgent, | ||
| referrer: parentDoc.URL, | ||
@@ -139,0 +136,0 @@ cookieJar: parentDoc._cookieJar, |
@@ -353,5 +353,6 @@ "use strict"; | ||
| function setTheURL(hheu) { | ||
| hheu.url = null; | ||
| const href = hheu.getAttributeNS(null, "href"); | ||
| if (href === null) { | ||
| hheu.url = null; | ||
| return; | ||
@@ -361,4 +362,5 @@ } | ||
| const parsed = hheu._ownerDocument.encodingParseAURL(href); | ||
| hheu.url = parsed === null ? null : parsed; | ||
| if (parsed !== null) { | ||
| hheu.url = parsed; | ||
| } | ||
| } | ||
@@ -365,0 +367,0 @@ |
@@ -96,15 +96,11 @@ "use strict"; | ||
| const resourceLoader = document._resourceLoader; | ||
| let request; | ||
| const onLoadImage = data => { | ||
| const { response } = request; | ||
| if (response && response.statusCode !== undefined && response.statusCode !== 200) { | ||
| throw new Error("Status code: " + response.statusCode); | ||
| } | ||
| let error = null; | ||
| this._image.onerror = function (err) { | ||
| this._image.onerror = err => { | ||
| error = err; | ||
| }; | ||
| this._image.src = data; | ||
| // eslint-disable-next-line no-restricted-globals -- The canvas package expects a Node.js `Buffer`. | ||
| this._image.src = Buffer.from(data.buffer, data.byteOffset, data.byteLength); | ||
| if (error) { | ||
@@ -117,3 +113,3 @@ throw new Error(error); | ||
| request = resourceLoader.fetch(urlString, { | ||
| resourceLoader.fetch(urlString, { | ||
| element: this, | ||
@@ -120,0 +116,0 @@ onLoad: onLoadImage, |
| "use strict"; | ||
| const vm = require("vm"); | ||
| const { getBOMEncoding, labelToName, legacyHookDecode } = require("@exodus/bytes/encoding.js"); | ||
| const MIMEType = require("whatwg-mimetype"); | ||
| const { MIMEType } = require("whatwg-mimetype"); | ||
| const { serializeURL } = require("whatwg-url"); | ||
@@ -66,3 +66,2 @@ | ||
| const defaultEncoding = labelToName(this.getAttributeNS(null, "charset")) || document._encoding; | ||
| let request; | ||
@@ -80,16 +79,11 @@ if (!this._canRunScript()) { | ||
| const onLoadExternalScript = data => { | ||
| const { response } = request; | ||
| let contentType; | ||
| if (response && response.statusCode !== undefined && response.statusCode >= 400) { | ||
| throw new Error("Status code: " + response.statusCode); | ||
| const onLoadExternalScript = (data, response) => { | ||
| if (!response.ok) { | ||
| throw new Error("Status code: " + response.status); | ||
| } | ||
| if (response) { | ||
| contentType = MIMEType.parse(response.headers["content-type"]) || new MIMEType("text/plain"); | ||
| } | ||
| const contentType = MIMEType.parse(response.headers.get("content-type")) || new MIMEType("text/plain"); | ||
| const encoding = labelToName(getBOMEncoding(data)) || | ||
| (contentType && labelToName(contentType.parameters.get("charset"))) || | ||
| labelToName(contentType.parameters.get("charset")) || | ||
| defaultEncoding; | ||
@@ -101,3 +95,3 @@ const script = legacyHookDecode(data, encoding); | ||
| request = resourceLoader.fetch(urlString, { | ||
| resourceLoader.fetch(urlString, { | ||
| element: this, | ||
@@ -104,0 +98,0 @@ onLoad: onLoadExternalScript |
| "use strict"; | ||
| const nodeURL = require("url"); | ||
| const DOMException = require("../generated/DOMException"); | ||
| const { WebSocket } = require("undici"); | ||
| const { parseURL, serializeURL, serializeURLOrigin } = require("whatwg-url"); | ||
| const WebSocket = require("ws"); | ||
| const { setupForSimpleEventAccessors } = require("../helpers/create-event-accessor"); | ||
| const { fireAnEvent } = require("../helpers/events"); | ||
| const { isArrayBuffer } = require("../generated/utils"); | ||
| const { copyToArrayBufferInNewRealm } = require("../helpers/binary-data"); | ||
| const { copyToArrayBufferInTargetRealmDestructively } = require("../helpers/binary-data"); | ||
| const IterableWeakSet = require("../helpers/iterable-weak-set"); | ||
| const EventTargetImpl = require("../events/EventTarget-impl").implementation; | ||
| const idlUtils = require("../generated/utils"); | ||
| const Blob = require("../generated/Blob"); | ||
| const CloseEvent = require("../generated/CloseEvent"); | ||
| const DOMException = require("../generated/DOMException"); | ||
| const MessageEvent = require("../generated/MessageEvent"); | ||
| const CONNECTING = 0; | ||
| const OPEN = 1; | ||
| const CLOSING = 2; | ||
| const CLOSED = 3; | ||
| const productions = { | ||
| // https://tools.ietf.org/html/rfc7230#section-3.2.6 | ||
| token: /^[!#$%&'*+\-.^_`|~\dA-Za-z]+$/ | ||
| }; | ||
| const readyStateWSToDOM = []; | ||
| readyStateWSToDOM[WebSocket.CONNECTING] = CONNECTING; | ||
| readyStateWSToDOM[WebSocket.OPEN] = OPEN; | ||
| readyStateWSToDOM[WebSocket.CLOSING] = CLOSING; | ||
| readyStateWSToDOM[WebSocket.CLOSED] = CLOSED; | ||
| // https://tools.ietf.org/html/rfc6455#section-4.3 | ||
| // See Sec-WebSocket-Protocol-Client, which is for the syntax of an entire header value. This function checks if a | ||
| // single header conforms to the rules. | ||
| function verifySecWebSocketProtocol(str) { | ||
| return productions.token.test(str); | ||
| } | ||
| class PromiseQueues extends WeakMap { | ||
| get(window) { | ||
| const cur = super.get(window); | ||
| return cur !== undefined ? cur : Promise.resolve(); | ||
| } | ||
| } | ||
| const openSockets = new WeakMap(); | ||
| const openingQueues = new PromiseQueues(); | ||
| class WebSocketImpl extends EventTargetImpl { | ||
| constructor(globalObject, args, privateData) { | ||
| super(globalObject, args, privateData); | ||
| constructor(globalObject, [url, protocols]) { | ||
| super(globalObject); | ||
| this._ownerDocument = idlUtils.implForWrapper(globalObject._document); | ||
| const url = args[0]; | ||
| let protocols = args[1] !== undefined ? args[1] : []; | ||
| // Do our own URL parsing because we want to be consistent with the rest of jsdom and use whatwg-url, not Node.js's | ||
| // URL. | ||
| const urlRecord = parseURL(url); | ||
@@ -81,37 +40,27 @@ if (urlRecord === null) { | ||
| if (typeof protocols === "string") { | ||
| protocols = [protocols]; | ||
| } | ||
| const protocolSet = new Set(); | ||
| for (const protocol of protocols) { | ||
| if (!verifySecWebSocketProtocol(protocol)) { | ||
| throw DOMException.create(this._globalObject, [`The subprotocol '${protocol}' is invalid.`, "SyntaxError"]); | ||
| this._urlRecord = urlRecord; | ||
| this._urlSerialized = serializeURL(urlRecord); | ||
| this._binaryType = "blob"; | ||
| const wsOptions = { | ||
| dispatcher: globalObject._dispatcher, | ||
| protocols, | ||
| headers: { | ||
| // Origin is required for WebSocket and uses the window's origin | ||
| origin: globalObject._origin | ||
| } | ||
| const lowered = protocol.toLowerCase(); | ||
| if (protocolSet.has(lowered)) { | ||
| throw DOMException.create(this._globalObject, [ | ||
| `The subprotocol '${protocol}' is duplicated.`, | ||
| "SyntaxError" | ||
| ]); | ||
| } | ||
| protocolSet.add(lowered); | ||
| } | ||
| }; | ||
| this._urlRecord = urlRecord; | ||
| this.url = serializeURL(urlRecord); | ||
| const nodeParsedURL = nodeURL.parse(this.url); | ||
| this.extensions = ""; | ||
| this._ws = wrapAndRethrowNodeDOMExceptions(() => { | ||
| return new WebSocket(serializeURL(urlRecord), wsOptions); | ||
| }, this._globalObject); | ||
| this.binaryType = "blob"; | ||
| // Always use "arraybuffer" for `this._ws`'s `binaryType`. It will be converted to the correct type by `_onMessage`, | ||
| // and jsdom's `Blob`s are just wrappers around `ArrayBuffer`s anyway. | ||
| this._ws.binaryType = "arraybuffer"; | ||
| this._ws = null; | ||
| // Used when this._ws has not been initialized yet. | ||
| this._readyState = CONNECTING; | ||
| this._requiredToFail = false; | ||
| this.bufferedAmount = 0; | ||
| this._sendQueue = []; | ||
| // Track open sockets for cleanup | ||
| let openSocketsForWindow = openSockets.get(globalObject._globalProxy); | ||
| if (openSocketsForWindow === undefined) { | ||
| openSocketsForWindow = new Set(); | ||
| openSocketsForWindow = new IterableWeakSet(); | ||
| openSockets.set(globalObject._globalProxy, openSocketsForWindow); | ||
@@ -121,103 +70,52 @@ } | ||
| openingQueues.set(this._ownerDocument, openingQueues.get(this._ownerDocument).then(() => new Promise(resolve => { | ||
| // close() called before _ws has been initialized. | ||
| if (this._requiredToFail) { | ||
| resolve(); | ||
| this._readyState = CLOSED; | ||
| this._onConnectionClosed(1006, ""); | ||
| return; | ||
| } | ||
| // Set up event forwarding. We use `setTimeout()` to work around https://github.com/nodejs/undici/issues/4741 where | ||
| // undici fires events synchronously during `close()`, but the spec requires them to fire asynchronously. | ||
| this._ws.addEventListener("open", () => { | ||
| setTimeout(() => fireAnEvent("open", this), 0); | ||
| }); | ||
| this._ws = new WebSocket(this.url, protocols, { | ||
| headers: { | ||
| "user-agent": globalObject.navigator.userAgent, | ||
| "cookie": this._ownerDocument._cookieJar.getCookieStringSync(nodeParsedURL, { http: true }), | ||
| "origin": globalObject._origin | ||
| }, | ||
| rejectUnauthorized: globalObject._resourceLoader._strictSSL | ||
| }); | ||
| this._ws.once("open", () => { | ||
| resolve(); | ||
| this._onConnectionEstablished(); | ||
| }); | ||
| this._ws.on("message", this._onMessageReceived.bind(this)); | ||
| this._ws.once("close", (...closeArgs) => { | ||
| resolve(); | ||
| this._onConnectionClosed(...closeArgs); | ||
| }); | ||
| this._ws.once("upgrade", ({ headers }) => { | ||
| if (Array.isArray(headers["set-cookie"])) { | ||
| for (const cookie of headers["set-cookie"]) { | ||
| this._ownerDocument._cookieJar.setCookieSync( | ||
| cookie, | ||
| nodeParsedURL, | ||
| { http: true, ignoreError: true } | ||
| ); | ||
| } | ||
| } else if (headers["set-cookie"] !== undefined) { | ||
| this._ownerDocument._cookieJar.setCookieSync( | ||
| headers["set-cookie"], | ||
| nodeParsedURL, | ||
| { http: true, ignoreError: true } | ||
| ); | ||
| } | ||
| }); | ||
| this._ws.once("error", () => { | ||
| // The exact error is passed into this callback, but it is ignored as we don't really care about it. | ||
| resolve(); | ||
| this._requiredToFail = true; | ||
| // Do not emit an error here, as that will be handled in _onConnectionClosed. ws always emits a close event | ||
| // after errors. | ||
| }); | ||
| }))); | ||
| } | ||
| this._ws.addEventListener("message", event => { | ||
| // Capture readyState now, before setTimeout, because undici may transition to CLOSED before our setTimeout fires, | ||
| // but the spec says readyState must be OPEN during message events. | ||
| const readyStateWhenReceived = this._ws.readyState; | ||
| setTimeout(() => { | ||
| const prevReadyState = this._readyState; | ||
| this._readyState = readyStateWhenReceived; | ||
| this._onMessage(event); | ||
| this._readyState = prevReadyState; | ||
| }, 0); | ||
| }); | ||
| // https://html.spec.whatwg.org/multipage/web-sockets.html#make-disappear | ||
| _makeDisappear() { | ||
| this._eventListeners = Object.create(null); | ||
| this._close(1001); | ||
| } | ||
| this._ws.addEventListener("error", () => { | ||
| setTimeout(() => fireAnEvent("error", this), 0); | ||
| }); | ||
| static cleanUpWindow(window) { | ||
| const openSocketsForWindow = openSockets.get(window._globalProxy); | ||
| if (openSocketsForWindow !== undefined) { | ||
| for (const ws of openSocketsForWindow) { | ||
| ws._makeDisappear(); | ||
| } | ||
| } | ||
| this._ws.addEventListener("close", event => { | ||
| setTimeout(() => { | ||
| // Set readyState to CLOSED when firing the close event. We manage this ourselves because undici has bugs with | ||
| // readyState during close: https://github.com/nodejs/undici/issues/4742. | ||
| this._readyState = this._ws.CLOSED; | ||
| openSocketsForWindow.delete(this); | ||
| fireAnEvent("close", this, CloseEvent, { | ||
| wasClean: event.wasClean, | ||
| code: event.code, | ||
| reason: event.reason | ||
| }); | ||
| }, 0); | ||
| }); | ||
| } | ||
| // https://html.spec.whatwg.org/multipage/web-sockets.html#feedback-from-the-protocol | ||
| _onConnectionEstablished() { | ||
| // readyState is a getter. | ||
| if (this._ws.extensions !== null) { | ||
| // Right now, ws only supports one extension, permessage-deflate, without any parameters. This algorithm may need | ||
| // to be more sophiscated as more extenions are supported. | ||
| this.extensions = Object.keys(this._ws.extensions).join(", "); | ||
| _onMessage({ data }) { | ||
| let dataForEvent; | ||
| if (typeof data === "string") { | ||
| dataForEvent = data; | ||
| } else if (this._binaryType === "arraybuffer") { | ||
| dataForEvent = copyToArrayBufferInTargetRealmDestructively(data, this._globalObject); | ||
| } else { | ||
| // `this._binaryType === "blob"` | ||
| dataForEvent = Blob.create(this._globalObject, [undefined, { type: "" }], { | ||
| fastPathArrayBufferToWrap: data | ||
| }); | ||
| } | ||
| // protocol is a getter. | ||
| fireAnEvent("open", this); | ||
| } | ||
| _onMessageReceived(data, isBinary) { | ||
| if (this.readyState !== OPEN) { | ||
| return; | ||
| } | ||
| let dataForEvent; | ||
| if (!isBinary) { | ||
| dataForEvent = data.toString(); | ||
| } else if (this.binaryType === "arraybuffer") { | ||
| if (isArrayBuffer(data)) { | ||
| dataForEvent = data; | ||
| } else if (Array.isArray(data)) { | ||
| dataForEvent = copyToArrayBufferInNewRealm(Buffer.concat(data), this._globalObject); | ||
| } else { | ||
| dataForEvent = copyToArrayBufferInNewRealm(data, this._globalObject); | ||
| } | ||
| } else { // this.binaryType === "blob" | ||
| if (!Array.isArray(data)) { | ||
| data = [data]; | ||
| } | ||
| dataForEvent = Blob.create(this._globalObject, [data, { type: "" }]); | ||
| } | ||
| fireAnEvent("message", this, MessageEvent, { | ||
@@ -229,98 +127,83 @@ data: dataForEvent, | ||
| _onConnectionClosed(code, reason) { | ||
| const openSocketsForWindow = openSockets.get(this._ownerDocument._defaultView); | ||
| openSocketsForWindow.delete(this); | ||
| const wasClean = !this._requiredToFail; | ||
| if (this._requiredToFail) { | ||
| fireAnEvent("error", this); | ||
| } | ||
| fireAnEvent("close", this, CloseEvent, { | ||
| wasClean, | ||
| code, | ||
| reason: reason.toString() | ||
| }); | ||
| get url() { | ||
| return this._urlSerialized; | ||
| } | ||
| get readyState() { | ||
| if (this._ws !== null) { | ||
| return readyStateWSToDOM[this._ws.readyState]; | ||
| } | ||
| return this._readyState; | ||
| // Use captured readyState if available (workaround for undici bug #4742) | ||
| return this._readyState ?? this._ws.readyState; | ||
| } | ||
| get bufferedAmount() { | ||
| return this._ws.bufferedAmount; | ||
| } | ||
| get extensions() { | ||
| return this._ws.extensions; | ||
| } | ||
| get protocol() { | ||
| if (this._ws === null) { | ||
| return ""; | ||
| } | ||
| return this._ws.protocol; | ||
| } | ||
| close(code = undefined, reason = undefined) { | ||
| if (code !== undefined && code !== 1000 && !(code >= 3000 && code <= 4999)) { | ||
| throw DOMException.create(this._globalObject, [ | ||
| `The code must be either 1000, or between 3000 and 4999. ${code} is neither.`, | ||
| "InvalidAccessError" | ||
| ]); | ||
| } | ||
| if (reason !== undefined && Buffer.byteLength(reason, "utf8") > 123) { | ||
| throw DOMException.create(this._globalObject, [ | ||
| "The message must not be greater than 123 bytes.", | ||
| "SyntaxError" | ||
| ]); | ||
| } | ||
| this._close(code, reason); | ||
| get binaryType() { | ||
| return this._binaryType; | ||
| } | ||
| _close(code = undefined, reason = undefined) { | ||
| if (this.readyState === CONNECTING) { | ||
| this._requiredToFail = true; | ||
| if (this._ws !== null) { | ||
| this._ws.terminate(); | ||
| } else { | ||
| this._readyState = CLOSING; | ||
| set binaryType(value) { | ||
| this._binaryType = value; | ||
| } | ||
| close(code, reason) { | ||
| return wrapAndRethrowNodeDOMExceptions(() => { | ||
| // Set readyState to CLOSING before calling undici's close(). We manage this ourselves because | ||
| // undici has bugs with readyState during close - see https://github.com/nodejs/undici/issues/4742 | ||
| // Only set to CLOSING if not already CLOSED (calling close() on a closed socket is a no-op). | ||
| if (this._readyState !== this._ws.CLOSED) { | ||
| this._readyState = this._ws.CLOSING; | ||
| } | ||
| } else if (this.readyState === OPEN) { | ||
| this._ws.close(code, reason); | ||
| } | ||
| return this._ws.close(code, reason); | ||
| }, this._globalObject); | ||
| } | ||
| send(data) { | ||
| if (this.readyState === CONNECTING) { | ||
| throw DOMException.create(this._globalObject, ["Still in CONNECTING state.", "InvalidStateError"]); | ||
| return wrapAndRethrowNodeDOMExceptions(() => { | ||
| // Convert jsdom Blob to ArrayBuffer. Other types are passed through as-is. | ||
| if (Blob.isImpl(data)) { | ||
| data = data._bytes.buffer; | ||
| } | ||
| return this._ws.send(data); | ||
| }, this._globalObject); | ||
| } | ||
| // https://websockets.spec.whatwg.org/#make-disappear | ||
| // But with additional work from jsdom to remove all event listeners. | ||
| _makeDisappear() { | ||
| this._eventListeners = Object.create(null); | ||
| if (this._ws.readyState === this._ws.OPEN || this._ws.readyState === this._ws.CONNECTING) { | ||
| // Close without a code - undici doesn't allow reserved codes like 1001 | ||
| this._ws.close(); | ||
| } | ||
| if (this.readyState !== OPEN) { | ||
| return; | ||
| } | ||
| if (Blob.isImpl(data)) { | ||
| data = data._buffer; | ||
| } | ||
| let length; | ||
| if (typeof data === "string") { | ||
| length = Buffer.byteLength(data, "utf8"); | ||
| } else { | ||
| length = data.byteLength; | ||
| } | ||
| this.bufferedAmount += length; | ||
| this._sendQueue.push([data, length]); | ||
| this._scheduleSend(); | ||
| } | ||
| _actuallySend() { | ||
| for (const [data, length] of this._sendQueue.splice(0)) { | ||
| this._ws.send(data, { binary: typeof data !== "string" }, () => { | ||
| this.bufferedAmount -= length; | ||
| }); | ||
| static cleanUpWindow(window) { | ||
| const openSocketsForWindow = openSockets.get(window._globalProxy); | ||
| if (openSocketsForWindow !== undefined) { | ||
| for (const ws of openSocketsForWindow) { | ||
| ws._makeDisappear(); | ||
| } | ||
| openSockets.delete(window._globalProxy); | ||
| } | ||
| } | ||
| } | ||
| _scheduleSend() { | ||
| if (this._dequeueScheduled) { | ||
| return; | ||
| function wrapAndRethrowNodeDOMExceptions(func, globalObject) { | ||
| try { | ||
| return func(); | ||
| } catch (e) { | ||
| if (e instanceof globalThis.DOMException) { | ||
| throw DOMException.create(globalObject, [e.message, e.name]); | ||
| } | ||
| this._dequeueScheduled = true; | ||
| process.nextTick(() => { | ||
| this._dequeueScheduled = false; | ||
| this._actuallySend(); | ||
| }); | ||
| throw e; | ||
| } | ||
@@ -327,0 +210,0 @@ } |
@@ -5,8 +5,10 @@ "use strict"; | ||
| const reportException = require("../helpers/runtime-script-errors.js"); | ||
| const { utf8Decode } = require("../helpers/encoding.js"); | ||
| const idlUtils = require("../generated/utils.js"); | ||
| // https://html.spec.whatwg.org/#evaluate-a-javascript:-url, kind of | ||
| exports.evaluateJavaScriptURL = (window, urlRecord) => { | ||
| const urlString = whatwgURL.serializeURL(urlRecord); | ||
| const encodedScriptSource = urlString.substring("javascript:".length); | ||
| const scriptSource = Buffer.from(whatwgURL.percentDecodeString(encodedScriptSource)).toString(); | ||
| const scriptSource = utf8Decode(whatwgURL.percentDecodeString(encodedScriptSource)); | ||
| if (window._runScripts === "dangerously") { | ||
@@ -13,0 +15,0 @@ try { |
@@ -86,3 +86,3 @@ "use strict"; | ||
| // "representing the same bytes" | ||
| value._buffer = oldValue._buffer; | ||
| value._bytes = oldValue._bytes; | ||
| } | ||
@@ -100,3 +100,3 @@ | ||
| // "representing the same bytes" | ||
| value._buffer = oldValue._buffer; | ||
| value._bytes = oldValue._bytes; | ||
| } | ||
@@ -103,0 +103,0 @@ |
| "use strict"; | ||
| const conversions = require("webidl-conversions"); | ||
| const { utf8Encode } = require("../helpers/encoding"); | ||
| // https://fetch.spec.whatwg.org/#concept-bodyinit-extract (note: always UTF-8) | ||
@@ -7,8 +9,6 @@ // https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#multipart%2Fform-data-encoding-algorithm | ||
| const utf8Encoder = new TextEncoder(); | ||
| const contentDispositionPrefix = utf8Encode(`Content-Disposition: form-data; name="`); | ||
| const filenamePrefix = utf8Encode(`; filename="`); | ||
| const contentType = utf8Encode(`Content-Type: `); | ||
| const contentDispositionPrefix = utf8Encoder.encode(`Content-Disposition: form-data; name="`); | ||
| const filenamePrefix = utf8Encoder.encode(`; filename="`); | ||
| const contentType = utf8Encoder.encode(`Content-Type: `); | ||
| // https://andreubotella.github.io/multipart-form-data/#multipart-form-data-boundary | ||
@@ -20,3 +20,3 @@ function generateBoundary() { | ||
| } | ||
| return utf8Encoder.encode(boundary); | ||
| return utf8Encode(boundary); | ||
| } | ||
@@ -32,3 +32,3 @@ | ||
| const encoded = utf8Encoder.encode(name); | ||
| const encoded = utf8Encode(name); | ||
| const encodedWithSubs = []; | ||
@@ -70,3 +70,3 @@ for (const originalByte of encoded) { | ||
| chunkBytes.push(...utf8Encoder.encode(value)); | ||
| chunkBytes.push(...utf8Encode(value)); | ||
@@ -82,3 +82,3 @@ chunkBytes.push(0x0D, 0x0A); | ||
| const type = value.type !== "" ? value.type : "application/octet-stream"; | ||
| chunkBytes.push(...contentType, ...utf8Encoder.encode(type)); | ||
| chunkBytes.push(...contentType, ...utf8Encode(type)); | ||
@@ -89,4 +89,4 @@ chunkBytes.push(0x0D, 0x0A, 0x0D, 0x0A); | ||
| new Uint8Array(chunkBytes), | ||
| // The spec returns the File object here but for our purposes the bytes (as a `Buffer`) are more convenient. | ||
| value._buffer, | ||
| // The spec returns the File object here but for our purposes the bytes (as a `Uint8Array`) are more convenient. | ||
| value._bytes, | ||
| new Uint8Array([0x0D, 0x0A]) | ||
@@ -106,7 +106,1 @@ ); | ||
| }; | ||
| // Inspired by https://andreubotella.github.io/multipart-form-data/#create-a-multipart-form-data-readable-stream | ||
| // (But we don't stream in jsdom, at least for now.) | ||
| exports.chunksToBuffer = chunks => { | ||
| return Buffer.concat(chunks); | ||
| }; |
| "use strict"; | ||
| const util = require("util"); | ||
| const { JSDOM } = require("../../../.."); | ||
| const { READY_STATES } = require("./xhr-utils"); | ||
| const idlUtils = require("../generated/utils"); | ||
| const tough = require("tough-cookie"); | ||
@@ -19,42 +16,22 @@ const dom = new JSDOM(); | ||
| process.stdin.on("end", () => { | ||
| // eslint-disable-next-line no-restricted-globals -- We can't avoid receiving `Buffer`s from `process.stdin`. | ||
| const buffer = Buffer.concat(chunks); | ||
| const flag = JSON.parse(buffer.toString()); | ||
| if (flag.body && flag.body.type === "Buffer" && flag.body.data) { | ||
| flag.body = Buffer.from(flag.body.data); | ||
| const config = JSON.parse(buffer.toString()); | ||
| xhrImpl._adoptSerializedRequest(config); | ||
| function writeResultAndExit() { | ||
| process.stdout.write(JSON.stringify(xhrImpl._serializeResponse()), () => { | ||
| // Exit immediately. The process destruction will handle all connection cleanup. | ||
| process.exit(0); | ||
| }); | ||
| } | ||
| if (flag.cookieJar) { | ||
| flag.cookieJar = tough.CookieJar.fromJSON(flag.cookieJar); | ||
| } | ||
| flag.synchronous = false; | ||
| Object.assign(xhrImpl.flag, flag); | ||
| const { properties } = xhrImpl; | ||
| xhrImpl.readyState = READY_STATES.OPENED; | ||
| try { | ||
| xhr.addEventListener("loadend", () => { | ||
| if (properties.error) { | ||
| properties.error = properties.error.stack || util.inspect(properties.error); | ||
| } | ||
| process.stdout.write(JSON.stringify({ | ||
| responseURL: xhrImpl.responseURL, | ||
| status: xhrImpl.status, | ||
| statusText: xhrImpl.statusText, | ||
| properties | ||
| }), () => { | ||
| process.exit(0); | ||
| }); | ||
| }, false); | ||
| xhr.send(flag.body); | ||
| xhr.addEventListener("loadend", writeResultAndExit, false); | ||
| xhr.send(xhrImpl._body); | ||
| } catch (error) { | ||
| properties.error += error.stack || util.inspect(error); | ||
| process.stdout.write(JSON.stringify({ | ||
| responseURL: xhrImpl.responseURL, | ||
| status: xhrImpl.status, | ||
| statusText: xhrImpl.statusText, | ||
| properties | ||
| }), () => { | ||
| process.exit(0); | ||
| }); | ||
| xhrImpl._error = error; | ||
| writeResultAndExit(); | ||
| } | ||
| }); |
| "use strict"; | ||
| const fs = require("fs"); | ||
| const { EventEmitter } = require("events"); | ||
| const { URL } = require("whatwg-url"); | ||
| const parseDataURL = require("data-urls"); | ||
| const DOMException = require("../generated/DOMException"); | ||
| const { Readable } = require("stream"); | ||
| const { parseURL, serializeURL, serializeURLOrigin } = require("whatwg-url"); | ||
| const HeaderList = require("../fetch/header-list"); | ||
| const { isForbiddenResponseHeaderName, isNoCORSSafelistedRequestHeaderName } = require("../fetch/header-types"); | ||
| const ProgressEvent = require("../generated/ProgressEvent"); | ||
| const agentFactory = require("../helpers/agent-factory"); | ||
| const Request = require("../helpers/http-request"); | ||
| const { fireAnEvent } = require("../helpers/events"); | ||
| const headerListSeparatorRegexp = /,[ \t]*/; | ||
| const simpleMethods = new Set(["GET", "HEAD", "POST"]); | ||
| const simpleHeaders = new Set(["accept", "accept-language", "content-language", "content-type"]); | ||
| const preflightHeaders = new Set([ | ||
| "access-control-expose-headers", | ||
| "access-control-allow-headers", | ||
| "access-control-allow-credentials", | ||
| "access-control-allow-origin" | ||
| const corsSafeResponseHeaders = new Set([ | ||
| "cache-control", | ||
| "content-language", | ||
| "content-length", | ||
| "content-type", | ||
| "expires", | ||
| "last-modified", | ||
| "pragma" | ||
| ]); | ||
| const READY_STATES = exports.READY_STATES = Object.freeze({ | ||
| UNSENT: 0, | ||
| OPENED: 1, | ||
| HEADERS_RECEIVED: 2, | ||
| LOADING: 3, | ||
| DONE: 4 | ||
| }); | ||
| function getRequestHeader(requestHeaders, header) { | ||
| const lcHeader = header.toLowerCase(); | ||
| const keys = Object.keys(requestHeaders); | ||
| let n = keys.length; | ||
| while (n--) { | ||
| const key = keys[n]; | ||
| if (key.toLowerCase() === lcHeader) { | ||
| return requestHeaders[key]; | ||
| } | ||
| } | ||
| return null; | ||
| /** | ||
| * A network error response. | ||
| */ | ||
| function makeNetworkError(error = null) { | ||
| return { type: "error", error }; | ||
| } | ||
| function updateRequestHeader(requestHeaders, header, newValue) { | ||
| const lcHeader = header.toLowerCase(); | ||
| const keys = Object.keys(requestHeaders); | ||
| let n = keys.length; | ||
| while (n--) { | ||
| const key = keys[n]; | ||
| if (key.toLowerCase() === lcHeader) { | ||
| requestHeaders[key] = newValue; | ||
| } | ||
| } | ||
| /** | ||
| * Check if a response is a network error. | ||
| */ | ||
| function isNetworkError(response) { | ||
| return response && response.type === "error"; | ||
| } | ||
| function dispatchError(xhr) { | ||
| const errMessage = xhr.properties.error; | ||
| requestErrorSteps(xhr, "error", DOMException.create(xhr._globalObject, [errMessage, "NetworkError"])); | ||
| } | ||
| /** | ||
| * Performs a fetch operation for XHR with full CORS handling. | ||
| * Delegates redirect handling and CORS validation to the dispatcher. | ||
| * | ||
| * @param {JSDOMDispatcher} dispatcher - The dispatcher to use for sending the request | ||
| * @param {Object} config - Request configuration | ||
| * @param {AbortSignal} signal - Signal to abort the fetch | ||
| * @returns {Promise<Object>} The response object with filteredResponseHeaders, or a network error response | ||
| */ | ||
| async function performFetch(dispatcher, config, signal) { | ||
| const { | ||
| url, | ||
| method, | ||
| requestHeaders: userRequestHeaders, | ||
| body, | ||
| origin, | ||
| referrer, | ||
| withCredentials, | ||
| auth, | ||
| uploadListener | ||
| } = config; | ||
| function validCORSHeaders(xhr, response, flag, properties, origin) { | ||
| const acaoStr = response.headers["access-control-allow-origin"]; | ||
| const acao = acaoStr ? acaoStr.trim() : null; | ||
| if (acao !== "*" && acao !== origin) { | ||
| properties.error = "Cross origin " + origin + " forbidden"; | ||
| dispatchError(xhr); | ||
| return false; | ||
| } | ||
| const acacStr = response.headers["access-control-allow-credentials"]; | ||
| const acac = acacStr ? acacStr.trim() : null; | ||
| if (flag.withCredentials && acac !== "true") { | ||
| properties.error = "Credentials forbidden"; | ||
| dispatchError(xhr); | ||
| return false; | ||
| } | ||
| return true; | ||
| } | ||
| const urlRecord = parseURL(url); | ||
| const ucMethod = method.toUpperCase(); | ||
| function validCORSPreflightHeaders(xhr, response, flag, properties) { | ||
| if (!validCORSHeaders(xhr, response, flag, properties, properties.origin)) { | ||
| return false; | ||
| } | ||
| const acahStr = response.headers["access-control-allow-headers"]; | ||
| const acah = new Set(acahStr ? acahStr.trim().toLowerCase().split(headerListSeparatorRegexp) : []); | ||
| const forbiddenHeaders = acah.has("*") ? | ||
| [] : | ||
| Object.keys(flag.requestHeaders).filter(header => { | ||
| const lcHeader = header.toLowerCase(); | ||
| return !simpleHeaders.has(lcHeader) && !acah.has(lcHeader); | ||
| }); | ||
| if (forbiddenHeaders.length > 0) { | ||
| properties.error = "Headers " + forbiddenHeaders + " forbidden"; | ||
| dispatchError(xhr); | ||
| return false; | ||
| } | ||
| return true; | ||
| } | ||
| // Build request headers - start with user-set headers | ||
| // Default headers (User-Agent, Accept, etc.) are added by the dispatcher | ||
| const requestHeaders = userRequestHeaders.clone(); | ||
| function requestErrorSteps(xhr, event, exception) { | ||
| const { flag, properties, upload } = xhr; | ||
| xhr.readyState = READY_STATES.DONE; | ||
| properties.send = false; | ||
| setResponseToNetworkError(xhr); | ||
| if (flag.synchronous) { | ||
| throw exception; | ||
| if (referrer) { | ||
| requestHeaders.set("Referer", referrer); | ||
| } | ||
| fireAnEvent("readystatechange", xhr); | ||
| if (!properties.uploadComplete) { | ||
| properties.uploadComplete = true; | ||
| if (properties.uploadListener) { | ||
| fireAnEvent(event, upload, ProgressEvent, { loaded: 0, total: 0, lengthComputable: false }); | ||
| fireAnEvent("loadend", upload, ProgressEvent, { loaded: 0, total: 0, lengthComputable: false }); | ||
| } | ||
| const crossOrigin = origin !== serializeURLOrigin(urlRecord); | ||
| if (crossOrigin) { | ||
| requestHeaders.set("Origin", origin); | ||
| } | ||
| fireAnEvent(event, xhr, ProgressEvent, { loaded: 0, total: 0, lengthComputable: false }); | ||
| fireAnEvent("loadend", xhr, ProgressEvent, { loaded: 0, total: 0, lengthComputable: false }); | ||
| } | ||
| function setResponseToNetworkError(xhr) { | ||
| const { properties } = xhr; | ||
| properties.responseBuffer = | ||
| properties.responseCache = | ||
| properties.responseTextCache = | ||
| properties.responseXMLCache = null; | ||
| properties.responseHeaders = {}; | ||
| xhr.status = 0; | ||
| xhr.statusText = ""; | ||
| } | ||
| // return a "request" client object or an event emitter matching the same behaviour for unsupported protocols | ||
| // the callback should be called with a "request" response object or an event emitter matching the same behaviour too | ||
| function createClient(xhr) { | ||
| const { flag, properties } = xhr; | ||
| const urlObj = new URL(flag.uri); | ||
| const uri = urlObj.href; | ||
| const ucMethod = flag.method.toUpperCase(); | ||
| const { requestManager } = flag; | ||
| if (urlObj.protocol === "file:") { | ||
| const response = new EventEmitter(); | ||
| response.statusCode = 200; | ||
| response.rawHeaders = []; | ||
| response.headers = {}; | ||
| const filePath = urlObj.pathname | ||
| .replace(/^file:\/\//, "") | ||
| .replace(/^\/([a-z]):\//i, "$1:/") | ||
| .replace(/%20/g, " "); | ||
| const client = new EventEmitter(); | ||
| const readableStream = fs.createReadStream(filePath, { encoding: null }); | ||
| readableStream.on("data", chunk => { | ||
| response.emit("data", chunk); | ||
| client.emit("data", chunk); | ||
| }); | ||
| readableStream.on("end", () => { | ||
| response.emit("end"); | ||
| client.emit("end"); | ||
| }); | ||
| readableStream.on("error", err => { | ||
| client.emit("error", err); | ||
| }); | ||
| client.abort = function () { | ||
| readableStream.destroy(); | ||
| client.emit("abort"); | ||
| }; | ||
| if (requestManager) { | ||
| const req = { | ||
| abort() { | ||
| properties.abortError = true; | ||
| xhr.abort(); | ||
| } | ||
| }; | ||
| requestManager.add(req); | ||
| const rmReq = requestManager.remove.bind(requestManager, req); | ||
| client.on("abort", rmReq); | ||
| client.on("error", rmReq); | ||
| client.on("end", rmReq); | ||
| // Compute if preflight is needed (but don't execute - dispatcher will) | ||
| const nonSimpleHeaders = []; | ||
| for (const name of userRequestHeaders.names()) { | ||
| if (!isNoCORSSafelistedRequestHeaderName(name)) { | ||
| nonSimpleHeaders.push(name); | ||
| } | ||
| process.nextTick(() => client.emit("response", response, urlObj.href)); | ||
| return client; | ||
| } | ||
| const needsPreflight = crossOrigin && | ||
| (!simpleMethods.has(ucMethod) || nonSimpleHeaders.length > 0 || uploadListener); | ||
| if (urlObj.protocol === "data:") { | ||
| const response = new EventEmitter(); | ||
| const client = new EventEmitter(); | ||
| let buffer; | ||
| try { | ||
| const parsed = parseDataURL(uri); | ||
| const contentType = parsed.mimeType.toString(); | ||
| buffer = Buffer.from(parsed.body); | ||
| response.statusCode = 200; | ||
| response.rawHeaders = ["Content-Type", contentType]; | ||
| response.headers = { "content-type": contentType }; | ||
| } catch (err) { | ||
| process.nextTick(() => client.emit("error", err)); | ||
| return client; | ||
| } | ||
| client.abort = () => { | ||
| // do nothing | ||
| }; | ||
| process.nextTick(() => { | ||
| client.emit("response", response, urlObj.href); | ||
| process.nextTick(() => { | ||
| response.emit("data", buffer); | ||
| client.emit("data", buffer); | ||
| response.emit("end"); | ||
| client.emit("end"); | ||
| }); | ||
| }); | ||
| return client; | ||
| // Build opaque options for dispatcher | ||
| const opaque = { | ||
| element: null, | ||
| url, | ||
| origin, | ||
| corsMode: crossOrigin, // Enable CORS handling if cross-origin | ||
| withCredentials, | ||
| auth | ||
| }; | ||
| if (needsPreflight) { | ||
| opaque.preflight = { unsafeHeaders: nonSimpleHeaders }; | ||
| } | ||
| const agents = agentFactory(flag.proxy, flag.strictSSL); | ||
| const requestHeaders = {}; | ||
| for (const header in flag.requestHeaders) { | ||
| requestHeaders[header] = flag.requestHeaders[header]; | ||
| } | ||
| // Single dispatch call - dispatcher handles preflight, redirects, CORS | ||
| const response = await dispatchMainRequest(dispatcher, { | ||
| method, | ||
| headers: requestHeaders, | ||
| body, | ||
| opaque | ||
| }, signal); | ||
| if (getRequestHeader(flag.requestHeaders, "referer") === null) { | ||
| requestHeaders.Referer = flag.referrer; | ||
| if (isNetworkError(response)) { | ||
| return response; | ||
| } | ||
| if (getRequestHeader(flag.requestHeaders, "user-agent") === null) { | ||
| requestHeaders["User-Agent"] = flag.userAgent; | ||
| } | ||
| if (getRequestHeader(flag.requestHeaders, "accept-language") === null) { | ||
| requestHeaders["Accept-Language"] = "en"; | ||
| } | ||
| if (getRequestHeader(flag.requestHeaders, "accept") === null) { | ||
| requestHeaders.Accept = "*/*"; | ||
| } | ||
| const crossOrigin = flag.origin !== urlObj.origin; | ||
| if (crossOrigin) { | ||
| requestHeaders.Origin = flag.origin; | ||
| } | ||
| // Build filtered response headers (post-processing) | ||
| const filteredResponseHeaders = new Set(); | ||
| const { headers } = response; | ||
| const options = { rejectUnauthorized: flag.strictSSL, agents, followRedirects: true }; | ||
| if (flag.auth) { | ||
| options.user = flag.auth.user || ""; | ||
| options.pass = flag.auth.pass || ""; | ||
| } | ||
| if (flag.cookieJar && (!crossOrigin || flag.withCredentials)) { | ||
| options.cookieJar = flag.cookieJar; | ||
| } | ||
| // Determine effective origin for filtering (from response URL after redirects) | ||
| const destUrlRecord = parseURL(response.url); | ||
| const isCrossOriginResponse = origin !== serializeURLOrigin(destUrlRecord) && destUrlRecord.scheme !== "data"; | ||
| const { body } = flag; | ||
| const hasBody = body !== undefined && | ||
| body !== null && | ||
| body !== "" && | ||
| !(ucMethod === "HEAD" || ucMethod === "GET"); | ||
| if (hasBody && getRequestHeader(flag.requestHeaders, "content-type") === null) { | ||
| requestHeaders["Content-Type"] = "text/plain;charset=UTF-8"; | ||
| if (isCrossOriginResponse) { | ||
| // Filter headers not exposed by CORS | ||
| const acehStr = headers.get("access-control-expose-headers"); | ||
| const aceh = new Set(acehStr ? acehStr.trim().toLowerCase().split(headerListSeparatorRegexp) : []); | ||
| for (const [header] of headers) { | ||
| if (!corsSafeResponseHeaders.has(header) && !aceh.has(header) && !aceh.has("*")) { | ||
| filteredResponseHeaders.add(header); | ||
| } | ||
| } | ||
| } | ||
| function doRequest() { | ||
| try { | ||
| requestHeaders["Accept-Encoding"] = "gzip, deflate"; | ||
| let len = 0; | ||
| if (hasBody) { | ||
| len = body.byteLength; | ||
| requestHeaders["Content-Length"] = len; | ||
| } | ||
| const requestClient = new Request(uri, options, { method: flag.method, headers: requestHeaders }); | ||
| if (hasBody) { | ||
| requestClient.write(body); | ||
| } | ||
| return requestClient; | ||
| } catch (e) { | ||
| const eventEmitterclient = new EventEmitter(); | ||
| process.nextTick(() => eventEmitterclient.emit("error", e)); | ||
| eventEmitterclient.end = () => {}; | ||
| eventEmitterclient.abort = () => { | ||
| // do nothing | ||
| }; | ||
| return eventEmitterclient; | ||
| // Always filter forbidden response headers | ||
| for (const [header] of headers) { | ||
| if (isForbiddenResponseHeaderName(header)) { | ||
| filteredResponseHeaders.add(header); | ||
| } | ||
| } | ||
| let client; | ||
| response.filteredResponseHeaders = filteredResponseHeaders; | ||
| return response; | ||
| } | ||
| const nonSimpleHeaders = Object.keys(flag.requestHeaders) | ||
| .filter(header => !simpleHeaders.has(header.toLowerCase())); | ||
| /** | ||
| * Helper to dispatch a request and return a Promise with the response. | ||
| * | ||
| * We use the callback-based dispatch() API instead of the simpler request() API because of | ||
| * WrapHandler's incorrect encoding of header values: https://github.com/nodejs/undici/issues/4797 | ||
| */ | ||
| function dispatchMainRequest(dispatcher, opts, signal) { | ||
| if (signal.aborted) { | ||
| return Promise.resolve(makeNetworkError()); | ||
| } | ||
| if (crossOrigin && (!simpleMethods.has(ucMethod) || nonSimpleHeaders.length > 0 || properties.uploadListener)) { | ||
| client = new EventEmitter(); | ||
| return new Promise(resolve => { | ||
| let context, bodyStream, sendAbortToUndiciController; | ||
| const preflightRequestHeaders = {}; | ||
| for (const header in requestHeaders) { | ||
| // the only existing request headers the cors spec allows on the preflight request are Origin and Referer | ||
| const lcHeader = header.toLowerCase(); | ||
| if (lcHeader === "origin" || lcHeader === "referer") { | ||
| preflightRequestHeaders[header] = requestHeaders[header]; | ||
| } | ||
| function onAbort() { | ||
| sendAbortToUndiciController?.(signal.reason); | ||
| bodyStream?.destroy(); | ||
| resolve(makeNetworkError()); | ||
| } | ||
| signal.addEventListener("abort", onAbort, { once: true }); | ||
| preflightRequestHeaders["Access-Control-Request-Method"] = flag.method; | ||
| if (nonSimpleHeaders.length > 0) { | ||
| preflightRequestHeaders["Access-Control-Request-Headers"] = nonSimpleHeaders.join(", "); | ||
| } | ||
| dispatcher.dispatch(opts, { | ||
| onRequestStart(controller, ctx) { | ||
| context = ctx; | ||
| sendAbortToUndiciController = reason => controller.abort(reason); | ||
| if (signal.aborted) { | ||
| controller.abort(signal.reason); | ||
| } | ||
| }, | ||
| onResponseStart(controller, statusCode, headers, statusText) { | ||
| if (signal.aborted) { | ||
| return; | ||
| } | ||
| preflightRequestHeaders["User-Agent"] = flag.userAgent; | ||
| // Prevent unhandled "error" events from `destroy()` calls. | ||
| bodyStream = new Readable({ read() {} }); | ||
| bodyStream.on("error", () => {}); | ||
| flag.preflight = true; | ||
| // Get final URL from context (set by dispatcher after handling redirects) | ||
| const finalURL = serializeURL(context.finalURL); | ||
| const rejectUnauthorized = flag.strictSSL; | ||
| const preflightClient = new Request( | ||
| uri, | ||
| { agents, followRedirects: false }, | ||
| { method: "OPTIONS", headers: preflightRequestHeaders, rejectUnauthorized } | ||
| ); | ||
| preflightClient.on("response", resp => { | ||
| // don't send the real request if the preflight request returned an error | ||
| if (resp.statusCode < 200 || resp.statusCode > 299) { | ||
| client.emit("error", new Error("Response for preflight has invalid HTTP status code " + resp.statusCode)); | ||
| return; | ||
| resolve({ | ||
| status: statusCode, | ||
| statusText: statusText || "", | ||
| headers: HeaderList.fromJSON(headers), | ||
| body: bodyStream, | ||
| url: finalURL | ||
| }); | ||
| }, | ||
| onResponseData(controller, chunk) { | ||
| if (signal.aborted) { | ||
| return; | ||
| } | ||
| bodyStream.push(chunk); | ||
| }, | ||
| onResponseEnd() { | ||
| signal.removeEventListener("abort", onAbort); | ||
| bodyStream?.push(null); | ||
| }, | ||
| onResponseError(controller, err) { | ||
| signal.removeEventListener("abort", onAbort); | ||
| bodyStream?.destroy(err); | ||
| resolve(makeNetworkError(err)); | ||
| } | ||
| // don't send the real request if we aren't allowed to use the headers | ||
| if (!validCORSPreflightHeaders(xhr, resp, flag, properties)) { | ||
| setResponseToNetworkError(xhr); | ||
| return; | ||
| } | ||
| // Set request gzip option right before headers are set | ||
| const realClient = doRequest(); | ||
| realClient.on("response", (...args) => client.emit("response", ...args)); | ||
| realClient.on("data", chunk => client.emit("data", chunk)); | ||
| realClient.on("end", () => client.emit("end")); | ||
| realClient.on("abort", () => client.emit("abort")); | ||
| realClient.on("request", req => { | ||
| client.headers = realClient.headers; | ||
| client.emit("request", req); | ||
| }); | ||
| realClient.on("redirect", (...args) => { | ||
| client.emit("redirect", ...args); | ||
| }); | ||
| realClient.on("error", err => { | ||
| client.emit("error", err); | ||
| }); | ||
| client.abort = () => { | ||
| realClient.abort(); | ||
| }; | ||
| setImmediate(() => realClient.end()); | ||
| }); | ||
| preflightClient.on("error", err => { | ||
| client.emit("error", err); | ||
| }); | ||
| client.abort = () => { | ||
| preflightClient.abort(); | ||
| }; | ||
| setImmediate(() => preflightClient.end()); | ||
| } else { | ||
| client = doRequest(); | ||
| setImmediate(() => client.end()); | ||
| } | ||
| if (requestManager) { | ||
| const req = { | ||
| abort() { | ||
| properties.abortError = true; | ||
| xhr.abort(); | ||
| } | ||
| }; | ||
| requestManager.add(req); | ||
| const rmReq = requestManager.remove.bind(requestManager, req); | ||
| client.on("abort", rmReq); | ||
| client.on("error", rmReq); | ||
| client.on("end", rmReq); | ||
| } | ||
| return client; | ||
| }); | ||
| } | ||
| exports.headerListSeparatorRegexp = headerListSeparatorRegexp; | ||
| exports.simpleHeaders = simpleHeaders; | ||
| exports.preflightHeaders = preflightHeaders; | ||
| exports.getRequestHeader = getRequestHeader; | ||
| exports.updateRequestHeader = updateRequestHeader; | ||
| exports.dispatchError = dispatchError; | ||
| exports.validCORSHeaders = validCORSHeaders; | ||
| exports.requestErrorSteps = requestErrorSteps; | ||
| exports.setResponseToNetworkError = setResponseToNetworkError; | ||
| exports.createClient = createClient; | ||
| exports.performFetch = performFetch; | ||
| exports.isNetworkError = isNetworkError; |
| "use strict"; | ||
| const HTTP_STATUS_CODES = require("http").STATUS_CODES; | ||
| const { spawnSync } = require("child_process"); | ||
| const { URL } = require("whatwg-url"); | ||
| const { inspect } = require("util"); | ||
| const { parseURL, serializeURL } = require("whatwg-url"); | ||
| const { getBOMEncoding, labelToName, legacyHookDecode } = require("@exodus/bytes/encoding.js"); | ||
| const tough = require("tough-cookie"); | ||
| const MIMEType = require("whatwg-mimetype"); | ||
| const xhrUtils = require("./xhr-utils"); | ||
| const { MIMEType } = require("whatwg-mimetype"); | ||
| const DOMException = require("../generated/DOMException"); | ||
| const { asciiCaseInsensitiveMatch } = require("../helpers/strings"); | ||
| const idlUtils = require("../generated/utils"); | ||
@@ -17,76 +14,80 @@ const Document = require("../generated/Document"); | ||
| const FormData = require("../generated/FormData"); | ||
| const XMLHttpRequestEventTargetImpl = require("./XMLHttpRequestEventTarget-impl").implementation; | ||
| const XMLHttpRequestUpload = require("../generated/XMLHttpRequestUpload"); | ||
| const ProgressEvent = require("../generated/ProgressEvent"); | ||
| const { isArrayBuffer } = require("../generated/utils"); | ||
| const { isHeaderName, isHeaderValue, normalizeHeaderValue } = require("../fetch/header-utils"); | ||
| const HeaderList = require("../fetch/header-list"); | ||
| const { isForbiddenRequestHeader } = require("../fetch/header-types"); | ||
| const { performFetch, isNetworkError } = require("./xhr-utils"); | ||
| const XMLHttpRequestEventTargetImpl = require("./XMLHttpRequestEventTarget-impl").implementation; | ||
| const { parseIntoDocument } = require("../../browser/parser"); | ||
| const { fragmentSerialization } = require("../domparsing/serialization"); | ||
| const { copyToArrayBufferInTargetRealmDestructively, concatTypedArrays } = require("../helpers/binary-data"); | ||
| const { setupForSimpleEventAccessors } = require("../helpers/create-event-accessor"); | ||
| const { utf8Encode, utf8Decode } = require("../helpers/encoding"); | ||
| const { fireAnEvent } = require("../helpers/events"); | ||
| const { parseJSONFromBytes } = require("../helpers/json"); | ||
| const { fireAnEvent } = require("../helpers/events"); | ||
| const { copyToArrayBufferInNewRealm } = require("../helpers/binary-data"); | ||
| const { serializeEntryList, chunksToBuffer } = require("./multipart-form-data.js"); | ||
| const { asciiCaseInsensitiveMatch } = require("../helpers/strings"); | ||
| const { serializeEntryList } = require("./multipart-form-data"); | ||
| const { READY_STATES } = xhrUtils; | ||
| const syncWorkerFile = require.resolve("./xhr-sync-worker.js"); | ||
| const syncWorkerFile = require.resolve ? require.resolve("./xhr-sync-worker.js") : null; | ||
| const READY_STATES = Object.freeze({ | ||
| UNSENT: 0, | ||
| OPENED: 1, | ||
| HEADERS_RECEIVED: 2, | ||
| LOADING: 3, | ||
| DONE: 4 | ||
| }); | ||
| const tokenRegexp = /^[!#$%&'*+\-.^_`|~0-9A-Za-z]+$/; | ||
| const fieldValueRegexp = /^[ \t]*(?:[\x21-\x7E\x80-\xFF](?:[ \t][\x21-\x7E\x80-\xFF])?)*[ \t]*$/; | ||
| const utf8Decoder = new TextDecoder(); | ||
| const forbiddenRequestHeaders = new Set([ | ||
| "accept-charset", | ||
| "accept-encoding", | ||
| "access-control-request-headers", | ||
| "access-control-request-method", | ||
| "connection", | ||
| "content-length", | ||
| "cookie", | ||
| "cookie2", | ||
| "date", | ||
| "dnt", | ||
| "expect", | ||
| "host", | ||
| "keep-alive", | ||
| "origin", | ||
| "referer", | ||
| "te", | ||
| "trailer", | ||
| "transfer-encoding", | ||
| "upgrade", | ||
| "via" | ||
| ]); | ||
| const forbiddenResponseHeaders = new Set([ | ||
| "set-cookie", | ||
| "set-cookie2" | ||
| ]); | ||
| const uniqueResponseHeaders = new Set([ | ||
| "content-type", | ||
| "content-length", | ||
| "user-agent", | ||
| "referer", | ||
| "host", | ||
| "authorization", | ||
| "proxy-authorization", | ||
| "if-modified-since", | ||
| "if-unmodified-since", | ||
| "from", | ||
| "location", | ||
| "max-forwards" | ||
| ]); | ||
| const corsSafeResponseHeaders = new Set([ | ||
| "cache-control", | ||
| "content-language", | ||
| "content-length", | ||
| "content-type", | ||
| "expires", | ||
| "last-modified", | ||
| "pragma" | ||
| ]); | ||
| const allowedRequestMethods = new Set(["OPTIONS", "GET", "HEAD", "POST", "PUT", "DELETE"]); | ||
| const forbiddenRequestMethods = new Set(["TRACK", "TRACE", "CONNECT"]); | ||
| // Helper functions for error handling | ||
| function dispatchError(xhr, errMessage) { | ||
| // Store the error message for sync XHR worker to serialize | ||
| xhr._error = errMessage; | ||
| requestErrorSteps(xhr, "error", DOMException.create(xhr._globalObject, [errMessage, "NetworkError"])); | ||
| } | ||
| function requestErrorSteps(xhr, event, exception) { | ||
| const { upload } = xhr; | ||
| xhr.readyState = READY_STATES.DONE; | ||
| xhr._send = false; | ||
| setResponseToNetworkError(xhr); | ||
| if (xhr._synchronous) { | ||
| throw exception; | ||
| } | ||
| fireAnEvent("readystatechange", xhr); | ||
| if (!xhr._uploadComplete) { | ||
| xhr._uploadComplete = true; | ||
| if (xhr._uploadListener) { | ||
| fireAnEvent(event, upload, ProgressEvent, { loaded: 0, total: 0, lengthComputable: false }); | ||
| fireAnEvent("loadend", upload, ProgressEvent, { loaded: 0, total: 0, lengthComputable: false }); | ||
| } | ||
| } | ||
| fireAnEvent(event, xhr, ProgressEvent, { loaded: 0, total: 0, lengthComputable: false }); | ||
| fireAnEvent("loadend", xhr, ProgressEvent, { loaded: 0, total: 0, lengthComputable: false }); | ||
| } | ||
| function setResponseToNetworkError(xhr) { | ||
| xhr._responseBytes = | ||
| xhr._responseCache = | ||
| xhr._responseTextCache = | ||
| xhr._responseXMLCache = null; | ||
| xhr._responseHeaders = new HeaderList(); | ||
| xhr.status = 0; | ||
| xhr.statusText = ""; | ||
| } | ||
| class XMLHttpRequestImpl extends XMLHttpRequestEventTargetImpl { | ||
@@ -96,3 +97,2 @@ constructor(window) { | ||
| // Avoid running `_ownerDocument` getter multiple times in the constructor: | ||
| const { _ownerDocument } = this; | ||
@@ -102,2 +102,3 @@ | ||
| // Public WebIDL properties | ||
| this.readyState = READY_STATES.UNSENT; | ||
@@ -108,64 +109,55 @@ this.responseURL = ""; | ||
| this.flag = { | ||
| synchronous: false, | ||
| withCredentials: false, | ||
| mimeType: null, | ||
| auth: null, | ||
| method: undefined, | ||
| responseType: "", | ||
| requestHeaders: {}, | ||
| referrer: _ownerDocument.URL, | ||
| uri: "", | ||
| timeout: 0, | ||
| body: undefined, | ||
| preflight: false, | ||
| requestManager: _ownerDocument._requestManager, | ||
| strictSSL: window._resourceLoader._strictSSL, | ||
| proxy: window._resourceLoader._proxy, | ||
| cookieJar: _ownerDocument._cookieJar, | ||
| encoding: _ownerDocument._encoding, | ||
| origin: window._origin, | ||
| userAgent: window.navigator.userAgent | ||
| }; | ||
| // Request configuration | ||
| this._synchronous = false; | ||
| this._withCredentials = false; | ||
| this._mimeType = null; | ||
| this._auth = null; | ||
| this._method = undefined; | ||
| this._responseType = ""; | ||
| this._requestHeaders = new HeaderList(); | ||
| this._referrer = _ownerDocument.URL; | ||
| this._url = ""; | ||
| this._timeout = 0; | ||
| this._body = undefined; | ||
| this._preflight = false; | ||
| this._overrideMIMEType = null; | ||
| this._overrideCharset = null; | ||
| this._requestManager = _ownerDocument._requestManager; | ||
| this._dispatcher = window._dispatcher; | ||
| this._cookieJar = _ownerDocument._cookieJar; | ||
| this._encoding = _ownerDocument._encoding; | ||
| this._origin = window._origin; | ||
| this._userAgent = window.navigator.userAgent; | ||
| this.properties = { | ||
| beforeSend: false, | ||
| send: false, | ||
| client: null, | ||
| timeoutStart: 0, | ||
| timeoutId: 0, | ||
| timeoutFn: null, | ||
| responseBuffer: null, | ||
| responseCache: null, | ||
| responseTextCache: null, | ||
| responseXMLCache: null, | ||
| responseHeaders: {}, | ||
| filteredResponseHeaders: [], | ||
| error: "", | ||
| uploadComplete: false, | ||
| uploadListener: false, | ||
| // Signifies that we're calling abort() from xhr-utils.js because of a window shutdown. | ||
| // In that case the termination reason is "fatal", not "end-user abort". | ||
| abortError: false, | ||
| cookieJar: _ownerDocument._cookieJar, | ||
| bufferStepSize: 1 * 1024 * 1024, // pre-allocate buffer increase step size. init value is 1MB | ||
| totalReceivedChunkSize: 0 | ||
| }; | ||
| // Runtime/response state | ||
| this._beforeSend = false; | ||
| this._send = false; | ||
| this._controller = null; | ||
| this._timeoutStart = 0; | ||
| this._timeoutId = 0; | ||
| this._timeoutFn = null; | ||
| this._responseBytes = null; | ||
| this._responseCache = null; | ||
| this._responseTextCache = null; | ||
| this._responseXMLCache = null; | ||
| this._responseHeaders = new HeaderList(); | ||
| this._filteredResponseHeaders = new Set(); | ||
| this._error = ""; | ||
| this._uploadComplete = false; | ||
| this._uploadListener = false; | ||
| // Signifies that we're calling abort() from xhr-utils.js because of a window shutdown. | ||
| // In that case the termination reason is "fatal", not "end-user abort". | ||
| this._abortError = false; | ||
| this._bufferStepSize = 1 * 1024 * 1024; // pre-allocate buffer increase step size. init value is 1MB | ||
| this._totalReceivedChunkSize = 0; | ||
| } | ||
| get responseType() { | ||
| return this.flag.responseType; | ||
| return this._responseType; | ||
| } | ||
| set responseType(responseType) { | ||
| const { flag } = this; | ||
| if (this.readyState === READY_STATES.LOADING || this.readyState === READY_STATES.DONE) { | ||
| throw DOMException.create(this._globalObject, ["The object is in an invalid state.", "InvalidStateError"]); | ||
| } | ||
| if (this.readyState === READY_STATES.OPENED && flag.synchronous) { | ||
| if (this.readyState === READY_STATES.OPENED && this._synchronous) { | ||
| throw DOMException.create(this._globalObject, [ | ||
@@ -176,16 +168,13 @@ "The object does not support the operation or argument.", | ||
| } | ||
| flag.responseType = responseType; | ||
| this._responseType = responseType; | ||
| } | ||
| get response() { | ||
| const { properties } = this; | ||
| if (properties.responseCache) { | ||
| if (this._responseCache) { | ||
| // Needed because of: https://github.com/jsdom/webidl2js/issues/149 | ||
| return idlUtils.tryWrapperForImpl(properties.responseCache); | ||
| return idlUtils.tryWrapperForImpl(this._responseCache); | ||
| } | ||
| let res; | ||
| const responseBuffer = properties.responseBuffer ? | ||
| properties.responseBuffer.slice(0, properties.totalReceivedChunkSize) : | ||
| null; | ||
| const responseBytes = this._responseBytes?.slice(0, this._totalReceivedChunkSize) ?? null; | ||
@@ -199,10 +188,10 @@ switch (this.responseType) { | ||
| case "arraybuffer": { | ||
| if (!responseBuffer) { | ||
| if (!responseBytes) { | ||
| return null; | ||
| } | ||
| res = copyToArrayBufferInNewRealm(responseBuffer, this._globalObject); | ||
| res = copyToArrayBufferInTargetRealmDestructively(responseBytes.buffer, this._globalObject); | ||
| break; | ||
| } | ||
| case "blob": { | ||
| if (!responseBuffer) { | ||
| if (!responseBytes) { | ||
| return null; | ||
@@ -212,3 +201,3 @@ } | ||
| res = Blob.createImpl(this._globalObject, [ | ||
| [new Uint8Array(responseBuffer)], | ||
| [new Uint8Array(responseBytes)], | ||
| { type: contentType || "" } | ||
@@ -223,3 +212,3 @@ ]); | ||
| case "json": { | ||
| if (this.readyState !== READY_STATES.DONE || !responseBuffer) { | ||
| if (this.readyState !== READY_STATES.DONE || !responseBytes) { | ||
| res = null; | ||
@@ -229,3 +218,3 @@ } | ||
| try { | ||
| res = parseJSONFromBytes(responseBuffer); | ||
| res = parseJSONFromBytes(responseBytes); | ||
| } catch { | ||
@@ -237,3 +226,3 @@ res = null; | ||
| } | ||
| properties.responseCache = res; | ||
| this._responseCache = res; | ||
| // Needed because of: https://github.com/jsdom/webidl2js/issues/149 | ||
@@ -243,3 +232,2 @@ return idlUtils.tryWrapperForImpl(res); | ||
| get responseText() { | ||
| const { properties } = this; | ||
| if (this.responseType !== "" && this.responseType !== "text") { | ||
@@ -251,21 +239,17 @@ throw DOMException.create(this._globalObject, ["The object is in an invalid state.", "InvalidStateError"]); | ||
| } | ||
| if (properties.responseTextCache) { | ||
| return properties.responseTextCache; | ||
| if (this._responseTextCache) { | ||
| return this._responseTextCache; | ||
| } | ||
| const responseBuffer = properties.responseBuffer ? | ||
| properties.responseBuffer.slice(0, properties.totalReceivedChunkSize) : | ||
| null; | ||
| if (!responseBuffer) { | ||
| const responseBytes = this._responseBytes?.slice(0, this._totalReceivedChunkSize) ?? null; | ||
| if (!responseBytes) { | ||
| return ""; | ||
| } | ||
| const fallbackEncodingLabel = finalCharset(this) || getBOMEncoding(responseBuffer) || "UTF-8"; | ||
| const res = legacyHookDecode(responseBuffer, fallbackEncodingLabel); | ||
| const fallbackEncodingLabel = finalCharset(this) || getBOMEncoding(responseBytes) || "UTF-8"; | ||
| const res = legacyHookDecode(responseBytes, fallbackEncodingLabel); | ||
| properties.responseTextCache = res; | ||
| this._responseTextCache = res; | ||
| return res; | ||
| } | ||
| get responseXML() { | ||
| const { flag, properties } = this; | ||
| if (this.responseType !== "" && this.responseType !== "document") { | ||
@@ -277,10 +261,7 @@ throw DOMException.create(this._globalObject, ["The object is in an invalid state.", "InvalidStateError"]); | ||
| } | ||
| if (properties.responseXMLCache) { | ||
| return properties.responseXMLCache; | ||
| if (this._responseXMLCache) { | ||
| return this._responseXMLCache; | ||
| } | ||
| const responseBuffer = properties.responseBuffer ? | ||
| properties.responseBuffer.slice(0, properties.totalReceivedChunkSize) : | ||
| null; | ||
| if (!responseBuffer) { | ||
| const responseBytes = this._responseBytes?.slice(0, this._totalReceivedChunkSize) ?? null; | ||
| if (!responseBytes) { | ||
| return null; | ||
@@ -305,4 +286,4 @@ } | ||
| const encoding = finalCharset(this) || labelToName(getBOMEncoding(responseBuffer)) || "UTF-8"; | ||
| const resText = legacyHookDecode(responseBuffer, encoding); | ||
| const encoding = finalCharset(this) || labelToName(getBOMEncoding(responseBytes)) || "UTF-8"; | ||
| const resText = legacyHookDecode(responseBytes, encoding); | ||
@@ -314,4 +295,4 @@ if (!resText) { | ||
| options: { | ||
| url: flag.uri, | ||
| lastModified: new Date(getResponseHeader(this, "last-modified")), | ||
| url: this._url, | ||
| lastModified: new Date(this._responseHeaders.get("last-modified")), | ||
| parsingMode: isHTML ? "html" : "xml", | ||
@@ -326,7 +307,7 @@ cookieJar: { setCookieSync: () => undefined, getCookieStringSync: () => "" }, | ||
| } catch { | ||
| properties.responseXMLCache = null; | ||
| this._responseXMLCache = null; | ||
| return null; | ||
| } | ||
| res.close(); | ||
| properties.responseXMLCache = res; | ||
| this._responseXMLCache = res; | ||
| return res; | ||
@@ -336,7 +317,6 @@ } | ||
| get timeout() { | ||
| return this.flag.timeout; | ||
| return this._timeout; | ||
| } | ||
| set timeout(val) { | ||
| const { flag, properties } = this; | ||
| if (flag.synchronous) { | ||
| if (this._synchronous) { | ||
| throw DOMException.create(this._globalObject, [ | ||
@@ -347,12 +327,12 @@ "The object does not support the operation or argument.", | ||
| } | ||
| flag.timeout = val; | ||
| clearTimeout(properties.timeoutId); | ||
| if (val > 0 && properties.timeoutFn) { | ||
| properties.timeoutId = setTimeout( | ||
| properties.timeoutFn, | ||
| Math.max(0, val - ((new Date()).getTime() - properties.timeoutStart)) | ||
| this._timeout = val; | ||
| clearTimeout(this._timeoutId); | ||
| if (val > 0 && this._timeoutFn) { | ||
| this._timeoutId = setTimeout( | ||
| this._timeoutFn, | ||
| Math.max(0, val - ((new Date()).getTime() - this._timeoutStart)) | ||
| ); | ||
| } else { | ||
| properties.timeoutFn = null; | ||
| properties.timeoutStart = 0; | ||
| this._timeoutFn = null; | ||
| this._timeoutStart = 0; | ||
| } | ||
@@ -362,41 +342,38 @@ } | ||
| get withCredentials() { | ||
| return this.flag.withCredentials; | ||
| return this._withCredentials; | ||
| } | ||
| set withCredentials(val) { | ||
| const { flag, properties } = this; | ||
| if (!(this.readyState === READY_STATES.UNSENT || this.readyState === READY_STATES.OPENED)) { | ||
| throw DOMException.create(this._globalObject, ["The object is in an invalid state.", "InvalidStateError"]); | ||
| } | ||
| if (properties.send) { | ||
| if (this._send) { | ||
| throw DOMException.create(this._globalObject, ["The object is in an invalid state.", "InvalidStateError"]); | ||
| } | ||
| flag.withCredentials = val; | ||
| this._withCredentials = val; | ||
| } | ||
| abort() { | ||
| const { properties } = this; | ||
| // Terminate the request | ||
| clearTimeout(properties.timeoutId); | ||
| properties.timeoutFn = null; | ||
| properties.timeoutStart = 0; | ||
| clearTimeout(this._timeoutId); | ||
| this._timeoutFn = null; | ||
| this._timeoutStart = 0; | ||
| const { client } = properties; | ||
| if (client) { | ||
| client.abort(); | ||
| properties.client = null; | ||
| if (this._controller) { | ||
| this._controller.abort(); | ||
| this._controller = null; | ||
| } | ||
| if (properties.abortError) { | ||
| if (this._abortError) { | ||
| // Special case that ideally shouldn't be going through the public API at all. | ||
| // Run the https://xhr.spec.whatwg.org/#handle-errors "fatal" steps. | ||
| this.readyState = READY_STATES.DONE; | ||
| properties.send = false; | ||
| xhrUtils.setResponseToNetworkError(this); | ||
| this._send = false; | ||
| setResponseToNetworkError(this); | ||
| return; | ||
| } | ||
| if ((this.readyState === READY_STATES.OPENED && properties.send) || | ||
| if ((this.readyState === READY_STATES.OPENED && this._send) || | ||
| this.readyState === READY_STATES.HEADERS_RECEIVED || | ||
| this.readyState === READY_STATES.LOADING) { | ||
| xhrUtils.requestErrorSteps(this, "abort"); | ||
| requestErrorSteps(this, "abort"); | ||
| } | ||
@@ -407,30 +384,32 @@ | ||
| xhrUtils.setResponseToNetworkError(this); | ||
| setResponseToNetworkError(this); | ||
| } | ||
| } | ||
| getAllResponseHeaders() { | ||
| const { properties, readyState } = this; | ||
| if (readyState === READY_STATES.UNSENT || readyState === READY_STATES.OPENED) { | ||
| if (this.readyState === READY_STATES.UNSENT || this.readyState === READY_STATES.OPENED) { | ||
| return ""; | ||
| } | ||
| return Object.keys(properties.responseHeaders) | ||
| .filter(key => properties.filteredResponseHeaders.indexOf(key) === -1) | ||
| .map(key => [key.toLowerCase(), properties.responseHeaders[key]].join(": ")) | ||
| .join("\r\n"); | ||
| const result = []; | ||
| for (const [key, value] of this._responseHeaders) { | ||
| const lcKey = key.toLowerCase(); | ||
| if (!this._filteredResponseHeaders.has(lcKey)) { | ||
| result.push(`${lcKey}: ${value}`); | ||
| } | ||
| } | ||
| return result.join("\r\n"); | ||
| } | ||
| getResponseHeader(header) { | ||
| const { properties, readyState } = this; | ||
| if (readyState === READY_STATES.UNSENT || readyState === READY_STATES.OPENED) { | ||
| if (this.readyState === READY_STATES.UNSENT || this.readyState === READY_STATES.OPENED) { | ||
| return null; | ||
| } | ||
| const lcHeader = header.toLowerCase(); | ||
| if (properties.filteredResponseHeaders.find(filtered => lcHeader === filtered.toLowerCase())) { | ||
| if (this._filteredResponseHeaders.has(lcHeader)) { | ||
| return null; | ||
| } | ||
| return getResponseHeader(this, lcHeader); | ||
| return this._responseHeaders.get(lcHeader); | ||
| } | ||
| open(method, uri, asynchronous, user, password) { | ||
| const { flag, properties, _ownerDocument } = this; | ||
| open(method, url, asynchronous, user, password) { | ||
| const { _ownerDocument } = this; | ||
| if (!_ownerDocument) { | ||
@@ -451,5 +430,4 @@ throw DOMException.create(this._globalObject, ["The object is in an invalid state.", "InvalidStateError"]); | ||
| const { client } = properties; | ||
| if (client && typeof client.abort === "function") { | ||
| client.abort(); | ||
| if (this._controller && typeof this._controller.abort === "function") { | ||
| this._controller.abort(); | ||
| } | ||
@@ -461,7 +439,7 @@ | ||
| if (typeof asynchronous !== "undefined") { | ||
| flag.synchronous = !asynchronous; | ||
| this._synchronous = !asynchronous; | ||
| } else { | ||
| flag.synchronous = false; | ||
| this._synchronous = false; | ||
| } | ||
| if (flag.responseType && flag.synchronous) { | ||
| if (this._responseType && this._synchronous) { | ||
| throw DOMException.create(this._globalObject, [ | ||
@@ -472,3 +450,3 @@ "The object does not support the operation or argument.", | ||
| } | ||
| if (flag.synchronous && flag.timeout) { | ||
| if (this._synchronous && this._timeout) { | ||
| throw DOMException.create(this._globalObject, [ | ||
@@ -479,8 +457,6 @@ "The object does not support the operation or argument.", | ||
| } | ||
| flag.method = method; | ||
| this._method = method; | ||
| let urlObj; | ||
| try { | ||
| urlObj = new URL(uri, _ownerDocument.baseURLSerialized()); | ||
| } catch { | ||
| const urlRecord = parseURL(url, { baseURL: _ownerDocument.baseURL() }); | ||
| if (!urlRecord) { | ||
| throw DOMException.create(this._globalObject, [ | ||
@@ -492,19 +468,30 @@ "The string did not match the expected pattern.", | ||
| if (user || (password && !urlObj.username)) { | ||
| flag.auth = { | ||
| if (user || (password && !urlRecord.username)) { | ||
| this._auth = { | ||
| user, | ||
| pass: password | ||
| }; | ||
| urlObj.username = ""; | ||
| urlObj.password = ""; | ||
| urlRecord.username = ""; | ||
| urlRecord.password = ""; | ||
| } | ||
| flag.uri = urlObj.href; | ||
| flag.requestHeaders = {}; | ||
| flag.preflight = false; | ||
| this._url = serializeURL(urlRecord); | ||
| this._requestHeaders = new HeaderList(); | ||
| this._preflight = false; | ||
| properties.send = false; | ||
| properties.uploadListener = false; | ||
| properties.abortError = false; | ||
| this._send = false; | ||
| this._uploadListener = false; | ||
| this._body = undefined; | ||
| this._abortError = false; | ||
| this._responseBytes = null; | ||
| this._responseCache = null; | ||
| this._responseTextCache = null; | ||
| this._responseXMLCache = null; | ||
| this._responseHeaders = new HeaderList(); | ||
| this._totalReceivedChunkSize = 0; | ||
| this.responseURL = ""; | ||
| this.status = 0; | ||
| this.statusText = ""; | ||
| readyStateChange(this, READY_STATES.OPENED); | ||
@@ -514,8 +501,7 @@ } | ||
| overrideMimeType(mime) { | ||
| const { readyState } = this; | ||
| if (readyState === READY_STATES.LOADING || readyState === READY_STATES.DONE) { | ||
| if (this.readyState === READY_STATES.LOADING || this.readyState === READY_STATES.DONE) { | ||
| throw DOMException.create(this._globalObject, ["The object is in an invalid state.", "InvalidStateError"]); | ||
| } | ||
| this.flag.overrideMIMEType = "application/octet-stream"; | ||
| this._overrideMIMEType = "application/octet-stream"; | ||
@@ -525,7 +511,7 @@ // Waiting for better spec: https://github.com/whatwg/xhr/issues/157 | ||
| if (parsed) { | ||
| this.flag.overrideMIMEType = parsed.essence; | ||
| this._overrideMIMEType = parsed.essence; | ||
| const charset = parsed.parameters.get("charset"); | ||
| if (charset) { | ||
| this.flag.overrideCharset = labelToName(charset); | ||
| this._overrideCharset = labelToName(charset); | ||
| } | ||
@@ -537,3 +523,3 @@ } | ||
| send(body) { | ||
| const { flag, properties, upload, _ownerDocument } = this; | ||
| const { upload, _ownerDocument } = this; | ||
| // Not per spec, but per tests: https://github.com/whatwg/xhr/issues/65 | ||
@@ -544,10 +530,10 @@ if (!_ownerDocument) { | ||
| if (this.readyState !== READY_STATES.OPENED || properties.send) { | ||
| if (this.readyState !== READY_STATES.OPENED || this._send) { | ||
| throw DOMException.create(this._globalObject, ["The object is in an invalid state.", "InvalidStateError"]); | ||
| } | ||
| properties.beforeSend = true; | ||
| this._beforeSend = true; | ||
| try { | ||
| if (flag.method === "GET" || flag.method === "HEAD") { | ||
| if (this._method === "GET" || this._method === "HEAD") { | ||
| body = null; | ||
@@ -557,37 +543,39 @@ } | ||
| if (body !== null) { | ||
| let encoding = null; | ||
| let mimeType = null; | ||
| let extractedContentType = null; | ||
| if (Document.isImpl(body)) { | ||
| encoding = "UTF-8"; | ||
| mimeType = (body._parsingMode === "html" ? "text/html" : "application/xml") + ";charset=UTF-8"; | ||
| flag.body = Buffer.from(fragmentSerialization(body, { requireWellFormed: false })); | ||
| // Note: our utf8Encode() does both USVString conversion and UTF-8 encoding. | ||
| this._body = utf8Encode(fragmentSerialization(body, { requireWellFormed: false })); | ||
| } else { | ||
| if (typeof body === "string") { | ||
| encoding = "UTF-8"; | ||
| } | ||
| const { buffer, contentType } = extractBody(body); | ||
| mimeType = contentType; | ||
| flag.body = buffer; | ||
| const { body: extractedBody, type } = extractBody(body); | ||
| this._body = extractedBody; | ||
| extractedContentType = type; | ||
| } | ||
| const existingContentType = xhrUtils.getRequestHeader(flag.requestHeaders, "content-type"); | ||
| if (mimeType !== null && existingContentType === null) { | ||
| flag.requestHeaders["Content-Type"] = mimeType; | ||
| } else if (existingContentType !== null && encoding !== null) { | ||
| // Waiting for better spec: https://github.com/whatwg/xhr/issues/188. This seems like a good guess at what | ||
| // the spec will be, in the meantime. | ||
| const parsed = MIMEType.parse(existingContentType); | ||
| if (parsed) { | ||
| const charset = parsed.parameters.get("charset"); | ||
| if (charset && !asciiCaseInsensitiveMatch(charset, encoding) && encoding !== null) { | ||
| parsed.parameters.set("charset", encoding); | ||
| xhrUtils.updateRequestHeader(flag.requestHeaders, "content-type", parsed.toString()); | ||
| const originalAuthorContentType = this._requestHeaders.get("content-type"); | ||
| if (originalAuthorContentType !== null) { | ||
| if (Document.isImpl(body) || typeof body === "string") { | ||
| const parsed = MIMEType.parse(originalAuthorContentType); | ||
| if (parsed) { | ||
| const charset = parsed.parameters.get("charset"); | ||
| if (charset && !asciiCaseInsensitiveMatch(charset, "UTF-8")) { | ||
| parsed.parameters.set("charset", "UTF-8"); | ||
| this._requestHeaders.set("Content-Type", parsed.toString()); | ||
| } | ||
| } | ||
| } | ||
| } else if (Document.isImpl(body)) { | ||
| if (body._parsingMode === "html") { | ||
| this._requestHeaders.set("Content-Type", "text/html;charset=UTF-8"); | ||
| } else { | ||
| this._requestHeaders.set("Content-Type", "application/xml;charset=UTF-8"); | ||
| } | ||
| } else if (extractedContentType !== null) { | ||
| this._requestHeaders.set("Content-Type", extractedContentType); | ||
| } | ||
| } | ||
| } finally { | ||
| if (properties.beforeSend) { | ||
| properties.beforeSend = false; | ||
| if (this._beforeSend) { | ||
| this._beforeSend = false; | ||
| } else { | ||
@@ -599,64 +587,55 @@ throw DOMException.create(this._globalObject, ["The object is in an invalid state.", "InvalidStateError"]); | ||
| if (Object.keys(upload._eventListeners).length > 0) { | ||
| properties.uploadListener = true; | ||
| this._uploadListener = true; | ||
| } | ||
| // request doesn't like zero-length bodies | ||
| if (flag.body && flag.body.byteLength === 0) { | ||
| flag.body = null; | ||
| if (this._body && this._body.byteLength === 0) { | ||
| this._body = null; | ||
| } | ||
| if (flag.synchronous) { | ||
| const flagStr = JSON.stringify(flag, function (k, v) { | ||
| if (this === flag && k === "requestManager") { | ||
| return null; | ||
| } | ||
| if (this === flag && k === "pool" && v) { | ||
| return { maxSockets: v.maxSockets }; | ||
| } | ||
| return v; | ||
| }); | ||
| const res = spawnSync( | ||
| // Per XHR spec step 11: "If req's body is null, then set this's upload complete flag." | ||
| // This prevents upload events from firing for GET/HEAD and other bodyless requests. | ||
| // Note: this._body may be undefined (for GET/HEAD) or null (for zero-length bodies). | ||
| if (!this._body) { | ||
| this._uploadComplete = true; | ||
| } | ||
| if (this._synchronous) { | ||
| const configStr = JSON.stringify(this._serializeRequest()); | ||
| const child = spawnSync( | ||
| process.execPath, | ||
| [syncWorkerFile], | ||
| { input: flagStr, maxBuffer: Infinity } | ||
| { input: configStr, maxBuffer: Infinity } | ||
| ); | ||
| if (res.status !== 0) { | ||
| throw new Error(res.stderr.toString()); | ||
| } | ||
| if (res.error) { | ||
| if (typeof res.error === "string") { | ||
| res.error = new Error(res.error); | ||
| // Try to parse the response first. If we have valid JSON, the request succeeded | ||
| // even if the process crashed during cleanup (e.g., UV_HANDLE_CLOSING on Windows). | ||
| // See: https://github.com/nodejs/node/issues/56645 | ||
| let response; | ||
| try { | ||
| response = JSON.parse(child.stdout.toString()); | ||
| } catch (parseError) { | ||
| // No valid response - check for actual errors | ||
| if (child.error) { | ||
| throw child.error; | ||
| } | ||
| throw res.error; | ||
| if (child.status !== 0) { | ||
| throw new Error(child.stderr.toString()); | ||
| } | ||
| throw new Error("Sync XHR worker did not produce a JSON-parseable response", { cause: parseError }); | ||
| } | ||
| this._adoptSerializedResponse(response); | ||
| const response = JSON.parse(res.stdout.toString()); | ||
| const resProp = response.properties; | ||
| if (resProp.responseBuffer && resProp.responseBuffer.data) { | ||
| resProp.responseBuffer = Buffer.from(resProp.responseBuffer.data); | ||
| } | ||
| if (resProp.cookieJar) { | ||
| resProp.cookieJar = tough.CookieJar.deserializeSync( | ||
| resProp.cookieJar, | ||
| _ownerDocument._cookieJar.store | ||
| ); | ||
| } | ||
| this.readyState = READY_STATES.LOADING; | ||
| this.status = response.status; | ||
| this.statusText = response.statusText; | ||
| this.responseURL = response.responseURL; | ||
| Object.assign(this.properties, response.properties); | ||
| if (resProp.error) { | ||
| xhrUtils.dispatchError(this); | ||
| throw DOMException.create(this._globalObject, [resProp.error, "NetworkError"]); | ||
| if (this._error) { | ||
| dispatchError(this, this._error); | ||
| throw DOMException.create(this._globalObject, [this._error, "NetworkError"]); | ||
| } else { | ||
| const { responseBuffer } = properties; | ||
| const contentLength = getResponseHeader(this, "content-length") || "0"; | ||
| const bufferLength = parseInt(contentLength) || responseBuffer.length; | ||
| const contentLength = this._responseHeaders.get("content-length") || "0"; | ||
| const byteLength = parseInt(contentLength) || this._responseBytes.length; | ||
| const progressObj = { lengthComputable: false }; | ||
| if (bufferLength !== 0) { | ||
| progressObj.total = bufferLength; | ||
| progressObj.loaded = bufferLength; | ||
| if (byteLength !== 0) { | ||
| progressObj.total = byteLength; | ||
| progressObj.loaded = byteLength; | ||
| progressObj.lengthComputable = true; | ||
@@ -670,60 +649,217 @@ } | ||
| } else { | ||
| properties.send = true; | ||
| this._send = true; | ||
| this._totalReceivedChunkSize = 0; | ||
| this._bufferStepSize = 1 * 1024 * 1024; | ||
| if (body !== null && body !== "") { | ||
| this._uploadComplete = false; | ||
| } else { | ||
| this._uploadComplete = true; | ||
| } | ||
| // State for upload progress - use this._body which is the processed Uint8Array | ||
| const uploadTotal = this._body ? this._body.byteLength : 0; | ||
| const uploadProgress = { | ||
| lengthComputable: uploadTotal > 0, | ||
| total: uploadTotal, | ||
| loaded: 0 | ||
| }; | ||
| // Create abort controller BEFORE firing loadstart so open() called in | ||
| // loadstart handler can properly abort this request | ||
| const abortController = new AbortController(); | ||
| this._controller = abortController; | ||
| // Register with request manager so window.close()/stop() can abort this request | ||
| const requestManagerEntry = { | ||
| abort: () => { | ||
| this._abortError = true; | ||
| abortController.abort(); | ||
| } | ||
| }; | ||
| if (this._requestManager) { | ||
| this._requestManager.add(requestManagerEntry); | ||
| } | ||
| // Per XHR spec, fire loadstart on xhr first, then on upload. | ||
| fireAnEvent("loadstart", this, ProgressEvent); | ||
| const client = xhrUtils.createClient(this); | ||
| if (!this._uploadComplete && this._uploadListener) { | ||
| fireAnEvent("loadstart", upload, ProgressEvent, uploadProgress); | ||
| } | ||
| properties.client = client; | ||
| // For new client, reset totalReceivedChunkSize and bufferStepSize | ||
| properties.totalReceivedChunkSize = 0; | ||
| properties.bufferStepSize = 1 * 1024 * 1024; | ||
| // Per XHR spec: "If this's state is not opened or this's send() flag is unset, return." | ||
| // Also check if this request was aborted (e.g., by open() called in loadstart handler) | ||
| if (this.readyState !== READY_STATES.OPENED || !this._send || abortController.signal.aborted) { | ||
| if (this._requestManager) { | ||
| this._requestManager.remove(requestManagerEntry); | ||
| } | ||
| return; | ||
| } | ||
| properties.origin = flag.origin; | ||
| // Async fetch and body streaming | ||
| (async () => { | ||
| try { | ||
| const response = await performFetch( | ||
| this._dispatcher, | ||
| { | ||
| url: this._url, | ||
| method: this._method, | ||
| requestHeaders: this._requestHeaders, | ||
| body: this._body, | ||
| origin: this._origin, | ||
| referrer: this._referrer, | ||
| userAgent: this._userAgent, | ||
| withCredentials: this._withCredentials, | ||
| auth: this._auth, | ||
| cookieJar: this._cookieJar, | ||
| uploadListener: this._uploadListener | ||
| }, | ||
| abortController.signal | ||
| ); | ||
| client.on("error", err => { | ||
| client.removeAllListeners(); | ||
| properties.error = err; | ||
| xhrUtils.dispatchError(this); | ||
| }); | ||
| // Handle network errors (includes CORS failures) | ||
| if (isNetworkError(response)) { | ||
| if (abortController.signal.aborted) { | ||
| // Request was aborted - don't fire error events | ||
| return; | ||
| } | ||
| dispatchError(this, response.error?.message || "Network error"); | ||
| return; | ||
| } | ||
| client.on("response", (res, url) => receiveResponse(this, res, url)); | ||
| // Fire upload complete events | ||
| if (!this._uploadComplete) { | ||
| this._uploadComplete = true; | ||
| if (this._uploadListener) { | ||
| uploadProgress.loaded = uploadProgress.total; | ||
| fireAnEvent("progress", upload, ProgressEvent, uploadProgress); | ||
| fireAnEvent("load", upload, ProgressEvent, uploadProgress); | ||
| fireAnEvent("loadend", upload, ProgressEvent, uploadProgress); | ||
| } | ||
| } | ||
| client.on("redirect", (response, requestHeaders, currentURL) => { | ||
| const destUrlObj = new URL(requestHeaders.Referer); | ||
| const urlObj = new URL(currentURL); | ||
| // Process response headers (CORS filtering done by performFetch) | ||
| const { headers, filteredResponseHeaders } = response; | ||
| if (destUrlObj.origin !== urlObj.origin && destUrlObj.origin !== flag.origin) { | ||
| properties.origin = "null"; | ||
| } | ||
| this.responseURL = response.url; | ||
| this.status = response.status; | ||
| this.statusText = response.statusText; | ||
| this._responseHeaders = headers; | ||
| this._filteredResponseHeaders = filteredResponseHeaders; | ||
| requestHeaders.Origin = properties.origin; | ||
| // Set up progress tracking | ||
| // If content-encoding is set, the body was compressed and we report decompressed bytes, | ||
| // so lengthComputable must be false (method b from the XHR spec) | ||
| const contentEncoding = headers.get("content-encoding"); | ||
| const contentLength = headers.get("content-length") || "0"; | ||
| const bufferLength = parseInt(contentLength) || 0; | ||
| const progressObj = { lengthComputable: false, loaded: 0, total: 0 }; | ||
| if (bufferLength !== 0 && !contentEncoding) { | ||
| progressObj.total = bufferLength; | ||
| progressObj.lengthComputable = true; | ||
| } | ||
| if (flag.origin !== destUrlObj.origin && | ||
| destUrlObj.protocol !== "data:") { | ||
| if (!xhrUtils.validCORSHeaders(this, response, flag, properties, flag.origin)) { | ||
| // Pre-allocate buffer | ||
| this._responseBytes = new Uint8Array(this._bufferStepSize); | ||
| this._responseCache = null; | ||
| this._responseTextCache = null; | ||
| this._responseXMLCache = null; | ||
| readyStateChange(this, READY_STATES.HEADERS_RECEIVED); | ||
| // Track progress for deduplication | ||
| let lastProgressReported; | ||
| // Stream the response body | ||
| if (response.body) { | ||
| let rawBytesReceived = 0; | ||
| // Body is already decompressed by the decompress interceptor. | ||
| // Track bytes for progress as they arrive. | ||
| response.body.on("data", chunk => { | ||
| rawBytesReceived += chunk.length; | ||
| progressObj.loaded = rawBytesReceived; | ||
| }); | ||
| for await (const chunk of response.body) { | ||
| // Check if aborted | ||
| if (abortController.signal.aborted) { | ||
| break; | ||
| } | ||
| // Store decompressed bytes | ||
| this._totalReceivedChunkSize += chunk.length; | ||
| if (this._totalReceivedChunkSize >= this._bufferStepSize) { | ||
| this._bufferStepSize *= 2; | ||
| while (this._totalReceivedChunkSize >= this._bufferStepSize) { | ||
| this._bufferStepSize *= 2; | ||
| } | ||
| const tmpBuf = new Uint8Array(this._bufferStepSize); | ||
| tmpBuf.set(this._responseBytes); | ||
| this._responseBytes = tmpBuf; | ||
| } | ||
| this._responseBytes.set(chunk, this._totalReceivedChunkSize - chunk.length); | ||
| this._responseCache = null; | ||
| this._responseTextCache = null; | ||
| this._responseXMLCache = null; | ||
| if (this.readyState === READY_STATES.HEADERS_RECEIVED) { | ||
| this.readyState = READY_STATES.LOADING; | ||
| } | ||
| fireAnEvent("readystatechange", this); | ||
| if (progressObj.total !== progressObj.loaded || this._totalReceivedChunkSize === rawBytesReceived) { | ||
| if (lastProgressReported !== progressObj.loaded) { | ||
| lastProgressReported = progressObj.loaded; | ||
| fireAnEvent("progress", this, ProgressEvent, progressObj); | ||
| } | ||
| } | ||
| } | ||
| } | ||
| // Request complete | ||
| clearTimeout(this._timeoutId); | ||
| this._timeoutFn = null; | ||
| this._timeoutStart = 0; | ||
| this._controller = null; | ||
| if (this._requestManager) { | ||
| this._requestManager.remove(requestManagerEntry); | ||
| } | ||
| // Don't fire completion events if aborted | ||
| if (abortController.signal.aborted) { | ||
| return; | ||
| } | ||
| if (urlObj.username || urlObj.password) { | ||
| properties.error = "Userinfo forbidden in cors redirect"; | ||
| xhrUtils.dispatchError(this); | ||
| // Fire final progress if not already fired with this loaded value | ||
| if (lastProgressReported !== progressObj.loaded) { | ||
| fireAnEvent("progress", this, ProgressEvent, progressObj); | ||
| } | ||
| readyStateChange(this, READY_STATES.DONE); | ||
| fireAnEvent("load", this, ProgressEvent, progressObj); | ||
| fireAnEvent("loadend", this, ProgressEvent, progressObj); | ||
| } catch (err) { | ||
| this._controller = null; | ||
| if (this._requestManager) { | ||
| this._requestManager.remove(requestManagerEntry); | ||
| } | ||
| // Don't fire error events if aborted | ||
| if (!abortController.signal.aborted) { | ||
| dispatchError(this, err.message || String(err)); | ||
| } | ||
| } | ||
| }); | ||
| if (body !== null && body !== "") { | ||
| properties.uploadComplete = false; | ||
| setDispatchProgressEvents(this); | ||
| } else { | ||
| properties.uploadComplete = true; | ||
| } | ||
| })(); | ||
| if (this.timeout > 0) { | ||
| properties.timeoutStart = (new Date()).getTime(); | ||
| properties.timeoutFn = () => { | ||
| client.abort(); | ||
| this._timeoutStart = (new Date()).getTime(); | ||
| this._timeoutFn = () => { | ||
| this._controller?.abort(); | ||
| if (!(this.readyState === READY_STATES.UNSENT || | ||
| (this.readyState === READY_STATES.OPENED && !properties.send) || | ||
| (this.readyState === READY_STATES.OPENED && !this._send) || | ||
| this.readyState === READY_STATES.DONE)) { | ||
| properties.send = false; | ||
| this._send = false; | ||
| let stateChanged = false; | ||
| if (!properties.uploadComplete) { | ||
| if (!this._uploadComplete) { | ||
| fireAnEvent("progress", upload, ProgressEvent); | ||
@@ -744,3 +880,3 @@ readyStateChange(this, READY_STATES.DONE); | ||
| }; | ||
| properties.timeoutId = setTimeout(properties.timeoutFn, this.timeout); | ||
| this._timeoutId = setTimeout(this._timeoutFn, this.timeout); | ||
| } | ||
@@ -751,231 +887,156 @@ } | ||
| setRequestHeader(header, value) { | ||
| const { flag, properties } = this; | ||
| if (this.readyState !== READY_STATES.OPENED || properties.send) { | ||
| throw DOMException.create(this._globalObject, ["The object is in an invalid state.", "InvalidStateError"]); | ||
| if (this.readyState !== READY_STATES.OPENED) { | ||
| throw DOMException.create( | ||
| this._globalObject, | ||
| ["setRequestHeader() can only be called in the OPENED state.", "InvalidStateError"] | ||
| ); | ||
| } | ||
| if (this._send) { | ||
| throw DOMException.create( | ||
| this._globalObject, | ||
| ["setRequestHeader() cannot be called after send()", "InvalidStateError"] | ||
| ); | ||
| } | ||
| value = normalizeHeaderValue(value); | ||
| if (!tokenRegexp.test(header) || !fieldValueRegexp.test(value)) { | ||
| throw DOMException.create(this._globalObject, [ | ||
| "The string did not match the expected pattern.", | ||
| "SyntaxError" | ||
| ]); | ||
| if (!isHeaderName(header)) { | ||
| throw DOMException.create(this._globalObject, ["Invalid header name", "SyntaxError"]); | ||
| } | ||
| if (!isHeaderValue(value)) { | ||
| throw DOMException.create(this._globalObject, ["Invalid header value", "SyntaxError"]); | ||
| } | ||
| const lcHeader = header.toLowerCase(); | ||
| if (forbiddenRequestHeaders.has(lcHeader) || lcHeader.startsWith("sec-") || lcHeader.startsWith("proxy-")) { | ||
| if (isForbiddenRequestHeader(header, value)) { | ||
| return; | ||
| } | ||
| const keys = Object.keys(flag.requestHeaders); | ||
| let n = keys.length; | ||
| while (n--) { | ||
| const key = keys[n]; | ||
| if (key.toLowerCase() === lcHeader) { | ||
| flag.requestHeaders[key] += ", " + value; | ||
| return; | ||
| } | ||
| } | ||
| flag.requestHeaders[header] = value; | ||
| this._requestHeaders.combine(header, value); | ||
| } | ||
| } | ||
| setupForSimpleEventAccessors(XMLHttpRequestImpl.prototype, ["readystatechange"]); | ||
| // Serialization methods for sync XHR worker communication | ||
| function readyStateChange(xhr, readyState) { | ||
| if (xhr.readyState === readyState) { | ||
| return; | ||
| // Called in main process before spawning sync worker | ||
| _serializeRequest() { | ||
| let body = this._body; | ||
| if (body instanceof Uint8Array) { | ||
| body = { type: "Uint8Array", data: Array.from(body) }; | ||
| } | ||
| return { | ||
| method: this._method, | ||
| url: this._url, | ||
| body, | ||
| requestHeaders: this._requestHeaders.toJSON(), | ||
| withCredentials: this._withCredentials, | ||
| mimeType: this._mimeType, | ||
| auth: this._auth, | ||
| responseType: this._responseType, | ||
| timeout: this._timeout, | ||
| preflight: this._preflight, | ||
| cookieJar: this._cookieJar, | ||
| encoding: this._encoding, | ||
| origin: this._origin, | ||
| referrer: this._referrer, | ||
| userAgent: this._userAgent | ||
| }; | ||
| } | ||
| xhr.readyState = readyState; | ||
| // Called in main process after sync worker returns | ||
| _adoptSerializedResponse(response) { | ||
| this.status = response.status; | ||
| this.statusText = response.statusText; | ||
| this.responseURL = response.responseURL; | ||
| fireAnEvent("readystatechange", xhr); | ||
| } | ||
| function receiveResponse(xhr, response, currentURL) { | ||
| const { flag, properties } = xhr; | ||
| const { rawHeaders, statusCode } = response; | ||
| let byteOffset = 0; | ||
| const headers = {}; | ||
| const filteredResponseHeaders = []; | ||
| const headerMap = {}; | ||
| const n = Number(rawHeaders.length); | ||
| for (let i = 0; i < n; i += 2) { | ||
| const k = rawHeaders[i]; | ||
| const kl = k.toLowerCase(); | ||
| const v = rawHeaders[i + 1]; | ||
| if (uniqueResponseHeaders.has(kl)) { | ||
| if (headerMap[kl] !== undefined) { | ||
| delete headers[headerMap[kl]]; | ||
| } | ||
| headers[k] = v; | ||
| } else if (headerMap[kl] !== undefined) { | ||
| headers[headerMap[kl]] += ", " + v; | ||
| } else { | ||
| headers[k] = v; | ||
| if (response.responseBytes) { | ||
| this._responseBytes = new Uint8Array(response.responseBytes); | ||
| } | ||
| headerMap[kl] = k; | ||
| } | ||
| this._responseHeaders = HeaderList.fromJSON(response.responseHeaders); | ||
| this._filteredResponseHeaders = new Set(response.filteredResponseHeaders); | ||
| this._error = response.error || ""; | ||
| this._totalReceivedChunkSize = response.totalReceivedChunkSize; | ||
| this._uploadComplete = response.uploadComplete; | ||
| const destUrlObj = new URL(currentURL); | ||
| if (properties.origin !== destUrlObj.origin && | ||
| destUrlObj.protocol !== "data:") { | ||
| if (!xhrUtils.validCORSHeaders(xhr, response, flag, properties, properties.origin)) { | ||
| return; | ||
| if (response.cookieJar) { | ||
| this._cookieJar = tough.CookieJar.deserializeSync( | ||
| response.cookieJar, | ||
| this._ownerDocument._cookieJar.store | ||
| ); | ||
| } | ||
| const acehStr = response.headers["access-control-expose-headers"]; | ||
| const aceh = new Set(acehStr ? acehStr.trim().toLowerCase().split(xhrUtils.headerListSeparatorRegexp) : []); | ||
| for (const header in headers) { | ||
| const lcHeader = header.toLowerCase(); | ||
| if (!corsSafeResponseHeaders.has(lcHeader) && !aceh.has(lcHeader)) { | ||
| filteredResponseHeaders.push(header); | ||
| } | ||
| } | ||
| } | ||
| for (const header in headers) { | ||
| const lcHeader = header.toLowerCase(); | ||
| if (forbiddenResponseHeaders.has(lcHeader)) { | ||
| filteredResponseHeaders.push(header); | ||
| } | ||
| } | ||
| // Called in worker to set up XHR from serialized config | ||
| _adoptSerializedRequest(config) { | ||
| this._method = config.method; | ||
| this._url = config.url; | ||
| this._body = config.body?.type === "Uint8Array" ? | ||
| new Uint8Array(config.body.data) : | ||
| config.body; | ||
| this._requestHeaders = HeaderList.fromJSON(config.requestHeaders); | ||
| this._synchronous = false; // Run as async in worker | ||
| this._withCredentials = config.withCredentials; | ||
| this._mimeType = config.mimeType; | ||
| this._auth = config.auth; | ||
| this._responseType = config.responseType; | ||
| this._timeout = config.timeout; | ||
| this._preflight = config.preflight; | ||
| this._cookieJar = config.cookieJar ? | ||
| tough.CookieJar.fromJSON(config.cookieJar) : | ||
| null; | ||
| this._encoding = config.encoding; | ||
| this._origin = config.origin; | ||
| this._referrer = config.referrer; | ||
| this._userAgent = config.userAgent; | ||
| xhr.responseURL = destUrlObj.href; | ||
| xhr.status = statusCode; | ||
| xhr.statusText = response.statusMessage || HTTP_STATUS_CODES[statusCode] || ""; | ||
| properties.responseHeaders = headers; | ||
| properties.filteredResponseHeaders = filteredResponseHeaders; | ||
| const contentLength = getResponseHeader(xhr, "content-length") || "0"; | ||
| const bufferLength = parseInt(contentLength) || 0; | ||
| const progressObj = { lengthComputable: false }; | ||
| let lastProgressReported; | ||
| if (bufferLength !== 0) { | ||
| progressObj.total = bufferLength; | ||
| progressObj.loaded = 0; | ||
| progressObj.lengthComputable = true; | ||
| this.readyState = READY_STATES.OPENED; | ||
| } | ||
| // pre-allocate buffer. | ||
| properties.responseBuffer = Buffer.alloc(properties.bufferStepSize); | ||
| properties.responseCache = null; | ||
| properties.responseTextCache = null; | ||
| properties.responseXMLCache = null; | ||
| readyStateChange(xhr, READY_STATES.HEADERS_RECEIVED); | ||
| if (!properties.client) { | ||
| // The request was aborted in reaction to the readystatechange event. | ||
| return; | ||
| } | ||
| // Can't use the client since the client gets the post-ungzipping bytes (which can be greater than the | ||
| // Content-Length). | ||
| response.on("data", chunk => { | ||
| byteOffset += chunk.length; | ||
| progressObj.loaded = byteOffset; | ||
| }); | ||
| properties.client.on("data", chunk => { | ||
| properties.totalReceivedChunkSize += chunk.length; | ||
| if (properties.totalReceivedChunkSize >= properties.bufferStepSize) { | ||
| properties.bufferStepSize *= 2; | ||
| while (properties.totalReceivedChunkSize >= properties.bufferStepSize) { | ||
| properties.bufferStepSize *= 2; | ||
| } | ||
| const tmpBuf = Buffer.alloc(properties.bufferStepSize); | ||
| properties.responseBuffer.copy(tmpBuf, 0, 0, properties.responseBuffer.length); | ||
| properties.responseBuffer = tmpBuf; | ||
| // Called in worker to serialize response | ||
| _serializeResponse() { | ||
| let responseBytes = this._responseBytes; | ||
| if (responseBytes instanceof Uint8Array) { | ||
| responseBytes = Array.from(responseBytes.slice(0, this._totalReceivedChunkSize)); | ||
| } | ||
| chunk.copy(properties.responseBuffer, properties.totalReceivedChunkSize - chunk.length, 0, chunk.length); | ||
| properties.responseCache = null; | ||
| properties.responseTextCache = null; | ||
| properties.responseXMLCache = null; | ||
| if (xhr.readyState === READY_STATES.HEADERS_RECEIVED) { | ||
| xhr.readyState = READY_STATES.LOADING; | ||
| let error = this._error; | ||
| if (error && typeof error !== "string") { | ||
| error = error.stack || inspect(error); | ||
| } | ||
| fireAnEvent("readystatechange", xhr); | ||
| if (progressObj.total !== progressObj.loaded || properties.totalReceivedChunkSize === byteOffset) { | ||
| if (lastProgressReported !== progressObj.loaded) { | ||
| // This is a necessary check in the gzip case where we can be getting new data from the client, as it | ||
| // un-gzips, but no new data has been gotten from the response, so we should not fire a progress event. | ||
| lastProgressReported = progressObj.loaded; | ||
| fireAnEvent("progress", xhr, ProgressEvent, progressObj); | ||
| } | ||
| } | ||
| }); | ||
| properties.client.on("end", () => { | ||
| clearTimeout(properties.timeoutId); | ||
| properties.timeoutFn = null; | ||
| properties.timeoutStart = 0; | ||
| properties.client = null; | ||
| if (lastProgressReported !== progressObj.loaded) { | ||
| // https://github.com/whatwg/xhr/issues/318 | ||
| fireAnEvent("progress", xhr, ProgressEvent, progressObj); | ||
| } | ||
| readyStateChange(xhr, READY_STATES.DONE); | ||
| fireAnEvent("load", xhr, ProgressEvent, progressObj); | ||
| fireAnEvent("loadend", xhr, ProgressEvent, progressObj); | ||
| }); | ||
| return { | ||
| status: this.status, | ||
| statusText: this.statusText, | ||
| responseURL: this.responseURL, | ||
| responseBytes, | ||
| responseHeaders: this._responseHeaders.toJSON(), | ||
| filteredResponseHeaders: Array.from(this._filteredResponseHeaders), | ||
| error, | ||
| totalReceivedChunkSize: this._totalReceivedChunkSize, | ||
| uploadComplete: this._uploadComplete, | ||
| cookieJar: this._cookieJar | ||
| }; | ||
| } | ||
| } | ||
| function setDispatchProgressEvents(xhr) { | ||
| const { properties, upload } = xhr; | ||
| const { client } = properties; | ||
| setupForSimpleEventAccessors(XMLHttpRequestImpl.prototype, ["readystatechange"]); | ||
| let total = 0; | ||
| let lengthComputable = false; | ||
| const length = client.headers && parseInt(xhrUtils.getRequestHeader(client.headers, "content-length")); | ||
| if (length) { | ||
| total = length; | ||
| lengthComputable = true; | ||
| function readyStateChange(xhr, readyState) { | ||
| if (xhr.readyState === readyState) { | ||
| return; | ||
| } | ||
| const initProgress = { | ||
| lengthComputable, | ||
| total, | ||
| loaded: 0 | ||
| }; | ||
| if (properties.uploadListener) { | ||
| fireAnEvent("loadstart", upload, ProgressEvent, initProgress); | ||
| } | ||
| xhr.readyState = readyState; | ||
| client.on("request", req => { | ||
| req.on("response", () => { | ||
| properties.uploadComplete = true; | ||
| if (!properties.uploadListener) { | ||
| return; | ||
| } | ||
| const progress = { | ||
| lengthComputable, | ||
| total, | ||
| loaded: total | ||
| }; | ||
| fireAnEvent("progress", upload, ProgressEvent, progress); | ||
| fireAnEvent("load", upload, ProgressEvent, progress); | ||
| fireAnEvent("loadend", upload, ProgressEvent, progress); | ||
| }); | ||
| }); | ||
| fireAnEvent("readystatechange", xhr); | ||
| } | ||
| function finalMIMEType(xhr) { | ||
| const { flag } = xhr; | ||
| return flag.overrideMIMEType || getResponseHeader(xhr, "content-type"); | ||
| return xhr._overrideMIMEType || xhr._responseHeaders.get("content-type"); | ||
| } | ||
| function finalCharset(xhr) { | ||
| const { flag } = xhr; | ||
| if (flag.overrideCharset) { | ||
| return flag.overrideCharset; | ||
| if (xhr._overrideCharset) { | ||
| return xhr._overrideCharset; | ||
| } | ||
| const parsedContentType = MIMEType.parse(getResponseHeader(xhr, "content-type")); | ||
| const parsedContentType = MIMEType.parse(xhr._responseHeaders.get("content-type")); | ||
| if (parsedContentType) { | ||
@@ -987,37 +1048,20 @@ return labelToName(parsedContentType.parameters.get("charset")); | ||
| function getResponseHeader(xhr, lcHeader) { | ||
| const { properties } = xhr; | ||
| const keys = Object.keys(properties.responseHeaders); | ||
| let n = keys.length; | ||
| while (n--) { | ||
| const key = keys[n]; | ||
| if (key.toLowerCase() === lcHeader) { | ||
| return properties.responseHeaders[key]; | ||
| } | ||
| } | ||
| return null; | ||
| } | ||
| function normalizeHeaderValue(value) { | ||
| return value.replace(/^[\x09\x0A\x0D\x20]+/, "").replace(/[\x09\x0A\x0D\x20]+$/, ""); | ||
| } | ||
| function extractBody(bodyInit) { | ||
| // https://fetch.spec.whatwg.org/#concept-bodyinit-extract | ||
| // except we represent the body as a Node.js Buffer instead. | ||
| // We represent the body as a `Uint8Array`. | ||
| if (Blob.isImpl(bodyInit)) { | ||
| return { | ||
| buffer: bodyInit._buffer, | ||
| contentType: bodyInit.type === "" ? null : bodyInit.type | ||
| body: bodyInit._bytes, | ||
| type: bodyInit.type === "" ? null : bodyInit.type | ||
| }; | ||
| } else if (isArrayBuffer(bodyInit)) { | ||
| } else if (idlUtils.isArrayBuffer(bodyInit)) { | ||
| return { | ||
| buffer: Buffer.from(bodyInit), | ||
| contentType: null | ||
| body: new Uint8Array(bodyInit).slice(0), | ||
| type: null | ||
| }; | ||
| } else if (ArrayBuffer.isView(bodyInit)) { | ||
| return { | ||
| buffer: Buffer.from(bodyInit.buffer, bodyInit.byteOffset, bodyInit.byteLength), | ||
| contentType: null | ||
| body: new Uint8Array(bodyInit), | ||
| type: null | ||
| }; | ||
@@ -1028,4 +1072,4 @@ } else if (FormData.isImpl(bodyInit)) { | ||
| return { | ||
| buffer: chunksToBuffer(outputChunks), | ||
| contentType: "multipart/form-data; boundary=" + utf8Decoder.decode(boundary) | ||
| body: concatTypedArrays(outputChunks), | ||
| type: "multipart/form-data; boundary=" + utf8Decode(boundary) | ||
| }; | ||
@@ -1036,4 +1080,4 @@ } | ||
| return { | ||
| buffer: Buffer.from(bodyInit, "utf-8"), | ||
| contentType: "text/plain;charset=UTF-8" | ||
| body: utf8Encode(bodyInit), | ||
| type: "text/plain;charset=UTF-8" | ||
| }; | ||
@@ -1040,0 +1084,0 @@ } |
+14
-13
| { | ||
| "name": "jsdom", | ||
| "version": "27.4.0", | ||
| "version": "28.0.0", | ||
| "description": "A JavaScript implementation of many web standards", | ||
@@ -26,7 +26,7 @@ "keywords": [ | ||
| "dependencies": { | ||
| "@acemir/cssom": "^0.9.28", | ||
| "@acemir/cssom": "^0.9.31", | ||
| "@asamuzakjp/dom-selector": "^6.7.6", | ||
| "@exodus/bytes": "^1.6.0", | ||
| "cssstyle": "^5.3.4", | ||
| "data-urls": "^6.0.0", | ||
| "@exodus/bytes": "^1.11.0", | ||
| "cssstyle": "^5.3.7", | ||
| "data-urls": "^7.0.0", | ||
| "decimal.js": "^10.6.0", | ||
@@ -41,7 +41,7 @@ "html-encoding-sniffer": "^6.0.0", | ||
| "tough-cookie": "^6.0.0", | ||
| "undici": "^7.20.0", | ||
| "w3c-xmlserializer": "^5.0.0", | ||
| "webidl-conversions": "^8.0.0", | ||
| "whatwg-mimetype": "^4.0.0", | ||
| "whatwg-url": "^15.1.0", | ||
| "ws": "^8.18.3", | ||
| "webidl-conversions": "^8.0.1", | ||
| "whatwg-mimetype": "^5.0.0", | ||
| "whatwg-url": "^16.0.0", | ||
| "xml-name-validator": "^5.0.0" | ||
@@ -58,7 +58,8 @@ }, | ||
| "devDependencies": { | ||
| "@domenic/eslint-config": "^4.0.1", | ||
| "@domenic/eslint-config": "^4.1.0", | ||
| "benchmark": "^2.1.4", | ||
| "eslint": "^9.39.1", | ||
| "eslint-plugin-html": "^8.1.3", | ||
| "globals": "^16.5.0", | ||
| "eslint": "^9.39.2", | ||
| "eslint-plugin-html": "^8.1.4", | ||
| "eslint-plugin-n": "^17.23.2", | ||
| "globals": "^17.2.0", | ||
| "js-yaml": "^4.1.1", | ||
@@ -65,0 +66,0 @@ "minimatch": "^10.1.1", |
+47
-43
@@ -124,5 +124,5 @@ <h1 align="center"> | ||
| * Changing `document.hidden` to return `false` instead of `true` | ||
| * Changing `document.visibilityState` to return `"visible"` instead of `"prerender"` | ||
| * Enabling `window.requestAnimationFrame()` and `window.cancelAnimationFrame()` methods, which otherwise do not exist | ||
| - Changing `document.hidden` to return `false` instead of `true` | ||
| - Changing `document.visibilityState` to return `"visible"` instead of `"prerender"` | ||
| - Enabling `window.requestAnimationFrame()` and `window.cancelAnimationFrame()` methods, which otherwise do not exist | ||
@@ -145,6 +145,6 @@ ```js | ||
| * Frames and iframes, via `<frame>` and `<iframe>` | ||
| * Stylesheets, via `<link rel="stylesheet">` | ||
| * Scripts, via `<script>`, but only if `runScripts: "dangerously"` is also set | ||
| * Images, via `<img>`, but only if the `canvas` npm package is also installed (see "[Canvas Support](#canvas-support)" below) | ||
| - Frames and iframes, via `<frame>` and `<iframe>` | ||
| - Stylesheets, via `<link rel="stylesheet">` | ||
| - Scripts, via `<script>`, but only if `runScripts: "dangerously"` is also set | ||
| - Images, via `<img>`, but only if the `canvas` npm package is also installed (see "[Canvas Support](#canvas-support)" below) | ||
@@ -155,50 +155,54 @@ When attempting to load resources, recall that the default value for the `url` option is `"about:blank"`, which means that any resources included via relative URLs will fail to load. (The result of trying to parse the URL `/something` against the URL `about:blank` is an error.) So, you'll likely want to set a non-default value for the `url` option in those cases, or use one of the [convenience APIs](#convenience-apis) that do so automatically. | ||
| To more fully customize jsdom's resource-loading behavior, you can pass an instance of the `ResourceLoader` class as the `resources` option value: | ||
| To more fully customize jsdom's resource-loading behavior, including the initial load made by [`JSDOM.fromURL()`](#fromurl) or any loads made with `dom.window.XMLHttpRequest` or `dom.window.WebSocket`, you can pass an options object as the `resources` option value. Doing so will opt you in to the above-described `resources: "usable"` behavior as the baseline, on top of which your customizations can be layered. | ||
| ```js | ||
| const resourceLoader = new jsdom.ResourceLoader({ | ||
| proxy: "http://127.0.0.1:9001", | ||
| strictSSL: false, | ||
| userAgent: "Mellblomenator/9000", | ||
| }); | ||
| const dom = new JSDOM(``, { resources: resourceLoader }); | ||
| ``` | ||
| The available options are: | ||
| The three options to the `ResourceLoader` constructor are: | ||
| - `proxy` is the address of an HTTP proxy to be used. | ||
| - `strictSSL` can be set to false to disable the requirement that SSL certificates be valid. | ||
| - `userAgent` affects the `User-Agent` header sent, and thus the resulting value for `navigator.userAgent`. It defaults to <code>\`Mozilla/5.0 (${process.platform || "unknown OS"}) AppleWebKit/537.36 (KHTML, like Gecko) jsdom/${jsdomVersion}\`</code>. | ||
| You can further customize resource fetching by subclassing `ResourceLoader` and overriding the `fetch()` method. For example, here is a version that overrides the response provided for a specific URL: | ||
| - `dispatcher` can be set to a custom [undici `Dispatcher`](https://undici.nodejs.org/#/docs/api/Dispatcher) for advanced use cases such as configuring a proxy or custom TLS settings. For example, to use a proxy, you can use undici's `ProxyAgent`. | ||
| - `interceptors` can be set to an array of [`undici` interceptor functions](https://undici.nodejs.org/#/docs/api/Dispatcher?id=parameter-interceptor). Interceptors can be used to modify requests or responses without writing an entirely new `Dispatcher`. | ||
| For the simple case of inspecting an incoming request or returning a synthetic response, you can use jsdom's `requestInterceptor()` helper, which receives a [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) object and context, and can return a [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response): | ||
| ```js | ||
| class CustomResourceLoader extends jsdom.ResourceLoader { | ||
| fetch(url, options) { | ||
| // Override the contents of this script to do something unusual. | ||
| if (url === "https://example.com/some-specific-script.js") { | ||
| return Promise.resolve(Buffer.from("window.someGlobal = 5;")); | ||
| } | ||
| const { JSDOM, requestInterceptor } = require("jsdom"); | ||
| return super.fetch(url, options); | ||
| const dom = new JSDOM(`<script src="https://example.com/some-specific-script.js"></script>`, { | ||
| url: "https://example.com/", | ||
| runScripts: "dangerously", | ||
| resources: { | ||
| userAgent: "Mellblomenator/9000", | ||
| dispatcher: new ProxyAgent("http://127.0.0.1:9001"), | ||
| interceptors: [ | ||
| requestInterceptor((request, context) => { | ||
| // Override the contents of this script to do something unusual. | ||
| if (request.url === "https://example.com/some-specific-script.js") { | ||
| return new Response("window.someGlobal = 5;", { | ||
| headers: { "Content-Type": "application/javascript" } | ||
| }); | ||
| } | ||
| // Return undefined to let the request proceed normally | ||
| }) | ||
| ] | ||
| } | ||
| } | ||
| }); | ||
| ``` | ||
| jsdom will call your custom resource loader's `fetch()` method whenever it encounters a "usable" resource, per the above section. The method takes a URL string, as well as a few options which you should pass through unmodified if calling `super.fetch()`. It must return a promise for a Node.js `Buffer` object, or return `null` if the resource is intentionally not to be loaded. In general, most cases will want to delegate to `super.fetch()`, as shown. | ||
| The context object passed to the interceptor includes `element` (the DOM element that initiated the request, or `null` for requests that are not from DOM elements). For example: | ||
| One of the options you will receive in `fetch()` will be the element (if applicable) that is fetching a resource. | ||
| ```js | ||
| class CustomResourceLoader extends jsdom.ResourceLoader { | ||
| fetch(url, options) { | ||
| if (options.element) { | ||
| console.log(`Element ${options.element.localName} is requesting the url ${url}`); | ||
| } | ||
| return super.fetch(url, options); | ||
| requestInterceptor((request, { element }) => { | ||
| if (element) { | ||
| console.log(`Element ${element.localName} is requesting ${request.url}`); | ||
| } | ||
| } | ||
| // Return undefined to let the request proceed normally | ||
| }) | ||
| ``` | ||
| To be clear on the flow: when something in your jsdom fetches resources, first the request is set up by jsdom, then it is passed through any `interceptors` in the order provided, then it reaches any provided `dispatcher` (defaulting to [`undici`'s global dispatcher](https://undici.nodejs.org/#/?id=undicigetglobaldispatcher)). If you use jsdom's `requestInterceptor()`, returning promise fulfilled with a `Response` will prevent any further interceptors from running, or the base dispatcher from being reached. | ||
| > [!WARNING] | ||
| > All resource loading customization is ignored when scripts inside the jsdom use synchronous `XMLHttpRequest`. This is a technical limitation as we cannot transfer dispatchers or interceptors across a process boundary. | ||
| ### Virtual consoles | ||
@@ -227,3 +231,3 @@ | ||
| (Note that it is probably best to set up these event listeners *before* calling `new JSDOM()`, since errors or console-invoking script might occur during parsing.) | ||
| (Note that it is probably best to set up these event listeners _before_ calling `new JSDOM()`, since errors or console-invoking script might occur during parsing.) | ||
@@ -384,3 +388,3 @@ If you simply want to redirect the virtual console output to another console, like the default Node.js one, you can do | ||
| Note that this method will throw an exception if the `JSDOM` instance was created without `runScripts` set, or if you are [using jsdom in a web browser](#running-jsdom-inside-a-web-browser). | ||
| Note that this method will throw an exception if the `JSDOM` instance was created without `runScripts` set. | ||
@@ -479,3 +483,3 @@ ### Reconfiguring the jsdom with `reconfigure(settings)` | ||
| In addition to supplying a string, the `JSDOM` constructor can also be supplied binary data, in the form of a Node.js [`Buffer`](https://nodejs.org/docs/latest/api/buffer.html) or a standard JavaScript binary data type like `ArrayBuffer`, `Uint8Array`, `DataView`, etc. When this is done, jsdom will [sniff the encoding](https://html.spec.whatwg.org/multipage/syntax.html#encoding-sniffing-algorithm) from the supplied bytes, scanning for `<meta charset>` tags just like a browser does. | ||
| In addition to supplying a string, the `JSDOM` constructor can also be supplied binary data, in the form of a standard JavaScript binary data type like `ArrayBuffer`, `Uint8Array`, `DataView`, etc. When this is done, jsdom will [sniff the encoding](https://html.spec.whatwg.org/multipage/syntax.html#encoding-sniffing-algorithm) from the supplied bytes, scanning for `<meta charset>` tags just like a browser does. | ||
@@ -482,0 +486,0 @@ If the supplied `contentType` option contains a `charset` parameter, that encoding will override the sniffed encoding—unless a UTF-8 or UTF-16 BOM is present, in which case those take precedence. (Again, this is just like a browser.) |
| "use strict"; | ||
| const ResourceLoader = require("./resource-loader.js"); | ||
// A ResourceLoader that never loads anything: every fetch() signals
// "intentionally not loaded" by returning null instead of a promise.
class NoOpResourceLoader extends ResourceLoader {
  fetch() {
    // null (rather than a rejected promise) tells jsdom the resource
    // was deliberately skipped, not that loading failed.
    return null;
  }
}

module.exports = NoOpResourceLoader;
| "use strict"; | ||
| const fs = require("fs"); | ||
| const { fileURLToPath } = require("url"); | ||
| const { parseURL } = require("whatwg-url"); | ||
| const dataURLFromRecord = require("data-urls").fromURLRecord; | ||
| const packageVersion = require("../../../../package.json").version; | ||
| const agentFactory = require("../../living/helpers/agent-factory"); | ||
| const Request = require("../../living/helpers/http-request"); | ||
// True when running under a bundler/browser shim, where the global `process`
// is not a real Node.js process object (its toString tag differs).
const IS_BROWSER = Object.prototype.toString.call(process) !== "[object process]";
// Default resource loader used by jsdom. Fetches resources over http(s)
// (via the shared Request helper), from file: URLs, and from data: URLs.
// Every fetch() returns a promise for a Buffer, augmented with an `abort()`
// method and — for http/https — `response`, `href`, and `getHeader`
// properties that lib/api.js depends on.
module.exports = class ResourceLoader {
  /**
   * @param {object} [options]
   * @param {boolean} [options.strictSSL=true] - when true, invalid SSL certificates are rejected.
   * @param {string} [options.proxy] - address of an HTTP(S) proxy to route requests through.
   * @param {string} [options.userAgent] - value sent in the User-Agent header.
   */
  constructor({
    strictSSL = true,
    proxy = undefined,
    userAgent = `Mozilla/5.0 (${process.platform || "unknown OS"}) AppleWebKit/537.36 ` +
      `(KHTML, like Gecko) jsdom/${packageVersion}`
  } = {}) {
    this._strictSSL = strictSSL;
    this._proxy = proxy;
    this._userAgent = userAgent;
  }
  // Resolves a data: URL record to its decoded bytes. Resolution is deferred
  // with setTimeout(0) so callers always observe async behavior, and abort()
  // cancels the pending timer.
  _readDataURL(urlRecord) {
    const dataURL = dataURLFromRecord(urlRecord);
    let timeoutId;
    const promise = new Promise(resolve => {
      timeoutId = setTimeout(resolve, 0, Buffer.from(dataURL.body));
    });
    promise.abort = () => {
      if (timeoutId !== undefined) {
        clearTimeout(timeoutId);
      }
    };
    return promise;
  }
  // Reads a local file into a single Buffer.
  _readFile(filePath) {
    let readableStream, abort; // Native promises don't have an "abort" method.
    // Creating a promise for two reasons:
    // 1. fetch always returns a promise.
    // 2. We need to add an abort handler.
    const promise = new Promise((resolve, reject) => {
      readableStream = fs.createReadStream(filePath);
      let data = Buffer.alloc(0);
      abort = reject;
      readableStream.on("error", reject);
      readableStream.on("data", chunk => {
        data = Buffer.concat([data, chunk]);
      });
      readableStream.on("end", () => {
        resolve(data);
      });
    });
    promise.abort = () => {
      readableStream.destroy();
      const error = new Error("request canceled by user");
      // Marker consumed by callers to distinguish user aborts from failures.
      error.isAbortError = true;
      abort(error);
    };
    return promise;
  }
  /**
   * Fetches `urlString`, dispatching on its scheme (data:, http(s):, file:).
   * @param {string} urlString - absolute URL of the resource.
   * @param {object} [options]
   * @param {string} [options.accept] - value for the Accept header ("*\/*" when omitted).
   * @param {object} [options.cookieJar] - cookie jar used for http(s) requests.
   * @param {string} [options.referrer] - value for the Referer header (ignored in browser builds).
   * @returns {Promise<Buffer>} augmented with abort()/response/href/getHeader for http(s).
   */
  fetch(urlString, { accept, cookieJar, referrer } = {}) {
    const url = parseURL(urlString);
    if (!url) {
      return Promise.reject(new Error(`Tried to fetch invalid URL ${urlString}`));
    }
    switch (url.scheme) {
      case "data": {
        return this._readDataURL(url);
      }
      case "http":
      case "https": {
        // strictSSL maps onto the agents' rejectUnauthorized option.
        const agents = agentFactory(this._proxy, this._strictSSL);
        const headers = {
          "User-Agent": this._userAgent,
          "Accept-Language": "en",
          "Accept-Encoding": "gzip",
          "Accept": accept || "*/*"
        };
        if (referrer && !IS_BROWSER) {
          headers.Referer = referrer;
        }
        const requestClient = new Request(
          urlString,
          { followRedirects: true, cookieJar, agents },
          { headers }
        );
        const promise = new Promise((resolve, reject) => {
          const accumulated = [];
          requestClient.once("response", res => {
            promise.response = res;
            const { statusCode } = res;
            // TODO This deviates from the spec when it comes to
            // loading resources such as images
            if (statusCode < 200 || statusCode > 299) {
              requestClient.abort();
              reject(new Error(`Resource was not loaded. Status: ${statusCode}`));
            }
          });
          requestClient.on("data", chunk => {
            accumulated.push(chunk);
          });
          requestClient.on("end", () => resolve(Buffer.concat(accumulated)));
          requestClient.on("error", reject);
        });
        // The method fromURL in lib/api.js crashes without the following four
        // properties defined on the Promise instance, causing the test suite to halt
        requestClient.on("end", () => {
          // currentURL reflects the final URL after any redirects.
          promise.href = requestClient.currentURL;
        });
        promise.abort = requestClient.abort.bind(requestClient);
        // NOTE(review): the local `headers` lookup is case-sensitive, so a
        // differently-cased name falls through to the live request — confirm
        // callers always use the canonical casing.
        promise.getHeader = name => headers[name] || requestClient.getHeader(name);
        requestClient.end();
        return promise;
      }
      case "file": {
        try {
          // fileURLToPath throws synchronously on malformed file: URLs.
          return this._readFile(fileURLToPath(urlString));
        } catch (e) {
          return Promise.reject(e);
        }
      }
      default: {
        return Promise.reject(new Error(`Tried to fetch URL ${urlString} with invalid scheme ${url.scheme}`));
      }
    }
  }
};
| "use strict"; | ||
| const http = require("http"); | ||
| const https = require("https"); | ||
| const { HttpProxyAgent } = require("http-proxy-agent"); | ||
| const { HttpsProxyAgent } = require("https-proxy-agent"); | ||
| module.exports = function agentFactory(proxy, rejectUnauthorized) { | ||
| const agentOpts = { keepAlive: true, rejectUnauthorized }; | ||
| if (proxy) { | ||
| return { https: new HttpsProxyAgent(proxy, agentOpts), http: new HttpProxyAgent(proxy, agentOpts) }; | ||
| } | ||
| return { http: new http.Agent(agentOpts), https: new https.Agent(agentOpts) }; | ||
| }; |
| "use strict"; | ||
| const http = require("http"); | ||
| const https = require("https"); | ||
| const { Writable } = require("stream"); | ||
| const zlib = require("zlib"); | ||
// Split the running Node.js version string ("vX.Y.Z") into its components;
// the leading "v" is dropped before splitting.
const ver = process.version.slice(1).split(".");
// Major version number, used for version-dependent behavior below.
const majorNodeVersion = Number.parseInt(ver[0], 10);
// Forcibly terminates an in-flight http.ClientRequest. After destroying it,
// every listener is removed and a no-op "error" handler is installed so that
// late socket errors from the dead request are swallowed instead of becoming
// uncaught exceptions.
function abortRequest(request) {
  request.destroy();
  request.removeAllListeners();
  request.on("error", () => {});
}
// A small HTTP/HTTPS client built on Node's http/https modules, exposed as a
// Writable stream. On top of the raw modules it layers:
//  - redirect following (3xx + Location), with POST->GET rewriting where the
//    HTTP spec requires it;
//  - a single retry with Basic auth on a 401 challenge when user/pass are set;
//  - cookie storage/attachment through an optional cookie jar;
//  - transparent gunzip/inflate of compressed response bodies.
// `clientOptions` (followRedirects, cookieJar, agents, user, pass, ...) are
// merged onto the instance by Object.assign in the constructor.
module.exports = class Request extends Writable {
  constructor(url, clientOptions, requestOptions) {
    super();
    // Copies followRedirects, cookieJar, agents, etc. onto `this`.
    Object.assign(this, clientOptions);
    this.currentURL = url;
    this._requestOptions = requestOptions;
    this.headers = requestOptions.headers;
    this._ended = false;
    this._redirectCount = 0;
    // Body chunks are buffered so they can be replayed after a redirect.
    this._requestBodyBuffers = [];
    this._bufferIndex = 0;
    this._performRequest();
  }
  // Aborts the in-flight request and detaches every listener from this wrapper.
  abort() {
    abortRequest(this._currentRequest);
    this.emit("abort");
    this.removeAllListeners();
  }
  // Streams a form (e.g. a form-data instance) directly into the underlying request.
  pipeRequest(form) {
    form.pipe(this._currentRequest);
  }
  // NOTE(review): overrides Writable#write without the usual callback
  // parameter and without reporting backpressure; chunks are also buffered
  // so the body can be re-sent if the request is redirected.
  write(data, encoding) {
    if (data.length > 0) {
      this._requestBodyBuffers.push({ data, encoding });
      this._currentRequest.write(data, encoding);
    }
  }
  // Finishes the request body. Emits "request" with the underlying
  // http.ClientRequest just before ending it.
  end() {
    this.emit("request", this._currentRequest);
    this._ended = true;
    this._currentRequest.end();
  }
  // Keeps the local header map and the live request's headers in sync.
  setHeader(name, value) {
    this.headers[name] = value;
    this._currentRequest.setHeader(name, value);
  }
  removeHeader(name) {
    delete this.headers[name];
    this._currentRequest.removeHeader(name);
  }
  // Without this method, the test send-redirect-infinite-sync will halt the test suite
  // TODO: investigate this further and ideally remove
  toJSON() {
    const { method, headers } = this._requestOptions;
    return { uri: new URL(this.currentURL), method, headers };
  }
  // Writes the next buffered body chunk into the (new, post-redirect)
  // request; chained as the write-completion callback so chunks go out in order.
  _writeNext(error) {
    if (this._currentRequest) {
      if (error) {
        this.emit("error", error);
      } else if (this._bufferIndex < this._requestBodyBuffers.length) {
        const buffer = this._requestBodyBuffers[this._bufferIndex++];
        if (!this._currentRequest.writableEnded) {
          this._currentRequest.write(
            buffer.data,
            buffer.encoding,
            this._writeNext.bind(this)
          );
        }
      } else if (this._ended) {
        this._currentRequest.end();
      }
    }
  }
  // Creates the underlying http/https request for this.currentURL, attaching
  // jar cookies and replaying buffered body data when this hop is a redirect.
  _performRequest() {
    const urlOptions = new URL(this.currentURL);
    const scheme = urlOptions.protocol;
    // browserify's (http|https).request() does not work correctly with the (url, options, callback) signature. Instead
    // we need to have options for each of the URL components. Note that we can't use the spread operator because URL
    // instances don't have own properties for the URL components.
    const requestOptions = {
      ...this._requestOptions,
      // `scheme` includes the trailing ":"; strip it to index this.agents by "http"/"https".
      agent: this.agents[scheme.substring(0, scheme.length - 1)],
      protocol: urlOptions.protocol,
      hostname: urlOptions.hostname,
      port: urlOptions.port,
      path: urlOptions.pathname + urlOptions.search
    };
    const { request } = scheme === "https:" ? https : http;
    this._currentRequest = request(requestOptions, response => {
      this._processResponse(response);
    });
    let cookies;
    if (this._redirectCount === 0) {
      // Remember the caller-supplied Cookie header so jar cookies can be
      // appended to it on this and subsequent hops.
      this.originalCookieHeader = this.getHeader("Cookie");
    }
    if (this.cookieJar) {
      cookies = this.cookieJar.getCookieStringSync(this.currentURL);
    }
    if (cookies && cookies.length) {
      if (this.originalCookieHeader) {
        this.setHeader("Cookie", this.originalCookieHeader + "; " + cookies);
      } else {
        this.setHeader("Cookie", cookies);
      }
    }
    // Re-emit the low-level request events on this wrapper.
    for (const event of ["connect", "error", "socket", "timeout"]) {
      this._currentRequest.on(event, (...args) => {
        this.emit(event, ...args);
      });
    }
    if (this._isRedirect) {
      // Replay the buffered request body from the start into the new request.
      this._bufferIndex = 0;
      this._writeNext();
    }
  }
  // Handles a response: stores cookies, follows redirects / retries with
  // Basic auth when applicable, otherwise decompresses and re-emits the body.
  _processResponse(response) {
    const cookies = response.headers["set-cookie"];
    if (this.cookieJar && Array.isArray(cookies)) {
      try {
        cookies.forEach(cookie => {
          this.cookieJar.setCookieSync(cookie, this.currentURL, { ignoreError: true });
        });
      } catch (e) {
        this.emit("error", e);
      }
    }
    const { statusCode } = response;
    const { location } = response.headers;
    // In Node v15, aborting a message with remaining data causes an error to be thrown,
    // hence the version check
    const catchResErrors = err => {
      if (!(majorNodeVersion >= 15 && err.message === "aborted")) {
        this.emit("error", err);
      }
    };
    response.on("error", catchResErrors);
    let redirectAddress = null;
    let resendWithAuth = false;
    if (typeof location === "string" &&
      location.length &&
      this.followRedirects &&
      statusCode >= 300 &&
      statusCode < 400) {
      redirectAddress = location;
    } else if (statusCode === 401 &&
      /^Basic /i.test(response.headers["www-authenticate"] || "") &&
      (this.user && this.user.length)) {
      // One retry with Basic credentials in response to a 401 challenge.
      this._requestOptions.auth = `${this.user}:${this.pass}`;
      resendWithAuth = true;
    }
    if (redirectAddress || resendWithAuth) {
      // Redirect (or auth-retry) path: cap the hop count, tear down the old
      // request, and issue a new one.
      if (++this._redirectCount > 21) {
        const redirectError = new Error("Maximum number of redirects exceeded");
        redirectError.code = "ERR_TOO_MANY_REDIRECTS";
        this.emit("error", redirectError);
        return;
      }
      abortRequest(this._currentRequest);
      response.destroy();
      this._isRedirect = true;
      // 301/302 on POST, and 303 on anything but GET/HEAD, are followed with
      // a bodyless GET, so drop the buffered body.
      if (((statusCode === 301 || statusCode === 302) && this._requestOptions.method === "POST") ||
        (statusCode === 303 && !/^(?:GET|HEAD)$/.test(this._requestOptions.method))) {
        this._requestOptions.method = "GET";
        this._requestBodyBuffers = [];
      }
      let previousHostName = this._removeMatchingHeaders(/^host$/i);
      if (!previousHostName) {
        previousHostName = new URL(this.currentURL).hostname;
      }
      const previousURL = this.currentURL;
      if (!resendWithAuth) {
        let nextURL;
        try {
          nextURL = new URL(redirectAddress, this.currentURL);
        } catch (e) {
          this.emit("error", e);
          return;
        }
        // Don't leak credentials to a different host.
        if (nextURL.hostname !== previousHostName) {
          this._removeMatchingHeaders(/^authorization$/i);
        }
        this.currentURL = nextURL.toString();
      }
      this.headers.Referer = previousURL;
      this.emit("redirect", response, this.headers, this.currentURL);
      try {
        this._performRequest();
      } catch (cause) {
        this.emit("error", cause);
      }
    } else {
      // Terminal response path: optionally decompress, then re-emit the
      // stream events ("response", "data", "end", "close") on this wrapper.
      let pipeline = response;
      const acceptEncoding = this.headers["Accept-Encoding"];
      const requestCompressed = typeof acceptEncoding === "string" &&
        (acceptEncoding.includes("gzip") || acceptEncoding.includes("deflate"));
      if (
        requestCompressed &&
        this._requestOptions.method !== "HEAD" &&
        statusCode >= 200 &&
        statusCode !== 204 &&
        statusCode !== 304
      ) {
        // Browserify's zlib does not support zlib.constants.
        const zlibOptions = {
          flush: (zlib.constants ?? zlib).Z_SYNC_FLUSH,
          finishFlush: (zlib.constants ?? zlib).Z_SYNC_FLUSH
        };
        const contentEncoding = (response.headers["content-encoding"] || "identity").trim().toLowerCase();
        if (contentEncoding === "gzip") {
          pipeline = zlib.createGunzip(zlibOptions);
          response.pipe(pipeline);
        } else if (contentEncoding === "deflate") {
          pipeline = zlib.createInflate(zlibOptions);
          response.pipe(pipeline);
        }
      }
      pipeline.removeAllListeners("error");
      this.emit("response", response, this.currentURL);
      pipeline.on("data", bytes => this.emit("data", bytes));
      // NOTE(review): streams emit "end" with no payload, so `bytes` here is
      // presumably always undefined — confirm before relying on it.
      pipeline.once("end", bytes => this.emit("end", bytes));
      pipeline.on("error", catchResErrors);
      pipeline.on("close", () => this.emit("close"));
      this._requestBodyBuffers = [];
    }
  }
  // Reads a header from the live request, or null when no request exists.
  // NOTE(review): the extra `value` parameter is forwarded but ignored by
  // Node's ClientRequest#getHeader — likely vestigial.
  getHeader(key, value) {
    if (this._currentRequest) {
      return this._currentRequest.getHeader(key, value);
    }
    return null;
  }
  // Deletes every header whose name matches `regex` and returns the value of
  // the last one removed (used to capture Host/Authorization across redirects).
  _removeMatchingHeaders(regex) {
    let lastValue;
    for (const header in this.headers) {
      if (regex.test(header)) {
        lastValue = this.headers[header];
        delete this.headers[header];
      }
    }
    return lastValue;
  }
};
Network access
Supply chain riskThis module accesses the network.
Found 1 instance in 1 package
Shell access
Supply chain riskThis module accesses the system shell. Accessing the system shell increases the risk of executing arbitrary code.
Found 1 instance in 1 package
Uses eval
Supply chain riskPackage uses dynamic code execution (e.g., eval()), which is a dangerous practice. This can prevent the code from running in certain environments and increases the risk that the code may contain exploits or malicious behavior.
Found 2 instances in 1 package
Debug access
Supply chain riskUses debug, reflection and dynamic code execution features.
Found 1 instance in 1 package
Dynamic require
Supply chain riskDynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
Filesystem access
Supply chain riskAccesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
AI-detected potential code anomaly
Supply chain riskAI has identified unusual behaviors that may pose a security risk.
Found 1 instance in 1 package
Long strings
Supply chain riskContains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
URL strings
Supply chain riskPackage contains fragments of external URLs or IP addresses, which the package may be accessing at runtime.
Found 1 instance in 1 package
Network access
Supply chain riskThis module accesses the network.
Found 5 instances in 1 package
Shell access
Supply chain riskThis module accesses the system shell. Accessing the system shell increases the risk of executing arbitrary code.
Found 1 instance in 1 package
Uses eval
Supply chain riskPackage uses dynamic code execution (e.g., eval()), which is a dangerous practice. This can prevent the code from running in certain environments and increases the risk that the code may contain exploits or malicious behavior.
Found 2 instances in 1 package
Debug access
Supply chain riskUses debug, reflection and dynamic code execution features.
Found 1 instance in 1 package
Dynamic require
Supply chain riskDynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
Filesystem access
Supply chain riskAccesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
AI-detected potential code anomaly
Supply chain riskAI has identified unusual behaviors that may pose a security risk.
Found 1 instance in 1 package
Long strings
Supply chain riskContains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
URL strings
Supply chain riskPackage contains fragments of external URLs or IP addresses, which the package may be accessing at runtime.
Found 1 instance in 1 package
3344473
1.17%535
0.38%88719
0.86%556
0.72%15
7.14%42
7.69%79
146.88%+ Added
+ Added
+ Added
+ Added
- Removed
- Removed
- Removed
- Removed
- Removed
Updated
Updated
Updated
Updated
Updated
Updated
Updated