| import { runInThisContext } from 'node:vm'; | ||
| import * as spyModule from '@vitest/spy'; | ||
| import { r as resolveTestRunner, a as resolveSnapshotEnvironment, d as detectAsyncLeaks, s as setupChaiConfig } from './index.DGNSnENe.js'; | ||
| import { l as loadEnvironment, e as emitModuleRunner, a as listenForErrors } from './init.DICorXCo.js'; | ||
| import { N as NativeModuleRunner } from './nativeModuleRunner.BIakptoF.js'; | ||
| import { T as Traces } from './traces.CCmnQaNT.js'; | ||
| import { V as VitestEvaluatedModules } from './evaluatedModules.Dg1zASAC.js'; | ||
| import { s as startVitestModuleRunner, c as createNodeImportMeta } from './startVitestModuleRunner.C3ZR-4J3.js'; | ||
| import { performance as performance$1 } from 'node:perf_hooks'; | ||
| import { startTests, collectTests } from '@vitest/runner'; | ||
| import { s as setupCommonEnv, b as startCoverageInsideWorker, c as stopCoverageInsideWorker } from './setup-common.B41N_kPE.js'; | ||
| import { g as globalExpect, v as vi } from './test.CTcmp4Su.js'; | ||
| import { c as closeInspector } from './inspector.CvyFGlXm.js'; | ||
| import { createRequire } from 'node:module'; | ||
| import timers from 'node:timers'; | ||
| import timersPromises from 'node:timers/promises'; | ||
| import util from 'node:util'; | ||
| import { KNOWN_ASSET_TYPES } from '@vitest/utils/constants'; | ||
| import { i as index } from './index.DlDSLQD3.js'; | ||
| import { g as getWorkerState, r as resetModules, p as provideWorkerState, a as getSafeWorkerState } from './utils.BX5Fg8C4.js'; | ||
// this should only be used in Node

// One-shot guard: the require() hooks, SSR flag and patched timer globals
// installed below must only be applied once per worker process, even though
// setupGlobalEnv runs for every test file.
let globalSetup = false;

/**
 * Prepares the Node.js process globals for a test run.
 *
 * Always re-applies the per-run pieces (common env, `__vitest_index__`,
 * the expect environment name), then performs one-time process-wide setup
 * guarded by `globalSetup`: CJS require() hooks for styles/assets, the
 * `SSR` env flag, patched timer references and console interception.
 *
 * @param {object} config - resolved Vitest project config
 * @param {object} environment - active test environment (node, jsdom, ...)
 */
async function setupGlobalEnv(config, environment) {
  await setupCommonEnv(config);
  // expose the vitest public entry for code evaluated at runtime;
  // non-enumerable so it does not leak into object inspection
  Object.defineProperty(globalThis, "__vitest_index__", {
    value: index,
    enumerable: false
  });
  globalExpect.setState({ environment: environment.name });
  if (globalSetup) return;
  globalSetup = true;
  // "client" environments emulate a browser, so CJS require() of frontend
  // assets must be stubbed out
  if ((environment.viteEnvironment || environment.name) === "client") {
    const _require = createRequire(import.meta.url);
    // always mock "required" `css` files, because we cannot process them
    _require.extensions[".css"] = resolveCss;
    _require.extensions[".scss"] = resolveCss;
    _require.extensions[".sass"] = resolveCss;
    _require.extensions[".less"] = resolveCss;
    // since we are using Vite, we can assume how these will be resolved
    KNOWN_ASSET_TYPES.forEach((type) => {
      _require.extensions[`.${type}`] = resolveAsset;
    });
    process.env.SSR = "";
  } else process.env.SSR = "1";
  // @ts-expect-error not typed global for patched timers
  globalThis.__vitest_required__ = {
    util,
    timers,
    timersPromises
  };
  if (!config.disableConsoleIntercept) await setupConsoleLogSpy();
}
/**
 * CJS require() hook for style files: stubs the module out with an empty
 * string, since stylesheets cannot be evaluated in Node.
 * @param {NodeJS.Module} mod - the module object being loaded
 */
function resolveCss(mod) {
  const emptyStylesheet = "";
  mod.exports = emptyStylesheet;
}
/**
 * CJS require() hook for static assets: the module resolves to the asset's
 * path/url instead of its (binary) contents.
 * @param {NodeJS.Module} mod - the module object being loaded
 * @param {string} url - resolved path of the required asset
 */
function resolveAsset(mod, url) {
  Object.assign(mod, { exports: url });
}
/**
 * Replaces the global console with Vitest's intercepting console so test
 * output can be captured and forwarded to reporters.
 */
async function setupConsoleLogSpy() {
  const consoleModule = await import('./console.3WNpx0tS.js');
  globalThis.console = consoleModule.createCustomConsole();
}
// browser shouldn't call this!

/**
 * Executes or collects the given test files inside this worker.
 *
 * @param {"run" | "collect"} method - execute tests ("run") or only collect them
 * @param {Array<{ filepath: string }>} files - test file specifications
 * @param {object} config - resolved project config
 * @param {object} moduleRunner - module runner used to import test files
 * @param {object} environment - the active test environment
 * @param {object} traces - tracing helper; `traces.$(name, [attrs], cb)` wraps a span
 */
async function run(method, files, config, moduleRunner, environment, traces) {
  const workerState = getWorkerState();
  // prepare the runner, global env, coverage and snapshot environment in
  // parallel; only the resolved test runner is needed afterwards
  const [testRunner] = await Promise.all([
    traces.$("vitest.runtime.runner", () => resolveTestRunner(config, moduleRunner, traces)),
    traces.$("vitest.runtime.global_env", () => setupGlobalEnv(config, environment)),
    traces.$("vitest.runtime.coverage.start", () => startCoverageInsideWorker(config.coverage, moduleRunner, { isolate: config.isolate })),
    traces.$("vitest.runtime.snapshot.environment", async () => {
      if (!workerState.config.snapshotOptions.snapshotEnvironment) workerState.config.snapshotOptions.snapshotEnvironment = await resolveSnapshotEnvironment(config, moduleRunner);
    })
  ]);
  // on cancellation, stop the inspector and ask the runner to abort
  workerState.onCancel((reason) => {
    closeInspector(config);
    testRunner.cancel?.(reason);
  });
  workerState.durations.prepare = performance$1.now() - workerState.durations.prepare;
  await traces.$(`vitest.test.runner.${method}`, async () => {
    for (const file of files) {
      // with isolation enabled, mocks and the module graph are reset
      // between every test file
      if (config.isolate) {
        moduleRunner.mocker?.reset();
        resetModules(workerState.evaluatedModules, true);
      }
      workerState.filepath = file.filepath;
      if (method === "run") {
        // async leak detection is armed before the file runs and harvested after
        const collectAsyncLeaks = config.detectAsyncLeaks ? detectAsyncLeaks(file.filepath, workerState.ctx.projectName) : void 0;
        await traces.$(`vitest.test.runner.${method}.module`, { attributes: { "code.file.path": file.filepath } }, () => startTests([file], testRunner));
        const leaks = await collectAsyncLeaks?.();
        if (leaks?.length) workerState.rpc.onAsyncLeaks(leaks);
      } else await traces.$(`vitest.test.runner.${method}.module`, { attributes: { "code.file.path": file.filepath } }, () => collectTests([file], testRunner));
      // reset after tests, because user might call `vi.setConfig` in setupFile
      vi.resetConfig();
      // mocks should not affect different files
      vi.restoreAllMocks();
    }
  });
  await traces.$("vitest.runtime.coverage.stop", () => stopCoverageInsideWorker(config.coverage, moduleRunner, { isolate: config.isolate }));
}
// Memoized module runner: startModuleRunner creates it at most once per worker.
let _moduleRunner;
// Module graph and execution info are shared across runs inside this worker.
const evaluatedModules = new VitestEvaluatedModules();
const moduleExecutionInfo = /* @__PURE__ */ new Map();

/**
 * Creates (once) and returns the module runner used to execute test files.
 *
 * Picks the native Node runner when `experimental.viteModuleRunner` is
 * disabled, optionally wiring up the native module mocker; otherwise starts
 * the Vite-based module runner.
 *
 * @param {object} options - runner options including the initial worker `state`
 * @returns {Promise<object>} the (cached) module runner
 */
async function startModuleRunner(options) {
  if (_moduleRunner) return _moduleRunner;
  // tests must not be able to kill the worker process; surface the attempt
  // as a catchable error instead
  process.exit = (code = process.exitCode || 0) => {
    throw new Error(`process.exit unexpectedly called with "${code}"`);
  };
  // prefer the live worker state when available, falling back to the
  // state captured at startup
  const state = () => getSafeWorkerState() || options.state;
  listenForErrors(state);
  if (options.state.config.experimental.viteModuleRunner === false) {
    const root = options.state.config.root;
    let mocker;
    if (options.state.config.experimental.nodeLoader !== false) {
      // this additionally imports acorn/magic-string
      const { NativeModuleMocker } = await import('./nativeModuleMocker.DndvSdL6.js');
      mocker = new NativeModuleMocker({
        async resolveId(id, importer) {
          // TODO: use import.meta.resolve instead
          return state().rpc.resolve(id, importer, "__vitest__");
        },
        root,
        moduleDirectories: state().config.deps.moduleDirectories || ["/node_modules/"],
        // tracing is optional for the mocker; use a disabled stub when absent
        traces: options.traces || new Traces({ enabled: false }),
        getCurrentTestFilepath() {
          return state().filepath;
        },
        spyModule
      });
    }
    _moduleRunner = new NativeModuleRunner(root, mocker);
    return _moduleRunner;
  }
  _moduleRunner = startVitestModuleRunner(options);
  return _moduleRunner;
}
// Environment instance and its setup duration, captured here and reused
// later by runBaseTests.
let _currentEnvironment;
let _environmentTime;
/** @experimental */
/**
 * Loads and sets up the test environment for this worker, injecting the
 * user's serialized `define`s into the global scope first.
 *
 * @param {object} context - worker context with `config`, `environment`, `rpc`, `traces`
 * @returns {Promise<() => Promise<void>>} async teardown callback that
 *   reverses the environment setup and closes the loader
 * @throws {Error} when the serialized defines fail to evaluate
 */
async function setupBaseEnvironment(context) {
  // native runner needs Node loader hooks registered before anything is imported
  if (context.config.experimental.viteModuleRunner === false) {
    const { setupNodeLoaderHooks } = await import('./native.DPzPHdi5.js');
    await setupNodeLoaderHooks(context);
  }
  const startTime = performance.now();
  const { environment: { name: environmentName, options: environmentOptions }, rpc, config } = context;
  // we could load @vite/env, but it would take ~8ms, while this takes ~0.02ms
  if (context.config.serializedDefines) try {
    // evaluate the defines in an IIFE so they attach to globalThis
    runInThisContext(`(() =>{\n${context.config.serializedDefines}})()`, {
      lineOffset: 1,
      filename: "virtual:load-defines.js"
    });
  } catch (error) {
    throw new Error(`Failed to load custom "defines": ${error.message}`);
  }
  const otel = context.traces;
  const { environment, loader } = await loadEnvironment(environmentName, config.root, rpc, otel, context.config.experimental.viteModuleRunner);
  _currentEnvironment = environment;
  const env = await otel.$("vitest.runtime.environment.setup", { attributes: {
    "vitest.environment": environment.name,
    "vitest.environment.vite_environment": environment.viteEnvironment || environment.name
  } }, () => environment.setup(globalThis, environmentOptions || config.environmentOptions || {}));
  _environmentTime = performance.now() - startTime;
  if (config.chaiConfig) setupChaiConfig(config.chaiConfig);
  // teardown: undo the environment setup, then close the module loader
  return async () => {
    await otel.$("vitest.runtime.environment.teardown", () => env.teardown(globalThis));
    await loader?.close();
  };
}
/** @experimental */
/**
 * Entry point invoked by the pool for each batch of test files: wires the
 * fresh worker state to the worker's shared module graph, invalidates
 * changed modules, then delegates execution to `run`.
 *
 * @param {"run" | "collect"} method - execute tests or only collect them
 * @param {object} state - worker state for this batch (with `ctx`)
 * @param {object} traces - tracing helper
 */
async function runBaseTests(method, state, traces) {
  const { ctx } = state;
  // environment was created by setupBaseEnvironment earlier in this worker
  state.environment = _currentEnvironment;
  state.durations.environment = _environmentTime;
  // state has new context, but we want to reuse existing ones
  state.evaluatedModules = evaluatedModules;
  state.moduleExecutionInfo = moduleExecutionInfo;
  provideWorkerState(globalThis, state);
  // drop modules invalidated by the watcher since the last run
  if (ctx.invalidates) ctx.invalidates.forEach((filepath) => {
    (state.evaluatedModules.fileToModulesMap.get(filepath) || []).forEach((module) => {
      state.evaluatedModules.invalidateModule(module);
    });
  });
  // the test files themselves are always re-evaluated
  ctx.files.forEach((i) => {
    const filepath = i.filepath;
    (state.evaluatedModules.fileToModulesMap.get(filepath) || []).forEach((module) => {
      state.evaluatedModules.invalidateModule(module);
    });
  });
  const moduleRunner = await startModuleRunner({
    state,
    evaluatedModules: state.evaluatedModules,
    spyModule,
    createImportMeta: createNodeImportMeta,
    traces
  });
  emitModuleRunner(moduleRunner);
  await run(method, ctx.files, ctx.config, moduleRunner, _currentEnvironment, traces);
}
| export { runBaseTests as r, setupBaseEnvironment as s }; |
Sorry, the diff of this file is too big to display
Sorry, the diff of this file is too big to display
| import { existsSync, promises, readdirSync, writeFileSync } from 'node:fs'; | ||
| import module$1 from 'node:module'; | ||
| import path from 'node:path'; | ||
| import { pathToFileURL, fileURLToPath } from 'node:url'; | ||
| import { slash, shuffle, toArray, cleanUrl } from '@vitest/utils/helpers'; | ||
| import { resolve, relative, normalize } from 'pathe'; | ||
| import pm from 'picomatch'; | ||
| import { glob } from 'tinyglobby'; | ||
| import c from 'tinyrainbow'; | ||
| import { c as configDefaults, e as benchmarkConfigDefaults, a as coverageConfigDefaults } from './defaults.CdU2lD-q.js'; | ||
| import crypto from 'node:crypto'; | ||
| import { r as resolveModule } from './index.BCY_7LL2.js'; | ||
| import { mergeConfig } from 'vite'; | ||
| import { c as configFiles, d as defaultBrowserPort, a as defaultInspectPort, b as defaultPort } from './constants.CPYnjOGj.js'; | ||
| import './env.D4Lgay0q.js'; | ||
| import nodeos__default from 'node:os'; | ||
| import { isCI, isAgent, provider } from 'std-env'; | ||
| import { r as resolveCoverageProviderModule } from './coverage.D_JHT54q.js'; | ||
| const hash = crypto.hash ?? ((algorithm, data, outputEncoding) => crypto.createHash(algorithm).update(data).digest(outputEncoding)); | ||
/**
 * Converts a percentage string (e.g. "50%") into a concrete worker count
 * scaled against the machine's available parallelism.
 *
 * @param {string} percent - percentage value; parsed with parseInt, so any
 *   trailing "%" is ignored
 * @returns {number} worker count clamped to [1, maxWorkersCount]
 */
function getWorkersCountByPercentage(percent) {
  // availableParallelism (Node >= 19) respects container CPU limits;
  // fall back to the raw CPU count on older runtimes
  const maxWorkersCount = nodeos__default.availableParallelism?.() ?? nodeos__default.cpus().length;
  // explicit radix: never rely on parseInt guessing the base of user input
  const workersCountByPercentage = Math.round(Number.parseInt(percent, 10) / 100 * maxWorkersCount);
  return Math.max(1, Math.min(maxWorkersCount, workersCountByPercentage));
}
/**
 * Default test-file sequencer: supports deterministic sharding across CI
 * machines and cache-aware ordering (previously failed and slow files first).
 */
class BaseSequencer {
  ctx;
  constructor(ctx) {
    this.ctx = ctx;
  }
  // async so it can be extended by other sequencers
  async shard(files) {
    const { config } = this.ctx;
    const { index, count } = config.shard;
    const [from, to] = this.calculateShardRange(files.length, index, count);
    // hash each spec's root-relative path so the ordering is stable across machines
    const hashed = [...files].map((spec) => {
      const relativePath = resolve(slash(config.root), slash(spec.moduleId))?.slice(config.root.length);
      return {
        spec,
        hash: hash("sha1", relativePath, "hex")
      };
    });
    hashed.sort((a, b) => (a.hash < b.hash ? -1 : a.hash > b.hash ? 1 : 0));
    return hashed.slice(from, to).map(({ spec }) => spec);
  }
  // async so it can be extended by other sequencers
  async sort(files) {
    const cache = this.ctx.cache;
    const root = this.ctx.config.root;
    return [...files].sort((a, b) => {
      // "sequence.groupOrder" has the highest priority
      const byGroupOrder = a.project.config.sequence.groupOrder - b.project.config.sequence.groupOrder;
      if (byGroupOrder !== 0) return byGroupOrder;
      // projects run sequentially, ordered by name
      if (a.project.name !== b.project.name) return a.project.name < b.project.name ? -1 : 1;
      // isolated projects run first
      if (a.project.config.isolate && !b.project.config.isolate) return -1;
      if (!a.project.config.isolate && b.project.config.isolate) return 1;
      const keyA = `${a.project.name}:${relative(root, a.moduleId)}`;
      const keyB = `${b.project.name}:${relative(root, b.moduleId)}`;
      const resultsA = cache.getFileTestResults(keyA);
      const resultsB = cache.getFileTestResults(keyB);
      if (!resultsA || !resultsB) {
        const statsA = cache.getFileStats(keyA);
        const statsB = cache.getFileStats(keyB);
        // files without cached stats run first
        if (!statsA || !statsB) return !statsA && statsB ? -1 : !statsB && statsA ? 1 : 0;
        // larger files run first
        return statsB.size - statsA.size;
      }
      // previously failed files run first
      if (resultsA.failed && !resultsB.failed) return -1;
      if (!resultsA.failed && resultsB.failed) return 1;
      // slower files run first
      return resultsB.duration - resultsA.duration;
    });
  }
  // Splits `filesCount` files into `count` shards as evenly as possible and
  // returns the [start, end) slice bounds for the 1-based shard `index`.
  calculateShardRange(filesCount, index, count) {
    const baseShardSize = Math.floor(filesCount / count);
    const remainder = filesCount % count;
    // the first `remainder` shards each take one extra file
    if (remainder >= index) {
      const size = baseShardSize + 1;
      return [size * (index - 1), size * index];
    }
    const start = remainder * (baseShardSize + 1) + (index - remainder - 1) * baseShardSize;
    return [start, start + baseShardSize];
  }
}
/**
 * Sequencer that shuffles the test files deterministically using the
 * configured `sequence.seed` instead of cache-aware ordering.
 */
class RandomSequencer extends BaseSequencer {
  async sort(files) {
    const seed = this.ctx.config.sequence.seed;
    return shuffle(files, seed);
  }
}
/**
 * Resolves `path` like a module specifier relative to `root`, falling back
 * to a plain filesystem resolve, and normalizes the result.
 * @param {string} path - module specifier or file path
 * @param {string} root - directory to resolve against
 * @returns {string} normalized absolute path
 */
function resolvePath(path, root) {
  const asModule = resolveModule(path, { paths: [root] });
  return normalize(asModule ?? resolve(root, path));
}
/**
 * Normalizes the --inspect/--inspect-brk CLI value into `{ host?, port? }`.
 * Accepts booleans (flag only), a bare port number, "host", or "host:port".
 * @throws {Error} when a URL is passed instead of "host:port"
 */
function parseInspector(inspect) {
  // a bare flag carries no host/port information
  if (inspect === void 0 || typeof inspect === "boolean") return {};
  if (typeof inspect === "number") return { port: inspect };
  if (inspect.match(/https?:\//)) throw new Error(`Inspector host cannot be a URL. Use "host:port" instead of "${inspect}"`);
  const [host, port] = inspect.split(":");
  if (!port) return { host };
  // a non-numeric port silently falls back to the default inspector port
  const parsedPort = Number(port) || defaultInspectPort;
  return { host, port: parsedPort };
}
/**
 * @deprecated Internal function
 */
function resolveApiServerConfig(options, defaultPort, parentApi, logger) {
  let api;
  // boolean/number shorthands (and the implicit UI server) map to a port
  if (options.api === true || (options.ui && !options.api)) api = { port: defaultPort };
  else if (typeof options.api === "number") api = { port: options.api };
  if (typeof options.api === "object") {
    if (api) {
      // merge explicit fields over the shorthand-derived config
      if (options.api.port) api.port = options.api.port;
      if (options.api.strictPort) api.strictPort = options.api.strictPort;
      if (options.api.host) api.host = options.api.host;
    } else {
      api = { ...options.api };
    }
  }
  if (!api) api = { middlewareMode: true };
  else if (!api.port && !api.middlewareMode) api.port = defaultPort;
  // if the API server is exposed to network, disable write operations by default
  const exposedToNetwork = !api.middlewareMode && api.host && api.host !== "localhost" && api.host !== "127.0.0.1";
  if (exposedToNetwork) {
    // assigned to browser
    if (parentApi && api.allowWrite == null && api.allowExec == null) logger?.error(c.yellow(`${c.yellowBright(" WARNING ")} API server is exposed to network, disabling write and exec operations by default for security reasons. This can cause some APIs to not work as expected. Set \`browser.api.allowExec\` manually to hide this warning. See https://vitest.dev/config/browser/api for more details.`));
    api.allowWrite ??= parentApi?.allowWrite ?? false;
    api.allowExec ??= parentApi?.allowExec ?? false;
  } else {
    api.allowWrite ??= parentApi?.allowWrite ?? true;
    api.allowExec ??= parentApi?.allowExec ?? true;
  }
  return api;
}
/**
 * Resolves a worker-count option that may be a percentage string ("50%")
 * or a plain number / numeric string.
 * @returns {number} concrete worker count
 */
function resolveInlineWorkerOption(value) {
  const isPercentage = typeof value === "string" && value.trim().endsWith("%");
  return isPercentage ? getWorkersCountByPercentage(value) : Number(value);
}
| function resolveConfig$1(vitest, options, viteConfig) { | ||
| const mode = vitest.mode; | ||
| const logger = vitest.logger; | ||
| if (options.dom) { | ||
| if (viteConfig.test?.environment != null && viteConfig.test.environment !== "happy-dom") logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} Your config.test.environment ("${viteConfig.test.environment}") conflicts with --dom flag ("happy-dom"), ignoring "${viteConfig.test.environment}"`)); | ||
| options.environment = "happy-dom"; | ||
| } | ||
| const resolved = { | ||
| ...configDefaults, | ||
| ...options, | ||
| root: viteConfig.root, | ||
| mode | ||
| }; | ||
| if (resolved.retry && typeof resolved.retry === "object" && typeof resolved.retry.condition === "function") { | ||
| logger.console.warn(c.yellow("Warning: retry.condition function cannot be used inside a config file. Use a RegExp pattern instead, or define the function in your test file.")); | ||
| resolved.retry = { | ||
| ...resolved.retry, | ||
| condition: void 0 | ||
| }; | ||
| } | ||
| if (options.pool && typeof options.pool !== "string") { | ||
| resolved.pool = options.pool.name; | ||
| resolved.poolRunner = options.pool; | ||
| } | ||
| if ("poolOptions" in resolved) logger.deprecate("`test.poolOptions` was removed in Vitest 4. All previous `poolOptions` are now top-level options. Please, refer to the migration guide: https://vitest.dev/guide/migration#pool-rework"); | ||
| resolved.pool ??= "forks"; | ||
| resolved.project = toArray(resolved.project); | ||
| resolved.provide ??= {}; | ||
| // shallow copy tags array to avoid mutating user config | ||
| resolved.tags = [...resolved.tags || []]; | ||
| const definedTags = /* @__PURE__ */ new Set(); | ||
| resolved.tags.forEach((tag) => { | ||
| if (!tag.name || typeof tag.name !== "string") throw new Error(`Each tag defined in "test.tags" must have a "name" property, received: ${JSON.stringify(tag)}`); | ||
| if (definedTags.has(tag.name)) throw new Error(`Tag name "${tag.name}" is already defined in "test.tags". Tag names must be unique.`); | ||
| if (tag.name.match(/\s/)) throw new Error(`Tag name "${tag.name}" is invalid. Tag names cannot contain spaces.`); | ||
| if (tag.name.match(/([!()*|&])/)) throw new Error(`Tag name "${tag.name}" is invalid. Tag names cannot contain "!", "*", "&", "|", "(", or ")".`); | ||
| if (tag.name.match(/^\s*(and|or|not)\s*$/i)) throw new Error(`Tag name "${tag.name}" is invalid. Tag names cannot be a logical operator like "and", "or", "not".`); | ||
| if (typeof tag.retry === "object" && typeof tag.retry.condition === "function") throw new TypeError(`Tag "${tag.name}": retry.condition function cannot be used inside a config file. Use a RegExp pattern instead, or define the function in your test file.`); | ||
| if (tag.priority != null && (typeof tag.priority !== "number" || tag.priority < 0)) throw new TypeError(`Tag "${tag.name}": priority must be a non-negative number.`); | ||
| definedTags.add(tag.name); | ||
| }); | ||
| resolved.name = typeof options.name === "string" ? options.name : options.name?.label || ""; | ||
| resolved.color = typeof options.name !== "string" ? options.name?.color : void 0; | ||
| if (resolved.environment === "browser") throw new Error(`Looks like you set "test.environment" to "browser". To enable Browser Mode, use "test.browser.enabled" instead.`); | ||
| const inspector = resolved.inspect || resolved.inspectBrk; | ||
| resolved.inspector = { | ||
| ...resolved.inspector, | ||
| ...parseInspector(inspector), | ||
| enabled: !!inspector, | ||
| waitForDebugger: options.inspector?.waitForDebugger ?? !!resolved.inspectBrk | ||
| }; | ||
| if (viteConfig.base !== "/") resolved.base = viteConfig.base; | ||
| resolved.clearScreen = resolved.clearScreen ?? viteConfig.clearScreen ?? true; | ||
| if (options.shard) { | ||
| if (resolved.watch) throw new Error("You cannot use --shard option with enabled watch"); | ||
| const [indexString, countString] = options.shard.split("/"); | ||
| const index = Math.abs(Number.parseInt(indexString, 10)); | ||
| const count = Math.abs(Number.parseInt(countString, 10)); | ||
| if (Number.isNaN(count) || count <= 0) throw new Error("--shard <count> must be a positive number"); | ||
| if (Number.isNaN(index) || index <= 0 || index > count) throw new Error("--shard <index> must be a positive number less then <count>"); | ||
| resolved.shard = { | ||
| index, | ||
| count | ||
| }; | ||
| } | ||
| if (resolved.standalone && !resolved.watch) throw new Error(`Vitest standalone mode requires --watch`); | ||
| if (resolved.mergeReports && resolved.watch) throw new Error(`Cannot merge reports with --watch enabled`); | ||
| if (resolved.maxWorkers) resolved.maxWorkers = resolveInlineWorkerOption(resolved.maxWorkers); | ||
| if (!(options.fileParallelism ?? mode !== "benchmark")) | ||
| // ignore user config, parallelism cannot be implemented without limiting workers | ||
| resolved.maxWorkers = 1; | ||
| if (resolved.maxConcurrency === 0) { | ||
| logger.console.warn(c.yellow(`The option "maxConcurrency" cannot be set to 0. Using default value ${configDefaults.maxConcurrency} instead.`)); | ||
| resolved.maxConcurrency = configDefaults.maxConcurrency; | ||
| } | ||
| if (resolved.inspect || resolved.inspectBrk) { | ||
| if (resolved.maxWorkers !== 1) { | ||
| const inspectOption = `--inspect${resolved.inspectBrk ? "-brk" : ""}`; | ||
| throw new Error(`You cannot use ${inspectOption} without "--no-file-parallelism"`); | ||
| } | ||
| } | ||
| // apply browser CLI options only if the config already has the browser config and not disabled manually | ||
| if (vitest._cliOptions.browser && resolved.browser && (resolved.browser.enabled !== false || vitest._cliOptions.browser.enabled)) resolved.browser = mergeConfig(resolved.browser, vitest._cliOptions.browser); | ||
| resolved.browser ??= {}; | ||
| const browser = resolved.browser; | ||
| if (browser.enabled) { | ||
| const instances = browser.instances; | ||
| if (!browser.instances) browser.instances = []; | ||
| // use `chromium` by default when the preview provider is specified | ||
| // for a smoother experience. if chromium is not available, it will | ||
| // open the default browser anyway | ||
| if (!browser.instances.length && browser.provider?.name === "preview") browser.instances = [{ browser: "chromium" }]; | ||
| if (browser.name && instances?.length) { | ||
| // --browser=chromium filters configs to a single one | ||
| browser.instances = browser.instances.filter((instance) => instance.browser === browser.name); | ||
| // if `instances` were defined, but now they are empty, | ||
| // let's throw an error because the filter is invalid | ||
| if (!browser.instances.length) throw new Error([`"browser.instances" was set in the config, but the array is empty. Define at least one browser config.`, ` The "browser.name" was set to "${browser.name}" which filtered all configs (${instances.map((c) => c.browser).join(", ")}). Did you mean to use another name?`].join("")); | ||
| } | ||
| } | ||
| if (resolved.coverage.enabled && resolved.coverage.provider === "istanbul" && resolved.experimental?.viteModuleRunner === false) throw new Error(`"Istanbul" coverage provider is not compatible with "experimental.viteModuleRunner: false". Please, enable "viteModuleRunner" or switch to "v8" coverage provider.`); | ||
| if (browser.enabled && resolved.detectAsyncLeaks) logger.console.warn(c.yellow("The option \"detectAsyncLeaks\" is not supported in browser mode and will be ignored.")); | ||
| const containsChromium = hasBrowserChromium(vitest, resolved); | ||
| const hasOnlyChromium = hasOnlyBrowserChromium(vitest, resolved); | ||
| // Browser-mode "Chromium" only features: | ||
| if (browser.enabled && (!containsChromium || !hasOnlyChromium)) { | ||
| const browserConfig = ` | ||
| { | ||
| browser: { | ||
| provider: ${browser.provider?.name || "preview"}(), | ||
| instances: [ | ||
| ${(browser.instances || []).map((i) => `{ browser: '${i.browser}' }`).join(",\n ")} | ||
| ], | ||
| }, | ||
| } | ||
| `.trim(); | ||
| const preferredProvider = !browser.provider?.name || browser.provider.name === "preview" ? "playwright" : browser.provider.name; | ||
| const correctExample = ` | ||
| { | ||
| browser: { | ||
| provider: ${preferredProvider}(), | ||
| instances: [ | ||
| { browser: '${preferredProvider === "playwright" ? "chromium" : "chrome"}' } | ||
| ], | ||
| }, | ||
| } | ||
| `.trim(); | ||
| // requires all projects to be chromium | ||
| if (!hasOnlyChromium && resolved.coverage.enabled && resolved.coverage.provider === "v8") { | ||
| const coverageExample = ` | ||
| { | ||
| coverage: { | ||
| provider: 'istanbul', | ||
| }, | ||
| } | ||
| `.trim(); | ||
| throw new Error(`@vitest/coverage-v8 does not work with\n${browserConfig}\n\nUse either:\n${correctExample}\n\n...or change your coverage provider to:\n${coverageExample}\n`); | ||
| } | ||
| // ignores non-chromium browsers when there is at least one chromium project | ||
| if (!containsChromium && (resolved.inspect || resolved.inspectBrk)) { | ||
| const inspectOption = `--inspect${resolved.inspectBrk ? "-brk" : ""}`; | ||
| throw new Error(`${inspectOption} does not work with\n${browserConfig}\n\nUse either:\n${correctExample}\n\n...or disable ${inspectOption}\n`); | ||
| } | ||
| } | ||
| resolved.coverage.reporter = resolveCoverageReporters(resolved.coverage.reporter); | ||
| if (resolved.coverage.changed === void 0 && resolved.changed !== void 0) resolved.coverage.changed = resolved.changed; | ||
| if (resolved.coverage.enabled && resolved.coverage.reportsDirectory) { | ||
| const reportsDirectory = resolve(resolved.root, resolved.coverage.reportsDirectory); | ||
| if (reportsDirectory === resolved.root || reportsDirectory === process.cwd()) throw new Error(`You cannot set "coverage.reportsDirectory" as ${reportsDirectory}. Vitest needs to be able to remove this directory before test run`); | ||
| if (resolved.coverage.htmlDir) resolved.coverage.htmlDir = resolve(resolved.root, resolved.coverage.htmlDir); | ||
| // infer default htmlDir based on builtin reporter's html output location | ||
| if (!resolved.coverage.htmlDir) { | ||
| const htmlReporter = resolved.coverage.reporter.find(([name]) => name === "html" || name === "html-spa"); | ||
| if (htmlReporter) { | ||
| const [, options] = htmlReporter; | ||
| const subdir = options && typeof options === "object" && "subdir" in options && typeof options.subdir === "string" ? options.subdir : void 0; | ||
| resolved.coverage.htmlDir = resolve(reportsDirectory, subdir || "."); | ||
| } else if (resolved.coverage.reporter.find(([name]) => name === "lcov")) resolved.coverage.htmlDir = resolve(reportsDirectory, "lcov-report"); | ||
| } | ||
| } | ||
| if (resolved.coverage.enabled && resolved.coverage.provider === "custom" && resolved.coverage.customProviderModule) resolved.coverage.customProviderModule = resolvePath(resolved.coverage.customProviderModule, resolved.root); | ||
| resolved.expect ??= {}; | ||
| resolved.deps ??= {}; | ||
| resolved.deps.moduleDirectories ??= []; | ||
| resolved.deps.optimizer ??= {}; | ||
| resolved.deps.optimizer.ssr ??= {}; | ||
| resolved.deps.optimizer.ssr.enabled ??= false; | ||
| resolved.deps.optimizer.client ??= {}; | ||
| resolved.deps.optimizer.client.enabled ??= false; | ||
| resolved.deps.web ??= {}; | ||
| resolved.deps.web.transformAssets ??= true; | ||
| resolved.deps.web.transformCss ??= true; | ||
| resolved.deps.web.transformGlobPattern ??= []; | ||
| resolved.setupFiles = toArray(resolved.setupFiles || []).map((file) => resolvePath(file, resolved.root)); | ||
| resolved.globalSetup = toArray(resolved.globalSetup || []).map((file) => resolvePath(file, resolved.root)); | ||
| // Add hard-coded default coverage exclusions. These cannot be overidden by user config. | ||
| // Override original exclude array for cases where user re-uses same object in test.exclude. | ||
| resolved.coverage.exclude = [ | ||
| ...resolved.coverage.exclude, | ||
| ...resolved.setupFiles.map((file) => `${resolved.coverage.allowExternal ? "**/" : ""}${relative(resolved.root, file)}`), | ||
| ...resolved.include, | ||
| resolved.config && slash(resolved.config), | ||
| ...configFiles, | ||
| "**/virtual:*", | ||
| "**/__x00__*", | ||
| "**/node_modules/**" | ||
| ].filter((pattern) => typeof pattern === "string"); | ||
| resolved.forceRerunTriggers = [...resolved.forceRerunTriggers, ...resolved.setupFiles]; | ||
| if (resolved.cliExclude) resolved.exclude.push(...resolved.cliExclude); | ||
| if (resolved.runner) resolved.runner = resolvePath(resolved.runner, resolved.root); | ||
| resolved.attachmentsDir = resolve(resolved.root, resolved.attachmentsDir ?? ".vitest-attachments"); | ||
| if (resolved.snapshotEnvironment) resolved.snapshotEnvironment = resolvePath(resolved.snapshotEnvironment, resolved.root); | ||
| resolved.testNamePattern = resolved.testNamePattern ? resolved.testNamePattern instanceof RegExp ? resolved.testNamePattern : new RegExp(resolved.testNamePattern) : void 0; | ||
| if (resolved.snapshotFormat && "plugins" in resolved.snapshotFormat) { | ||
| resolved.snapshotFormat.plugins = []; | ||
| // TODO: support it via separate config (like DiffOptions) or via `Function.toString()` | ||
| if (typeof resolved.snapshotFormat.compareKeys === "function") throw new TypeError(`"snapshotFormat.compareKeys" function is not supported.`); | ||
| } | ||
| const UPDATE_SNAPSHOT = resolved.update || process.env.UPDATE_SNAPSHOT; | ||
| resolved.snapshotOptions = { | ||
| expand: resolved.expandSnapshotDiff ?? false, | ||
| snapshotFormat: resolved.snapshotFormat || {}, | ||
| updateSnapshot: UPDATE_SNAPSHOT === "all" || UPDATE_SNAPSHOT === "new" || UPDATE_SNAPSHOT === "none" ? UPDATE_SNAPSHOT : isCI && !UPDATE_SNAPSHOT ? "none" : UPDATE_SNAPSHOT ? "all" : "new", | ||
| resolveSnapshotPath: options.resolveSnapshotPath, | ||
| snapshotEnvironment: null | ||
| }; | ||
| resolved.snapshotSerializers ??= []; | ||
| resolved.snapshotSerializers = resolved.snapshotSerializers.map((file) => resolvePath(file, resolved.root)); | ||
| resolved.forceRerunTriggers.push(...resolved.snapshotSerializers); | ||
| if (options.resolveSnapshotPath) delete resolved.resolveSnapshotPath; | ||
| resolved.execArgv ??= []; | ||
| resolved.pool ??= "threads"; | ||
| if (resolved.pool === "vmForks" || resolved.pool === "vmThreads" || resolved.pool === "typescript") resolved.isolate = false; | ||
| if (process.env.VITEST_MAX_WORKERS) resolved.maxWorkers = Number.parseInt(process.env.VITEST_MAX_WORKERS); | ||
| if (mode === "benchmark") { | ||
| resolved.benchmark = { | ||
| ...benchmarkConfigDefaults, | ||
| ...resolved.benchmark | ||
| }; | ||
| // override test config | ||
| resolved.coverage.enabled = false; | ||
| resolved.typecheck.enabled = false; | ||
| resolved.include = resolved.benchmark.include; | ||
| resolved.exclude = resolved.benchmark.exclude; | ||
| resolved.includeSource = resolved.benchmark.includeSource; | ||
| const reporters = Array.from(new Set([...toArray(resolved.benchmark.reporters), ...toArray(options.reporter)])).filter(Boolean); | ||
| if (reporters.length) resolved.benchmark.reporters = reporters; | ||
| else resolved.benchmark.reporters = ["default"]; | ||
| if (options.outputFile) resolved.benchmark.outputFile = options.outputFile; | ||
| // --compare from cli | ||
| if (options.compare) resolved.benchmark.compare = options.compare; | ||
| if (options.outputJson) resolved.benchmark.outputJson = options.outputJson; | ||
| } | ||
| if (typeof resolved.diff === "string") { | ||
| resolved.diff = resolvePath(resolved.diff, resolved.root); | ||
| resolved.forceRerunTriggers.push(resolved.diff); | ||
| } | ||
| resolved.api = { | ||
| ...resolveApiServerConfig(options, defaultPort), | ||
| token: crypto.randomUUID() | ||
| }; | ||
| if (options.related) resolved.related = toArray(options.related).map((file) => resolve(resolved.root, file)); | ||
| /* | ||
| * Reporters can be defined in many different ways: | ||
| * { reporter: 'json' } | ||
| * { reporter: { onFinish() { method() } } } | ||
| * { reporter: ['json', { onFinish() { method() } }] } | ||
| * { reporter: [[ 'json' ]] } | ||
| * { reporter: [[ 'json' ], 'html'] } | ||
| * { reporter: [[ 'json', { outputFile: 'test.json' } ], 'html'] } | ||
| */ | ||
| if (options.reporters) if (!Array.isArray(options.reporters)) | ||
| // Reporter name, e.g. { reporters: 'json' } | ||
| if (typeof options.reporters === "string") resolved.reporters = [[options.reporters, {}]]; | ||
| else resolved.reporters = [options.reporters]; | ||
| else { | ||
| resolved.reporters = []; | ||
| for (const reporter of options.reporters) if (Array.isArray(reporter)) | ||
| // Reporter with options, e.g. { reporters: [ [ 'json', { outputFile: 'test.json' } ] ] } | ||
| resolved.reporters.push([reporter[0], reporter[1] || {}]); | ||
| else if (typeof reporter === "string") | ||
| // Reporter name in array, e.g. { reporters: ["html", "json"]} | ||
| resolved.reporters.push([reporter, {}]); | ||
| else | ||
| // Inline reporter, e.g. { reporter: [{ onFinish() { method() } }] } | ||
| resolved.reporters.push(reporter); | ||
| } | ||
| if (mode !== "benchmark") { | ||
| // @ts-expect-error "reporter" is from CLI, should be absolute to the running directory | ||
| // it is passed down as "vitest --reporter ../reporter.js" | ||
| const reportersFromCLI = resolved.reporter; | ||
| const cliReporters = toArray(reportersFromCLI || []).map((reporter) => { | ||
| // ./reporter.js || ../reporter.js, but not .reporters/reporter.js | ||
| if (/^\.\.?\//.test(reporter)) return resolve(process.cwd(), reporter); | ||
| return reporter; | ||
| }); | ||
| if (cliReporters.length) { | ||
| // When CLI reporters are specified, preserve options from config file | ||
| const configReportersMap = /* @__PURE__ */ new Map(); | ||
| // Build a map of reporter names to their options from the config | ||
| for (const reporter of resolved.reporters) if (Array.isArray(reporter)) { | ||
| const [reporterName, reporterOptions] = reporter; | ||
| if (typeof reporterName === "string") configReportersMap.set(reporterName, reporterOptions); | ||
| } | ||
| resolved.reporters = Array.from(new Set(toArray(cliReporters))).filter(Boolean).map((reporter) => [reporter, configReportersMap.get(reporter) || {}]); | ||
| } | ||
| } | ||
| if (!resolved.reporters.length) { | ||
| resolved.reporters.push([isAgent ? "agent" : "default", {}]); | ||
| // also enable github-actions reporter as a default | ||
| if (process.env.GITHUB_ACTIONS === "true") resolved.reporters.push(["github-actions", {}]); | ||
| } | ||
| if (resolved.changed) resolved.passWithNoTests ??= true; | ||
| resolved.css ??= {}; | ||
| if (typeof resolved.css === "object") { | ||
| resolved.css.modules ??= {}; | ||
| resolved.css.modules.classNameStrategy ??= "stable"; | ||
| } | ||
| if (resolved.cache !== false) { | ||
| if (resolved.cache && typeof resolved.cache.dir === "string") vitest.logger.deprecate(`"cache.dir" is deprecated, use Vite's "cacheDir" instead if you want to change the cache director. Note caches will be written to "cacheDir\/vitest"`); | ||
| resolved.cache = { dir: viteConfig.cacheDir }; | ||
| } | ||
| resolved.sequence ??= {}; | ||
| if (resolved.sequence.shuffle && typeof resolved.sequence.shuffle === "object") { | ||
| const { files, tests } = resolved.sequence.shuffle; | ||
| resolved.sequence.sequencer ??= files ? RandomSequencer : BaseSequencer; | ||
| resolved.sequence.shuffle = tests; | ||
| } | ||
| if (!resolved.sequence?.sequencer) | ||
| // CLI flag has higher priority | ||
| resolved.sequence.sequencer = resolved.sequence.shuffle ? RandomSequencer : BaseSequencer; | ||
| resolved.sequence.groupOrder ??= 0; | ||
| resolved.sequence.hooks ??= "stack"; | ||
| // Set seed if either files or tests are shuffled | ||
| if (resolved.sequence.sequencer === RandomSequencer || resolved.sequence.shuffle) resolved.sequence.seed ??= Date.now(); | ||
| resolved.typecheck = { | ||
| ...configDefaults.typecheck, | ||
| ...resolved.typecheck | ||
| }; | ||
| resolved.typecheck ??= {}; | ||
| resolved.typecheck.enabled ??= false; | ||
| if (resolved.typecheck.enabled) logger.console.warn(c.yellow("Testing types with tsc and vue-tsc is an experimental feature.\nBreaking changes might not follow SemVer, please pin Vitest's version when using it.")); | ||
| resolved.browser.enabled ??= false; | ||
| resolved.browser.headless ??= isCI; | ||
| if (resolved.browser.isolate) logger.console.warn(c.yellow("`browser.isolate` is deprecated. Use top-level `isolate` instead.")); | ||
| resolved.browser.isolate ??= resolved.isolate ?? true; | ||
| resolved.browser.fileParallelism ??= options.fileParallelism ?? mode !== "benchmark"; | ||
| // disable in headless mode by default, and if CI is detected | ||
| resolved.browser.ui ??= resolved.browser.headless === true ? false : !isCI; | ||
| resolved.browser.commands ??= {}; | ||
| resolved.browser.detailsPanelPosition ??= "right"; | ||
| if (resolved.browser.screenshotDirectory) resolved.browser.screenshotDirectory = resolve(resolved.root, resolved.browser.screenshotDirectory); | ||
| if (resolved.inspector.enabled) resolved.browser.trackUnhandledErrors ??= false; | ||
| resolved.browser.viewport ??= {}; | ||
| resolved.browser.viewport.width ??= 414; | ||
| resolved.browser.viewport.height ??= 896; | ||
| resolved.browser.locators ??= {}; | ||
| resolved.browser.locators.testIdAttribute ??= "data-testid"; | ||
| if (typeof resolved.browser.provider === "string") { | ||
| const source = `@vitest/browser-${resolved.browser.provider}`; | ||
| throw new TypeError(`The \`browser.provider\` configuration was changed to accept a factory instead of a string. Add an import of "${resolved.browser.provider}" from "${source}" instead. See: https://vitest.dev/config/browser/provider`); | ||
| } | ||
| const isPreview = resolved.browser.provider?.name === "preview"; | ||
| if (!isPreview && resolved.browser.enabled && provider === "stackblitz") throw new Error(`stackblitz environment does not support the ${resolved.browser.provider?.name} provider. Please, use "@vitest/browser-preview" instead.`); | ||
| if (isPreview && resolved.browser.screenshotFailures === true) { | ||
| console.warn(c.yellow([ | ||
| `Browser provider "preview" doesn't support screenshots, `, | ||
| `so "browser.screenshotFailures" option is forcefully disabled. `, | ||
| `Set "browser.screenshotFailures" to false or remove it from the config to suppress this warning.` | ||
| ].join(""))); | ||
| resolved.browser.screenshotFailures = false; | ||
| } else resolved.browser.screenshotFailures ??= !isPreview && !resolved.browser.ui; | ||
| if (resolved.browser.provider && resolved.browser.provider.options == null) resolved.browser.provider.options = {}; | ||
| resolved.browser.api = resolveApiServerConfig(resolved.browser, defaultBrowserPort, resolved.api, logger) || { port: defaultBrowserPort }; | ||
| // enable includeTaskLocation by default in UI mode | ||
| if (resolved.browser.enabled) { | ||
| if (resolved.browser.ui) resolved.includeTaskLocation ??= true; | ||
| } else if (resolved.ui) resolved.includeTaskLocation ??= true; | ||
| if (typeof resolved.browser.trace === "string" || !resolved.browser.trace) resolved.browser.trace = { mode: resolved.browser.trace || "off" }; | ||
| if (resolved.browser.trace.tracesDir != null) resolved.browser.trace.tracesDir = resolvePath(resolved.browser.trace.tracesDir, resolved.root); | ||
| if (toArray(resolved.reporters).some((reporter) => { | ||
| if (Array.isArray(reporter)) return reporter[0] === "html"; | ||
| return false; | ||
| })) resolved.includeTaskLocation ??= true; | ||
| resolved.server ??= {}; | ||
| resolved.server.deps ??= {}; | ||
| if (resolved.server.debug?.dump || process.env.VITEST_DEBUG_DUMP) { | ||
| const userFolder = resolved.server.debug?.dump || process.env.VITEST_DEBUG_DUMP; | ||
| resolved.dumpDir = resolve(resolved.root, typeof userFolder === "string" && userFolder !== "true" ? userFolder : ".vitest-dump", resolved.name || "root"); | ||
| } | ||
| resolved.testTimeout ??= resolved.browser.enabled ? 15e3 : 5e3; | ||
| resolved.hookTimeout ??= resolved.browser.enabled ? 3e4 : 1e4; | ||
| resolved.experimental ??= {}; | ||
| if (resolved.experimental.openTelemetry?.sdkPath) { | ||
| const sdkPath = resolve(resolved.root, resolved.experimental.openTelemetry.sdkPath); | ||
| resolved.experimental.openTelemetry.sdkPath = pathToFileURL(sdkPath).toString(); | ||
| } | ||
| if (resolved.experimental.openTelemetry?.browserSdkPath) { | ||
| const browserSdkPath = resolve(resolved.root, resolved.experimental.openTelemetry.browserSdkPath); | ||
| resolved.experimental.openTelemetry.browserSdkPath = browserSdkPath; | ||
| } | ||
| if (resolved.experimental.fsModuleCachePath) resolved.experimental.fsModuleCachePath = resolve(resolved.root, resolved.experimental.fsModuleCachePath); | ||
| resolved.experimental.importDurations ??= {}; | ||
| resolved.experimental.importDurations.print ??= false; | ||
| resolved.experimental.importDurations.failOnDanger ??= false; | ||
| if (resolved.experimental.importDurations.limit == null) { | ||
| const shouldCollect = resolved.experimental.importDurations.print || resolved.experimental.importDurations.failOnDanger || resolved.ui; | ||
| resolved.experimental.importDurations.limit = shouldCollect ? 10 : 0; | ||
| } | ||
| resolved.experimental.importDurations.thresholds ??= {}; | ||
| resolved.experimental.importDurations.thresholds.warn ??= 100; | ||
| resolved.experimental.importDurations.thresholds.danger ??= 500; | ||
| return resolved; | ||
| } | ||
/**
 * Reports whether browser mode is enabled in the given resolved config.
 */
function isBrowserEnabled(config) {
	const browser = config.browser;
	return browser?.enabled ? true : false;
}
/**
 * Normalizes the `coverage.reporter` option into `[name, options]` pairs.
 * Accepts a single reporter, an array of reporter names, or an array of
 * `[name, options]` tuples — possibly mixed.
 */
function resolveCoverageReporters(configReporters) {
	// Single reporter, e.g. { reporter: "html" }
	if (!Array.isArray(configReporters)) return [[configReporters, {}]];
	return configReporters.map((reporter) => {
		// Tuple form, e.g. ["html", { skipEmpty: true }] or ["lcov"]
		if (Array.isArray(reporter)) return [reporter[0], reporter[1] || {}];
		// Plain name form, e.g. "json"
		return [reporter, {}];
	});
}
/**
 * Tells whether the given browser name is Chromium-based for the given
 * provider ("chromium" under playwright, "chrome"/"edge" otherwise).
 */
function isChromiumName(provider, name) {
	return provider === "playwright"
		? name === "chromium"
		: name === "chrome" || name === "edge";
}
/**
 * Returns true when at least one enabled, non-filtered browser target of the
 * given project config is Chromium-based. Preview provider never counts.
 */
function hasBrowserChromium(vitest, config) {
	const browser = config.browser;
	const usable = browser && browser.provider && browser.provider.name !== "preview" && browser.enabled;
	if (!usable) return false;
	// Single-browser configuration
	if (browser.name) return isChromiumName(browser.provider.name, browser.name);
	if (!browser.instances) return false;
	return browser.instances.some((instance) => {
		const label = instance.name || (config.name ? `${config.name} (${instance.browser})` : instance.browser);
		// browser config is filtered out by --project
		if (!vitest.matchesProjectFilter(label)) return false;
		return isChromiumName(browser.provider.name, instance.browser);
	});
}
/**
 * Returns true when every non-filtered browser target of the given project
 * config is Chromium-based. Preview provider never counts.
 */
function hasOnlyBrowserChromium(vitest, config) {
	const browser = config.browser;
	const usable = browser && browser.provider && browser.provider.name !== "preview" && browser.enabled;
	if (!usable) return false;
	// Single-browser configuration
	if (browser.name) return isChromiumName(browser.provider.name, browser.name);
	if (!browser.instances) return false;
	return browser.instances.every((instance) => {
		const label = instance.name || (config.name ? `${config.name} (${instance.browser})` : instance.browser);
		// Filtered-out instances don't break the "only chromium" property
		if (!vitest.matchesProjectFilter(label)) return true;
		return isChromiumName(browser.provider.name, instance.browser);
	});
}
/** Coverage metrics that thresholds are checked against. */
const THRESHOLD_KEYS = ["lines", "functions", "statements", "branches"];
/** Key for thresholds that apply globally rather than to a glob pattern. */
const GLOBAL_THRESHOLDS_KEY = "global";
/** Map key for coverage reported without an explicit project name. */
const DEFAULT_PROJECT = Symbol.for("default-project");
/** Monotonic counter used to name intermediate coverage JSON files. */
let uniqueId = 0;
/**
 * Resolves the configured coverage provider module and returns its provider
 * instance, or `null` when no coverage module could be resolved.
 */
async function getCoverageProvider(options, loader) {
	const coverageModule = await resolveCoverageProviderModule(options, loader);
	return coverageModule ? coverageModule.getProvider() : null;
}
/**
 * Shared base class for coverage providers (e.g. v8, istanbul). Handles
 * option resolution, per-suite temp-file bookkeeping of raw coverage
 * results, threshold checking and auto-updating, and source transforms
 * for files that no test imported.
 */
class BaseCoverageProvider {
	// Vitest context, assigned in `_initialize`.
	ctx;
	// Provider name (e.g. "v8") — assigned by the concrete subclass.
	name;
	// Version of the coverage provider package — assigned by the subclass.
	version;
	// Resolved coverage options (defaults merged with user config).
	options;
	// Cache of include/exclude matching results keyed by normalized filename.
	globCache = /* @__PURE__ */ new Map();
	// Marker appended by threshold auto-update; stripped before writing the config file back.
	autoUpdateMarker = "\n// __VITEST_COVERAGE_MARKER__";
	// projectName -> environment -> joined test filenames -> coverage JSON path.
	coverageFiles = /* @__PURE__ */ new Map();
	// In-flight writes of coverage JSON files; awaited before reading them back.
	pendingPromises = [];
	// Temp directory under `reportsDirectory` holding intermediate coverage files.
	coverageFilesDirectory;
	// Project roots used for include checks (narrowed when --project is set).
	roots = [];
	// When --changed / related files are used: only these files count as included.
	changedFiles;
	/**
	 * Binds this provider to the Vitest context and resolves coverage options.
	 */
	_initialize(ctx) {
		this.ctx = ctx;
		// Warn when the coverage package version differs from the running Vitest version
		if (ctx.version !== this.version) ctx.logger.warn(c.yellow(`Loaded ${c.inverse(c.yellow(` vitest@${ctx.version} `))} and ${c.inverse(c.yellow(` @vitest/coverage-${this.name}@${this.version} `))}.
Running mixed versions is not supported and may lead into bugs
Update your dependencies and make sure the versions match.`));
		const config = ctx._coverageOptions;
		this.options = {
			...coverageConfigDefaults,
			...config,
			provider: this.name,
			reportsDirectory: resolve(ctx.config.root, config.reportsDirectory || coverageConfigDefaults.reportsDirectory),
			reporter: resolveCoverageReporters(config.reporter || coverageConfigDefaults.reporter),
			// `thresholds: { 100: true }` is shorthand for 100% on every metric
			thresholds: config.thresholds && {
				...config.thresholds,
				lines: config.thresholds["100"] ? 100 : config.thresholds.lines,
				branches: config.thresholds["100"] ? 100 : config.thresholds.branches,
				functions: config.thresholds["100"] ? 100 : config.thresholds.functions,
				statements: config.thresholds["100"] ? 100 : config.thresholds.statements
			}
		};
		const shard = this.ctx.config.shard;
		// Shards get separate temp directories so parallel shard runs don't clash
		const tempDirectory = `.tmp${shard ? `-${shard.index}-${shard.count}` : ""}`;
		this.coverageFilesDirectory = resolve(this.options.reportsDirectory, tempDirectory);
		// If --project filter is set pick only roots of resolved projects
		this.roots = ctx.config.project?.length ? [...new Set(ctx.projects.map((project) => project.config.root))] : [ctx.config.root];
	}
	/**
	 * Check if file matches `coverage.include` but not `coverage.exclude`
	 */
	isIncluded(_filename, root) {
		const roots = root ? [root] : this.roots;
		const filename = slash(cleanUrl(_filename));
		const cacheHit = this.globCache.get(filename);
		if (cacheHit !== void 0) return cacheHit;
		// File outside project root with default allowExternal
		if (this.options.allowExternal === false && roots.every((root) => !filename.startsWith(root))) {
			this.globCache.set(filename, false);
			return false;
		}
		// By default `coverage.include` matches all files, except "coverage.exclude"
		const glob = this.options.include || "**";
		let included = pm.isMatch(filename, glob, {
			contains: true,
			dot: true,
			ignore: this.options.exclude
		});
		// With --changed, only changed files are considered included
		if (included && this.changedFiles) included = this.changedFiles.includes(filename);
		this.globCache.set(filename, included);
		return included;
	}
	/**
	 * Globs files under `root` matching `include` that no test covered.
	 */
	async getUntestedFilesByRoot(testedFiles, include, root) {
		let includedFiles = await glob(include, {
			cwd: root,
			ignore: [...this.options.exclude, ...testedFiles.map((file) => slash(file))],
			absolute: true,
			dot: true,
			onlyFiles: true
		});
		// Run again through picomatch as tinyglobby's exclude pattern is different ({ "exclude": ["math"] } should ignore "src/math.ts")
		includedFiles = includedFiles.filter((file) => this.isIncluded(file, root));
		if (this.changedFiles) includedFiles = this.changedFiles.filter((file) => includedFiles.includes(file));
		return includedFiles.map((file) => slash(path.resolve(root, file)));
	}
	/**
	 * Collects untested-but-included files from every resolved project root.
	 */
	async getUntestedFiles(testedFiles) {
		if (this.options.include == null) return [];
		const rootMapper = this.getUntestedFilesByRoot.bind(this, testedFiles, this.options.include);
		return (await Promise.all(this.roots.map(rootMapper))).flatMap((files) => files);
	}
	// Must be overridden by the concrete provider.
	createCoverageMap() {
		throw new Error("BaseReporter's createCoverageMap was not overwritten");
	}
	// Must be overridden by the concrete provider.
	async generateReports(_, __) {
		throw new Error("BaseReporter's generateReports was not overwritten");
	}
	// Must be overridden by the concrete provider.
	async parseConfigModule(_) {
		throw new Error("BaseReporter's parseConfigModule was not overwritten");
	}
	// Returns the resolved coverage options.
	resolveOptions() {
		return this.options;
	}
	/**
	 * Removes previous reports (when `clean` is true) and (re)creates the
	 * temp directory for intermediate coverage files.
	 */
	async clean(clean = true) {
		if (clean && existsSync(this.options.reportsDirectory)) await promises.rm(this.options.reportsDirectory, {
			recursive: true,
			force: true,
			maxRetries: 10
		});
		if (existsSync(this.coverageFilesDirectory)) await promises.rm(this.coverageFilesDirectory, {
			recursive: true,
			force: true,
			maxRetries: 10
		});
		await promises.mkdir(this.coverageFilesDirectory, { recursive: true });
		this.coverageFiles = /* @__PURE__ */ new Map();
		this.pendingPromises = [];
	}
	/**
	 * Stores coverage reported by a finished suite into a temp JSON file,
	 * keyed by project name, environment and the suite's test filenames.
	 */
	onAfterSuiteRun({ coverage, environment, projectName, testFiles }) {
		if (!coverage) return;
		let entry = this.coverageFiles.get(projectName || DEFAULT_PROJECT);
		if (!entry) {
			entry = {};
			this.coverageFiles.set(projectName || DEFAULT_PROJECT, entry);
		}
		const testFilenames = testFiles.join();
		const filename = resolve(this.coverageFilesDirectory, `coverage-${uniqueId++}.json`);
		entry[environment] ??= {};
		// If there's a result from previous run, overwrite it
		entry[environment][testFilenames] = filename;
		const promise = promises.writeFile(filename, JSON.stringify(coverage), "utf-8");
		this.pendingPromises.push(promise);
	}
	/**
	 * Reads back all stored coverage files in chunks of
	 * `processingConcurrency`, invoking `onFileRead` per parsed file and
	 * `onFinished` once per project/environment pair.
	 */
	async readCoverageFiles({ onFileRead, onFinished, onDebug }) {
		let index = 0;
		const total = this.pendingPromises.length;
		// Wait until every write queued by onAfterSuiteRun has flushed
		await Promise.all(this.pendingPromises);
		this.pendingPromises = [];
		for (const [projectName, coveragePerProject] of this.coverageFiles.entries()) for (const [environment, coverageByTestfiles] of Object.entries(coveragePerProject)) {
			const filenames = Object.values(coverageByTestfiles);
			const project = this.ctx.getProjectByName(projectName);
			for (const chunk of this.toSlices(filenames, this.options.processingConcurrency)) {
				if (onDebug.enabled) {
					index += chunk.length;
					onDebug(`Reading coverage results ${index}/${total}`);
				}
				await Promise.all(chunk.map(async (filename) => {
					const contents = await promises.readFile(filename, "utf-8");
					onFileRead(JSON.parse(contents));
				}));
			}
			await onFinished(project, environment);
		}
	}
	/**
	 * Drops in-memory bookkeeping and removes the temp coverage directory.
	 */
	async cleanAfterRun() {
		this.coverageFiles = /* @__PURE__ */ new Map();
		await promises.rm(this.coverageFilesDirectory, { recursive: true });
		// Remove empty reports directory, e.g. when only text-reporter is used
		if (readdirSync(this.options.reportsDirectory).length === 0) await promises.rm(this.options.reportsDirectory, { recursive: true });
	}
	/**
	 * Resolves the changed-files set (from `coverage.changed` or the test
	 * run's `--changed` flag) before a run starts.
	 */
	async onTestRunStart() {
		if (this.options.changed) {
			const { VitestGit } = await import('./git.Bm2pzPAa.js');
			this.changedFiles = await new VitestGit(this.ctx.config.root).findChangedFiles({ changedSince: this.options.changed }) ?? void 0;
		} else if (this.ctx.config.changed) this.changedFiles = this.ctx.config.related;
		// Include-check results depend on changedFiles, so invalidate the cache
		if (this.changedFiles) this.globCache.clear();
	}
	/**
	 * Discards collected coverage on failure unless `reportOnFailure` is set.
	 */
	async onTestFailure() {
		if (!this.options.reportOnFailure) await this.cleanAfterRun();
	}
	/**
	 * Generates the configured reports, then cleans temp files unless running
	 * in watch mode with `cleanOnRerun` disabled.
	 */
	async reportCoverage(coverageMap, { allTestsRun }) {
		await this.generateReports(coverageMap || this.createCoverageMap(), allTestsRun);
		if (!(!this.options.cleanOnRerun && this.ctx.config.watch)) await this.cleanAfterRun();
	}
	/**
	 * Checks configured thresholds against the coverage map and, when
	 * `thresholds.autoUpdate` is set and all tests ran, rewrites the
	 * configuration file with the bumped thresholds.
	 */
	async reportThresholds(coverageMap, allTestsRun) {
		const resolvedThresholds = this.resolveThresholds(coverageMap);
		this.checkThresholds(resolvedThresholds);
		if (this.options.thresholds?.autoUpdate && allTestsRun) {
			if (!this.ctx.vite.config.configFile) throw new Error("Missing configurationFile. The \"coverage.thresholds.autoUpdate\" can only be enabled when configuration file is used.");
			const configFilePath = this.ctx.vite.config.configFile;
			const configModule = await this.parseConfigModule(configFilePath);
			await this.updateThresholds({
				thresholds: resolvedThresholds,
				configurationFile: configModule,
				onUpdate: () => writeFileSync(configFilePath, configModule.generate().code.replace(this.autoUpdateMarker, ""), "utf-8")
			});
		}
	}
	/**
	 * Constructs collected coverage and users' threshold options into separate sets
	 * where each threshold set holds their own coverage maps. Threshold set is either
	 * for specific files defined by glob pattern or global for all other files.
	 */
	resolveThresholds(coverageMap) {
		const resolvedThresholds = [];
		const files = coverageMap.files();
		const globalCoverageMap = this.createCoverageMap();
		for (const key of Object.keys(this.options.thresholds)) {
			// Skip non-glob keys: flags and the plain metric thresholds
			if (key === "perFile" || key === "autoUpdate" || key === "100" || THRESHOLD_KEYS.includes(key)) continue;
			const glob = key;
			const globThresholds = resolveGlobThresholds(this.options.thresholds[glob]);
			const globCoverageMap = this.createCoverageMap();
			const matcher = pm(glob);
			const matchingFiles = files.filter((file) => matcher(relative(this.ctx.config.root, file)));
			for (const file of matchingFiles) {
				const fileCoverage = coverageMap.fileCoverageFor(file);
				globCoverageMap.addFileCoverage(fileCoverage);
			}
			resolvedThresholds.push({
				name: glob,
				coverageMap: globCoverageMap,
				thresholds: globThresholds
			});
		}
		// Global threshold is for all files, even if they are included by glob patterns
		for (const file of files) {
			const fileCoverage = coverageMap.fileCoverageFor(file);
			globalCoverageMap.addFileCoverage(fileCoverage);
		}
		resolvedThresholds.unshift({
			name: GLOBAL_THRESHOLDS_KEY,
			coverageMap: globalCoverageMap,
			thresholds: {
				branches: this.options.thresholds?.branches,
				functions: this.options.thresholds?.functions,
				lines: this.options.thresholds?.lines,
				statements: this.options.thresholds?.statements
			}
		});
		return resolvedThresholds;
	}
	/**
	 * Check collected coverage against configured thresholds. Sets exit code to 1 when thresholds not reached.
	 */
	checkThresholds(allThresholds) {
		for (const { coverageMap, thresholds, name } of allThresholds) {
			if (thresholds.branches === void 0 && thresholds.functions === void 0 && thresholds.lines === void 0 && thresholds.statements === void 0) continue;
			// Construct list of coverage summaries where thresholds are compared against
			const summaries = this.options.thresholds?.perFile ? coverageMap.files().map((file) => ({
				file,
				summary: coverageMap.fileCoverageFor(file).toSummary()
			})) : [{
				file: null,
				summary: coverageMap.getCoverageSummary()
			}];
			// Check thresholds of each summary
			for (const { summary, file } of summaries) for (const thresholdKey of THRESHOLD_KEYS) {
				const threshold = thresholds[thresholdKey];
				if (threshold === void 0) continue;
				/**
				 * Positive thresholds are treated as minimum coverage percentages (X means: X% of lines must be covered),
				 * while negative thresholds are treated as maximum uncovered counts (-X means: X lines may be uncovered).
				 */
				if (threshold >= 0) {
					const coverage = summary.data[thresholdKey].pct;
					if (coverage < threshold) {
						process.exitCode = 1;
						/**
						 * Generate error message based on perFile flag:
						 * - ERROR: Coverage for statements (33.33%) does not meet threshold (85%) for src/math.ts
						 * - ERROR: Coverage for statements (50%) does not meet global threshold (85%)
						 */
						let errorMessage = `ERROR: Coverage for ${thresholdKey} (${coverage}%) does not meet ${name === GLOBAL_THRESHOLDS_KEY ? name : `"${name}"`} threshold (${threshold}%)`;
						if (this.options.thresholds?.perFile && file) errorMessage += ` for ${relative("./", file).replace(/\\/g, "/")}`;
						this.ctx.logger.error(errorMessage);
					}
				} else {
					const uncovered = summary.data[thresholdKey].total - summary.data[thresholdKey].covered;
					const absoluteThreshold = threshold * -1;
					if (uncovered > absoluteThreshold) {
						process.exitCode = 1;
						/**
						 * Generate error message based on perFile flag:
						 * - ERROR: Uncovered statements (33) exceed threshold (30) for src/math.ts
						 * - ERROR: Uncovered statements (33) exceed global threshold (30)
						 */
						let errorMessage = `ERROR: Uncovered ${thresholdKey} (${uncovered}) exceed ${name === GLOBAL_THRESHOLDS_KEY ? name : `"${name}"`} threshold (${absoluteThreshold})`;
						if (this.options.thresholds?.perFile && file) errorMessage += ` for ${relative("./", file).replace(/\\/g, "/")}`;
						this.ctx.logger.error(errorMessage);
					}
				}
			}
		}
	}
	/**
	 * Check if current coverage is above configured thresholds and bump the thresholds if needed
	 */
	async updateThresholds({ thresholds: allThresholds, onUpdate, configurationFile }) {
		let updatedThresholds = false;
		const config = resolveConfig(configurationFile);
		assertConfigurationModule(config);
		for (const { coverageMap, thresholds, name } of allThresholds) {
			const summaries = this.options.thresholds?.perFile ? coverageMap.files().map((file) => coverageMap.fileCoverageFor(file).toSummary()) : [coverageMap.getCoverageSummary()];
			const thresholdsToUpdate = [];
			for (const key of THRESHOLD_KEYS) {
				const threshold = thresholds[key] ?? 100;
				/**
				 * Positive thresholds are treated as minimum coverage percentages (X means: X% of lines must be covered),
				 * while negative thresholds are treated as maximum uncovered counts (-X means: X lines may be uncovered).
				 */
				if (threshold >= 0) {
					// Worst percentage across summaries decides the new minimum
					const actual = Math.min(...summaries.map((summary) => summary[key].pct));
					if (actual > threshold) thresholdsToUpdate.push([key, actual]);
				} else {
					const absoluteThreshold = threshold * -1;
					// Worst uncovered count across summaries decides the new maximum
					const actual = Math.max(...summaries.map((summary) => summary[key].total - summary[key].covered));
					if (actual < absoluteThreshold) {
						// If everything was covered, set new threshold to 100% (since a threshold of 0 would be considered as 0%)
						const updatedThreshold = actual === 0 ? 100 : actual * -1;
						thresholdsToUpdate.push([key, updatedThreshold]);
					}
				}
			}
			if (thresholdsToUpdate.length === 0) continue;
			updatedThresholds = true;
			const thresholdFormatter = typeof this.options.thresholds?.autoUpdate === "function" ? this.options.thresholds?.autoUpdate : (value) => value;
			for (const [threshold, newValue] of thresholdsToUpdate) {
				const formattedValue = thresholdFormatter(newValue);
				if (name === GLOBAL_THRESHOLDS_KEY) config.test.coverage.thresholds[threshold] = formattedValue;
				else {
					const glob = config.test.coverage.thresholds[name];
					glob[threshold] = formattedValue;
				}
			}
		}
		if (updatedThresholds) {
			this.ctx.logger.log("Updating thresholds to configuration file. You may want to push with updated coverage thresholds.");
			onUpdate();
		}
	}
	/**
	 * Merges multiple raw coverage objects into one map and generates reports.
	 */
	async mergeReports(coverageMaps) {
		const coverageMap = this.createCoverageMap();
		for (const coverage of coverageMaps) coverageMap.merge(coverage);
		await this.generateReports(coverageMap, true);
	}
	/**
	 * Whether any of the resolved reporters writes to the terminal.
	 */
	hasTerminalReporter(reporters) {
		return reporters.some(([reporter]) => reporter === "text" || reporter === "text-summary" || reporter === "text-lcov" || reporter === "teamcity");
	}
	/**
	 * Splits `array` into chunks of at most `size` elements.
	 */
	toSlices(array, size) {
		return array.reduce((chunks, item) => {
			const index = Math.max(0, chunks.length - 1);
			const lastChunk = chunks[index] || [];
			chunks[index] = lastChunk;
			if (lastChunk.length >= size) chunks.push([item]);
			else lastChunk.push(item);
			return chunks;
		}, []);
	}
	// TODO: should this be abstracted in `project`/`vitest` instead?
	// if we decide to keep `viteModuleRunner: false`, we will need to abstract transformation in both main thread and tests
	// custom --import=module.registerHooks need to be transformed as well somehow
	async transformFile(url, project, viteEnvironment) {
		const config = project.config;
		// vite is disabled, should transform manually if possible
		if (config.experimental.viteModuleRunner === false) {
			const pathname = url.split("?")[0];
			const filename = pathname.startsWith("file://") ? fileURLToPath(pathname) : pathname;
			const extension = path.extname(filename);
			// Non-TypeScript files are served as-is, without transforms
			if (!(extension === ".ts" || extension === ".mts" || extension === ".cts")) return {
				code: await promises.readFile(filename, "utf-8"),
				map: null
			};
			if (!module$1.stripTypeScriptTypes) throw new Error(`Cannot parse '${url}' because "module.stripTypeScriptTypes" is not supported. TypeScript coverage requires Node.js 22.15 or higher. This is NOT a bug of Vitest.`);
			// Use "transform" mode when Node's transform-types flag is enabled anywhere in the environment
			const isTransform = process.execArgv.includes("--experimental-transform-types") || config.execArgv.includes("--experimental-transform-types") || process.env.NODE_OPTIONS?.includes("--experimental-transform-types") || config.env?.NODE_OPTIONS?.includes("--experimental-transform-types");
			const code = await promises.readFile(filename, "utf-8");
			return {
				code: module$1.stripTypeScriptTypes(code, { mode: isTransform ? "transform" : "strip" }),
				map: null
			};
		}
		if (project.isBrowserEnabled() || viteEnvironment === "__browser__") {
			const result = await (project.browser?.vite.environments.client || project.vite.environments.client).transformRequest(url);
			if (result) return result;
		}
		return project.vite.environments[viteEnvironment].transformRequest(url);
	}
	/**
	 * Builds a transformer for files no test imported, trying each project
	 * whose root contains the file until one transform succeeds.
	 */
	createUncoveredFileTransformer(ctx) {
		const projects = new Set([...ctx.projects, ctx.getRootProject()]);
		return async (filename) => {
			let lastError;
			for (const project of projects) {
				const root = project.config.root;
				// On Windows root doesn't start with "/" while filenames do
				if (!filename.startsWith(root) && !filename.startsWith(`/${root}`)) continue;
				try {
					const environment = project.config.environment;
					const viteEnvironment = environment === "jsdom" || environment === "happy-dom" ? "client" : "ssr";
					return await this.transformFile(filename, project, viteEnvironment);
				} catch (err) {
					lastError = err;
				}
			}
			// All vite servers failed to transform the file
			throw lastError;
		};
	}
}
/**
 * Narrow down `unknown` glob thresholds to resolved ones
 */
function resolveGlobThresholds(thresholds) {
  // Non-object input (including null) resolves to no thresholds at all.
  if (!thresholds || typeof thresholds !== "object") return {};
  // The shorthand `{ 100: true }` enables 100% for every metric.
  if (100 in thresholds && thresholds[100] === true) {
    return {
      lines: 100,
      branches: 100,
      functions: 100,
      statements: 100
    };
  }
  // Keep only numeric values; everything else resolves to undefined.
  const resolved = {};
  for (const metric of ["lines", "branches", "functions", "statements"]) {
    resolved[metric] = metric in thresholds && typeof thresholds[metric] === "number" ? thresholds[metric] : undefined;
  }
  return resolved;
}
// Validate that the parsed configuration exposes `test.coverage.thresholds` as
// an object; any missing level or wrong type is reported as a readable error.
function assertConfigurationModule(config) {
  try {
    // Intentional unsafe nested access: a missing level throws and is caught below.
    const thresholds = config.test.coverage.thresholds;
    if (typeof thresholds !== "object") throw new TypeError("Expected config.test.coverage.thresholds to be an object");
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    throw new Error(`Unable to parse thresholds from configuration file: ${message}`);
  }
}
// Locate the object literal holding the test config inside a magicast module,
// supporting plain objects, defineConfig(...) and mergeConfig(...) shapes.
function resolveConfig(configModule) {
  const mod = configModule.exports.default;
  try {
    // Check for "export default { test: {...} }"
    if (mod.$type === "object") return mod;
    // "export default defineConfig(...)"
    const defineResult = resolveDefineConfig(mod);
    if (defineResult) return defineResult;
    // "export default mergeConfig(..., defineConfig(...))"
    if (mod.$type === "function-call" && mod.$callee === "mergeConfig") {
      const mergeResult = resolveMergeConfig(mod);
      if (mergeResult) return mergeResult;
    }
  } catch (error) {
    // Reduce magicast's verbose errors to readable ones
    throw new Error(error instanceof Error ? error.message : String(error));
  }
  throw new Error("Failed to update coverage thresholds. Configuration file is too complex.");
}
// Unwrap a magicast `defineConfig(...)` call into its config object literal;
// returns undefined when the shape is not recognized.
function resolveDefineConfig(mod) {
  if (mod.$type !== "function-call" || mod.$callee !== "defineConfig") return undefined;
  const [firstArg] = mod.$args;
  // "export default defineConfig({ test: {...} })"
  if (firstArg.$type === "object") return firstArg;
  if (firstArg.$type === "arrow-function-expression") {
    // "export default defineConfig(() => ({ test: {...} }))"
    if (firstArg.$body.$type === "object") return firstArg.$body;
    // "export default defineConfig(() => mergeConfig({...}, ...))"
    return resolveMergeConfig(firstArg.$body);
  }
}
// Scan the arguments of a magicast `mergeConfig(...)` call for a nested
// defineConfig object; returns undefined when none is found.
function resolveMergeConfig(mod) {
  const isMergeCall = mod.$type === "function-call" && mod.$callee === "mergeConfig";
  if (!isMergeCall) return undefined;
  for (const arg of mod.$args) {
    const resolved = resolveDefineConfig(arg);
    if (resolved) return resolved;
  }
}
| export { BaseCoverageProvider as B, RandomSequencer as R, BaseSequencer as a, resolveApiServerConfig as b, getCoverageProvider as g, hash as h, isBrowserEnabled as i, resolveConfig$1 as r }; |
| import nodeos__default from 'node:os'; | ||
| import './env.D4Lgay0q.js'; | ||
| import { isCI, isAgent } from 'std-env'; | ||
// Default test-file glob and the globs always excluded from discovery.
const defaultInclude = ["**/*.{test,spec}.?(c|m)[jt]s?(x)"];
const defaultExclude = ["**/node_modules/**", "**/.git/**"];
// Defaults applied when running in benchmark mode.
const benchmarkConfigDefaults = {
  include: ["**/*.{bench,benchmark}.?(c|m)[jt]s?(x)"],
  exclude: defaultExclude,
  includeSource: [],
  reporters: ["default"],
  includeSamples: false
};
// These are the generic defaults for coverage. Providers may also set some provider specific defaults.
const coverageConfigDefaults = {
  provider: "v8",
  enabled: false,
  clean: true,
  cleanOnRerun: true,
  reportsDirectory: "./coverage",
  exclude: [],
  reportOnFailure: false,
  reporter: [
    ["text", {}],
    ["html", {}],
    ["clover", {}],
    ["json", {}]
  ],
  allowExternal: false,
  excludeAfterRemap: false,
  // Cap remapping concurrency at 20 regardless of available cores.
  processingConcurrency: Math.min(20, nodeos__default.availableParallelism?.() ?? nodeos__default.cpus().length)
};
const fakeTimersDefaults = {
  loopLimit: 1e4,
  shouldClearNativeTimers: true
};
// Top-level defaults for the `test` config; frozen so user code cannot mutate them.
const configDefaults = Object.freeze({
  // `.only` is rejected on CI to avoid accidentally skipping the suite.
  allowOnly: !isCI,
  isolate: true,
  // Watch mode only makes sense in an interactive, non-CI, non-agent terminal.
  watch: !isCI && process.stdin.isTTY && !isAgent,
  globals: false,
  environment: "node",
  clearMocks: false,
  restoreMocks: false,
  mockReset: false,
  unstubGlobals: false,
  unstubEnvs: false,
  include: defaultInclude,
  exclude: defaultExclude,
  teardownTimeout: 1e4,
  forceRerunTriggers: ["**/package.json/**", "**/{vitest,vite}.config.*/**"],
  update: false,
  reporters: [],
  silent: false,
  hideSkippedTests: false,
  api: false,
  ui: false,
  uiBase: "/__vitest__/",
  open: !isCI,
  css: { include: [] },
  coverage: coverageConfigDefaults,
  fakeTimers: fakeTimersDefaults,
  maxConcurrency: 5,
  dangerouslyIgnoreUnhandledErrors: false,
  typecheck: {
    checker: "tsc",
    include: ["**/*.{test,spec}-d.?(c|m)[jt]s?(x)"],
    exclude: defaultExclude
  },
  // Milliseconds above which a test is flagged as slow in reporters.
  slowTestThreshold: 300,
  disableConsoleIntercept: false,
  detectAsyncLeaks: false
});
| export { coverageConfigDefaults as a, defaultInclude as b, configDefaults as c, defaultExclude as d, benchmarkConfigDefaults as e }; |
| import { g as globalApis } from './constants.CPYnjOGj.js'; | ||
| import { i as index } from './index.DlDSLQD3.js'; | ||
| import './test.CTcmp4Su.js'; | ||
| import '@vitest/runner'; | ||
| import '@vitest/utils/helpers'; | ||
| import '@vitest/utils/timers'; | ||
| import './benchmark.D0SlKNbZ.js'; | ||
| import '@vitest/runner/utils'; | ||
| import './utils.BX5Fg8C4.js'; | ||
| import '@vitest/expect'; | ||
| import '@vitest/utils/error'; | ||
| import 'pathe'; | ||
| import '@vitest/snapshot'; | ||
| import '@vitest/spy'; | ||
| import '@vitest/utils/offset'; | ||
| import '@vitest/utils/source-map'; | ||
| import './_commonjsHelpers.D26ty3Ew.js'; | ||
| import './rpc.MzXet3jl.js'; | ||
| import './index.Chj8NDwU.js'; | ||
| import './evaluatedModules.Dg1zASAC.js'; | ||
| import 'vite/module-runner'; | ||
| import 'expect-type'; | ||
// Expose every public vitest API (test, expect, vi, ...) on globalThis,
// used when `globals: true` is configured.
function registerApiGlobally() {
  for (const api of globalApis) {
    // @ts-expect-error I know what I am doing :P
    globalThis[api] = index[api];
  }
}
| export { registerApiGlobally }; |
| import fs from 'node:fs'; | ||
| import { getTasks, getFullName, getTests } from '@vitest/runner/utils'; | ||
| import * as pathe from 'pathe'; | ||
| import c from 'tinyrainbow'; | ||
| import { g as getStateSymbol, t as truncateString, F as F_RIGHT, D as DefaultReporter, f as formatProjectName, s as separator } from './index.DXMFO5MJ.js'; | ||
| import { stripVTControlCharacters } from 'node:util'; | ||
| import { notNullish } from '@vitest/utils/helpers'; | ||
// Build the JSON benchmark report (used for --compare baselines) from the
// finished test files: one entry per file, grouped by suite.
function createBenchmarkJsonReport(files) {
  const report = { files: [] };
  for (const file of files) {
    const groups = [];
    for (const task of getTasks(file)) {
      if (task?.type !== "suite") continue;
      const benchmarks = [];
      for (const child of task.tasks) {
        const result = child.meta.benchmark && child.result?.benchmark;
        if (result) {
          // Samples are dropped to keep the serialized report small.
          benchmarks.push({
            id: child.id,
            ...result,
            samples: []
          });
        }
      }
      if (benchmarks.length > 0) {
        groups.push({
          fullName: getFullName(task, " > "),
          benchmarks
        });
      }
    }
    report.files.push({
      filepath: file.filepath,
      groups
    });
  }
  return report;
}
// Flatten a benchmark JSON report into a lookup of bench id -> bench entry.
function flattenFormattedBenchmarkReport(report) {
  const flat = {};
  for (const file of report.files) {
    for (const group of file.groups) {
      for (const benchmark of group.benchmarks) {
        flat[benchmark.id] = benchmark;
      }
    }
  }
  return flat;
}
| const outputMap = /* @__PURE__ */ new WeakMap(); | ||
// Format a benchmark value for the results table: 4 decimal places for small
// values (< 100) where precision matters, 2 otherwise, with thousands
// separators inserted into the integer part.
// Fixed: dropped the redundant `String(...)` wrapper — Number#toFixed already
// returns a string.
function formatNumber(number) {
  const [integer, fraction] = number.toFixed(number < 100 ? 4 : 2).split(".");
  const grouped = integer.replace(/(?=(?:\d{3})+$)\B/g, ",");
  return fraction ? `${grouped}.${fraction}` : grouped;
}
// Column order of the benchmark results table; must stay in sync with
// renderBenchmarkItems, which produces cells in the same order.
const tableHead = [
  "name",
  "hz",
  "min",
  "max",
  "mean",
  "p75",
  "p99",
  "p995",
  "p999",
  "rme",
  "samples"
];
// Produce the table cells for one benchmark result, in tableHead order.
// Missing numeric fields are rendered as 0.
function renderBenchmarkItems(result) {
  const fmt = (value) => formatNumber(value || 0);
  return [
    result.name,
    fmt(result.hz),
    fmt(result.min),
    fmt(result.max),
    fmt(result.mean),
    fmt(result.p75),
    fmt(result.p99),
    fmt(result.p995),
    fmt(result.p999),
    `±${(result.rme || 0).toFixed(2)}%`,
    (result.sampleCount || 0).toString()
  ];
}
// Width of each table column: the longest cell (header included) after
// stripping ANSI escape sequences.
function computeColumnWidths(results) {
  const rows = [tableHead, ...results.map((result) => renderBenchmarkItems(result))];
  return tableHead.map((_, column) => {
    let width = 0;
    for (const row of rows) {
      width = Math.max(width, stripVTControlCharacters(row[column]).length);
    }
    return width;
  });
}
// Pad every cell of a row to its column width: the first column (names) is
// left-aligned, numeric columns are right-aligned.
function padRow(row, widths) {
  return row.map((cell, column) => {
    if (column === 0) return cell.padEnd(widths[0], " ");
    return cell.padStart(widths[column], " ");
  });
}
// Render the bold header row of the benchmark table, indented past the
// state-symbol gutter.
function renderTableHead(widths) {
  const cells = padRow(tableHead, widths).map((cell) => c.bold(cell));
  return " ".repeat(3) + cells.join(" ");
}
// Render one benchmark row with per-column colors: name plain, hz blue,
// timing percentiles cyan, rme/samples dimmed.
function renderBenchmark(result, widths) {
  const cells = padRow(renderBenchmarkItems(result), widths);
  const palette = [
    (cell) => cell,
    c.blue,
    c.cyan,
    c.cyan,
    c.cyan,
    c.cyan,
    c.cyan,
    c.cyan,
    c.cyan,
    c.dim,
    c.dim
  ];
  return cells.map((cell, column) => palette[column](cell)).join(" ");
}
// Render the benchmark results table (optionally with --compare baselines)
// for a list of tasks. Returns the joined multi-line string.
function renderTable(options) {
  const output = [];
  // bench task id -> { current result, optional baseline from --compare }.
  const benchMap = {};
  for (const task of options.tasks) {
    if (task.meta.benchmark && task.result?.benchmark) {
      benchMap[task.id] = {
        current: task.result.benchmark,
        baseline: options.compare?.[task.id]
      };
    }
  }
  const benchCount = Object.keys(benchMap).length;
  // Column widths must accommodate both current results and their baselines.
  const columnWidths = computeColumnWidths(Object.values(benchMap).flatMap((v) => [v.current, v.baseline]).filter(notNullish));
  let idx = 0;
  const padding = " ".repeat(1);
  for (const task of options.tasks) {
    const duration = task.result?.duration;
    const bench = benchMap[task.id];
    let prefix = "";
    // Print the column header once, before the first benchmark row.
    if (idx === 0 && task.meta?.benchmark) prefix += `${renderTableHead(columnWidths)}\n${padding}`;
    prefix += ` ${getStateSymbol(task)} `;
    let suffix = "";
    if (task.type === "suite") suffix += c.dim(` (${getTests(task).length})`);
    if (task.mode === "skip" || task.mode === "todo") suffix += c.dim(c.gray(" [skipped]"));
    if (duration != null) {
      // Slow tasks are highlighted in yellow.
      const color = duration > options.slowTestThreshold ? c.yellow : c.green;
      suffix += color(` ${Math.round(duration)}${c.dim("ms")}`);
    }
    if (options.showHeap && task.result?.heap != null) suffix += c.magenta(` ${Math.floor(task.result.heap / 1024 / 1024)} MB heap used`);
    if (bench) {
      let body = renderBenchmark(bench.current, columnWidths);
      if (options.compare && bench.baseline) {
        if (bench.current.hz) {
          const diff = bench.current.hz / bench.baseline.hz;
          const diffFixed = diff.toFixed(2);
          // Fixed: the original compared diffFixed against "1.0.0", which
          // Number#toFixed(2) can never produce, so the neutral gray branch was
          // dead code and a 1.00x result fell through to the red "slower" arrow.
          if (diffFixed === "1.00") body += c.gray(` [${diffFixed}x]`);
          else if (diff > 1) body += c.blue(` [${diffFixed}x] ⇑`);
          else body += c.red(` [${diffFixed}x] ⇓`);
        }
        output.push(padding + prefix + body + suffix);
        const bodyBaseline = renderBenchmark(bench.baseline, columnWidths);
        output.push(`${padding} ${bodyBaseline} ${c.dim("(baseline)")}`);
      } else {
        // Without a baseline, flag the fastest/slowest entries of the group.
        if (bench.current.rank === 1 && benchCount > 1) body += c.bold(c.green(" fastest"));
        if (bench.current.rank === benchCount && benchCount > 2) body += c.bold(c.gray(" slowest"));
        output.push(padding + prefix + body + suffix);
      }
    } else {
      output.push(padding + prefix + task.name + suffix);
    }
    // For non-passing tasks, append the last line of their captured output.
    if (task.result?.state !== "pass" && outputMap.get(task) != null) {
      let data = outputMap.get(task);
      if (typeof data === "string") {
        data = stripVTControlCharacters(data.trim().split("\n").filter(Boolean).pop());
        if (data === "") data = void 0;
      }
      if (data != null) {
        const out = ` ${" ".repeat(options.level)}${F_RIGHT} ${data}`;
        output.push(c.gray(truncateString(out, options.columns)));
      }
    }
    idx++;
  }
  return output.filter(Boolean).join("\n");
}
// Reporter for benchmark mode: renders per-suite result tables, ranks benches,
// and can read/write JSON baselines for `--compare` / `outputJson`.
class BenchmarkReporter extends DefaultReporter {
  // bench id -> baseline entry, loaded from the `benchmark.compare` JSON file.
  compare;
  async onInit(ctx) {
    super.onInit(ctx);
    if (this.ctx.config.benchmark?.compare) {
      const compareFile = pathe.resolve(this.ctx.config.root, this.ctx.config.benchmark?.compare);
      try {
        this.compare = flattenFormattedBenchmarkReport(JSON.parse(await fs.promises.readFile(compareFile, "utf-8")));
      } catch (e) {
        // Best effort: a missing or corrupt baseline only disables comparison.
        this.error(`Failed to read '${compareFile}'`, e);
      }
    }
  }
  onTaskUpdate(packs) {
    for (const pack of packs) {
      const task = this.ctx.state.idMap.get(pack[0]);
      // Once a suite is no longer running, rank its benchmarks by mean time
      // (rank 1 = fastest) for the "fastest"/"slowest" table badges.
      if (task?.type === "suite" && task.result?.state !== "run") task.tasks.filter((task) => task.result?.benchmark).sort((benchA, benchB) => benchA.result.benchmark.mean - benchB.result.benchmark.mean).forEach((bench, idx) => {
        bench.result.benchmark.rank = Number(idx) + 1;
      });
    }
  }
  onTestSuiteResult(testSuite) {
    super.onTestSuiteResult(testSuite);
    this.printSuiteTable(testSuite);
  }
  printTestModule(testModule) {
    this.printSuiteTable(testModule);
  }
  // Print the benchmark table for a module/suite once all of its benches finished.
  printSuiteTable(testTask) {
    const state = testTask.state();
    if (state === "pending" || state === "queued") return;
    const benches = testTask.task.tasks.filter((t) => t.meta.benchmark);
    const duration = testTask.task.result?.duration || 0;
    if (benches.length > 0 && benches.every((t) => t.result?.state !== "run" && t.result?.state !== "queued")) {
      let title = `\n ${getStateSymbol(testTask.task)} ${formatProjectName(testTask.project)}${getFullName(testTask.task, separator)}`;
      if (duration != null && duration > this.ctx.config.slowTestThreshold) title += c.yellow(` ${Math.round(duration)}${c.dim("ms")}`);
      this.log(title);
      this.log(renderTable({
        tasks: benches,
        level: 1,
        columns: this.ctx.logger.getColumns(),
        compare: this.compare,
        showHeap: this.ctx.config.logHeapUsage,
        slowTestThreshold: this.ctx.config.slowTestThreshold
      }));
    }
  }
  async onTestRunEnd(testModules, unhandledErrors, reason) {
    // NOTE(review): the super call is not awaited — confirm it is synchronous here.
    super.onTestRunEnd(testModules, unhandledErrors, reason);
    // write output for future comparison
    let outputFile = this.ctx.config.benchmark?.outputJson;
    if (outputFile) {
      outputFile = pathe.resolve(this.ctx.config.root, outputFile);
      const outputDirectory = pathe.dirname(outputFile);
      if (!fs.existsSync(outputDirectory)) await fs.promises.mkdir(outputDirectory, { recursive: true });
      const output = createBenchmarkJsonReport(testModules.map((t) => t.task.file));
      await fs.promises.writeFile(outputFile, JSON.stringify(output, null, 2));
      this.log(`Benchmark report written to ${outputFile}`);
    }
  }
}
// Same output as BenchmarkReporter, with DefaultReporter's `verbose` flag set.
class VerboseBenchmarkReporter extends BenchmarkReporter {
  verbose = true;
}
// Reporter name -> implementation, used to resolve benchmark reporter options.
const BenchmarkReportsMap = {
  default: BenchmarkReporter,
  verbose: VerboseBenchmarkReporter
};
| export { BenchmarkReporter as B, VerboseBenchmarkReporter as V, BenchmarkReportsMap as a }; |
| import { chai } from '@vitest/expect'; | ||
| import { createHook } from 'node:async_hooks'; | ||
| import { l as loadDiffConfig, a as loadSnapshotSerializers, t as takeCoverageInsideWorker } from './setup-common.B41N_kPE.js'; | ||
| import { r as rpc } from './rpc.MzXet3jl.js'; | ||
| import { g as getWorkerState } from './utils.BX5Fg8C4.js'; | ||
| import { T as TestRunner, N as NodeBenchmarkRunner } from './test.CTcmp4Su.js'; | ||
// Apply user-provided overrides (e.g. truncateThreshold) onto Chai's global
// config object in place.
function setupChaiConfig(config) {
  Object.assign(chai.config, config);
}
// Resolve the snapshot environment: a user-provided module when configured,
// otherwise the built-in Node implementation (lazy-loaded).
async function resolveSnapshotEnvironment(config, moduleRunner) {
  if (config.snapshotEnvironment) {
    const custom = await moduleRunner.import(config.snapshotEnvironment);
    if (!custom.default || typeof custom.default !== "object") throw new Error("Snapshot environment module must have a default export object with a shape of `SnapshotEnvironment`");
    return custom.default;
  }
  const { VitestNodeSnapshotEnvironment } = await import('./node.COQbm6gK.js');
  return new VitestNodeSnapshotEnvironment();
}
// async_hooks resource types that are internal/noisy and are never reported
// as async leaks.
const IGNORED_TYPES = new Set([
  "DNSCHANNEL",
  "ELDHISTOGRAM",
  "PerformanceObserver",
  "RANDOMBYTESREQUEST",
  "SIGNREQUEST",
  "STREAM_END_OF_STREAM",
  "TCPWRAP",
  "TIMERWRAP",
  "TLSWRAP",
  "ZLIB"
]);
// Install an async_hooks tracker that records where async resources created by
// `testFile` originate; the returned collector disables the hook and reports
// the resources that are still active (i.e. leaked).
function detectAsyncLeaks(testFile, projectName) {
  const resources = /* @__PURE__ */ new Map();
  const hook = createHook({
    init(asyncId, type, triggerAsyncId, resource) {
      if (IGNORED_TYPES.has(type)) return;
      let stack = "";
      const limit = Error.stackTraceLimit;
      // VitestModuleEvaluator's async wrapper of node:vm causes out-of-bound stack traces, simply skip it.
      // Crash fixed in https://github.com/vitejs/vite/pull/21585
      try {
        Error.stackTraceLimit = 100;
        stack = (/* @__PURE__ */ new Error("VITEST_DETECT_ASYNC_LEAKS")).stack || "";
      } catch {
        return;
      } finally {
        Error.stackTraceLimit = limit;
      }
      // If the resource was not created directly from the test file, attribute
      // it to the already-tracked resource that triggered it (if any).
      if (!stack.includes(testFile)) {
        const trigger = resources.get(triggerAsyncId);
        if (!trigger) return;
        stack = trigger.stack;
      }
      let isActive = isActiveDefault;
      if ("hasRef" in resource) {
        // WeakRef avoids keeping the resource alive just for the liveness probe.
        const ref = new WeakRef(resource);
        isActive = () => ref.deref()?.hasRef() ?? false;
      }
      resources.set(asyncId, {
        type,
        stack,
        projectName,
        filename: testFile,
        isActive
      });
    },
    destroy(asyncId) {
      // Promises are removed in promiseResolve instead of destroy.
      if (resources.get(asyncId)?.type !== "PROMISE") resources.delete(asyncId);
    },
    promiseResolve(asyncId) {
      resources.delete(asyncId);
    }
  });
  hook.enable();
  return async function collect() {
    // NOTE(review): this resolves with the `setImmediate` function itself on
    // the next microtask — it does not wait for pending immediates to run.
    // Confirm whether `new Promise((r) => setImmediate(r))` was intended.
    await Promise.resolve(setImmediate);
    hook.disable();
    const leaks = [];
    for (const resource of resources.values()) if (resource.isActive()) leaks.push({
      stack: resource.stack,
      type: resource.type,
      filename: resource.filename,
      projectName: resource.projectName
    });
    resources.clear();
    return leaks;
  };
}
// Fallback liveness probe for resources without a `hasRef()` method: assume
// the resource is still active.
function isActiveDefault() {
  return true;
}
// Resolve the test-runner class: a built-in one based on the mode, or the
// default export of a user-provided `config.runner` module.
// Throws when the custom runner module does not default-export a function/class.
async function getTestRunnerConstructor(config, moduleRunner) {
  if (!config.runner) return config.mode === "test" ? TestRunner : NodeBenchmarkRunner;
  const mod = await moduleRunner.import(config.runner);
  // Fixed: the original `!mod.default && typeof mod.default !== "function"`
  // only fired for falsy exports, so a truthy non-function default slipped
  // through to a confusing "is not a constructor" crash at instantiation time.
  if (!mod.default || typeof mod.default !== "function") throw new Error(`Runner must export a default function, but got ${typeof mod.default} imported from ${config.runner}`);
  return mod.default;
}
// Instantiate the resolved runner and wrap its lifecycle hooks so that custom
// runners get RPC reporting (task updates, annotations, artifacts, coverage,
// bail) without having to call the RPC layer themselves.
async function resolveTestRunner(config, moduleRunner, traces) {
  const testRunner = new (await (getTestRunnerConstructor(config, moduleRunner)))(config);
  // inject private executor to every runner
  Object.defineProperty(testRunner, "moduleRunner", {
    value: moduleRunner,
    enumerable: false,
    configurable: false
  });
  if (!testRunner.config) testRunner.config = config;
  if (!testRunner.importFile) throw new Error("Runner must implement \"importFile\" method.");
  if ("__setTraces" in testRunner) testRunner.__setTraces(traces);
  // Load diff config and snapshot serializers concurrently; only the diff
  // options are needed here (serializers register themselves as a side effect).
  const [diffOptions] = await Promise.all([loadDiffConfig(config, moduleRunner), loadSnapshotSerializers(config, moduleRunner)]);
  testRunner.config.diffOptions = diffOptions;
  // patch some methods, so custom runners don't need to call RPC
  const originalOnTaskUpdate = testRunner.onTaskUpdate;
  testRunner.onTaskUpdate = async (task, events) => {
    const p = rpc().onTaskUpdate(task, events);
    await originalOnTaskUpdate?.call(testRunner, task, events);
    return p;
  };
  // patch some methods, so custom runners don't need to call RPC
  const originalOnTestAnnotate = testRunner.onTestAnnotate;
  testRunner.onTestAnnotate = async (test, annotation) => {
    const p = rpc().onTaskArtifactRecord(test.id, {
      type: "internal:annotation",
      location: annotation.location,
      annotation
    });
    // A custom runner may override the annotation; otherwise use vitest's result.
    const overriddenResult = await originalOnTestAnnotate?.call(testRunner, test, annotation);
    const vitestResult = await p;
    return overriddenResult || vitestResult.annotation;
  };
  const originalOnTestArtifactRecord = testRunner.onTestArtifactRecord;
  testRunner.onTestArtifactRecord = async (test, artifact) => {
    const p = rpc().onTaskArtifactRecord(test.id, artifact);
    const overriddenResult = await originalOnTestArtifactRecord?.call(testRunner, test, artifact);
    const vitestResult = await p;
    return overriddenResult || vitestResult;
  };
  const originalOnCollectStart = testRunner.onCollectStart;
  testRunner.onCollectStart = async (file) => {
    await rpc().onQueued(file);
    await originalOnCollectStart?.call(testRunner, file);
  };
  const originalOnCollected = testRunner.onCollected;
  testRunner.onCollected = async (files) => {
    const state = getWorkerState();
    // Attach prepare/environment timings to the collected files.
    files.forEach((file) => {
      file.prepareDuration = state.durations.prepare;
      file.environmentLoad = state.durations.environment;
      // should be collected only for a single test file in a batch
      state.durations.prepare = 0;
      state.durations.environment = 0;
    });
    // Strip function conditions from retry config before sending via RPC
    // Functions cannot be cloned by structured clone algorithm
    const sanitizeRetryConditions = (task) => {
      if (task.retry && typeof task.retry === "object" && typeof task.retry.condition === "function")
        // Remove function condition - it can't be serialized
        task.retry = {
          ...task.retry,
          condition: void 0
        };
      if (task.tasks) task.tasks.forEach(sanitizeRetryConditions);
    };
    files.forEach(sanitizeRetryConditions);
    rpc().onCollected(files);
    await originalOnCollected?.call(testRunner, files);
  };
  const originalOnAfterRun = testRunner.onAfterRunFiles;
  testRunner.onAfterRunFiles = async (files) => {
    const state = getWorkerState();
    // Collect in-worker coverage and report it alongside the finished files.
    const coverage = await takeCoverageInsideWorker(config.coverage, moduleRunner);
    if (coverage) rpc().onAfterSuiteRun({
      coverage,
      testFiles: files.map((file) => file.name).sort(),
      environment: state.environment.viteEnvironment || state.environment.name,
      projectName: state.ctx.projectName
    });
    await originalOnAfterRun?.call(testRunner, files);
  };
  const originalOnAfterRunTask = testRunner.onAfterRunTask;
  testRunner.onAfterRunTask = async (test) => {
    // Bail: cancel the run once the configured number of failures is reached.
    if (config.bail && test.result?.state === "fail") {
      if (1 + await rpc().getCountOfFailedTests() >= config.bail) {
        rpc().onCancel("test-failure");
        testRunner.cancel?.("test-failure");
      }
    }
    await originalOnAfterRunTask?.call(testRunner, test);
  };
  return testRunner;
}
| export { resolveSnapshotEnvironment as a, detectAsyncLeaks as d, resolveTestRunner as r, setupChaiConfig as s }; |
| import { v as vi, N as NodeBenchmarkRunner, T as TestRunner, a as assert, c as createExpect, g as globalExpect, i as inject, s as should, b as vitest } from './test.CTcmp4Su.js'; | ||
| import { b as bench } from './benchmark.D0SlKNbZ.js'; | ||
| import { V as VitestEvaluatedModules } from './evaluatedModules.Dg1zASAC.js'; | ||
| import { expectTypeOf } from 'expect-type'; | ||
| import { afterAll, afterEach, aroundAll, aroundEach, beforeAll, beforeEach, describe, it, onTestFailed, onTestFinished, recordArtifact, suite, test } from '@vitest/runner'; | ||
| import { chai } from '@vitest/expect'; | ||
// `assertType` is a no-op at runtime: type assertions are purely compile-time.
const assertType = function assertType() {};
// Frozen namespace object re-exporting the public vitest API surface.
var index = /*#__PURE__*/Object.freeze({
  __proto__: null,
  BenchmarkRunner: NodeBenchmarkRunner,
  EvaluatedModules: VitestEvaluatedModules,
  TestRunner: TestRunner,
  afterAll: afterAll,
  afterEach: afterEach,
  aroundAll: aroundAll,
  aroundEach: aroundEach,
  assert: assert,
  assertType: assertType,
  beforeAll: beforeAll,
  beforeEach: beforeEach,
  bench: bench,
  chai: chai,
  createExpect: createExpect,
  describe: describe,
  expect: globalExpect,
  expectTypeOf: expectTypeOf,
  inject: inject,
  it: it,
  onTestFailed: onTestFailed,
  onTestFinished: onTestFinished,
  recordArtifact: recordArtifact,
  should: should,
  suite: suite,
  test: test,
  vi: vi,
  vitest: vitest
});
| export { assertType as a, index as i }; |
Sorry, the diff of this file is too big to display
| import { i as init } from './init.DICorXCo.js'; | ||
// This module must run as a node:child_process fork with an IPC channel.
if (!process.send) throw new Error("Expected worker to be run in node:child_process");
// Store globals in case tests overwrite them
const processExit = process.exit.bind(process);
const processSend = process.send.bind(process);
const processOn = process.on.bind(process);
const processOff = process.off.bind(process);
const processRemoveAllListeners = process.removeAllListeners.bind(process);
// Work-around for nodejs/node#55094
if (process.execArgv.some((execArg) => execArg.startsWith("--prof") || execArg.startsWith("--cpu-prof") || execArg.startsWith("--heap-prof") || execArg.startsWith("--diagnostic-dir"))) processOn("SIGTERM", () => processExit());
// Exit fast if the IPC channel to the main process has already closed (see onError).
processOn("error", onError);
// Wire the generic worker protocol onto the child process IPC channel.
function workerInit(options) {
  const { runTests } = options;
  init({
    post: (message) => processSend(message),
    on: (callback) => processOn("message", callback),
    off: (callback) => processOff("message", callback),
    teardown: () => {
      processRemoveAllListeners("message");
      processOff("error", onError);
    },
    runTests: (state, traces) => executeTests("run", state, traces),
    collectTests: (state, traces) => executeTests("collect", state, traces),
    setup: options.setup
  });
  // Run the requested phase and always restore `process.exit`, which a test
  // may have stubbed or overwritten.
  async function executeTests(method, state, traces) {
    try {
      await runTests(method, state, traces);
    } finally {
      process.exit = processExit;
    }
  }
}
// Prevent leaving worker in loops where it tries to send message to closed main
// thread, errors, and tries to send the error.
function onError(error) {
  // ERR_IPC_CHANNEL_CLOSED / EPIPE mean the parent is gone — exit immediately
  // via the saved process.exit so a test override cannot block shutdown.
  if (error?.code === "ERR_IPC_CHANNEL_CLOSED" || error?.code === "EPIPE") processExit(1);
}
| export { workerInit as w }; |
| import { isMainThread, parentPort } from 'node:worker_threads'; | ||
| import { i as init } from './init.DICorXCo.js'; | ||
| if (isMainThread || !parentPort) throw new Error("Expected worker to be run in node:worker_threads"); | ||
// Wire the generic worker protocol onto the worker_threads message port.
function workerInit(options) {
  const { runTests } = options;
  init({
    post: (message) => parentPort.postMessage(message),
    on: (listener) => parentPort.on("message", listener),
    off: (listener) => parentPort.off("message", listener),
    teardown: () => parentPort.removeAllListeners("message"),
    runTests: async (state, traces) => runTests("run", state, traces),
    collectTests: async (state, traces) => runTests("collect", state, traces),
    setup: options.setup
  });
}
| export { workerInit as w }; |
| import { readFileSync } from 'node:fs'; | ||
| import { isBuiltin } from 'node:module'; | ||
| import { pathToFileURL } from 'node:url'; | ||
| import { resolve } from 'pathe'; | ||
| import { ModuleRunner, EvaluatedModules } from 'vite/module-runner'; | ||
| import { b as VitestTransport } from './startVitestModuleRunner.C3ZR-4J3.js'; | ||
| import { e as environments } from './index.EY6TCHpo.js'; | ||
| import { serializeValue } from '@vitest/utils/serialize'; | ||
| import { serializeError } from '@vitest/utils/error'; | ||
| import { T as Traces } from './traces.CCmnQaNT.js'; | ||
| import { o as onCancel, a as rpcDone, c as createRuntimeRpc } from './rpc.MzXet3jl.js'; | ||
| import { createStackString, parseStacktrace } from '@vitest/utils/source-map'; | ||
| import { s as setupInspect } from './inspector.CvyFGlXm.js'; | ||
| import { V as VitestEvaluatedModules } from './evaluatedModules.Dg1zASAC.js'; | ||
| import { E as EnvironmentTeardownError } from './utils.BX5Fg8C4.js'; | ||
// True when `env` names one of Vitest's bundled environments. Uses `in`, so
// inherited keys of the environments map would also match.
function isBuiltinEnvironment(env) {
  return env in environments;
}
const isWindows = process.platform === "win32";
// Cache of per-project-root environment module runners (see createEnvironmentLoader).
const _loaders = /* @__PURE__ */ new Map();
// Return a cached ModuleRunner for loading environment modules for the given
// project root, recreating it if the cached one was closed. Module fetching
// and resolution are delegated to the main process over `rpc`.
function createEnvironmentLoader(root, rpc) {
  const cachedLoader = _loaders.get(root);
  if (!cachedLoader || cachedLoader.isClosed()) {
    _loaders.delete(root);
    const moduleRunner = new ModuleRunner({
      hmr: false,
      sourcemapInterceptor: "prepareStackTrace",
      transport: new VitestTransport({
        async fetchModule(id, importer, options) {
          const result = await rpc.fetch(id, importer, "__vitest__", options);
          // "cached" results point at a temp file containing the transformed code.
          if ("cached" in result) return {
            code: readFileSync(result.tmp, "utf-8"),
            ...result
          };
          if (isWindows && "externalize" in result)
            // TODO: vitest returns paths for external modules, but Vite returns file://
            // https://github.com/vitejs/vite/pull/20449
            result.externalize = isBuiltin(id) || /^(?:node:|data:|http:|https:|file:)/.test(id) ? result.externalize : pathToFileURL(result.externalize).toString();
          return result;
        },
        async resolveId(id, importer) {
          return rpc.resolve(id, importer, "__vitest__");
        }
      }, new EvaluatedModules(), /* @__PURE__ */ new WeakMap())
    });
    _loaders.set(root, moduleRunner);
  }
  return _loaders.get(root);
}
/**
 * Imports a test environment with native `import()` (no Vite transform).
 * Path-like names ("./..." or "/...") resolve against `root`; anything else
 * resolves as the `vitest-environment-<name>` package relative to `root`.
 */
async function loadNativeEnvironment(name, root, traces) {
	const isPathLike = name[0] === "." || name[0] === "/";
	let packageId;
	if (isPathLike) packageId = pathToFileURL(resolve(root, name)).toString();
	else packageId = import.meta.resolve(`vitest-environment-${name}`, pathToFileURL(root).toString());
	const pkg = await traces.$("vitest.runtime.environment.import", () => import(packageId));
	return resolveEnvironmentFromModule(name, packageId, pkg);
}
/**
 * Validates a dynamically imported environment module and returns its default export.
 * @param name      environment name as requested by the user config
 * @param packageId resolved module id/url (only used in error messages)
 * @param pkg       the imported module namespace object
 * @returns the environment object (`pkg.default`), with `viteEnvironment`
 * backfilled from the deprecated `transformMode` when necessary
 * @throws {TypeError} when there is no object default export, or when
 * `transformMode` is set to something other than "web"/"ssr"
 */
function resolveEnvironmentFromModule(name, packageId, pkg) {
	if (!pkg || !pkg.default || typeof pkg.default !== "object") throw new TypeError(`Environment "${name}" is not a valid environment. Path "${packageId}" should export default object with a "setup" or/and "setupVM" method.`);
	const environment = pkg.default;
	// message fix: transformMode is a property, not a method
	if (environment.transformMode != null && environment.transformMode !== "web" && environment.transformMode !== "ssr") throw new TypeError(`Environment "${name}" is not a valid environment. Path "${packageId}" should export default object with a "transformMode" property equal to "ssr" or "web", received "${environment.transformMode}".`);
	if (environment.transformMode) {
		// message fix: "This options was" -> "This option was"
		console.warn(`The Vitest environment ${environment.name} defines the "transformMode". This option was deprecated in Vitest 4 and will be removed in the next major version. Please, use "viteEnvironment" instead.`);
		// keep for backwards compat
		environment.viteEnvironment ??= environment.transformMode === "ssr" ? "ssr" : "client";
	}
	return environment;
}
/**
 * Resolves the environment object for a test run. Builtin environments are
 * returned directly; custom ones are imported either natively or through the
 * cached per-root Vite environment loader.
 * @returns `{ environment }`, plus `loader` when the Vite loader was used
 */
async function loadEnvironment(name, root, rpc, traces, viteModuleRunner) {
	if (isBuiltinEnvironment(name)) return { environment: environments[name] };
	if (!viteModuleRunner) {
		const environment = await loadNativeEnvironment(name, root, traces);
		return { environment };
	}
	const loader = createEnvironmentLoader(root, rpc);
	let packageId;
	if (name[0] === "." || name[0] === "/") packageId = resolve(root, name);
	else {
		const resolved = await traces.$("vitest.runtime.environment.resolve", () => rpc.resolve(`vitest-environment-${name}`, void 0, "__vitest__"));
		packageId = resolved?.id ?? resolve(root, name);
	}
	const pkg = await traces.$("vitest.runtime.environment.import", () => loader.import(packageId));
	return {
		environment: resolveEnvironmentFromModule(name, packageId, pkg),
		loader
	};
}
// callbacks registered by the current worker, awaited during teardown
const cleanupListeners = /* @__PURE__ */ new Set();
// subscribers notified whenever a module runner is created
const moduleRunnerListeners = /* @__PURE__ */ new Set();
/** Register a callback to run (and be awaited) during worker teardown. */
function onCleanup(cb) {
	cleanupListeners.add(cb);
}
/** Invoke every registered cleanup callback and wait for all of them. */
async function cleanup() {
	const pending = Array.from(cleanupListeners, (listener) => listener());
	await Promise.all(pending);
}
/** Subscribe to module-runner creation notifications. */
function onModuleRunner(cb) {
	moduleRunnerListeners.add(cb);
}
/** Notify every subscriber that a module runner was created. */
function emitModuleRunner(moduleRunner) {
	for (const listener of moduleRunnerListeners) listener(moduleRunner);
}
// Store globals in case tests overwrite them
const processListeners = process.listeners.bind(process);
const processOn = process.on.bind(process);
const processOff = process.off.bind(process);
// teardown callbacks for handlers installed by the previous invocation
const dispose = [];
/**
 * Installs process-wide uncaughtException/unhandledRejection handlers that
 * forward otherwise-unhandled errors to the main thread over RPC. Errors are
 * ignored when user code registered its own listener for the event.
 * Re-invoking replaces the previously installed handlers.
 * @param state getter returning the current worker state
 */
function listenForErrors(state) {
	for (const fn of dispose) fn();
	dispose.length = 0;
	const report = (err, type, event) => {
		const worker = state();
		// if there is another listener, assume that it's handled by user code
		// one is Vitest's own listener
		if (processListeners(event).length > 1) return;
		const error = serializeValue(err);
		if (typeof error === "object" && error != null) {
			// annotate with the currently running test/file so the main thread can attribute it
			error.VITEST_TEST_NAME = worker.current?.type === "test" ? worker.current.name : void 0;
			if (worker.filepath) error.VITEST_TEST_PATH = worker.filepath;
		}
		state().rpc.onUnhandledError(error, type);
	};
	const onException = (e) => report(e, "Uncaught Exception", "uncaughtException");
	const onRejection = (e) => report(e, "Unhandled Rejection", "unhandledRejection");
	processOn("uncaughtException", onException);
	processOn("unhandledRejection", onRejection);
	dispose.push(() => {
		processOff("uncaughtException", onException);
		processOff("unhandledRejection", onRejection);
	});
}
// module ids currently being resolved; shared across runs so concurrent
// resolution of the same module is deduplicated
const resolvingModules = /* @__PURE__ */ new Set();
/**
 * Builds the worker state for one test-file batch and delegates execution
 * (or collection) to the pool-specific worker implementation.
 * @param method "run" to execute tests, "collect" to only gather them
 * @param ctx    context sent from the main thread (config, files, rpc, ...)
 * @param worker pool-specific worker implementation exposing runTests/collectTests
 * @param traces telemetry helper
 */
async function execute(method, ctx, worker, traces) {
	const prepareStart = performance.now();
	const cleanups = [setupInspect(ctx)];
	// RPC is used to communicate between worker (be it a thread worker or child process or a custom implementation) and the main thread
	const rpc = ctx.rpc;
	try {
		// do not close the RPC channel so that we can get the error messages sent to the main thread
		cleanups.push(async () => {
			// reject in-flight rpc calls so awaiting callers don't hang during teardown
			await Promise.all(rpc.$rejectPendingCalls(({ method, reject }) => {
				reject(new EnvironmentTeardownError(`[vitest-worker]: Closing rpc while "${method}" was pending`));
			}));
		});
		// state passed to the runtime for this batch; `environment` and
		// `durations.environment` are filled in later by the worker
		const state = {
			ctx,
			evaluatedModules: new VitestEvaluatedModules(),
			resolvingModules,
			moduleExecutionInfo: /* @__PURE__ */ new Map(),
			config: ctx.config,
			environment: null,
			durations: {
				environment: 0,
				prepare: prepareStart
			},
			rpc,
			onCancel,
			onCleanup: onCleanup,
			providedContext: ctx.providedContext,
			// normalizes stack traces before they are reported
			onFilterStackTrace(stack) {
				return createStackString(parseStacktrace(stack));
			},
			metaEnv: createImportMetaEnvProxy()
		};
		const methodName = method === "collect" ? "collectTests" : "runTests";
		if (!worker[methodName] || typeof worker[methodName] !== "function") throw new TypeError(`Test worker should expose "runTests" method. Received "${typeof worker.runTests}".`);
		await worker[methodName](state, traces);
	} finally {
		// flush pending rpc traffic and run cleanups; failures here must not mask test results
		await rpcDone().catch(() => {});
		await Promise.all(cleanups.map((fn) => fn())).catch(() => {});
	}
}
/** Executes the test files described by `ctx` in this worker. */
function run(ctx, worker, traces) {
	return execute("run", ctx, worker, traces);
}
/** Collects (but does not run) the tests described by `ctx`. */
function collect(ctx, worker, traces) {
	return execute("collect", ctx, worker, traces);
}
/** Awaits every cleanup callback registered via `onCleanup`. */
async function teardown() {
	await cleanup();
}
// keep a reference in case tests replace `process.env` itself
const env = process.env;
/**
 * Creates the object backing `import.meta.env` inside tests. Reads and
 * writes pass straight through to `process.env`; DEV/PROD/SSR are exposed
 * as booleans and stored as "1"/"" strings.
 */
function createImportMetaEnvProxy() {
	// packages/vitest/src/node/plugins/index.ts:146
	const booleanKeys = new Set([
		"DEV",
		"PROD",
		"SSR"
	]);
	return new Proxy(env, {
		get(_target, key) {
			if (typeof key !== "string") return void 0;
			return booleanKeys.has(key) ? !!process.env[key] : process.env[key];
		},
		set(_target, key, value) {
			if (typeof key === "string") {
				// boolean flags are persisted as "1" (truthy) or "" (falsy)
				if (booleanKeys.has(key)) process.env[key] = value ? "1" : "";
				else process.env[key] = value;
			}
			return true;
		}
	});
}
// marker attached to every message this worker sends back to the pool
const __vitest_worker_response__ = true;
// bound early in case tests stub `process.memoryUsage`
const memoryUsage = process.memoryUsage.bind(process);
// whether "testfileFinished" responses should include heap usage
let reportMemory = false;
// lazily created telemetry helper, reused across runs in this worker
let traces;
/** @experimental */
// Wires a pool-specific worker implementation into the worker message
// protocol: "start" -> setup, "run"/"collect" -> execute or collect tests,
// "stop" -> teardown. Every response carries __vitest_worker_response__.
function init(worker) {
	worker.on(onMessage);
	if (worker.onModuleRunner) onModuleRunner(worker.onModuleRunner);
	let runPromise;
	let isRunning = false;
	let workerTeardown;
	let setupContext;
	// post a response through the worker, applying its custom serializer if any
	function send(response) {
		worker.post(worker.serialize ? worker.serialize(response) : response);
	}
	async function onMessage(rawMessage) {
		const message = worker.deserialize ? worker.deserialize(rawMessage) : rawMessage;
		// ignore anything that is not part of the worker protocol
		if (message?.__vitest_worker_request__ !== true) return;
		switch (message.type) {
			case "start": {
				process.env.VITEST_POOL_ID = String(message.poolId);
				process.env.VITEST_WORKER_ID = String(message.workerId);
				reportMemory = message.options.reportMemory;
				traces ??= await new Traces({
					enabled: message.traces.enabled,
					sdkPath: message.traces.sdkPath
				}).waitInit();
				const { environment, config, pool } = message.context;
				const context = traces.getContextFromCarrier(message.traces.otelCarrier);
				// record telemetry as part of "start"
				traces.recordInitSpan(context);
				try {
					setupContext = {
						environment,
						config,
						pool,
						rpc: createRuntimeRpc(worker),
						projectName: config.name || "",
						traces
					};
					// worker.setup may return a teardown callback invoked on "stop"
					workerTeardown = await traces.$("vitest.runtime.setup", { context }, () => worker.setup?.(setupContext));
					send({
						type: "started",
						__vitest_worker_response__
					});
				} catch (error) {
					// still reply "started" so the pool doesn't hang; the error travels with it
					send({
						type: "started",
						__vitest_worker_response__,
						error: serializeError(error)
					});
				}
				break;
			}
			// NOTE: the "run" and "collect" branches are intentionally parallel;
			// keep them in sync when editing either one
			case "run":
				// Prevent concurrent execution if worker is already running
				if (isRunning) {
					send({
						type: "testfileFinished",
						__vitest_worker_response__,
						error: serializeError(/* @__PURE__ */ new Error("[vitest-worker]: Worker is already running tests"))
					});
					return;
				}
				// assigning to process.env can throw if a test froze/replaced it
				try {
					process.env.VITEST_WORKER_ID = String(message.context.workerId);
				} catch (error) {
					return send({
						type: "testfileFinished",
						__vitest_worker_response__,
						error: serializeError(error),
						usedMemory: reportMemory ? memoryUsage().heapUsed : void 0
					});
				}
				isRunning = true;
				try {
					const tracesContext = traces.getContextFromCarrier(message.otelCarrier);
					// run() resolves to undefined on success or a serialized error on failure
					runPromise = traces.$("vitest.runtime.run", {
						context: tracesContext,
						attributes: {
							"vitest.worker.specifications": traces.isEnabled() ? getFilesWithLocations(message.context.files) : [],
							"vitest.worker.id": message.context.workerId
						}
					}, () => run({
						...setupContext,
						...message.context
					}, worker, traces).catch((error) => serializeError(error)));
					send({
						type: "testfileFinished",
						__vitest_worker_response__,
						error: await runPromise,
						usedMemory: reportMemory ? memoryUsage().heapUsed : void 0
					});
				} finally {
					runPromise = void 0;
					isRunning = false;
				}
				break;
			case "collect":
				// Prevent concurrent execution if worker is already running
				if (isRunning) {
					send({
						type: "testfileFinished",
						__vitest_worker_response__,
						error: serializeError(/* @__PURE__ */ new Error("[vitest-worker]: Worker is already running tests"))
					});
					return;
				}
				try {
					process.env.VITEST_WORKER_ID = String(message.context.workerId);
				} catch (error) {
					return send({
						type: "testfileFinished",
						__vitest_worker_response__,
						error: serializeError(error),
						usedMemory: reportMemory ? memoryUsage().heapUsed : void 0
					});
				}
				isRunning = true;
				try {
					const tracesContext = traces.getContextFromCarrier(message.otelCarrier);
					runPromise = traces.$("vitest.runtime.collect", {
						context: tracesContext,
						attributes: {
							"vitest.worker.specifications": traces.isEnabled() ? getFilesWithLocations(message.context.files) : [],
							"vitest.worker.id": message.context.workerId
						}
					}, () => collect({
						...setupContext,
						...message.context
					}, worker, traces).catch((error) => serializeError(error)));
					send({
						type: "testfileFinished",
						__vitest_worker_response__,
						error: await runPromise,
						usedMemory: reportMemory ? memoryUsage().heapUsed : void 0
					});
				} finally {
					runPromise = void 0;
					isRunning = false;
				}
				break;
			case "stop":
				// wait for an in-flight run before tearing down
				await runPromise;
				try {
					const context = traces.getContextFromCarrier(message.otelCarrier);
					const error = await traces.$("vitest.runtime.teardown", { context }, async () => {
						const error = await teardown().catch((error) => serializeError(error));
						await workerTeardown?.();
						return error;
					});
					await traces.finish();
					send({
						type: "stopped",
						error,
						__vitest_worker_response__
					});
				} catch (error) {
					send({
						type: "stopped",
						error: serializeError(error),
						__vitest_worker_response__
					});
				}
				// worker-specific teardown runs after the "stopped" reply is posted
				worker.teardown?.();
				break;
		}
	}
}
/**
 * Flattens test specifications into trace-attribute strings: the bare file
 * path when there are no test locations, otherwise one "path:location"
 * entry per location.
 * @param files array of `{ filepath, testLocations? }` specifications
 * @returns flat array of string identifiers
 */
function getFilesWithLocations(files) {
	return files.flatMap((file) => {
		if (!file.testLocations) return file.filepath;
		return file.testLocations.map((location) => {
			// fix: interpolate the file path, not the spec object
			// (previously produced "[object Object]:<location>")
			return `${file.filepath}:${location}`;
		});
	});
}
| export { listenForErrors as a, emitModuleRunner as e, init as i, loadEnvironment as l }; |
| import module$1, { isBuiltin } from 'node:module'; | ||
| import { fileURLToPath } from 'node:url'; | ||
| import { MessageChannel } from 'node:worker_threads'; | ||
| import { initSyntaxLexers, hoistMocks } from '@vitest/mocker/transforms'; | ||
| import { cleanUrl } from '@vitest/utils/helpers'; | ||
| import { p as parse } from './acorn.B2iPLyUM.js'; | ||
| import MagicString from 'magic-string'; | ||
| import { resolve } from 'pathe'; | ||
| import c from 'tinyrainbow'; | ||
| import { distDir } from '../path.js'; | ||
| import { t as toBuiltin } from './modules.BJuCwlRJ.js'; | ||
| import 'node:path'; | ||
// the ?vitest=<timestamp> cache-busting query always uses a Date.now() value,
// so its digit count is fixed for the lifetime of this process
const NOW_LENGTH = Date.now().toString().length;
// matches the URL-encoded ("%3F" = "?") vitest cache-busting query
const REGEXP_VITEST = new RegExp(`%3Fvitest=\\d{${NOW_LENGTH}}`);
const REGEXP_MOCK_ACTUAL = /\?mock=actual/;
/**
 * Installs Node.js module customization hooks for this worker:
 * - `module.registerHooks` (Node >= 22.15): synchronous resolve/load hooks
 *   that power module mocking, in-source testing and module-graph tracking;
 * - `module.register` fallback: off-thread loader that only tracks the
 *   module graph (mocking/in-source features are unavailable; warns);
 * - otherwise warns that hooks are unsupported (except on Deno/Bun).
 */
async function setupNodeLoaderHooks(worker) {
	if (module$1.setSourceMapsSupport) module$1.setSourceMapsSupport(true);
	else if (process.setSourceMapsEnabled) process.setSourceMapsEnabled(true);
	// syntax lexers are only needed by the transform-based load hook
	if (worker.config.experimental.nodeLoader !== false) await initSyntaxLexers();
	if (typeof module$1.registerHooks === "function") module$1.registerHooks({
		resolve(specifier, context, nextResolve) {
			if (specifier.includes("mock=actual")) {
				// url is already resolved by `importActual`
				const moduleId = specifier.replace(REGEXP_MOCK_ACTUAL, "");
				const builtin = isBuiltin(moduleId);
				return {
					url: builtin ? toBuiltin(moduleId) : moduleId,
					format: builtin ? "builtin" : void 0,
					shortCircuit: true
				};
			}
			const isVitest = specifier.includes("%3Fvitest=");
			// strip the cache-busting query before delegating to Node's resolver
			const result = nextResolve(isVitest ? specifier.replace(REGEXP_VITEST, "") : specifier, context);
			// avoid tracking /node_modules/ module graph for performance reasons
			if (context.parentURL && result.url && !result.url.includes("/node_modules/")) worker.rpc.ensureModuleGraphEntry(result.url, context.parentURL).catch(() => {
				// ignore errors
			});
			// this is require for in-source tests to be invalidated if
			// one of the files already imported it in --maxWorkers=1 --no-isolate
			if (isVitest) result.url = `${result.url}?vitest=${Date.now()}`;
			// never rewrite Vitest's own dist modules or their importers
			if (worker.config.experimental.nodeLoader === false || !context.parentURL || result.url.includes(distDir) || context.parentURL?.toString().includes(distDir)) return result;
			const mockedResult = getNativeMocker()?.resolveMockedModule(result.url, context.parentURL);
			if (mockedResult != null) return mockedResult;
			return result;
		},
		load: worker.config.experimental.nodeLoader === false ? void 0 : createLoadHook()
	});
	else if (module$1.register) {
		if (worker.config.experimental.nodeLoader !== false) console.warn(`${c.bgYellow(" WARNING ")} "module.registerHooks" is not supported in Node.js ${process.version}. This means that some features like module mocking or in-source testing are not supported. Upgrade your Node.js version to at least 22.15 or disable "experimental.nodeLoader" flag manually.\n`);
		// channel used by the off-thread loader to report resolved modules back
		const { port1, port2 } = new MessageChannel();
		port1.unref();
		port2.unref();
		port1.on("message", (data) => {
			if (!data || typeof data !== "object") return;
			switch (data.event) {
				case "register-module-graph-entry": {
					const { url, parentURL } = data;
					worker.rpc.ensureModuleGraphEntry(url, parentURL);
					return;
				}
				default: console.error("Unknown message event:", data.event);
			}
		});
		/** Registers {@link file://./../nodejsWorkerLoader.ts} */
		module$1.register("#nodejs-worker-loader", {
			parentURL: import.meta.url,
			data: { port: port2 },
			transferList: [port2]
		});
	} else if (!process.versions.deno && !process.versions.bun) console.warn("\"module.registerHooks\" and \"module.register\" are not supported. Some Vitest features may not work. Please, use Node.js 18.19.0 or higher.");
}
/**
 * Rewrites every `import.meta.vitest` occurrence in `source` to a call to an
 * injected IMPORT_META_TEST() helper, which yields the in-source test API
 * only while this exact file is the one being run.
 * @param url    module URL (converted to a path for the filepath guard)
 * @param source module source code
 * @param ms     lazy MagicString factory; only invoked when a marker is found
 */
function replaceInSourceMarker(url, source, ms) {
	const marker = /import\.meta\.vitest/g;
	let replaced = false;
	for (const match of source.matchAll(marker)) {
		replaced = true;
		// should it support process.vitest for CJS modules?
		ms().overwrite(match.index, match.index + match[0].length, "IMPORT_META_TEST()");
	}
	if (!replaced) return;
	const filename = resolve(fileURLToPath(url));
	// appending instead of prepending because functions are hoisted and we don't change the offset
	ms().append(`;\nfunction IMPORT_META_TEST() { return typeof __vitest_worker__ !== 'undefined' && __vitest_worker__.filepath === "${filename.replace(/"/g, "\\\"")}" ? __vitest_index__ : undefined; }`);
}
// module formats that are never transformed by the load hook
const ignoreFormats = new Set([
	"addon",
	"builtin",
	"wasm"
]);
/**
 * Creates the `load` customization hook used with `module.registerHooks`.
 * Handles (in order): mocked builtins, automock/autospy mocks, manual mocks,
 * and finally the in-source-test / vi.mock hoisting transform for modules
 * carrying the `vitest=` query. Everything else passes through untouched.
 * NOTE: `_worker` is currently unused (the hook is registered via `createLoadHook()`).
 */
function createLoadHook(_worker) {
	return (url, context, nextLoad) => {
		// mocked builtins never have real source; report them as commonjs
		const result = url.includes("mock=") && isBuiltin(cleanUrl(url)) ? { format: "commonjs" } : nextLoad(url, context);
		if (result.format && ignoreFormats.has(result.format) || url.includes(distDir)) return result;
		const mocker = getNativeMocker();
		// track repeated loads so circular manual mocks can be broken (see NativeModuleMocker)
		mocker?.checkCircularManualMock(url);
		if (url.includes("mock=automock") || url.includes("mock=autospy")) {
			const automockedResult = mocker?.loadAutomock(url, result);
			if (automockedResult != null) return automockedResult;
			return result;
		}
		if (url.includes("mock=manual")) {
			const mockedResult = mocker?.loadManualMock(url, result);
			if (mockedResult != null) return mockedResult;
			return result;
		}
		// ignore non-vitest modules for performance reasons,
		// vi.hoisted and vi.mock won't work outside of test files or setup files
		if (!result.source || !url.includes("vitest=")) return result;
		const filename = url.startsWith("file://") ? fileURLToPath(url) : url;
		const source = result.source.toString();
		// parse type-stripped code, but apply edits against the original source
		const transformedCode = result.format?.includes("typescript") ? module$1.stripTypeScriptTypes(source) : source;
		let _ms;
		// MagicString is created lazily so untouched modules pay no cost
		const ms = () => _ms || (_ms = new MagicString(source));
		if (source.includes("import.meta.vitest")) replaceInSourceMarker(url, source, ms);
		hoistMocks(transformedCode, filename, (code) => parse(code, {
			ecmaVersion: "latest",
			sourceType: result.format === "module" || result.format === "module-typescript" || result.format === "typescript" ? "module" : "script"
		}), {
			magicString: ms,
			globalThisAccessor: "\"__vitest_mocker__\""
		});
		let code;
		// only emit a source map when something was actually rewritten
		if (_ms) code = `${_ms.toString()}\n//# sourceMappingURL=${genSourceMapUrl(_ms.generateMap({
			hires: "boundary",
			source: filename
		}))}`;
		else code = source;
		return {
			format: result.format,
			shortCircuit: true,
			source: code
		};
	};
}
/**
 * Encodes a source map (object or pre-serialized JSON string) as a base64
 * `data:` URL for use in a `//# sourceMappingURL=` comment.
 */
function genSourceMapUrl(map) {
	const json = typeof map === "string" ? map : JSON.stringify(map);
	const base64 = Buffer.from(json).toString("base64");
	return `data:application/json;base64,${base64}`;
}
/**
 * Returns the globally installed module mocker, or `undefined` when module
 * mocking is not active in this process.
 */
function getNativeMocker() {
	if (typeof __vitest_mocker__ === "undefined") return void 0;
	return __vitest_mocker__;
}
| export { setupNodeLoaderHooks }; |
| import module$1, { isBuiltin } from 'node:module'; | ||
| import { fileURLToPath, pathToFileURL } from 'node:url'; | ||
| import { automockModule, createManualModuleSource, collectModuleExports } from '@vitest/mocker/transforms'; | ||
| import { cleanUrl, createDefer } from '@vitest/utils/helpers'; | ||
| import { p as parse } from './acorn.B2iPLyUM.js'; | ||
| import { isAbsolute } from 'pathe'; | ||
| import { t as toBuiltin } from './modules.BJuCwlRJ.js'; | ||
| import { B as BareModuleMocker, n as normalizeModuleId } from './startVitestModuleRunner.C3ZR-4J3.js'; | ||
| import 'node:fs'; | ||
| import './utils.BX5Fg8C4.js'; | ||
| import '@vitest/utils/timers'; | ||
| import '../path.js'; | ||
| import 'node:path'; | ||
| import '../module-evaluator.js'; | ||
| import 'node:vm'; | ||
| import 'vite/module-runner'; | ||
| import './traces.CCmnQaNT.js'; | ||
| import '@vitest/mocker'; | ||
| import '@vitest/mocker/redirect'; | ||
/**
 * Module mocker backing native (loader-hook based) module mocking.
 * Cooperates with the resolve/load hooks installed by setupNodeLoaderHooks:
 * `resolveMockedModule` redirects URLs of mocked modules, `loadAutomock` /
 * `loadManualMock` synthesize mock sources, and the processedModules /
 * originalModulePromises bookkeeping breaks circular manual-mock imports.
 */
class NativeModuleMocker extends BareModuleMocker {
	// Delays a dynamic import until pending mocks are resolved, so
	// `import()` inside tests observes registered mocks.
	wrapDynamicImport(moduleFactory) {
		if (typeof moduleFactory === "function") return new Promise((resolve, reject) => {
			// `finally` — the import proceeds even if mock resolution failed
			this.resolveMocks().finally(() => {
				moduleFactory().then(resolve, reject);
			});
		});
		return moduleFactory;
	}
	// Resolve-hook helper: returns a replacement resolution for a mocked
	// module URL, or undefined when the module is not mocked.
	resolveMockedModule(url, parentURL) {
		// don't mock modules inside of packages because there is
		// a high chance that it uses `require` which is not mockable
		// because we use top-level await in "manual" mocks.
		// for the sake of consistency we don't support mocking anything at all
		if (parentURL.includes("/node_modules/")) return;
		const moduleId = normalizeModuleId(url.startsWith("file://") ? fileURLToPath(url) : url);
		const mockedModule = this.getDependencyMock(moduleId);
		if (!mockedModule) return;
		if (mockedModule.type === "redirect") return {
			url: pathToFileURL(mockedModule.redirect).toString(),
			shortCircuit: true
		};
		// automock/autospy/manual are tagged with a query and handled by the load hook
		if (mockedModule.type === "automock" || mockedModule.type === "autospy") return {
			url: injectQuery(url, parentURL, `mock=${mockedModule.type}`),
			shortCircuit: true
		};
		if (mockedModule.type === "manual") return {
			url: injectQuery(url, parentURL, "mock=manual"),
			shortCircuit: true
		};
	}
	// Load-hook helper: generates an automocked/autospied module source for
	// `url`, or undefined when no source is available.
	loadAutomock(url, result) {
		const moduleId = cleanUrl(normalizeModuleId(url.startsWith("file://") ? fileURLToPath(url) : url));
		let source;
		if (isBuiltin(moduleId)) {
			// builtins have no source; synthesize a re-exporting module from
			// the real builtin's keys (quoted aliases keep arbitrary names valid)
			const builtinModule = getBuiltinModule(moduleId);
			const exports$1 = Object.keys(builtinModule);
			source = `
import * as builtinModule from '${toBuiltin(moduleId)}?mock=actual'
${exports$1.map((key, index) => {
				return `
const __${index} = builtinModule["${key}"]
export { __${index} as "${key}" }
`;
			}).join("")}`;
		} else source = result.source?.toString();
		if (source == null) return;
		const mockType = url.includes("mock=automock") ? "automock" : "autospy";
		const transformedCode = transformCode(source, result.format || "module", moduleId);
		try {
			const ms = automockModule(transformedCode, mockType, (code) => parse(code, {
				sourceType: "module",
				ecmaVersion: "latest"
			}), { id: moduleId });
			return {
				format: "module",
				source: `${ms.toString()}\n//# sourceMappingURL=${genSourceMapUrl(ms.generateMap({
					hires: "boundary",
					source: moduleId
				}))}`,
				shortCircuit: true
			};
		} catch (cause) {
			throw new Error(`Cannot automock '${url}' because it failed to parse.`, { cause });
		}
	}
	// Load-hook helper: generates the module source for a registered manual
	// mock (a module whose exports are supplied by a vi.mock factory).
	loadManualMock(url, result) {
		const moduleId = cleanUrl(normalizeModuleId(url.startsWith("file://") ? fileURLToPath(url) : url));
		// should not be possible
		if (this.getDependencyMock(moduleId)?.type !== "manual") {
			console.warn(`Vitest detected unregistered manual mock ${moduleId}. This is a bug in Vitest. Please, open a new issue with reproduction.`);
			return;
		}
		if (isBuiltin(moduleId)) {
			const builtinModule = getBuiltinModule(toBuiltin(moduleId));
			return {
				format: "module",
				source: createManualModuleSource(moduleId, Object.keys(builtinModule)),
				shortCircuit: true
			};
		}
		if (!result.source) return;
		const transformedCode = transformCode(result.source.toString(), result.format || "module", moduleId);
		if (transformedCode == null) return;
		const format = result.format?.startsWith("module") ? "module" : "commonjs";
		try {
			// the manual-mock stub exposes the same export names as the original module
			return {
				format: "module",
				source: createManualModuleSource(moduleId, collectModuleExports(moduleId, transformedCode, format)),
				shortCircuit: true
			};
		} catch (cause) {
			throw new Error(`Failed to mock '${url}'. See the cause for more information.`, { cause });
		}
	}
	// how many times each module id went through the load hook; used to
	// detect circular manual-mock imports (see getFactoryModule)
	processedModules = /* @__PURE__ */ new Map();
	checkCircularManualMock(url) {
		const id = cleanUrl(normalizeModuleId(url.startsWith("file://") ? fileURLToPath(url) : url));
		this.processedModules.set(id, (this.processedModules.get(id) ?? 0) + 1);
		// the module is mocked and requested a second time, let's resolve
		// the factory function that will redefine the exports later
		if (this.originalModulePromises.has(id)) {
			const factoryPromise = this.factoryPromises.get(id);
			this.originalModulePromises.get(id)?.resolve({ __factoryPromise: factoryPromise });
		}
	}
	// deferreds for manual-mock factories currently being resolved
	originalModulePromises = /* @__PURE__ */ new Map();
	// in-flight factory promises, keyed by module id
	factoryPromises = /* @__PURE__ */ new Map();
	// potential performance improvement:
	// store by URL, not ids, no need to call url.*to* methods and normalizeModuleId
	getFactoryModule(id) {
		const mock = this.getMockerRegistry().getById(id);
		if (!mock || mock.type !== "manual") throw new Error(`Mock ${id} wasn't registered. This is probably a Vitest error. Please, open a new issue with reproduction.`);
		const mockResult = mock.resolve();
		if (mockResult instanceof Promise) {
			// to avoid circular dependency, we resolve this function as {__factoryPromise} in `checkCircularManualMock`
			// when it's requested the second time. then the exports are exposed as `undefined`,
			// but later redefined when the promise is actually resolved
			const promise = createDefer();
			promise.finally(() => {
				this.originalModulePromises.delete(id);
			});
			mockResult.then(promise.resolve, promise.reject).finally(() => {
				this.factoryPromises.delete(id);
			});
			this.factoryPromises.set(id, mockResult);
			this.originalModulePromises.set(id, promise);
			// Node.js on windows processes all the files first, and then runs them
			// unlike Node.js logic on Mac and Unix where it also runs the code while evaluating
			// So on Linux/Mac this `if` won't be hit because `checkCircularManualMock` will resolve it
			// And on Windows, the `checkCircularManualMock` will never have `originalModulePromises`
			// because `getFactoryModule` is not called until the evaluation phase
			// But if we track how many times the module was transformed,
			// we can deduce when to return `__factoryPromise` to support circular modules
			if ((this.processedModules.get(id) ?? 0) > 1) {
				this.processedModules.set(id, (this.processedModules.get(id) ?? 1) - 1);
				promise.resolve({ __factoryPromise: mockResult });
			}
			return promise;
		}
		return mockResult;
	}
	// Implements vi.importActual: imports the real module, bypassing mocks
	// via the ?mock=actual query recognized by the resolve hook.
	importActual(rawId, importer) {
		const resolvedId = import.meta.resolve(rawId, pathToFileURL(importer).toString());
		const url = new URL(resolvedId);
		url.searchParams.set("mock", "actual");
		return import(url.toString());
	}
	// Implements vi.importMock: imports the module with mocks applied,
	// preferring a __mocks__ redirect and falling back to automocking.
	importMock(rawId, importer) {
		const resolvedId = import.meta.resolve(rawId, pathToFileURL(importer).toString());
		// file is already mocked
		if (resolvedId.includes("mock=")) return import(resolvedId);
		const filename = fileURLToPath(resolvedId);
		const external = !isAbsolute(filename) || this.isModuleDirectory(resolvedId) ? normalizeModuleId(rawId) : null;
		// file is not mocked, automock or redirect it
		const redirect = this.findMockRedirect(filename, external);
		if (redirect) return import(pathToFileURL(redirect).toString());
		const url = new URL(resolvedId);
		url.searchParams.set("mock", "automock");
		return import(url.toString());
	}
}
// escape "%" so `new URL` doesn't attempt to percent-decode the input
const replacePercentageRE = /%/g;
/**
 * Rebuilds `url` with `queryToInject` as the first query parameter while
 * preserving the original search params and hash.
 * @param importer base URL used to parse a possibly-relative `url`
 */
function injectQuery(url, importer, queryToInject) {
	const escaped = url.replace(replacePercentageRE, "%25");
	const { search, hash } = new URL(escaped, importer);
	const carriedSearch = search ? `&${search.slice(1)}` : "";
	return `${cleanUrl(url)}?${queryToInject}${carriedSearch}${hash ?? ""}`;
}
// lazily created require bound to this module's URL
let __require;
/**
 * Requires the real (unmocked) builtin module; the `?mock=actual` query tells
 * the installed loader hooks not to substitute a mock.
 */
function getBuiltinModule(moduleId) {
	if (!__require) __require = module$1.createRequire(import.meta.url);
	return __require(`${moduleId}?mock=actual`);
}
/**
 * Serializes a source map (object or JSON string) into a base64-encoded
 * `data:` URL for inline source-map comments.
 */
function genSourceMapUrl(map) {
	const serialized = typeof map === "string" ? map : JSON.stringify(map);
	return `data:application/json;base64,${Buffer.from(serialized).toString("base64")}`;
}
/**
 * Strips TypeScript types from `code` when the module format is a TypeScript
 * one; all other formats pass through unchanged.
 * @param code     module source code
 * @param format   resolved module format (e.g. "module", "commonjs", "module-typescript")
 * @param filename module id, used only in the error message
 * @throws {Error} when type-stripping is required but unsupported by this Node.js
 */
function transformCode(code, format, filename) {
	if (format.includes("typescript")) {
		// fix: interpolate the actual filename — the message previously
		// contained the literal build placeholder "$(unknown)"
		if (!module$1.stripTypeScriptTypes) throw new Error(`Cannot parse '${filename}' because "module.stripTypeScriptTypes" is not supported. Module mocking requires Node.js 22.15 or higher. This is NOT a bug of Vitest.`);
		return module$1.stripTypeScriptTypes(code);
	}
	return code;
}
| export { NativeModuleMocker }; |
| import { DevEnvironment } from 'vite'; | ||
| import { V as Vitest, T as TestProject, a as TestProjectConfiguration } from './reporters.d.DVUYHHhe.js'; | ||
/**
 * Generate a unique cache identifier.
 *
 * Return `false` to disable caching of the file.
 * @experimental
 */
interface CacheKeyIdGenerator {
	(context: CacheKeyIdGeneratorContext): string | undefined | null | false;
}
/**
 * Context passed to a {@link CacheKeyIdGenerator}.
 * @experimental
 */
interface CacheKeyIdGeneratorContext {
	/** Vite dev environment the module belongs to. */
	environment: DevEnvironment;
	/** Resolved module id. */
	id: string;
	/** Module source code before transformation. */
	sourceCode: string;
}
/** Context handed to Vite plugins that integrate with Vitest. */
interface VitestPluginContext {
	/** The running Vitest instance. */
	vitest: Vitest;
	/** The test project the plugin is registered for. */
	project: TestProject;
	/** Registers additional test projects at runtime. */
	injectTestProjects: (config: TestProjectConfiguration | TestProjectConfiguration[]) => Promise<TestProject[]>;
	/**
	 * Define a generator that will be applied before hashing the cache key.
	 *
	 * Use this to make sure Vitest generates correct hash. It is a good idea
	 * to define this function if your plugin can be registered with different options.
	 *
	 * This is called only if `experimental.fsModuleCache` is defined.
	 * @experimental
	 */
	experimental_defineCacheKeyGenerator: (callback: CacheKeyIdGenerator) => void;
}
export type { CacheKeyIdGenerator as C, VitestPluginContext as V, CacheKeyIdGeneratorContext as a };
Sorry, the diff of this file is too big to display
| import { r as resolveCoverageProviderModule } from './coverage.D_JHT54q.js'; | ||
| import { addSerializer } from '@vitest/snapshot'; | ||
| import { setSafeTimers } from '@vitest/utils/timers'; | ||
| import { g as getWorkerState } from './utils.BX5Fg8C4.js'; | ||
// Resolve the configured coverage provider and start collecting
// coverage inside this worker; resolves to null when no provider is set.
async function startCoverageInsideWorker(options, loader, runtimeOptions) {
	const provider = await resolveCoverageProviderModule(options, loader);
	if (!provider) return null;
	return provider.startCoverage?.(runtimeOptions);
}
// Ask the coverage provider for the coverage collected so far in this
// worker; resolves to null when no provider is configured.
async function takeCoverageInsideWorker(options, loader) {
	const provider = await resolveCoverageProviderModule(options, loader);
	if (!provider) return null;
	return provider.takeCoverage?.({ moduleExecutionInfo: loader.moduleExecutionInfo });
}
// Stop coverage collection in this worker via the configured provider;
// resolves to null when no provider is configured.
async function stopCoverageInsideWorker(options, loader, runtimeOptions) {
	const provider = await resolveCoverageProviderModule(options, loader);
	if (!provider) return null;
	return provider.stopCoverage?.(runtimeOptions);
}
// Tracks whether one-time global setup already ran in this worker.
let globalSetup = false;
// Apply per-file defines and env variables, and perform the one-time
// global setup (safe timers, optional global test APIs) on first call.
async function setupCommonEnv(config) {
	setupDefines(config);
	setupEnv(config.env);
	if (!globalSetup) {
		globalSetup = true;
		setSafeTimers();
		if (config.globals) {
			const globalsModule = await import('./globals.BXNGLnTL.js');
			globalsModule.registerApiGlobally();
		}
	}
}
// Expose user-configured compile-time defines on `globalThis` so test
// code can read them like build-time constants.
function setupDefines(config) {
	const defines = config.defines;
	for (const key in defines) {
		globalThis[key] = defines[key];
	}
}
// Mirror the resolved Vite env onto the worker's `import.meta.env`
// replacement (`state.metaEnv`).
function setupEnv(env) {
	const state = getWorkerState();
	// same boolean-to-string assignment as VitestPlugin.configResolved
	const { PROD, DEV, ...restEnvs } = env;
	state.metaEnv.PROD = PROD;
	state.metaEnv.DEV = DEV;
	for (const key of Object.keys(restEnvs)) {
		state.metaEnv[key] = restEnvs[key];
	}
}
// Resolve the user's diff options: objects pass through as-is, a string
// is imported as a config module (its default export must be an object),
// anything else yields undefined.
async function loadDiffConfig(config, moduleRunner) {
	if (typeof config.diff === "object") return config.diff;
	if (typeof config.diff !== "string") return;
	const diffModule = await moduleRunner.import(config.diff);
	const defaultExport = diffModule?.default;
	if (defaultExport != null && typeof defaultExport === "object") return defaultExport;
	throw new Error(`invalid diff config file ${config.diff}. Must have a default export with config object`);
}
// Import every configured snapshot serializer module, validate its
// shape, and register each serializer with the snapshot registry.
async function loadSnapshotSerializers(config, moduleRunner) {
	const files = config.snapshotSerializers;
	const serializers = await Promise.all(files.map(async (file) => {
		const mod = await moduleRunner.import(file);
		if (!mod || typeof mod.default !== "object" || mod.default === null) throw new Error(`invalid snapshot serializer file ${file}. Must export a default object`);
		const serializer = mod.default;
		const hasPrinter = typeof serializer.serialize === "function" || typeof serializer.print === "function";
		if (typeof serializer.test !== "function" || !hasPrinter) throw new TypeError(`invalid snapshot serializer in ${file}. Must have a 'test' method along with either a 'serialize' or 'print' method.`);
		return serializer;
	}));
	serializers.forEach((serializer) => addSerializer(serializer));
}
| export { loadSnapshotSerializers as a, startCoverageInsideWorker as b, stopCoverageInsideWorker as c, loadDiffConfig as l, setupCommonEnv as s, takeCoverageInsideWorker as t }; |
| import fs from 'node:fs'; | ||
| import { isBareImport } from '@vitest/utils/helpers'; | ||
| import { i as isBuiltin, a as isBrowserExternal, t as toBuiltin } from './modules.BJuCwlRJ.js'; | ||
| import { E as EnvironmentTeardownError, a as getSafeWorkerState } from './utils.BX5Fg8C4.js'; | ||
| import { pathToFileURL } from 'node:url'; | ||
| import { normalize, join } from 'pathe'; | ||
| import { distDir } from '../path.js'; | ||
| import { VitestModuleEvaluator, unwrapId } from '../module-evaluator.js'; | ||
| import { isAbsolute, resolve } from 'node:path'; | ||
| import vm from 'node:vm'; | ||
| import { MockerRegistry, mockObject, RedirectedModule, AutomockedModule } from '@vitest/mocker'; | ||
| import { findMockRedirect } from '@vitest/mocker/redirect'; | ||
| import * as viteModuleRunner from 'vite/module-runner'; | ||
| import { T as Traces } from './traces.CCmnQaNT.js'; | ||
/**
 * Base implementation of Vitest's module mocker. Tracks registered mocks
 * per test-suite file, resolves specifiers through the configured resolver,
 * and implements the `vi.mock`/`vi.unmock` queueing primitives. Subclasses
 * that own a module runner override `invalidateModuleById`/`importActual`/
 * `importMock`.
 *
 * Fix: the `vitest.mocker.resolve_id` span previously recorded `rawId`
 * under the `vitest.module.importer` attribute; it now records `importer`.
 */
class BareModuleMocker {
	// Mocks queued by `vi.mock`/`vi.unmock` before their ids are resolved;
	// static because hoisted `vi.mock` calls run before a mocker instance
	// is necessarily selected.
	static pendingIds = [];
	spyModule;
	// Constructors captured once (possibly from a vm context in subclasses)
	// so mocked values are built with the right realm's primitives.
	primitives;
	// One MockerRegistry per test-suite file.
	registries = /* @__PURE__ */ new Map();
	mockContext = { callstack: null };
	_otel;
	constructor(options) {
		this.options = options;
		this._otel = options.traces;
		this.primitives = {
			Object,
			Error,
			Function,
			RegExp,
			Symbol: globalThis.Symbol,
			Array,
			Map
		};
		if (options.spyModule) this.spyModule = options.spyModule;
	}
	get root() {
		return this.options.root;
	}
	get moduleDirectories() {
		return this.options.moduleDirectories || [];
	}
	// Lazily create and return the registry for the current suite.
	getMockerRegistry() {
		const suite = this.getSuiteFilepath();
		if (!this.registries.has(suite)) this.registries.set(suite, new MockerRegistry());
		return this.registries.get(suite);
	}
	// Drop all registered mocks for every suite.
	reset() {
		this.registries.clear();
	}
	invalidateModuleById(_id) {
		// implemented by mockers that control the module runner
	}
	isModuleDirectory(path) {
		return this.moduleDirectories.some((dir) => path.includes(dir));
	}
	// Current test file path, or "global" when called outside a suite.
	getSuiteFilepath() {
		return this.options.getCurrentTestFilepath() || "global";
	}
	// Build an error using the realm's Error constructor and attach an
	// optional code frame for nicer reporting.
	createError(message, codeFrame) {
		const Error = this.primitives.Error;
		const error = new Error(message);
		Object.assign(error, { codeFrame });
		return error;
	}
	/**
	 * Resolve a raw import specifier to `{ id, url, external }`. Falls back
	 * to the unresolved specifier when the resolver returns nothing.
	 */
	async resolveId(rawId, importer) {
		return this._otel.$("vitest.mocker.resolve_id", { attributes: {
			"vitest.module.raw_id": rawId,
			"vitest.module.importer": importer
		} }, async (span) => {
			const result = await this.options.resolveId(rawId, importer);
			if (!result) {
				span.addEvent("could not resolve id, fallback to unresolved values");
				const id = normalizeModuleId(rawId);
				span.setAttributes({
					"vitest.module.id": id,
					"vitest.module.url": rawId,
					"vitest.module.external": id,
					"vitest.module.fallback": true
				});
				return {
					id,
					url: rawId,
					external: id
				};
			}
			// external is node_module or unresolved module
			// for example, some people mock "vscode" and don't have it installed
			const external = !isAbsolute(result.file) || this.isModuleDirectory(result.file) ? normalizeModuleId(rawId) : null;
			const id = normalizeModuleId(result.id);
			span.setAttributes({
				"vitest.module.id": id,
				"vitest.module.url": result.url,
				"vitest.module.external": external ?? false
			});
			return {
				...result,
				id,
				external
			};
		});
	}
	// Resolve and apply every queued `vi.mock`/`vi.unmock` call.
	async resolveMocks() {
		if (!BareModuleMocker.pendingIds.length) return;
		await Promise.all(BareModuleMocker.pendingIds.map(async (mock) => {
			const { id, url, external } = await this.resolveId(mock.id, mock.importer);
			if (mock.action === "unmock") this.unmockPath(id);
			if (mock.action === "mock") this.mockPath(mock.id, id, url, external, mock.type, mock.factory);
		}));
		BareModuleMocker.pendingIds = [];
	}
	// public method to avoid circular dependency
	getMockContext() {
		return this.mockContext;
	}
	// path used to store mocked dependencies
	getMockPath(dep) {
		return `mock:${dep}`;
	}
	getDependencyMock(id) {
		return this.getMockerRegistry().getById(fixLeadingSlashes(id));
	}
	getDependencyMockByUrl(url) {
		return this.getMockerRegistry().get(url);
	}
	// Look up a user-provided `__mocks__` file for the given path.
	findMockRedirect(mockPath, external) {
		return findMockRedirect(this.root, mockPath, external);
	}
	/**
	 * Automock or autospy an exports object. The second argument may either
	 * be the target exports container or the module type itself.
	 */
	mockObject(object, mockExportsOrModuleType, moduleType) {
		let mockExports;
		if (mockExportsOrModuleType === "automock" || mockExportsOrModuleType === "autospy") {
			moduleType = mockExportsOrModuleType;
			mockExports = void 0;
		} else mockExports = mockExportsOrModuleType;
		moduleType ??= "automock";
		const createMockInstance = this.spyModule?.createMockInstance;
		if (!createMockInstance) throw this.createError("[vitest] `spyModule` is not defined. This is a Vitest error. Please open a new issue with reproduction.");
		return mockObject({
			globalConstructors: this.primitives,
			createMockInstance,
			type: moduleType
		}, object, mockExports);
	}
	// Remove a mock registration and invalidate its cached module.
	unmockPath(id) {
		this.getMockerRegistry().deleteById(id);
		this.invalidateModuleById(id);
	}
	// Register a mock of the given type for a resolved module.
	mockPath(originalId, id, url, external, mockType, factory) {
		const registry = this.getMockerRegistry();
		if (mockType === "manual") registry.register("manual", originalId, id, url, factory);
		else if (mockType === "autospy") registry.register("autospy", originalId, id, url);
		else {
			const redirect = this.findMockRedirect(id, external);
			if (redirect) registry.register("redirect", originalId, id, url, redirect);
			else registry.register("automock", originalId, id, url);
		}
		// every time the mock is registered, we remove the previous one from the cache
		this.invalidateModuleById(id);
	}
	async importActual(_rawId, _importer, _callstack) {
		throw new Error(`importActual is not implemented`);
	}
	async importMock(_rawId, _importer, _callstack) {
		throw new Error(`importMock is not implemented`);
	}
	// Queue a `vi.mock` call for later resolution in `resolveMocks`.
	queueMock(id, importer, factoryOrOptions) {
		const mockType = getMockType(factoryOrOptions);
		BareModuleMocker.pendingIds.push({
			action: "mock",
			id,
			importer,
			factory: typeof factoryOrOptions === "function" ? factoryOrOptions : void 0,
			type: mockType
		});
	}
	// Queue a `vi.unmock` call for later resolution in `resolveMocks`.
	queueUnmock(id, importer) {
		BareModuleMocker.pendingIds.push({
			action: "unmock",
			id,
			importer
		});
	}
}
// Infer the mock type from the second argument of `vi.mock`: a factory
// function means "manual", `{ spy: true }` means "autospy", and anything
// else (including no argument) falls back to "automock".
function getMockType(factoryOrOptions) {
	if (typeof factoryOrOptions === "function") return "manual";
	if (factoryOrOptions && factoryOrOptions.spy) return "autospy";
	return "automock";
}
// unique id that is not available as "$bare_import" like "test"
// https://nodejs.org/api/modules.html#built-in-modules-with-mandatory-node-prefix
// These builtins can only be imported WITH the "node:" prefix, so
// `normalizeModuleId` must never strip it from them.
const prefixedBuiltins = new Set([
	"node:sea",
	"node:sqlite",
	"node:test",
	"node:test/reporters"
]);
// Suffixed with $1 to avoid clashing with another `isWindows` in this bundle.
const isWindows$1 = process.platform === "win32";
// transform file url to id
// virtual:custom -> virtual:custom
// \0custom -> \0custom
// /root/id -> /id
// /root/id.js -> /id.js
// C:/root/id.js -> /id.js
// C:\root\id.js -> /id.js
// TODO: expose this in vite/module-runner
function normalizeModuleId(file) {
	// builtins with a mandatory "node:" prefix are returned untouched
	if (prefixedBuiltins.has(file)) return file;
	let id = slash(file);
	id = id.replace(/^\/@fs\//, isWindows$1 ? "" : "/");
	id = id.replace(/^node:/, "");
	id = id.replace(/^\/+/, "/");
	id = id.replace(/^file:\//, "/");
	return id;
}
// Matches every backslash in a path.
const windowsSlashRE = /\\/g;
// Convert Windows-style separators to POSIX forward slashes.
function slash(p) {
	return p.split("\\").join("/");
}
// One or more slashes at the start of an id.
const multipleSlashRe = /^\/+/;
// module-runner incorrectly replaces file:///path with `///path`;
// collapse any run of leading slashes back into a single one.
function fixLeadingSlashes(id) {
	return id.startsWith("//") ? id.replace(multipleSlashRe, "/") : id;
}
// copied from vite/src/shared/utils.ts
const postfixRE = /[?#].*$/;
// Strip the query string and hash fragment from a url.
function cleanUrl(url) {
	const match = postfixRE.exec(url);
	return match ? url.slice(0, match.index) : url;
}
// Split a url into its file part and the trailing `?query#hash` postfix.
function splitFileAndPostfix(path) {
	const file = cleanUrl(path);
	const postfix = path.slice(file.length);
	return { file, postfix };
}
// Insert `queryToInject` as the first query parameter of `url`,
// preserving any existing query string and hash fragment.
function injectQuery(url, queryToInject) {
	const { file, postfix } = splitFileAndPostfix(url);
	const rest = postfix[0] === "?" ? `&${postfix.slice(1)}` : postfix;
	return `${file}?${queryToInject}${rest}`;
}
// Remove a single `queryToRemove` parameter from a url, dropping the
// now-dangling "?" if it was the only parameter.
function removeQuery(url, queryToRemove) {
	const paramRE = new RegExp(`[?&]${queryToRemove}(?=[&#]|$)`);
	const withoutParam = url.replace(paramRE, "");
	return withoutParam.replace(/\?$/, "");
}
// Absolute path to the spy module bundled in Vitest's dist directory.
const spyModulePath = resolve(distDir, "spy.js");
/**
 * Mocker bound to a concrete `VitestModuleRunner`. Extends the bare mocker
 * with module-graph invalidation, lazy spy-module loading, and the actual
 * `importActual`/`importMock`/mocked-request machinery.
 */
class VitestMocker extends BareModuleMocker {
	// Well-known keys that may be probed on a mocked namespace without
	// counting as "missing exports" (see callFunctionMock).
	filterPublicKeys;
	constructor(moduleRunner, options) {
		super(options);
		this.moduleRunner = moduleRunner;
		this.options = options;
		const context = this.options.context;
		// when running inside a vm context, grab the context's own primitives
		// so mocked objects belong to the right realm
		if (context) this.primitives = vm.runInContext("({ Object, Error, Function, RegExp, Symbol, Array, Map })", context);
		const Symbol = this.primitives.Symbol;
		this.filterPublicKeys = [
			"__esModule",
			Symbol.asyncIterator,
			Symbol.hasInstance,
			Symbol.isConcatSpreadable,
			Symbol.iterator,
			Symbol.match,
			Symbol.matchAll,
			Symbol.replace,
			Symbol.search,
			Symbol.split,
			Symbol.species,
			Symbol.toPrimitive,
			Symbol.toStringTag,
			Symbol.unscopables
		];
	}
	get evaluatedModules() {
		return this.moduleRunner.evaluatedModules;
	}
	// Load the spy module through the module runner unless one was injected.
	async initializeSpyModule() {
		if (this.spyModule) return;
		this.spyModule = await this.moduleRunner.import(spyModulePath);
	}
	reset() {
		this.registries.clear();
	}
	// Invalidate the cached "mock:" node so the next request re-evaluates it.
	invalidateModuleById(id) {
		const mockId = this.getMockPath(id);
		const node = this.evaluatedModules.getModuleById(mockId);
		if (node) {
			this.evaluatedModules.invalidateModule(node);
			node.mockedExports = void 0;
		}
	}
	// Ensure a module node exists in the graph with placeholder metadata.
	ensureModule(id, url) {
		const node = this.evaluatedModules.ensureModule(id, url);
		// TODO
		node.meta = {
			id,
			url,
			code: "",
			file: null,
			invalidate: false
		};
		return node;
	}
	// Evaluate a manual (factory) mock once and cache a proxy over its
	// exports that throws a helpful error for undefined exports.
	async callFunctionMock(id, url, mock) {
		const node = this.ensureModule(id, url);
		if (node.exports) return node.exports;
		const exports$1 = await mock.resolve();
		const moduleExports = new Proxy(exports$1, { get: (target, prop) => {
			const val = target[prop];
			// 'then' can exist on non-Promise objects, need nested instanceof check for logic to work
			if (prop === "then") {
				if (target instanceof Promise) return target.then.bind(target);
			} else if (!(prop in target)) {
				if (this.filterPublicKeys.includes(prop)) return;
				throw this.createError(`[vitest] No "${String(prop)}" export is defined on the "${mock.raw}" mock. Did you forget to return it from "vi.mock"?
If you need to partially mock a module, you can use "importOriginal" helper inside:
`, `vi.mock(import("${mock.raw}"), async (importOriginal) => {
  const actual = await importOriginal()
  return {
    ...actual,
    // your mocked methods
  }
})`);
			}
			return val;
		} });
		node.exports = moduleExports;
		return moduleExports;
	}
	// Import the real module, bypassing mocks, by tagging the url with
	// the `_vitest_original` query.
	async importActual(rawId, importer, callstack) {
		const { url } = await this.resolveId(rawId, importer);
		const actualUrl = injectQuery(url, "_vitest_original");
		const node = await this.moduleRunner.fetchModule(actualUrl, importer);
		return await this.moduleRunner.cachedRequest(node.url, node, callstack || [importer], void 0, true);
	}
	// Import the mocked version of a module, auto-mocking when no explicit
	// mock is registered.
	async importMock(rawId, importer) {
		const { id, url, external } = await this.resolveId(rawId, importer);
		let mock = this.getDependencyMock(id);
		if (!mock) {
			const redirect = this.findMockRedirect(id, external);
			if (redirect) mock = new RedirectedModule(rawId, id, rawId, redirect);
			else mock = new AutomockedModule(rawId, id, rawId);
		}
		if (mock.type === "automock" || mock.type === "autospy") {
			const node = await this.moduleRunner.fetchModule(url, importer);
			const mod = await this.moduleRunner.cachedRequest(url, node, [importer], void 0, true);
			const Object = this.primitives.Object;
			return this.mockObject(mod, Object.create(Object.prototype), mock.type);
		}
		if (mock.type === "manual") return this.callFunctionMock(id, url, mock);
		const node = await this.moduleRunner.fetchModule(mock.redirect);
		return this.moduleRunner.cachedRequest(mock.redirect, node, [importer], void 0, true);
	}
	// Evaluate a module request through a known mock registration. Returns
	// the mocked exports, a redirect url string, or undefined to fall through.
	async requestWithMockedModule(url, evaluatedNode, callstack, mock) {
		return this._otel.$("vitest.mocker.evaluate", async (span) => {
			const mockId = this.getMockPath(evaluatedNode.id);
			span.setAttributes({
				"vitest.module.id": mockId,
				"vitest.mock.type": mock.type,
				"vitest.mock.id": mock.id,
				"vitest.mock.url": mock.url,
				"vitest.mock.raw": mock.raw
			});
			if (mock.type === "automock" || mock.type === "autospy") {
				const cache = this.evaluatedModules.getModuleById(mockId);
				if (cache && cache.mockedExports) return cache.mockedExports;
				const Object = this.primitives.Object;
				// we have to define a separate object that will copy all properties into itself
				// and can't just use the same `exports` define automatically by Vite before the evaluator
				const exports$1 = Object.create(null);
				Object.defineProperty(exports$1, Symbol.toStringTag, {
					value: "Module",
					configurable: true,
					writable: true
				});
				const node = this.ensureModule(mockId, this.getMockPath(evaluatedNode.url));
				node.meta = evaluatedNode.meta;
				node.file = evaluatedNode.file;
				node.mockedExports = exports$1;
				const mod = await this.moduleRunner.cachedRequest(url, node, callstack, void 0, true);
				this.mockObject(mod, exports$1, mock.type);
				return exports$1;
			}
			if (mock.type === "manual" && !callstack.includes(mockId) && !callstack.includes(url)) try {
				callstack.push(mockId);
				// this will not work if user does Promise.all(import(), import())
				// we can also use AsyncLocalStorage to store callstack, but this won't work in the browser
				// maybe we should improve mock API in the future?
				this.mockContext.callstack = callstack;
				return await this.callFunctionMock(mockId, this.getMockPath(url), mock);
			} finally {
				this.mockContext.callstack = null;
				const indexMock = callstack.indexOf(mockId);
				callstack.splice(indexMock, 1);
			}
			else if (mock.type === "redirect" && !callstack.includes(mock.redirect)) {
				span.setAttribute("vitest.mock.redirect", mock.redirect);
				return mock.redirect;
			}
		});
	}
	// Like requestWithMockedModule, but resolves the mock from the registry;
	// returns undefined when the module is not mocked.
	async mockedRequest(url, evaluatedNode, callstack) {
		const mock = this.getDependencyMock(evaluatedNode.id);
		if (!mock) return;
		return this.requestWithMockedModule(url, evaluatedNode, callstack, mock);
	}
}
/**
 * Minimal Vite module-runner transport that only supports the
 * `vite:invoke` -> `fetchModule` path and enriches teardown errors with
 * the recorded import callstack.
 */
class VitestTransport {
	constructor(options, evaluatedModules, callstacks) {
		this.options = options;
		this.evaluatedModules = evaluatedModules;
		// WeakMap of module node -> last callstack, recorded by the runner
		this.callstacks = callstacks;
	}
	async invoke(event) {
		if (event.type !== "custom") return { error: /* @__PURE__ */ new Error(`Vitest Module Runner doesn't support Vite HMR events.`) };
		if (event.event !== "vite:invoke") return { error: /* @__PURE__ */ new Error(`Vitest Module Runner doesn't support ${event.event} event.`) };
		const { name, data } = event.data;
		if (name === "getBuiltins")
			// we return an empty array here to avoid client-side builtin check,
			// as we need builtins to go through `fetchModule`
			return { result: [] };
		if (name !== "fetchModule") return { error: /* @__PURE__ */ new Error(`Unknown method: ${name}. Expected "fetchModule".`) };
		try {
			return { result: await this.options.fetchModule(...data) };
		} catch (cause) {
			// a fetch after environment teardown gets a richer error that
			// includes the last recorded import chain, when available
			if (cause instanceof EnvironmentTeardownError) {
				const [id, importer] = data;
				let message = `Cannot load '${id}'${importer ? ` imported from ${importer}` : ""} after the environment was torn down. This is not a bug in Vitest.`;
				const moduleNode = importer ? this.evaluatedModules.getModuleById(importer) : void 0;
				const callstack = moduleNode ? this.callstacks.get(moduleNode) : void 0;
				if (callstack) message += ` The last recorded callstack:\n- ${[
					...callstack,
					importer,
					id
				].reverse().join("\n- ")}`;
				const error = new EnvironmentTeardownError(message);
				// keep the original stack but swap in the enriched message
				if (cause.stack) error.stack = cause.stack.replace(cause.message, error.message);
				return { error };
			}
			return { error: cause };
		}
	}
}
// Build an `import.meta` object for modules executed in Node: Vite's
// default meta with `main: false` and, when the host supports it, a
// resolver backed by the native `import.meta.resolve`.
const createNodeImportMeta = (modulePath) => {
	if (!viteModuleRunner.createDefaultImportMeta) throw new Error(`createNodeImportMeta is not supported in this version of Vite.`);
	const defaultMeta = viteModuleRunner.createDefaultImportMeta(modulePath);
	const moduleHref = defaultMeta.url;
	const nativeResolver = createImportMetaResolver();
	return {
		...defaultMeta,
		main: false,
		resolve(id, parent) {
			const resolver = nativeResolver ?? defaultMeta.resolve;
			return resolver(id, parent ?? moduleHref);
		}
	};
};
// Wrap the host's `import.meta.resolve` when present; returns undefined
// on runtimes that do not support it.
function createImportMetaResolver() {
	const nativeResolve = import.meta.resolve;
	if (!nativeResolve) return;
	return (specifier, importer) => import.meta.resolve(specifier, importer);
}
// @ts-expect-error overriding private method
/**
 * Vite ModuleRunner specialized for Vitest: routes every request through
 * the mocker, records per-module import callstacks for better teardown
 * errors, and exposes the mocker on the (vm or global) context.
 */
class VitestModuleRunner extends viteModuleRunner.ModuleRunner {
	mocker;
	moduleExecutionInfo;
	_otel;
	// WeakMap of module node -> last import callstack (shared with transport)
	_callstacks;
	constructor(vitestOptions) {
		const options = vitestOptions;
		const evaluatedModules = options.evaluatedModules;
		const callstacks = /* @__PURE__ */ new WeakMap();
		const transport = new VitestTransport(options.transport, evaluatedModules, callstacks);
		super({
			transport,
			hmr: false,
			evaluatedModules,
			sourcemapInterceptor: "prepareStackTrace",
			createImportMeta: vitestOptions.createImportMeta
		}, options.evaluator);
		this.vitestOptions = vitestOptions;
		this._callstacks = callstacks;
		// fall back to disabled traces when none were provided
		this._otel = vitestOptions.traces || new Traces({ enabled: false });
		this.moduleExecutionInfo = options.getWorkerState().moduleExecutionInfo;
		this.mocker = options.mocker || new VitestMocker(this, {
			spyModule: options.spyModule,
			context: options.vm?.context,
			traces: this._otel,
			resolveId: options.transport.resolveId,
			get root() {
				return options.getWorkerState().config.root;
			},
			get moduleDirectories() {
				return options.getWorkerState().config.deps.moduleDirectories || [];
			},
			getCurrentTestFilepath() {
				return options.getWorkerState().filepath;
			}
		});
		// expose the mocker where mocked code expects to find it
		if (options.vm) options.vm.context.__vitest_mocker__ = this.mocker;
		else Object.defineProperty(globalThis, "__vitest_mocker__", {
			configurable: true,
			writable: true,
			value: this.mocker
		});
	}
	/**
	 * Vite checks that the module has exports emulating the Node.js behaviour,
	 * but Vitest is more relaxed.
	 *
	 * We should keep the Vite behavour when there is a `strict` flag.
	 * @internal
	 */
	processImport(exports$1) {
		return exports$1;
	}
	// Resolve the raw specifier first (tracing the resolution), then defer
	// to the base runner with the resolved url when available.
	async import(rawId) {
		const resolved = await this._otel.$("vitest.module.resolve_id", { attributes: { "vitest.module.raw_id": rawId } }, async (span) => {
			const result = await this.vitestOptions.transport.resolveId(rawId);
			if (result) span.setAttributes({
				"vitest.module.url": result.url,
				"vitest.module.file": result.file,
				"vitest.module.id": result.id
			});
			return result;
		});
		return super.import(resolved ? resolved.url : rawId);
	}
	async fetchModule(url, importer) {
		return await this.cachedModule(url, importer);
	}
	// Bypass-the-mocker variant of cachedRequest.
	_cachedRequest(url, module, callstack = [], metadata) {
		// @ts-expect-error "cachedRequest" is private
		return super.cachedRequest(url, module, callstack, metadata);
	}
	/**
	 * @internal
	 */
	async cachedRequest(url, mod, callstack = [], metadata, ignoreMock = false) {
		// Track for a better error message if dynamic import is not resolved properly
		this._callstacks.set(mod, callstack);
		if (ignoreMock) return this._cachedRequest(url, mod, callstack, metadata);
		let mocked;
		if (mod.meta && "mockedModule" in mod.meta) {
			const mockedModule = mod.meta.mockedModule;
			const mockId = this.mocker.getMockPath(mod.id);
			// bypass mock and force "importActual" behavior when:
			// - mock was removed by doUnmock (stale mockedModule in meta)
			// - self-import: mock factory/file is importing the module it's mocking
			const isStale = !this.mocker.getDependencyMock(mod.id);
			const isSelfImport = callstack.includes(mockId) || callstack.includes(url) || "redirect" in mockedModule && callstack.includes(mockedModule.redirect);
			if (isStale || isSelfImport) {
				const node = await this.fetchModule(injectQuery(url, "_vitest_original"));
				return this._cachedRequest(node.url, node, callstack, metadata);
			}
			mocked = await this.mocker.requestWithMockedModule(url, mod, callstack, mockedModule);
		} else mocked = await this.mocker.mockedRequest(url, mod, callstack);
		// a string result is a redirect url; an object is the mocked exports
		if (typeof mocked === "string") {
			const node = await this.fetchModule(mocked);
			return this._cachedRequest(mocked, node, callstack, metadata);
		}
		if (mocked != null && typeof mocked === "object") return mocked;
		return this._cachedRequest(url, mod, callstack, metadata);
	}
	/** @internal */
	// Invalidate the given modules plus everything that (transitively)
	// imports them; `invalidated` guards against cycles.
	_invalidateSubTreeById(ids, invalidated = /* @__PURE__ */ new Set()) {
		for (const id of ids) {
			if (invalidated.has(id)) continue;
			const node = this.evaluatedModules.getModuleById(id);
			if (!node) continue;
			invalidated.add(id);
			const subIds = Array.from(this.evaluatedModules.idToModuleMap).filter(([, mod]) => mod.importers.has(id)).map(([key]) => key);
			if (subIds.length) this._invalidateSubTreeById(subIds, invalidated);
			this.evaluatedModules.invalidateModule(node);
		}
	}
}
// Matches bare "vitest", "vitest/...", and "@vitest/..." specifiers.
const bareVitestRegexp = /^@?vitest(?:\/|$)/;
const normalizedDistDir = normalize(distDir);
// Cache of root -> dist dir path relative to that root.
const relativeIds = {};
// Cache of already-computed externalized specifiers.
const externalizeMap = /* @__PURE__ */ new Map();
// all Vitest imports always need to be externalized
function getCachedVitestImport(id, state) {
	// strip Vite's /@fs/ (or \@fs\ on Windows) prefix
	if (id.startsWith("/@fs/") || id.startsWith("\\@fs\\")) id = id.slice(process.platform === "win32" ? 5 : 4);
	if (externalizeMap.has(id)) return {
		externalize: externalizeMap.get(id),
		type: "module"
	};
	// always externalize Vitest because we import from there before running tests
	// so we already have it cached by Node.js
	const root = state().config.root;
	const relativeRoot = relativeIds[root] ?? (relativeIds[root] = normalizedDistDir.slice(root.length));
	// absolute path into the dist dir -> externalize as a file URL
	if (id.includes(distDir) || id.includes(normalizedDistDir)) {
		const externalize = id.startsWith("file://") ? id : pathToFileURL(id).toString();
		externalizeMap.set(id, externalize);
		return {
			externalize,
			type: "module"
		};
	}
	// root-relative path into the dist dir -> join with root, externalize
	if (relativeRoot && relativeRoot !== "/" && id.startsWith(relativeRoot)) {
		const externalize = pathToFileURL(join(root, id)).toString();
		externalizeMap.set(id, externalize);
		return {
			externalize,
			type: "module"
		};
	}
	// bare vitest/@vitest specifier -> externalize as-is
	if (bareVitestRegexp.test(id)) {
		externalizeMap.set(id, id);
		return {
			externalize: id,
			type: "module"
		};
	}
	return null;
}
const { readFileSync } = fs;
// Key under which the vm execution context is exposed.
const VITEST_VM_CONTEXT_SYMBOL = "__vitest_vm_context__";
// Captured once; used below to normalize Windows drive letters.
const cwd = process.cwd();
const isWindows = process.platform === "win32";
/**
 * Create and wire up a `VitestModuleRunner` for the current worker:
 * builds the evaluator (optionally vm-backed), a transport that fetches
 * modules over the worker RPC, and the mocking hooks.
 */
function startVitestModuleRunner(options) {
	const traces = options.traces;
	// prefer live worker state; fall back to the initial state passed in
	const state = () => getSafeWorkerState() || options.state;
	const rpc = () => state().rpc;
	const environment = () => {
		const environment = state().environment;
		return environment.viteEnvironment || environment.name;
	};
	// vm mode is only enabled when both a context and an executor are given
	const vm = options.context && options.externalModulesExecutor ? {
		context: options.context,
		externalModulesExecutor: options.externalModulesExecutor
	} : void 0;
	const evaluator = options.evaluator || new VitestModuleEvaluator(vm, {
		traces,
		evaluatedModules: options.evaluatedModules,
		get moduleExecutionInfo() {
			return state().moduleExecutionInfo;
		},
		get interopDefault() {
			return state().config.deps.interopDefault;
		},
		getCurrentTestFilepath: () => state().filepath
	});
	const moduleRunner = new VitestModuleRunner({
		spyModule: options.spyModule,
		evaluatedModules: options.evaluatedModules,
		evaluator,
		traces,
		mocker: options.mocker,
		transport: {
			async fetchModule(id, importer, options) {
				const resolvingModules = state().resolvingModules;
				if (isWindows) {
					if (id[1] === ":") {
						// The drive letter is different for whatever reason, we need to normalize it to CWD
						if (id[0] !== cwd[0] && id[0].toUpperCase() === cwd[0].toUpperCase()) id = (cwd[0].toUpperCase() === cwd[0] ? id[0].toUpperCase() : id[0].toLowerCase()) + id.slice(1);
						// always mark absolute windows paths, otherwise Vite will externalize it
						id = `/@id/${id}`;
					}
				}
				// vitest's own modules are always externalized and cached
				const vitest = getCachedVitestImport(id, state);
				if (vitest) return vitest;
				// strip _vitest_original query added by importActual so that
				// the plugin pipeline sees the original import id (e.g. virtual modules's load hook)
				const isImportActual = id.includes("_vitest_original");
				if (isImportActual) id = removeQuery(id, "_vitest_original");
				const rawId = unwrapId(id);
				resolvingModules.add(rawId);
				try {
					if (VitestMocker.pendingIds.length) await moduleRunner.mocker.resolveMocks();
					if (!isImportActual) {
						// manual/redirect mocks short-circuit the fetch entirely
						const resolvedMock = moduleRunner.mocker.getDependencyMockByUrl(id);
						if (resolvedMock?.type === "manual" || resolvedMock?.type === "redirect") return {
							code: "",
							file: null,
							id: resolvedMock.id,
							url: resolvedMock.url,
							invalidate: false,
							mockedModule: resolvedMock
						};
					}
					if (isBuiltin(rawId)) return {
						externalize: rawId,
						type: "builtin"
					};
					if (isBrowserExternal(rawId)) return {
						externalize: toBuiltin(rawId),
						type: "builtin"
					};
					// if module is invalidated, the worker will be recreated,
					// so cached is always true in a single worker
					if (options?.cached) return { cache: true };
					const otelCarrier = traces?.getContextCarrier();
					const result = await rpc().fetch(id, importer, environment(), options, otelCarrier);
					// large results come back as a temp file path to read
					if ("cached" in result) return {
						code: readFileSync(result.tmp, "utf-8"),
						...result
					};
					return result;
				} catch (cause) {
					// rethrow vite error if it cannot load the module because it's not resolved
					if (typeof cause === "object" && cause != null && cause.code === "ERR_LOAD_URL" || typeof cause?.message === "string" && cause.message.includes("Failed to load url") || typeof cause?.message === "string" && cause.message.startsWith("Cannot find module '")) {
						const error = new Error(`Cannot find ${isBareImport(id) ? "package" : "module"} '${id}'${importer ? ` imported from ${importer}` : ""}`, { cause });
						error.code = "ERR_MODULE_NOT_FOUND";
						throw error;
					}
					throw cause;
				} finally {
					resolvingModules.delete(rawId);
				}
			},
			resolveId(id, importer) {
				return rpc().resolve(id, importer, environment());
			}
		},
		getWorkerState: state,
		vm,
		createImportMeta: options.createImportMeta
	});
	return moduleRunner;
}
| export { BareModuleMocker as B, VITEST_VM_CONTEXT_SYMBOL as V, VitestModuleRunner as a, VitestTransport as b, createNodeImportMeta as c, normalizeModuleId as n, startVitestModuleRunner as s }; |
Sorry, the diff of this file is too big to display
| import { fileURLToPath, pathToFileURL } from 'node:url'; | ||
| import vm, { isContext, runInContext } from 'node:vm'; | ||
| import { dirname, basename, extname, normalize, resolve } from 'pathe'; | ||
| import { l as loadEnvironment, a as listenForErrors, e as emitModuleRunner } from './init.DICorXCo.js'; | ||
| import { distDir } from '../path.js'; | ||
| import { createCustomConsole } from './console.3WNpx0tS.js'; | ||
| import fs from 'node:fs'; | ||
| import { createRequire, Module, isBuiltin } from 'node:module'; | ||
| import { toArray, isBareImport } from '@vitest/utils/helpers'; | ||
| import { findNearestPackageData } from '@vitest/utils/resolver'; | ||
| import { dirname as dirname$1 } from 'node:path'; | ||
| import { CSS_LANGS_RE, KNOWN_ASSET_RE } from '@vitest/utils/constants'; | ||
| import { getDefaultRequestStubs } from '../module-evaluator.js'; | ||
| import { s as startVitestModuleRunner, V as VITEST_VM_CONTEXT_SYMBOL, c as createNodeImportMeta } from './startVitestModuleRunner.C3ZR-4J3.js'; | ||
| import { p as provideWorkerState } from './utils.BX5Fg8C4.js'; | ||
/**
 * Normalizes a CommonJS module's exports for consumption as an ES module.
 *
 * @param {boolean | undefined} interopDefault - when not `false`, transpiled
 *   ESM (`__esModule` marker) gets its `default` merged into the named exports
 * @param {unknown} mod - the raw value returned by `require()`
 * @returns {{ keys: string[], moduleExports: object, defaultExport: unknown }}
 *   `keys` are the named exports to expose, `moduleExports` is the lookup
 *   target for those names, `defaultExport` becomes the ESM `default`.
 */
function interopCommonJsModule(interopDefault, mod) {
	// Primitives, arrays and promises have no stable named exports to
	// enumerate: expose the value itself only as the default export.
	if (isPrimitive(mod) || Array.isArray(mod) || mod instanceof Promise) return {
		keys: [],
		moduleExports: {},
		defaultExport: mod
	};
	if (interopDefault !== false && "__esModule" in mod && !isPrimitive(mod.default)) {
		// fix: was misspelled `defaultKets`
		const defaultKeys = Object.keys(mod.default);
		const moduleKeys = Object.keys(mod);
		const allKeys = new Set([...defaultKeys, ...moduleKeys]);
		allKeys.delete("default");
		return {
			keys: Array.from(allKeys),
			// Prefer the top-level property, fall back to the transpiled
			// default's property (`??` keeps falsy-but-defined values).
			moduleExports: new Proxy(mod, { get(mod, prop) {
				return mod[prop] ?? mod.default?.[prop];
			} }),
			defaultExport: mod
		};
	}
	return {
		keys: Object.keys(mod).filter((key) => key !== "default"),
		moduleExports: mod,
		defaultExport: mod
	};
}
/**
 * Returns `true` when `obj` is not an object or function, i.e. it is
 * `null`, `undefined`, or a primitive (string, number, boolean, symbol, bigint).
 */
function isPrimitive(obj) {
	if (obj == null) return true;
	const kind = typeof obj;
	return kind !== "object" && kind !== "function";
}
// vm module classes used to model ESM inside the sandbox.
// NOTE(review): SyntheticModule/SourceTextModule are only defined when Node
// runs with --experimental-vm-modules — presumably the vm pool guarantees
// that flag upstream; confirm.
const SyntheticModule = vm.SyntheticModule;
const SourceTextModule = vm.SourceTextModule;
// Native require bound to this bundle's own URL, used to load core modules.
const _require = createRequire(import.meta.url);
// Memoizes the per-Module `require` function (see the Module#require getter).
const requiresCache = /* @__PURE__ */ new WeakMap();
/**
 * Emulates Node's CommonJS module system inside a vm context. Files are
 * compiled with `vm.Script` and run against the provided context so that the
 * globals user code sees (Object, Array, Error, ...) belong to the sandbox
 * realm, not the host realm.
 */
class CommonjsExecutor {
	context;
	// filename -> Module instance; equivalent of Node's internal require cache
	requireCache = /* @__PURE__ */ new Map();
	// read-only proxy over requireCache, exposed as `require.cache` / `Module._cache`
	publicRequireCache = this.createProxyCache();
	// identifier -> SyntheticModule; ESM-facing wrappers of CJS/builtin modules
	moduleCache = /* @__PURE__ */ new Map();
	// normalized builtin name -> Module (see requireCoreModule)
	builtinCache = Object.create(null);
	// extension -> loader function; equivalent of `require.extensions`
	extensions = Object.create(null);
	fs;
	Module;
	interopDefault;
	constructor(options) {
		this.context = options.context;
		this.fs = options.fileMap;
		this.interopDefault = options.interopDefault;
		// Pull constructors out of the sandbox so the objects we hand to user
		// code (exports objects, the wrapper array, errors) are context-realm.
		const primitives = vm.runInContext("({ Object, Array, Error })", this.context);
		// eslint-disable-next-line ts/no-this-alias
		const executor = this;
		// Re-implementation of Node's `Module` class, scoped to this executor.
		this.Module = class Module$1 {
			exports;
			isPreloading = false;
			id;
			filename;
			loaded;
			parent;
			children = [];
			path;
			paths = [];
			constructor(id = "", parent) {
				this.exports = primitives.Object.create(Object.prototype);
				// in our case the path should always be resolved already
				this.path = dirname(id);
				this.id = id;
				this.filename = id;
				this.loaded = false;
				this.parent = parent;
			}
			// Lazily created, memoized per instance via the module-level WeakMap.
			get require() {
				const require = requiresCache.get(this);
				if (require) return require;
				const _require = Module$1.createRequire(this.id);
				requiresCache.set(this, _require);
				return _require;
			}
			static getSourceMapsSupport = () => ({
				enabled: false,
				nodeModules: false,
				generatedCode: false
			});
			static setSourceMapsSupport = () => {
				// noop
			};
			static register = () => {
				throw new Error(`[vitest] "register" is not available when running in Vitest.`);
			};
			static registerHooks = () => {
				throw new Error(`[vitest] "registerHooks" is not available when running in Vitest.`);
			};
			// Compiles `code` with the CJS wrapper and runs it in the sandbox,
			// populating `this.exports`. Mirrors Node's Module#_compile.
			_compile(code, filename) {
				const cjsModule = Module$1.wrap(code);
				const script = new vm.Script(cjsModule, {
					filename,
					importModuleDynamically: options.importModuleDynamically
				});
				// @ts-expect-error mark script with current identifier
				script.identifier = filename;
				const fn = script.runInContext(executor.context);
				const __dirname = dirname(filename);
				// cache before evaluating so circular requires see partial exports
				executor.requireCache.set(filename, this);
				try {
					fn(this.exports, this.require, this, filename, __dirname);
					return this.exports;
				} finally {
					this.loaded = true;
				}
			}
			// exposed for external use, Node.js does the opposite
			static _load = (request, parent, _isMain) => {
				return Module$1.createRequire(parent?.filename ?? request)(request);
			};
			static wrap = (script) => {
				return Module$1.wrapper[0] + script + Module$1.wrapper[1];
			};
			// built with the sandbox's Array so it is a context-realm value
			static wrapper = new primitives.Array("(function (exports, require, module, __filename, __dirname) { ", "\n});");
			static builtinModules = Module.builtinModules;
			static findSourceMap = Module.findSourceMap;
			static SourceMap = Module.SourceMap;
			static syncBuiltinESMExports = Module.syncBuiltinESMExports;
			static _cache = executor.publicRequireCache;
			static _extensions = executor.extensions;
			static createRequire = (filename) => {
				return executor.createRequire(filename);
			};
			static runMain = () => {
				throw new primitives.Error("[vitest] \"runMain\" is not implemented.");
			};
			// @ts-expect-error not typed
			static _resolveFilename = Module._resolveFilename;
			// @ts-expect-error not typed
			static _findPath = Module._findPath;
			// @ts-expect-error not typed
			static _initPaths = Module._initPaths;
			// @ts-expect-error not typed
			static _preloadModules = Module._preloadModules;
			// @ts-expect-error not typed
			static _resolveLookupPaths = Module._resolveLookupPaths;
			// @ts-expect-error not typed
			static globalPaths = Module.globalPaths;
			static isBuiltin = Module.isBuiltin;
			static constants = Module.constants;
			static enableCompileCache = Module.enableCompileCache;
			static getCompileCacheDir = Module.getCompileCacheDir;
			static flushCompileCache = Module.flushCompileCache;
			static stripTypeScriptTypes = Module.stripTypeScriptTypes;
			static findPackageJSON = Module.findPackageJSON;
			static Module = Module$1;
		};
		this.extensions[".js"] = this.requireJs;
		this.extensions[".json"] = this.requireJson;
	}
	// Loader for ".js" (and the fallback for unknown extensions): compile as CJS.
	requireJs = (m, filename) => {
		const content = this.fs.readFile(filename);
		m._compile(content, filename);
	};
	// Loader for ".json": parse and assign directly to exports.
	requireJson = (m, filename) => {
		const code = this.fs.readFile(filename);
		m.exports = JSON.parse(code);
	};
	static cjsConditions;
	// Process-wide export conditions for require.resolve, computed once.
	static getCjsConditions() {
		if (!CommonjsExecutor.cjsConditions) CommonjsExecutor.cjsConditions = parseCjsConditions(process.execArgv, process.env.NODE_OPTIONS);
		return CommonjsExecutor.cjsConditions;
	}
	// Builds a `require` function rooted at `filename`, backed by this
	// executor's caches and extension loaders instead of Node's own.
	createRequire = (filename) => {
		const _require = createRequire(filename);
		const resolve = (id, options) => {
			return _require.resolve(id, {
				...options,
				conditions: CommonjsExecutor.getCjsConditions()
			});
		};
		const require = ((id) => {
			const resolved = resolve(id);
			if (extname(resolved) === ".node" || isBuiltin(resolved)) return this.requireCoreModule(resolved);
			const module = new this.Module(resolved);
			return this.loadCommonJSModule(module, resolved);
		});
		require.resolve = resolve;
		require.resolve.paths = _require.resolve.paths;
		Object.defineProperty(require, "extensions", {
			get: () => this.extensions,
			set: () => {},
			configurable: true
		});
		require.main = void 0;
		require.cache = this.publicRequireCache;
		return require;
	};
	// `require.cache` facade: reads come from requireCache, writes are no-ops.
	createProxyCache() {
		return new Proxy(Object.create(null), {
			defineProperty: () => true,
			deleteProperty: () => true,
			set: () => true,
			get: (_, key) => this.requireCache.get(key),
			has: (_, key) => this.requireCache.has(key),
			ownKeys: () => Array.from(this.requireCache.keys()),
			getOwnPropertyDescriptor() {
				return {
					configurable: true,
					enumerable: true
				};
			}
		});
	}
	// very naive implementation for Node.js require
	loadCommonJSModule(module, filename) {
		const cached = this.requireCache.get(filename);
		if (cached) return cached.exports;
		const extension = this.findLongestRegisteredExtension(filename);
		(this.extensions[extension] || this.extensions[".js"])(module, filename);
		return module.exports;
	}
	// Mirrors Node's algorithm: try the longest registered multi-dot extension
	// first (e.g. ".test.js" before ".js"); a leading dot (dotfiles) is skipped.
	findLongestRegisteredExtension(filename) {
		const name = basename(filename);
		let currentExtension;
		let index;
		let startIndex = 0;
		// eslint-disable-next-line no-cond-assign
		while ((index = name.indexOf(".", startIndex)) !== -1) {
			startIndex = index + 1;
			if (index === 0) continue;
			currentExtension = name.slice(index);
			if (this.extensions[currentExtension]) return currentExtension;
		}
		return ".js";
	}
	// Wraps a core (builtin) module in a SyntheticModule so it can be imported as ESM.
	getCoreSyntheticModule(identifier) {
		if (this.moduleCache.has(identifier)) return this.moduleCache.get(identifier);
		const exports$1 = this.require(identifier);
		const keys = Object.keys(exports$1);
		const module = new SyntheticModule([...keys, "default"], () => {
			for (const key of keys) module.setExport(key, exports$1[key]);
			module.setExport("default", exports$1);
		}, {
			context: this.context,
			identifier
		});
		this.moduleCache.set(identifier, module);
		return module;
	}
	// Wraps a CJS file in a SyntheticModule, applying default-export interop.
	getCjsSyntheticModule(path, identifier) {
		if (this.moduleCache.has(identifier)) return this.moduleCache.get(identifier);
		const exports$1 = this.require(path);
		// TODO: technically module should be parsed to find static exports, implement for strict mode in #2854
		const { keys, moduleExports, defaultExport } = interopCommonJsModule(this.interopDefault, exports$1);
		const module = new SyntheticModule([...keys, "default"], function() {
			for (const key of keys) this.setExport(key, moduleExports[key]);
			this.setExport("default", defaultExport);
		}, {
			context: this.context,
			identifier
		});
		this.moduleCache.set(identifier, module);
		return module;
	}
	// TODO: use this in strict mode, when available in #2854
	// private _getNamedCjsExports(path: string): Set<string> {
	//   const cachedNamedExports = this.cjsNamedExportsMap.get(path)
	//   if (cachedNamedExports) {
	//     return cachedNamedExports
	//   }
	//   if (extname(path) === '.node') {
	//     const moduleExports = this.require(path)
	//     const namedExports = new Set(Object.keys(moduleExports))
	//     this.cjsNamedExportsMap.set(path, namedExports)
	//     return namedExports
	//   }
	//   const code = this.fs.readFile(path)
	//   const { exports, reexports } = parseCjs(code, path)
	//   const namedExports = new Set(exports)
	//   this.cjsNamedExportsMap.set(path, namedExports)
	//   for (const reexport of reexports) {
	//     if (isNodeBuiltin(reexport)) {
	//       const exports = this.require(reexport)
	//       if (exports !== null && typeof exports === 'object') {
	//         for (const e of Object.keys(exports)) {
	//           namedExports.add(e)
	//         }
	//       }
	//     }
	//     else {
	//       const require = this.createRequire(path)
	//       const resolved = require.resolve(reexport)
	//       const exports = this._getNamedCjsExports(resolved)
	//       for (const e of exports) {
	//         namedExports.add(e)
	//       }
	//     }
	//   }
	//   return namedExports
	// }
	// Entry point: routes to the core-module path for builtins/.node addons,
	// otherwise loads the file as CJS through this executor.
	require(identifier) {
		if (extname(identifier) === ".node" || isBuiltin(identifier)) return this.requireCoreModule(identifier);
		const module = new this.Module(identifier);
		return this.loadCommonJSModule(module, identifier);
	}
	// Loads a builtin (or .node addon) via the host's native require; "module"
	// is special-cased to hand out our own Module class instead of Node's.
	requireCoreModule(identifier) {
		const normalized = identifier.replace(/^node:/, "");
		if (this.builtinCache[normalized]) return this.builtinCache[normalized].exports;
		const moduleExports = _require(identifier);
		if (identifier === "node:module" || identifier === "module") {
			const module = new this.Module("/module.js");
			module.exports = this.Module;
			this.builtinCache[normalized] = module;
			return module.exports;
		}
		// NOTE(review): _require.cache is keyed by resolved filenames, so for
		// bare builtin names this likely stores `undefined`, making the cache
		// check above always miss for builtins — confirm whether intended.
		this.builtinCache[normalized] = _require.cache[normalized];
		// TODO: should we wrap module to rethrow context errors?
		return moduleExports;
	}
}
| // The "module-sync" exports condition (added in Node 22.12/20.19 when | ||
| // require(esm) was unflagged) can resolve to ESM files that our CJS | ||
| // vm.Script executor cannot handle. We exclude it by passing explicit | ||
| // CJS conditions to require.resolve (Node 22.12+). | ||
| // Must be a Set because Node's internal resolver calls conditions.has(). | ||
| // User-specified --conditions/-C flags are respected, except module-sync. | ||
/**
 * Builds the export-condition set used for CJS resolution.
 * Starts from Node's default CJS conditions, honors user-supplied
 * `--conditions`/`-C` flags from execArgv and NODE_OPTIONS, and always
 * strips "module-sync" (it can resolve to ESM files our CJS executor
 * cannot run). Returned as a Set because Node's resolver calls `.has()`.
 */
function parseCjsConditions(execArgv, nodeOptions) {
	const baseConditions = ["node", "require", "node-addons"];
	const userConditions = [];
	const allArgs = nodeOptions == null
		? [...execArgv]
		: [...execArgv, ...nodeOptions.split(/\s+/)];
	let i = 0;
	while (i < allArgs.length) {
		const current = allArgs[i];
		const inline = /^(?:--conditions|-C)=(.+)$/.exec(current);
		if (inline) {
			userConditions.push(inline[1]);
		} else if ((current === "--conditions" || current === "-C") && i + 1 < allArgs.length) {
			i += 1;
			userConditions.push(allArgs[i]);
		}
		i += 1;
	}
	const merged = [...baseConditions, ...userConditions];
	return new Set(merged.filter((condition) => condition !== "module-sync"));
}
// Parses JS/JSON/WASM `data:` URIs into named groups: mime type, optional
// encoding (charset=utf-8 or base64), and the raw payload.
const dataURIRegex = /^data:(?<mime>text\/javascript|application\/json|application\/wasm)(?:;(?<encoding>charset=utf-8|base64))?,(?<code>.*)$/;
/**
 * Creates and evaluates ES modules inside the vm context using
 * SourceTextModule/SyntheticModule. Also handles JSON, WebAssembly,
 * `data:` URIs and (when enabled) http(s) network imports.
 */
class EsmExecutor {
	// identifier/fileURL -> vm module or in-flight promise of one
	moduleCache = /* @__PURE__ */ new Map();
	// module -> its link() promise, so concurrent evaluations await one link
	esmLinkMap = /* @__PURE__ */ new WeakMap();
	context;
	// 127.0.0.0 as an integer; http: imports are restricted to loopback
	#httpIp = IPnumber("127.0.0.0");
	constructor(executor, options) {
		this.executor = executor;
		this.context = options.context;
	}
	// Links (at most once) and evaluates a vm module; safe to call repeatedly.
	async evaluateModule(m) {
		if (m.status === "unlinked") this.esmLinkMap.set(m, m.link((identifier, referencer) => this.executor.resolveModule(identifier, referencer.identifier)));
		await this.esmLinkMap.get(m);
		if (m.status === "linked") await m.evaluate();
		return m;
	}
	// Returns the cached module (or in-flight load) for fileURL, otherwise
	// starts loading; the promise itself is cached to dedupe concurrent loads.
	async createEsModule(fileURL, getCode) {
		const cached = this.moduleCache.get(fileURL);
		if (cached) return cached;
		const promise = this.loadEsModule(fileURL, getCode);
		this.moduleCache.set(fileURL, promise);
		return promise;
	}
	async loadEsModule(fileURL, getCode) {
		const code = await getCode();
		// TODO: should not be allowed in strict mode, implement in #2854
		if (fileURL.endsWith(".json")) {
			// NOTE(review): unlike the other synthetic modules in this file,
			// this one is created without an explicit context/identifier, so it
			// lives in the host context — confirm this is intended.
			const m = new SyntheticModule(["default"], function() {
				const result = JSON.parse(code);
				this.setExport("default", result);
			});
			this.moduleCache.set(fileURL, m);
			return m;
		}
		const m = new SourceTextModule(code, {
			identifier: fileURL,
			context: this.context,
			importModuleDynamically: this.executor.importModuleDynamically,
			// populates import.meta for the sandboxed module
			initializeImportMeta: (meta, mod) => {
				meta.url = mod.identifier;
				if (mod.identifier.startsWith("file:")) {
					const filename = fileURLToPath(mod.identifier);
					meta.filename = filename;
					meta.dirname = dirname$1(filename);
				}
				meta.resolve = (specifier, importer) => {
					return this.executor.resolve(specifier, importer != null ? importer.toString() : mod.identifier);
				};
			}
		});
		this.moduleCache.set(fileURL, m);
		return m;
	}
	// Caches the load *promise* so concurrent imports share one compilation.
	async createWebAssemblyModule(fileUrl, getCode) {
		const cached = this.moduleCache.get(fileUrl);
		if (cached) return cached;
		const m = this.loadWebAssemblyModule(getCode(), fileUrl);
		this.moduleCache.set(fileUrl, m);
		return m;
	}
	async createNetworkModule(fileUrl) {
		// https://nodejs.org/api/esm.html#https-and-http-imports
		if (fileUrl.startsWith("http:")) {
			const url = new URL(fileUrl);
			if (url.hostname !== "localhost" && url.hostname !== "::1" && (IPnumber(url.hostname) & IPmask(8)) !== this.#httpIp) throw new Error(
				// we don't know the importer, so it's undefined (the same happens in --pool=threads)
				`import of '${fileUrl}' by undefined is not supported: http can only be used to load local resources (use https instead).`
			);
		}
		return this.createEsModule(fileUrl, () => fetch(fileUrl).then((r) => r.text()));
	}
	// Compiles a wasm binary into a SyntheticModule whose evaluator resolves,
	// evaluates and wires up every import before instantiating.
	async loadWebAssemblyModule(source, identifier) {
		const cached = this.moduleCache.get(identifier);
		if (cached) return cached;
		const wasmModule = await WebAssembly.compile(source);
		const exports$1 = WebAssembly.Module.exports(wasmModule);
		const imports = WebAssembly.Module.imports(wasmModule);
		const moduleLookup = {};
		for (const { module } of imports) if (moduleLookup[module] === void 0) moduleLookup[module] = await this.executor.resolveModule(module, identifier);
		const evaluateModule = (module) => this.evaluateModule(module);
		return new SyntheticModule(exports$1.map(({ name }) => name), async function() {
			const importsObject = {};
			for (const { module, name } of imports) {
				if (!importsObject[module]) importsObject[module] = {};
				await evaluateModule(moduleLookup[module]);
				importsObject[module][name] = moduleLookup[module].namespace[name];
			}
			const wasmInstance = new WebAssembly.Instance(wasmModule, importsObject);
			for (const { name } of exports$1) this.setExport(name, wasmInstance.exports[name]);
		}, {
			context: this.context,
			identifier
		});
	}
	cacheModule(identifier, module) {
		this.moduleCache.set(identifier, module);
	}
	resolveCachedModule(identifier) {
		return this.moduleCache.get(identifier);
	}
	// Turns a `data:` URI (JS, JSON or wasm) into a vm module.
	async createDataModule(identifier) {
		const cached = this.moduleCache.get(identifier);
		if (cached) return cached;
		const match = identifier.match(dataURIRegex);
		if (!match || !match.groups) throw new Error("Invalid data URI");
		const mime = match.groups.mime;
		const encoding = match.groups.encoding;
		if (mime === "application/wasm") {
			if (!encoding) throw new Error("Missing data URI encoding");
			if (encoding !== "base64") throw new Error(`Invalid data URI encoding: ${encoding}`);
			const module = this.loadWebAssemblyModule(Buffer.from(match.groups.code, "base64"), identifier);
			this.moduleCache.set(identifier, module);
			return module;
		}
		let code = match.groups.code;
		if (!encoding || encoding === "charset=utf-8") code = decodeURIComponent(code);
		else if (encoding === "base64") code = Buffer.from(code, "base64").toString();
		else throw new Error(`Invalid data URI encoding: ${encoding}`);
		if (mime === "application/json") {
			const module = new SyntheticModule(["default"], function() {
				const obj = JSON.parse(code);
				this.setExport("default", obj);
			}, {
				context: this.context,
				identifier
			});
			this.moduleCache.set(identifier, module);
			return module;
		}
		return this.createEsModule(identifier, () => code);
	}
}
/**
 * Converts a dotted-quad IPv4 address into its 32-bit integer form
 * (as a signed JS number, matching the bitwise math in IPmask).
 * Throws when the input is not four dot-separated numeric octets.
 */
function IPnumber(address) {
	const parts = /^(\d+)\.(\d+)\.(\d+)\.(\d+)$/.exec(address);
	if (parts === null) {
		throw new Error(`Expected IP address, received ${address}`);
	}
	const [, a, b, c, d] = parts;
	return (Number(a) << 24) + (Number(b) << 16) + (Number(c) << 8) + Number(d);
}
/**
 * Returns the 32-bit network mask for a prefix of `maskSize` bits,
 * e.g. IPmask(8) === 0xFF000000 as a signed integer.
 */
function IPmask(maskSize) {
	const hostBits = 32 - maskSize;
	return -1 << hostBits;
}
// Virtual id of Vite's client runtime; its file-URL form is precomputed so
// both spellings map to the same stub module (see ViteExecutor).
const CLIENT_ID = "/@vite/client";
const CLIENT_FILE = pathToFileURL(CLIENT_ID).href;
/**
 * Serves modules that must go through Vite's transform pipeline inside the
 * VM: the /@vite/client stub plus CSS, assets and user-configured glob
 * patterns (see canResolve).
 */
class ViteExecutor {
	esm;
	constructor(options) {
		this.options = options;
		this.esm = options.esmExecutor;
	}
	// Resolver hook: only claims the virtual Vite client id; returns
	// undefined for everything else so the next resolver runs.
	resolve = (identifier) => {
		if (identifier === CLIENT_ID) return identifier;
	};
	get workerState() {
		return this.options.context.__vitest_worker__;
	}
	// Builds (and caches) an ES module whose source comes from Vite's transform.
	async createViteModule(fileUrl) {
		if (fileUrl === CLIENT_FILE || fileUrl === CLIENT_ID) return this.createViteClientModule();
		const cached = this.esm.resolveCachedModule(fileUrl);
		if (cached) return cached;
		return this.esm.createEsModule(fileUrl, async () => {
			try {
				const result = await this.options.transform(fileUrl);
				if (result.code) return result.code;
			} catch (cause) {
				// rethrow vite error if it cannot load the module because it's not resolved
				// fix: guard `cause != null` — `typeof null === "object"`, so reading
				// `cause.code` on a null rejection would throw a TypeError here
				// (matches the equivalent handler in startVitestModuleRunner)
				if (typeof cause === "object" && cause != null && cause.code === "ERR_LOAD_URL" || typeof cause?.message === "string" && cause.message.includes("Failed to load url")) {
					const error = new Error(`Cannot find module '${fileUrl}'`, { cause });
					error.code = "ERR_MODULE_NOT_FOUND";
					throw error;
				}
				// any other transform error falls through to the generic message below
			}
			throw new Error(`[vitest] Failed to transform ${fileUrl}. Does the file exist?`);
		});
	}
	// Wraps the pre-built /@vite/client stub in a SyntheticModule (once).
	createViteClientModule() {
		const identifier = CLIENT_ID;
		const cached = this.esm.resolveCachedModule(identifier);
		if (cached) return cached;
		const stub = this.options.viteClientModule;
		const moduleKeys = Object.keys(stub);
		const module = new SyntheticModule(moduleKeys, function() {
			moduleKeys.forEach((key) => {
				this.setExport(key, stub[key]);
			});
		}, {
			context: this.options.context,
			identifier
		});
		this.esm.cacheModule(identifier, module);
		return module;
	}
	// True when the url should be served via Vite's transform pipeline:
	// the client stub, or (per deps.web config) CSS, known assets, or globs.
	canResolve = (fileUrl) => {
		if (fileUrl === CLIENT_FILE) return true;
		const config = this.workerState.config.deps?.web || {};
		const [modulePath] = fileUrl.split("?");
		if (config.transformCss && CSS_LANGS_RE.test(modulePath)) return true;
		if (config.transformAssets && KNOWN_ASSET_RE.test(modulePath)) return true;
		if (toArray(config.transformGlobPattern).some((pattern) => pattern.test(modulePath))) return true;
		return false;
	};
}
// Used by createModule to surface ERR_MODULE_NOT_FOUND for missing files.
const { existsSync } = fs;
// always defined when we use vm pool
const nativeResolve = import.meta.resolve;
| // TODO: improve Node.js strict mode support in #2854 | ||
| class ExternalModulesExecutor { | ||
| cjs; | ||
| esm; | ||
| vite; | ||
| context; | ||
| fs; | ||
| resolvers = []; | ||
| #networkSupported = null; | ||
| constructor(options) { | ||
| this.options = options; | ||
| this.context = options.context; | ||
| this.fs = options.fileMap; | ||
| this.esm = new EsmExecutor(this, { context: this.context }); | ||
| this.cjs = new CommonjsExecutor({ | ||
| context: this.context, | ||
| importModuleDynamically: this.importModuleDynamically, | ||
| fileMap: options.fileMap, | ||
| interopDefault: options.interopDefault | ||
| }); | ||
| this.vite = new ViteExecutor({ | ||
| esmExecutor: this.esm, | ||
| context: this.context, | ||
| transform: options.transform, | ||
| viteClientModule: options.viteClientModule | ||
| }); | ||
| this.resolvers = [this.vite.resolve]; | ||
| } | ||
| async import(identifier) { | ||
| const module = await this.createModule(identifier); | ||
| await this.esm.evaluateModule(module); | ||
| return module.namespace; | ||
| } | ||
| require(identifier) { | ||
| return this.cjs.require(identifier); | ||
| } | ||
| createRequire(identifier) { | ||
| return this.cjs.createRequire(identifier); | ||
| } | ||
| // dynamic import can be used in both ESM and CJS, so we have it in the executor | ||
| importModuleDynamically = async (specifier, referencer) => { | ||
| const module = await this.resolveModule(specifier, referencer.identifier); | ||
| return await this.esm.evaluateModule(module); | ||
| }; | ||
| resolveModule = async (specifier, referencer) => { | ||
| let identifier = this.resolve(specifier, referencer); | ||
| if (identifier instanceof Promise) identifier = await identifier; | ||
| return await this.createModule(identifier); | ||
| }; | ||
| resolve(specifier, parent) { | ||
| for (const resolver of this.resolvers) { | ||
| const id = resolver(specifier, parent); | ||
| if (id) return id; | ||
| } | ||
| // import.meta.resolve can be asynchronous in older +18 Node versions | ||
| return nativeResolve(specifier, parent); | ||
| } | ||
| getModuleInformation(identifier) { | ||
| if (identifier.startsWith("data:")) return { | ||
| type: "data", | ||
| url: identifier, | ||
| path: identifier | ||
| }; | ||
| const extension = extname(identifier); | ||
| if (extension === ".node" || isBuiltin(identifier)) return { | ||
| type: "builtin", | ||
| url: identifier, | ||
| path: identifier | ||
| }; | ||
| if (this.isNetworkSupported && (identifier.startsWith("http:") || identifier.startsWith("https:"))) return { | ||
| type: "network", | ||
| url: identifier, | ||
| path: identifier | ||
| }; | ||
| const isFileUrl = identifier.startsWith("file://"); | ||
| const pathUrl = isFileUrl ? fileURLToPath(identifier.split("?")[0]) : identifier; | ||
| const fileUrl = isFileUrl ? identifier : pathToFileURL(pathUrl).toString(); | ||
| let type; | ||
| if (this.vite.canResolve(fileUrl)) type = "vite"; | ||
| else if (extension === ".mjs") type = "module"; | ||
| else if (extension === ".cjs") type = "commonjs"; | ||
| else if (extension === ".wasm") | ||
| // still experimental on NodeJS --experimental-wasm-modules | ||
| // cf. ESM_FILE_FORMAT(url) in https://nodejs.org/docs/latest-v20.x/api/esm.html#resolution-algorithm | ||
| type = "wasm"; | ||
| else type = findNearestPackageData(normalize(pathUrl)).type === "module" ? "module" : "commonjs"; | ||
| return { | ||
| type, | ||
| path: pathUrl, | ||
| url: fileUrl | ||
| }; | ||
| } | ||
| createModule(identifier) { | ||
| const { type, url, path } = this.getModuleInformation(identifier); | ||
| // create ERR_MODULE_NOT_FOUND on our own since latest NodeJS's import.meta.resolve doesn't throw on non-existing namespace or path | ||
| // https://github.com/nodejs/node/pull/49038 | ||
| if ((type === "module" || type === "commonjs" || type === "wasm") && !existsSync(path)) { | ||
| const error = /* @__PURE__ */ new Error(`Cannot find ${isBareImport(path) ? "package" : "module"} '${path}'`); | ||
| error.code = "ERR_MODULE_NOT_FOUND"; | ||
| throw error; | ||
| } | ||
| switch (type) { | ||
| case "data": return this.esm.createDataModule(identifier); | ||
| case "builtin": return this.cjs.getCoreSyntheticModule(identifier); | ||
| case "vite": return this.vite.createViteModule(url); | ||
| case "wasm": return this.esm.createWebAssemblyModule(url, () => this.fs.readBuffer(path)); | ||
| case "module": return this.esm.createEsModule(url, () => this.fs.readFileAsync(path)); | ||
| case "commonjs": return this.cjs.getCjsSyntheticModule(path, identifier); | ||
| case "network": return this.esm.createNetworkModule(url); | ||
| default: return type; | ||
| } | ||
| } | ||
| get isNetworkSupported() { | ||
| if (this.#networkSupported == null) if (process.execArgv.includes("--experimental-network-imports")) this.#networkSupported = true; | ||
| else if (process.env.NODE_OPTIONS?.includes("--experimental-network-imports")) this.#networkSupported = true; | ||
| else this.#networkSupported = false; | ||
| return this.#networkSupported; | ||
| } | ||
| } | ||
// fs accessors shared by the FileMap caching layer.
const { promises, readFileSync } = fs;
/**
 * In-memory cache of file contents keyed by path. Once a file is read it is
 * never re-read from disk for the lifetime of this instance — text and
 * binary reads are cached independently.
 */
class FileMap {
	// path -> utf-8 string contents
	fsCache = /* @__PURE__ */ new Map();
	// path -> Buffer contents
	fsBufferCache = /* @__PURE__ */ new Map();
	/** Async cached text read. */
	async readFileAsync(path) {
		const hit = this.fsCache.get(path);
		if (hit != null) return hit;
		const contents = await promises.readFile(path, "utf-8");
		this.fsCache.set(path, contents);
		return contents;
	}
	/** Sync cached text read (shares the cache with readFileAsync). */
	readFile(path) {
		const hit = this.fsCache.get(path);
		if (hit != null) return hit;
		const contents = readFileSync(path, "utf-8");
		this.fsCache.set(path, contents);
		return contents;
	}
	/** Sync cached binary read. */
	readBuffer(path) {
		const hit = this.fsBufferCache.get(path);
		if (hit != null) return hit;
		const contents = readFileSync(path);
		this.fsBufferCache.set(path, contents);
		return contents;
	}
}
// Worker entry module that is imported *inside* the VM context to run tests.
const entryFile = pathToFileURL(resolve(distDir, "workers/runVmTests.js")).href;
// Module-level so caches persist across test runs within the same worker process.
const fileMap = new FileMap();
const packageCache = /* @__PURE__ */ new Map();
/**
 * Runs test files inside a vm context supplied by an environment's setupVM
 * (vmThreads/vmForks pools): loads the environment, prepares the sandbox
 * globals and module runners, then executes the worker entry in the sandbox.
 */
async function runVmTests(method, state, traces) {
	const { ctx, rpc } = state;
	const beforeEnvironmentTime = performance.now();
	const { environment } = await loadEnvironment(ctx.environment.name, ctx.config.root, rpc, traces, true);
	state.environment = environment;
	if (!environment.setupVM) {
		const envName = ctx.environment.name;
		// environments referenced by path keep the path; bare names map to the
		// conventional "vitest-environment-<name>" package id
		const packageId = envName[0] === "." ? envName : `vitest-environment-${envName}`;
		throw new TypeError(`Environment "${ctx.environment.name}" is not a valid environment. Path "${packageId}" doesn't support vm environment because it doesn't provide "setupVM" method.`);
	}
	const vm = await traces.$("vitest.runtime.environment.setup", { attributes: {
		"vitest.environment": environment.name,
		"vitest.environment.vite_environment": environment.viteEnvironment || environment.name
	} }, () => environment.setupVM(ctx.environment.options || ctx.config.environmentOptions || {}));
	state.durations.environment = performance.now() - beforeEnvironmentTime;
	process.env.VITEST_VM_POOL = "1";
	if (!vm.getVmContext) throw new TypeError(`Environment ${environment.name} doesn't provide "getVmContext" method. It should return a context created by "vm.createContext" method.`);
	const context = vm.getVmContext();
	if (!isContext(context)) throw new TypeError(`Environment ${environment.name} doesn't provide a valid context. It should be created by "vm.createContext" method.`);
	provideWorkerState(context, state);
	// this is unfortunately needed for our own dependencies
	// we need to find a way to not rely on this by default
	// because browser doesn't provide these globals
	context.process = process;
	context.global = context;
	context.console = state.config.disableConsoleIntercept ? console : createCustomConsole(state);
	// TODO: don't hardcode setImmediate in fake timers defaults
	context.setImmediate = setImmediate;
	context.clearImmediate = clearImmediate;
	const stubs = getDefaultRequestStubs(context);
	const externalModulesExecutor = new ExternalModulesExecutor({
		context,
		fileMap,
		packageCache,
		transform: rpc.transform,
		viteClientModule: stubs["/@vite/client"]
	});
	// tests must never terminate the worker process directly
	process.exit = (code = process.exitCode || 0) => {
		throw new Error(`process.exit unexpectedly called with "${code}"`);
	};
	listenForErrors(() => state);
	const moduleRunner = startVitestModuleRunner({
		context,
		evaluatedModules: state.evaluatedModules,
		state,
		externalModulesExecutor,
		createImportMeta: createNodeImportMeta,
		traces
	});
	emitModuleRunner(moduleRunner);
	Object.defineProperty(context, VITEST_VM_CONTEXT_SYMBOL, {
		value: {
			context,
			externalModulesExecutor
		},
		configurable: true,
		enumerable: false,
		writable: false
	});
	context.__vitest_mocker__ = moduleRunner.mocker;
	if (ctx.config.serializedDefines) try {
		runInContext(ctx.config.serializedDefines, context, { filename: "virtual:load-defines.js" });
	} catch (error) {
		throw new Error(`Failed to load custom "defines": ${error.message}`);
	}
	await moduleRunner.mocker.initializeSpyModule();
	// import the worker entry *through* the module runner so it executes in the sandbox
	const { run } = await moduleRunner.import(entryFile);
	try {
		await run(method, ctx.files, ctx.config, moduleRunner, traces);
	} finally {
		// environment teardown runs even when the test run throws
		await traces.$("vitest.runtime.environment.teardown", () => vm.teardown?.());
	}
}
/**
 * Validates that the vm pool is usable with the current config: the vm pools
 * require the Vite module runner, so `experimental.viteModuleRunner: false`
 * is rejected up front with a pointer to the compatible pools.
 */
function setupVmWorker(context) {
	const runnerDisabled = context.config.experimental.viteModuleRunner === false;
	if (runnerDisabled) {
		throw new Error(`Pool "${context.pool}" cannot run with "experimental.viteModuleRunner: false". Please, use "threads" or "forks" instead.`);
	}
}
| export { runVmTests as r, setupVmWorker as s }; |
+1
-1
@@ -1,2 +0,2 @@ | ||
| export { l as loadDiffConfig, a as loadSnapshotSerializers, s as setupCommonEnv, b as startCoverageInsideWorker, c as stopCoverageInsideWorker, t as takeCoverageInsideWorker } from './chunks/setup-common.z3ZfZiWN.js'; | ||
| export { l as loadDiffConfig, a as loadSnapshotSerializers, s as setupCommonEnv, b as startCoverageInsideWorker, c as stopCoverageInsideWorker, t as takeCoverageInsideWorker } from './chunks/setup-common.B41N_kPE.js'; | ||
| export { T as Traces } from './chunks/traces.CCmnQaNT.js'; | ||
@@ -3,0 +3,0 @@ export { collectTests, startTests } from '@vitest/runner'; |
+3
-3
@@ -1,2 +0,2 @@ | ||
| import { c as createCLI } from './chunks/cac.Vtz91O0H.js'; | ||
| import { c as createCLI } from './chunks/cac.CWGDZnXT.js'; | ||
| import '@vitest/utils/helpers'; | ||
@@ -7,5 +7,4 @@ import 'events'; | ||
| import './chunks/constants.CPYnjOGj.js'; | ||
| import './chunks/index.DpkD7Zj4.js'; | ||
| import './chunks/index.DXMFO5MJ.js'; | ||
| import 'node:fs'; | ||
| import 'node:fs/promises'; | ||
| import 'node:perf_hooks'; | ||
@@ -27,2 +26,3 @@ import '@vitest/runner/utils'; | ||
| import '@vitest/utils/offset'; | ||
| import 'node:fs/promises'; | ||
| import 'node:module'; | ||
@@ -29,0 +29,0 @@ import 'node:assert'; |
+1
-1
@@ -43,3 +43,3 @@ 'use strict'; | ||
| isolate: true, | ||
| watch: !stdEnv.isCI && process.stdin.isTTY, | ||
| watch: !stdEnv.isCI && process.stdin.isTTY && !stdEnv.isAgent, | ||
| globals: false, | ||
@@ -46,0 +46,0 @@ environment: "node", |
+3
-3
| import { HookHandler, UserConfig, ConfigEnv } from 'vite'; | ||
| export { ConfigEnv, Plugin, UserConfig as ViteUserConfig, mergeConfig } from 'vite'; | ||
| import { I as InlineConfig, C as CoverageV8Options, R as ResolvedCoverageOptions, U as UserWorkspaceConfig, b as UserProjectConfigFn, c as UserProjectConfigExport } from './chunks/reporters.d.CRDGoPlb.js'; | ||
| export { a as TestProjectConfiguration, d as TestProjectInlineConfiguration, e as TestUserConfig, W as WatcherTriggerPattern } from './chunks/reporters.d.CRDGoPlb.js'; | ||
| import { V as VitestPluginContext } from './chunks/plugin.d.pmonRL8Y.js'; | ||
| import { I as InlineConfig, C as CoverageV8Options, R as ResolvedCoverageOptions, U as UserWorkspaceConfig, b as UserProjectConfigFn, c as UserProjectConfigExport } from './chunks/reporters.d.DVUYHHhe.js'; | ||
| export { a as TestProjectConfiguration, d as TestProjectInlineConfiguration, e as TestUserConfig, W as WatcherTriggerPattern } from './chunks/reporters.d.DVUYHHhe.js'; | ||
| import { V as VitestPluginContext } from './chunks/plugin.d.BuW-flqo.js'; | ||
| import { F as FakeTimerInstallOpts } from './chunks/config.d.EJLVE3es.js'; | ||
@@ -7,0 +7,0 @@ export { TestTagDefinition } from '@vitest/runner'; |
+1
-1
@@ -1,2 +0,2 @@ | ||
| export { c as configDefaults, a as coverageConfigDefaults, d as defaultExclude, b as defaultInclude } from './chunks/defaults.BlJmGxXD.js'; | ||
| export { c as configDefaults, a as coverageConfigDefaults, d as defaultExclude, b as defaultInclude } from './chunks/defaults.CdU2lD-q.js'; | ||
| export { mergeConfig } from 'vite'; | ||
@@ -3,0 +3,0 @@ export { d as defaultBrowserPort } from './chunks/constants.CPYnjOGj.js'; |
@@ -1,2 +0,2 @@ | ||
| import { R as ResolvedCoverageOptions, V as Vitest, aX as CoverageMap, al as ReportContext, T as TestProject } from './chunks/reporters.d.CRDGoPlb.js'; | ||
| import { R as ResolvedCoverageOptions, V as Vitest, aY as CoverageMap, am as ReportContext, T as TestProject } from './chunks/reporters.d.DVUYHHhe.js'; | ||
| import { TransformResult } from 'vite'; | ||
@@ -3,0 +3,0 @@ import { A as AfterSuiteRunMeta } from './chunks/rpc.d.BFMWpdph.js'; |
+2
-2
@@ -1,2 +0,2 @@ | ||
| export { B as BaseCoverageProvider } from './chunks/coverage.DUqi2f6q.js'; | ||
| export { B as BaseCoverageProvider } from './chunks/coverage.Bri33R1t.js'; | ||
| import 'node:fs'; | ||
@@ -11,3 +11,3 @@ import 'node:module'; | ||
| import 'tinyrainbow'; | ||
| import './chunks/defaults.BlJmGxXD.js'; | ||
| import './chunks/defaults.CdU2lD-q.js'; | ||
| import 'node:os'; | ||
@@ -14,0 +14,0 @@ import './chunks/env.D4Lgay0q.js'; |
+2
-2
@@ -1,5 +0,5 @@ | ||
| export { N as BenchmarkRunner, T as TestRunner, a as assert, c as createExpect, g as expect, i as inject, s as should, v as vi, b as vitest } from './chunks/test.PnxXDGpZ.js'; | ||
| export { N as BenchmarkRunner, T as TestRunner, a as assert, c as createExpect, g as expect, i as inject, s as should, v as vi, b as vitest } from './chunks/test.CTcmp4Su.js'; | ||
| export { b as bench } from './chunks/benchmark.D0SlKNbZ.js'; | ||
| export { V as EvaluatedModules } from './chunks/evaluatedModules.Dg1zASAC.js'; | ||
| export { a as assertType } from './chunks/index.IcAjQV7n.js'; | ||
| export { a as assertType } from './chunks/index.DlDSLQD3.js'; | ||
| export { expectTypeOf } from 'expect-type'; | ||
@@ -6,0 +6,0 @@ export { afterAll, afterEach, aroundAll, aroundEach, beforeAll, beforeEach, describe, it, onTestFailed, onTestFinished, recordArtifact, suite, test } from '@vitest/runner'; |
+3
-3
@@ -6,5 +6,5 @@ import * as vite from 'vite'; | ||
| import { IncomingMessage } from 'node:http'; | ||
| import { f as ResolvedConfig, e as UserConfig, g as VitestRunMode, h as VitestOptions, V as Vitest, A as ApiConfig, L as Logger, i as TestSpecification, T as TestProject, P as PoolWorker, j as PoolOptions, k as WorkerRequest, l as TestSequencer } from './chunks/reporters.d.CRDGoPlb.js'; | ||
| export { B as BaseCoverageOptions, m as BaseReporter, n as BenchmarkBuiltinReporters, o as BenchmarkReporter, p as BenchmarkReportsMap, q as BenchmarkUserOptions, r as BrowserBuiltinProvider, s as BrowserCommand, t as BrowserCommandContext, u as BrowserConfigOptions, v as BrowserInstanceOption, w as BrowserModuleMocker, x as BrowserOrchestrator, y as BrowserProvider, z as BrowserProviderOption, D as BrowserScript, E as BrowserServerFactory, F as BrowserServerOptions, G as BrowserServerState, H as BrowserServerStateSession, J as BuiltinEnvironment, K as BuiltinReporterOptions, M as BuiltinReporters, N as CSSModuleScopeStrategy, O as CoverageIstanbulOptions, Q as CoverageOptions, S as CoverageProvider, X as CoverageProviderModule, Y as CoverageReporter, C as CoverageV8Options, Z as CustomProviderOptions, _ as DefaultReporter, $ as DepsOptimizationOptions, a0 as DotReporter, a1 as EnvironmentOptions, a2 as GithubActionsReporter, a3 as HTMLOptions, a4 as HangingProcessReporter, I as InlineConfig, a5 as JUnitOptions, a6 as JUnitReporter, a7 as JsonAssertionResult, a8 as JsonOptions, a9 as JsonReporter, aa as JsonTestResult, ab as JsonTestResults, ac as ModuleDiagnostic, ad as OnServerRestartHandler, ae as OnTestsRerunHandler, af as ParentProjectBrowser, ag as Pool, ah as PoolRunnerInitializer, ai as PoolTask, aj as ProjectBrowser, ak as ProjectConfig, al as ReportContext, am as ReportedHookContext, an as Reporter, ao as ReportersMap, ap as ResolveSnapshotPathHandler, aq as ResolveSnapshotPathHandlerContext, ar as ResolvedBrowserOptions, R as ResolvedCoverageOptions, as as ResolvedProjectConfig, at as SerializedTestProject, au as TapFlatReporter, av as TapReporter, aw as TaskOptions, ax as TestCase, ay as TestCollection, az as TestDiagnostic, aA as TestModule, aB as TestModuleState, aC as TestResult, aD as TestResultFailed, aE as TestResultPassed, aF as TestResultSkipped, aG as TestRunEndReason, aH as TestRunResult, aI as TestSequencerConstructor, aJ as 
TestSpecificationOptions, aK as TestState, aL as TestSuite, aM as TestSuiteState, aN as ToMatchScreenshotComparators, aO as ToMatchScreenshotOptions, aP as TypecheckConfig, U as UserWorkspaceConfig, aQ as VerboseBenchmarkReporter, aR as VerboseReporter, aS as VitestEnvironment, aT as VitestPackageInstaller, W as WatcherTriggerPattern, aU as WorkerResponse, aV as _BrowserNames, aW as experimental_getRunnerTask } from './chunks/reporters.d.CRDGoPlb.js'; | ||
| export { C as CacheKeyIdGenerator, a as CacheKeyIdGeneratorContext, V as VitestPluginContext } from './chunks/plugin.d.pmonRL8Y.js'; | ||
| import { f as ResolvedConfig, e as UserConfig, g as VitestRunMode, h as VitestOptions, V as Vitest, A as ApiConfig, L as Logger, i as TestSpecification, T as TestProject, P as PoolWorker, j as PoolOptions, k as WorkerRequest, l as TestSequencer } from './chunks/reporters.d.DVUYHHhe.js'; | ||
| export { m as AgentReporter, B as BaseCoverageOptions, n as BaseReporter, o as BenchmarkBuiltinReporters, p as BenchmarkReporter, q as BenchmarkReportsMap, r as BenchmarkUserOptions, s as BrowserBuiltinProvider, t as BrowserCommand, u as BrowserCommandContext, v as BrowserConfigOptions, w as BrowserInstanceOption, x as BrowserModuleMocker, y as BrowserOrchestrator, z as BrowserProvider, D as BrowserProviderOption, E as BrowserScript, F as BrowserServerFactory, G as BrowserServerOptions, H as BrowserServerState, J as BrowserServerStateSession, K as BuiltinEnvironment, M as BuiltinReporterOptions, N as BuiltinReporters, O as CSSModuleScopeStrategy, Q as CoverageIstanbulOptions, S as CoverageOptions, X as CoverageProvider, Y as CoverageProviderModule, Z as CoverageReporter, C as CoverageV8Options, _ as CustomProviderOptions, $ as DefaultReporter, a0 as DepsOptimizationOptions, a1 as DotReporter, a2 as EnvironmentOptions, a3 as GithubActionsReporter, a4 as HTMLOptions, a5 as HangingProcessReporter, I as InlineConfig, a6 as JUnitOptions, a7 as JUnitReporter, a8 as JsonAssertionResult, a9 as JsonOptions, aa as JsonReporter, ab as JsonTestResult, ac as JsonTestResults, ad as ModuleDiagnostic, ae as OnServerRestartHandler, af as OnTestsRerunHandler, ag as ParentProjectBrowser, ah as Pool, ai as PoolRunnerInitializer, aj as PoolTask, ak as ProjectBrowser, al as ProjectConfig, am as ReportContext, an as ReportedHookContext, ao as Reporter, ap as ReportersMap, aq as ResolveSnapshotPathHandler, ar as ResolveSnapshotPathHandlerContext, as as ResolvedBrowserOptions, R as ResolvedCoverageOptions, at as ResolvedProjectConfig, au as SerializedTestProject, av as TapFlatReporter, aw as TapReporter, ax as TaskOptions, ay as TestCase, az as TestCollection, aA as TestDiagnostic, aB as TestModule, aC as TestModuleState, aD as TestResult, aE as TestResultFailed, aF as TestResultPassed, aG as TestResultSkipped, aH as TestRunEndReason, aI as TestRunResult, aJ as TestSequencerConstructor, 
aK as TestSpecificationOptions, aL as TestState, aM as TestSuite, aN as TestSuiteState, aO as ToMatchScreenshotComparators, aP as ToMatchScreenshotOptions, aQ as TypecheckConfig, U as UserWorkspaceConfig, aR as VerboseBenchmarkReporter, aS as VerboseReporter, aT as VitestEnvironment, aU as VitestPackageInstaller, W as WatcherTriggerPattern, aV as WorkerResponse, aW as _BrowserNames, aX as experimental_getRunnerTask } from './chunks/reporters.d.DVUYHHhe.js'; | ||
| export { C as CacheKeyIdGenerator, a as CacheKeyIdGeneratorContext, V as VitestPluginContext } from './chunks/plugin.d.BuW-flqo.js'; | ||
| export { BaseCoverageProvider } from './coverage.js'; | ||
@@ -11,0 +11,0 @@ import { Awaitable } from '@vitest/utils'; |
+8
-8
| import * as vite from 'vite'; | ||
| import { resolveConfig as resolveConfig$1, mergeConfig } from 'vite'; | ||
| export { esbuildVersion, isCSSRequest, isFileLoadingAllowed, parseAst, parseAstAsync, rollupVersion, version as viteVersion } from 'vite'; | ||
| import { V as Vitest, a as VitestPlugin } from './chunks/cli-api.Bqj5xGy5.js'; | ||
| export { F as ForksPoolWorker, G as GitNotFoundError, b as TestsNotFoundError, T as ThreadsPoolWorker, c as TypecheckPoolWorker, d as VitestPackageInstaller, e as VmForksPoolWorker, f as VmThreadsPoolWorker, g as createDebugger, h as createMethodsRPC, i as createViteLogger, j as createVitest, k as escapeTestName, l as experimental_getRunnerTask, m as getFilePoolName, n as isFileServingAllowed, o as isValidApiRequest, r as registerConsoleShortcuts, p as resolveFsAllow, s as startVitest } from './chunks/cli-api.Bqj5xGy5.js'; | ||
| export { p as parseCLI } from './chunks/cac.Vtz91O0H.js'; | ||
| import { r as resolveConfig$2 } from './chunks/coverage.DUqi2f6q.js'; | ||
| export { B as BaseCoverageProvider, a as BaseSequencer, b as resolveApiServerConfig } from './chunks/coverage.DUqi2f6q.js'; | ||
| import { V as Vitest, a as VitestPlugin } from './chunks/cli-api.DuT9iuvY.js'; | ||
| export { F as ForksPoolWorker, G as GitNotFoundError, b as TestsNotFoundError, T as ThreadsPoolWorker, c as TypecheckPoolWorker, d as VitestPackageInstaller, e as VmForksPoolWorker, f as VmThreadsPoolWorker, g as createDebugger, h as createMethodsRPC, i as createViteLogger, j as createVitest, k as escapeTestName, l as experimental_getRunnerTask, m as getFilePoolName, n as isFileServingAllowed, o as isValidApiRequest, r as registerConsoleShortcuts, p as resolveFsAllow, s as startVitest } from './chunks/cli-api.DuT9iuvY.js'; | ||
| export { p as parseCLI } from './chunks/cac.CWGDZnXT.js'; | ||
| import { r as resolveConfig$2 } from './chunks/coverage.Bri33R1t.js'; | ||
| export { B as BaseCoverageProvider, a as BaseSequencer, b as resolveApiServerConfig } from './chunks/coverage.Bri33R1t.js'; | ||
| import { slash, deepClone } from '@vitest/utils/helpers'; | ||
@@ -13,6 +13,6 @@ import { a as any } from './chunks/index.og1WyBLx.js'; | ||
| import { c as configFiles } from './chunks/constants.CPYnjOGj.js'; | ||
| export { D as DefaultReporter, a as DotReporter, G as GithubActionsReporter, H as HangingProcessReporter, J as JUnitReporter, b as JsonReporter, R as ReportersMap, T as TapFlatReporter, c as TapReporter, V as VerboseReporter } from './chunks/index.DpkD7Zj4.js'; | ||
| export { A as AgentReporter, D as DefaultReporter, a as DotReporter, G as GithubActionsReporter, H as HangingProcessReporter, J as JUnitReporter, b as JsonReporter, R as ReportersMap, T as TapFlatReporter, c as TapReporter, V as VerboseReporter } from './chunks/index.DXMFO5MJ.js'; | ||
| export { distDir, rootDir } from './path.js'; | ||
| export { generateFileHash } from '@vitest/runner/utils'; | ||
| export { B as BenchmarkReporter, a as BenchmarkReportsMap, V as VerboseBenchmarkReporter } from './chunks/index.CHsi7RlU.js'; | ||
| export { B as BenchmarkReporter, a as BenchmarkReportsMap, V as VerboseBenchmarkReporter } from './chunks/index.CEzQDJGb.js'; | ||
| import 'node:fs'; | ||
@@ -62,3 +62,3 @@ import './chunks/coverage.D_JHT54q.js'; | ||
| import 'buffer'; | ||
| import './chunks/defaults.BlJmGxXD.js'; | ||
| import './chunks/defaults.CdU2lD-q.js'; | ||
| import 'magic-string'; | ||
@@ -65,0 +65,0 @@ import '@vitest/mocker/node'; |
@@ -8,3 +8,3 @@ import { isBuiltin } from 'node:module'; | ||
| const NOW_LENGTH = Date.now().toString().length; | ||
| const REGEXP_VITEST = /* @__PURE__ */ new RegExp(`%3Fvitest=\\d{${NOW_LENGTH}}`); | ||
| const REGEXP_VITEST = new RegExp(`%3Fvitest=\\d{${NOW_LENGTH}}`); | ||
| const REGEXP_MOCK_ACTUAL = /\?mock=actual/; | ||
@@ -11,0 +11,0 @@ const resolve = (specifier, context, defaultResolve) => { |
@@ -1,2 +0,2 @@ | ||
| export { m as BaseReporter, n as BenchmarkBuiltinReporters, o as BenchmarkReporter, p as BenchmarkReportsMap, K as BuiltinReporterOptions, M as BuiltinReporters, _ as DefaultReporter, a0 as DotReporter, a2 as GithubActionsReporter, a4 as HangingProcessReporter, a6 as JUnitReporter, a7 as JsonAssertionResult, a9 as JsonReporter, aa as JsonTestResult, ab as JsonTestResults, am as ReportedHookContext, an as Reporter, ao as ReportersMap, au as TapFlatReporter, av as TapReporter, aG as TestRunEndReason, aQ as VerboseBenchmarkReporter, aR as VerboseReporter } from './chunks/reporters.d.CRDGoPlb.js'; | ||
| export { m as AgentReporter, n as BaseReporter, o as BenchmarkBuiltinReporters, p as BenchmarkReporter, q as BenchmarkReportsMap, M as BuiltinReporterOptions, N as BuiltinReporters, $ as DefaultReporter, a1 as DotReporter, a3 as GithubActionsReporter, a5 as HangingProcessReporter, a7 as JUnitReporter, a8 as JsonAssertionResult, aa as JsonReporter, ab as JsonTestResult, ac as JsonTestResults, an as ReportedHookContext, ao as Reporter, ap as ReportersMap, av as TapFlatReporter, aw as TapReporter, aH as TestRunEndReason, aR as VerboseBenchmarkReporter, aS as VerboseReporter } from './chunks/reporters.d.DVUYHHhe.js'; | ||
| import '@vitest/runner'; | ||
@@ -3,0 +3,0 @@ import '@vitest/utils'; |
@@ -1,6 +0,4 @@ | ||
| export { D as DefaultReporter, a as DotReporter, G as GithubActionsReporter, H as HangingProcessReporter, J as JUnitReporter, b as JsonReporter, R as ReportersMap, T as TapFlatReporter, c as TapReporter, V as VerboseReporter } from './chunks/index.DpkD7Zj4.js'; | ||
| export { B as BenchmarkReporter, a as BenchmarkReportsMap, V as VerboseBenchmarkReporter } from './chunks/index.CHsi7RlU.js'; | ||
| export { A as AgentReporter, D as DefaultReporter, a as DotReporter, G as GithubActionsReporter, H as HangingProcessReporter, J as JUnitReporter, b as JsonReporter, R as ReportersMap, T as TapFlatReporter, c as TapReporter, V as VerboseReporter } from './chunks/index.DXMFO5MJ.js'; | ||
| export { B as BenchmarkReporter, a as BenchmarkReportsMap, V as VerboseBenchmarkReporter } from './chunks/index.CEzQDJGb.js'; | ||
| import 'node:fs'; | ||
| import 'node:fs/promises'; | ||
| import 'pathe'; | ||
| import 'node:perf_hooks'; | ||
@@ -10,2 +8,3 @@ import '@vitest/runner/utils'; | ||
| import '@vitest/utils/source-map'; | ||
| import 'pathe'; | ||
| import 'tinyrainbow'; | ||
@@ -25,4 +24,5 @@ import './chunks/env.D4Lgay0q.js'; | ||
| import '@vitest/utils/offset'; | ||
| import 'node:fs/promises'; | ||
| import 'node:module'; | ||
| console.warn("Importing from \"vitest/reporters\" is deprecated since Vitest 4.1. Please use \"vitest/node\" instead."); |
+1
-1
@@ -1,2 +0,2 @@ | ||
| export { N as NodeBenchmarkRunner, T as VitestTestRunner } from './chunks/test.PnxXDGpZ.js'; | ||
| export { N as NodeBenchmarkRunner, T as VitestTestRunner } from './chunks/test.CTcmp4Su.js'; | ||
| import '@vitest/runner'; | ||
@@ -3,0 +3,0 @@ import '@vitest/utils/helpers'; |
+1
-1
| import { VitestModuleEvaluator } from './module-evaluator.js'; | ||
| import { V as VITEST_VM_CONTEXT_SYMBOL, s as startVitestModuleRunner, a as VitestModuleRunner } from './chunks/startVitestModuleRunner.BdSYEN5x.js'; | ||
| import { V as VITEST_VM_CONTEXT_SYMBOL, s as startVitestModuleRunner, a as VitestModuleRunner } from './chunks/startVitestModuleRunner.C3ZR-4J3.js'; | ||
| import { g as getWorkerState } from './chunks/utils.BX5Fg8C4.js'; | ||
@@ -4,0 +4,0 @@ export { e as builtinEnvironments, p as populateGlobal } from './chunks/index.EY6TCHpo.js'; |
+7
-7
@@ -1,9 +0,9 @@ | ||
| export { r as runBaseTests, s as setupEnvironment } from './chunks/base.C98-6XAz.js'; | ||
| export { i as init } from './chunks/init.Borgldul.js'; | ||
| export { r as runBaseTests, s as setupEnvironment } from './chunks/base.DM0-RqVb.js'; | ||
| export { i as init } from './chunks/init.DICorXCo.js'; | ||
| import 'node:vm'; | ||
| import '@vitest/spy'; | ||
| import './chunks/index.5lgR0kvt.js'; | ||
| import './chunks/index.DGNSnENe.js'; | ||
| import '@vitest/expect'; | ||
| import 'node:async_hooks'; | ||
| import './chunks/setup-common.z3ZfZiWN.js'; | ||
| import './chunks/setup-common.B41N_kPE.js'; | ||
| import './chunks/coverage.D_JHT54q.js'; | ||
@@ -15,3 +15,3 @@ import '@vitest/snapshot'; | ||
| import './chunks/index.Chj8NDwU.js'; | ||
| import './chunks/test.PnxXDGpZ.js'; | ||
| import './chunks/test.CTcmp4Su.js'; | ||
| import '@vitest/runner'; | ||
@@ -40,3 +40,3 @@ import '@vitest/utils/helpers'; | ||
| import './chunks/evaluatedModules.Dg1zASAC.js'; | ||
| import './chunks/startVitestModuleRunner.BdSYEN5x.js'; | ||
| import './chunks/startVitestModuleRunner.C3ZR-4J3.js'; | ||
| import './chunks/modules.BJuCwlRJ.js'; | ||
@@ -52,3 +52,3 @@ import './path.js'; | ||
| import '@vitest/utils/constants'; | ||
| import './chunks/index.IcAjQV7n.js'; | ||
| import './chunks/index.DlDSLQD3.js'; | ||
| import 'expect-type'; | ||
@@ -55,0 +55,0 @@ import './chunks/index.EY6TCHpo.js'; |
@@ -1,9 +0,9 @@ | ||
| import { r as runBaseTests, s as setupBaseEnvironment } from '../chunks/base.C98-6XAz.js'; | ||
| import { w as workerInit } from '../chunks/init-forks.BwPkXyLk.js'; | ||
| import { r as runBaseTests, s as setupBaseEnvironment } from '../chunks/base.DM0-RqVb.js'; | ||
| import { w as workerInit } from '../chunks/init-forks.DeArv0jT.js'; | ||
| import 'node:vm'; | ||
| import '@vitest/spy'; | ||
| import '../chunks/index.5lgR0kvt.js'; | ||
| import '../chunks/index.DGNSnENe.js'; | ||
| import '@vitest/expect'; | ||
| import 'node:async_hooks'; | ||
| import '../chunks/setup-common.z3ZfZiWN.js'; | ||
| import '../chunks/setup-common.B41N_kPE.js'; | ||
| import '../chunks/coverage.D_JHT54q.js'; | ||
@@ -15,3 +15,3 @@ import '@vitest/snapshot'; | ||
| import '../chunks/index.Chj8NDwU.js'; | ||
| import '../chunks/test.PnxXDGpZ.js'; | ||
| import '../chunks/test.CTcmp4Su.js'; | ||
| import '@vitest/runner'; | ||
@@ -26,3 +26,3 @@ import '@vitest/utils/helpers'; | ||
| import '../chunks/_commonjsHelpers.D26ty3Ew.js'; | ||
| import '../chunks/init.Borgldul.js'; | ||
| import '../chunks/init.DICorXCo.js'; | ||
| import 'node:fs'; | ||
@@ -32,3 +32,3 @@ import 'node:module'; | ||
| import 'vite/module-runner'; | ||
| import '../chunks/startVitestModuleRunner.BdSYEN5x.js'; | ||
| import '../chunks/startVitestModuleRunner.C3ZR-4J3.js'; | ||
| import '../chunks/modules.BJuCwlRJ.js'; | ||
@@ -57,3 +57,3 @@ import '../path.js'; | ||
| import '@vitest/utils/constants'; | ||
| import '../chunks/index.IcAjQV7n.js'; | ||
| import '../chunks/index.DlDSLQD3.js'; | ||
| import 'expect-type'; | ||
@@ -60,0 +60,0 @@ |
@@ -8,8 +8,8 @@ import { createRequire } from 'node:module'; | ||
| import { KNOWN_ASSET_TYPES } from '@vitest/utils/constants'; | ||
| import { s as setupChaiConfig, r as resolveTestRunner, a as resolveSnapshotEnvironment, d as detectAsyncLeaks } from '../chunks/index.5lgR0kvt.js'; | ||
| import { s as setupCommonEnv, b as startCoverageInsideWorker, c as stopCoverageInsideWorker } from '../chunks/setup-common.z3ZfZiWN.js'; | ||
| import { i as index } from '../chunks/index.IcAjQV7n.js'; | ||
| import { s as setupChaiConfig, r as resolveTestRunner, a as resolveSnapshotEnvironment, d as detectAsyncLeaks } from '../chunks/index.DGNSnENe.js'; | ||
| import { s as setupCommonEnv, b as startCoverageInsideWorker, c as stopCoverageInsideWorker } from '../chunks/setup-common.B41N_kPE.js'; | ||
| import { i as index } from '../chunks/index.DlDSLQD3.js'; | ||
| import { c as closeInspector } from '../chunks/inspector.CvyFGlXm.js'; | ||
| import { g as getWorkerState } from '../chunks/utils.BX5Fg8C4.js'; | ||
| import { g as globalExpect } from '../chunks/test.PnxXDGpZ.js'; | ||
| import { g as globalExpect } from '../chunks/test.CTcmp4Su.js'; | ||
| import '@vitest/expect'; | ||
@@ -16,0 +16,0 @@ import 'node:async_hooks'; |
@@ -1,9 +0,9 @@ | ||
| import { s as setupBaseEnvironment, r as runBaseTests } from '../chunks/base.C98-6XAz.js'; | ||
| import { w as workerInit } from '../chunks/init-threads.BuSVu8Ns.js'; | ||
| import { s as setupBaseEnvironment, r as runBaseTests } from '../chunks/base.DM0-RqVb.js'; | ||
| import { w as workerInit } from '../chunks/init-threads.-2OUl4Nn.js'; | ||
| import 'node:vm'; | ||
| import '@vitest/spy'; | ||
| import '../chunks/index.5lgR0kvt.js'; | ||
| import '../chunks/index.DGNSnENe.js'; | ||
| import '@vitest/expect'; | ||
| import 'node:async_hooks'; | ||
| import '../chunks/setup-common.z3ZfZiWN.js'; | ||
| import '../chunks/setup-common.B41N_kPE.js'; | ||
| import '../chunks/coverage.D_JHT54q.js'; | ||
@@ -15,3 +15,3 @@ import '@vitest/snapshot'; | ||
| import '../chunks/index.Chj8NDwU.js'; | ||
| import '../chunks/test.PnxXDGpZ.js'; | ||
| import '../chunks/test.CTcmp4Su.js'; | ||
| import '@vitest/runner'; | ||
@@ -26,3 +26,3 @@ import '@vitest/utils/helpers'; | ||
| import '../chunks/_commonjsHelpers.D26ty3Ew.js'; | ||
| import '../chunks/init.Borgldul.js'; | ||
| import '../chunks/init.DICorXCo.js'; | ||
| import 'node:fs'; | ||
@@ -32,3 +32,3 @@ import 'node:module'; | ||
| import 'vite/module-runner'; | ||
| import '../chunks/startVitestModuleRunner.BdSYEN5x.js'; | ||
| import '../chunks/startVitestModuleRunner.C3ZR-4J3.js'; | ||
| import '../chunks/modules.BJuCwlRJ.js'; | ||
@@ -57,3 +57,3 @@ import '../path.js'; | ||
| import '@vitest/utils/constants'; | ||
| import '../chunks/index.IcAjQV7n.js'; | ||
| import '../chunks/index.DlDSLQD3.js'; | ||
| import 'expect-type'; | ||
@@ -60,0 +60,0 @@ import 'node:worker_threads'; |
@@ -1,4 +0,4 @@ | ||
| import { w as workerInit } from '../chunks/init-forks.BwPkXyLk.js'; | ||
| import { r as runVmTests, s as setupVmWorker } from '../chunks/vm.V092iA4c.js'; | ||
| import '../chunks/init.Borgldul.js'; | ||
| import { w as workerInit } from '../chunks/init-forks.DeArv0jT.js'; | ||
| import { r as runVmTests, s as setupVmWorker } from '../chunks/vm.Dh2rTtmP.js'; | ||
| import '../chunks/init.DICorXCo.js'; | ||
| import 'node:fs'; | ||
@@ -9,3 +9,3 @@ import 'node:module'; | ||
| import 'vite/module-runner'; | ||
| import '../chunks/startVitestModuleRunner.BdSYEN5x.js'; | ||
| import '../chunks/startVitestModuleRunner.C3ZR-4J3.js'; | ||
| import '@vitest/utils/helpers'; | ||
@@ -12,0 +12,0 @@ import '../chunks/modules.BJuCwlRJ.js'; |
@@ -1,5 +0,5 @@ | ||
| import { w as workerInit } from '../chunks/init-threads.BuSVu8Ns.js'; | ||
| import { s as setupVmWorker, r as runVmTests } from '../chunks/vm.V092iA4c.js'; | ||
| import { w as workerInit } from '../chunks/init-threads.-2OUl4Nn.js'; | ||
| import { s as setupVmWorker, r as runVmTests } from '../chunks/vm.Dh2rTtmP.js'; | ||
| import 'node:worker_threads'; | ||
| import '../chunks/init.Borgldul.js'; | ||
| import '../chunks/init.DICorXCo.js'; | ||
| import 'node:fs'; | ||
@@ -10,3 +10,3 @@ import 'node:module'; | ||
| import 'vite/module-runner'; | ||
| import '../chunks/startVitestModuleRunner.BdSYEN5x.js'; | ||
| import '../chunks/startVitestModuleRunner.C3ZR-4J3.js'; | ||
| import '@vitest/utils/helpers'; | ||
@@ -13,0 +13,0 @@ import '../chunks/modules.BJuCwlRJ.js'; |
+16
-16
| { | ||
| "name": "vitest", | ||
| "type": "module", | ||
| "version": "4.1.0-beta.6", | ||
| "version": "4.1.0", | ||
| "description": "Next generation testing framework powered by Vite", | ||
@@ -135,6 +135,6 @@ "author": "Anthony Fu <anthonyfu117@hotmail.com>", | ||
| "vite": "^6.0.0 || ^7.0.0 || ^8.0.0-0", | ||
| "@vitest/browser-playwright": "4.1.0-beta.6", | ||
| "@vitest/browser-webdriverio": "4.1.0-beta.6", | ||
| "@vitest/ui": "4.1.0-beta.6", | ||
| "@vitest/browser-preview": "4.1.0-beta.6" | ||
| "@vitest/browser-playwright": "4.1.0", | ||
| "@vitest/browser-webdriverio": "4.1.0", | ||
| "@vitest/browser-preview": "4.1.0", | ||
| "@vitest/ui": "4.1.0" | ||
| }, | ||
@@ -180,3 +180,3 @@ "peerDependenciesMeta": { | ||
| "picomatch": "^4.0.3", | ||
| "std-env": "^3.10.0", | ||
| "std-env": "^4.0.0-rc.1", | ||
| "tinybench": "^2.9.0", | ||
@@ -188,9 +188,9 @@ "tinyexec": "^1.0.2", | ||
| "why-is-node-running": "^2.3.0", | ||
| "@vitest/mocker": "4.1.0-beta.6", | ||
| "@vitest/expect": "4.1.0-beta.6", | ||
| "@vitest/pretty-format": "4.1.0-beta.6", | ||
| "@vitest/spy": "4.1.0-beta.6", | ||
| "@vitest/runner": "4.1.0-beta.6", | ||
| "@vitest/snapshot": "4.1.0-beta.6", | ||
| "@vitest/utils": "4.1.0-beta.6" | ||
| "@vitest/expect": "4.1.0", | ||
| "@vitest/mocker": "4.1.0", | ||
| "@vitest/runner": "4.1.0", | ||
| "@vitest/snapshot": "4.1.0", | ||
| "@vitest/pretty-format": "4.1.0", | ||
| "@vitest/spy": "4.1.0", | ||
| "@vitest/utils": "4.1.0" | ||
| }, | ||
@@ -208,3 +208,3 @@ "devDependencies": { | ||
| "@types/jsdom": "^27.0.0", | ||
| "@types/node": "^24.11.0", | ||
| "@types/node": "^24.12.0", | ||
| "@types/picomatch": "^4.0.2", | ||
@@ -218,4 +218,4 @@ "@types/prompts": "^2.4.9", | ||
| "empathic": "^2.0.0", | ||
| "flatted": "3.3.4", | ||
| "happy-dom": "^20.7.0", | ||
| "flatted": "3.4.0", | ||
| "happy-dom": "^20.8.3", | ||
| "jsdom": "^27.4.0", | ||
@@ -222,0 +222,0 @@ "local-pkg": "^1.1.2", |
| import { runInThisContext } from 'node:vm'; | ||
| import * as spyModule from '@vitest/spy'; | ||
| import { r as resolveTestRunner, a as resolveSnapshotEnvironment, d as detectAsyncLeaks, s as setupChaiConfig } from './index.5lgR0kvt.js'; | ||
| import { l as loadEnvironment, e as emitModuleRunner, a as listenForErrors } from './init.Borgldul.js'; | ||
| import { N as NativeModuleRunner } from './nativeModuleRunner.BIakptoF.js'; | ||
| import { T as Traces } from './traces.CCmnQaNT.js'; | ||
| import { V as VitestEvaluatedModules } from './evaluatedModules.Dg1zASAC.js'; | ||
| import { s as startVitestModuleRunner, c as createNodeImportMeta } from './startVitestModuleRunner.BdSYEN5x.js'; | ||
| import { performance as performance$1 } from 'node:perf_hooks'; | ||
| import { startTests, collectTests } from '@vitest/runner'; | ||
| import { s as setupCommonEnv, b as startCoverageInsideWorker, c as stopCoverageInsideWorker } from './setup-common.z3ZfZiWN.js'; | ||
| import { g as globalExpect, v as vi } from './test.PnxXDGpZ.js'; | ||
| import { c as closeInspector } from './inspector.CvyFGlXm.js'; | ||
| import { createRequire } from 'node:module'; | ||
| import timers from 'node:timers'; | ||
| import timersPromises from 'node:timers/promises'; | ||
| import util from 'node:util'; | ||
| import { KNOWN_ASSET_TYPES } from '@vitest/utils/constants'; | ||
| import { i as index } from './index.IcAjQV7n.js'; | ||
| import { g as getWorkerState, r as resetModules, p as provideWorkerState, a as getSafeWorkerState } from './utils.BX5Fg8C4.js'; | ||
// this should only be used in Node
// One-shot guard for setupGlobalEnv: ensures the per-worker patches
// (CJS require hooks, patched timer references, console spy) are
// installed at most once, even if setupGlobalEnv is called per file.
let globalSetup = false;
// Prepares the Node-side global environment for a test run: common env
// setup, expect state, CJS hooks for CSS/assets (client environments only),
// references to the unpatched timers, and the console interceptor.
// Safe to call repeatedly; the one-time work is guarded by `globalSetup`.
async function setupGlobalEnv(config, environment) {
	await setupCommonEnv(config);
	// Expose the runtime entry point to Vitest internals without making it
	// show up during enumeration of globalThis.
	Object.defineProperty(globalThis, "__vitest_index__", {
		value: index,
		enumerable: false
	});
	globalExpect.setState({ environment: environment.name });
	if (globalSetup) return;
	globalSetup = true;
	if ((environment.viteEnvironment || environment.name) === "client") {
		const _require = createRequire(import.meta.url);
		// always mock "required" `css` files, because we cannot process them
		_require.extensions[".css"] = resolveCss;
		_require.extensions[".scss"] = resolveCss;
		_require.extensions[".sass"] = resolveCss;
		_require.extensions[".less"] = resolveCss;
		// since we are using Vite, we can assume how these will be resolved
		KNOWN_ASSET_TYPES.forEach((type) => {
			_require.extensions[`.${type}`] = resolveAsset;
		});
		process.env.SSR = "";
	} else process.env.SSR = "1";
	// @ts-expect-error not typed global for patched timers
	globalThis.__vitest_required__ = {
		util,
		timers,
		timersPromises
	};
	if (!config.disableConsoleIntercept) await setupConsoleLogSpy();
}
// CJS `require.extensions` hook for CSS-like files: they cannot be
// processed in this runtime, so they are stubbed as an empty-string export.
function resolveCss(cjsModule) {
	cjsModule.exports = "";
}
// CJS `require.extensions` hook for static assets: mirrors Vite's behavior
// by exporting the asset's path as the module value.
function resolveAsset(cjsModule, url) {
	cjsModule.exports = url;
}
// Swaps the global console for Vitest's intercepting implementation so that
// test output can be captured and forwarded to reporters.
async function setupConsoleLogSpy() {
	const consoleModule = await import('./console.3WNpx0tS.js');
	globalThis.console = consoleModule.createCustomConsole();
}
// browser shouldn't call this!
// Runs (method === "run") or collects the given test files inside this
// worker. Resolves the test runner, global env, coverage and snapshot
// environment in parallel, then processes files sequentially, resetting
// module/mock state between files when isolation is enabled.
async function run(method, files, config, moduleRunner, environment, traces) {
	const workerState = getWorkerState();
	const [testRunner] = await Promise.all([
		traces.$("vitest.runtime.runner", () => resolveTestRunner(config, moduleRunner, traces)),
		traces.$("vitest.runtime.global_env", () => setupGlobalEnv(config, environment)),
		traces.$("vitest.runtime.coverage.start", () => startCoverageInsideWorker(config.coverage, moduleRunner, { isolate: config.isolate })),
		traces.$("vitest.runtime.snapshot.environment", async () => {
			if (!workerState.config.snapshotOptions.snapshotEnvironment) workerState.config.snapshotOptions.snapshotEnvironment = await resolveSnapshotEnvironment(config, moduleRunner);
		})
	]);
	// Propagate cancellation into the inspector and the runner itself.
	workerState.onCancel((reason) => {
		closeInspector(config);
		testRunner.cancel?.(reason);
	});
	workerState.durations.prepare = performance$1.now() - workerState.durations.prepare;
	await traces.$(`vitest.test.runner.${method}`, async () => {
		for (const file of files) {
			// With isolation every file starts from a clean module graph
			// and fresh mocks.
			if (config.isolate) {
				moduleRunner.mocker?.reset();
				resetModules(workerState.evaluatedModules, true);
			}
			workerState.filepath = file.filepath;
			if (method === "run") {
				const collectAsyncLeaks = config.detectAsyncLeaks ? detectAsyncLeaks(file.filepath, workerState.ctx.projectName) : void 0;
				await traces.$(`vitest.test.runner.${method}.module`, { attributes: { "code.file.path": file.filepath } }, () => startTests([file], testRunner));
				const leaks = await collectAsyncLeaks?.();
				if (leaks?.length) workerState.rpc.onAsyncLeaks(leaks);
			} else await traces.$(`vitest.test.runner.${method}.module`, { attributes: { "code.file.path": file.filepath } }, () => collectTests([file], testRunner));
			// reset after tests, because user might call `vi.setConfig` in setupFile
			vi.resetConfig();
			// mocks should not affect different files
			vi.restoreAllMocks();
		}
	});
	await traces.$("vitest.runtime.coverage.stop", () => stopCoverageInsideWorker(config.coverage, moduleRunner, { isolate: config.isolate }));
}
// Worker-lifetime singletons: the module runner and the module graph are
// shared across test files so module caches survive non-isolated runs.
let _moduleRunner;
const evaluatedModules = new VitestEvaluatedModules();
const moduleExecutionInfo = /* @__PURE__ */ new Map();
// Lazily creates the worker's module-runner singleton. Depending on
// `experimental.viteModuleRunner`, this is either the native Node runner
// (optionally paired with the native module mocker) or the Vite-based
// runner. Also patches `process.exit` so tests cannot silently kill the
// worker process.
async function startModuleRunner(options) {
	if (_moduleRunner) return _moduleRunner;
	process.exit = (code = process.exitCode || 0) => {
		throw new Error(`process.exit unexpectedly called with "${code}"`);
	};
	// Prefer the live worker state when available, fall back to the initial one.
	const state = () => getSafeWorkerState() || options.state;
	listenForErrors(state);
	if (options.state.config.experimental.viteModuleRunner === false) {
		const root = options.state.config.root;
		let mocker;
		if (options.state.config.experimental.nodeLoader !== false) {
			// this additionally imports acorn/magic-string
			const { NativeModuleMocker } = await import('./nativeModuleMocker.wQT5wU7r.js');
			mocker = new NativeModuleMocker({
				async resolveId(id, importer) {
					// TODO: use import.meta.resolve instead
					return state().rpc.resolve(id, importer, "__vitest__");
				},
				root,
				moduleDirectories: state().config.deps.moduleDirectories || ["/node_modules/"],
				traces: options.traces || new Traces({ enabled: false }),
				getCurrentTestFilepath() {
					return state().filepath;
				},
				spyModule
			});
		}
		_moduleRunner = new NativeModuleRunner(root, mocker);
		return _moduleRunner;
	}
	_moduleRunner = startVitestModuleRunner(options);
	return _moduleRunner;
}
// Environment instance created by setupBaseEnvironment, reused by runBaseTests.
let _currentEnvironment;
// Milliseconds spent setting up the environment, reported via state.durations.
let _environmentTime;
/** @experimental */
// Boots the base (non-browser) environment for this worker: installs Node
// loader hooks when the Vite module runner is disabled, evaluates the
// serialized `define` values, loads and sets up the requested environment,
// and returns an async teardown callback.
async function setupBaseEnvironment(context) {
	if (context.config.experimental.viteModuleRunner === false) {
		const { setupNodeLoaderHooks } = await import('./native.mV0-490A.js');
		await setupNodeLoaderHooks(context);
	}
	const startTime = performance.now();
	const { environment: { name: environmentName, options: environmentOptions }, rpc, config } = context;
	// we could load @vite/env, but it would take ~8ms, while this takes ~0,02ms
	if (context.config.serializedDefines) try {
		runInThisContext(`(() =>{\n${context.config.serializedDefines}})()`, {
			lineOffset: 1,
			filename: "virtual:load-defines.js"
		});
	} catch (error) {
		throw new Error(`Failed to load custom "defines": ${error.message}`);
	}
	const otel = context.traces;
	const { environment, loader } = await loadEnvironment(environmentName, config.root, rpc, otel, context.config.experimental.viteModuleRunner);
	_currentEnvironment = environment;
	const env = await otel.$("vitest.runtime.environment.setup", { attributes: {
		"vitest.environment": environment.name,
		"vitest.environment.vite_environment": environment.viteEnvironment || environment.name
	} }, () => environment.setup(globalThis, environmentOptions || config.environmentOptions || {}));
	// Record setup duration for reporting (consumed by runBaseTests).
	_environmentTime = performance.now() - startTime;
	if (config.chaiConfig) setupChaiConfig(config.chaiConfig);
	return async () => {
		await otel.$("vitest.runtime.environment.teardown", () => env.teardown(globalThis));
		await loader?.close();
	};
}
/** @experimental */
// Entry point used by pool workers to run or collect the test files listed
// in `state.ctx`. Reuses the worker-wide module graph, invalidates changed
// modules, boots the module runner and delegates to `run`.
async function runBaseTests(method, state, traces) {
	const { ctx } = state;
	state.environment = _currentEnvironment;
	state.durations.environment = _environmentTime;
	// state has new context, but we want to reuse existing ones
	state.evaluatedModules = evaluatedModules;
	state.moduleExecutionInfo = moduleExecutionInfo;
	provideWorkerState(globalThis, state);
	// Drop modules invalidated by the watcher from the shared graph.
	if (ctx.invalidates) ctx.invalidates.forEach((filepath) => {
		(state.evaluatedModules.fileToModulesMap.get(filepath) || []).forEach((module) => {
			state.evaluatedModules.invalidateModule(module);
		});
	});
	// The test files themselves are always re-evaluated.
	ctx.files.forEach((i) => {
		const filepath = i.filepath;
		(state.evaluatedModules.fileToModulesMap.get(filepath) || []).forEach((module) => {
			state.evaluatedModules.invalidateModule(module);
		});
	});
	const moduleRunner = await startModuleRunner({
		state,
		evaluatedModules: state.evaluatedModules,
		spyModule,
		createImportMeta: createNodeImportMeta,
		traces
	});
	emitModuleRunner(moduleRunner);
	await run(method, ctx.files, ctx.config, moduleRunner, _currentEnvironment, traces);
}
| export { runBaseTests as r, setupBaseEnvironment as s }; |
Sorry, the diff of this file is too big to display
Sorry, the diff of this file is too big to display
| import { existsSync, promises, readdirSync, writeFileSync } from 'node:fs'; | ||
| import module$1 from 'node:module'; | ||
| import path from 'node:path'; | ||
| import { pathToFileURL, fileURLToPath } from 'node:url'; | ||
| import { slash, shuffle, toArray, cleanUrl } from '@vitest/utils/helpers'; | ||
| import { resolve, relative, normalize } from 'pathe'; | ||
| import pm from 'picomatch'; | ||
| import { glob } from 'tinyglobby'; | ||
| import c from 'tinyrainbow'; | ||
| import { c as configDefaults, e as benchmarkConfigDefaults, a as coverageConfigDefaults } from './defaults.BlJmGxXD.js'; | ||
| import crypto from 'node:crypto'; | ||
| import { r as resolveModule } from './index.BCY_7LL2.js'; | ||
| import { mergeConfig } from 'vite'; | ||
| import { c as configFiles, d as defaultBrowserPort, a as defaultInspectPort, b as defaultPort } from './constants.CPYnjOGj.js'; | ||
| import './env.D4Lgay0q.js'; | ||
| import nodeos__default from 'node:os'; | ||
| import { isCI, provider } from 'std-env'; | ||
| import { r as resolveCoverageProviderModule } from './coverage.D_JHT54q.js'; | ||
| const hash = crypto.hash ?? ((algorithm, data, outputEncoding) => crypto.createHash(algorithm).update(data).digest(outputEncoding)); | ||
// Translates a percentage string (e.g. "50%") into an absolute worker
// count, clamped to the inclusive range [1, availableParallelism].
function getWorkersCountByPercentage(percent) {
	const maxWorkersCount = nodeos__default.availableParallelism?.() ?? nodeos__default.cpus().length;
	const requested = Math.round(Number.parseInt(percent) / 100 * maxWorkersCount);
	// maxWorkersCount is always >= 1, so the clamp order is interchangeable.
	return Math.min(maxWorkersCount, Math.max(1, requested));
}
// Default file sequencer. Supports deterministic sharding (`--shard`) via a
// stable sha1 hash of each spec's root-relative path, and a sort order that
// runs previously failed and slower files first based on cached results.
class BaseSequencer {
	// Vitest instance; provides config and the results/stats cache.
	ctx;
	constructor(ctx) {
		this.ctx = ctx;
	}
	// async so it can be extended by other sequencers
	async shard(files) {
		const { config } = this.ctx;
		const { index, count } = config.shard;
		const [shardStart, shardEnd] = this.calculateShardRange(files.length, index, count);
		return [...files].map((spec) => {
			// Hash the root-relative path so the shard assignment is stable
			// across machines with different absolute roots.
			const specPath = resolve(slash(config.root), slash(spec.moduleId))?.slice(config.root.length);
			return {
				spec,
				hash: hash("sha1", specPath, "hex")
			};
		}).sort((a, b) => a.hash < b.hash ? -1 : a.hash > b.hash ? 1 : 0).slice(shardStart, shardEnd).map(({ spec }) => spec);
	}
	// async so it can be extended by other sequencers
	async sort(files) {
		const cache = this.ctx.cache;
		return [...files].sort((a, b) => {
			// "sequence.groupOrder" is higher priority
			const groupOrderDiff = a.project.config.sequence.groupOrder - b.project.config.sequence.groupOrder;
			if (groupOrderDiff !== 0) return groupOrderDiff;
			// Projects run sequential
			if (a.project.name !== b.project.name) return a.project.name < b.project.name ? -1 : 1;
			// Isolated run first
			if (a.project.config.isolate && !b.project.config.isolate) return -1;
			if (!a.project.config.isolate && b.project.config.isolate) return 1;
			const keyA = `${a.project.name}:${relative(this.ctx.config.root, a.moduleId)}`;
			const keyB = `${b.project.name}:${relative(this.ctx.config.root, b.moduleId)}`;
			const aState = cache.getFileTestResults(keyA);
			const bState = cache.getFileTestResults(keyB);
			if (!aState || !bState) {
				const statsA = cache.getFileStats(keyA);
				const statsB = cache.getFileStats(keyB);
				// run unknown first
				if (!statsA || !statsB) return !statsA && statsB ? -1 : !statsB && statsA ? 1 : 0;
				// run larger files first
				return statsB.size - statsA.size;
			}
			// run failed first
			if (aState.failed && !bState.failed) return -1;
			if (!aState.failed && bState.failed) return 1;
			// run longer first
			return bState.duration - aState.duration;
		});
	}
	// Calculate distributed shard range [start, end] distributed equally:
	// the first `filesCount % count` shards each receive one extra file.
	calculateShardRange(filesCount, index, count) {
		const baseShardSize = Math.floor(filesCount / count);
		const remainderTestFilesCount = filesCount % count;
		if (remainderTestFilesCount >= index) {
			const shardSize = baseShardSize + 1;
			return [shardSize * (index - 1), shardSize * index];
		}
		const shardStart = remainderTestFilesCount * (baseShardSize + 1) + (index - remainderTestFilesCount - 1) * baseShardSize;
		return [shardStart, shardStart + baseShardSize];
	}
}
// Sequencer used by `sequence.shuffle`: orders files pseudo-randomly but
// deterministically, driven by the configured seed.
class RandomSequencer extends BaseSequencer {
	async sort(files) {
		const seed = this.ctx.config.sequence.seed;
		return shuffle(files, seed);
	}
}
// Resolves `path` through the module resolution algorithm first (so bare
// package specifiers work), falling back to a filesystem resolve from `root`.
// Always returns a normalized path.
function resolvePath(path, root) {
	const moduleId = /* @__PURE__ */ resolveModule(path, { paths: [root] });
	return normalize(moduleId ?? resolve(root, path));
}
// Parses the `--inspect`/`--inspect-brk` CLI value into `{ host?, port? }`.
// Accepts booleans (no info), a bare port number, "host", or "host:port";
// rejects URL-shaped values outright.
function parseInspector(inspect) {
	if (inspect === void 0 || typeof inspect === "boolean") return {};
	if (typeof inspect === "number") return { port: inspect };
	if (/https?:\//.test(inspect)) throw new Error(`Inspector host cannot be a URL. Use "host:port" instead of "${inspect}"`);
	const [host, port] = inspect.split(":");
	if (!port) return { host };
	return {
		host,
		port: Number(port) || defaultInspectPort
	};
}
/**
* Resolves the Vite dev-server `api` options (port, host, strictPort,
* middlewareMode) plus the `allowWrite`/`allowExec` permission flags.
* When the server is exposed to the network, write/exec default to `false`
* for security (inheriting from `parentApi` when provided).
*
* @deprecated Internal function
*/
function resolveApiServerConfig(options, defaultPort, parentApi, logger) {
	let api;
	if (options.ui && !options.api) api = { port: defaultPort };
	else if (options.api === true) api = { port: defaultPort };
	else if (typeof options.api === "number") api = { port: options.api };
	// An object config merges over the defaults derived above (if any).
	if (typeof options.api === "object") if (api) {
		if (options.api.port) api.port = options.api.port;
		if (options.api.strictPort) api.strictPort = options.api.strictPort;
		if (options.api.host) api.host = options.api.host;
	} else api = { ...options.api };
	if (api) {
		if (!api.port && !api.middlewareMode) api.port = defaultPort;
	} else api = { middlewareMode: true };
	// if the API server is exposed to network, disable write operations by default
	if (!api.middlewareMode && api.host && api.host !== "localhost" && api.host !== "127.0.0.1") {
		// assigned to browser
		if (parentApi) {
			if (api.allowWrite == null && api.allowExec == null) logger?.error(c.yellow(`${c.yellowBright(" WARNING ")} API server is exposed to network, disabling write and exec operations by default for security reasons. This can cause some APIs to not work as expected. Set \`browser.api.allowExec\` manually to hide this warning. See https://vitest.dev/config/browser/api for more details.`));
		}
		api.allowWrite ??= parentApi?.allowWrite ?? false;
		api.allowExec ??= parentApi?.allowExec ?? false;
	} else {
		api.allowWrite ??= parentApi?.allowWrite ?? true;
		api.allowExec ??= parentApi?.allowExec ?? true;
	}
	return api;
}
// Normalizes a worker-count option: percentage strings ("50%") are resolved
// against the machine's CPU count, everything else is coerced to a number.
function resolveInlineWorkerOption(value) {
	const isPercentage = typeof value === "string" && value.trim().endsWith("%");
	return isPercentage ? getWorkersCountByPercentage(value) : Number(value);
}
| function resolveConfig$1(vitest, options, viteConfig) { | ||
| const mode = vitest.mode; | ||
| const logger = vitest.logger; | ||
| if (options.dom) { | ||
| if (viteConfig.test?.environment != null && viteConfig.test.environment !== "happy-dom") logger.console.warn(c.yellow(`${c.inverse(c.yellow(" Vitest "))} Your config.test.environment ("${viteConfig.test.environment}") conflicts with --dom flag ("happy-dom"), ignoring "${viteConfig.test.environment}"`)); | ||
| options.environment = "happy-dom"; | ||
| } | ||
| const resolved = { | ||
| ...configDefaults, | ||
| ...options, | ||
| root: viteConfig.root, | ||
| mode | ||
| }; | ||
| if (resolved.retry && typeof resolved.retry === "object" && typeof resolved.retry.condition === "function") { | ||
| logger.console.warn(c.yellow("Warning: retry.condition function cannot be used inside a config file. Use a RegExp pattern instead, or define the function in your test file.")); | ||
| resolved.retry = { | ||
| ...resolved.retry, | ||
| condition: void 0 | ||
| }; | ||
| } | ||
| if (options.pool && typeof options.pool !== "string") { | ||
| resolved.pool = options.pool.name; | ||
| resolved.poolRunner = options.pool; | ||
| } | ||
| if ("poolOptions" in resolved) logger.deprecate("`test.poolOptions` was removed in Vitest 4. All previous `poolOptions` are now top-level options. Please, refer to the migration guide: https://vitest.dev/guide/migration#pool-rework"); | ||
| resolved.pool ??= "forks"; | ||
| resolved.project = toArray(resolved.project); | ||
| resolved.provide ??= {}; | ||
| // shallow copy tags array to avoid mutating user config | ||
| resolved.tags = [...resolved.tags || []]; | ||
| const definedTags = /* @__PURE__ */ new Set(); | ||
| resolved.tags.forEach((tag) => { | ||
| if (!tag.name || typeof tag.name !== "string") throw new Error(`Each tag defined in "test.tags" must have a "name" property, received: ${JSON.stringify(tag)}`); | ||
| if (definedTags.has(tag.name)) throw new Error(`Tag name "${tag.name}" is already defined in "test.tags". Tag names must be unique.`); | ||
| if (tag.name.match(/\s/)) throw new Error(`Tag name "${tag.name}" is invalid. Tag names cannot contain spaces.`); | ||
| if (tag.name.match(/([!()*|&])/)) throw new Error(`Tag name "${tag.name}" is invalid. Tag names cannot contain "!", "*", "&", "|", "(", or ")".`); | ||
| if (tag.name.match(/^\s*(and|or|not)\s*$/i)) throw new Error(`Tag name "${tag.name}" is invalid. Tag names cannot be a logical operator like "and", "or", "not".`); | ||
| if (typeof tag.retry === "object" && typeof tag.retry.condition === "function") throw new TypeError(`Tag "${tag.name}": retry.condition function cannot be used inside a config file. Use a RegExp pattern instead, or define the function in your test file.`); | ||
| if (tag.priority != null && (typeof tag.priority !== "number" || tag.priority < 0)) throw new TypeError(`Tag "${tag.name}": priority must be a non-negative number.`); | ||
| definedTags.add(tag.name); | ||
| }); | ||
| resolved.name = typeof options.name === "string" ? options.name : options.name?.label || ""; | ||
| resolved.color = typeof options.name !== "string" ? options.name?.color : void 0; | ||
| if (resolved.environment === "browser") throw new Error(`Looks like you set "test.environment" to "browser". To enable Browser Mode, use "test.browser.enabled" instead.`); | ||
| const inspector = resolved.inspect || resolved.inspectBrk; | ||
| resolved.inspector = { | ||
| ...resolved.inspector, | ||
| ...parseInspector(inspector), | ||
| enabled: !!inspector, | ||
| waitForDebugger: options.inspector?.waitForDebugger ?? !!resolved.inspectBrk | ||
| }; | ||
| if (viteConfig.base !== "/") resolved.base = viteConfig.base; | ||
| resolved.clearScreen = resolved.clearScreen ?? viteConfig.clearScreen ?? true; | ||
| if (options.shard) { | ||
| if (resolved.watch) throw new Error("You cannot use --shard option with enabled watch"); | ||
| const [indexString, countString] = options.shard.split("/"); | ||
| const index = Math.abs(Number.parseInt(indexString, 10)); | ||
| const count = Math.abs(Number.parseInt(countString, 10)); | ||
| if (Number.isNaN(count) || count <= 0) throw new Error("--shard <count> must be a positive number"); | ||
| if (Number.isNaN(index) || index <= 0 || index > count) throw new Error("--shard <index> must be a positive number less then <count>"); | ||
| resolved.shard = { | ||
| index, | ||
| count | ||
| }; | ||
| } | ||
| if (resolved.standalone && !resolved.watch) throw new Error(`Vitest standalone mode requires --watch`); | ||
| if (resolved.mergeReports && resolved.watch) throw new Error(`Cannot merge reports with --watch enabled`); | ||
| if (resolved.maxWorkers) resolved.maxWorkers = resolveInlineWorkerOption(resolved.maxWorkers); | ||
| if (!(options.fileParallelism ?? mode !== "benchmark")) | ||
| // ignore user config, parallelism cannot be implemented without limiting workers | ||
| resolved.maxWorkers = 1; | ||
| if (resolved.maxConcurrency === 0) { | ||
| logger.console.warn(c.yellow(`The option "maxConcurrency" cannot be set to 0. Using default value ${configDefaults.maxConcurrency} instead.`)); | ||
| resolved.maxConcurrency = configDefaults.maxConcurrency; | ||
| } | ||
| if (resolved.inspect || resolved.inspectBrk) { | ||
| if (resolved.maxWorkers !== 1) { | ||
| const inspectOption = `--inspect${resolved.inspectBrk ? "-brk" : ""}`; | ||
| throw new Error(`You cannot use ${inspectOption} without "--no-file-parallelism"`); | ||
| } | ||
| } | ||
| // apply browser CLI options only if the config already has the browser config and not disabled manually | ||
| if (vitest._cliOptions.browser && resolved.browser && (resolved.browser.enabled !== false || vitest._cliOptions.browser.enabled)) resolved.browser = mergeConfig(resolved.browser, vitest._cliOptions.browser); | ||
| resolved.browser ??= {}; | ||
| const browser = resolved.browser; | ||
| if (browser.enabled) { | ||
| const instances = browser.instances; | ||
| if (!browser.instances) browser.instances = []; | ||
| // use `chromium` by default when the preview provider is specified | ||
| // for a smoother experience. if chromium is not available, it will | ||
| // open the default browser anyway | ||
| if (!browser.instances.length && browser.provider?.name === "preview") browser.instances = [{ browser: "chromium" }]; | ||
| if (browser.name && instances?.length) { | ||
| // --browser=chromium filters configs to a single one | ||
| browser.instances = browser.instances.filter((instance) => instance.browser === browser.name); | ||
| // if `instances` were defined, but now they are empty, | ||
| // let's throw an error because the filter is invalid | ||
| if (!browser.instances.length) throw new Error([`"browser.instances" was set in the config, but the array is empty. Define at least one browser config.`, ` The "browser.name" was set to "${browser.name}" which filtered all configs (${instances.map((c) => c.browser).join(", ")}). Did you mean to use another name?`].join("")); | ||
| } | ||
| } | ||
| if (resolved.coverage.enabled && resolved.coverage.provider === "istanbul" && resolved.experimental?.viteModuleRunner === false) throw new Error(`"Istanbul" coverage provider is not compatible with "experimental.viteModuleRunner: false". Please, enable "viteModuleRunner" or switch to "v8" coverage provider.`); | ||
| if (browser.enabled && resolved.detectAsyncLeaks) logger.console.warn(c.yellow("The option \"detectAsyncLeaks\" is not supported in browser mode and will be ignored.")); | ||
| const containsChromium = hasBrowserChromium(vitest, resolved); | ||
| const hasOnlyChromium = hasOnlyBrowserChromium(vitest, resolved); | ||
| // Browser-mode "Chromium" only features: | ||
| if (browser.enabled && (!containsChromium || !hasOnlyChromium)) { | ||
| const browserConfig = ` | ||
| { | ||
| browser: { | ||
| provider: ${browser.provider?.name || "preview"}(), | ||
| instances: [ | ||
| ${(browser.instances || []).map((i) => `{ browser: '${i.browser}' }`).join(",\n ")} | ||
| ], | ||
| }, | ||
| } | ||
| `.trim(); | ||
| const preferredProvider = !browser.provider?.name || browser.provider.name === "preview" ? "playwright" : browser.provider.name; | ||
| const correctExample = ` | ||
| { | ||
| browser: { | ||
| provider: ${preferredProvider}(), | ||
| instances: [ | ||
| { browser: '${preferredProvider === "playwright" ? "chromium" : "chrome"}' } | ||
| ], | ||
| }, | ||
| } | ||
| `.trim(); | ||
| // requires all projects to be chromium | ||
| if (!hasOnlyChromium && resolved.coverage.enabled && resolved.coverage.provider === "v8") { | ||
| const coverageExample = ` | ||
| { | ||
| coverage: { | ||
| provider: 'istanbul', | ||
| }, | ||
| } | ||
| `.trim(); | ||
| throw new Error(`@vitest/coverage-v8 does not work with\n${browserConfig}\n\nUse either:\n${correctExample}\n\n...or change your coverage provider to:\n${coverageExample}\n`); | ||
| } | ||
| // ignores non-chromium browsers when there is at least one chromium project | ||
| if (!containsChromium && (resolved.inspect || resolved.inspectBrk)) { | ||
| const inspectOption = `--inspect${resolved.inspectBrk ? "-brk" : ""}`; | ||
| throw new Error(`${inspectOption} does not work with\n${browserConfig}\n\nUse either:\n${correctExample}\n\n...or disable ${inspectOption}\n`); | ||
| } | ||
| } | ||
| resolved.coverage.reporter = resolveCoverageReporters(resolved.coverage.reporter); | ||
| if (resolved.coverage.changed === void 0 && resolved.changed !== void 0) resolved.coverage.changed = resolved.changed; | ||
| if (resolved.coverage.enabled && resolved.coverage.reportsDirectory) { | ||
| const reportsDirectory = resolve(resolved.root, resolved.coverage.reportsDirectory); | ||
| if (reportsDirectory === resolved.root || reportsDirectory === process.cwd()) throw new Error(`You cannot set "coverage.reportsDirectory" as ${reportsDirectory}. Vitest needs to be able to remove this directory before test run`); | ||
| if (resolved.coverage.htmlDir) resolved.coverage.htmlDir = resolve(resolved.root, resolved.coverage.htmlDir); | ||
| // infer default htmlDir based on builtin reporter's html output location | ||
| if (!resolved.coverage.htmlDir) { | ||
| const htmlReporter = resolved.coverage.reporter.find(([name]) => name === "html" || name === "html-spa"); | ||
| if (htmlReporter) { | ||
| const [, options] = htmlReporter; | ||
| const subdir = options && typeof options === "object" && "subdir" in options && typeof options.subdir === "string" ? options.subdir : void 0; | ||
| resolved.coverage.htmlDir = resolve(reportsDirectory, subdir || "."); | ||
| } else if (resolved.coverage.reporter.find(([name]) => name === "lcov")) resolved.coverage.htmlDir = resolve(reportsDirectory, "lcov-report"); | ||
| } | ||
| } | ||
| if (resolved.coverage.enabled && resolved.coverage.provider === "custom" && resolved.coverage.customProviderModule) resolved.coverage.customProviderModule = resolvePath(resolved.coverage.customProviderModule, resolved.root); | ||
| resolved.expect ??= {}; | ||
| resolved.deps ??= {}; | ||
| resolved.deps.moduleDirectories ??= []; | ||
| resolved.deps.optimizer ??= {}; | ||
| resolved.deps.optimizer.ssr ??= {}; | ||
| resolved.deps.optimizer.ssr.enabled ??= false; | ||
| resolved.deps.optimizer.client ??= {}; | ||
| resolved.deps.optimizer.client.enabled ??= false; | ||
| resolved.deps.web ??= {}; | ||
| resolved.deps.web.transformAssets ??= true; | ||
| resolved.deps.web.transformCss ??= true; | ||
| resolved.deps.web.transformGlobPattern ??= []; | ||
| resolved.setupFiles = toArray(resolved.setupFiles || []).map((file) => resolvePath(file, resolved.root)); | ||
| resolved.globalSetup = toArray(resolved.globalSetup || []).map((file) => resolvePath(file, resolved.root)); | ||
| // Add hard-coded default coverage exclusions. These cannot be overidden by user config. | ||
| // Override original exclude array for cases where user re-uses same object in test.exclude. | ||
| resolved.coverage.exclude = [ | ||
| ...resolved.coverage.exclude, | ||
| ...resolved.setupFiles.map((file) => `${resolved.coverage.allowExternal ? "**/" : ""}${relative(resolved.root, file)}`), | ||
| ...resolved.include, | ||
| resolved.config && slash(resolved.config), | ||
| ...configFiles, | ||
| "**/virtual:*", | ||
| "**/__x00__*", | ||
| "**/node_modules/**" | ||
| ].filter((pattern) => typeof pattern === "string"); | ||
| resolved.forceRerunTriggers = [...resolved.forceRerunTriggers, ...resolved.setupFiles]; | ||
| if (resolved.cliExclude) resolved.exclude.push(...resolved.cliExclude); | ||
| if (resolved.runner) resolved.runner = resolvePath(resolved.runner, resolved.root); | ||
| resolved.attachmentsDir = resolve(resolved.root, resolved.attachmentsDir ?? ".vitest-attachments"); | ||
| if (resolved.snapshotEnvironment) resolved.snapshotEnvironment = resolvePath(resolved.snapshotEnvironment, resolved.root); | ||
| resolved.testNamePattern = resolved.testNamePattern ? resolved.testNamePattern instanceof RegExp ? resolved.testNamePattern : new RegExp(resolved.testNamePattern) : void 0; | ||
| if (resolved.snapshotFormat && "plugins" in resolved.snapshotFormat) { | ||
| resolved.snapshotFormat.plugins = []; | ||
| // TODO: support it via separate config (like DiffOptions) or via `Function.toString()` | ||
| if (typeof resolved.snapshotFormat.compareKeys === "function") throw new TypeError(`"snapshotFormat.compareKeys" function is not supported.`); | ||
| } | ||
| const UPDATE_SNAPSHOT = resolved.update || process.env.UPDATE_SNAPSHOT; | ||
| resolved.snapshotOptions = { | ||
| expand: resolved.expandSnapshotDiff ?? false, | ||
| snapshotFormat: resolved.snapshotFormat || {}, | ||
| updateSnapshot: UPDATE_SNAPSHOT === "all" || UPDATE_SNAPSHOT === "new" || UPDATE_SNAPSHOT === "none" ? UPDATE_SNAPSHOT : isCI && !UPDATE_SNAPSHOT ? "none" : UPDATE_SNAPSHOT ? "all" : "new", | ||
| resolveSnapshotPath: options.resolveSnapshotPath, | ||
| snapshotEnvironment: null | ||
| }; | ||
| resolved.snapshotSerializers ??= []; | ||
| resolved.snapshotSerializers = resolved.snapshotSerializers.map((file) => resolvePath(file, resolved.root)); | ||
| resolved.forceRerunTriggers.push(...resolved.snapshotSerializers); | ||
| if (options.resolveSnapshotPath) delete resolved.resolveSnapshotPath; | ||
| resolved.execArgv ??= []; | ||
| resolved.pool ??= "threads"; | ||
| if (resolved.pool === "vmForks" || resolved.pool === "vmThreads" || resolved.pool === "typescript") resolved.isolate = false; | ||
| if (process.env.VITEST_MAX_WORKERS) resolved.maxWorkers = Number.parseInt(process.env.VITEST_MAX_WORKERS); | ||
| if (mode === "benchmark") { | ||
| resolved.benchmark = { | ||
| ...benchmarkConfigDefaults, | ||
| ...resolved.benchmark | ||
| }; | ||
| // override test config | ||
| resolved.coverage.enabled = false; | ||
| resolved.typecheck.enabled = false; | ||
| resolved.include = resolved.benchmark.include; | ||
| resolved.exclude = resolved.benchmark.exclude; | ||
| resolved.includeSource = resolved.benchmark.includeSource; | ||
| const reporters = Array.from(new Set([...toArray(resolved.benchmark.reporters), ...toArray(options.reporter)])).filter(Boolean); | ||
| if (reporters.length) resolved.benchmark.reporters = reporters; | ||
| else resolved.benchmark.reporters = ["default"]; | ||
| if (options.outputFile) resolved.benchmark.outputFile = options.outputFile; | ||
| // --compare from cli | ||
| if (options.compare) resolved.benchmark.compare = options.compare; | ||
| if (options.outputJson) resolved.benchmark.outputJson = options.outputJson; | ||
| } | ||
| if (typeof resolved.diff === "string") { | ||
| resolved.diff = resolvePath(resolved.diff, resolved.root); | ||
| resolved.forceRerunTriggers.push(resolved.diff); | ||
| } | ||
| resolved.api = { | ||
| ...resolveApiServerConfig(options, defaultPort), | ||
| token: crypto.randomUUID() | ||
| }; | ||
| if (options.related) resolved.related = toArray(options.related).map((file) => resolve(resolved.root, file)); | ||
| /* | ||
| * Reporters can be defined in many different ways: | ||
| * { reporter: 'json' } | ||
| * { reporter: { onFinish() { method() } } } | ||
| * { reporter: ['json', { onFinish() { method() } }] } | ||
| * { reporter: [[ 'json' ]] } | ||
| * { reporter: [[ 'json' ], 'html'] } | ||
| * { reporter: [[ 'json', { outputFile: 'test.json' } ], 'html'] } | ||
| */ | ||
| if (options.reporters) if (!Array.isArray(options.reporters)) | ||
| // Reporter name, e.g. { reporters: 'json' } | ||
| if (typeof options.reporters === "string") resolved.reporters = [[options.reporters, {}]]; | ||
| else resolved.reporters = [options.reporters]; | ||
| else { | ||
| resolved.reporters = []; | ||
| for (const reporter of options.reporters) if (Array.isArray(reporter)) | ||
| // Reporter with options, e.g. { reporters: [ [ 'json', { outputFile: 'test.json' } ] ] } | ||
| resolved.reporters.push([reporter[0], reporter[1] || {}]); | ||
| else if (typeof reporter === "string") | ||
| // Reporter name in array, e.g. { reporters: ["html", "json"]} | ||
| resolved.reporters.push([reporter, {}]); | ||
| else | ||
| // Inline reporter, e.g. { reporter: [{ onFinish() { method() } }] } | ||
| resolved.reporters.push(reporter); | ||
| } | ||
| if (mode !== "benchmark") { | ||
| // @ts-expect-error "reporter" is from CLI, should be absolute to the running directory | ||
| // it is passed down as "vitest --reporter ../reporter.js" | ||
| const reportersFromCLI = resolved.reporter; | ||
| const cliReporters = toArray(reportersFromCLI || []).map((reporter) => { | ||
| // ./reporter.js || ../reporter.js, but not .reporters/reporter.js | ||
| if (/^\.\.?\//.test(reporter)) return resolve(process.cwd(), reporter); | ||
| return reporter; | ||
| }); | ||
| if (cliReporters.length) { | ||
| // When CLI reporters are specified, preserve options from config file | ||
| const configReportersMap = /* @__PURE__ */ new Map(); | ||
| // Build a map of reporter names to their options from the config | ||
| for (const reporter of resolved.reporters) if (Array.isArray(reporter)) { | ||
| const [reporterName, reporterOptions] = reporter; | ||
| if (typeof reporterName === "string") configReportersMap.set(reporterName, reporterOptions); | ||
| } | ||
| resolved.reporters = Array.from(new Set(toArray(cliReporters))).filter(Boolean).map((reporter) => [reporter, configReportersMap.get(reporter) || {}]); | ||
| } | ||
| } | ||
| if (!resolved.reporters.length) { | ||
| resolved.reporters.push(["default", {}]); | ||
| // also enable github-actions reporter as a default | ||
| if (process.env.GITHUB_ACTIONS === "true") resolved.reporters.push(["github-actions", {}]); | ||
| } | ||
| if (resolved.changed) resolved.passWithNoTests ??= true; | ||
| resolved.css ??= {}; | ||
| if (typeof resolved.css === "object") { | ||
| resolved.css.modules ??= {}; | ||
| resolved.css.modules.classNameStrategy ??= "stable"; | ||
| } | ||
| if (resolved.cache !== false) { | ||
| if (resolved.cache && typeof resolved.cache.dir === "string") vitest.logger.deprecate(`"cache.dir" is deprecated, use Vite's "cacheDir" instead if you want to change the cache director. Note caches will be written to "cacheDir\/vitest"`); | ||
| resolved.cache = { dir: viteConfig.cacheDir }; | ||
| } | ||
| resolved.sequence ??= {}; | ||
| if (resolved.sequence.shuffle && typeof resolved.sequence.shuffle === "object") { | ||
| const { files, tests } = resolved.sequence.shuffle; | ||
| resolved.sequence.sequencer ??= files ? RandomSequencer : BaseSequencer; | ||
| resolved.sequence.shuffle = tests; | ||
| } | ||
| if (!resolved.sequence?.sequencer) | ||
| // CLI flag has higher priority | ||
| resolved.sequence.sequencer = resolved.sequence.shuffle ? RandomSequencer : BaseSequencer; | ||
| resolved.sequence.groupOrder ??= 0; | ||
| resolved.sequence.hooks ??= "stack"; | ||
| // Set seed if either files or tests are shuffled | ||
| if (resolved.sequence.sequencer === RandomSequencer || resolved.sequence.shuffle) resolved.sequence.seed ??= Date.now(); | ||
| resolved.typecheck = { | ||
| ...configDefaults.typecheck, | ||
| ...resolved.typecheck | ||
| }; | ||
| resolved.typecheck ??= {}; | ||
| resolved.typecheck.enabled ??= false; | ||
| if (resolved.typecheck.enabled) logger.console.warn(c.yellow("Testing types with tsc and vue-tsc is an experimental feature.\nBreaking changes might not follow SemVer, please pin Vitest's version when using it.")); | ||
| resolved.browser.enabled ??= false; | ||
| resolved.browser.headless ??= isCI; | ||
| if (resolved.browser.isolate) logger.console.warn(c.yellow("`browser.isolate` is deprecated. Use top-level `isolate` instead.")); | ||
| resolved.browser.isolate ??= resolved.isolate ?? true; | ||
| resolved.browser.fileParallelism ??= options.fileParallelism ?? mode !== "benchmark"; | ||
| // disable in headless mode by default, and if CI is detected | ||
| resolved.browser.ui ??= resolved.browser.headless === true ? false : !isCI; | ||
| resolved.browser.commands ??= {}; | ||
| resolved.browser.detailsPanelPosition ??= "right"; | ||
| if (resolved.browser.screenshotDirectory) resolved.browser.screenshotDirectory = resolve(resolved.root, resolved.browser.screenshotDirectory); | ||
| if (resolved.inspector.enabled) resolved.browser.trackUnhandledErrors ??= false; | ||
| resolved.browser.viewport ??= {}; | ||
| resolved.browser.viewport.width ??= 414; | ||
| resolved.browser.viewport.height ??= 896; | ||
| resolved.browser.locators ??= {}; | ||
| resolved.browser.locators.testIdAttribute ??= "data-testid"; | ||
| if (typeof resolved.browser.provider === "string") { | ||
| const source = `@vitest/browser-${resolved.browser.provider}`; | ||
| throw new TypeError(`The \`browser.provider\` configuration was changed to accept a factory instead of a string. Add an import of "${resolved.browser.provider}" from "${source}" instead. See: https://vitest.dev/config/browser/provider`); | ||
| } | ||
| const isPreview = resolved.browser.provider?.name === "preview"; | ||
| if (!isPreview && resolved.browser.enabled && provider === "stackblitz") throw new Error(`stackblitz environment does not support the ${resolved.browser.provider?.name} provider. Please, use "@vitest/browser-preview" instead.`); | ||
| if (isPreview && resolved.browser.screenshotFailures === true) { | ||
| console.warn(c.yellow([ | ||
| `Browser provider "preview" doesn't support screenshots, `, | ||
| `so "browser.screenshotFailures" option is forcefully disabled. `, | ||
| `Set "browser.screenshotFailures" to false or remove it from the config to suppress this warning.` | ||
| ].join(""))); | ||
| resolved.browser.screenshotFailures = false; | ||
| } else resolved.browser.screenshotFailures ??= !isPreview && !resolved.browser.ui; | ||
| if (resolved.browser.provider && resolved.browser.provider.options == null) resolved.browser.provider.options = {}; | ||
| resolved.browser.api = resolveApiServerConfig(resolved.browser, defaultBrowserPort, resolved.api, logger) || { port: defaultBrowserPort }; | ||
| // enable includeTaskLocation by default in UI mode | ||
| if (resolved.browser.enabled) { | ||
| if (resolved.browser.ui) resolved.includeTaskLocation ??= true; | ||
| } else if (resolved.ui) resolved.includeTaskLocation ??= true; | ||
| if (typeof resolved.browser.trace === "string" || !resolved.browser.trace) resolved.browser.trace = { mode: resolved.browser.trace || "off" }; | ||
| if (resolved.browser.trace.tracesDir != null) resolved.browser.trace.tracesDir = resolvePath(resolved.browser.trace.tracesDir, resolved.root); | ||
| if (toArray(resolved.reporters).some((reporter) => { | ||
| if (Array.isArray(reporter)) return reporter[0] === "html"; | ||
| return false; | ||
| })) resolved.includeTaskLocation ??= true; | ||
| resolved.server ??= {}; | ||
| resolved.server.deps ??= {}; | ||
| if (resolved.server.debug?.dump || process.env.VITEST_DEBUG_DUMP) { | ||
| const userFolder = resolved.server.debug?.dump || process.env.VITEST_DEBUG_DUMP; | ||
| resolved.dumpDir = resolve(resolved.root, typeof userFolder === "string" && userFolder !== "true" ? userFolder : ".vitest-dump", resolved.name || "root"); | ||
| } | ||
| resolved.testTimeout ??= resolved.browser.enabled ? 15e3 : 5e3; | ||
| resolved.hookTimeout ??= resolved.browser.enabled ? 3e4 : 1e4; | ||
| resolved.experimental ??= {}; | ||
| if (resolved.experimental.openTelemetry?.sdkPath) { | ||
| const sdkPath = resolve(resolved.root, resolved.experimental.openTelemetry.sdkPath); | ||
| resolved.experimental.openTelemetry.sdkPath = pathToFileURL(sdkPath).toString(); | ||
| } | ||
| if (resolved.experimental.openTelemetry?.browserSdkPath) { | ||
| const browserSdkPath = resolve(resolved.root, resolved.experimental.openTelemetry.browserSdkPath); | ||
| resolved.experimental.openTelemetry.browserSdkPath = browserSdkPath; | ||
| } | ||
| if (resolved.experimental.fsModuleCachePath) resolved.experimental.fsModuleCachePath = resolve(resolved.root, resolved.experimental.fsModuleCachePath); | ||
| resolved.experimental.importDurations ??= {}; | ||
| resolved.experimental.importDurations.print ??= false; | ||
| resolved.experimental.importDurations.failOnDanger ??= false; | ||
| if (resolved.experimental.importDurations.limit == null) { | ||
| const shouldCollect = resolved.experimental.importDurations.print || resolved.experimental.importDurations.failOnDanger || resolved.ui; | ||
| resolved.experimental.importDurations.limit = shouldCollect ? 10 : 0; | ||
| } | ||
| resolved.experimental.importDurations.thresholds ??= {}; | ||
| resolved.experimental.importDurations.thresholds.warn ??= 100; | ||
| resolved.experimental.importDurations.thresholds.danger ??= 500; | ||
| return resolved; | ||
| } | ||
/**
 * Reports whether browser mode is enabled in the given resolved config.
 * Returns a plain boolean even when `browser` or `enabled` is missing.
 */
function isBrowserEnabled(config) {
	return config.browser?.enabled ? true : false;
}
/**
 * Normalizes the `coverage.reporter` option into a list of
 * `[name, options]` tuples, accepting a bare name, a list of names,
 * or a list of `[name, options]` pairs (options default to `{}`).
 */
function resolveCoverageReporters(configReporters) {
	// Single reporter, e.g. { reporter: "html" }
	if (!Array.isArray(configReporters)) return [[configReporters, {}]];
	// Mixed list, e.g. { reporter: [["html", { skipEmpty: true }], "json"] }
	return configReporters.map((entry) => Array.isArray(entry) ? [entry[0], entry[1] || {}] : [entry, {}]);
}
/**
 * True when the given browser name is Chromium-based for the provider:
 * playwright calls it "chromium", other providers use "chrome"/"edge".
 */
function isChromiumName(provider, name) {
	return provider === "playwright"
		? name === "chromium"
		: name === "chrome" || name === "edge";
}
/**
 * True when at least one active (non-filtered) browser instance of this
 * project runs on a Chromium-based browser. Returns false when browser
 * mode is off or the "preview" provider is used.
 */
function hasBrowserChromium(vitest, config) {
	const { browser } = config;
	// Browser mode must be active with a real (non-preview) provider
	if (!browser?.provider || browser.provider.name === "preview" || !browser.enabled) return false;
	// Legacy single-browser configuration
	if (browser.name) return isChromiumName(browser.provider.name, browser.name);
	const instances = browser.instances;
	if (!instances) return false;
	return instances.some((instance) => {
		const projectName = instance.name || (config.name ? `${config.name} (${instance.browser})` : instance.browser);
		// browser config is filtered out
		if (!vitest.matchesProjectFilter(projectName)) return false;
		return isChromiumName(browser.provider.name, instance.browser);
	});
}
/**
 * True when every active (non-filtered) browser instance of this project
 * runs on a Chromium-based browser; filtered-out instances are ignored.
 * Returns false when browser mode is off or the "preview" provider is used.
 */
function hasOnlyBrowserChromium(vitest, config) {
	const { browser } = config;
	// Browser mode must be active with a real (non-preview) provider
	if (!browser?.provider || browser.provider.name === "preview" || !browser.enabled) return false;
	// Legacy single-browser configuration
	if (browser.name) return isChromiumName(browser.provider.name, browser.name);
	const instances = browser.instances;
	if (!instances) return false;
	return instances.every((instance) => {
		const projectName = instance.name || (config.name ? `${config.name} (${instance.browser})` : instance.browser);
		// browser config is filtered out — does not count against "only chromium"
		if (!vitest.matchesProjectFilter(projectName)) return true;
		return isChromiumName(browser.provider.name, instance.browser);
	});
}
// Coverage metrics that thresholds are checked against.
const THRESHOLD_KEYS = [
	"lines",
	"functions",
	"statements",
	"branches"
];
// Name of the catch-all threshold set applying to all files (vs. glob-scoped sets).
const GLOBAL_THRESHOLDS_KEY = "global";
// Map key for coverage collected from a run without an explicit project name.
const DEFAULT_PROJECT = Symbol.for("default-project");
// Monotonic counter used to generate unique temporary coverage file names.
let uniqueId = 0;
/**
 * Loads the configured coverage provider module and returns its provider
 * instance, or `null` when no provider module could be resolved.
 */
async function getCoverageProvider(options, loader) {
	const providerModule = await resolveCoverageProviderModule(options, loader);
	return providerModule ? providerModule.getProvider() : null;
}
/**
 * Shared base class for Vitest coverage providers (e.g. the v8 and
 * istanbul provider packages). It resolves coverage options, tracks
 * per-suite coverage results in temporary JSON files, merges and reads
 * them back for reporting, and checks/updates coverage thresholds.
 * Provider packages override the stub methods (`createCoverageMap`,
 * `generateReports`, `parseConfigModule`).
 */
class BaseCoverageProvider {
	ctx;
	name;
	version;
	options;
	// Memoizes isIncluded() verdicts per cleaned filename.
	globCache = /* @__PURE__ */ new Map();
	// Marker comment appended by threshold auto-update; stripped before writing the config file.
	autoUpdateMarker = "\n// __VITEST_COVERAGE_MARKER__";
	// projectName -> environment -> joined test filenames -> temp coverage JSON path.
	coverageFiles = /* @__PURE__ */ new Map();
	// In-flight writeFile promises for temp coverage files.
	pendingPromises = [];
	coverageFilesDirectory;
	roots = [];
	// When set, restricts coverage to these files (--changed / related runs).
	changedFiles;
	// Wires the provider to the Vitest context and resolves the effective coverage options.
	_initialize(ctx) {
		this.ctx = ctx;
		// Mismatched vitest/@vitest/coverage-* versions are unsupported — warn loudly.
		if (ctx.version !== this.version) ctx.logger.warn(c.yellow(`Loaded ${c.inverse(c.yellow(` vitest@${ctx.version} `))} and ${c.inverse(c.yellow(` @vitest/coverage-${this.name}@${this.version} `))}.
Running mixed versions is not supported and may lead into bugs
Update your dependencies and make sure the versions match.`));
		const config = ctx._coverageOptions;
		this.options = {
			...coverageConfigDefaults,
			...config,
			provider: this.name,
			reportsDirectory: resolve(ctx.config.root, config.reportsDirectory || coverageConfigDefaults.reportsDirectory),
			reporter: resolveCoverageReporters(config.reporter || coverageConfigDefaults.reporter),
			// `thresholds: { 100: true }` is shorthand for 100% on every metric.
			thresholds: config.thresholds && {
				...config.thresholds,
				lines: config.thresholds["100"] ? 100 : config.thresholds.lines,
				branches: config.thresholds["100"] ? 100 : config.thresholds.branches,
				functions: config.thresholds["100"] ? 100 : config.thresholds.functions,
				statements: config.thresholds["100"] ? 100 : config.thresholds.statements
			}
		};
		// Shards get distinct temp directories so parallel shards don't clobber each other.
		const shard = this.ctx.config.shard;
		const tempDirectory = `.tmp${shard ? `-${shard.index}-${shard.count}` : ""}`;
		this.coverageFilesDirectory = resolve(this.options.reportsDirectory, tempDirectory);
		// If --project filter is set pick only roots of resolved projects
		this.roots = ctx.config.project?.length ? [...new Set(ctx.projects.map((project) => project.config.root))] : [ctx.config.root];
	}
	/**
	* Check if file matches `coverage.include` but not `coverage.exclude`
	*/
	isIncluded(_filename, root) {
		const roots = root ? [root] : this.roots;
		const filename = slash(cleanUrl(_filename));
		const cacheHit = this.globCache.get(filename);
		if (cacheHit !== void 0) return cacheHit;
		// File outside project root with default allowExternal
		if (this.options.allowExternal === false && roots.every((root) => !filename.startsWith(root))) {
			this.globCache.set(filename, false);
			return false;
		}
		// By default `coverage.include` matches all files, except "coverage.exclude"
		const glob = this.options.include || "**";
		let included = pm.isMatch(filename, glob, {
			contains: true,
			dot: true,
			ignore: this.options.exclude
		});
		// In --changed/related mode, only changed files count as included.
		if (included && this.changedFiles) included = this.changedFiles.includes(filename);
		this.globCache.set(filename, included);
		return included;
	}
	// Globs `include` under `root` for files not covered by any test, honoring
	// exclude patterns and the changed-files filter.
	async getUntestedFilesByRoot(testedFiles, include, root) {
		let includedFiles = await glob(include, {
			cwd: root,
			ignore: [...this.options.exclude, ...testedFiles.map((file) => slash(file))],
			absolute: true,
			dot: true,
			onlyFiles: true
		});
		// Run again through picomatch as tinyglobby's exclude pattern is different ({ "exclude": ["math"] } should ignore "src/math.ts")
		includedFiles = includedFiles.filter((file) => this.isIncluded(file, root));
		if (this.changedFiles) includedFiles = this.changedFiles.filter((file) => includedFiles.includes(file));
		return includedFiles.map((file) => slash(path.resolve(root, file)));
	}
	// Collects untested-but-included files across all project roots.
	async getUntestedFiles(testedFiles) {
		if (this.options.include == null) return [];
		const rootMapper = this.getUntestedFilesByRoot.bind(this, testedFiles, this.options.include);
		return (await Promise.all(this.roots.map(rootMapper))).flatMap((files) => files);
	}
	// Stub — concrete providers must override.
	createCoverageMap() {
		throw new Error("BaseReporter's createCoverageMap was not overwritten");
	}
	// Stub — concrete providers must override.
	async generateReports(_, __) {
		throw new Error("BaseReporter's generateReports was not overwritten");
	}
	// Stub — concrete providers must override (used by threshold auto-update).
	async parseConfigModule(_) {
		throw new Error("BaseReporter's parseConfigModule was not overwritten");
	}
	resolveOptions() {
		return this.options;
	}
	// Removes previous reports (unless `clean` is false) and (re)creates the
	// temp directory used for per-suite coverage files.
	async clean(clean = true) {
		if (clean && existsSync(this.options.reportsDirectory)) await promises.rm(this.options.reportsDirectory, {
			recursive: true,
			force: true,
			maxRetries: 10
		});
		if (existsSync(this.coverageFilesDirectory)) await promises.rm(this.coverageFilesDirectory, {
			recursive: true,
			force: true,
			maxRetries: 10
		});
		await promises.mkdir(this.coverageFilesDirectory, { recursive: true });
		this.coverageFiles = /* @__PURE__ */ new Map();
		this.pendingPromises = [];
	}
	// Persists a test suite's raw coverage to a uniquely-named temp JSON file,
	// indexed by project, environment and test-file set.
	onAfterSuiteRun({ coverage, environment, projectName, testFiles }) {
		if (!coverage) return;
		let entry = this.coverageFiles.get(projectName || DEFAULT_PROJECT);
		if (!entry) {
			entry = {};
			this.coverageFiles.set(projectName || DEFAULT_PROJECT, entry);
		}
		const testFilenames = testFiles.join();
		const filename = resolve(this.coverageFilesDirectory, `coverage-${uniqueId++}.json`);
		entry[environment] ??= {};
		// If there's a result from previous run, overwrite it
		entry[environment][testFilenames] = filename;
		const promise = promises.writeFile(filename, JSON.stringify(coverage), "utf-8");
		this.pendingPromises.push(promise);
	}
	// Waits for pending writes, then streams each temp coverage file back through
	// `onFileRead` (chunked by processingConcurrency), calling `onFinished` once
	// per project/environment pair.
	async readCoverageFiles({ onFileRead, onFinished, onDebug }) {
		let index = 0;
		const total = this.pendingPromises.length;
		await Promise.all(this.pendingPromises);
		this.pendingPromises = [];
		for (const [projectName, coveragePerProject] of this.coverageFiles.entries()) for (const [environment, coverageByTestfiles] of Object.entries(coveragePerProject)) {
			const filenames = Object.values(coverageByTestfiles);
			const project = this.ctx.getProjectByName(projectName);
			for (const chunk of this.toSlices(filenames, this.options.processingConcurrency)) {
				if (onDebug.enabled) {
					index += chunk.length;
					onDebug(`Reading coverage results ${index}/${total}`);
				}
				await Promise.all(chunk.map(async (filename) => {
					const contents = await promises.readFile(filename, "utf-8");
					onFileRead(JSON.parse(contents));
				}));
			}
			await onFinished(project, environment);
		}
	}
	// Drops in-memory bookkeeping and deletes the temp directory after a run.
	async cleanAfterRun() {
		this.coverageFiles = /* @__PURE__ */ new Map();
		await promises.rm(this.coverageFilesDirectory, { recursive: true });
		// Remove empty reports directory, e.g. when only text-reporter is used
		if (readdirSync(this.options.reportsDirectory).length === 0) await promises.rm(this.options.reportsDirectory, { recursive: true });
	}
	// Resolves the changed-files filter (from `coverage.changed` via git, or from
	// the top-level --changed run's related files) before a test run starts.
	async onTestRunStart() {
		if (this.options.changed) {
			const { VitestGit } = await import('./git.Bm2pzPAa.js');
			this.changedFiles = await new VitestGit(this.ctx.config.root).findChangedFiles({ changedSince: this.options.changed }) ?? void 0;
		} else if (this.ctx.config.changed) this.changedFiles = this.ctx.config.related;
		// Cached include/exclude verdicts are stale once a changed-files filter applies.
		if (this.changedFiles) this.globCache.clear();
	}
	async onTestFailure() {
		if (!this.options.reportOnFailure) await this.cleanAfterRun();
	}
	// Generates reports, then cleans temp files unless watch mode keeps them
	// around (cleanOnRerun disabled).
	async reportCoverage(coverageMap, { allTestsRun }) {
		await this.generateReports(coverageMap || this.createCoverageMap(), allTestsRun);
		if (!(!this.options.cleanOnRerun && this.ctx.config.watch)) await this.cleanAfterRun();
	}
	// Checks thresholds and, when autoUpdate is enabled and the whole suite ran,
	// writes bumped thresholds back to the user's configuration file.
	async reportThresholds(coverageMap, allTestsRun) {
		const resolvedThresholds = this.resolveThresholds(coverageMap);
		this.checkThresholds(resolvedThresholds);
		if (this.options.thresholds?.autoUpdate && allTestsRun) {
			if (!this.ctx.vite.config.configFile) throw new Error("Missing configurationFile. The \"coverage.thresholds.autoUpdate\" can only be enabled when configuration file is used.");
			const configFilePath = this.ctx.vite.config.configFile;
			const configModule = await this.parseConfigModule(configFilePath);
			await this.updateThresholds({
				thresholds: resolvedThresholds,
				configurationFile: configModule,
				onUpdate: () => writeFileSync(configFilePath, configModule.generate().code.replace(this.autoUpdateMarker, ""), "utf-8")
			});
		}
	}
	/**
	* Constructs collected coverage and users' threshold options into separate sets
	* where each threshold set holds their own coverage maps. Threshold set is either
	* for specific files defined by glob pattern or global for all other files.
	*/
	resolveThresholds(coverageMap) {
		const resolvedThresholds = [];
		const files = coverageMap.files();
		const globalCoverageMap = this.createCoverageMap();
		for (const key of Object.keys(this.options.thresholds)) {
			// Any key that isn't a known option name is treated as a glob pattern.
			if (key === "perFile" || key === "autoUpdate" || key === "100" || THRESHOLD_KEYS.includes(key)) continue;
			const glob = key;
			const globThresholds = resolveGlobThresholds(this.options.thresholds[glob]);
			const globCoverageMap = this.createCoverageMap();
			const matcher = pm(glob);
			const matchingFiles = files.filter((file) => matcher(relative(this.ctx.config.root, file)));
			for (const file of matchingFiles) {
				const fileCoverage = coverageMap.fileCoverageFor(file);
				globCoverageMap.addFileCoverage(fileCoverage);
			}
			resolvedThresholds.push({
				name: glob,
				coverageMap: globCoverageMap,
				thresholds: globThresholds
			});
		}
		// Global threshold is for all files, even if they are included by glob patterns
		for (const file of files) {
			const fileCoverage = coverageMap.fileCoverageFor(file);
			globalCoverageMap.addFileCoverage(fileCoverage);
		}
		resolvedThresholds.unshift({
			name: GLOBAL_THRESHOLDS_KEY,
			coverageMap: globalCoverageMap,
			thresholds: {
				branches: this.options.thresholds?.branches,
				functions: this.options.thresholds?.functions,
				lines: this.options.thresholds?.lines,
				statements: this.options.thresholds?.statements
			}
		});
		return resolvedThresholds;
	}
	/**
	* Check collected coverage against configured thresholds. Sets exit code to 1 when thresholds not reached.
	*/
	checkThresholds(allThresholds) {
		for (const { coverageMap, thresholds, name } of allThresholds) {
			if (thresholds.branches === void 0 && thresholds.functions === void 0 && thresholds.lines === void 0 && thresholds.statements === void 0) continue;
			// Construct list of coverage summaries where thresholds are compared against
			const summaries = this.options.thresholds?.perFile ? coverageMap.files().map((file) => ({
				file,
				summary: coverageMap.fileCoverageFor(file).toSummary()
			})) : [{
				file: null,
				summary: coverageMap.getCoverageSummary()
			}];
			// Check thresholds of each summary
			for (const { summary, file } of summaries) for (const thresholdKey of THRESHOLD_KEYS) {
				const threshold = thresholds[thresholdKey];
				if (threshold === void 0) continue;
				/**
				* Positive thresholds are treated as minimum coverage percentages (X means: X% of lines must be covered),
				* while negative thresholds are treated as maximum uncovered counts (-X means: X lines may be uncovered).
				*/
				if (threshold >= 0) {
					const coverage = summary.data[thresholdKey].pct;
					if (coverage < threshold) {
						process.exitCode = 1;
						/**
						* Generate error message based on perFile flag:
						* - ERROR: Coverage for statements (33.33%) does not meet threshold (85%) for src/math.ts
						* - ERROR: Coverage for statements (50%) does not meet global threshold (85%)
						*/
						let errorMessage = `ERROR: Coverage for ${thresholdKey} (${coverage}%) does not meet ${name === GLOBAL_THRESHOLDS_KEY ? name : `"${name}"`} threshold (${threshold}%)`;
						if (this.options.thresholds?.perFile && file) errorMessage += ` for ${relative("./", file).replace(/\\/g, "/")}`;
						this.ctx.logger.error(errorMessage);
					}
				} else {
					const uncovered = summary.data[thresholdKey].total - summary.data[thresholdKey].covered;
					const absoluteThreshold = threshold * -1;
					if (uncovered > absoluteThreshold) {
						process.exitCode = 1;
						/**
						* Generate error message based on perFile flag:
						* - ERROR: Uncovered statements (33) exceed threshold (30) for src/math.ts
						* - ERROR: Uncovered statements (33) exceed global threshold (30)
						*/
						let errorMessage = `ERROR: Uncovered ${thresholdKey} (${uncovered}) exceed ${name === GLOBAL_THRESHOLDS_KEY ? name : `"${name}"`} threshold (${absoluteThreshold})`;
						if (this.options.thresholds?.perFile && file) errorMessage += ` for ${relative("./", file).replace(/\\/g, "/")}`;
						this.ctx.logger.error(errorMessage);
					}
				}
			}
		}
	}
	/**
	* Check if current coverage is above configured thresholds and bump the thresholds if needed
	*/
	async updateThresholds({ thresholds: allThresholds, onUpdate, configurationFile }) {
		let updatedThresholds = false;
		const config = resolveConfig(configurationFile);
		assertConfigurationModule(config);
		for (const { coverageMap, thresholds, name } of allThresholds) {
			const summaries = this.options.thresholds?.perFile ? coverageMap.files().map((file) => coverageMap.fileCoverageFor(file).toSummary()) : [coverageMap.getCoverageSummary()];
			const thresholdsToUpdate = [];
			for (const key of THRESHOLD_KEYS) {
				const threshold = thresholds[key] ?? 100;
				/**
				* Positive thresholds are treated as minimum coverage percentages (X means: X% of lines must be covered),
				* while negative thresholds are treated as maximum uncovered counts (-X means: X lines may be uncovered).
				*/
				if (threshold >= 0) {
					// perFile: the weakest file decides whether the threshold can be raised.
					const actual = Math.min(...summaries.map((summary) => summary[key].pct));
					if (actual > threshold) thresholdsToUpdate.push([key, actual]);
				} else {
					const absoluteThreshold = threshold * -1;
					const actual = Math.max(...summaries.map((summary) => summary[key].total - summary[key].covered));
					if (actual < absoluteThreshold) {
						// If everything was covered, set new threshold to 100% (since a threshold of 0 would be considered as 0%)
						const updatedThreshold = actual === 0 ? 100 : actual * -1;
						thresholdsToUpdate.push([key, updatedThreshold]);
					}
				}
			}
			if (thresholdsToUpdate.length === 0) continue;
			updatedThresholds = true;
			const thresholdFormatter = typeof this.options.thresholds?.autoUpdate === "function" ? this.options.thresholds?.autoUpdate : (value) => value;
			for (const [threshold, newValue] of thresholdsToUpdate) {
				const formattedValue = thresholdFormatter(newValue);
				if (name === GLOBAL_THRESHOLDS_KEY) config.test.coverage.thresholds[threshold] = formattedValue;
				else {
					const glob = config.test.coverage.thresholds[name];
					glob[threshold] = formattedValue;
				}
			}
		}
		if (updatedThresholds) {
			this.ctx.logger.log("Updating thresholds to configuration file. You may want to push with updated coverage thresholds.");
			onUpdate();
		}
	}
	// Merges coverage maps from multiple (e.g. sharded/blob) runs into one report.
	async mergeReports(coverageMaps) {
		const coverageMap = this.createCoverageMap();
		for (const coverage of coverageMaps) coverageMap.merge(coverage);
		await this.generateReports(coverageMap, true);
	}
	// True when any configured reporter writes to the terminal.
	hasTerminalReporter(reporters) {
		return reporters.some(([reporter]) => reporter === "text" || reporter === "text-summary" || reporter === "text-lcov" || reporter === "teamcity");
	}
	// Splits `array` into consecutive chunks of at most `size` items.
	toSlices(array, size) {
		return array.reduce((chunks, item) => {
			const index = Math.max(0, chunks.length - 1);
			const lastChunk = chunks[index] || [];
			chunks[index] = lastChunk;
			if (lastChunk.length >= size) chunks.push([item]);
			else lastChunk.push(item);
			return chunks;
		}, []);
	}
	// TODO: should this be abstracted in `project`/`vitest` instead?
	// if we decide to keep `viteModuleRunner: false`, we will need to abstract transformation in both main thread and tests
	// custom --import=module.registerHooks need to be transformed as well somehow
	async transformFile(url, project, viteEnvironment) {
		const config = project.config;
		// vite is disabled, should transform manually if possible
		if (config.experimental.viteModuleRunner === false) {
			const pathname = url.split("?")[0];
			const filename = pathname.startsWith("file://") ? fileURLToPath(pathname) : pathname;
			const extension = path.extname(filename);
			// Non-TypeScript sources can be served verbatim.
			if (!(extension === ".ts" || extension === ".mts" || extension === ".cts")) return {
				code: await promises.readFile(filename, "utf-8"),
				map: null
			};
			if (!module$1.stripTypeScriptTypes) throw new Error(`Cannot parse '${url}' because "module.stripTypeScriptTypes" is not supported. TypeScript coverage requires Node.js 22.15 or higher. This is NOT a bug of Vitest.`);
			// Mirror Node's own behavior: full transform only when the flag is set anywhere.
			const isTransform = process.execArgv.includes("--experimental-transform-types") || config.execArgv.includes("--experimental-transform-types") || process.env.NODE_OPTIONS?.includes("--experimental-transform-types") || config.env?.NODE_OPTIONS?.includes("--experimental-transform-types");
			const code = await promises.readFile(filename, "utf-8");
			return {
				code: module$1.stripTypeScriptTypes(code, { mode: isTransform ? "transform" : "strip" }),
				map: null
			};
		}
		if (project.isBrowserEnabled() || viteEnvironment === "__browser__") {
			const result = await (project.browser?.vite.environments.client || project.vite.environments.client).transformRequest(url);
			if (result) return result;
		}
		return project.vite.environments[viteEnvironment].transformRequest(url);
	}
	// Returns an async transformer that tries each project's vite server until
	// one can transform the (untested) file; rethrows the last failure.
	createUncoveredFileTransformer(ctx) {
		const projects = new Set([...ctx.projects, ctx.getRootProject()]);
		return async (filename) => {
			let lastError;
			for (const project of projects) {
				const root = project.config.root;
				// On Windows root doesn't start with "/" while filenames do
				if (!filename.startsWith(root) && !filename.startsWith(`/${root}`)) continue;
				try {
					const environment = project.config.environment;
					const viteEnvironment = environment === "jsdom" || environment === "happy-dom" ? "client" : "ssr";
					return await this.transformFile(filename, project, viteEnvironment);
				} catch (err) {
					lastError = err;
				}
			}
			// All vite servers failed to transform the file
			throw lastError;
		};
	}
}
/**
 * Narrow down `unknown` glob thresholds to resolved ones.
 * The shorthand `{ 100: true }` expands to 100% on all four metrics;
 * otherwise each metric is kept only when it is a number.
 */
function resolveGlobThresholds(thresholds) {
	if (!thresholds || typeof thresholds !== "object") return {};
	if (100 in thresholds && thresholds[100] === true) {
		return {
			lines: 100,
			branches: 100,
			functions: 100,
			statements: 100
		};
	}
	const resolved = {};
	for (const metric of ["lines", "branches", "functions", "statements"]) {
		resolved[metric] = metric in thresholds && typeof thresholds[metric] === "number" ? thresholds[metric] : void 0;
	}
	return resolved;
}
/**
 * Assert that the parsed config module has an object-valued
 * `test.coverage.thresholds`; any access or type failure is rewrapped
 * into a single readable error.
 */
function assertConfigurationModule(config) {
	try {
		// @ts-expect-error -- Intentional unsafe null pointer check as wrapped in try-catch
		const thresholds = config.test.coverage.thresholds;
		if (typeof thresholds !== "object") throw new TypeError("Expected config.test.coverage.thresholds to be an object");
	} catch (error) {
		const message = error instanceof Error ? error.message : String(error);
		throw new Error(`Unable to parse thresholds from configuration file: ${message}`);
	}
}
/**
 * Locate the config object node inside a magicast-parsed config module,
 * handling plain objects, `defineConfig(...)` and `mergeConfig(...)` shapes.
 */
function resolveConfig(configModule) {
	const mod = configModule.exports.default;
	try {
		// Check for "export default { test: {...} }"
		if (mod.$type === "object") return mod;
		// "export default defineConfig(...)"
		const defined = resolveDefineConfig(mod);
		if (defined) return defined;
		// "export default mergeConfig(..., defineConfig(...))"
		if (mod.$type === "function-call" && mod.$callee === "mergeConfig") {
			const merged = resolveMergeConfig(mod);
			if (merged) return merged;
		}
	} catch (error) {
		// Reduce magicast's verbose errors to readable ones
		throw new Error(error instanceof Error ? error.message : String(error));
	}
	throw new Error("Failed to update coverage thresholds. Configuration file is too complex.");
}
/** Resolve the config object from a `defineConfig(...)` call node, if possible. */
function resolveDefineConfig(mod) {
	if (mod.$type !== "function-call" || mod.$callee !== "defineConfig") return;
	const [firstArg] = mod.$args;
	// "export default defineConfig({ test: {...} })"
	if (firstArg.$type === "object") return firstArg;
	if (firstArg.$type === "arrow-function-expression") {
		// "export default defineConfig(() => ({ test: {...} }))"
		if (firstArg.$body.$type === "object") return firstArg.$body;
		// "export default defineConfig(() => mergeConfig({...}, ...))"
		const config = resolveMergeConfig(firstArg.$body);
		if (config) return config;
	}
}
/** Resolve the first `defineConfig(...)` argument inside a `mergeConfig(...)` call. */
function resolveMergeConfig(mod) {
	if (mod.$type !== "function-call" || mod.$callee !== "mergeConfig") return;
	for (const arg of mod.$args) {
		const config = resolveDefineConfig(arg);
		if (config) return config;
	}
}
| export { BaseCoverageProvider as B, RandomSequencer as R, BaseSequencer as a, resolveApiServerConfig as b, getCoverageProvider as g, hash as h, isBrowserEnabled as i, resolveConfig$1 as r }; |
| import nodeos__default from 'node:os'; | ||
| import './env.D4Lgay0q.js'; | ||
| import { isCI } from 'std-env'; | ||
// Default test-file glob: `.test`/`.spec` files with any js/ts flavour (+ jsx/tsx).
const defaultInclude = ["**/*.{test,spec}.?(c|m)[jt]s?(x)"];
const defaultExclude = ["**/node_modules/**", "**/.git/**"];
// Defaults applied when running in benchmark mode (`vitest bench`).
const benchmarkConfigDefaults = {
	include: ["**/*.{bench,benchmark}.?(c|m)[jt]s?(x)"],
	exclude: defaultExclude,
	includeSource: [],
	reporters: ["default"],
	includeSamples: false
};
// These are the generic defaults for coverage. Providers may also set some provider specific defaults.
const coverageConfigDefaults = {
	provider: "v8",
	enabled: false,
	clean: true,
	cleanOnRerun: true,
	reportsDirectory: "./coverage",
	exclude: [],
	reportOnFailure: false,
	reporter: [
		["text", {}],
		["html", {}],
		["clover", {}],
		["json", {}]
	],
	allowExternal: false,
	excludeAfterRemap: false,
	// Capped at 20; `availableParallelism` may be missing on older Node, fall back to cpu count.
	processingConcurrency: Math.min(20, nodeos__default.availableParallelism?.() ?? nodeos__default.cpus().length)
};
const fakeTimersDefaults = {
	loopLimit: 1e4,
	shouldClearNativeTimers: true
};
// Frozen so downstream code cannot mutate the shared defaults object.
const configDefaults = Object.freeze({
	// `.only` is rejected on CI so an accidental commit doesn't silently skip the suite
	allowOnly: !isCI,
	isolate: true,
	// watch mode only makes sense in an interactive (non-CI, TTY) session
	watch: !isCI && process.stdin.isTTY,
	globals: false,
	environment: "node",
	clearMocks: false,
	restoreMocks: false,
	mockReset: false,
	unstubGlobals: false,
	unstubEnvs: false,
	include: defaultInclude,
	exclude: defaultExclude,
	teardownTimeout: 1e4,
	forceRerunTriggers: ["**/package.json/**", "**/{vitest,vite}.config.*/**"],
	update: false,
	reporters: [],
	silent: false,
	hideSkippedTests: false,
	api: false,
	ui: false,
	uiBase: "/__vitest__/",
	open: !isCI,
	css: { include: [] },
	coverage: coverageConfigDefaults,
	fakeTimers: fakeTimersDefaults,
	maxConcurrency: 5,
	dangerouslyIgnoreUnhandledErrors: false,
	typecheck: {
		checker: "tsc",
		include: ["**/*.{test,spec}-d.?(c|m)[jt]s?(x)"],
		exclude: defaultExclude
	},
	// tests slower than this (ms) are highlighted by reporters
	slowTestThreshold: 300,
	disableConsoleIntercept: false,
	detectAsyncLeaks: false
});
| export { coverageConfigDefaults as a, defaultInclude as b, configDefaults as c, defaultExclude as d, benchmarkConfigDefaults as e }; |
| import { g as globalApis } from './constants.CPYnjOGj.js'; | ||
| import { i as index } from './index.IcAjQV7n.js'; | ||
| import './test.PnxXDGpZ.js'; | ||
| import '@vitest/runner'; | ||
| import '@vitest/utils/helpers'; | ||
| import '@vitest/utils/timers'; | ||
| import './benchmark.D0SlKNbZ.js'; | ||
| import '@vitest/runner/utils'; | ||
| import './utils.BX5Fg8C4.js'; | ||
| import '@vitest/expect'; | ||
| import '@vitest/utils/error'; | ||
| import 'pathe'; | ||
| import '@vitest/snapshot'; | ||
| import '@vitest/spy'; | ||
| import '@vitest/utils/offset'; | ||
| import '@vitest/utils/source-map'; | ||
| import './_commonjsHelpers.D26ty3Ew.js'; | ||
| import './rpc.MzXet3jl.js'; | ||
| import './index.Chj8NDwU.js'; | ||
| import './evaluatedModules.Dg1zASAC.js'; | ||
| import 'vite/module-runner'; | ||
| import 'expect-type'; | ||
/**
 * Expose every public Vitest API (describe, it, expect, ...) on `globalThis`
 * so tests can use them without explicit imports (`globals: true`).
 */
function registerApiGlobally() {
	for (const api of globalApis) {
		// @ts-expect-error I know what I am doing :P
		globalThis[api] = index[api];
	}
}
| export { registerApiGlobally }; |
| import { chai } from '@vitest/expect'; | ||
| import { createHook } from 'node:async_hooks'; | ||
| import { l as loadDiffConfig, a as loadSnapshotSerializers, t as takeCoverageInsideWorker } from './setup-common.z3ZfZiWN.js'; | ||
| import { r as rpc } from './rpc.MzXet3jl.js'; | ||
| import { g as getWorkerState } from './utils.BX5Fg8C4.js'; | ||
| import { T as TestRunner, N as NodeBenchmarkRunner } from './test.PnxXDGpZ.js'; | ||
// Merge user-provided options (e.g. `truncateThreshold`) into Chai's global config.
function setupChaiConfig(config) {
Object.assign(chai.config, config);
}
/**
 * Resolve the snapshot environment: a user-configured module's default export
 * when `config.snapshotEnvironment` is set, otherwise the built-in Node one.
 */
async function resolveSnapshotEnvironment(config, moduleRunner) {
	if (config.snapshotEnvironment) {
		const mod = await moduleRunner.import(config.snapshotEnvironment);
		const environment = mod.default;
		if (!environment || typeof environment !== "object") throw new Error("Snapshot environment module must have a default export object with a shape of `SnapshotEnvironment`");
		return environment;
	}
	const { VitestNodeSnapshotEnvironment } = await import('./node.COQbm6gK.js');
	return new VitestNodeSnapshotEnvironment();
}
// async_hooks resource types that are never reported as leaks — these are
// Node-internal resources not owned by user test code.
const IGNORED_TYPES = new Set([
	"DNSCHANNEL",
	"ELDHISTOGRAM",
	"PerformanceObserver",
	"RANDOMBYTESREQUEST",
	"SIGNREQUEST",
	"STREAM_END_OF_STREAM",
	"TCPWRAP",
	"TIMERWRAP",
	"TLSWRAP",
	"ZLIB"
]);
/**
 * Track async resources created (directly or transitively) by `testFile`
 * via `async_hooks`, so that resources still alive after the test can be
 * reported as leaks.
 *
 * @param testFile absolute path of the test file, matched against stack traces
 * @param projectName project label attached to each reported leak
 * @returns async `collect()` that disables the hook and returns the leaks
 */
function detectAsyncLeaks(testFile, projectName) {
	const resources = /* @__PURE__ */ new Map();
	const hook = createHook({
		init(asyncId, type, triggerAsyncId, resource) {
			if (IGNORED_TYPES.has(type)) return;
			let stack = "";
			const limit = Error.stackTraceLimit;
			// VitestModuleEvaluator's async wrapper of node:vm causes out-of-bound stack traces, simply skip it.
			// Crash fixed in https://github.com/vitejs/vite/pull/21585
			try {
				Error.stackTraceLimit = 100;
				stack = (/* @__PURE__ */ new Error("VITEST_DETECT_ASYNC_LEAKS")).stack || "";
			} catch {
				return;
			} finally {
				Error.stackTraceLimit = limit;
			}
			if (!stack.includes(testFile)) {
				// Not created directly by the test file — attribute it to the resource
				// that triggered it, if that one is already tracked.
				const trigger = resources.get(triggerAsyncId);
				if (!trigger) return;
				stack = trigger.stack;
			}
			let isActive = isActiveDefault;
			if ("hasRef" in resource) {
				// Hold the resource weakly so tracking itself does not keep it alive.
				const ref = new WeakRef(resource);
				isActive = () => ref.deref()?.hasRef() ?? false;
			}
			resources.set(asyncId, {
				type,
				stack,
				projectName,
				filename: testFile,
				isActive
			});
		},
		destroy(asyncId) {
			// Promise lifecycles are handled in `promiseResolve` instead.
			if (resources.get(asyncId)?.type !== "PROMISE") resources.delete(asyncId);
		},
		promiseResolve(asyncId) {
			resources.delete(asyncId);
		}
	});
	hook.enable();
	return async function collect() {
		// Wait one full macrotask so pending destroy/resolve hooks can fire.
		// The previous `await Promise.resolve(setImmediate)` resolved immediately
		// with the function value and never actually waited for the immediate queue.
		await new Promise((resolve) => setImmediate(resolve));
		hook.disable();
		const leaks = [];
		for (const resource of resources.values()) if (resource.isActive()) leaks.push({
			stack: resource.stack,
			type: resource.type,
			filename: resource.filename,
			projectName: resource.projectName
		});
		resources.clear();
		return leaks;
	};
}
// Fallback liveness check for resources without a `hasRef` method:
// assume they are still active.
function isActiveDefault() {
return true;
}
/**
 * Resolve the runner class to instantiate: the built-in runner for the
 * current mode, or the default export of the user-configured `config.runner`
 * module (which must be a constructor, since it is `new`-ed by the caller).
 */
async function getTestRunnerConstructor(config, moduleRunner) {
	if (!config.runner) return config.mode === "test" ? TestRunner : NodeBenchmarkRunner;
	const mod = await moduleRunner.import(config.runner);
	// The previous check (`!mod.default && typeof mod.default !== "function"`)
	// only rejected falsy exports, letting non-function objects through to fail
	// later with a confusing error at `new`. Reject anything that isn't callable.
	if (typeof mod.default !== "function") throw new Error(`Runner must export a default function, but got ${typeof mod.default} imported from ${config.runner}`);
	return mod.default;
}
/**
 * Instantiate and configure the runner that will execute test files.
 *
 * Wraps several runner callbacks so that custom runners get RPC reporting
 * (task updates, annotations, artifacts, coverage, bail) for free without
 * having to call the RPC layer themselves. Original user callbacks are
 * preserved and invoked alongside the RPC calls.
 */
async function resolveTestRunner(config, moduleRunner, traces) {
const testRunner = new (await (getTestRunnerConstructor(config, moduleRunner)))(config);
// inject private executor to every runner
Object.defineProperty(testRunner, "moduleRunner", {
value: moduleRunner,
enumerable: false,
configurable: false
});
if (!testRunner.config) testRunner.config = config;
if (!testRunner.importFile) throw new Error("Runner must implement \"importFile\" method.");
if ("__setTraces" in testRunner) testRunner.__setTraces(traces);
// snapshot serializers are loaded purely for their side effects
const [diffOptions] = await Promise.all([loadDiffConfig(config, moduleRunner), loadSnapshotSerializers(config, moduleRunner)]);
testRunner.config.diffOptions = diffOptions;
// patch some methods, so custom runners don't need to call RPC
const originalOnTaskUpdate = testRunner.onTaskUpdate;
testRunner.onTaskUpdate = async (task, events) => {
// start the RPC call first, run the user hook, then await the RPC result
const p = rpc().onTaskUpdate(task, events);
await originalOnTaskUpdate?.call(testRunner, task, events);
return p;
};
// patch some methods, so custom runners don't need to call RPC
const originalOnTestAnnotate = testRunner.onTestAnnotate;
testRunner.onTestAnnotate = async (test, annotation) => {
const p = rpc().onTaskArtifactRecord(test.id, {
type: "internal:annotation",
location: annotation.location,
annotation
});
// a user-defined hook may override the annotation returned to the test
const overriddenResult = await originalOnTestAnnotate?.call(testRunner, test, annotation);
const vitestResult = await p;
return overriddenResult || vitestResult.annotation;
};
const originalOnTestArtifactRecord = testRunner.onTestArtifactRecord;
testRunner.onTestArtifactRecord = async (test, artifact) => {
const p = rpc().onTaskArtifactRecord(test.id, artifact);
const overriddenResult = await originalOnTestArtifactRecord?.call(testRunner, test, artifact);
const vitestResult = await p;
return overriddenResult || vitestResult;
};
const originalOnCollectStart = testRunner.onCollectStart;
testRunner.onCollectStart = async (file) => {
await rpc().onQueued(file);
await originalOnCollectStart?.call(testRunner, file);
};
const originalOnCollected = testRunner.onCollected;
testRunner.onCollected = async (files) => {
const state = getWorkerState();
// attach worker setup timings to the collected files, then reset them
files.forEach((file) => {
file.prepareDuration = state.durations.prepare;
file.environmentLoad = state.durations.environment;
// should be collected only for a single test file in a batch
state.durations.prepare = 0;
state.durations.environment = 0;
});
// Strip function conditions from retry config before sending via RPC
// Functions cannot be cloned by structured clone algorithm
const sanitizeRetryConditions = (task) => {
if (task.retry && typeof task.retry === "object" && typeof task.retry.condition === "function")
// Remove function condition - it can't be serialized
task.retry = {
...task.retry,
condition: void 0
};
if (task.tasks) task.tasks.forEach(sanitizeRetryConditions);
};
files.forEach(sanitizeRetryConditions);
rpc().onCollected(files);
await originalOnCollected?.call(testRunner, files);
};
const originalOnAfterRun = testRunner.onAfterRunFiles;
testRunner.onAfterRunFiles = async (files) => {
const state = getWorkerState();
// flush coverage collected inside the worker back to the main thread
const coverage = await takeCoverageInsideWorker(config.coverage, moduleRunner);
if (coverage) rpc().onAfterSuiteRun({
coverage,
testFiles: files.map((file) => file.name).sort(),
environment: state.environment.viteEnvironment || state.environment.name,
projectName: state.ctx.projectName
});
await originalOnAfterRun?.call(testRunner, files);
};
const originalOnAfterRunTask = testRunner.onAfterRunTask;
testRunner.onAfterRunTask = async (test) => {
// bail: cancel the run once the configured number of failures is reached
// (the current failure is counted on top of the main-thread tally)
if (config.bail && test.result?.state === "fail") {
if (1 + await rpc().getCountOfFailedTests() >= config.bail) {
rpc().onCancel("test-failure");
testRunner.cancel?.("test-failure");
}
}
await originalOnAfterRunTask?.call(testRunner, test);
};
return testRunner;
}
| export { resolveSnapshotEnvironment as a, detectAsyncLeaks as d, resolveTestRunner as r, setupChaiConfig as s }; |
| import fs from 'node:fs'; | ||
| import { getTasks, getFullName, getTests } from '@vitest/runner/utils'; | ||
| import * as pathe from 'pathe'; | ||
| import c from 'tinyrainbow'; | ||
| import { g as getStateSymbol, t as truncateString, F as F_RIGHT, D as DefaultReporter, f as formatProjectName, s as separator } from './index.DpkD7Zj4.js'; | ||
| import { stripVTControlCharacters } from 'node:util'; | ||
| import { notNullish } from '@vitest/utils/helpers'; | ||
/**
 * Build the JSON benchmark report:
 * `{ files: [{ filepath, groups: [{ fullName, benchmarks }] }] }`.
 * Raw samples are intentionally dropped to keep the output small.
 */
function createBenchmarkJsonReport(files) {
	const report = { files: [] };
	for (const file of files) {
		const groups = [];
		for (const task of getTasks(file)) {
			if (task?.type !== "suite") continue;
			const benchmarks = [];
			for (const child of task.tasks) {
				const benchmark = child.meta.benchmark && child.result?.benchmark;
				if (!benchmark) continue;
				benchmarks.push({
					id: child.id,
					...benchmark,
					samples: []
				});
			}
			if (benchmarks.length) {
				groups.push({
					fullName: getFullName(task, " > "),
					benchmarks
				});
			}
		}
		report.files.push({
			filepath: file.filepath,
			groups
		});
	}
	return report;
}
/** Index every benchmark entry of a JSON report by its task id for O(1) lookup. */
function flattenFormattedBenchmarkReport(report) {
	const entries = report.files.flatMap((file) => file.groups.flatMap((group) => group.benchmarks.map((benchmark) => [benchmark.id, benchmark])));
	return Object.fromEntries(entries);
}
| const outputMap = /* @__PURE__ */ new WeakMap(); | ||
/**
 * Format a benchmark metric: 4 decimals below 100, 2 otherwise,
 * with comma thousand-separators in the integer part.
 */
function formatNumber(number) {
	const precision = number < 100 ? 4 : 2;
	const [whole, fraction] = number.toFixed(precision).split(".");
	const grouped = whole.replace(/(?=(?:\d{3})+$)\B/g, ",");
	return fraction ? `${grouped}.${fraction}` : grouped;
}
// Column headers of the benchmark table; order must match `renderBenchmarkItems`.
const tableHead = [
	"name",
	"hz",
	"min",
	"max",
	"mean",
	"p75",
	"p99",
	"p995",
	"p999",
	"rme",
	"samples"
];
/** Render one benchmark result as a row of strings matching `tableHead`. */
function renderBenchmarkItems(result) {
	// Missing metrics default to 0 so partially-populated results still render.
	const metrics = [
		result.hz,
		result.min,
		result.max,
		result.mean,
		result.p75,
		result.p99,
		result.p995,
		result.p999
	];
	return [
		result.name,
		...metrics.map((value) => formatNumber(value || 0)),
		`±${(result.rme || 0).toFixed(2)}%`,
		String(result.sampleCount || 0)
	];
}
/** Widest cell per column (header included), ignoring ANSI escape codes. */
function computeColumnWidths(results) {
	const rows = [tableHead, ...results.map((result) => renderBenchmarkItems(result))];
	return tableHead.map((_, column) => Math.max(...rows.map((row) => stripVTControlCharacters(row[column]).length)));
}
/** Pad cells to their column width: first column left-aligned, the rest right-aligned. */
function padRow(row, widths) {
	return row.map((cell, column) => column === 0 ? cell.padEnd(widths[column], " ") : cell.padStart(widths[column], " "));
}
/** Render the bold table header; three leading spaces align it with row prefixes. */
function renderTableHead(widths) {
	const cells = padRow(tableHead, widths).map((cell) => c.bold(cell));
	return " ".repeat(3) + cells.join(" ");
}
/** Render a single benchmark row with per-column colors. */
function renderBenchmark(result, widths) {
	const cells = padRow(renderBenchmarkItems(result), widths);
	// name uncolored, hz blue, timing percentiles cyan, rme/samples dimmed
	const colorize = [
		(value) => value,
		c.blue,
		c.cyan,
		c.cyan,
		c.cyan,
		c.cyan,
		c.cyan,
		c.cyan,
		c.cyan,
		c.dim,
		c.dim
	];
	return cells.map((cell, column) => colorize[column](cell)).join(" ");
}
/**
 * Render the benchmark result table for a list of tasks, optionally comparing
 * each result against a baseline report (`options.compare`).
 */
function renderTable(options) {
	const output = [];
	// current (and optional baseline) benchmark results keyed by task id
	const benchMap = {};
	for (const task of options.tasks) if (task.meta.benchmark && task.result?.benchmark) benchMap[task.id] = {
		current: task.result.benchmark,
		baseline: options.compare?.[task.id]
	};
	const benchCount = Object.entries(benchMap).length;
	const columnWidths = computeColumnWidths(Object.values(benchMap).flatMap((v) => [v.current, v.baseline]).filter(notNullish));
	let idx = 0;
	const padding = " ".repeat(1);
	for (const task of options.tasks) {
		const duration = task.result?.duration;
		const bench = benchMap[task.id];
		let prefix = "";
		// print the table header once, before the first benchmark row
		if (idx === 0 && task.meta?.benchmark) prefix += `${renderTableHead(columnWidths)}\n${padding}`;
		prefix += ` ${getStateSymbol(task)} `;
		let suffix = "";
		if (task.type === "suite") suffix += c.dim(` (${getTests(task).length})`);
		if (task.mode === "skip" || task.mode === "todo") suffix += c.dim(c.gray(" [skipped]"));
		if (duration != null) {
			// yellow when the task exceeded the slow-test threshold
			const color = duration > options.slowTestThreshold ? c.yellow : c.green;
			suffix += color(` ${Math.round(duration)}${c.dim("ms")}`);
		}
		if (options.showHeap && task.result?.heap != null) suffix += c.magenta(` ${Math.floor(task.result.heap / 1024 / 1024)} MB heap used`);
		if (bench) {
			let body = renderBenchmark(bench.current, columnWidths);
			if (options.compare && bench.baseline) {
				if (bench.current.hz) {
					const diff = bench.current.hz / bench.baseline.hz;
					const diffFixed = diff.toFixed(2);
					// BUGFIX: `toFixed(2)` can never produce "1.0.0" — the old
					// `diffFixed === "1.0.0"` comparison made the "no change" branch
					// unreachable and painted equal results as slower (red ⇓).
					if (diffFixed === "1.00") body += c.gray(` [${diffFixed}x]`);
					else if (diff > 1) body += c.blue(` [${diffFixed}x] ⇑`);
					else body += c.red(` [${diffFixed}x] ⇓`);
				}
				output.push(padding + prefix + body + suffix);
				const bodyBaseline = renderBenchmark(bench.baseline, columnWidths);
				output.push(`${padding} ${bodyBaseline} ${c.dim("(baseline)")}`);
			} else {
				// without a baseline, flag the fastest/slowest of the group instead
				if (bench.current.rank === 1 && benchCount > 1) body += c.bold(c.green(" fastest"));
				if (bench.current.rank === benchCount && benchCount > 2) body += c.bold(c.gray(" slowest"));
				output.push(padding + prefix + body + suffix);
			}
		} else output.push(padding + prefix + task.name + suffix);
		// for failing tasks, append the last captured console line (truncated)
		if (task.result?.state !== "pass" && outputMap.get(task) != null) {
			let data = outputMap.get(task);
			if (typeof data === "string") {
				data = stripVTControlCharacters(data.trim().split("\n").filter(Boolean).pop());
				if (data === "") data = void 0;
			}
			if (data != null) {
				const out = ` ${" ".repeat(options.level)}${F_RIGHT} ${data}`;
				output.push(c.gray(truncateString(out, options.columns)));
			}
		}
		idx++;
	}
	return output.filter(Boolean).join("\n");
}
/**
 * Reporter for `vitest bench`: prints a result table per suite/module and can
 * compare against (and emit) a JSON baseline report.
 */
class BenchmarkReporter extends DefaultReporter {
// flattened baseline results keyed by task id, loaded from `benchmark.compare`
compare;
async onInit(ctx) {
super.onInit(ctx);
if (this.ctx.config.benchmark?.compare) {
const compareFile = pathe.resolve(this.ctx.config.root, this.ctx.config.benchmark?.compare);
try {
this.compare = flattenFormattedBenchmarkReport(JSON.parse(await fs.promises.readFile(compareFile, "utf-8")));
} catch (e) {
// best-effort: a missing/invalid baseline is reported but does not abort the run
this.error(`Failed to read '${compareFile}'`, e);
}
}
}
onTaskUpdate(packs) {
// once a suite finishes, rank its benchmarks by mean time (1 = fastest)
for (const pack of packs) {
const task = this.ctx.state.idMap.get(pack[0]);
if (task?.type === "suite" && task.result?.state !== "run") task.tasks.filter((task) => task.result?.benchmark).sort((benchA, benchB) => benchA.result.benchmark.mean - benchB.result.benchmark.mean).forEach((bench, idx) => {
bench.result.benchmark.rank = Number(idx) + 1;
});
}
}
onTestSuiteResult(testSuite) {
super.onTestSuiteResult(testSuite);
this.printSuiteTable(testSuite);
}
printTestModule(testModule) {
this.printSuiteTable(testModule);
}
// Print the benchmark table for a finished suite/module that has direct benches.
printSuiteTable(testTask) {
const state = testTask.state();
if (state === "pending" || state === "queued") return;
const benches = testTask.task.tasks.filter((t) => t.meta.benchmark);
const duration = testTask.task.result?.duration || 0;
// only print once every direct bench has finished
if (benches.length > 0 && benches.every((t) => t.result?.state !== "run" && t.result?.state !== "queued")) {
let title = `\n ${getStateSymbol(testTask.task)} ${formatProjectName(testTask.project)}${getFullName(testTask.task, separator)}`;
if (duration != null && duration > this.ctx.config.slowTestThreshold) title += c.yellow(` ${Math.round(duration)}${c.dim("ms")}`);
this.log(title);
this.log(renderTable({
tasks: benches,
level: 1,
columns: this.ctx.logger.getColumns(),
compare: this.compare,
showHeap: this.ctx.config.logHeapUsage,
slowTestThreshold: this.ctx.config.slowTestThreshold
}));
}
}
async onTestRunEnd(testModules, unhandledErrors, reason) {
// NOTE(review): super.onTestRunEnd is not awaited here — confirm intentional
super.onTestRunEnd(testModules, unhandledErrors, reason);
// write output for future comparison
let outputFile = this.ctx.config.benchmark?.outputJson;
if (outputFile) {
outputFile = pathe.resolve(this.ctx.config.root, outputFile);
const outputDirectory = pathe.dirname(outputFile);
if (!fs.existsSync(outputDirectory)) await fs.promises.mkdir(outputDirectory, { recursive: true });
const output = createBenchmarkJsonReport(testModules.map((t) => t.task.file));
await fs.promises.writeFile(outputFile, JSON.stringify(output, null, 2));
this.log(`Benchmark report written to ${outputFile}`);
}
}
}
// Same as BenchmarkReporter, with the inherited verbose flag enabled.
class VerboseBenchmarkReporter extends BenchmarkReporter {
verbose = true;
}
// Registry of benchmark reporters selectable by name in the config.
const BenchmarkReportsMap = {
	default: BenchmarkReporter,
	verbose: VerboseBenchmarkReporter
};
| export { BenchmarkReporter as B, VerboseBenchmarkReporter as V, BenchmarkReportsMap as a }; |
// (scrape artifact) Sorry, the diff of this file is too big to display
| import { v as vi, N as NodeBenchmarkRunner, T as TestRunner, a as assert, c as createExpect, g as globalExpect, i as inject, s as should, b as vitest } from './test.PnxXDGpZ.js'; | ||
| import { b as bench } from './benchmark.D0SlKNbZ.js'; | ||
| import { V as VitestEvaluatedModules } from './evaluatedModules.Dg1zASAC.js'; | ||
| import { expectTypeOf } from 'expect-type'; | ||
| import { afterAll, afterEach, aroundAll, aroundEach, beforeAll, beforeEach, describe, it, onTestFailed, onTestFinished, recordArtifact, suite, test } from '@vitest/runner'; | ||
| import { chai } from '@vitest/expect'; | ||
| const assertType = function assertType() {}; | ||
// Frozen namespace of every public Vitest API, used e.g. to populate
// `globalThis` when `globals: true` is enabled.
var index = /*#__PURE__*/Object.freeze({
	__proto__: null,
	BenchmarkRunner: NodeBenchmarkRunner,
	EvaluatedModules: VitestEvaluatedModules,
	TestRunner: TestRunner,
	afterAll: afterAll,
	afterEach: afterEach,
	aroundAll: aroundAll,
	aroundEach: aroundEach,
	assert: assert,
	assertType: assertType,
	beforeAll: beforeAll,
	beforeEach: beforeEach,
	bench: bench,
	chai: chai,
	createExpect: createExpect,
	describe: describe,
	expect: globalExpect,
	expectTypeOf: expectTypeOf,
	inject: inject,
	it: it,
	onTestFailed: onTestFailed,
	onTestFinished: onTestFinished,
	recordArtifact: recordArtifact,
	should: should,
	suite: suite,
	test: test,
	vi: vi,
	vitest: vitest
});
| export { assertType as a, index as i }; |
| import { i as init } from './init.Borgldul.js'; | ||
// Child-process worker entry: must be spawned via node:child_process fork (IPC required).
if (!process.send) throw new Error("Expected worker to be run in node:child_process");
// Store globals in case tests overwrite them
const processExit = process.exit.bind(process);
const processSend = process.send.bind(process);
const processOn = process.on.bind(process);
const processOff = process.off.bind(process);
const processRemoveAllListeners = process.removeAllListeners.bind(process);
// Work-around for nodejs/node#55094
if (process.execArgv.some((execArg) => execArg.startsWith("--prof") || execArg.startsWith("--cpu-prof") || execArg.startsWith("--heap-prof") || execArg.startsWith("--diagnostic-dir"))) processOn("SIGTERM", () => processExit());
// guard against IPC errors while the main thread is already gone (see onError)
processOn("error", onError);
/**
 * Wire the worker runtime to the child-process IPC channel, using the
 * process methods captured at startup so tests cannot interfere.
 */
function workerInit(options) {
	const { runTests } = options;
	async function executeTests(method, state, traces) {
		try {
			await runTests(method, state, traces);
		} finally {
			// tests may have replaced `process.exit`; restore the original binding
			process.exit = processExit;
		}
	}
	init({
		post: (message) => processSend(message),
		on: (callback) => processOn("message", callback),
		off: (callback) => processOff("message", callback),
		teardown: () => {
			processRemoveAllListeners("message");
			processOff("error", onError);
		},
		runTests: (state, traces) => executeTests("run", state, traces),
		collectTests: (state, traces) => executeTests("collect", state, traces),
		setup: options.setup
	});
}
// Prevent leaving worker in loops where it tries to send message to closed main
// thread, errors, and tries to send the error.
function onError(error) {
	const code = error?.code;
	if (code === "ERR_IPC_CHANNEL_CLOSED" || code === "EPIPE") processExit(1);
}
| export { workerInit as w }; |
| import { isMainThread, parentPort } from 'node:worker_threads'; | ||
| import { i as init } from './init.Borgldul.js'; | ||
| if (isMainThread || !parentPort) throw new Error("Expected worker to be run in node:worker_threads"); | ||
/** Wire the worker runtime to the worker_threads message port. */
function workerInit(options) {
	const { runTests } = options;
	init({
		post: (message) => parentPort.postMessage(message),
		on: (handler) => parentPort.on("message", handler),
		off: (handler) => parentPort.off("message", handler),
		teardown: () => parentPort.removeAllListeners("message"),
		runTests: async (state, traces) => runTests("run", state, traces),
		collectTests: async (state, traces) => runTests("collect", state, traces),
		setup: options.setup
	});
}
| export { workerInit as w }; |
| import { readFileSync } from 'node:fs'; | ||
| import { isBuiltin } from 'node:module'; | ||
| import { pathToFileURL } from 'node:url'; | ||
| import { resolve } from 'pathe'; | ||
| import { ModuleRunner, EvaluatedModules } from 'vite/module-runner'; | ||
| import { b as VitestTransport } from './startVitestModuleRunner.BdSYEN5x.js'; | ||
| import { e as environments } from './index.EY6TCHpo.js'; | ||
| import { serializeValue } from '@vitest/utils/serialize'; | ||
| import { serializeError } from '@vitest/utils/error'; | ||
| import { T as Traces } from './traces.CCmnQaNT.js'; | ||
| import { o as onCancel, a as rpcDone, c as createRuntimeRpc } from './rpc.MzXet3jl.js'; | ||
| import { createStackString, parseStacktrace } from '@vitest/utils/source-map'; | ||
| import { s as setupInspect } from './inspector.CvyFGlXm.js'; | ||
| import { V as VitestEvaluatedModules } from './evaluatedModules.Dg1zASAC.js'; | ||
| import { E as EnvironmentTeardownError } from './utils.BX5Fg8C4.js'; | ||
/** Whether `env` names one of Vitest's bundled environments. */
function isBuiltinEnvironment(env) {
	return Reflect.has(environments, env);
}
const isWindows = process.platform === "win32";
// Cache of environment-loader module runners, keyed by project root.
const _loaders = /* @__PURE__ */ new Map();
/**
 * Get (or lazily create) the cached ModuleRunner used to import custom
 * environment modules for the project at `root`. A closed cached runner
 * is discarded and replaced.
 */
function createEnvironmentLoader(root, rpc) {
const cachedLoader = _loaders.get(root);
if (!cachedLoader || cachedLoader.isClosed()) {
_loaders.delete(root);
const moduleRunner = new ModuleRunner({
hmr: false,
sourcemapInterceptor: "prepareStackTrace",
transport: new VitestTransport({
async fetchModule(id, importer, options) {
const result = await rpc.fetch(id, importer, "__vitest__", options);
// "cached" results point at a temp file on disk; inline its contents
if ("cached" in result) return {
code: readFileSync(result.tmp, "utf-8"),
...result
};
if (isWindows && "externalize" in result)
// TODO: vitest returns paths for external modules, but Vite returns file://
// https://github.com/vitejs/vite/pull/20449
result.externalize = isBuiltin(id) || /^(?:node:|data:|http:|https:|file:)/.test(id) ? result.externalize : pathToFileURL(result.externalize).toString();
return result;
},
async resolveId(id, importer) {
return rpc.resolve(id, importer, "__vitest__");
}
}, new EvaluatedModules(), /* @__PURE__ */ new WeakMap())
});
_loaders.set(root, moduleRunner);
}
return _loaders.get(root);
}
/**
 * Loads a custom environment package with the native `import()` (no Vite runner).
 * Relative/absolute names are imported directly; bare names resolve to the
 * conventional "vitest-environment-<name>" package relative to `root`.
 */
async function loadNativeEnvironment(name, root, traces) {
	let packageId;
	if (name[0] === "." || name[0] === "/") {
		packageId = pathToFileURL(resolve(root, name)).toString();
	} else {
		packageId = import.meta.resolve(`vitest-environment-${name}`, pathToFileURL(root).toString());
	}
	const pkg = await traces.$("vitest.runtime.environment.import", () => import(packageId));
	return resolveEnvironmentFromModule(name, packageId, pkg);
}
/**
 * Validates the module loaded for environment `name` and returns its default
 * export. Throws a TypeError when the default export is missing/not an object
 * or when the deprecated "transformMode" has an invalid value.
 */
function resolveEnvironmentFromModule(name, packageId, pkg) {
	if (!pkg || !pkg.default || typeof pkg.default !== "object") throw new TypeError(`Environment "${name}" is not a valid environment. Path "${packageId}" should export default object with a "setup" or/and "setupVM" method.`);
	const environment = pkg.default;
	// "transformMode" is a property (the previous message wrongly called it a method)
	if (environment.transformMode != null && environment.transformMode !== "web" && environment.transformMode !== "ssr") throw new TypeError(`Environment "${name}" is not a valid environment. Path "${packageId}" should export default object with a "transformMode" property equal to "ssr" or "web", received "${environment.transformMode}".`);
	if (environment.transformMode) {
		// message grammar fixed: "This options was" -> "This option was"
		console.warn(`The Vitest environment ${environment.name} defines the "transformMode". This option was deprecated in Vitest 4 and will be removed in the next major version. Please, use "viteEnvironment" instead.`);
		// keep for backwards compat
		environment.viteEnvironment ??= environment.transformMode === "ssr" ? "ssr" : "client";
	}
	return environment;
}
/**
 * Resolves and loads the test environment `name` for project `root`.
 * Built-in environments are returned directly; custom ones are loaded either
 * natively or through the shared Vite module runner (returned as `loader`).
 */
async function loadEnvironment(name, root, rpc, traces, viteModuleRunner) {
	// Built-in environments (node, jsdom, ...) ship with Vitest itself.
	if (isBuiltinEnvironment(name)) {
		return { environment: environments[name] };
	}
	// Without a Vite module runner, fall back to a plain dynamic import.
	if (!viteModuleRunner) {
		return { environment: await loadNativeEnvironment(name, root, traces) };
	}
	const loader = createEnvironmentLoader(root, rpc);
	let packageId;
	if (name[0] === "." || name[0] === "/") {
		packageId = resolve(root, name);
	} else {
		const resolved = await traces.$("vitest.runtime.environment.resolve", () => rpc.resolve(`vitest-environment-${name}`, void 0, "__vitest__"));
		packageId = resolved?.id ?? resolve(root, name);
	}
	const pkg = await traces.$("vitest.runtime.environment.import", () => loader.import(packageId));
	return {
		environment: resolveEnvironmentFromModule(name, packageId, pkg),
		loader
	};
}
const cleanupListeners = /* @__PURE__ */ new Set();
const moduleRunnerListeners = /* @__PURE__ */ new Set();
/** Registers a callback to run when the worker tears down. */
function onCleanup(cb) {
	cleanupListeners.add(cb);
}
/** Invokes every registered teardown callback in parallel and waits for all of them. */
async function cleanup() {
	const pending = [];
	for (const listener of cleanupListeners) {
		pending.push(listener());
	}
	await Promise.all(pending);
}
/** Registers a callback invoked whenever a module runner is created. */
function onModuleRunner(cb) {
	moduleRunnerListeners.add(cb);
}
/** Notifies every registered listener about the freshly created module runner. */
function emitModuleRunner(moduleRunner) {
	for (const listener of moduleRunnerListeners) {
		listener(moduleRunner);
	}
}
// Store globals in case tests overwrite them
const processListeners = process.listeners.bind(process);
const processOn = process.on.bind(process);
const processOff = process.off.bind(process);
const dispose = [];
/**
 * Installs "uncaughtException"/"unhandledRejection" handlers that forward
 * otherwise-unhandled errors to the main thread over RPC, tagging them with
 * the current test name and file. `state` is a getter for the worker state.
 * Re-invoking replaces the previously installed handlers.
 */
function listenForErrors(state) {
	// remove handlers from a previous invocation so they are not duplicated
	dispose.forEach((fn) => fn());
	dispose.length = 0;
	function catchError(err, type, event) {
		const worker = state();
		// if there is another listener, assume that it's handled by user code
		// one is Vitest's own listener
		if (processListeners(event).length > 1) return;
		const error = serializeValue(err);
		if (typeof error === "object" && error != null) {
			error.VITEST_TEST_NAME = worker.current?.type === "test" ? worker.current.name : void 0;
			if (worker.filepath) error.VITEST_TEST_PATH = worker.filepath;
		}
		// reuse the state resolved above instead of calling state() a second time
		worker.rpc.onUnhandledError(error, type);
	}
	const uncaughtException = (e) => catchError(e, "Uncaught Exception", "uncaughtException");
	const unhandledRejection = (e) => catchError(e, "Unhandled Rejection", "unhandledRejection");
	processOn("uncaughtException", uncaughtException);
	processOn("unhandledRejection", unhandledRejection);
	dispose.push(() => {
		processOff("uncaughtException", uncaughtException);
		processOff("unhandledRejection", unhandledRejection);
	});
}
// Shared across runs: module ids currently being resolved.
const resolvingModules = /* @__PURE__ */ new Set();
// Runs ("run") or collects ("collect") the test files described by `ctx`
// through the given worker implementation: builds the per-run worker state,
// delegates to worker.runTests/collectTests, and always flushes pending RPC
// calls and cleanups afterwards.
async function execute(method, ctx, worker, traces) {
	const prepareStart = performance.now();
	const cleanups = [setupInspect(ctx)];
	// RPC is used to communicate between worker (be it a thread worker or child process or a custom implementation) and the main thread
	const rpc = ctx.rpc;
	try {
		// do not close the RPC channel so that we can get the error messages sent to the main thread
		cleanups.push(async () => {
			await Promise.all(rpc.$rejectPendingCalls(({ method, reject }) => {
				reject(new EnvironmentTeardownError(`[vitest-worker]: Closing rpc while "${method}" was pending`));
			}));
		});
		const state = {
			ctx,
			evaluatedModules: new VitestEvaluatedModules(),
			resolvingModules,
			moduleExecutionInfo: /* @__PURE__ */ new Map(),
			config: ctx.config,
			environment: null,
			durations: {
				environment: 0,
				prepare: prepareStart
			},
			rpc,
			onCancel,
			onCleanup: onCleanup,
			providedContext: ctx.providedContext,
			onFilterStackTrace(stack) {
				return createStackString(parseStacktrace(stack));
			},
			metaEnv: createImportMetaEnvProxy()
		};
		const methodName = method === "collect" ? "collectTests" : "runTests";
		if (!worker[methodName] || typeof worker[methodName] !== "function") throw new TypeError(`Test worker should expose "runTests" method. Received "${typeof worker.runTests}".`);
		await worker[methodName](state, traces);
	} finally {
		// best-effort teardown: never mask an error thrown in the try block
		await rpcDone().catch(() => {});
		await Promise.all(cleanups.map((fn) => fn())).catch(() => {});
	}
}
/** Executes the test files in `ctx` through `worker`. */
async function run(ctx, worker, traces) {
	await execute("run", ctx, worker, traces);
}
/** Collects (without running) the test files in `ctx` through `worker`. */
async function collect(ctx, worker, traces) {
	await execute("collect", ctx, worker, traces);
}
/** Runs every registered cleanup callback. */
function teardown() {
	return cleanup();
}
const env = process.env;
/**
 * Builds the object backing `import.meta.env` in tests: reads and writes are
 * forwarded to `process.env`, while Vite's boolean keys are coerced to/from
 * their "1"/"" string encodings.
 */
function createImportMetaEnvProxy() {
	// keys Vite defines as booleans - packages/vitest/src/node/plugins/index.ts:146
	const BOOLEAN_KEYS = new Set([
		"DEV",
		"PROD",
		"SSR"
	]);
	const handler = {
		get(_target, key) {
			if (typeof key !== "string") return undefined;
			return BOOLEAN_KEYS.has(key) ? Boolean(process.env[key]) : process.env[key];
		},
		set(_target, key, value) {
			if (typeof key === "string") {
				if (BOOLEAN_KEYS.has(key)) process.env[key] = value ? "1" : "";
				else process.env[key] = value;
			}
			return true;
		}
	};
	return new Proxy(env, handler);
}
// Marker attached to every outgoing message so the pool can identify worker responses.
const __vitest_worker_response__ = true;
const memoryUsage = process.memoryUsage.bind(process);
// Whether to attach heap usage to "testfileFinished" responses (set by "start").
let reportMemory = false;
// Lazily initialized tracing helper, shared across messages.
let traces;
/** @experimental */
// Wires a worker implementation to its message channel. Handles the pool's
// "start" (one-time setup), "run"/"collect" (execute test files; serialized -
// concurrent requests are rejected) and "stop" (teardown) requests, replying
// with "started"/"testfileFinished"/"stopped" messages respectively.
function init(worker) {
	worker.on(onMessage);
	if (worker.onModuleRunner) onModuleRunner(worker.onModuleRunner);
	let runPromise;
	let isRunning = false;
	let workerTeardown;
	let setupContext;
	// Serializes (if the worker defines a serializer) and posts a response.
	function send(response) {
		worker.post(worker.serialize ? worker.serialize(response) : response);
	}
	async function onMessage(rawMessage) {
		const message = worker.deserialize ? worker.deserialize(rawMessage) : rawMessage;
		// ignore messages that were not produced by the Vitest pool
		if (message?.__vitest_worker_request__ !== true) return;
		switch (message.type) {
			case "start": {
				process.env.VITEST_POOL_ID = String(message.poolId);
				process.env.VITEST_WORKER_ID = String(message.workerId);
				reportMemory = message.options.reportMemory;
				traces ??= await new Traces({
					enabled: message.traces.enabled,
					sdkPath: message.traces.sdkPath
				}).waitInit();
				const { environment, config, pool } = message.context;
				const context = traces.getContextFromCarrier(message.traces.otelCarrier);
				// record telemetry as part of "start"
				traces.recordInitSpan(context);
				try {
					setupContext = {
						environment,
						config,
						pool,
						rpc: createRuntimeRpc(worker),
						projectName: config.name || "",
						traces
					};
					// worker.setup may return a teardown callback used on "stop"
					workerTeardown = await traces.$("vitest.runtime.setup", { context }, () => worker.setup?.(setupContext));
					send({
						type: "started",
						__vitest_worker_response__
					});
				} catch (error) {
					// setup failed - report the error instead of crashing the worker
					send({
						type: "started",
						__vitest_worker_response__,
						error: serializeError(error)
					});
				}
				break;
			}
			case "run":
				// Prevent concurrent execution if worker is already running
				if (isRunning) {
					send({
						type: "testfileFinished",
						__vitest_worker_response__,
						error: serializeError(/* @__PURE__ */ new Error("[vitest-worker]: Worker is already running tests"))
					});
					return;
				}
				try {
					process.env.VITEST_WORKER_ID = String(message.context.workerId);
				} catch (error) {
					return send({
						type: "testfileFinished",
						__vitest_worker_response__,
						error: serializeError(error),
						usedMemory: reportMemory ? memoryUsage().heapUsed : void 0
					});
				}
				isRunning = true;
				try {
					const tracesContext = traces.getContextFromCarrier(message.otelCarrier);
					runPromise = traces.$("vitest.runtime.run", {
						context: tracesContext,
						attributes: {
							"vitest.worker.specifications": traces.isEnabled() ? getFilesWithLocations(message.context.files) : [],
							"vitest.worker.id": message.context.workerId
						}
					}, () => run({
						...setupContext,
						...message.context
					}, worker, traces).catch((error) => serializeError(error)));
					send({
						type: "testfileFinished",
						__vitest_worker_response__,
						error: await runPromise,
						usedMemory: reportMemory ? memoryUsage().heapUsed : void 0
					});
				} finally {
					runPromise = void 0;
					isRunning = false;
				}
				break;
			case "collect":
				// Prevent concurrent execution if worker is already running
				if (isRunning) {
					send({
						type: "testfileFinished",
						__vitest_worker_response__,
						error: serializeError(/* @__PURE__ */ new Error("[vitest-worker]: Worker is already running tests"))
					});
					return;
				}
				try {
					process.env.VITEST_WORKER_ID = String(message.context.workerId);
				} catch (error) {
					return send({
						type: "testfileFinished",
						__vitest_worker_response__,
						error: serializeError(error),
						usedMemory: reportMemory ? memoryUsage().heapUsed : void 0
					});
				}
				isRunning = true;
				try {
					const tracesContext = traces.getContextFromCarrier(message.otelCarrier);
					runPromise = traces.$("vitest.runtime.collect", {
						context: tracesContext,
						attributes: {
							"vitest.worker.specifications": traces.isEnabled() ? getFilesWithLocations(message.context.files) : [],
							"vitest.worker.id": message.context.workerId
						}
					}, () => collect({
						...setupContext,
						...message.context
					}, worker, traces).catch((error) => serializeError(error)));
					send({
						type: "testfileFinished",
						__vitest_worker_response__,
						error: await runPromise,
						usedMemory: reportMemory ? memoryUsage().heapUsed : void 0
					});
				} finally {
					runPromise = void 0;
					isRunning = false;
				}
				break;
			case "stop":
				// wait for any in-flight run before tearing down
				await runPromise;
				try {
					const context = traces.getContextFromCarrier(message.otelCarrier);
					const error = await traces.$("vitest.runtime.teardown", { context }, async () => {
						const error = await teardown().catch((error) => serializeError(error));
						await workerTeardown?.();
						return error;
					});
					await traces.finish();
					send({
						type: "stopped",
						error,
						__vitest_worker_response__
					});
				} catch (error) {
					send({
						type: "stopped",
						error: serializeError(error),
						__vitest_worker_response__
					});
				}
				worker.teardown?.();
				break;
		}
	}
}
/**
 * Flattens test specifications into trace attribute strings: plain file paths,
 * or "path:location" entries when specific test locations were requested.
 */
function getFilesWithLocations(files) {
	return files.flatMap((file) => {
		if (!file.testLocations) return file.filepath;
		return file.testLocations.map((location) => {
			// fixed: `${file}` stringified the spec object as "[object Object]"
			return `${file.filepath}:${location}`;
		});
	});
}
| export { listenForErrors as a, emitModuleRunner as e, init as i, loadEnvironment as l }; |
| import module$1, { isBuiltin } from 'node:module'; | ||
| import { fileURLToPath } from 'node:url'; | ||
| import { MessageChannel } from 'node:worker_threads'; | ||
| import { initSyntaxLexers, hoistMocks } from '@vitest/mocker/transforms'; | ||
| import { cleanUrl } from '@vitest/utils/helpers'; | ||
| import { p as parse } from './acorn.B2iPLyUM.js'; | ||
| import MagicString from 'magic-string'; | ||
| import { resolve } from 'pathe'; | ||
| import c from 'tinyrainbow'; | ||
| import { distDir } from '../path.js'; | ||
| import { t as toBuiltin } from './modules.BJuCwlRJ.js'; | ||
| import 'node:path'; | ||
const NOW_LENGTH = Date.now().toString().length;
// matches the URL-encoded "?vitest=<timestamp>" query injected below
const REGEXP_VITEST = /* @__PURE__ */ new RegExp(`%3Fvitest=\\d{${NOW_LENGTH}}`);
const REGEXP_MOCK_ACTUAL = /\?mock=actual/;
/**
 * Installs Node.js module customization hooks for native (non-Vite) test
 * execution: mock resolution, in-source test invalidation and module graph
 * tracking. Prefers the synchronous `module.registerHooks` API; falls back to
 * `module.register` (with a MessageChannel for graph tracking) on older Node.
 */
async function setupNodeLoaderHooks(worker) {
	if (module$1.setSourceMapsSupport) module$1.setSourceMapsSupport(true);
	else if (process.setSourceMapsEnabled) process.setSourceMapsEnabled(true);
	if (worker.config.experimental.nodeLoader !== false) await initSyntaxLexers();
	if (typeof module$1.registerHooks === "function") module$1.registerHooks({
		resolve(specifier, context, nextResolve) {
			if (specifier.includes("mock=actual")) {
				// url is already resolved by `importActual`
				const moduleId = specifier.replace(REGEXP_MOCK_ACTUAL, "");
				const builtin = isBuiltin(moduleId);
				return {
					url: builtin ? toBuiltin(moduleId) : moduleId,
					format: builtin ? "builtin" : void 0,
					shortCircuit: true
				};
			}
			const isVitest = specifier.includes("%3Fvitest=");
			const result = nextResolve(isVitest ? specifier.replace(REGEXP_VITEST, "") : specifier, context);
			// avoid tracking /node_modules/ module graph for performance reasons
			if (context.parentURL && result.url && !result.url.includes("/node_modules/")) worker.rpc.ensureModuleGraphEntry(result.url, context.parentURL).catch(() => {
				// ignore errors
			});
			// this is require for in-source tests to be invalidated if
			// one of the files already imported it in --maxWorkers=1 --no-isolate
			if (isVitest) result.url = `${result.url}?vitest=${Date.now()}`;
			if (worker.config.experimental.nodeLoader === false || !context.parentURL || result.url.includes(distDir) || context.parentURL?.toString().includes(distDir)) return result;
			const mockedResult = getNativeMocker()?.resolveMockedModule(result.url, context.parentURL);
			if (mockedResult != null) return mockedResult;
			return result;
		},
		load: worker.config.experimental.nodeLoader === false ? void 0 : createLoadHook()
	});
	else if (module$1.register) {
		if (worker.config.experimental.nodeLoader !== false) console.warn(`${c.bgYellow(" WARNING ")} "module.registerHooks" is not supported in Node.js ${process.version}. This means that some features like module mocking or in-source testing are not supported. Upgrade your Node.js version to at least 22.15 or disable "experimental.nodeLoader" flag manually.\n`);
		const { port1, port2 } = new MessageChannel();
		port1.unref();
		port2.unref();
		port1.on("message", (data) => {
			if (!data || typeof data !== "object") return;
			switch (data.event) {
				case "register-module-graph-entry": {
					const { url, parentURL } = data;
					// fix: swallow RPC rejections like the registerHooks path above does -
					// an unhandled rejection here would crash the whole worker
					worker.rpc.ensureModuleGraphEntry(url, parentURL).catch(() => {
						// ignore errors
					});
					return;
				}
				default: console.error("Unknown message event:", data.event);
			}
		});
		/** Registers {@link file://./../nodejsWorkerLoader.ts} */
		module$1.register("#nodejs-worker-loader", {
			parentURL: import.meta.url,
			data: { port: port2 },
			transferList: [port2]
		});
	} else if (!process.versions.deno && !process.versions.bun) console.warn("\"module.registerHooks\" and \"module.register\" are not supported. Some Vitest features may not work. Please, use Node.js 18.19.0 or higher.");
}
// Replaces every `import.meta.vitest` marker in `source` with a call to an
// injected IMPORT_META_TEST() helper so in-source tests only activate when the
// file is the one currently executed by the worker. `ms` is a lazy MagicString
// factory - it is only instantiated when a marker is actually found.
function replaceInSourceMarker(url, source, ms) {
	const re = /import\.meta\.vitest/g;
	let match;
	let overridden = false;
	// eslint-disable-next-line no-cond-assign
	while (match = re.exec(source)) {
		const { index, "0": code } = match;
		overridden = true;
		// should it support process.vitest for CJS modules?
		ms().overwrite(index, index + code.length, "IMPORT_META_TEST()");
	}
	if (overridden) {
		const filename = resolve(fileURLToPath(url));
		// appending instead of prepending because functions are hoisted and we don't change the offset
		ms().append(`;\nfunction IMPORT_META_TEST() { return typeof __vitest_worker__ !== 'undefined' && __vitest_worker__.filepath === "${filename.replace(/"/g, "\\\"")}" ? __vitest_index__ : undefined; }`);
	}
}
// module formats that never need Vitest's source transforms
const ignoreFormats = new Set([
	"addon",
	"builtin",
	"wasm"
]);
// Creates the "load" customization hook: produces automock/manual mock sources
// for URLs tagged with "mock=*" and hoists vi.mock/vi.hoisted calls (plus the
// import.meta.vitest marker) in files tagged with "vitest=".
function createLoadHook(_worker) {
	return (url, context, nextLoad) => {
		// mocked builtins are reported as CJS so the loader does not try to read their source
		const result = url.includes("mock=") && isBuiltin(cleanUrl(url)) ? { format: "commonjs" } : nextLoad(url, context);
		if (result.format && ignoreFormats.has(result.format) || url.includes(distDir)) return result;
		const mocker = getNativeMocker();
		mocker?.checkCircularManualMock(url);
		if (url.includes("mock=automock") || url.includes("mock=autospy")) {
			const automockedResult = mocker?.loadAutomock(url, result);
			if (automockedResult != null) return automockedResult;
			return result;
		}
		if (url.includes("mock=manual")) {
			const mockedResult = mocker?.loadManualMock(url, result);
			if (mockedResult != null) return mockedResult;
			return result;
		}
		// ignore non-vitest modules for performance reasons,
		// vi.hoisted and vi.mock won't work outside of test files or setup files
		if (!result.source || !url.includes("vitest=")) return result;
		const filename = url.startsWith("file://") ? fileURLToPath(url) : url;
		const source = result.source.toString();
		const transformedCode = result.format?.includes("typescript") ? module$1.stripTypeScriptTypes(source) : source;
		let _ms;
		// MagicString is created lazily, only if a transform actually edits the source
		const ms = () => _ms || (_ms = new MagicString(source));
		if (source.includes("import.meta.vitest")) replaceInSourceMarker(url, source, ms);
		hoistMocks(transformedCode, filename, (code) => parse(code, {
			ecmaVersion: "latest",
			sourceType: result.format === "module" || result.format === "module-typescript" || result.format === "typescript" ? "module" : "script"
		}), {
			magicString: ms,
			globalThisAccessor: "\"__vitest_mocker__\""
		});
		let code;
		// inline the source map so stack traces keep pointing at the original code
		if (_ms) code = `${_ms.toString()}\n//# sourceMappingURL=${genSourceMapUrl(_ms.generateMap({
			hires: "boundary",
			source: filename
		}))}`;
		else code = source;
		return {
			format: result.format,
			shortCircuit: true,
			source: code
		};
	};
}
/** Encodes a source map (object or JSON string) as a base64 data: URL. */
function genSourceMapUrl(map) {
	const json = typeof map === "string" ? map : JSON.stringify(map);
	const base64 = Buffer.from(json).toString("base64");
	return `data:application/json;base64,${base64}`;
}
/** Returns the globally installed native mocker, or `undefined` when none is registered. */
function getNativeMocker() {
	// `typeof` guard avoids a ReferenceError when the global was never declared
	if (typeof __vitest_mocker__ === "undefined") return void 0;
	return __vitest_mocker__;
}
| export { setupNodeLoaderHooks }; |
| import module$1, { isBuiltin } from 'node:module'; | ||
| import { fileURLToPath, pathToFileURL } from 'node:url'; | ||
| import { automockModule, createManualModuleSource, collectModuleExports } from '@vitest/mocker/transforms'; | ||
| import { cleanUrl, createDefer } from '@vitest/utils/helpers'; | ||
| import { p as parse } from './acorn.B2iPLyUM.js'; | ||
| import { isAbsolute } from 'pathe'; | ||
| import { t as toBuiltin } from './modules.BJuCwlRJ.js'; | ||
| import { B as BareModuleMocker, n as normalizeModuleId } from './startVitestModuleRunner.BdSYEN5x.js'; | ||
| import 'node:fs'; | ||
| import './utils.BX5Fg8C4.js'; | ||
| import '@vitest/utils/timers'; | ||
| import '../path.js'; | ||
| import 'node:path'; | ||
| import '../module-evaluator.js'; | ||
| import 'node:vm'; | ||
| import 'vite/module-runner'; | ||
| import './traces.CCmnQaNT.js'; | ||
| import '@vitest/mocker'; | ||
| import '@vitest/mocker/redirect'; | ||
// Module mocker used when tests run through Node's native loader hooks instead
// of Vite's module runner. Mock URLs are rewritten in the "resolve" hook
// (resolveMockedModule) and mock sources are generated in the "load" hook
// (loadAutomock/loadManualMock).
class NativeModuleMocker extends BareModuleMocker {
	// Delays a dynamic import until registered mocks finished resolving so that
	// mock factories apply before the imported module executes.
	wrapDynamicImport(moduleFactory) {
		if (typeof moduleFactory === "function") return new Promise((resolve, reject) => {
			this.resolveMocks().finally(() => {
				moduleFactory().then(resolve, reject);
			});
		});
		return moduleFactory;
	}
	// Called from the "resolve" hook: rewrites a mocked module's URL so the
	// "load" hook can recognize it via the injected "mock=*" query.
	resolveMockedModule(url, parentURL) {
		// don't mock modules inside of packages because there is
		// a high chance that it uses `require` which is not mockable
		// because we use top-level await in "manual" mocks.
		// for the sake of consistency we don't support mocking anything at all
		if (parentURL.includes("/node_modules/")) return;
		const moduleId = normalizeModuleId(url.startsWith("file://") ? fileURLToPath(url) : url);
		const mockedModule = this.getDependencyMock(moduleId);
		if (!mockedModule) return;
		if (mockedModule.type === "redirect") return {
			url: pathToFileURL(mockedModule.redirect).toString(),
			shortCircuit: true
		};
		if (mockedModule.type === "automock" || mockedModule.type === "autospy") return {
			url: injectQuery(url, parentURL, `mock=${mockedModule.type}`),
			shortCircuit: true
		};
		if (mockedModule.type === "manual") return {
			url: injectQuery(url, parentURL, "mock=manual"),
			shortCircuit: true
		};
	}
	// Produces an automocked/autospied module source for the "load" hook, or
	// returns undefined when there is nothing to transform.
	loadAutomock(url, result) {
		const moduleId = cleanUrl(normalizeModuleId(url.startsWith("file://") ? fileURLToPath(url) : url));
		let source;
		if (isBuiltin(moduleId)) {
			// builtins have no source text - generate a re-export facade over the
			// actual module so the automock transform has code to rewrite
			const builtinModule = getBuiltinModule(moduleId);
			const exports$1 = Object.keys(builtinModule);
			source = `
import * as builtinModule from '${toBuiltin(moduleId)}?mock=actual'
${exports$1.map((key, index) => {
				return `
const __${index} = builtinModule["${key}"]
export { __${index} as "${key}" }
`;
			}).join("")}`;
		} else source = result.source?.toString();
		if (source == null) return;
		const mockType = url.includes("mock=automock") ? "automock" : "autospy";
		const transformedCode = transformCode(source, result.format || "module", moduleId);
		try {
			const ms = automockModule(transformedCode, mockType, (code) => parse(code, {
				sourceType: "module",
				ecmaVersion: "latest"
			}), { id: moduleId });
			return {
				format: "module",
				source: `${ms.toString()}\n//# sourceMappingURL=${genSourceMapUrl(ms.generateMap({
					hires: "boundary",
					source: moduleId
				}))}`,
				shortCircuit: true
			};
		} catch (cause) {
			throw new Error(`Cannot automock '${url}' because it failed to parse.`, { cause });
		}
	}
	// Produces the module source for a `vi.mock(..., factory)` mock in the "load" hook.
	loadManualMock(url, result) {
		const moduleId = cleanUrl(normalizeModuleId(url.startsWith("file://") ? fileURLToPath(url) : url));
		// should not be possible
		if (this.getDependencyMock(moduleId)?.type !== "manual") {
			console.warn(`Vitest detected unregistered manual mock ${moduleId}. This is a bug in Vitest. Please, open a new issue with reproduction.`);
			return;
		}
		if (isBuiltin(moduleId)) {
			const builtinModule = getBuiltinModule(toBuiltin(moduleId));
			return {
				format: "module",
				source: createManualModuleSource(moduleId, Object.keys(builtinModule)),
				shortCircuit: true
			};
		}
		if (!result.source) return;
		const transformedCode = transformCode(result.source.toString(), result.format || "module", moduleId);
		if (transformedCode == null) return;
		const format = result.format?.startsWith("module") ? "module" : "commonjs";
		try {
			return {
				format: "module",
				source: createManualModuleSource(moduleId, collectModuleExports(moduleId, transformedCode, format)),
				shortCircuit: true
			};
		} catch (cause) {
			throw new Error(`Failed to mock '${url}'. See the cause for more information.`, { cause });
		}
	}
	// How many times each module id has passed through the "load" hook.
	processedModules = /* @__PURE__ */ new Map();
	// Called on every load: counts the load and, when a mocked module is loaded
	// again while its factory is still pending, resolves the deferred promise to
	// break the circular import (see getFactoryModule).
	checkCircularManualMock(url) {
		const id = cleanUrl(normalizeModuleId(url.startsWith("file://") ? fileURLToPath(url) : url));
		this.processedModules.set(id, (this.processedModules.get(id) ?? 0) + 1);
		// the module is mocked and requested a second time, let's resolve
		// the factory function that will redefine the exports later
		if (this.originalModulePromises.has(id)) {
			const factoryPromise = this.factoryPromises.get(id);
			this.originalModulePromises.get(id)?.resolve({ __factoryPromise: factoryPromise });
		}
	}
	// Deferred promises for modules whose mock factory is still resolving.
	originalModulePromises = /* @__PURE__ */ new Map();
	// In-flight factory promises by module id.
	factoryPromises = /* @__PURE__ */ new Map();
	// potential performance improvement:
	// store by URL, not ids, no need to call url.*to* methods and normalizeModuleId
	getFactoryModule(id) {
		const mock = this.getMockerRegistry().getById(id);
		if (!mock || mock.type !== "manual") throw new Error(`Mock ${id} wasn't registered. This is probably a Vitest error. Please, open a new issue with reproduction.`);
		const mockResult = mock.resolve();
		if (mockResult instanceof Promise) {
			// to avoid circular dependency, we resolve this function as {__factoryPromise} in `checkCircularManualMock`
			// when it's requested the second time. then the exports are exposed as `undefined`,
			// but later redefined when the promise is actually resolved
			const promise = createDefer();
			promise.finally(() => {
				this.originalModulePromises.delete(id);
			});
			mockResult.then(promise.resolve, promise.reject).finally(() => {
				this.factoryPromises.delete(id);
			});
			this.factoryPromises.set(id, mockResult);
			this.originalModulePromises.set(id, promise);
			// Node.js on windows processes all the files first, and then runs them
			// unlike Node.js logic on Mac and Unix where it also runs the code while evaluating
			// So on Linux/Mac this `if` won't be hit because `checkCircularManualMock` will resolve it
			// And on Windows, the `checkCircularManualMock` will never have `originalModulePromises`
			// because `getFactoryModule` is not called until the evaluation phase
			// But if we track how many times the module was transformed,
			// we can deduce when to return `__factoryPromise` to support circular modules
			if ((this.processedModules.get(id) ?? 0) > 1) {
				this.processedModules.set(id, (this.processedModules.get(id) ?? 1) - 1);
				promise.resolve({ __factoryPromise: mockResult });
			}
			return promise;
		}
		return mockResult;
	}
	// Imports the real (unmocked) module behind `rawId` by tagging it with "mock=actual".
	importActual(rawId, importer) {
		const resolvedId = import.meta.resolve(rawId, pathToFileURL(importer).toString());
		const url = new URL(resolvedId);
		url.searchParams.set("mock", "actual");
		return import(url.toString());
	}
	// Imports the mocked version of `rawId`: reuses an existing mock URL,
	// follows a __mocks__ redirect, or falls back to automocking.
	importMock(rawId, importer) {
		const resolvedId = import.meta.resolve(rawId, pathToFileURL(importer).toString());
		// file is already mocked
		if (resolvedId.includes("mock=")) return import(resolvedId);
		const filename = fileURLToPath(resolvedId);
		const external = !isAbsolute(filename) || this.isModuleDirectory(resolvedId) ? normalizeModuleId(rawId) : null;
		// file is not mocked, automock or redirect it
		const redirect = this.findMockRedirect(filename, external);
		if (redirect) return import(pathToFileURL(redirect).toString());
		const url = new URL(resolvedId);
		url.searchParams.set("mock", "automock");
		return import(url.toString());
	}
}
const replacePercentageRE = /%/g;
/**
 * Appends `queryToInject` as the first query parameter of `url`, preserving
 * any existing query string and hash.
 */
function injectQuery(url, importer, queryToInject) {
	// escape "%" so the URL constructor does not choke on malformed percent-escapes
	const escaped = url.replace(replacePercentageRE, "%25");
	const { search, hash } = new URL(escaped, importer);
	const existingQuery = search ? `&${search.slice(1)}` : "";
	return `${cleanUrl(url)}?${queryToInject}${existingQuery}${hash ?? ""}`;
}
let __require;
/**
 * Requires the actual (unmocked) builtin module. The "?mock=actual" query is
 * recognized by the registered loader hooks and bypasses mocking.
 */
function getBuiltinModule(moduleId) {
	// lazily create a CommonJS require bound to this module
	if (__require === undefined) {
		__require = module$1.createRequire(import.meta.url);
	}
	return __require(`${moduleId}?mock=actual`);
}
/** Encodes a source map (object or JSON string) as a base64 data: URL. */
function genSourceMapUrl(map) {
	const json = typeof map === "string" ? map : JSON.stringify(map);
	return `data:application/json;base64,${Buffer.from(json).toString("base64")}`;
}
/**
 * Strips TypeScript type syntax from `code` when `format` indicates a
 * TypeScript module; other formats are returned unchanged.
 * Throws when the running Node.js lacks `module.stripTypeScriptTypes`.
 */
function transformCode(code, format, filename) {
	if (format.includes("typescript")) {
		// fixed: the message previously printed the literal "$(unknown)" instead of the file name
		if (!module$1.stripTypeScriptTypes) throw new Error(`Cannot parse '${filename}' because "module.stripTypeScriptTypes" is not supported. Module mocking requires Node.js 22.15 or higher. This is NOT a bug of Vitest.`);
		return module$1.stripTypeScriptTypes(code);
	}
	return code;
}
| export { NativeModuleMocker }; |
| import { DevEnvironment } from 'vite'; | ||
| import { V as Vitest, T as TestProject, a as TestProjectConfiguration } from './reporters.d.CRDGoPlb.js'; | ||
/**
 * Generate a unique cache identifier.
 *
 * Return `false` to disable caching of the file.
 * @experimental
 */
interface CacheKeyIdGenerator {
	(context: CacheKeyIdGeneratorContext): string | undefined | null | false;
}
/**
 * Context passed to a {@link CacheKeyIdGenerator}.
 * @experimental
 */
interface CacheKeyIdGeneratorContext {
	/** The Vite environment the module is processed in. */
	environment: DevEnvironment;
	/** Resolved module id of the file being cached. */
	id: string;
	/** Source code of the file before transformation. */
	sourceCode: string;
}
/** Context exposed to Vitest-aware Vite plugins. */
interface VitestPluginContext {
	/** The running Vitest instance. */
	vitest: Vitest;
	/** The test project this plugin is registered in. */
	project: TestProject;
	/** Registers additional test projects at runtime. */
	injectTestProjects: (config: TestProjectConfiguration | TestProjectConfiguration[]) => Promise<TestProject[]>;
	/**
	 * Define a generator that will be applied before hashing the cache key.
	 *
	 * Use this to make sure Vitest generates correct hash. It is a good idea
	 * to define this function if your plugin can be registered with different options.
	 *
	 * This is called only if `experimental.fsModuleCache` is defined.
	 * @experimental
	 */
	experimental_defineCacheKeyGenerator: (callback: CacheKeyIdGenerator) => void;
}
| export type { CacheKeyIdGenerator as C, VitestPluginContext as V, CacheKeyIdGeneratorContext as a }; |
Sorry, the diff of this file is too big to display
| import { r as resolveCoverageProviderModule } from './coverage.D_JHT54q.js'; | ||
| import { addSerializer } from '@vitest/snapshot'; | ||
| import { setSafeTimers } from '@vitest/utils/timers'; | ||
| import { g as getWorkerState } from './utils.BX5Fg8C4.js'; | ||
/** Resolves the coverage provider and starts collection, if a provider is configured. */
async function startCoverageInsideWorker(options, loader, runtimeOptions) {
	const coverageModule = await resolveCoverageProviderModule(options, loader);
	return coverageModule ? coverageModule.startCoverage?.(runtimeOptions) : null;
}
/** Resolves the coverage provider and collects the current coverage snapshot. */
async function takeCoverageInsideWorker(options, loader) {
	const coverageModule = await resolveCoverageProviderModule(options, loader);
	return coverageModule ? coverageModule.takeCoverage?.({ moduleExecutionInfo: loader.moduleExecutionInfo }) : null;
}
/** Resolves the coverage provider and stops collection, if a provider is configured. */
async function stopCoverageInsideWorker(options, loader, runtimeOptions) {
	const coverageModule = await resolveCoverageProviderModule(options, loader);
	return coverageModule ? coverageModule.stopCoverage?.(runtimeOptions) : null;
}
// Tracks whether the one-time part of the environment setup already ran.
let globalSetup = false;
/**
 * Applies per-config environment setup (defines, env vars) and, once per
 * worker, installs safe timers and the optional global test API.
 */
async function setupCommonEnv(config) {
	setupDefines(config);
	setupEnv(config.env);
	if (!globalSetup) {
		globalSetup = true;
		setSafeTimers();
		if (config.globals) (await import('./globals.DA6L6i5h.js')).registerApiGlobally();
	}
}
/** Copies every configured compile-time define onto `globalThis`. */
function setupDefines(config) {
	const defines = config.defines;
	for (const key in defines) globalThis[key] = defines[key];
}
/** Mirrors the resolved env onto the worker's `import.meta.env` replacement. */
function setupEnv(env) {
	const state = getWorkerState();
	// same boolean-to-string assignment as VitestPlugin.configResolved
	const { PROD, DEV, ...restEnvs } = env;
	state.metaEnv.PROD = PROD;
	state.metaEnv.DEV = DEV;
	for (const [key, value] of Object.entries(restEnvs)) state.metaEnv[key] = value;
}
/**
 * Resolves the diff options: inline objects are returned as-is, string values
 * are imported as a module whose default export must be the config object.
 */
async function loadDiffConfig(config, moduleRunner) {
	const diff = config.diff;
	if (typeof diff === "object") return diff;
	if (typeof diff !== "string") return void 0;
	const diffModule = await moduleRunner.import(diff);
	const defaultExport = diffModule ? diffModule.default : void 0;
	if (typeof defaultExport === "object" && defaultExport != null) return defaultExport;
	throw new Error(`invalid diff config file ${diff}. Must have a default export with config object`);
}
/**
 * Imports every configured snapshot serializer module, validates its shape
 * (default export with `test` plus `serialize` or `print`) and registers it.
 */
async function loadSnapshotSerializers(config, moduleRunner) {
	const files = config.snapshotSerializers;
	const serializers = await Promise.all(files.map(async (file) => {
		const mod = await moduleRunner.import(file);
		if (!mod || typeof mod.default !== "object" || mod.default === null) throw new Error(`invalid snapshot serializer file ${file}. Must export a default object`);
		const serializer = mod.default;
		if (typeof serializer.test !== "function" || typeof serializer.serialize !== "function" && typeof serializer.print !== "function") throw new TypeError(`invalid snapshot serializer in ${file}. Must have a 'test' method along with either a 'serialize' or 'print' method.`);
		return serializer;
	}));
	for (const serializer of serializers) addSerializer(serializer);
}
| export { loadSnapshotSerializers as a, startCoverageInsideWorker as b, stopCoverageInsideWorker as c, loadDiffConfig as l, setupCommonEnv as s, takeCoverageInsideWorker as t }; |
| import fs from 'node:fs'; | ||
| import { isBareImport } from '@vitest/utils/helpers'; | ||
| import { i as isBuiltin, a as isBrowserExternal, t as toBuiltin } from './modules.BJuCwlRJ.js'; | ||
| import { E as EnvironmentTeardownError, a as getSafeWorkerState } from './utils.BX5Fg8C4.js'; | ||
| import { pathToFileURL } from 'node:url'; | ||
| import { normalize, join } from 'pathe'; | ||
| import { distDir } from '../path.js'; | ||
| import { VitestModuleEvaluator, unwrapId } from '../module-evaluator.js'; | ||
| import { isAbsolute, resolve } from 'node:path'; | ||
| import vm from 'node:vm'; | ||
| import { MockerRegistry, mockObject, RedirectedModule, AutomockedModule } from '@vitest/mocker'; | ||
| import { findMockRedirect } from '@vitest/mocker/redirect'; | ||
| import * as viteModuleRunner from 'vite/module-runner'; | ||
| import { T as Traces } from './traces.CCmnQaNT.js'; | ||
/**
 * Base mocker shared by Vitest module runners. Keeps one mock registry per
 * test suite, resolves mocks queued by `vi.mock`/`vi.unmock` and registers
 * manual/autospy/automock/redirect mocks. Subclasses implement the actual
 * module loading (`importActual`/`importMock`) and cache invalidation.
 */
class BareModuleMocker {
	// Mocks queued by vi.mock/vi.unmock before their ids are resolved.
	static pendingIds = [];
	spyModule;
	// Global constructors used to create errors/objects; subclasses may
	// replace these with the vm-context equivalents.
	primitives;
	// Mock registries keyed by suite file path ("global" outside a suite).
	registries = /* @__PURE__ */ new Map();
	mockContext = { callstack: null };
	_otel;
	constructor(options) {
		this.options = options;
		this._otel = options.traces;
		this.primitives = {
			Object,
			Error,
			Function,
			RegExp,
			Symbol: globalThis.Symbol,
			Array,
			Map
		};
		if (options.spyModule) this.spyModule = options.spyModule;
	}
	get root() {
		return this.options.root;
	}
	get moduleDirectories() {
		return this.options.moduleDirectories || [];
	}
	/** Returns the registry for the current suite, creating it on first access. */
	getMockerRegistry() {
		const suite = this.getSuiteFilepath();
		if (!this.registries.has(suite)) this.registries.set(suite, new MockerRegistry());
		return this.registries.get(suite);
	}
	reset() {
		this.registries.clear();
	}
	invalidateModuleById(_id) {
		// implemented by mockers that control the module runner
	}
	isModuleDirectory(path) {
		return this.moduleDirectories.some((dir) => path.includes(dir));
	}
	getSuiteFilepath() {
		return this.options.getCurrentTestFilepath() || "global";
	}
	/** Creates an error using the context-correct Error constructor, attaching an optional code frame. */
	createError(message, codeFrame) {
		const Error = this.primitives.Error;
		const error = new Error(message);
		Object.assign(error, { codeFrame });
		return error;
	}
	/**
	 * Resolves a raw specifier to `{ id, url, external }`, falling back to the
	 * normalized raw id when the resolver cannot find the module.
	 */
	async resolveId(rawId, importer) {
		return this._otel.$("vitest.mocker.resolve_id", { attributes: {
			"vitest.module.raw_id": rawId,
			// record the actual importer, not the imported id
			"vitest.module.importer": importer
		} }, async (span) => {
			const result = await this.options.resolveId(rawId, importer);
			if (!result) {
				span.addEvent("could not resolve id, fallback to unresolved values");
				const id = normalizeModuleId(rawId);
				span.setAttributes({
					"vitest.module.id": id,
					"vitest.module.url": rawId,
					"vitest.module.external": id,
					"vitest.module.fallback": true
				});
				return {
					id,
					url: rawId,
					external: id
				};
			}
			// external is node_module or unresolved module
			// for example, some people mock "vscode" and don't have it installed
			const external = !isAbsolute(result.file) || this.isModuleDirectory(result.file) ? normalizeModuleId(rawId) : null;
			const id = normalizeModuleId(result.id);
			span.setAttributes({
				"vitest.module.id": id,
				"vitest.module.url": result.url,
				"vitest.module.external": external ?? false
			});
			return {
				...result,
				id,
				external
			};
		});
	}
	/** Resolves every queued mock/unmock request and applies it to the registry. */
	async resolveMocks() {
		if (!BareModuleMocker.pendingIds.length) return;
		await Promise.all(BareModuleMocker.pendingIds.map(async (mock) => {
			const { id, url, external } = await this.resolveId(mock.id, mock.importer);
			if (mock.action === "unmock") this.unmockPath(id);
			if (mock.action === "mock") this.mockPath(mock.id, id, url, external, mock.type, mock.factory);
		}));
		BareModuleMocker.pendingIds = [];
	}
	// public method to avoid circular dependency
	getMockContext() {
		return this.mockContext;
	}
	// path used to store mocked dependencies
	getMockPath(dep) {
		return `mock:${dep}`;
	}
	getDependencyMock(id) {
		return this.getMockerRegistry().getById(fixLeadingSlashes(id));
	}
	getDependencyMockByUrl(url) {
		return this.getMockerRegistry().get(url);
	}
	findMockRedirect(mockPath, external) {
		return findMockRedirect(this.root, mockPath, external);
	}
	/**
	 * Automocks/autospies an object. The second argument may be either the
	 * target exports object or the module type when no target is needed.
	 */
	mockObject(object, mockExportsOrModuleType, moduleType) {
		let mockExports;
		if (mockExportsOrModuleType === "automock" || mockExportsOrModuleType === "autospy") {
			moduleType = mockExportsOrModuleType;
			mockExports = void 0;
		} else mockExports = mockExportsOrModuleType;
		moduleType ??= "automock";
		const createMockInstance = this.spyModule?.createMockInstance;
		if (!createMockInstance) throw this.createError("[vitest] `spyModule` is not defined. This is a Vitest error. Please open a new issue with reproduction.");
		return mockObject({
			globalConstructors: this.primitives,
			createMockInstance,
			type: moduleType
		}, object, mockExports);
	}
	unmockPath(id) {
		this.getMockerRegistry().deleteById(id);
		this.invalidateModuleById(id);
	}
	/** Registers a mock of the given type, invalidating any cached mocked module. */
	mockPath(originalId, id, url, external, mockType, factory) {
		const registry = this.getMockerRegistry();
		if (mockType === "manual") registry.register("manual", originalId, id, url, factory);
		else if (mockType === "autospy") registry.register("autospy", originalId, id, url);
		else {
			const redirect = this.findMockRedirect(id, external);
			if (redirect) registry.register("redirect", originalId, id, url, redirect);
			else registry.register("automock", originalId, id, url);
		}
		// every time the mock is registered, we remove the previous one from the cache
		this.invalidateModuleById(id);
	}
	async importActual(_rawId, _importer, _callstack) {
		throw new Error(`importActual is not implemented`);
	}
	async importMock(_rawId, _importer, _callstack) {
		throw new Error(`importMock is not implemented`);
	}
	/** Queues a `vi.mock` call to be resolved by `resolveMocks`. */
	queueMock(id, importer, factoryOrOptions) {
		const mockType = getMockType(factoryOrOptions);
		BareModuleMocker.pendingIds.push({
			action: "mock",
			id,
			importer,
			factory: typeof factoryOrOptions === "function" ? factoryOrOptions : void 0,
			type: mockType
		});
	}
	/** Queues a `vi.unmock` call to be resolved by `resolveMocks`. */
	queueUnmock(id, importer) {
		BareModuleMocker.pendingIds.push({
			action: "unmock",
			id,
			importer
		});
	}
}
/** Derives the mock type from the optional factory function / options object. */
function getMockType(factoryOrOptions) {
	if (typeof factoryOrOptions === "function") return "manual";
	if (factoryOrOptions && factoryOrOptions.spy) return "autospy";
	return "automock";
}
// unique id that is not available as "$bare_import" like "test"
// https://nodejs.org/api/modules.html#built-in-modules-with-mandatory-node-prefix
const prefixedBuiltins = new Set([
	"node:sea",
	"node:sqlite",
	"node:test",
	"node:test/reporters"
]);
const isWindows$1 = process.platform === "win32";
const windowsSlashRE = /\\/g;
/** Converts Windows backslashes to forward slashes. */
function slash(p) {
	return p.replace(windowsSlashRE, "/");
}
// transform file url to id
// virtual:custom -> virtual:custom
// \0custom -> \0custom
// /root/id -> /id
// /root/id.js -> /id.js
// C:/root/id.js -> /id.js
// C:\root\id.js -> /id.js
// TODO: expose this in vite/module-runner
function normalizeModuleId(file) {
	// mandatory node:-prefixed builtins keep their prefix
	if (prefixedBuiltins.has(file)) return file;
	// if it's not in the root, keep it as a path, not a URL
	let id = slash(file);
	id = id.replace(/^\/@fs\//, isWindows$1 ? "" : "/");
	id = id.replace(/^node:/, "");
	id = id.replace(/^\/+/, "/");
	return id.replace(/^file:\//, "/");
}
const multipleSlashRe = /^\/+/;
// module-runner incorrectly replaces file:///path with `///path`
function fixLeadingSlashes(id) {
	if (!id.startsWith("//")) return id;
	return id.replace(multipleSlashRe, "/");
}
// copied from vite/src/shared/utils.ts
const postfixRE = /[?#].*$/;
/** Strips the query string and hash from a url. */
function cleanUrl(url) {
	return url.replace(postfixRE, "");
}
/** Splits a url into its clean file part and the trailing `?query#hash` postfix. */
function splitFileAndPostfix(path) {
	const file = cleanUrl(path);
	const postfix = path.slice(file.length);
	return { file, postfix };
}
/** Inserts `queryToInject` as the first query parameter, keeping any existing query/hash. */
function injectQuery(url, queryToInject) {
	const { file, postfix } = splitFileAndPostfix(url);
	const rest = postfix[0] === "?" ? `&${postfix.slice(1)}` : postfix;
	return `${file}?${queryToInject}${rest}`;
}
/** Removes the `queryToRemove` parameter (and a dangling `?`) from a url. */
function removeQuery(url, queryToRemove) {
	const re = new RegExp(`[?&]${queryToRemove}(?=[&#]|$)`);
	return url.replace(re, "").replace(/\?$/, "");
}
// Location of the bundled @vitest/spy entry used to create mock instances.
const spyModulePath = resolve(distDir, "spy.js");
/**
 * Mocker bound to a VitestModuleRunner. Loads modules through the runner,
 * caches mocked exports on evaluated-module nodes, and supports an optional
 * vm context (primitives are then taken from inside that context).
 */
class VitestMocker extends BareModuleMocker {
	// Well-known keys that may be probed on a manual mock's exports without
	// the "missing export" error being thrown.
	filterPublicKeys;
	constructor(moduleRunner, options) {
		super(options);
		this.moduleRunner = moduleRunner;
		this.options = options;
		const context = this.options.context;
		// when running inside a vm context, use that context's constructors
		// so instanceof/prototype checks match user code
		if (context) this.primitives = vm.runInContext("({ Object, Error, Function, RegExp, Symbol, Array, Map })", context);
		const Symbol = this.primitives.Symbol;
		this.filterPublicKeys = [
			"__esModule",
			Symbol.asyncIterator,
			Symbol.hasInstance,
			Symbol.isConcatSpreadable,
			Symbol.iterator,
			Symbol.match,
			Symbol.matchAll,
			Symbol.replace,
			Symbol.search,
			Symbol.split,
			Symbol.species,
			Symbol.toPrimitive,
			Symbol.toStringTag,
			Symbol.unscopables
		];
	}
	get evaluatedModules() {
		return this.moduleRunner.evaluatedModules;
	}
	/** Lazily imports the spy module used to create mock functions. */
	async initializeSpyModule() {
		if (this.spyModule) return;
		this.spyModule = await this.moduleRunner.import(spyModulePath);
	}
	reset() {
		this.registries.clear();
	}
	/** Drops the cached mocked module node for `id`, if any. */
	invalidateModuleById(id) {
		const mockId = this.getMockPath(id);
		const node = this.evaluatedModules.getModuleById(mockId);
		if (node) {
			this.evaluatedModules.invalidateModule(node);
			node.mockedExports = void 0;
		}
	}
	/** Ensures an evaluated-module node exists for `id`/`url` with empty meta. */
	ensureModule(id, url) {
		const node = this.evaluatedModules.ensureModule(id, url);
		// TODO
		node.meta = {
			id,
			url,
			code: "",
			file: null,
			invalidate: false
		};
		return node;
	}
	/**
	 * Resolves a manual (factory) mock once, wrapping its exports in a Proxy
	 * that throws a helpful error when a missing export is accessed.
	 */
	async callFunctionMock(id, url, mock) {
		const node = this.ensureModule(id, url);
		if (node.exports) return node.exports;
		const exports$1 = await mock.resolve();
		const moduleExports = new Proxy(exports$1, { get: (target, prop) => {
			const val = target[prop];
			// 'then' can exist on non-Promise objects, need nested instanceof check for logic to work
			if (prop === "then") {
				if (target instanceof Promise) return target.then.bind(target);
			} else if (!(prop in target)) {
				if (this.filterPublicKeys.includes(prop)) return;
				throw this.createError(`[vitest] No "${String(prop)}" export is defined on the "${mock.raw}" mock. Did you forget to return it from "vi.mock"?
If you need to partially mock a module, you can use "importOriginal" helper inside:
`, `vi.mock(import("${mock.raw}"), async (importOriginal) => {
	const actual = await importOriginal()
	return {
		...actual,
		// your mocked methods
	}
})`);
			}
			return val;
		} });
		node.exports = moduleExports;
		return moduleExports;
	}
	/** Imports the original (unmocked) module via the `_vitest_original` query. */
	async importActual(rawId, importer, callstack) {
		const { url } = await this.resolveId(rawId, importer);
		const actualUrl = injectQuery(url, "_vitest_original");
		const node = await this.moduleRunner.fetchModule(actualUrl, importer);
		return await this.moduleRunner.cachedRequest(node.url, node, callstack || [importer], void 0, true);
	}
	/** Imports the mocked version of a module, registering a default mock if none exists. */
	async importMock(rawId, importer) {
		const { id, url, external } = await this.resolveId(rawId, importer);
		let mock = this.getDependencyMock(id);
		if (!mock) {
			const redirect = this.findMockRedirect(id, external);
			if (redirect) mock = new RedirectedModule(rawId, id, rawId, redirect);
			else mock = new AutomockedModule(rawId, id, rawId);
		}
		if (mock.type === "automock" || mock.type === "autospy") {
			const node = await this.moduleRunner.fetchModule(url, importer);
			const mod = await this.moduleRunner.cachedRequest(url, node, [importer], void 0, true);
			const Object = this.primitives.Object;
			return this.mockObject(mod, Object.create(Object.prototype), mock.type);
		}
		if (mock.type === "manual") return this.callFunctionMock(id, url, mock);
		const node = await this.moduleRunner.fetchModule(mock.redirect);
		return this.moduleRunner.cachedRequest(mock.redirect, node, [importer], void 0, true);
	}
	/**
	 * Evaluates a request through its registered mock. Returns the mocked
	 * exports object, a redirect url (string) to load instead, or undefined
	 * when the mock does not apply (e.g. recursive manual mock).
	 */
	async requestWithMockedModule(url, evaluatedNode, callstack, mock) {
		return this._otel.$("vitest.mocker.evaluate", async (span) => {
			const mockId = this.getMockPath(evaluatedNode.id);
			span.setAttributes({
				"vitest.module.id": mockId,
				"vitest.mock.type": mock.type,
				"vitest.mock.id": mock.id,
				"vitest.mock.url": mock.url,
				"vitest.mock.raw": mock.raw
			});
			if (mock.type === "automock" || mock.type === "autospy") {
				const cache = this.evaluatedModules.getModuleById(mockId);
				if (cache && cache.mockedExports) return cache.mockedExports;
				const Object = this.primitives.Object;
				// we have to define a separate object that will copy all properties into itself
				// and can't just use the same `exports` define automatically by Vite before the evaluator
				const exports$1 = Object.create(null);
				Object.defineProperty(exports$1, Symbol.toStringTag, {
					value: "Module",
					configurable: true,
					writable: true
				});
				const node = this.ensureModule(mockId, this.getMockPath(evaluatedNode.url));
				node.meta = evaluatedNode.meta;
				node.file = evaluatedNode.file;
				node.mockedExports = exports$1;
				const mod = await this.moduleRunner.cachedRequest(url, node, callstack, void 0, true);
				this.mockObject(mod, exports$1, mock.type);
				return exports$1;
			}
			if (mock.type === "manual" && !callstack.includes(mockId) && !callstack.includes(url)) try {
				callstack.push(mockId);
				// this will not work if user does Promise.all(import(), import())
				// we can also use AsyncLocalStorage to store callstack, but this won't work in the browser
				// maybe we should improve mock API in the future?
				this.mockContext.callstack = callstack;
				return await this.callFunctionMock(mockId, this.getMockPath(url), mock);
			} finally {
				this.mockContext.callstack = null;
				const indexMock = callstack.indexOf(mockId);
				callstack.splice(indexMock, 1);
			}
			else if (mock.type === "redirect" && !callstack.includes(mock.redirect)) {
				span.setAttribute("vitest.mock.redirect", mock.redirect);
				return mock.redirect;
			}
		});
	}
	/** Like `requestWithMockedModule`, but a no-op when no mock is registered for the id. */
	async mockedRequest(url, evaluatedNode, callstack) {
		const mock = this.getDependencyMock(evaluatedNode.id);
		if (!mock) return;
		return this.requestWithMockedModule(url, evaluatedNode, callstack, mock);
	}
}
/**
 * Module-runner transport that answers `vite:invoke` events. Only the
 * "getBuiltins" and "fetchModule" methods are supported; errors are returned
 * as `{ error }` results rather than thrown.
 */
class VitestTransport {
	constructor(options, evaluatedModules, callstacks) {
		this.options = options;
		this.evaluatedModules = evaluatedModules;
		// per-module callstacks, used to enrich teardown error messages
		this.callstacks = callstacks;
	}
	async invoke(event) {
		if (event.type !== "custom") return { error: /* @__PURE__ */ new Error(`Vitest Module Runner doesn't support Vite HMR events.`) };
		if (event.event !== "vite:invoke") return { error: /* @__PURE__ */ new Error(`Vitest Module Runner doesn't support ${event.event} event.`) };
		const { name, data } = event.data;
		if (name === "getBuiltins")
			// we return an empty array here to avoid client-side builtin check,
			// as we need builtins to go through `fetchModule`
			return { result: [] };
		if (name !== "fetchModule") return { error: /* @__PURE__ */ new Error(`Unknown method: ${name}. Expected "fetchModule".`) };
		try {
			return { result: await this.options.fetchModule(...data) };
		} catch (cause) {
			// enrich teardown errors with the last recorded import callstack
			if (cause instanceof EnvironmentTeardownError) {
				const [id, importer] = data;
				let message = `Cannot load '${id}'${importer ? ` imported from ${importer}` : ""} after the environment was torn down. This is not a bug in Vitest.`;
				const moduleNode = importer ? this.evaluatedModules.getModuleById(importer) : void 0;
				const callstack = moduleNode ? this.callstacks.get(moduleNode) : void 0;
				if (callstack) message += ` The last recorded callstack:\n- ${[
					...callstack,
					importer,
					id
				].reverse().join("\n- ")}`;
				const error = new EnvironmentTeardownError(message);
				// keep the original stack, but with the new message
				if (cause.stack) error.stack = cause.stack.replace(cause.message, error.message);
				return { error };
			}
			return { error: cause };
		}
	}
}
/**
 * Builds the `import.meta` object for modules executed by the Node runner:
 * Vite's default import.meta with `main` forced to `false` and `resolve`
 * backed by the native `import.meta.resolve` when available.
 */
const createNodeImportMeta = (modulePath) => {
	if (!viteModuleRunner.createDefaultImportMeta) throw new Error(`createNodeImportMeta is not supported in this version of Vite.`);
	const defaultMeta = viteModuleRunner.createDefaultImportMeta(modulePath);
	const href = defaultMeta.url;
	const nativeResolver = createImportMetaResolver();
	const resolve = (id, parent) => {
		const resolver = nativeResolver ?? defaultMeta.resolve;
		return resolver(id, parent ?? href);
	};
	return {
		...defaultMeta,
		main: false,
		resolve
	};
};
/** Returns a resolver backed by native `import.meta.resolve`, or `undefined` when unavailable. */
function createImportMetaResolver() {
	if (!import.meta.resolve) return void 0;
	return (specifier, importer) => import.meta.resolve(specifier, importer);
}
/**
 * Vite ModuleRunner subclass that routes every module request through the
 * Vitest mocker, resolves specifiers via the worker RPC, and records import
 * callstacks for better teardown error messages.
 */
// @ts-expect-error overriding private method
class VitestModuleRunner extends viteModuleRunner.ModuleRunner {
	mocker;
	moduleExecutionInfo;
	_otel;
	// WeakMap<module node, callstack> shared with VitestTransport
	_callstacks;
	constructor(vitestOptions) {
		const options = vitestOptions;
		const evaluatedModules = options.evaluatedModules;
		const callstacks = /* @__PURE__ */ new WeakMap();
		const transport = new VitestTransport(options.transport, evaluatedModules, callstacks);
		super({
			transport,
			hmr: false,
			evaluatedModules,
			sourcemapInterceptor: "prepareStackTrace",
			createImportMeta: vitestOptions.createImportMeta
		}, options.evaluator);
		this.vitestOptions = vitestOptions;
		this._callstacks = callstacks;
		// fall back to disabled tracing when no traces were provided
		this._otel = vitestOptions.traces || new Traces({ enabled: false });
		this.moduleExecutionInfo = options.getWorkerState().moduleExecutionInfo;
		this.mocker = options.mocker || new VitestMocker(this, {
			spyModule: options.spyModule,
			context: options.vm?.context,
			traces: this._otel,
			resolveId: options.transport.resolveId,
			// getters re-read the worker state so config changes are picked up
			get root() {
				return options.getWorkerState().config.root;
			},
			get moduleDirectories() {
				return options.getWorkerState().config.deps.moduleDirectories || [];
			},
			getCurrentTestFilepath() {
				return options.getWorkerState().filepath;
			}
		});
		// expose the mocker either on the vm context or on the global object
		if (options.vm) options.vm.context.__vitest_mocker__ = this.mocker;
		else Object.defineProperty(globalThis, "__vitest_mocker__", {
			configurable: true,
			writable: true,
			value: this.mocker
		});
	}
	/**
	 * Vite checks that the module has exports emulating the Node.js behaviour,
	 * but Vitest is more relaxed.
	 *
	 * We should keep the Vite behavour when there is a `strict` flag.
	 * @internal
	 */
	processImport(exports$1) {
		return exports$1;
	}
	/** Resolves the raw specifier through the transport before delegating to Vite's import. */
	async import(rawId) {
		const resolved = await this._otel.$("vitest.module.resolve_id", { attributes: { "vitest.module.raw_id": rawId } }, async (span) => {
			const result = await this.vitestOptions.transport.resolveId(rawId);
			if (result) span.setAttributes({
				"vitest.module.url": result.url,
				"vitest.module.file": result.file,
				"vitest.module.id": result.id
			});
			return result;
		});
		return super.import(resolved ? resolved.url : rawId);
	}
	async fetchModule(url, importer) {
		return await this.cachedModule(url, importer);
	}
	/** Calls Vite's private cachedRequest, bypassing the mocker. */
	_cachedRequest(url, module, callstack = [], metadata) {
		// @ts-expect-error "cachedRequest" is private
		return super.cachedRequest(url, module, callstack, metadata);
	}
	/**
	 * @internal
	 */
	async cachedRequest(url, mod, callstack = [], metadata, ignoreMock = false) {
		// Track for a better error message if dynamic import is not resolved properly
		this._callstacks.set(mod, callstack);
		if (ignoreMock) return this._cachedRequest(url, mod, callstack, metadata);
		let mocked;
		if (mod.meta && "mockedModule" in mod.meta) {
			const mockedModule = mod.meta.mockedModule;
			const mockId = this.mocker.getMockPath(mod.id);
			// bypass mock and force "importActual" behavior when:
			// - mock was removed by doUnmock (stale mockedModule in meta)
			// - self-import: mock factory/file is importing the module it's mocking
			const isStale = !this.mocker.getDependencyMock(mod.id);
			const isSelfImport = callstack.includes(mockId) || callstack.includes(url) || "redirect" in mockedModule && callstack.includes(mockedModule.redirect);
			if (isStale || isSelfImport) {
				const node = await this.fetchModule(injectQuery(url, "_vitest_original"));
				return this._cachedRequest(node.url, node, callstack, metadata);
			}
			mocked = await this.mocker.requestWithMockedModule(url, mod, callstack, mockedModule);
		} else mocked = await this.mocker.mockedRequest(url, mod, callstack);
		// a string result is a redirect url to load instead of the original
		if (typeof mocked === "string") {
			const node = await this.fetchModule(mocked);
			return this._cachedRequest(mocked, node, callstack, metadata);
		}
		// an object result is the ready-made mocked exports
		if (mocked != null && typeof mocked === "object") return mocked;
		return this._cachedRequest(url, mod, callstack, metadata);
	}
	/** @internal */
	_invalidateSubTreeById(ids, invalidated = /* @__PURE__ */ new Set()) {
		for (const id of ids) {
			if (invalidated.has(id)) continue;
			const node = this.evaluatedModules.getModuleById(id);
			if (!node) continue;
			invalidated.add(id);
			// invalidate every module that imports this one first
			const subIds = Array.from(this.evaluatedModules.idToModuleMap).filter(([, mod]) => mod.importers.has(id)).map(([key]) => key);
			if (subIds.length) this._invalidateSubTreeById(subIds, invalidated);
			this.evaluatedModules.invalidateModule(node);
		}
	}
}
// matches "vitest", "vitest/*", "@vitest/*"
const bareVitestRegexp = /^@?vitest(?:\/|$)/;
const normalizedDistDir = normalize(distDir);
// cache of root -> dist dir path relative to that root
const relativeIds = {};
const externalizeMap = /* @__PURE__ */ new Map();
// all Vitest imports always need to be externalized
function getCachedVitestImport(id, state) {
	// strip the /@fs/ prefix (keeps the leading slash on posix)
	if (id.startsWith("/@fs/") || id.startsWith("\\@fs\\")) id = id.slice(process.platform === "win32" ? 5 : 4);
	const cached = externalizeMap.get(id);
	if (cached !== undefined) return {
		externalize: cached,
		type: "module"
	};
	// always externalize Vitest because we import from there before running tests
	// so we already have it cached by Node.js
	const root = state().config.root;
	let relativeRoot = relativeIds[root];
	if (relativeRoot === undefined) {
		relativeRoot = normalizedDistDir.slice(root.length);
		relativeIds[root] = relativeRoot;
	}
	if (id.includes(distDir) || id.includes(normalizedDistDir)) {
		const externalize = id.startsWith("file://") ? id : pathToFileURL(id).toString();
		externalizeMap.set(id, externalize);
		return {
			externalize,
			type: "module"
		};
	}
	if (relativeRoot && relativeRoot !== "/" && id.startsWith(relativeRoot)) {
		const externalize = pathToFileURL(join(root, id)).toString();
		externalizeMap.set(id, externalize);
		return {
			externalize,
			type: "module"
		};
	}
	if (bareVitestRegexp.test(id)) {
		externalizeMap.set(id, id);
		return {
			externalize: id,
			type: "module"
		};
	}
	return null;
}
const { readFileSync } = fs;
// key under which the vm context is exposed (see vm-based workers)
const VITEST_VM_CONTEXT_SYMBOL = "__vitest_vm_context__";
const cwd = process.cwd();
const isWindows = process.platform === "win32";
/**
 * Creates a fully wired VitestModuleRunner: evaluator, mocker, and a
 * transport whose fetchModule goes through the worker RPC, with special
 * handling for Vitest's own modules, builtins, mocks and cached results.
 */
function startVitestModuleRunner(options) {
	const traces = options.traces;
	// prefer live worker state; fall back to the state passed in options
	const state = () => getSafeWorkerState() || options.state;
	const rpc = () => state().rpc;
	const environment = () => {
		const environment = state().environment;
		return environment.viteEnvironment || environment.name;
	};
	// vm mode is only enabled when both a context and an executor are provided
	const vm = options.context && options.externalModulesExecutor ? {
		context: options.context,
		externalModulesExecutor: options.externalModulesExecutor
	} : void 0;
	const evaluator = options.evaluator || new VitestModuleEvaluator(vm, {
		traces,
		evaluatedModules: options.evaluatedModules,
		get moduleExecutionInfo() {
			return state().moduleExecutionInfo;
		},
		get interopDefault() {
			return state().config.deps.interopDefault;
		},
		getCurrentTestFilepath: () => state().filepath
	});
	const moduleRunner = new VitestModuleRunner({
		spyModule: options.spyModule,
		evaluatedModules: options.evaluatedModules,
		evaluator,
		traces,
		mocker: options.mocker,
		transport: {
			async fetchModule(id, importer, options) {
				const resolvingModules = state().resolvingModules;
				if (isWindows) {
					if (id[1] === ":") {
						// The drive letter is different for whatever reason, we need to normalize it to CWD
						if (id[0] !== cwd[0] && id[0].toUpperCase() === cwd[0].toUpperCase()) id = (cwd[0].toUpperCase() === cwd[0] ? id[0].toUpperCase() : id[0].toLowerCase()) + id.slice(1);
						// always mark absolute windows paths, otherwise Vite will externalize it
						id = `/@id/${id}`;
					}
				}
				// Vitest's own modules are always served from the cached dist
				const vitest = getCachedVitestImport(id, state);
				if (vitest) return vitest;
				// strip _vitest_original query added by importActual so that
				// the plugin pipeline sees the original import id (e.g. virtual modules's load hook)
				const isImportActual = id.includes("_vitest_original");
				if (isImportActual) id = removeQuery(id, "_vitest_original");
				const rawId = unwrapId(id);
				resolvingModules.add(rawId);
				try {
					// flush queued vi.mock/vi.unmock calls before serving the module
					if (VitestMocker.pendingIds.length) await moduleRunner.mocker.resolveMocks();
					if (!isImportActual) {
						// serve manual/redirect mocks without hitting the rpc
						const resolvedMock = moduleRunner.mocker.getDependencyMockByUrl(id);
						if (resolvedMock?.type === "manual" || resolvedMock?.type === "redirect") return {
							code: "",
							file: null,
							id: resolvedMock.id,
							url: resolvedMock.url,
							invalidate: false,
							mockedModule: resolvedMock
						};
					}
					if (isBuiltin(rawId)) return {
						externalize: rawId,
						type: "builtin"
					};
					if (isBrowserExternal(rawId)) return {
						externalize: toBuiltin(rawId),
						type: "builtin"
					};
					// if module is invalidated, the worker will be recreated,
					// so cached is always true in a single worker
					if (options?.cached) return { cache: true };
					const otelCarrier = traces?.getContextCarrier();
					const result = await rpc().fetch(id, importer, environment(), options, otelCarrier);
					// large results are written to a tmp file by the server; read them back
					if ("cached" in result) return {
						code: readFileSync(result.tmp, "utf-8"),
						...result
					};
					return result;
				} catch (cause) {
					// rethrow vite error if it cannot load the module because it's not resolved
					if (typeof cause === "object" && cause != null && cause.code === "ERR_LOAD_URL" || typeof cause?.message === "string" && cause.message.includes("Failed to load url") || typeof cause?.message === "string" && cause.message.startsWith("Cannot find module '")) {
						const error = new Error(`Cannot find ${isBareImport(id) ? "package" : "module"} '${id}'${importer ? ` imported from ${importer}` : ""}`, { cause });
						error.code = "ERR_MODULE_NOT_FOUND";
						throw error;
					}
					throw cause;
				} finally {
					resolvingModules.delete(rawId);
				}
			},
			resolveId(id, importer) {
				return rpc().resolve(id, importer, environment());
			}
		},
		getWorkerState: state,
		vm,
		createImportMeta: options.createImportMeta
	});
	return moduleRunner;
}
| export { BareModuleMocker as B, VITEST_VM_CONTEXT_SYMBOL as V, VitestModuleRunner as a, VitestTransport as b, createNodeImportMeta as c, normalizeModuleId as n, startVitestModuleRunner as s }; |
// NOTE: a section of the original bundle was omitted at this point in the
// paste source ("diff too big to display" placeholder); the code below is a
// separate concatenated chunk of the bundle.
| import { fileURLToPath, pathToFileURL } from 'node:url'; | ||
| import vm, { isContext, runInContext } from 'node:vm'; | ||
| import { dirname, basename, extname, normalize, resolve } from 'pathe'; | ||
| import { l as loadEnvironment, a as listenForErrors, e as emitModuleRunner } from './init.Borgldul.js'; | ||
| import { distDir } from '../path.js'; | ||
| import { createCustomConsole } from './console.3WNpx0tS.js'; | ||
| import fs from 'node:fs'; | ||
| import { createRequire, Module, isBuiltin } from 'node:module'; | ||
| import { toArray, isBareImport } from '@vitest/utils/helpers'; | ||
| import { findNearestPackageData } from '@vitest/utils/resolver'; | ||
| import { dirname as dirname$1 } from 'node:path'; | ||
| import { CSS_LANGS_RE, KNOWN_ASSET_RE } from '@vitest/utils/constants'; | ||
| import { getDefaultRequestStubs } from '../module-evaluator.js'; | ||
| import { s as startVitestModuleRunner, V as VITEST_VM_CONTEXT_SYMBOL, c as createNodeImportMeta } from './startVitestModuleRunner.BdSYEN5x.js'; | ||
| import { p as provideWorkerState } from './utils.BX5Fg8C4.js'; | ||
// Normalizes a CommonJS export object into the shape used to build a
// SyntheticModule view of it: the named export keys, the object the keys are
// read from, and the value exposed as the "default" export.
function interopCommonJsModule(interopDefault, mod) {
  // Primitives, arrays and promises get no named exports — only a default.
  if (isPrimitive(mod) || Array.isArray(mod) || mod instanceof Promise) {
    return {
      keys: [],
      moduleExports: {},
      defaultExport: mod
    };
  }
  const shouldInterop = interopDefault !== false && "__esModule" in mod && !isPrimitive(mod.default);
  if (shouldInterop) {
    // Merge the keys of `mod` and `mod.default` (minus "default" itself) and
    // serve reads through a proxy that prefers `mod` over `mod.default`.
    const mergedKeys = new Set(Object.keys(mod.default));
    for (const key of Object.keys(mod)) mergedKeys.add(key);
    mergedKeys.delete("default");
    const proxied = new Proxy(mod, {
      get(target, prop) {
        return target[prop] ?? target.default?.[prop];
      }
    });
    return {
      keys: [...mergedKeys],
      moduleExports: proxied,
      defaultExport: mod
    };
  }
  // Plain CJS object: every own key except "default" is a named export.
  const keys = [];
  for (const key of Object.keys(mod)) {
    if (key !== "default") keys.push(key);
  }
  return {
    keys,
    moduleExports: mod,
    defaultExport: mod
  };
}
// A value is "primitive" here when it is neither an object nor a function.
function isPrimitive(obj) {
  return obj == null || (typeof obj !== "object" && typeof obj !== "function");
}
// vm.SyntheticModule / vm.SourceTextModule are only defined when Node runs
// with --experimental-vm-modules; they are read off the namespace here.
const SyntheticModule = vm.SyntheticModule;
const SourceTextModule = vm.SourceTextModule;
// Host-side require used to load real built-in modules for the sandbox.
const _require = createRequire(import.meta.url);
// Per-Module memoization of lazily created `require` functions
// (see the `require` getter on the sandboxed Module class below).
const requiresCache = /* @__PURE__ */ new WeakMap();
// Emulates Node's CommonJS module system (`require`, `module`, the `Module`
// class) inside the provided vm context, so externalized packages execute
// with the sandbox's globals while still being loaded from files on disk.
class CommonjsExecutor {
  context;
  // filename -> loaded Module instance; exposed to user code through the
  // `require.cache` proxy created below.
  requireCache = /* @__PURE__ */ new Map();
  publicRequireCache = this.createProxyCache();
  // identifier -> vm SyntheticModule wrapper (ESM views of CJS/builtin modules)
  moduleCache = /* @__PURE__ */ new Map();
  // normalized builtin name (without "node:") -> Module holding its exports
  builtinCache = Object.create(null);
  // extension -> loader function, mirrors Node's `Module._extensions`
  extensions = Object.create(null);
  fs;
  Module;
  interopDefault;
  constructor(options) {
    this.context = options.context;
    this.fs = options.fileMap;
    this.interopDefault = options.interopDefault;
    // Grab constructors that live inside the vm context so objects created for
    // sandboxed code pass `instanceof` checks within that realm.
    const primitives = vm.runInContext("({ Object, Array, Error })", this.context);
    // eslint-disable-next-line ts/no-this-alias
    const executor = this;
    // Re-implementation of Node's `Module` class bound to this executor; it is
    // what sandboxed code receives when it requires "module".
    this.Module = class Module$1 {
      exports;
      isPreloading = false;
      id;
      filename;
      loaded;
      parent;
      children = [];
      path;
      paths = [];
      constructor(id = "", parent) {
        this.exports = primitives.Object.create(Object.prototype);
        // in our case the path should always be resolved already
        this.path = dirname(id);
        this.id = id;
        this.filename = id;
        this.loaded = false;
        this.parent = parent;
      }
      // Lazily create (and memoize in `requiresCache`) a require function
      // rooted at this module's id.
      get require() {
        const require = requiresCache.get(this);
        if (require) return require;
        const _require = Module$1.createRequire(this.id);
        requiresCache.set(this, _require);
        return _require;
      }
      // Source-map support is reported as disabled inside the sandbox.
      static getSourceMapsSupport = () => ({
        enabled: false,
        nodeModules: false,
        generatedCode: false
      });
      static setSourceMapsSupport = () => {
        // noop
      };
      static register = () => {
        throw new Error(`[vitest] "register" is not available when running in Vitest.`);
      };
      static registerHooks = () => {
        throw new Error(`[vitest] "registerHooks" is not available when running in Vitest.`);
      };
      // Compiles `code` with Node's CJS function wrapper and runs it inside
      // the vm context, populating `this.exports`.
      _compile(code, filename) {
        const cjsModule = Module$1.wrap(code);
        const script = new vm.Script(cjsModule, {
          filename,
          importModuleDynamically: options.importModuleDynamically
        });
        // @ts-expect-error mark script with current identifier
        script.identifier = filename;
        const fn = script.runInContext(executor.context);
        const __dirname = dirname(filename);
        // Cache before evaluating so circular requires observe the partial
        // exports object, matching Node's behavior.
        executor.requireCache.set(filename, this);
        try {
          fn(this.exports, this.require, this, filename, __dirname);
          return this.exports;
        } finally {
          this.loaded = true;
        }
      }
      // exposed for external use, Node.js does the opposite
      static _load = (request, parent, _isMain) => {
        return Module$1.createRequire(parent?.filename ?? request)(request);
      };
      static wrap = (script) => {
        return Module$1.wrapper[0] + script + Module$1.wrapper[1];
      };
      // Built with the context's Array so user code inspecting it stays in-realm.
      static wrapper = new primitives.Array("(function (exports, require, module, __filename, __dirname) { ", "\n});");
      // Static members below delegate to (or mirror) the real `node:module`.
      static builtinModules = Module.builtinModules;
      static findSourceMap = Module.findSourceMap;
      static SourceMap = Module.SourceMap;
      static syncBuiltinESMExports = Module.syncBuiltinESMExports;
      static _cache = executor.publicRequireCache;
      static _extensions = executor.extensions;
      static createRequire = (filename) => {
        return executor.createRequire(filename);
      };
      static runMain = () => {
        throw new primitives.Error("[vitest] \"runMain\" is not implemented.");
      };
      // @ts-expect-error not typed
      static _resolveFilename = Module._resolveFilename;
      // @ts-expect-error not typed
      static _findPath = Module._findPath;
      // @ts-expect-error not typed
      static _initPaths = Module._initPaths;
      // @ts-expect-error not typed
      static _preloadModules = Module._preloadModules;
      // @ts-expect-error not typed
      static _resolveLookupPaths = Module._resolveLookupPaths;
      // @ts-expect-error not typed
      static globalPaths = Module.globalPaths;
      static isBuiltin = Module.isBuiltin;
      static constants = Module.constants;
      static enableCompileCache = Module.enableCompileCache;
      static getCompileCacheDir = Module.getCompileCacheDir;
      static flushCompileCache = Module.flushCompileCache;
      static stripTypeScriptTypes = Module.stripTypeScriptTypes;
      static findPackageJSON = Module.findPackageJSON;
      static Module = Module$1;
    };
    this.extensions[".js"] = this.requireJs;
    this.extensions[".json"] = this.requireJson;
  }
  // Default loader for .js (and unregistered) extensions: compile source text.
  requireJs = (m, filename) => {
    const content = this.fs.readFile(filename);
    m._compile(content, filename);
  };
  // Loader for .json files: parse and assign directly as the exports object.
  requireJson = (m, filename) => {
    const code = this.fs.readFile(filename);
    m.exports = JSON.parse(code);
  };
  static cjsConditions;
  // Lazily computed, process-wide set of export conditions passed to
  // `require.resolve` (see `parseCjsConditions` below).
  static getCjsConditions() {
    if (!CommonjsExecutor.cjsConditions) CommonjsExecutor.cjsConditions = parseCjsConditions(process.execArgv, process.env.NODE_OPTIONS);
    return CommonjsExecutor.cjsConditions;
  }
  // Builds a `require` function rooted at `filename`: resolution uses the
  // host resolver (with explicit CJS conditions), loading goes through this
  // executor so the code runs inside the vm context.
  createRequire = (filename) => {
    const _require = createRequire(filename);
    const resolve = (id, options) => {
      return _require.resolve(id, {
        ...options,
        conditions: CommonjsExecutor.getCjsConditions()
      });
    };
    const require = ((id) => {
      const resolved = resolve(id);
      if (extname(resolved) === ".node" || isBuiltin(resolved)) return this.requireCoreModule(resolved);
      const module = new this.Module(resolved);
      return this.loadCommonJSModule(module, resolved);
    });
    require.resolve = resolve;
    require.resolve.paths = _require.resolve.paths;
    // `require.extensions` is read-only from the user's perspective.
    Object.defineProperty(require, "extensions", {
      get: () => this.extensions,
      set: () => {},
      configurable: true
    });
    require.main = void 0;
    require.cache = this.publicRequireCache;
    return require;
  };
  // Read-only view over `requireCache` that behaves like Node's
  // `require.cache` object (writes and deletes are silently ignored).
  createProxyCache() {
    return new Proxy(Object.create(null), {
      defineProperty: () => true,
      deleteProperty: () => true,
      set: () => true,
      get: (_, key) => this.requireCache.get(key),
      has: (_, key) => this.requireCache.has(key),
      ownKeys: () => Array.from(this.requireCache.keys()),
      getOwnPropertyDescriptor() {
        return {
          configurable: true,
          enumerable: true
        };
      }
    });
  }
  // very naive implementation for Node.js require
  loadCommonJSModule(module, filename) {
    const cached = this.requireCache.get(filename);
    if (cached) return cached.exports;
    const extension = this.findLongestRegisteredExtension(filename);
    (this.extensions[extension] || this.extensions[".js"])(module, filename);
    return module.exports;
  }
  // Mirrors Node's algorithm: scan dots in the basename left-to-right and use
  // the first registered (i.e. longest) matching suffix; default to ".js".
  findLongestRegisteredExtension(filename) {
    const name = basename(filename);
    let currentExtension;
    let index;
    let startIndex = 0;
    // eslint-disable-next-line no-cond-assign
    while ((index = name.indexOf(".", startIndex)) !== -1) {
      startIndex = index + 1;
      // a leading dot (dotfile) does not start an extension
      if (index === 0) continue;
      currentExtension = name.slice(index);
      if (this.extensions[currentExtension]) return currentExtension;
    }
    return ".js";
  }
  // Wraps a builtin module's exports in a SyntheticModule so it can be
  // imported from ESM; the namespace also exposes a "default" export.
  getCoreSyntheticModule(identifier) {
    if (this.moduleCache.has(identifier)) return this.moduleCache.get(identifier);
    const exports$1 = this.require(identifier);
    const keys = Object.keys(exports$1);
    const module = new SyntheticModule([...keys, "default"], () => {
      for (const key of keys) module.setExport(key, exports$1[key]);
      module.setExport("default", exports$1);
    }, {
      context: this.context,
      identifier
    });
    this.moduleCache.set(identifier, module);
    return module;
  }
  // Wraps a CJS file's exports in a SyntheticModule, applying default-export
  // interop (see `interopCommonJsModule`).
  getCjsSyntheticModule(path, identifier) {
    if (this.moduleCache.has(identifier)) return this.moduleCache.get(identifier);
    const exports$1 = this.require(path);
    // TODO: technically module should be parsed to find static exports, implement for strict mode in #2854
    const { keys, moduleExports, defaultExport } = interopCommonJsModule(this.interopDefault, exports$1);
    const module = new SyntheticModule([...keys, "default"], function() {
      for (const key of keys) this.setExport(key, moduleExports[key]);
      this.setExport("default", defaultExport);
    }, {
      context: this.context,
      identifier
    });
    this.moduleCache.set(identifier, module);
    return module;
  }
  // TODO: use this in strict mode, when available in #2854
  // private _getNamedCjsExports(path: string): Set<string> {
  // const cachedNamedExports = this.cjsNamedExportsMap.get(path)
  // if (cachedNamedExports) {
  // return cachedNamedExports
  // }
  // if (extname(path) === '.node') {
  // const moduleExports = this.require(path)
  // const namedExports = new Set(Object.keys(moduleExports))
  // this.cjsNamedExportsMap.set(path, namedExports)
  // return namedExports
  // }
  // const code = this.fs.readFile(path)
  // const { exports, reexports } = parseCjs(code, path)
  // const namedExports = new Set(exports)
  // this.cjsNamedExportsMap.set(path, namedExports)
  // for (const reexport of reexports) {
  // if (isNodeBuiltin(reexport)) {
  // const exports = this.require(reexport)
  // if (exports !== null && typeof exports === 'object') {
  // for (const e of Object.keys(exports)) {
  // namedExports.add(e)
  // }
  // }
  // }
  // else {
  // const require = this.createRequire(path)
  // const resolved = require.resolve(reexport)
  // const exports = this._getNamedCjsExports(resolved)
  // for (const e of exports) {
  // namedExports.add(e)
  // }
  // }
  // }
  // return namedExports
  // }
  // Requires an already-resolved identifier: builtins/.node addons go through
  // the host loader, everything else through the sandboxed CJS loader.
  require(identifier) {
    if (extname(identifier) === ".node" || isBuiltin(identifier)) return this.requireCoreModule(identifier);
    const module = new this.Module(identifier);
    return this.loadCommonJSModule(module, identifier);
  }
  // Loads a builtin (or native addon) with the host require; "module" itself
  // is answered with our sandboxed Module class instead of the real one.
  requireCoreModule(identifier) {
    const normalized = identifier.replace(/^node:/, "");
    if (this.builtinCache[normalized]) return this.builtinCache[normalized].exports;
    const moduleExports = _require(identifier);
    if (identifier === "node:module" || identifier === "module") {
      const module = new this.Module("/module.js");
      module.exports = this.Module;
      this.builtinCache[normalized] = module;
      return module.exports;
    }
    // NOTE(review): `_require.cache` is keyed by resolved filenames and Node
    // does not place builtins in it, so this likely assigns `undefined` and
    // only "module" is effectively cached — confirm intent.
    this.builtinCache[normalized] = _require.cache[normalized];
    // TODO: should we wrap module to rethrow context errors?
    return moduleExports;
  }
}
// The "module-sync" exports condition (added in Node 22.12/20.19 when
// require(esm) was unflagged) can resolve to ESM files that our CJS
// vm.Script executor cannot handle. We exclude it by passing explicit
// CJS conditions to require.resolve (Node 22.12+).
// Must be a Set because Node's internal resolver calls conditions.has().
// User-specified --conditions/-C flags are respected, except module-sync.
function parseCjsConditions(execArgv, nodeOptions) {
  // Default CJS resolution conditions.
  const conditions = ["node", "require", "node-addons"];
  const extraArgs = nodeOptions == null ? [] : nodeOptions.split(/\s+/);
  const args = execArgv.concat(extraArgs);
  let i = 0;
  while (i < args.length) {
    const arg = args[i];
    // "--conditions=value" / "-C=value" form
    const inline = /^(?:--conditions|-C)=(.+)$/.exec(arg);
    if (inline) {
      conditions.push(inline[1]);
    } else if ((arg === "--conditions" || arg === "-C") && i + 1 < args.length) {
      // "--conditions value" / "-C value" form: consume the next token too
      i += 1;
      conditions.push(args[i]);
    }
    i += 1;
  }
  return new Set(conditions.filter((condition) => condition !== "module-sync"));
}
// Matches `data:` URLs importable as modules — JS, JSON or WASM payloads,
// optionally "charset=utf-8" or "base64" encoded.
// Named groups: mime, encoding (optional), code.
const dataURIRegex = /^data:(?<mime>text\/javascript|application\/json|application\/wasm)(?:;(?<encoding>charset=utf-8|base64))?,(?<code>.*)$/;
// Creates, links, evaluates and caches vm ES modules (SourceTextModule /
// SyntheticModule) for files, data: URIs, wasm binaries and network imports.
class EsmExecutor {
  // fileURL/identifier -> vm Module (or a promise resolving to one)
  moduleCache = /* @__PURE__ */ new Map();
  // module -> in-flight link() promise, so each module is linked exactly once
  esmLinkMap = /* @__PURE__ */ new WeakMap();
  context;
  // 127.0.0.0 as a signed 32-bit integer; http: imports are only allowed from
  // the loopback /8 network (see createNetworkModule)
  #httpIp = IPnumber("127.0.0.0");
  constructor(executor, options) {
    this.executor = executor;
    this.context = options.context;
  }
  // Links (once, memoized via esmLinkMap) and evaluates a vm module.
  async evaluateModule(m) {
    if (m.status === "unlinked") this.esmLinkMap.set(m, m.link((identifier, referencer) => this.executor.resolveModule(identifier, referencer.identifier)));
    await this.esmLinkMap.get(m);
    if (m.status === "linked") await m.evaluate();
    return m;
  }
  // Returns a cached module for `fileURL` or starts loading one.
  async createEsModule(fileURL, getCode) {
    const cached = this.moduleCache.get(fileURL);
    if (cached) return cached;
    // Cache the promise (not the settled module) so concurrent importers
    // share a single load.
    const promise = this.loadEsModule(fileURL, getCode);
    this.moduleCache.set(fileURL, promise);
    return promise;
  }
  async loadEsModule(fileURL, getCode) {
    const code = await getCode();
    // TODO: should not be allowed in strict mode, implement in #2854
    if (fileURL.endsWith(".json")) {
      // NOTE(review): unlike the other SyntheticModules in this file, no
      // `context`/`identifier` options are passed here — confirm whether the
      // JSON module should also be created inside `this.context`.
      const m = new SyntheticModule(["default"], function() {
        const result = JSON.parse(code);
        this.setExport("default", result);
      });
      this.moduleCache.set(fileURL, m);
      return m;
    }
    const m = new SourceTextModule(code, {
      identifier: fileURL,
      context: this.context,
      importModuleDynamically: this.executor.importModuleDynamically,
      // populate import.meta (url/filename/dirname/resolve) for the module
      initializeImportMeta: (meta, mod) => {
        meta.url = mod.identifier;
        if (mod.identifier.startsWith("file:")) {
          const filename = fileURLToPath(mod.identifier);
          meta.filename = filename;
          meta.dirname = dirname$1(filename);
        }
        meta.resolve = (specifier, importer) => {
          return this.executor.resolve(specifier, importer != null ? importer.toString() : mod.identifier);
        };
      }
    });
    this.moduleCache.set(fileURL, m);
    return m;
  }
  async createWebAssemblyModule(fileUrl, getCode) {
    const cached = this.moduleCache.get(fileUrl);
    if (cached) return cached;
    const m = this.loadWebAssemblyModule(getCode(), fileUrl);
    this.moduleCache.set(fileUrl, m);
    return m;
  }
  // http(s) imports, restricted like Node: plain http only for loopback hosts.
  async createNetworkModule(fileUrl) {
    // https://nodejs.org/api/esm.html#https-and-http-imports
    if (fileUrl.startsWith("http:")) {
      const url = new URL(fileUrl);
      if (url.hostname !== "localhost" && url.hostname !== "::1" && (IPnumber(url.hostname) & IPmask(8)) !== this.#httpIp) throw new Error(
        // we don't know the importer, so it's undefined (the same happens in --pool=threads)
        `import of '${fileUrl}' by undefined is not supported: http can only be used to load local resources (use https instead).`
      );
    }
    return this.createEsModule(fileUrl, () => fetch(fileUrl).then((r) => r.text()));
  }
  // Compiles a wasm binary into a SyntheticModule whose evaluator resolves and
  // evaluates the wasm's imports before instantiating it.
  async loadWebAssemblyModule(source, identifier) {
    const cached = this.moduleCache.get(identifier);
    if (cached) return cached;
    const wasmModule = await WebAssembly.compile(source);
    const exports$1 = WebAssembly.Module.exports(wasmModule);
    const imports = WebAssembly.Module.imports(wasmModule);
    // resolve each distinct imported module once
    const moduleLookup = {};
    for (const { module } of imports) if (moduleLookup[module] === void 0) moduleLookup[module] = await this.executor.resolveModule(module, identifier);
    const evaluateModule = (module) => this.evaluateModule(module);
    return new SyntheticModule(exports$1.map(({ name }) => name), async function() {
      const importsObject = {};
      for (const { module, name } of imports) {
        if (!importsObject[module]) importsObject[module] = {};
        await evaluateModule(moduleLookup[module]);
        importsObject[module][name] = moduleLookup[module].namespace[name];
      }
      const wasmInstance = new WebAssembly.Instance(wasmModule, importsObject);
      for (const { name } of exports$1) this.setExport(name, wasmInstance.exports[name]);
    }, {
      context: this.context,
      identifier
    });
  }
  cacheModule(identifier, module) {
    this.moduleCache.set(identifier, module);
  }
  resolveCachedModule(identifier) {
    return this.moduleCache.get(identifier);
  }
  // Builds a module from a data: URI (JS, JSON or wasm payload; see
  // dataURIRegex for the accepted shapes).
  async createDataModule(identifier) {
    const cached = this.moduleCache.get(identifier);
    if (cached) return cached;
    const match = identifier.match(dataURIRegex);
    if (!match || !match.groups) throw new Error("Invalid data URI");
    const mime = match.groups.mime;
    const encoding = match.groups.encoding;
    if (mime === "application/wasm") {
      // wasm payloads must be base64-encoded
      if (!encoding) throw new Error("Missing data URI encoding");
      if (encoding !== "base64") throw new Error(`Invalid data URI encoding: ${encoding}`);
      const module = this.loadWebAssemblyModule(Buffer.from(match.groups.code, "base64"), identifier);
      this.moduleCache.set(identifier, module);
      return module;
    }
    let code = match.groups.code;
    if (!encoding || encoding === "charset=utf-8") code = decodeURIComponent(code);
    else if (encoding === "base64") code = Buffer.from(code, "base64").toString();
    else throw new Error(`Invalid data URI encoding: ${encoding}`);
    if (mime === "application/json") {
      const module = new SyntheticModule(["default"], function() {
        const obj = JSON.parse(code);
        this.setExport("default", obj);
      }, {
        context: this.context,
        identifier
      });
      this.moduleCache.set(identifier, module);
      return module;
    }
    return this.createEsModule(identifier, () => code);
  }
}
// Converts a dotted-quad IPv4 address into its (signed) 32-bit integer form.
// Throws if the string is not a plain `a.b.c.d` address.
function IPnumber(address) {
  const match = /^(\d+)\.(\d+)\.(\d+)\.(\d+)$/.exec(address);
  if (!match) throw new Error(`Expected IP address, received ${address}`);
  const [, a, b, c, d] = match;
  return (Number(a) << 24) + (Number(b) << 16) + (Number(c) << 8) + Number(d);
}
// Network mask for a prefix of `maskSize` bits, as a signed 32-bit integer.
function IPmask(maskSize) {
  return -1 << (32 - maskSize);
}
// Virtual id of Vite's client entry, plus its file-URL form; requests for
// either are answered with the stubbed client module (see ViteExecutor).
const CLIENT_ID = "/@vite/client";
const CLIENT_FILE = pathToFileURL(CLIENT_ID).href;
// Loads Vite-specific assets (the /@vite/client stub, CSS, known static
// assets and user-configured glob matches) through the Vite transform
// pipeline into vm ES modules.
class ViteExecutor {
  esm;
  constructor(options) {
    this.options = options;
    this.esm = options.esmExecutor;
  }
  // Resolver hook: only the virtual client id resolves (to itself); anything
  // else falls through to the next resolver.
  resolve = (identifier) => {
    if (identifier === CLIENT_ID) return identifier;
  };
  get workerState() {
    return this.options.context.__vitest_worker__;
  }
  // Creates (or returns the cached) ES module for `fileUrl`, transforming it
  // through Vite. Throws ERR_MODULE_NOT_FOUND when Vite cannot resolve it.
  async createViteModule(fileUrl) {
    if (fileUrl === CLIENT_FILE || fileUrl === CLIENT_ID) return this.createViteClientModule();
    const cached = this.esm.resolveCachedModule(fileUrl);
    if (cached) return cached;
    return this.esm.createEsModule(fileUrl, async () => {
      try {
        const result = await this.options.transform(fileUrl);
        if (result.code) return result.code;
      } catch (cause) {
        // rethrow vite error if it cannot load the module because it's not resolved;
        // guard `cause != null` because `typeof null === "object"` (matches the
        // equivalent check in the module transport above)
        if (typeof cause === "object" && cause != null && cause.code === "ERR_LOAD_URL" || typeof cause?.message === "string" && cause.message.includes("Failed to load url")) {
          const error = new Error(`Cannot find module '${fileUrl}'`, { cause });
          error.code = "ERR_MODULE_NOT_FOUND";
          throw error;
        }
      }
      // any other transform failure (or empty result) falls through here
      throw new Error(`[vitest] Failed to transform ${fileUrl}. Does the file exist?`);
    });
  }
  // Creates (and caches) a SyntheticModule exposing the stubbed Vite client.
  createViteClientModule() {
    const identifier = CLIENT_ID;
    const cached = this.esm.resolveCachedModule(identifier);
    if (cached) return cached;
    const stub = this.options.viteClientModule;
    const moduleKeys = Object.keys(stub);
    const module = new SyntheticModule(moduleKeys, function() {
      moduleKeys.forEach((key) => {
        this.setExport(key, stub[key]);
      });
    }, {
      context: this.options.context,
      identifier
    });
    this.esm.cacheModule(identifier, module);
    return module;
  }
  // True when this executor should handle `fileUrl`: the client stub itself,
  // or — per the `deps.web` config — CSS, known asset types, or user globs.
  canResolve = (fileUrl) => {
    if (fileUrl === CLIENT_FILE) return true;
    const config = this.workerState.config.deps?.web || {};
    const [modulePath] = fileUrl.split("?");
    if (config.transformCss && CSS_LANGS_RE.test(modulePath)) return true;
    if (config.transformAssets && KNOWN_ASSET_RE.test(modulePath)) return true;
    if (toArray(config.transformGlobPattern).some((pattern) => pattern.test(modulePath))) return true;
    return false;
  };
}
// Synchronous existence check used before creating file-backed modules.
const { existsSync } = fs;
// always defined when we use vm pool
const nativeResolve = import.meta.resolve;
// TODO: improve Node.js strict mode support in #2854
// Facade over the CJS, ESM and Vite executors: classifies any identifier
// (data/builtin/network/vite/module/commonjs/wasm) and delegates module
// creation to the appropriate sub-executor.
class ExternalModulesExecutor {
  cjs;
  esm;
  vite;
  context;
  fs;
  resolvers = [];
  // lazily detected support for --experimental-network-imports (see getter)
  #networkSupported = null;
  constructor(options) {
    this.options = options;
    this.context = options.context;
    this.fs = options.fileMap;
    this.esm = new EsmExecutor(this, { context: this.context });
    this.cjs = new CommonjsExecutor({
      context: this.context,
      importModuleDynamically: this.importModuleDynamically,
      fileMap: options.fileMap,
      interopDefault: options.interopDefault
    });
    this.vite = new ViteExecutor({
      esmExecutor: this.esm,
      context: this.context,
      transform: options.transform,
      viteClientModule: options.viteClientModule
    });
    this.resolvers = [this.vite.resolve];
  }
  // Imports `identifier` as an ES module and returns its namespace object.
  async import(identifier) {
    const module = await this.createModule(identifier);
    await this.esm.evaluateModule(module);
    return module.namespace;
  }
  require(identifier) {
    return this.cjs.require(identifier);
  }
  createRequire(identifier) {
    return this.cjs.createRequire(identifier);
  }
  // dynamic import can be used in both ESM and CJS, so we have it in the executor
  importModuleDynamically = async (specifier, referencer) => {
    const module = await this.resolveModule(specifier, referencer.identifier);
    return await this.esm.evaluateModule(module);
  };
  // Resolves a specifier relative to its referencer, then creates the module.
  resolveModule = async (specifier, referencer) => {
    let identifier = this.resolve(specifier, referencer);
    if (identifier instanceof Promise) identifier = await identifier;
    return await this.createModule(identifier);
  };
  // Tries the custom resolvers (currently only Vite's client id) before
  // falling back to the native resolver.
  resolve(specifier, parent) {
    for (const resolver of this.resolvers) {
      const id = resolver(specifier, parent);
      if (id) return id;
    }
    // import.meta.resolve can be asynchronous in older +18 Node versions
    return nativeResolve(specifier, parent);
  }
  // Classifies `identifier` and returns its type together with the normalized
  // filesystem path and file URL.
  getModuleInformation(identifier) {
    if (identifier.startsWith("data:")) return {
      type: "data",
      url: identifier,
      path: identifier
    };
    const extension = extname(identifier);
    if (extension === ".node" || isBuiltin(identifier)) return {
      type: "builtin",
      url: identifier,
      path: identifier
    };
    if (this.isNetworkSupported && (identifier.startsWith("http:") || identifier.startsWith("https:"))) return {
      type: "network",
      url: identifier,
      path: identifier
    };
    const isFileUrl = identifier.startsWith("file://");
    const pathUrl = isFileUrl ? fileURLToPath(identifier.split("?")[0]) : identifier;
    const fileUrl = isFileUrl ? identifier : pathToFileURL(pathUrl).toString();
    let type;
    if (this.vite.canResolve(fileUrl)) type = "vite";
    else if (extension === ".mjs") type = "module";
    else if (extension === ".cjs") type = "commonjs";
    else if (extension === ".wasm")
      // still experimental on NodeJS --experimental-wasm-modules
      // cf. ESM_FILE_FORMAT(url) in https://nodejs.org/docs/latest-v20.x/api/esm.html#resolution-algorithm
      type = "wasm";
    // ambiguous extensions: decide by the nearest package.json "type" field
    else type = findNearestPackageData(normalize(pathUrl)).type === "module" ? "module" : "commonjs";
    return {
      type,
      path: pathUrl,
      url: fileUrl
    };
  }
  // Creates (but does not evaluate) the vm module for `identifier`.
  createModule(identifier) {
    const { type, url, path } = this.getModuleInformation(identifier);
    // create ERR_MODULE_NOT_FOUND on our own since latest NodeJS's import.meta.resolve doesn't throw on non-existing namespace or path
    // https://github.com/nodejs/node/pull/49038
    if ((type === "module" || type === "commonjs" || type === "wasm") && !existsSync(path)) {
      const error = /* @__PURE__ */ new Error(`Cannot find ${isBareImport(path) ? "package" : "module"} '${path}'`);
      error.code = "ERR_MODULE_NOT_FOUND";
      throw error;
    }
    switch (type) {
      case "data": return this.esm.createDataModule(identifier);
      case "builtin": return this.cjs.getCoreSyntheticModule(identifier);
      case "vite": return this.vite.createViteModule(url);
      case "wasm": return this.esm.createWebAssemblyModule(url, () => this.fs.readBuffer(path));
      case "module": return this.esm.createEsModule(url, () => this.fs.readFileAsync(path));
      case "commonjs": return this.cjs.getCjsSyntheticModule(path, identifier);
      case "network": return this.esm.createNetworkModule(url);
      // unreachable: getModuleInformation always returns one of the above
      default: return type;
    }
  }
  // Memoized check for --experimental-network-imports in execArgv/NODE_OPTIONS.
  get isNetworkSupported() {
    if (this.#networkSupported == null) if (process.execArgv.includes("--experimental-network-imports")) this.#networkSupported = true;
    else if (process.env.NODE_OPTIONS?.includes("--experimental-network-imports")) this.#networkSupported = true;
    else this.#networkSupported = false;
    return this.#networkSupported;
  }
}
const { promises, readFileSync } = fs;
// Read-through cache for file contents so each file is read from disk at most
// once per representation (text reads share one cache, binary reads another).
class FileMap {
  // path -> string contents (shared by the sync and async text readers)
  fsCache = /* @__PURE__ */ new Map();
  // path -> Buffer contents (binary reads, cached independently)
  fsBufferCache = /* @__PURE__ */ new Map();
  // Async text read; serves from cache when available.
  async readFileAsync(path) {
    const hit = this.fsCache.get(path);
    if (hit != null) return hit;
    const contents = await promises.readFile(path, "utf-8");
    this.fsCache.set(path, contents);
    return contents;
  }
  // Sync text read; serves from cache when available.
  readFile(path) {
    const hit = this.fsCache.get(path);
    if (hit != null) return hit;
    const contents = readFileSync(path, "utf-8");
    this.fsCache.set(path, contents);
    return contents;
  }
  // Sync binary read; serves from cache when available.
  readBuffer(path) {
    const hit = this.fsBufferCache.get(path);
    if (hit != null) return hit;
    const contents = readFileSync(path);
    this.fsBufferCache.set(path, contents);
    return contents;
  }
}
// File URL of the vm worker entry in dist — presumably used as the synthetic
// parent/entry identifier for test imports (consumer not visible here; confirm).
const entryFile = pathToFileURL(resolve(distDir, "workers/runVmTests.js")).href;
// Shared across runs in this worker so files/packages are read at most once.
const fileMap = new FileMap();
const packageCache = /* @__PURE__ */ new Map();
/**
 * Runs a batch of test files inside a `node:vm` context supplied by the
 * resolved environment's `setupVM()` hook (vm-based pools).
 *
 * @param method forwarded verbatim to the VM entry module's `run()`
 * @param state worker state; provides `ctx` (run payload), `rpc`, `durations`
 * @param traces tracing helper used to wrap environment setup/teardown in spans
 * @throws {TypeError} if the environment lacks `setupVM`/`getVmContext`, or
 *   `getVmContext()` returns something not created by `vm.createContext()`
 */
async function runVmTests(method, state, traces) {
const { ctx, rpc } = state;
const beforeEnvironmentTime = performance.now();
const { environment } = await loadEnvironment(ctx.environment.name, ctx.config.root, rpc, traces, true);
state.environment = environment;
if (!environment.setupVM) {
const envName = ctx.environment.name;
// Relative paths are reported as-is; bare names map to the
// "vitest-environment-<name>" package convention.
const packageId = envName[0] === "." ? envName : `vitest-environment-${envName}`;
throw new TypeError(`Environment "${ctx.environment.name}" is not a valid environment. Path "${packageId}" doesn't support vm environment because it doesn't provide "setupVM" method.`);
}
const vm = await traces.$("vitest.runtime.environment.setup", { attributes: {
"vitest.environment": environment.name,
"vitest.environment.vite_environment": environment.viteEnvironment || environment.name
} }, () => environment.setupVM(ctx.environment.options || ctx.config.environmentOptions || {}));
// Environment creation time is reported separately from test time.
state.durations.environment = performance.now() - beforeEnvironmentTime;
// Lets runtime code detect that it is executing under a VM pool.
process.env.VITEST_VM_POOL = "1";
if (!vm.getVmContext) throw new TypeError(`Environment ${environment.name} doesn't provide "getVmContext" method. It should return a context created by "vm.createContext" method.`);
const context = vm.getVmContext();
if (!isContext(context)) throw new TypeError(`Environment ${environment.name} doesn't provide a valid context. It should be created by "vm.createContext" method.`);
// Expose the worker state on the VM's global object so in-context code can reach it.
provideWorkerState(context, state);
// this is unfortunately needed for our own dependencies
// we need to find a way to not rely on this by default
// because browser doesn't provide these globals
context.process = process;
context.global = context;
// Console is either passed through or replaced by the intercepting console.
context.console = state.config.disableConsoleIntercept ? console : createCustomConsole(state);
// TODO: don't hardcode setImmediate in fake timers defaults
context.setImmediate = setImmediate;
context.clearImmediate = clearImmediate;
const stubs = getDefaultRequestStubs(context);
// Executes externalized (node_modules) modules inside the VM context, with
// shared file/package caches and a stubbed /@vite/client module.
const externalModulesExecutor = new ExternalModulesExecutor({
context,
fileMap,
packageCache,
transform: rpc.transform,
viteClientModule: stubs["/@vite/client"]
});
// Test code must never tear down the worker process; surface the attempt as
// an error instead.
process.exit = (code = process.exitCode || 0) => {
throw new Error(`process.exit unexpectedly called with "${code}"`);
};
listenForErrors(() => state);
const moduleRunner = startVitestModuleRunner({
context,
evaluatedModules: state.evaluatedModules,
state,
externalModulesExecutor,
createImportMeta: createNodeImportMeta,
traces
});
emitModuleRunner(moduleRunner);
// Non-enumerable marker so in-context code can locate this VM's context and
// external-module executor.
Object.defineProperty(context, VITEST_VM_CONTEXT_SYMBOL, {
value: {
context,
externalModulesExecutor
},
configurable: true,
enumerable: false,
writable: false
});
context.__vitest_mocker__ = moduleRunner.mocker;
// Evaluate the user's serialized `define` replacements inside the VM context.
if (ctx.config.serializedDefines) try {
runInContext(ctx.config.serializedDefines, context, { filename: "virtual:load-defines.js" });
} catch (error) {
throw new Error(`Failed to load custom "defines": ${error.message}`);
}
await moduleRunner.mocker.initializeSpyModule();
const { run } = await moduleRunner.import(entryFile);
try {
await run(method, ctx.files, ctx.config, moduleRunner, traces);
} finally {
// Always tear the environment down, even if the run itself throws.
await traces.$("vitest.runtime.environment.teardown", () => vm.teardown?.());
}
}
/**
 * Pre-flight check for VM-based pools: they require the Vite module runner,
 * so explicitly disabling it in the config is a configuration error.
 * @param context worker context carrying `pool` and `config`
 * @throws {Error} when `config.experimental.viteModuleRunner` is `false`
 */
function setupVmWorker(context) {
	const runnerDisabled = context.config.experimental.viteModuleRunner === false;
	if (!runnerDisabled) return;
	throw new Error(`Pool "${context.pool}" cannot run with "experimental.viteModuleRunner: false". Please, use "threads" or "forks" instead.`);
}
| export { runVmTests as r, setupVmWorker as s }; |
Major refactor
Supply chain riskPackage has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Network access
Supply chain riskThis module accesses the network.
Found 5 instances in 1 package
Dynamic require
Supply chain riskDynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
Environment variable access
Supply chain riskPackage accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 29 instances in 1 package
Long strings
Supply chain riskContains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
URL strings
Supply chain riskPackage contains fragments of external URLs or IP addresses, which the package may be accessing at runtime.
Found 1 instance in 1 package
Major refactor
Supply chain riskPackage has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Network access
Supply chain riskThis module accesses the network.
Found 5 instances in 1 package
Dynamic require
Supply chain riskDynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
Environment variable access
Supply chain riskPackage accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 29 instances in 1 package
Long strings
Supply chain riskContains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
No v1
QualityPackage is not semver >=1. This means it is not stable and does not support ^ ranges.
Found 1 instance in 1 package
URL strings
Supply chain riskPackage contains fragments of external URLs or IP addresses, which the package may be accessing at runtime.
Found 1 instance in 1 package
1882330
0.14%50323
0.13%0
-100%382
-0.26%+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
Updated
Updated
Updated
Updated
Updated
Updated
Updated
Updated