Socket
Socket
Sign inDemoInstall

webpack

Package Overview
Dependencies
76
Maintainers
4
Versions
832
Alerts
File Explorer

Advanced tools

Install Socket

Detect and block malicious and high-risk dependencies

Install

Comparing version 5.90.2 to 5.90.3

3

lib/asset/RawDataUrlModule.js

@@ -20,2 +20,3 @@ /*

/** @typedef {import("../Module").NeedBuildContext} NeedBuildContext */
/** @typedef {import("../Module").SourceTypes} SourceTypes */
/** @typedef {import("../RequestShortener")} RequestShortener */

@@ -46,3 +47,3 @@ /** @typedef {import("../ResolverFactory").ResolverWithOptions} ResolverWithOptions */

/**
* @returns {Set<string>} types available (do not mutate)
* @returns {SourceTypes} types available (do not mutate)
*/

@@ -49,0 +50,0 @@ getSourceTypes() {

@@ -37,4 +37,2 @@ /*

/** @typedef {Set<Module> & { plus: Set<Module> }} ModuleSetPlus */
/**

@@ -44,8 +42,7 @@ * @typedef {Object} ChunkGroupInfo

* @property {RuntimeSpec} runtime the runtimes
* @property {ModuleSetPlus | undefined} minAvailableModules current minimal set of modules available at this point
* @property {boolean | undefined} minAvailableModulesOwned true, if minAvailableModules is owned and can be modified
* @property {ModuleSetPlus[]} availableModulesToBeMerged enqueued updates to the minimal set of available modules
* @property {bigint | undefined} minAvailableModules current minimal set of modules available at this point
* @property {bigint[]} availableModulesToBeMerged enqueued updates to the minimal set of available modules
* @property {Set<Module>=} skippedItems modules that were skipped because module is already available in parent chunks (need to reconsider when minAvailableModules is shrinking)
* @property {Set<[Module, ModuleGraphConnection[]]>=} skippedModuleConnections referenced modules that where skipped because they were not active in this runtime
* @property {ModuleSetPlus | undefined} resultingAvailableModules set of modules available including modules from this chunk group
* @property {bigint | undefined} resultingAvailableModules set of modules available including modules from this chunk group
* @property {Set<ChunkGroupInfo> | undefined} children set of children chunk groups, that will be revisited when availableModules shrink

@@ -66,13 +63,12 @@ * @property {Set<ChunkGroupInfo> | undefined} availableSources set of chunk groups that are the source for minAvailableModules

const EMPTY_SET = /** @type {ModuleSetPlus} */ (new Set());
EMPTY_SET.plus = EMPTY_SET;
const ZERO_BIGINT = BigInt(0);
const ONE_BIGINT = BigInt(1);
/**
* @param {ModuleSetPlus} a first set
* @param {ModuleSetPlus} b second set
* @returns {number} cmp
* @param {bigint} mask The mask to test
* @param {number} ordinal The ordinal of the bit to test
* @returns {boolean} If the ordinal-th bit is set in the mask
*/
const bySetSize = (a, b) => {
return b.size + b.plus.size - a.size - a.plus.size;
};
const isOrdinalSetInMask = (mask, ordinal) =>
BigInt.asUintN(1, mask >> BigInt(ordinal)) !== ZERO_BIGINT;

@@ -215,2 +211,3 @@ /**

* @param {Set<ChunkGroup>} allCreatedChunkGroups filled with all chunk groups that are created here
* @param {Map<Chunk, bigint>} maskByChunk module content mask by chunk
*/

@@ -224,3 +221,4 @@ const visitModules = (

blocksWithNestedBlocks,
allCreatedChunkGroups
allCreatedChunkGroups,
maskByChunk
) => {

@@ -236,3 +234,27 @@ const { moduleGraph, chunkGraph, moduleMemCaches } = compilation;

/** @type {Map<Module, number>} */
const ordinalByModule = new Map();
/**
* @param {Module} module The module to look up
* @returns {number} The ordinal of the module in masks
*/
const getModuleOrdinal = module => {
let ordinal = ordinalByModule.get(module);
if (ordinal === undefined) {
ordinal = ordinalByModule.size;
ordinalByModule.set(module, ordinal);
}
return ordinal;
};
for (const chunk of compilation.chunks) {
let mask = ZERO_BIGINT;
for (const m of chunkGraph.getChunkModulesIterable(chunk)) {
mask |= ONE_BIGINT << BigInt(getModuleOrdinal(m));
}
maskByChunk.set(chunk, mask);
}
/**
*

@@ -340,3 +362,2 @@ * @param {DependenciesBlock} block block

minAvailableModules: undefined,
minAvailableModulesOwned: false,
availableModulesToBeMerged: [],

@@ -364,6 +385,3 @@ skippedItems: undefined,

// this minAvailableModules (by the parent entrypoints)
const skippedItems = new Set();
for (const module of modules) {
skippedItems.add(module);
}
const skippedItems = new Set(modules);
chunkGroupInfo.skippedItems = skippedItems;

@@ -373,3 +391,3 @@ chunkGroupsForCombining.add(chunkGroupInfo);

// The application may start here: We start with an empty list of available modules
chunkGroupInfo.minAvailableModules = EMPTY_SET;
chunkGroupInfo.minAvailableModules = ZERO_BIGINT;
const chunk = chunkGroup.getEntrypointChunk();

@@ -462,2 +480,3 @@ for (const module of modules) {

);
maskByChunk.set(entrypoint.chunks[0], ZERO_BIGINT);
entrypoint.index = nextChunkGroupIndex++;

@@ -467,4 +486,3 @@ cgi = {

runtime: entrypoint.options.runtime || entrypoint.name,
minAvailableModules: EMPTY_SET,
minAvailableModulesOwned: false,
minAvailableModules: ZERO_BIGINT,
availableModulesToBeMerged: [],

@@ -504,3 +522,3 @@ skippedItems: undefined,

block: b,
module: module,
module,
chunk: entrypoint.chunks[0],

@@ -515,3 +533,3 @@ chunkGroup: entrypoint,

block: b,
module: module,
module,
chunk,

@@ -530,2 +548,3 @@ chunkGroup,

);
maskByChunk.set(c.chunks[0], ZERO_BIGINT);
c.index = nextChunkGroupIndex++;

@@ -536,3 +555,2 @@ cgi = {

minAvailableModules: undefined,
minAvailableModulesOwned: undefined,
availableModulesToBeMerged: [],

@@ -602,3 +620,3 @@ skippedItems: undefined,

block: b,
module: module,
module,
chunk: c.chunks[0],

@@ -626,8 +644,13 @@ chunkGroup: c,

// Traverse all referenced modules
for (let i = 0; i < blockModules.length; i += 3) {
for (let i = 0, len = blockModules.length; i < len; i += 3) {
const refModule = /** @type {Module} */ (blockModules[i]);
if (chunkGraph.isModuleInChunk(refModule, chunk)) {
// For single comparisons this might be cheaper
const isModuleInChunk = chunkGraph.isModuleInChunk(refModule, chunk);
if (isModuleInChunk) {
// skip early if already connected
continue;
}
const refOrdinal = /** @type {number} */ getModuleOrdinal(refModule);
const activeState = /** @type {ConnectionState} */ (

@@ -643,8 +666,3 @@ blockModules[i + 1]

if (activeState === false) continue;
}
if (
activeState === true &&
(minAvailableModules.has(refModule) ||
minAvailableModules.plus.has(refModule))
) {
} else if (isOrdinalSetInMask(minAvailableModules, refOrdinal)) {
// already in parent chunks, skip it for now

@@ -715,4 +733,4 @@ skipBuffer.push(refModule);

if (blockModules !== undefined) {
// Traverse all referenced modules
for (let i = 0; i < blockModules.length; i += 3) {
// Traverse all referenced modules in reverse order
for (let i = blockModules.length - 3; i >= 0; i -= 3) {
const refModule = /** @type {Module} */ (blockModules[i]);

@@ -724,3 +742,3 @@ const activeState = /** @type {ConnectionState} */ (

// this is relevant with circular dependencies
queueBuffer.push({
queue.push({
action:

@@ -735,9 +753,2 @@ activeState === true ? ADD_AND_ENTER_ENTRY_MODULE : PROCESS_BLOCK,

}
// Add buffered items in reverse order
if (queueBuffer.length > 0) {
for (let i = queueBuffer.length - 1; i >= 0; i--) {
queue.push(queueBuffer[i]);
}
queueBuffer.length = 0;
}
}

@@ -774,3 +785,5 @@

case ADD_AND_ENTER_MODULE: {
if (chunkGraph.isModuleInChunk(module, chunk)) {
const isModuleInChunk = chunkGraph.isModuleInChunk(module, chunk);
if (isModuleInChunk) {
// already connected, skip it

@@ -781,2 +794,6 @@ break;

chunkGraph.connectChunkAndModule(chunk, module);
const moduleOrdinal = getModuleOrdinal(module);
let chunkMask = maskByChunk.get(chunk);
chunkMask |= ONE_BIGINT << BigInt(moduleOrdinal);
maskByChunk.set(chunk, chunkMask);
}

@@ -838,40 +855,18 @@ // fallthrough

/**
* @param {ChunkGroupInfo} chunkGroupInfo The info object for the chunk group
* @returns {bigint} The mask of available modules after the chunk group
*/
const calculateResultingAvailableModules = chunkGroupInfo => {
if (chunkGroupInfo.resultingAvailableModules)
if (chunkGroupInfo.resultingAvailableModules !== undefined)
return chunkGroupInfo.resultingAvailableModules;
const minAvailableModules = chunkGroupInfo.minAvailableModules;
let resultingAvailableModules = chunkGroupInfo.minAvailableModules;
// Create a new Set of available modules at this point
// We want to be as lazy as possible. There are multiple ways doing this:
// Note that resultingAvailableModules is stored as "(a) + (b)" as it's a ModuleSetPlus
// - resultingAvailableModules = (modules of chunk) + (minAvailableModules + minAvailableModules.plus)
// - resultingAvailableModules = (minAvailableModules + modules of chunk) + (minAvailableModules.plus)
// We choose one depending on the size of minAvailableModules vs minAvailableModules.plus
let resultingAvailableModules;
if (minAvailableModules.size > minAvailableModules.plus.size) {
// resultingAvailableModules = (modules of chunk) + (minAvailableModules + minAvailableModules.plus)
resultingAvailableModules =
/** @type {Set<Module> & {plus: Set<Module>}} */ (new Set());
for (const module of minAvailableModules.plus)
minAvailableModules.add(module);
minAvailableModules.plus = EMPTY_SET;
resultingAvailableModules.plus = minAvailableModules;
chunkGroupInfo.minAvailableModulesOwned = false;
} else {
// resultingAvailableModules = (minAvailableModules + modules of chunk) + (minAvailableModules.plus)
resultingAvailableModules =
/** @type {Set<Module> & {plus: Set<Module>}} */ (
new Set(minAvailableModules)
);
resultingAvailableModules.plus = minAvailableModules.plus;
}
// add the modules from the chunk group to the set
for (const chunk of chunkGroupInfo.chunkGroup.chunks) {
for (const m of chunkGraph.getChunkModulesIterable(chunk)) {
resultingAvailableModules.add(m);
}
const mask = maskByChunk.get(chunk);
resultingAvailableModules |= mask;
}
return (chunkGroupInfo.resultingAvailableModules =

@@ -923,228 +918,20 @@ resultingAvailableModules);

const availableModulesToBeMerged = info.availableModulesToBeMerged;
let cachedMinAvailableModules = info.minAvailableModules;
const cachedMinAvailableModules = info.minAvailableModules;
let minAvailableModules = cachedMinAvailableModules;
statMergedAvailableModuleSets += availableModulesToBeMerged.length;
// 1. Get minimal available modules
// It doesn't make sense to traverse a chunk again with more available modules.
// This step calculates the minimal available modules and skips traversal when
// the list didn't shrink.
if (availableModulesToBeMerged.length > 1) {
availableModulesToBeMerged.sort(bySetSize);
}
let changed = false;
merge: for (const availableModules of availableModulesToBeMerged) {
if (cachedMinAvailableModules === undefined) {
cachedMinAvailableModules = availableModules;
info.minAvailableModules = cachedMinAvailableModules;
info.minAvailableModulesOwned = false;
changed = true;
for (const availableModules of availableModulesToBeMerged) {
if (minAvailableModules === undefined) {
minAvailableModules = availableModules;
} else {
if (info.minAvailableModulesOwned) {
// We own it and can modify it
if (cachedMinAvailableModules.plus === availableModules.plus) {
for (const m of cachedMinAvailableModules) {
if (!availableModules.has(m)) {
cachedMinAvailableModules.delete(m);
changed = true;
}
}
} else {
for (const m of cachedMinAvailableModules) {
if (!availableModules.has(m) && !availableModules.plus.has(m)) {
cachedMinAvailableModules.delete(m);
changed = true;
}
}
for (const m of cachedMinAvailableModules.plus) {
if (!availableModules.has(m) && !availableModules.plus.has(m)) {
// We can't remove modules from the plus part
// so we need to merge plus into the normal part to allow modifying it
const iterator =
cachedMinAvailableModules.plus[Symbol.iterator]();
// fast forward add all modules until m
/** @type {IteratorResult<Module>} */
let it;
while (!(it = iterator.next()).done) {
const module = it.value;
if (module === m) break;
cachedMinAvailableModules.add(module);
}
// check the remaining modules before adding
while (!(it = iterator.next()).done) {
const module = it.value;
if (
availableModules.has(module) ||
availableModules.plus.has(module)
) {
cachedMinAvailableModules.add(module);
}
}
cachedMinAvailableModules.plus = EMPTY_SET;
changed = true;
continue merge;
}
}
}
} else if (cachedMinAvailableModules.plus === availableModules.plus) {
// Common and fast case when the plus part is shared
// We only need to care about the normal part
if (availableModules.size < cachedMinAvailableModules.size) {
// the new availableModules is smaller so it's faster to
// fork from the new availableModules
statForkedAvailableModules++;
statForkedAvailableModulesCount += availableModules.size;
statForkedMergedModulesCount += cachedMinAvailableModules.size;
// construct a new Set as intersection of cachedMinAvailableModules and availableModules
const newSet = /** @type {ModuleSetPlus} */ (new Set());
newSet.plus = availableModules.plus;
for (const m of availableModules) {
if (cachedMinAvailableModules.has(m)) {
newSet.add(m);
}
}
statForkedResultModulesCount += newSet.size;
cachedMinAvailableModules = newSet;
info.minAvailableModulesOwned = true;
info.minAvailableModules = newSet;
changed = true;
continue merge;
}
for (const m of cachedMinAvailableModules) {
if (!availableModules.has(m)) {
// cachedMinAvailableModules need to be modified
// but we don't own it
statForkedAvailableModules++;
statForkedAvailableModulesCount +=
cachedMinAvailableModules.size;
statForkedMergedModulesCount += availableModules.size;
// construct a new Set as intersection of cachedMinAvailableModules and availableModules
// as the plus part is equal we can just take over this one
const newSet = /** @type {ModuleSetPlus} */ (new Set());
newSet.plus = availableModules.plus;
const iterator = cachedMinAvailableModules[Symbol.iterator]();
// fast forward add all modules until m
/** @type {IteratorResult<Module>} */
let it;
while (!(it = iterator.next()).done) {
const module = it.value;
if (module === m) break;
newSet.add(module);
}
// check the remaining modules before adding
while (!(it = iterator.next()).done) {
const module = it.value;
if (availableModules.has(module)) {
newSet.add(module);
}
}
statForkedResultModulesCount += newSet.size;
cachedMinAvailableModules = newSet;
info.minAvailableModulesOwned = true;
info.minAvailableModules = newSet;
changed = true;
continue merge;
}
}
} else {
for (const m of cachedMinAvailableModules) {
if (!availableModules.has(m) && !availableModules.plus.has(m)) {
// cachedMinAvailableModules need to be modified
// but we don't own it
statForkedAvailableModules++;
statForkedAvailableModulesCount +=
cachedMinAvailableModules.size;
statForkedAvailableModulesCountPlus +=
cachedMinAvailableModules.plus.size;
statForkedMergedModulesCount += availableModules.size;
statForkedMergedModulesCountPlus += availableModules.plus.size;
// construct a new Set as intersection of cachedMinAvailableModules and availableModules
const newSet = /** @type {ModuleSetPlus} */ (new Set());
newSet.plus = EMPTY_SET;
const iterator = cachedMinAvailableModules[Symbol.iterator]();
// fast forward add all modules until m
/** @type {IteratorResult<Module>} */
let it;
while (!(it = iterator.next()).done) {
const module = it.value;
if (module === m) break;
newSet.add(module);
}
// check the remaining modules before adding
while (!(it = iterator.next()).done) {
const module = it.value;
if (
availableModules.has(module) ||
availableModules.plus.has(module)
) {
newSet.add(module);
}
}
// also check all modules in cachedMinAvailableModules.plus
for (const module of cachedMinAvailableModules.plus) {
if (
availableModules.has(module) ||
availableModules.plus.has(module)
) {
newSet.add(module);
}
}
statForkedResultModulesCount += newSet.size;
cachedMinAvailableModules = newSet;
info.minAvailableModulesOwned = true;
info.minAvailableModules = newSet;
changed = true;
continue merge;
}
}
for (const m of cachedMinAvailableModules.plus) {
if (!availableModules.has(m) && !availableModules.plus.has(m)) {
// cachedMinAvailableModules need to be modified
// but we don't own it
statForkedAvailableModules++;
statForkedAvailableModulesCount +=
cachedMinAvailableModules.size;
statForkedAvailableModulesCountPlus +=
cachedMinAvailableModules.plus.size;
statForkedMergedModulesCount += availableModules.size;
statForkedMergedModulesCountPlus += availableModules.plus.size;
// construct a new Set as intersection of cachedMinAvailableModules and availableModules
// we already know that all modules directly from cachedMinAvailableModules are in availableModules too
const newSet = /** @type {ModuleSetPlus} */ (
new Set(cachedMinAvailableModules)
);
newSet.plus = EMPTY_SET;
const iterator =
cachedMinAvailableModules.plus[Symbol.iterator]();
// fast forward add all modules until m
/** @type {IteratorResult<Module>} */
let it;
while (!(it = iterator.next()).done) {
const module = it.value;
if (module === m) break;
newSet.add(module);
}
// check the remaining modules before adding
while (!(it = iterator.next()).done) {
const module = it.value;
if (
availableModules.has(module) ||
availableModules.plus.has(module)
) {
newSet.add(module);
}
}
statForkedResultModulesCount += newSet.size;
cachedMinAvailableModules = newSet;
info.minAvailableModulesOwned = true;
info.minAvailableModules = newSet;
changed = true;
continue merge;
}
}
}
minAvailableModules &= availableModules;
}
}
const changed = minAvailableModules !== cachedMinAvailableModules;
availableModulesToBeMerged.length = 0;
if (changed) {
info.minAvailableModules = minAvailableModules;
info.resultingAvailableModules = undefined;

@@ -1162,3 +949,3 @@ outdatedChunkGroupInfo.add(info);

)) {
if (!source.minAvailableModules) {
if (source.minAvailableModules === undefined) {
chunkGroupsForCombining.delete(info);

@@ -1169,13 +956,5 @@ break;

}
for (const info of chunkGroupsForCombining) {
const availableModules = /** @type {ModuleSetPlus} */ (new Set());
availableModules.plus = EMPTY_SET;
const mergeSet = set => {
if (set.size > availableModules.plus.size) {
for (const item of availableModules.plus) availableModules.add(item);
availableModules.plus = set;
} else {
for (const item of set) availableModules.add(item);
}
};
let availableModules = ZERO_BIGINT;
// combine minAvailableModules from all resultingAvailableModules

@@ -1187,7 +966,5 @@ for (const source of /** @type {Set<ChunkGroupInfo>} */ (

calculateResultingAvailableModules(source);
mergeSet(resultingAvailableModules);
mergeSet(resultingAvailableModules.plus);
availableModules |= resultingAvailableModules;
}
info.minAvailableModules = availableModules;
info.minAvailableModulesOwned = false;
info.resultingAvailableModules = undefined;

@@ -1206,9 +983,7 @@ outdatedChunkGroupInfo.add(info);

const minAvailableModules =
/** @type {ModuleSetPlus} */
/** @type {bigint} */
(info.minAvailableModules);
for (const module of info.skippedItems) {
if (
!minAvailableModules.has(module) &&
!minAvailableModules.plus.has(module)
) {
const ordinal = getModuleOrdinal(module);
if (!isOrdinalSetInMask(minAvailableModules, ordinal)) {
queue.push({

@@ -1230,3 +1005,3 @@ action: ADD_AND_ENTER_MODULE,

const minAvailableModules =
/** @type {ModuleSetPlus} */
/** @type {bigint} */
(info.minAvailableModules);

@@ -1241,12 +1016,9 @@ for (const entry of info.skippedModuleConnections) {

if (activeState === true) {
const ordinal = getModuleOrdinal(module);
info.skippedModuleConnections.delete(entry);
if (isOrdinalSetInMask(minAvailableModules, ordinal)) {
info.skippedItems.add(module);
continue;
}
}
if (
activeState === true &&
(minAvailableModules.has(module) ||
minAvailableModules.plus.has(module))
) {
info.skippedItems.add(module);
continue;
}
queue.push({

@@ -1341,3 +1113,3 @@ action: activeState === true ? ADD_AND_ENTER_MODULE : PROCESS_BLOCK,

const process = (current, visited = new Set()) => {
const process = (current, visited) => {
if (visited.has(current)) {

@@ -1354,4 +1126,3 @@ return;

for (let i = 0; i < blockModules.length; i += 3) {
const refModule = /** @type {Module} */ (blockModules[i]);
for (let i = 0, len = blockModules.length; i < len; i += 3) {
const activeState = /** @type {ConnectionState} */ (

@@ -1363,2 +1134,3 @@ blockModules[i + 1]

}
const refModule = /** @type {Module} */ (blockModules[i]);

@@ -1373,5 +1145,6 @@ if (refModule) {

process(block);
process(block, new Set());
}
outdatedOrderIndexChunkGroups.clear();
ordinalByModule.clear();

@@ -1395,3 +1168,3 @@ logger.log(

* @param {Map<AsyncDependenciesBlock, BlockChunkGroupConnection[]>} blockConnections connection for blocks
* @param {Map<ChunkGroup, ChunkGroupInfo>} chunkGroupInfoMap mapping from chunk group to available modules
* @param {Map<Chunk, bigint>} maskByChunk mapping from chunk to module mask
*/

@@ -1402,3 +1175,3 @@ const connectChunkGroups = (

blockConnections,
chunkGroupInfoMap
maskByChunk
) => {

@@ -1411,3 +1184,3 @@ const { chunkGraph } = compilation;

* @param {ChunkGroup} chunkGroup the chunkGroup to scan
* @param {ModuleSetPlus} availableModules the comparator set
* @param {bigint} availableModules the comparator set
* @returns {boolean} return true if all modules of a chunk are available

@@ -1417,6 +1190,4 @@ */

for (const chunk of chunkGroup.chunks) {
for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
if (!availableModules.has(module) && !availableModules.plus.has(module))
return false;
}
const chunkMask = maskByChunk.get(chunk);
if ((chunkMask & availableModules) !== chunkMask) return false;
}

@@ -1506,2 +1277,5 @@ return true;

/** @type {Map<Chunk, bigint>} */
const maskByChunk = new Map();
// PART ONE

@@ -1517,3 +1291,4 @@

blocksWithNestedBlocks,
allCreatedChunkGroups
allCreatedChunkGroups,
maskByChunk
);

@@ -1529,3 +1304,3 @@ logger.timeEnd("visitModules");

blockConnections,
chunkGroupInfoMap
maskByChunk
);

@@ -1532,0 +1307,0 @@ logger.timeEnd("connectChunkGroups");

@@ -26,2 +26,3 @@ /*

/** @typedef {import("../Module").NeedBuildContext} NeedBuildContext */
/** @typedef {import("../Module").SourceTypes} SourceTypes */
/** @typedef {import("../RequestShortener")} RequestShortener */

@@ -58,3 +59,3 @@ /** @typedef {import("../ResolverFactory").ResolverWithOptions} ResolverWithOptions */

/**
* @returns {Set<string>} types available (do not mutate)
* @returns {SourceTypes} types available (do not mutate)
*/

@@ -61,0 +62,0 @@ getSourceTypes() {

@@ -25,2 +25,3 @@ /*

/** @typedef {import("../Module").NeedBuildContext} NeedBuildContext */
/** @typedef {import("../Module").SourceTypes} SourceTypes */
/** @typedef {import("../RequestShortener")} RequestShortener */

@@ -120,3 +121,3 @@ /** @typedef {import("../ResolverFactory").ResolverWithOptions} ResolverWithOptions */

/**
* @returns {Set<string>} types available (do not mutate)
* @returns {SourceTypes} types available (do not mutate)
*/

@@ -123,0 +124,0 @@ getSourceTypes() {

@@ -24,2 +24,3 @@ /*

/** @typedef {import("../Module").NeedBuildContext} NeedBuildContext */
/** @typedef {import("../Module").SourceTypes} SourceTypes */
/** @typedef {import("../RequestShortener")} RequestShortener */

@@ -123,3 +124,3 @@ /** @typedef {import("../ResolverFactory").ResolverWithOptions} ResolverWithOptions */

/**
* @returns {Set<string>} types available (do not mutate)
* @returns {SourceTypes} types available (do not mutate)
*/

@@ -126,0 +127,0 @@ getSourceTypes() {

@@ -41,2 +41,3 @@ /*

/** @typedef {import("./Module").NeedBuildContext} NeedBuildContext */
/** @typedef {import("./Module").SourceTypes} SourceTypes */
/** @typedef {import("./ModuleGraph")} ModuleGraph */

@@ -148,3 +149,3 @@ /** @typedef {import("./RequestShortener")} RequestShortener */

/**
* @returns {Set<string>} types available (do not mutate)
* @returns {SourceTypes} types available (do not mutate)
*/

@@ -151,0 +152,0 @@ getSourceTypes() {

@@ -229,3 +229,3 @@ /*

withHmr ? "moduleIds.push(token); " : ""
}token = ""; exports = {}; exportsWithId.length = 0; }`,
}token = ""; exports = {}; exportsWithId.length = 0; exportsWithDashes.length = 0; }`,
`else if(cc == ${cc("\\")}) { token += data[++i] }`,

@@ -232,0 +232,0 @@ `else { token += data[i]; }`

@@ -154,5 +154,2 @@ /*

/**
* @param {ObjectDeserializerContext} context context
*/
deserialize(context) {

@@ -159,0 +156,0 @@ const { read } = context;

@@ -28,2 +28,3 @@ /*

/** @typedef {import("./Module").SourceContext} SourceContext */
/** @typedef {import("./Module").SourceTypes} SourceTypes */
/** @typedef {import("./RequestShortener")} RequestShortener */

@@ -70,3 +71,3 @@ /** @typedef {import("./ResolverFactory").ResolverWithOptions} ResolverWithOptions */

/**
* @returns {Set<string>} types available (do not mutate)
* @returns {SourceTypes} types available (do not mutate)
*/

@@ -73,0 +74,0 @@ getSourceTypes() {

@@ -146,3 +146,4 @@ /*

let ids = this.getIds(moduleGraph);
if (ids.length === 0) return this._getReferencedExportsInDestructuring();
if (ids.length === 0)
return this._getReferencedExportsInDestructuring(moduleGraph);
let namespaceObjectAsContext = this.namespaceObjectAsContext;

@@ -164,3 +165,3 @@ if (ids[0] === "default") {

if (ids.length === 1)
return this._getReferencedExportsInDestructuring();
return this._getReferencedExportsInDestructuring(moduleGraph);
ids = ids.slice(1);

@@ -183,17 +184,26 @@ namespaceObjectAsContext = true;

return this._getReferencedExportsInDestructuring(ids);
return this._getReferencedExportsInDestructuring(moduleGraph, ids);
}
/**
* @param {ModuleGraph} moduleGraph module graph
* @param {string[]=} ids ids
* @returns {(string[] | ReferencedExport)[]} referenced exports
*/
_getReferencedExportsInDestructuring(ids) {
_getReferencedExportsInDestructuring(moduleGraph, ids) {
if (this.referencedPropertiesInDestructuring) {
/** @type {ReferencedExport[]} */
const refs = [];
const importedModule = moduleGraph.getModule(this);
const canMangle =
Array.isArray(ids) &&
ids.length > 0 &&
!moduleGraph
.getExportsInfo(importedModule)
.getExportInfo(ids[0])
.isReexport();
for (const key of this.referencedPropertiesInDestructuring) {
refs.push({
name: ids ? ids.concat([key]) : [key],
canMangle: Array.isArray(ids) && ids.length > 0
canMangle
});

@@ -200,0 +210,0 @@ }

@@ -32,3 +32,3 @@ /*

* @param {Object} workerDependencyOptions options
* @param {string} workerDependencyOptions.publicPath public path for the worker
* @param {string=} workerDependencyOptions.publicPath public path for the worker
*/

@@ -35,0 +35,0 @@ constructor(request, range, workerDependencyOptions) {

@@ -74,6 +74,6 @@ /*

/**
* @param {ChunkLoading} chunkLoading chunk loading
* @param {WasmLoading} wasmLoading wasm loading
* @param {OutputModule} module output module
* @param {WorkerPublicPath} workerPublicPath worker public path
* @param {ChunkLoading=} chunkLoading chunk loading
* @param {WasmLoading=} wasmLoading wasm loading
* @param {OutputModule=} module output module
* @param {WorkerPublicPath=} workerPublicPath worker public path
*/

@@ -80,0 +80,0 @@ constructor(chunkLoading, wasmLoading, module, workerPublicPath) {

@@ -25,2 +25,3 @@ /*

/** @typedef {import("./Module").SourceContext} SourceContext */
/** @typedef {import("./Module").SourceTypes} SourceTypes */
/** @typedef {import("./RequestShortener")} RequestShortener */

@@ -56,3 +57,3 @@ /** @typedef {import("./ResolverFactory").ResolverWithOptions} ResolverWithOptions */

/**
* @returns {Set<string>} types available (do not mutate)
* @returns {SourceTypes} types available (do not mutate)
*/

@@ -59,0 +60,0 @@ getSourceTypes() {

@@ -37,2 +37,3 @@ /*

/** @typedef {import("./Module").NeedBuildContext} NeedBuildContext */
/** @typedef {import("./Module").SourceTypes} SourceTypes */
/** @typedef {import("./NormalModuleFactory")} NormalModuleFactory */

@@ -419,3 +420,3 @@ /** @typedef {import("./RequestShortener")} RequestShortener */

/**
* @returns {Set<string>} types available (do not mutate)
* @returns {SourceTypes} types available (do not mutate)
*/

@@ -422,0 +423,0 @@ getSourceTypes() {

@@ -30,2 +30,3 @@ /*

/** @typedef {import("../Module").NeedBuildContext} NeedBuildContext */
/** @typedef {import("../Module").SourceTypes} SourceTypes */
/** @typedef {import("../ModuleFactory").ModuleFactoryCreateData} ModuleFactoryCreateData */

@@ -194,3 +195,3 @@ /** @typedef {import("../ModuleFactory").ModuleFactoryResult} ModuleFactoryResult */

/**
* @returns {Set<string>} types available (do not mutate)
* @returns {SourceTypes} types available (do not mutate)
*/

@@ -197,0 +198,0 @@ getSourceTypes() {

@@ -24,2 +24,3 @@ /*

/** @typedef {import("./Compilation")} Compilation */
/** @typedef {import("./Compilation").AssetInfo} AssetInfo */
/** @typedef {import("./ConcatenationScope")} ConcatenationScope */

@@ -31,2 +32,3 @@ /** @typedef {import("./Dependency")} Dependency */

/** @typedef {import("./FileSystemInfo")} FileSystemInfo */
/** @typedef {import("./FileSystemInfo").Snapshot} Snapshot */
/** @typedef {import("./ModuleGraphConnection").ConnectionState} ConnectionState */

@@ -106,2 +108,17 @@ /** @typedef {import("./ModuleTypeConstants").ModuleTypes} ModuleTypes */

/**
* @typedef {Object} KnownBuildInfo
* @property {boolean=} cacheable
* @property {boolean=} parsed
* @property {LazySet<string>=} fileDependencies
* @property {LazySet<string>=} contextDependencies
* @property {LazySet<string>=} missingDependencies
* @property {LazySet<string>=} buildDependencies
* @property {(Map<string, string | Set<string>>)=} valueDependencies
* @property {TODO=} hash
* @property {Record<string, Source>=} assets
* @property {Map<string, AssetInfo | undefined>=} assetsInfo
* @property {(Snapshot | null)=} snapshot
*/
/**
* @typedef {Object} NeedBuildContext

@@ -114,3 +131,3 @@ * @property {Compilation} compilation

/** @typedef {KnownBuildMeta & Record<string, any>} BuildMeta */
/** @typedef {Record<string, any>} BuildInfo */
/** @typedef {KnownBuildInfo & Record<string, any>} BuildInfo */

@@ -122,2 +139,6 @@ /**

/** @typedef {Set<string>} SourceTypes */
/** @typedef {{ factoryMeta: FactoryMeta | undefined, resolveOptions: ResolveOptions | undefined }} UnsafeCacheData */
const EMPTY_RESOLVE_OPTIONS = {};

@@ -820,3 +841,3 @@

* @abstract
* @returns {Set<string>} types available (do not mutate)
* @returns {SourceTypes} types available (do not mutate)
*/

@@ -965,3 +986,3 @@ getSourceTypes() {

* This data will be passed to restoreFromUnsafeCache later.
* @returns {object} cached data
* @returns {UnsafeCacheData} cached data
*/

@@ -968,0 +989,0 @@ getUnsafeCacheData() {

@@ -22,2 +22,3 @@ /*

/** @typedef {import("./Watching")} Watching */
/** @typedef {import("./logging/Logger").Logger} Logger */
/** @typedef {import("./util/fs").InputFileSystem} InputFileSystem */

@@ -84,3 +85,3 @@ /** @typedef {import("./util/fs").IntermediateFileSystem} IntermediateFileSystem */

/** @type {Stats[]} */
/** @type {(Stats | null)[]} */
const compilerStats = this.compilers.map(() => null);

@@ -99,3 +100,5 @@ let doneCompilers = 0;

if (doneCompilers === this.compilers.length) {
this.hooks.done.call(new MultiStats(compilerStats));
this.hooks.done.call(
new MultiStats(/** @type {Stats[]} */ (compilerStats))
);
}

@@ -186,2 +189,6 @@ });

/**
* @param {string | (function(): string)} name name of the logger, or function called once to get the logger name
* @returns {Logger} a logger with that name
*/
getInfrastructureLogger(name) {

@@ -209,2 +216,6 @@ return this.compilers[0].getInfrastructureLogger(name);

const missing = [];
/**
* @param {Compiler} compiler compiler
* @returns {boolean} target was found
*/
const targetFound = compiler => {

@@ -282,2 +293,6 @@ for (const edge of edges) {

let remainingCompilers = compilers;
/**
* @param {string} d dependency
* @returns {boolean} when dependency was fulfilled
*/
const isDependencyFulfilled = d => fulfilledNames.has(d);

@@ -300,2 +315,6 @@ const getReadyCompilers = () => {

};
/**
* @param {Callback<MultiStats>} callback callback
* @returns {void}
*/
const runCompilers = callback => {

@@ -312,3 +331,3 @@ if (remainingCompilers.length === 0) return callback();

},
callback
/** @type {Callback<TODO>} */ (callback)
);

@@ -327,3 +346,3 @@ };

_runGraph(setup, run, callback) {
/** @typedef {{ compiler: Compiler, setupResult: SetupResult, result: Stats, state: "pending" | "blocked" | "queued" | "starting" | "running" | "running-outdated" | "done", children: Node[], parents: Node[] }} Node */
/** @typedef {{ compiler: Compiler, setupResult: undefined | SetupResult, result: undefined | Stats, state: "pending" | "blocked" | "queued" | "starting" | "running" | "running-outdated" | "done", children: Node[], parents: Node[] }} Node */

@@ -353,8 +372,10 @@ // State transitions for nodes:

const compilerToNode = new Map();
for (const node of nodes) compilerToNode.set(node.compiler.name, node);
for (const node of nodes) {
compilerToNode.set(/** @type {string} */ (node.compiler.name), node);
}
for (const node of nodes) {
const dependencies = this.dependencies.get(node.compiler);
if (!dependencies) continue;
for (const dep of dependencies) {
const parent = compilerToNode.get(dep);
const parent = /** @type {Node} */ (compilerToNode.get(dep));
node.parents.push(parent);

@@ -374,6 +395,6 @@ parent.children.push(node);

let running = 0;
const parallelism = this._options.parallelism;
const parallelism = /** @type {number} */ (this._options.parallelism);
/**
* @param {Node} node node
* @param {Error=} err error
* @param {(Error | null)=} err error
* @param {Stats=} stats result

@@ -454,2 +475,3 @@ * @returns {void}

/** @type {SetupResult[]} */
const setupResults = [];

@@ -476,3 +498,3 @@ nodes.forEach((node, i) => {

while (running < parallelism && queue.length > 0 && !errored) {
const node = queue.dequeue();
const node = /** @type {Node} */ (queue.dequeue());
if (

@@ -485,3 +507,7 @@ node.state === "queued" ||

node.state = "starting";
run(node.compiler, node.setupResult, nodeDone.bind(null, node));
run(
node.compiler,
/** @type {SetupResult} */ (node.setupResult),
nodeDone.bind(null, node)
);
node.state = "running";

@@ -488,0 +514,0 @@ }

@@ -52,3 +52,2 @@ /*

/** @typedef {import("webpack-sources").Source} Source */
/** @typedef {import("../declarations/LoaderContext").NormalModuleLoaderContext} NormalModuleLoaderContext */
/** @typedef {import("../declarations/WebpackOptions").Mode} Mode */

@@ -62,7 +61,12 @@ /** @typedef {import("../declarations/WebpackOptions").ResolveOptions} ResolveOptions */

/** @typedef {import("./Generator")} Generator */
/** @typedef {import("./Module").BuildInfo} BuildInfo */
/** @typedef {import("./Module").BuildMeta} BuildMeta */
/** @typedef {import("./Module").CodeGenerationContext} CodeGenerationContext */
/** @typedef {import("./Module").CodeGenerationResult} CodeGenerationResult */
/** @typedef {import("./Module").ConcatenationBailoutReasonContext} ConcatenationBailoutReasonContext */
/** @typedef {import("./Module").KnownBuildInfo} KnownBuildInfo */
/** @typedef {import("./Module").LibIdentOptions} LibIdentOptions */
/** @typedef {import("./Module").NeedBuildContext} NeedBuildContext */
/** @typedef {import("./Module").SourceTypes} SourceTypes */
/** @typedef {import("./Module").UnsafeCacheData} UnsafeCacheData */
/** @typedef {import("./ModuleGraph")} ModuleGraph */

@@ -74,2 +78,3 @@ /** @typedef {import("./ModuleGraphConnection").ConnectionState} ConnectionState */

/** @typedef {import("./RequestShortener")} RequestShortener */
/** @typedef {import("./ResolverFactory").ResolveContext} ResolveContext */
/** @typedef {import("./ResolverFactory").ResolverWithOptions} ResolverWithOptions */

@@ -84,2 +89,7 @@ /** @typedef {import("./RuntimeTemplate")} RuntimeTemplate */

/** @typedef {{[k: string]: any}} ParserOptions */
/** @typedef {{[k: string]: any}} GeneratorOptions */
/** @typedef {UnsafeCacheData & { parser: undefined | Parser, parserOptions: undefined | ParserOptions, generator: undefined | Generator, generatorOptions: undefined | GeneratorOptions }} NormalModuleUnsafeCacheData */
/**

@@ -219,5 +229,5 @@ * @typedef {Object} SourceMap

* @property {Parser} parser the parser used
* @property {Record<string, any>=} parserOptions the options of the parser used
* @property {ParserOptions=} parserOptions the options of the parser used
* @property {Generator} generator the generator used
* @property {Record<string, any>=} generatorOptions the options of the generator used
* @property {GeneratorOptions=} generatorOptions the options of the generator used
* @property {ResolveOptions=} resolveOptions options used for resolving requests from this module

@@ -272,5 +282,8 @@ */

};
compilationHooksMap.set(compilation, hooks);
compilationHooksMap.set(
compilation,
/** @type {NormalModuleCompilationHooks} */ (hooks)
);
}
return hooks;
return /** @type {NormalModuleCompilationHooks} */ (hooks);
}

@@ -309,7 +322,9 @@

this.binary = /^(asset|webassembly)\b/.test(type);
/** @type {Parser} */
/** @type {undefined | Parser} */
this.parser = parser;
/** @type {undefined | ParserOptions} */
this.parserOptions = parserOptions;
/** @type {Generator} */
/** @type {undefined | Generator} */
this.generator = generator;
/** @type {undefined | GeneratorOptions} */
this.generatorOptions = generatorOptions;

@@ -329,9 +344,18 @@ /** @type {string} */

// Info from Build
/** @type {(WebpackError | null)=} */
/** @type {WebpackError | null} */
this.error = null;
/** @private @type {Source=} */
/**
* @private
* @type {Source | null}
*/
this._source = null;
/** @private @type {Map<string, number> | undefined} **/
/**
* @private
* @type {Map<string, number> | undefined}
**/
this._sourceSizes = undefined;
/** @private @type {Set<string>} */
/**
* @private
* @type {undefined | SourceTypes}
**/
this._sourceTypes = undefined;

@@ -430,3 +454,3 @@

if (this._sourceTypes === undefined) this.getSourceTypes();
for (const type of this._sourceTypes) {
for (const type of /** @type {SourceTypes} */ (this._sourceTypes)) {
this.size(type);

@@ -445,6 +469,8 @@ }

* This data will be passed to restoreFromUnsafeCache later.
* @returns {object} cached data
* @returns {UnsafeCacheData} cached data
*/
getUnsafeCacheData() {
const data = super.getUnsafeCacheData();
const data =
/** @type {NormalModuleUnsafeCacheData} */
(super.getUnsafeCacheData());
data.parserOptions = this.parserOptions;

@@ -455,2 +481,7 @@ data.generatorOptions = this.generatorOptions;

/**
* restore unsafe cache data
* @param {NormalModuleUnsafeCacheData} unsafeCacheData data from getUnsafeCacheData
* @param {NormalModuleFactory} normalModuleFactory the normal module factory handling the unsafe caching
*/
restoreFromUnsafeCache(unsafeCacheData, normalModuleFactory) {

@@ -480,4 +511,4 @@ this._restoreFromUnsafeCache(unsafeCacheData, normalModuleFactory);

* @param {string} name the asset name
* @param {string} content the content
* @param {string | TODO} sourceMap an optional source map
* @param {string | Buffer} content the content
* @param {(string | SourceMap)=} sourceMap an optional source map
* @param {Object=} associatedObjectForCache object for caching

@@ -508,3 +539,7 @@ * @returns {Source} the created source

name,
contextifySourceMap(context, sourceMap, associatedObjectForCache)
contextifySourceMap(
context,
/** @type {SourceMap} */ (sourceMap),
associatedObjectForCache
)
);

@@ -518,2 +553,4 @@ }

/**
* @private
* @template T
* @param {ResolverWithOptions} resolver a resolver

@@ -524,3 +561,3 @@ * @param {WebpackOptions} options webpack options

* @param {NormalModuleCompilationHooks} hooks the hooks
* @returns {NormalModuleLoaderContext} loader context
* @returns {import("../declarations/LoaderContext").NormalModuleLoaderContext<T>} loader context
*/

@@ -534,12 +571,15 @@ _createLoaderContext(resolver, options, compilation, fs, hooks) {

};
/**
* @returns {ResolveContext} resolve context
*/
const getResolveContext = () => {
return {
fileDependencies: {
add: d => loaderContext.addDependency(d)
add: d => /** @type {TODO} */ (loaderContext).addDependency(d)
},
contextDependencies: {
add: d => loaderContext.addContextDependency(d)
add: d => /** @type {TODO} */ (loaderContext).addContextDependency(d)
},
missingDependencies: {
add: d => loaderContext.addMissingDependency(d)
add: d => /** @type {TODO} */ (loaderContext).addMissingDependency(d)
}

@@ -561,2 +601,7 @@ };

const utils = {
/**
* @param {string} context context
* @param {string} request request
* @returns {string} result
*/
absolutify: (context, request) => {

@@ -567,2 +612,7 @@ return context === this.context

},
/**
* @param {string} context context
* @param {string} request request
* @returns {string} result
*/
contextify: (context, request) => {

@@ -573,2 +623,6 @@ return context === this.context

},
/**
* @param {(string | typeof import("./util/Hash"))=} type type
* @returns {Hash} hash
*/
createHash: type => {

@@ -578,2 +632,3 @@ return createHash(type || compilation.outputOptions.hashFunction);

};
/** @type {import("../declarations/LoaderContext").NormalModuleLoaderContext<T>} */
const loaderContext = {

@@ -584,3 +639,3 @@ version: 2,

let { options } = loader;
let { options } = /** @type {LoaderItem} */ (loader);

@@ -673,8 +728,18 @@ if (typeof options === "string") {

emitFile: (name, content, sourceMap, assetInfo) => {
if (!this.buildInfo.assets) {
this.buildInfo.assets = Object.create(null);
this.buildInfo.assetsInfo = new Map();
const buildInfo = /** @type {BuildInfo} */ (this.buildInfo);
if (!buildInfo.assets) {
buildInfo.assets = Object.create(null);
buildInfo.assetsInfo = new Map();
}
this.buildInfo.assets[name] = this.createSourceForAsset(
options.context,
const assets =
/** @type {NonNullable<KnownBuildInfo["assets"]>} */
(buildInfo.assets);
const assetsInfo =
/** @type {NonNullable<KnownBuildInfo["assetsInfo"]>} */
(buildInfo.assetsInfo);
assets[name] = this.createSourceForAsset(
/** @type {string} */ (options.context),
name,

@@ -685,12 +750,14 @@ content,

);
this.buildInfo.assetsInfo.set(name, assetInfo);
assetsInfo.set(name, assetInfo);
},
addBuildDependency: dep => {
if (this.buildInfo.buildDependencies === undefined) {
this.buildInfo.buildDependencies = new LazySet();
const buildInfo = /** @type {BuildInfo} */ (this.buildInfo);
if (buildInfo.buildDependencies === undefined) {
buildInfo.buildDependencies = new LazySet();
}
this.buildInfo.buildDependencies.add(dep);
buildInfo.buildDependencies.add(dep);
},
utils,
rootContext: options.context,
rootContext: /** @type {string} */ (options.context),
webpack: true,

@@ -712,2 +779,8 @@ sourceMap: !!this.useSourceMap,

// TODO remove `loaderContext` in webpack@6
/**
* @param {TODO} loaderContext loader context
* @param {number} index index
* @returns {LoaderItem | null} loader
*/
getCurrentLoader(loaderContext, index = loaderContext.loaderIndex) {

@@ -729,3 +802,3 @@ if (

* @param {string | Buffer} content the content
* @param {string | TODO} sourceMap an optional source map
* @param {(string | SourceMapSource)=} sourceMap an optional source map
* @param {Object=} associatedObjectForCache object for caching

@@ -751,3 +824,7 @@ * @returns {Source} the created source

contextifySourceUrl(context, identifier, associatedObjectForCache),
contextifySourceMap(context, sourceMap, associatedObjectForCache)
contextifySourceMap(
context,
/** @type {TODO} */ (sourceMap),
associatedObjectForCache
)
);

@@ -820,3 +897,3 @@ }

this._source = this.createSource(
options.context,
/** @type {string} */ (options.context),
this.binary ? asBuffer(source) : asString(source),

@@ -836,7 +913,9 @@ sourceMap,

this.buildInfo.fileDependencies = new LazySet();
this.buildInfo.contextDependencies = new LazySet();
this.buildInfo.missingDependencies = new LazySet();
this.buildInfo.cacheable = true;
const buildInfo = /** @type {BuildInfo} */ (this.buildInfo);
buildInfo.fileDependencies = new LazySet();
buildInfo.contextDependencies = new LazySet();
buildInfo.missingDependencies = new LazySet();
buildInfo.cacheable = true;
try {

@@ -850,3 +929,4 @@ hooks.beforeLoaders.call(this.loaders, this, loaderContext);

if (this.loaders.length > 0) {
this.buildInfo.buildDependencies = new LazySet();
/** @type {BuildInfo} */
(this.buildInfo).buildDependencies = new LazySet();
}

@@ -882,3 +962,4 @@

if (!result) {
this.buildInfo.cacheable = false;
/** @type {BuildInfo} */
(this.buildInfo).cacheable = false;
return processResult(

@@ -889,9 +970,26 @@ err || new Error("No result from loader-runner processing"),

}
this.buildInfo.fileDependencies.addAll(result.fileDependencies);
this.buildInfo.contextDependencies.addAll(result.contextDependencies);
this.buildInfo.missingDependencies.addAll(result.missingDependencies);
const buildInfo = /** @type {BuildInfo} */ (this.buildInfo);
const fileDependencies =
/** @type {NonNullable<KnownBuildInfo["fileDependencies"]>} */
(buildInfo.fileDependencies);
const contextDependencies =
/** @type {NonNullable<KnownBuildInfo["contextDependencies"]>} */
(buildInfo.contextDependencies);
const missingDependencies =
/** @type {NonNullable<KnownBuildInfo["missingDependencies"]>} */
(buildInfo.missingDependencies);
fileDependencies.addAll(result.fileDependencies);
contextDependencies.addAll(result.contextDependencies);
missingDependencies.addAll(result.missingDependencies);
for (const loader of this.loaders) {
this.buildInfo.buildDependencies.add(loader.loader);
const buildDependencies =
/** @type {NonNullable<KnownBuildInfo["buildDependencies"]>} */
(buildInfo.buildDependencies);
buildDependencies.add(loader.loader);
}
this.buildInfo.cacheable = this.buildInfo.cacheable && result.cacheable;
buildInfo.cacheable = buildInfo.cacheable && result.cacheable;
processResult(err, result.result);

@@ -913,2 +1011,7 @@ }

/**
* @param {TODO} rule rule
* @param {string} content content
* @returns {boolean} result
*/
applyNoParseRule(rule, content) {

@@ -927,5 +1030,7 @@ // must start with "rule" if rule is a string

// check if module should not be parsed
// returns "true" if the module should !not! be parsed
// returns "false" if the module !must! be parsed
/**
* @param {TODO} noParseRule no parse rule
* @param {string} request request
* @returns {boolean} check if module should not be parsed, returns "true" if the module should !not! be parsed, returns "false" if the module !must! be parsed
*/
shouldPreventParsing(noParseRule, request) {

@@ -956,2 +1061,6 @@ // if no noParseRule exists, return false

/**
* @param {Compilation} compilation compilation
* @private
*/
_initBuildHash(compilation) {

@@ -965,3 +1074,4 @@ const hash = createHash(compilation.outputOptions.hashFunction);

hash.update(JSON.stringify(this.buildMeta));
this.buildInfo.hash = /** @type {string} */ (hash.digest("hex"));
/** @type {BuildInfo} */
(this.buildInfo).hash = /** @type {string} */ (hash.digest("hex"));
}

@@ -1012,6 +1122,14 @@

/**
* @param {Error} e error
* @returns {void}
*/
const handleParseError = e => {
const source = this._source.source();
const source = /** @type {Source} */ (this._source).source();
const loaders = this.loaders.map(item =>
contextify(options.context, item.loader, compilation.compiler.root)
contextify(
/** @type {string} */ (options.context),
item.loader,
compilation.compiler.root
)
);

@@ -1024,3 +1142,3 @@ const error = new ModuleParseError(source, e, loaders, this.type);

const handleParseResult = result => {
const handleParseResult = () => {
this.dependencies.sort(

@@ -1033,3 +1151,5 @@ concatComparators(

this._initBuildHash(compilation);
this._lastSuccessfulBuildMeta = this.buildMeta;
this._lastSuccessfulBuildMeta =
/** @type {BuildMeta} */
(this.buildMeta);
return handleBuildDone();

@@ -1047,3 +1167,4 @@ };

const snapshotOptions = compilation.options.snapshot.module;
if (!this.buildInfo.cacheable || !snapshotOptions) {
const { cacheable } = /** @type {BuildInfo} */ (this.buildInfo);
if (!cacheable || !snapshotOptions) {
return callback();

@@ -1053,3 +1174,7 @@ }

// This makes it easier to find problems with watching and/or caching
/** @type {undefined | Set<string>} */
let nonAbsoluteDependencies = undefined;
/**
* @param {LazySet<string>} deps deps
*/
const checkDependencies = deps => {

@@ -1071,3 +1196,7 @@ for (const dep of deps) {

(depWithoutGlob !== dep
? this.buildInfo.contextDependencies
? /** @type {NonNullable<KnownBuildInfo["contextDependencies"]>} */
(
/** @type {BuildInfo} */ (this.buildInfo)
.contextDependencies
)
: deps

@@ -1082,5 +1211,15 @@ ).add(absolute);

};
checkDependencies(this.buildInfo.fileDependencies);
checkDependencies(this.buildInfo.missingDependencies);
checkDependencies(this.buildInfo.contextDependencies);
const buildInfo = /** @type {BuildInfo} */ (this.buildInfo);
const fileDependencies =
/** @type {NonNullable<KnownBuildInfo["fileDependencies"]>} */
(buildInfo.fileDependencies);
const contextDependencies =
/** @type {NonNullable<KnownBuildInfo["contextDependencies"]>} */
(buildInfo.contextDependencies);
const missingDependencies =
/** @type {NonNullable<KnownBuildInfo["missingDependencies"]>} */
(buildInfo.missingDependencies);
checkDependencies(fileDependencies);
checkDependencies(missingDependencies);
checkDependencies(contextDependencies);
if (nonAbsoluteDependencies !== undefined) {

@@ -1096,5 +1235,5 @@ const InvalidDependenciesModuleWarning =

startTime,
this.buildInfo.fileDependencies,
this.buildInfo.contextDependencies,
this.buildInfo.missingDependencies,
fileDependencies,
contextDependencies,
missingDependencies,
snapshotOptions,

@@ -1106,6 +1245,6 @@ (err, snapshot) => {

}
this.buildInfo.fileDependencies = undefined;
this.buildInfo.contextDependencies = undefined;
this.buildInfo.missingDependencies = undefined;
this.buildInfo.snapshot = snapshot;
buildInfo.fileDependencies = undefined;
buildInfo.contextDependencies = undefined;
buildInfo.missingDependencies = undefined;
buildInfo.snapshot = snapshot;
return callback();

@@ -1129,3 +1268,4 @@ }

// We assume that we need module and exports
this.buildInfo.parsed = false;
/** @type {BuildInfo} */
(this.buildInfo).parsed = false;
this._initBuildHash(compilation);

@@ -1135,6 +1275,6 @@ return handleBuildDone();

let result;
try {
const source = this._source.source();
result = this.parser.parse(this._ast || source, {
const source = /** @type {Source} */ (this._source).source();
/** @type {Parser} */
(this.parser).parse(this._ast || source, {
source,

@@ -1147,6 +1287,6 @@ current: this,

} catch (e) {
handleParseError(e);
handleParseError(/** @type {Error} */ (e));
return;
}
handleParseResult(result);
handleParseResult();
});

@@ -1160,3 +1300,5 @@ }

getConcatenationBailoutReason(context) {
return this.generator.getConcatenationBailoutReason(this, context);
return /** @type {Generator} */ (
this.generator
).getConcatenationBailoutReason(this, context);
}

@@ -1213,7 +1355,9 @@

/**
* @returns {Set<string>} types available (do not mutate)
* @returns {SourceTypes} types available (do not mutate)
*/
getSourceTypes() {
if (this._sourceTypes === undefined) {
this._sourceTypes = this.generator.getTypes(this);
this._sourceTypes = /** @type {Generator} */ (this.generator).getTypes(
this
);
}

@@ -1241,3 +1385,5 @@ return this._sourceTypes;

if (!this.buildInfo.parsed) {
const { parsed } = /** @type {BuildInfo} */ (this.buildInfo);
if (!parsed) {
runtimeRequirements.add(RuntimeGlobals.module);

@@ -1314,11 +1460,12 @@ runtimeRequirements.add(RuntimeGlobals.exports);

const { cacheable, snapshot, valueDependencies } =
/** @type {BuildInfo} */ (this.buildInfo);
// always build when module is not cacheable
if (!this.buildInfo.cacheable) return callback(null, true);
if (!cacheable) return callback(null, true);
// build when there is no snapshot to check
if (!this.buildInfo.snapshot) return callback(null, true);
if (!snapshot) return callback(null, true);
// build when valueDependencies have changed
/** @type {Map<string, string | Set<string>>} */
const valueDependencies = this.buildInfo.valueDependencies;
if (valueDependencies) {

@@ -1342,3 +1489,3 @@ if (!valueCacheVersions) return callback(null, true);

// check snapshot for validity
fileSystemInfo.checkSnapshotValid(this.buildInfo.snapshot, (err, valid) => {
fileSystemInfo.checkSnapshotValid(snapshot, (err, valid) => {
if (err) return callback(err);

@@ -1371,3 +1518,6 @@ if (!valid) return callback(null, true);

}
const size = Math.max(1, this.generator.getSize(this, type));
const size = Math.max(
1,
/** @type {Generator} */ (this.generator).getSize(this, type)
);
if (this._sourceSizes === undefined) {

@@ -1392,3 +1542,4 @@ this._sourceSizes = new Map();

) {
const { snapshot, buildDependencies: buildDeps } = this.buildInfo;
const { snapshot, buildDependencies: buildDeps } =
/** @type {BuildInfo} */ (this.buildInfo);
if (snapshot) {

@@ -1403,3 +1554,3 @@ fileDependencies.addAll(snapshot.getFileIterable());

missingDependencies: missingDeps
} = this.buildInfo;
} = /** @type {BuildInfo} */ (this.buildInfo);
if (fileDeps !== undefined) fileDependencies.addAll(fileDeps);

@@ -1420,3 +1571,3 @@ if (contextDeps !== undefined) contextDependencies.addAll(contextDeps);

updateHash(hash, context) {
hash.update(this.buildInfo.hash);
hash.update(/** @type {BuildInfo} */ (this.buildInfo).hash);
this.generator.updateHash(hash, {

@@ -1423,0 +1574,0 @@ module: this,

@@ -41,6 +41,13 @@ /*

/** @typedef {import("./ModuleFactory").ModuleFactoryCreateData} ModuleFactoryCreateData */
/** @typedef {import("./ModuleFactory").ModuleFactoryCreateDataContextInfo} ModuleFactoryCreateDataContextInfo */
/** @typedef {import("./ModuleFactory").ModuleFactoryResult} ModuleFactoryResult */
/** @typedef {import("./NormalModule").GeneratorOptions} GeneratorOptions */
/** @typedef {import("./NormalModule").LoaderItem} LoaderItem */
/** @typedef {import("./NormalModule").NormalModuleCreateData} NormalModuleCreateData */
/** @typedef {import("./NormalModule").ParserOptions} ParserOptions */
/** @typedef {import("./Parser")} Parser */
/** @typedef {import("./ResolverFactory")} ResolverFactory */
/** @typedef {import("./ResolverFactory").ResolveContext} ResolveContext */
/** @typedef {import("./ResolverFactory").ResolveRequest} ResolveRequest */
/** @typedef {import("./ResolverFactory").ResolverWithOptions} ResolverWithOptions */
/** @typedef {import("./dependencies/ModuleDependency")} ModuleDependency */

@@ -71,5 +78,5 @@ /** @typedef {import("./util/fs").InputFileSystem} InputFileSystem */

* @property {string} resource
* @property {string} path
* @property {string} query
* @property {string} fragment
* @property {string=} path
* @property {string=} query
* @property {string=} fragment
* @property {string=} context

@@ -80,3 +87,4 @@ */

/** @typedef {Object} ParsedLoaderRequest
/**
* @typedef {Object} ParsedLoaderRequest
* @property {string} loader loader

@@ -86,5 +94,16 @@ * @property {string|undefined} options options

/**
* @template T
* @callback Callback
* @param {(Error | null)=} err
* @param {T=} stats
* @returns {void}
*/
const EMPTY_RESOLVE_OPTIONS = {};
/** @type {ParserOptions} */
const EMPTY_PARSER_OPTIONS = {};
/** @type {GeneratorOptions} */
const EMPTY_GENERATOR_OPTIONS = {};
/** @type {ParsedLoaderRequest[]} */
const EMPTY_ELEMENTS = [];

@@ -95,2 +114,6 @@

/**
* @param {LoaderItem} data data
* @returns {string} ident
*/
const loaderToIdent = data => {

@@ -112,2 +135,7 @@ if (!data.options) {

/**
* @param {LoaderItem[]} loaders loaders
* @param {string} resource resource
* @returns {string} stringified loaders and resource
*/
const stringifyLoadersAndResource = (loaders, resource) => {

@@ -121,2 +149,7 @@ let str = "";

/**
* @param {number} times times
* @param {(err?: null | Error) => void} callback callback
* @returns {(err?: null | Error) => void} callback
*/
const needCalls = (times, callback) => {

@@ -134,2 +167,10 @@ return err => {

/**
* @template T
* @template O
* @param {T} globalOptions global options
* @param {string} type type
* @param {O} localOptions local options
* @returns {T & O | T | O} result
*/
const mergeGlobalOptions = (globalOptions, type, localOptions) => {

@@ -158,7 +199,18 @@ const parts = type.split("/");

// TODO webpack 6 remove
/**
* @param {string} name name
* @param {TODO} hook hook
* @returns {string} result
*/
const deprecationChangedHookMessage = (name, hook) => {
const names = hook.taps
.map(tapped => {
return tapped.name;
})
.map(
/**
* @param {TODO} tapped tapped
* @returns {string} name
*/
tapped => {
return tapped.name;
}
)
.join(", ");

@@ -238,10 +290,15 @@

module: new SyncWaterfallHook(["module", "createData", "resolveData"]),
/** @type {HookMap<SyncBailHook<[ParserOptions], Parser>>} */
createParser: new HookMap(() => new SyncBailHook(["parserOptions"])),
/** @type {HookMap<SyncBailHook<[TODO, ParserOptions], void>>} */
parser: new HookMap(() => new SyncHook(["parser", "parserOptions"])),
/** @type {HookMap<SyncBailHook<[GeneratorOptions], Generator>>} */
createGenerator: new HookMap(
() => new SyncBailHook(["generatorOptions"])
),
/** @type {HookMap<SyncBailHook<[TODO, GeneratorOptions], void>>} */
generator: new HookMap(
() => new SyncHook(["generator", "generatorOptions"])
),
/** @type {HookMap<SyncBailHook<[TODO, ResolveData], Module>>} */
createModuleClass: new HookMap(

@@ -264,3 +321,3 @@ () => new SyncBailHook(["createData", "resolveData"])

this._globalGeneratorOptions = options.generator;
/** @type {Map<string, WeakMap<Object, TODO>>} */
/** @type {Map<string, WeakMap<Object, Parser>>} */
this.parserCache = new Map();

@@ -327,3 +384,5 @@ /** @type {Map<string, WeakMap<Object, Generator>>} */

createdModule = this.hooks.createModuleClass
.for(createData.settings.type)
.for(
/** @type {ModuleSettings} */ (createData.settings).type
)
.call(createData, resolveData);

@@ -430,3 +489,3 @@

.split(/!+/);
unresolvedResource = rawElements.pop();
unresolvedResource = /** @type {string} */ (rawElements.pop());
elements = rawElements.map(el => {

@@ -449,2 +508,3 @@ const { path, query } = cachedParseResourceWithoutFragment(el);

/** @type {ResolveContext} */
const resolveContext = {

@@ -459,2 +519,3 @@ fileDependencies,

/** @type {undefined | LoaderItem[]} */
let loaders;

@@ -467,3 +528,3 @@

try {
for (const item of loaders) {
for (const item of /** @type {LoaderItem[]} */ (loaders)) {
if (typeof item.options === "string" && item.options[0] === "?") {

@@ -488,3 +549,3 @@ const ident = item.options.slice(1);

} catch (e) {
return callback(e);
return callback(/** @type {Error} */ (e));
}

@@ -501,4 +562,8 @@

: "") +
stringifyLoadersAndResource(loaders, resourceData.resource);
stringifyLoadersAndResource(
/** @type {LoaderItem[]} */ (loaders),
resourceData.resource
);
/** @type {ModuleSettings} */
const settings = {};

@@ -574,3 +639,8 @@ const useLoadersPost = [];

let postLoaders, normalLoaders, preLoaders;
/** @type {undefined | LoaderItem[]} */
let postLoaders;
/** @type {undefined | LoaderItem[]} */
let normalLoaders;
/** @type {undefined | LoaderItem[]} */
let preLoaders;

@@ -581,12 +651,17 @@ const continueCallback = needCalls(3, err => {

}
const allLoaders = postLoaders;
const allLoaders = /** @type {LoaderItem[]} */ (postLoaders);
if (matchResourceData === undefined) {
for (const loader of loaders) allLoaders.push(loader);
for (const loader of normalLoaders) allLoaders.push(loader);
for (const loader of /** @type {LoaderItem[]} */ (loaders))
allLoaders.push(loader);
for (const loader of /** @type {LoaderItem[]} */ (normalLoaders))
allLoaders.push(loader);
} else {
for (const loader of normalLoaders) allLoaders.push(loader);
for (const loader of loaders) allLoaders.push(loader);
for (const loader of /** @type {LoaderItem[]} */ (normalLoaders))
allLoaders.push(loader);
for (const loader of /** @type {LoaderItem[]} */ (loaders))
allLoaders.push(loader);
}
for (const loader of preLoaders) allLoaders.push(loader);
let type = settings.type;
for (const loader of /** @type {LoaderItem[]} */ (preLoaders))
allLoaders.push(loader);
let type = /** @type {string} */ (settings.type);
const resolveOptions = settings.resolve;

@@ -628,3 +703,3 @@ const layer = settings.layer;

} catch (e) {
return callback(e);
return callback(/** @type {Error} */ (e));
}

@@ -671,3 +746,3 @@ callback();

contextScheme ? this.context : context,
elements,
/** @type {LoaderItem[]} */ (elements),
loaderResolver,

@@ -682,2 +757,5 @@ resolveContext,

/**
* @param {string} context context
*/
const defaultResolve = context => {

@@ -861,2 +939,10 @@ if (/^($|\?)/.test(unresolvedResource)) {

/**
* @param {ModuleFactoryCreateDataContextInfo} contextInfo context info
* @param {string} context context
* @param {string} unresolvedResource unresolved resource
* @param {ResolverWithOptions} resolver resolver
* @param {ResolveContext} resolveContext resolver context
* @param {(err: null | Error, res?: string | false, req?: ResolveRequest) => void} callback callback
*/
resolveResource(

@@ -926,2 +1012,12 @@ contextInfo,

/**
* @param {Error} error error
* @param {ModuleFactoryCreateDataContextInfo} contextInfo context info
* @param {string} context context
* @param {string} unresolvedResource unresolved resource
* @param {ResolverWithOptions} resolver resolver
* @param {ResolveContext} resolveContext resolver context
* @param {Callback<string[]>} callback callback
* @private
*/
_resolveResourceErrorHints(

@@ -1038,3 +1134,3 @@ error,

if (err) return callback(err);
callback(null, hints.filter(Boolean));
callback(null, /** @type {string[]} */ (hints).filter(Boolean));
}

@@ -1044,2 +1140,11 @@ );

/**
* @param {ModuleFactoryCreateDataContextInfo} contextInfo context info
* @param {string} context context
* @param {LoaderItem[]} array array
* @param {ResolverWithOptions} resolver resolver
* @param {ResolveContext} resolveContext resolve context
* @param {Callback<LoaderItem[]>} callback callback
* @returns {void} result
*/
resolveRequestArray(

@@ -1053,2 +1158,3 @@ contextInfo,

) {
// LoaderItem
if (array.length === 0) return callback(null, array);

@@ -1095,6 +1201,7 @@ asyncLib.map(

? "commonjs"
: resolveRequest.descriptionFileData === undefined
: /** @type {ResolveRequest} */
(resolveRequest).descriptionFileData === undefined
? undefined
: resolveRequest.descriptionFileData.type;
: /** @type {ResolveRequest} */
(resolveRequest).descriptionFileData.type;
const resolved = {

@@ -1109,12 +1216,21 @@ loader: parsedResult.path,

: item.options,
ident: item.options === undefined ? undefined : item.ident
ident:
item.options === undefined
? undefined
: /** @type {string} */ (item.ident)
};
return callback(null, resolved);
return callback(null, /** @type {LoaderItem} */ (resolved));
}
);
},
callback
/** @type {Callback<TODO>} */ (callback)
);
}
/**
* @param {string} type type
* @param {ParserOptions} parserOptions parser options
* @returns {Parser} parser
*/
getParser(type, parserOptions = EMPTY_PARSER_OPTIONS) {

@@ -1140,3 +1256,3 @@ let cache = this.parserCache.get(type);

* @param {string} type type
* @param {{[k: string]: any}} parserOptions parser options
* @param {ParserOptions} parserOptions parser options
* @returns {Parser} parser

@@ -1158,2 +1274,7 @@ */

/**
* @param {string} type type of generator
* @param {GeneratorOptions} generatorOptions generator options
* @returns {Generator} generator
*/
getGenerator(type, generatorOptions = EMPTY_GENERATOR_OPTIONS) {

@@ -1177,2 +1298,7 @@ let cache = this.generatorCache.get(type);

/**
* @param {string} type type of generator
* @param {GeneratorOptions} generatorOptions generator options
* @returns {Generator} generator
*/
createGenerator(type, generatorOptions = {}) {

@@ -1194,2 +1320,7 @@ generatorOptions = mergeGlobalOptions(

/**
* @param {Parameters<ResolverFactory["get"]>[0]} type type of resolver
* @param {Parameters<ResolverFactory["get"]>[1]=} resolveOptions options
* @returns {ReturnType<ResolverFactory["get"]>} the resolver
*/
getResolver(type, resolveOptions) {

@@ -1196,0 +1327,0 @@ return this.resolverFactory.get(type, resolveOptions);

@@ -55,2 +55,3 @@ /*

/** @typedef {import("../Module").LibIdentOptions} LibIdentOptions */
/** @typedef {import("../Module").SourceTypes} SourceTypes */
/** @typedef {import("../ModuleGraph")} ModuleGraph */

@@ -714,3 +715,3 @@ /** @typedef {import("../ModuleGraphConnection")} ModuleGraphConnection */

/**
* @returns {Set<string>} types available (do not mutate)
* @returns {SourceTypes} types available (do not mutate)
*/

@@ -717,0 +718,0 @@ getSourceTypes() {

@@ -22,2 +22,3 @@ /*

/** @typedef {import("./Module").NeedBuildContext} NeedBuildContext */
/** @typedef {import("./Module").SourceTypes} SourceTypes */
/** @typedef {import("./RequestShortener")} RequestShortener */

@@ -50,3 +51,3 @@ /** @typedef {import("./ResolverFactory").ResolverWithOptions} ResolverWithOptions */

/**
* @returns {Set<string>} types available (do not mutate)
* @returns {SourceTypes} types available (do not mutate)
*/

@@ -53,0 +54,0 @@ getSourceTypes() {

@@ -16,3 +16,5 @@ /*

/** @typedef {import("enhanced-resolve").ResolveContext} ResolveContext */
/** @typedef {import("enhanced-resolve").ResolveOptions} ResolveOptions */
/** @typedef {import("enhanced-resolve").ResolveRequest} ResolveRequest */
/** @typedef {import("enhanced-resolve").Resolver} Resolver */

@@ -19,0 +21,0 @@ /** @typedef {import("../declarations/WebpackOptions").ResolveOptions} WebpackResolveOptions */

@@ -22,2 +22,3 @@ /*

/** @typedef {import("./Module").NeedBuildContext} NeedBuildContext */
/** @typedef {import("./Module").SourceTypes} SourceTypes */
/** @typedef {import("./RequestShortener")} RequestShortener */

@@ -127,3 +128,3 @@ /** @typedef {import("./ResolverFactory").ResolverWithOptions} ResolverWithOptions */

/**
* @returns {Set<string>} types available (do not mutate)
* @returns {SourceTypes} types available (do not mutate)
*/

@@ -130,0 +131,0 @@ getSourceTypes() {

@@ -28,2 +28,3 @@ /*

/** @typedef {import("../Module").NeedBuildContext} NeedBuildContext */
/** @typedef {import("../Module").SourceTypes} SourceTypes */
/** @typedef {import("../RequestShortener")} RequestShortener */

@@ -151,3 +152,3 @@ /** @typedef {import("../ResolverFactory").ResolverWithOptions} ResolverWithOptions */

/**
* @returns {Set<string>} types available (do not mutate)
* @returns {SourceTypes} types available (do not mutate)
*/

@@ -154,0 +155,0 @@ getSourceTypes() {

@@ -24,2 +24,3 @@ /*

/** @typedef {import("../Module").NeedBuildContext} NeedBuildContext */
/** @typedef {import("../Module").SourceTypes} SourceTypes */
/** @typedef {import("../RequestShortener")} RequestShortener */

@@ -124,3 +125,3 @@ /** @typedef {import("../ResolverFactory").ResolverWithOptions} ResolverWithOptions */

/**
* @returns {Set<string>} types available (do not mutate)
* @returns {SourceTypes} types available (do not mutate)
*/

@@ -127,0 +128,0 @@ getSourceTypes() {

@@ -53,3 +53,3 @@ /*

/**
* @param {(string|StatsOptions)=} options stats options
* @param {(string | boolean | StatsOptions)=} options stats options
* @returns {StatsCompilation} json output

@@ -70,3 +70,3 @@ */

/**
* @param {(string|StatsOptions)=} options stats options
* @param {(string | boolean | StatsOptions)=} options stats options
* @returns {string} string output

@@ -73,0 +73,0 @@ */

@@ -26,2 +26,4 @@ /*

// and all dependent resources
// eslint-disable-next-line no-warning-comments
// @ts-ignore
fn = undefined;

@@ -28,0 +30,0 @@ return /** @type {T} */ (result);

@@ -22,2 +22,3 @@ /*

/** @typedef {import("../Generator").GenerateContext} GenerateContext */
/** @typedef {import("../Module")} Module */
/** @typedef {import("../NormalModule")} NormalModule */

@@ -131,3 +132,3 @@ /** @typedef {import("../RuntimeTemplate")} RuntimeTemplate */

moduleGraph,
module: moduleGraph.getModule(dep),
module: /** @type {Module} */ (moduleGraph.getModule(dep)),
request: dep.request,

@@ -134,0 +135,0 @@ importVar: importData.importVar,

@@ -23,2 +23,3 @@ /*

/** @typedef {import("../declarations/WebpackOptions").WebpackOptions} WebpackOptions */
/** @typedef {import("../declarations/WebpackOptions").WebpackPluginFunction} WebpackPluginFunction */
/** @typedef {import("./Compiler").WatchOptions} WatchOptions */

@@ -34,3 +35,3 @@ /** @typedef {import("./MultiCompiler").MultiCompilerOptions} MultiCompilerOptions */

* @callback Callback
* @param {Error=} err
* @param {(Error | null)=} err
* @param {T=} stats

@@ -76,3 +77,4 @@ * @returns {void}

if (typeof plugin === "function") {
plugin.call(compiler, compiler);
/** @type {WebpackPluginFunction} */
(plugin).call(compiler, compiler);
} else if (plugin) {

@@ -166,3 +168,3 @@ plugin.apply(compiler);

} catch (err) {
process.nextTick(() => callback(err));
process.nextTick(() => callback(/** @type {Error} */ (err)));
return null;

@@ -169,0 +171,0 @@ }

{
"name": "webpack",
"version": "5.90.2",
"version": "5.90.3",
"author": "Tobias Koppers @sokra",

@@ -5,0 +5,0 @@ "description": "Packs ECMAScript/CommonJs/AMD modules for the browser. Allows you to split your codebase into multiple bundles, which can be loaded on demand. Supports loaders to preprocess files, i.e. json, jsx, es7, css, less, ... and your custom stuff.",

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc