webpack - npm package version comparison

Comparing version 5.58.0 to 5.58.1

lib/ChunkGraph.js

@@ -9,3 +9,2 @@ /*

const util = require("util");
const ChunkCombination = require("./ChunkCombination");
const Entrypoint = require("./Entrypoint");

@@ -45,4 +44,2 @@ const ModuleGraphConnection = require("./ModuleGraphConnection");

const EMPTY_RUNTIME_SPEC_SET = new RuntimeSpecSet();
const ZERO_BIG_INT = BigInt(0);

@@ -185,3 +182,4 @@

constructor() {
this.chunkCombination = ChunkCombination.empty;
/** @type {SortableSet<Chunk>} */
this.chunks = new SortableSet();
/** @type {Set<Chunk> | undefined} */

@@ -309,3 +307,3 @@ this.entryInChunks = undefined;

const cgc = this._getChunkGraphChunk(chunk);
cgm.chunkCombination = cgm.chunkCombination.with(chunk);
cgm.chunks.add(chunk);
cgc.modules.add(module);

@@ -323,3 +321,3 @@ }

cgc.modules.delete(module);
cgm.chunkCombination = cgm.chunkCombination.without(chunk);
cgm.chunks.delete(chunk);
}

@@ -335,3 +333,3 @@

const cgm = this._getChunkGraphModule(module);
cgm.chunkCombination = cgm.chunkCombination.without(chunk);
cgm.chunks.delete(chunk);
}

@@ -403,9 +401,9 @@ cgc.modules.clear();

for (const chunk of oldCgm.chunkCombination._chunks) {
for (const chunk of oldCgm.chunks) {
const cgc = this._getChunkGraphChunk(chunk);
cgc.modules.delete(oldModule);
cgc.modules.add(newModule);
newCgm.chunkCombination = newCgm.chunkCombination.with(chunk);
newCgm.chunks.add(chunk);
}
oldCgm.chunkCombination = ChunkCombination.empty;
oldCgm.chunks.clear();

@@ -499,11 +497,2 @@ if (oldCgm.entryInChunks !== undefined) {

* @param {Module} module the module
* @returns {ChunkCombination} chunk combination (do not modify)
*/
getModuleChunkCombination(module) {
const cgm = this._getChunkGraphModule(module);
return cgm.chunkCombination;
}
/**
* @param {Module} module the module
* @returns {Iterable<Chunk>} iterable of chunks (do not modify)

@@ -513,3 +502,3 @@ */

const cgm = this._getChunkGraphModule(module);
return cgm.chunkCombination._chunks;
return cgm.chunks;
}

@@ -524,5 +513,4 @@

const cgm = this._getChunkGraphModule(module);
const chunks = cgm.chunkCombination._chunks;
chunks.sortWith(sortFn);
return chunks;
cgm.chunks.sortWith(sortFn);
return cgm.chunks;
}

@@ -536,3 +524,3 @@

const cgm = this._getChunkGraphModule(module);
return cgm.chunkCombination.getChunks();
return cgm.chunks.getFromCache(getArray);
}

@@ -546,3 +534,3 @@

const cgm = this._getChunkGraphModule(module);
return cgm.chunkCombination.size;
return cgm.chunks.size;
}

@@ -556,6 +544,3 @@

const cgm = this._getChunkGraphModule(module);
if (cgm.chunkCombination.size === 0) return EMPTY_RUNTIME_SPEC_SET;
return cgm.chunkCombination._chunks.getFromUnorderedCache(
getModuleRuntimes
);
return cgm.chunks.getFromUnorderedCache(getModuleRuntimes);
}
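
These accessors now read straight from cgm.chunks, a SortableSet whose getFromCache / getFromUnorderedCache calls memoize a derived value (here an array of chunks, or the set of module runtimes) until the set is mutated again. A minimal sketch of that memoize-until-mutation idea follows; it is an illustrative class, not webpack's SortableSet, and it keeps a single cache slot where the real implementation caches per callback.

// Minimal sketch of the memoize-until-mutation idea behind getFromCache above
// (illustrative class, not webpack's SortableSet; single cache slot only).
class CachingSet extends Set {
	add(item) {
		this._cache = undefined; // any mutation invalidates the cached value
		return super.add(item);
	}
	delete(item) {
		this._cache = undefined;
		return super.delete(item);
	}
	getFromCache(fn) {
		if (this._cache === undefined) this._cache = fn(this);
		return this._cache; // reused until the next add/delete
	}
}

const chunks = new CachingSet();
chunks.add("a");
chunks.add("b");
const getArray = set => Array.from(set);
chunks.getFromCache(getArray) === chunks.getFromCache(getArray); // true (cached)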

@@ -924,3 +909,4 @@

for (const module of this.getChunkModulesIterable(chunkB)) {
// getChunkModules is used here to create a clone, because disconnectChunkAndModule modifies
for (const module of this.getChunkModules(chunkB)) {
this.disconnectChunkAndModule(chunkB, module);

@@ -927,0 +913,0 @@ this.connectChunkAndModule(chunkA, module);
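
The comment in this hunk explains the switch from getChunkModulesIterable to getChunkModules: the latter returns a copied array, so disconnectChunkAndModule can freely mutate the underlying chunk/module bookkeeping while the loop walks the snapshot. A tiny illustration of that snapshot-then-mutate pattern, with strings standing in for Module objects (not webpack code):

// Snapshot-then-mutate: iterate a copy so mutations cannot affect the traversal.
const connected = new Set(["m1", "m2", "m3"]);

const snapshot = Array.from(connected); // what a cloning getter effectively returns
for (const m of snapshot) {
	connected.delete(m); // mutating the live Set is harmless to the loop over the copy
}
console.log(connected.size); // 0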

@@ -469,19 +469,2 @@ /*

const resourceDataForRules = matchResourceData || resourceData;
const result = this.ruleSet.exec({
resource: resourceDataForRules.path,
realResource: resourceData.path,
resourceQuery: resourceDataForRules.query,
resourceFragment: resourceDataForRules.fragment,
scheme,
assertions,
mimetype: matchResourceData ? "" : resourceData.data.mimetype || "",
dependency: dependencyType,
descriptionData: matchResourceData
? undefined
: resourceData.data.descriptionFileData,
issuer: contextInfo.issuer,
compiler: contextInfo.compiler,
issuerLayer: contextInfo.issuerLayer || ""
});
const settings = {};

@@ -491,24 +474,60 @@ const useLoadersPost = [];

const useLoadersPre = [];
for (const r of result) {
if (r.type === "use") {
if (!noAutoLoaders && !noPrePostAutoLoaders) {
useLoaders.push(r.value);
// handle .webpack[] suffix
let resource;
let match;
if (
matchResourceData &&
typeof (resource = matchResourceData.resource) === "string" &&
(match = /\.webpack\[([^\]]+)\]$/.exec(resource))
) {
settings.type = match[1];
matchResourceData.resource = matchResourceData.resource.slice(
0,
-settings.type.length - 10
);
} else {
settings.type = "javascript/auto";
const resourceDataForRules = matchResourceData || resourceData;
const result = this.ruleSet.exec({
resource: resourceDataForRules.path,
realResource: resourceData.path,
resourceQuery: resourceDataForRules.query,
resourceFragment: resourceDataForRules.fragment,
scheme,
assertions,
mimetype: matchResourceData
? ""
: resourceData.data.mimetype || "",
dependency: dependencyType,
descriptionData: matchResourceData
? undefined
: resourceData.data.descriptionFileData,
issuer: contextInfo.issuer,
compiler: contextInfo.compiler,
issuerLayer: contextInfo.issuerLayer || ""
});
for (const r of result) {
if (r.type === "use") {
if (!noAutoLoaders && !noPrePostAutoLoaders) {
useLoaders.push(r.value);
}
} else if (r.type === "use-post") {
if (!noPrePostAutoLoaders) {
useLoadersPost.push(r.value);
}
} else if (r.type === "use-pre") {
if (!noPreAutoLoaders && !noPrePostAutoLoaders) {
useLoadersPre.push(r.value);
}
} else if (
typeof r.value === "object" &&
r.value !== null &&
typeof settings[r.type] === "object" &&
settings[r.type] !== null
) {
settings[r.type] = cachedCleverMerge(settings[r.type], r.value);
} else {
settings[r.type] = r.value;
}
} else if (r.type === "use-post") {
if (!noPrePostAutoLoaders) {
useLoadersPost.push(r.value);
}
} else if (r.type === "use-pre") {
if (!noPreAutoLoaders && !noPrePostAutoLoaders) {
useLoadersPre.push(r.value);
}
} else if (
typeof r.value === "object" &&
r.value !== null &&
typeof settings[r.type] === "object" &&
settings[r.type] !== null
) {
settings[r.type] = cachedCleverMerge(settings[r.type], r.value);
} else {
settings[r.type] = r.value;
}
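
The added block moves the `.webpack[type]` suffix handling in front of the rule-set evaluation, so a forced module type is already known (or defaulted to "javascript/auto") before the rules run. The slice offset of `-settings.type.length - 10` strips the suffix: 9 characters for ".webpack[" plus 1 for the closing "]". A hedged standalone sketch of that parsing, with a hypothetical helper name and an illustrative input:

// Hedged sketch of the suffix parsing above (hypothetical helper, not webpack's API).
const WEBPACK_TYPE_SUFFIX = /\.webpack\[([^\]]+)\]$/;

function parseForcedType(resource) {
	const match = WEBPACK_TYPE_SUFFIX.exec(resource);
	if (!match) return { resource, type: "javascript/auto" };
	const type = match[1];
	return {
		// ".webpack[" is 9 chars, "]" is 1 char => strip type.length + 10 characters
		resource: resource.slice(0, -type.length - 10),
		type
	};
}

parseForcedType("./logo.svg.webpack[asset/resource]");
// => { resource: "./logo.svg", type: "asset/resource" }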

@@ -533,19 +552,2 @@ }

let type = settings.type;
if (!type) {
let resource;
let match;
if (
matchResourceData &&
typeof (resource = matchResourceData.resource) === "string" &&
(match = /\.webpack\[([^\]]+)\]$/.exec(resource))
) {
type = match[1];
matchResourceData.resource = matchResourceData.resource.slice(
0,
-type.length - 10
);
} else {
type = "javascript/auto";
}
}
const resolveOptions = settings.resolve;

@@ -552,0 +554,0 @@ const layer = settings.layer;

@@ -8,6 +8,7 @@ /*

const ChunkCombination = require("../ChunkCombination");
const Chunk = require("../Chunk");
const { STAGE_ADVANCED } = require("../OptimizationStages");
const WebpackError = require("../WebpackError");
const { requestToId } = require("../ids/IdHelpers");
const { isSubset } = require("../util/SetHelpers");
const SortableSet = require("../util/SortableSet");

@@ -29,3 +30,2 @@ const {

/** @typedef {import("../../declarations/WebpackOptions").Output} OutputOptions */
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../ChunkGraph")} ChunkGraph */

@@ -160,5 +160,5 @@ /** @typedef {import("../ChunkGroup")} ChunkGroup */

* @property {Record<string, number>} sizes
* @property {ChunkCombination} chunks
* @property {Set<Chunk>} chunks
* @property {Set<Chunk>} reuseableChunks
* @property {Set<ChunkCombination>} chunkCombinations
* @property {Set<bigint | Chunk>} chunksKeys
*/

@@ -210,2 +210,15 @@

/**
* @template T
* @param {Set<T>} a set
* @param {Set<T>} b other set
* @returns {boolean} true if at least one item of a is in b
*/
const isOverlap = (a, b) => {
for (const item of a) {
if (b.has(item)) return true;
}
return false;
};
const compareModuleIterables = compareIterables(compareModulesByIdentifier);
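
The new isOverlap helper short-circuits as soon as any element of the first set is found in the second; further down it replaces the previous hasSharedChunks check when invalidating chunksInfoMap entries. A quick usage sketch with illustrative values:

// Usage sketch for the isOverlap helper above (illustrative values).
isOverlap(new Set([1, 2, 3]), new Set([3, 4])); // true: 3 is in both sets
isOverlap(new Set([1, 2, 3]), new Set([5]));    // false: no shared element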

@@ -763,12 +776,52 @@

const moduleGraph = compilation.moduleGraph;
// Give each selected chunk an index (to create strings from chunks)
/** @type {Map<Chunk, bigint>} */
const chunkIndexMap = new Map();
const ZERO = BigInt("0");
const ONE = BigInt("1");
let index = ONE;
for (const chunk of chunks) {
chunkIndexMap.set(chunk, index);
index = index << ONE;
}
/**
* @param {Iterable<Chunk>} chunks list of chunks
* @returns {bigint | Chunk} key of the chunks
*/
const getKey = chunks => {
const iterator = chunks[Symbol.iterator]();
let result = iterator.next();
if (result.done) return ZERO;
const first = result.value;
result = iterator.next();
if (result.done) return first;
let key =
chunkIndexMap.get(first) | chunkIndexMap.get(result.value);
while (!(result = iterator.next()).done) {
key = key | chunkIndexMap.get(result.value);
}
return key;
};
const keyToString = key => {
if (typeof key === "bigint") return key.toString(16);
return chunkIndexMap.get(key).toString(16);
};
const getChunkCombinationsInGraph = memoize(() => {
/** @type {Set<ChunkCombination>} */
const chunkCombinationsInGraph = new Set();
const getChunkSetsInGraph = memoize(() => {
/** @type {Map<bigint, Set<Chunk>>} */
const chunkSetsInGraph = new Map();
/** @type {Set<Chunk>} */
const singleChunkSets = new Set();
for (const module of compilation.modules) {
const chunkCombination =
chunkGraph.getModuleChunkCombination(module);
chunkCombinationsInGraph.add(chunkCombination);
const chunks = chunkGraph.getModuleChunksIterable(module);
const chunksKey = getKey(chunks);
if (typeof chunksKey === "bigint") {
if (!chunkSetsInGraph.has(chunksKey)) {
chunkSetsInGraph.set(chunksKey, new Set(chunks));
}
} else {
singleChunkSets.add(chunksKey);
}
}
return chunkCombinationsInGraph;
return { chunkSetsInGraph, singleChunkSets };
});
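
Where 5.58.0 interned chunk sets as shared ChunkCombination objects, this code keys them with bigints instead: every selected chunk gets a distinct power-of-two bit, and a set of chunks is identified by OR-ing those bits, while a single-chunk set simply uses the Chunk object itself as its key (hence the bigint | Chunk key type). A minimal sketch of the bitmask idea, with strings standing in for Chunk objects (illustrative, not webpack's code):

// Minimal sketch of the bigint bitmask keying (strings stand in for Chunk objects).
const chunkIndexMap = new Map();
let bit = 1n;
for (const chunk of ["a", "b", "c", "d"]) {
	chunkIndexMap.set(chunk, bit); // a=0b0001n, b=0b0010n, c=0b0100n, d=0b1000n
	bit <<= 1n;
}

// OR-ing the per-chunk bits gives one bigint per distinct chunk set,
// independent of iteration order.
const keyOf = chunks =>
	chunks.reduce((key, chunk) => key | chunkIndexMap.get(chunk), 0n);

keyOf(["a", "c"]) === keyOf(["c", "a"]); // true  -> both are 0b0101n (5n)
keyOf(["a", "c"]) === keyOf(["a", "b"]); // false -> 5n !== 3n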

@@ -778,3 +831,3 @@

* @param {Module} module the module
* @returns {Iterable<ChunkCombination>} groups of chunks with equal exports
* @returns {Iterable<Chunk[]>} groups of chunks with equal exports
*/

@@ -786,5 +839,8 @@ const groupChunksByExports = module => {

const key = exportsInfo.getUsageKey(chunk.runtime);
const combination =
groupedByUsedExports.get(key) || ChunkCombination.empty;
groupedByUsedExports.set(key, combination.with(chunk));
const list = groupedByUsedExports.get(key);
if (list !== undefined) {
list.push(chunk);
} else {
groupedByUsedExports.set(key, [chunk]);
}
}

@@ -794,16 +850,25 @@ return groupedByUsedExports.values();

/** @type {Map<Module, Iterable<ChunkCombination>>} */
/** @type {Map<Module, Iterable<Chunk[]>>} */
const groupedByExportsMap = new Map();
const getExportsChunkCombinationsInGraph = memoize(() => {
/** @type {Set<ChunkCombination>} */
const chunkCombinationsInGraph = new Set();
const getExportsChunkSetsInGraph = memoize(() => {
/** @type {Map<bigint, Set<Chunk>>} */
const chunkSetsInGraph = new Map();
/** @type {Set<Chunk>} */
const singleChunkSets = new Set();
for (const module of compilation.modules) {
const groupedChunks = Array.from(groupChunksByExports(module));
groupedByExportsMap.set(module, groupedChunks);
for (const chunkCombination of groupedChunks) {
chunkCombinationsInGraph.add(chunkCombination);
for (const chunks of groupedChunks) {
if (chunks.length === 1) {
singleChunkSets.add(chunks[0]);
} else {
const chunksKey = /** @type {bigint} */ (getKey(chunks));
if (!chunkSetsInGraph.has(chunksKey)) {
chunkSetsInGraph.set(chunksKey, new Set(chunks));
}
}
}
}
return chunkCombinationsInGraph;
return { chunkSetsInGraph, singleChunkSets };
});

@@ -814,51 +879,52 @@

// (only smaller sets can be subset)
const groupChunkCombinationsByCount = chunkCombinations => {
/** @type {Map<number, ChunkCombination[]>} */
const chunkCombinationsByCount = new Map();
for (const chunksSet of chunkCombinations) {
const groupChunkSetsByCount = chunkSets => {
/** @type {Map<number, Array<Set<Chunk>>>} */
const chunkSetsByCount = new Map();
for (const chunksSet of chunkSets) {
const count = chunksSet.size;
let array = chunkCombinationsByCount.get(count);
let array = chunkSetsByCount.get(count);
if (array === undefined) {
array = [];
chunkCombinationsByCount.set(count, array);
chunkSetsByCount.set(count, array);
}
array.push(chunksSet);
}
return chunkCombinationsByCount;
return chunkSetsByCount;
};
const getChunkCombinationsByCount = memoize(() =>
groupChunkCombinationsByCount(getChunkCombinationsInGraph())
const getChunkSetsByCount = memoize(() =>
groupChunkSetsByCount(
getChunkSetsInGraph().chunkSetsInGraph.values()
)
);
const getExportsChunkCombinationsByCount = memoize(() =>
groupChunkCombinationsByCount(getExportsChunkCombinationsInGraph())
const getExportsChunkSetsByCount = memoize(() =>
groupChunkSetsByCount(
getExportsChunkSetsInGraph().chunkSetsInGraph.values()
)
);
/**
* Create a list of possible combinations
* @param {Map<number, ChunkCombination[]>} chunkCombinationsByCount by count
* @returns {function(ChunkCombination): ChunkCombination[]} get combinations function
*/
const createGetCombinations = chunkCombinationsByCount => {
/** @type {Map<ChunkCombination, ChunkCombination[]>} */
// Create a list of possible combinations
const createGetCombinations = (
chunkSets,
singleChunkSets,
chunkSetsByCount
) => {
/** @type {Map<bigint | Chunk, (Set<Chunk> | Chunk)[]>} */
const combinationsCache = new Map();
/**
* @param {ChunkCombination} chunkCombination chunkCombination
* @returns {ChunkCombination[]} combinations
*/
return chunkCombination => {
const cacheEntry = combinationsCache.get(chunkCombination);
return key => {
const cacheEntry = combinationsCache.get(key);
if (cacheEntry !== undefined) return cacheEntry;
if (chunkCombination.size === 1) {
const result = [chunkCombination];
combinationsCache.set(chunkCombination, result);
if (key instanceof Chunk) {
const result = [key];
combinationsCache.set(key, result);
return result;
}
/** @type {ChunkCombination[]} */
const array = [chunkCombination];
for (const [count, setArray] of chunkCombinationsByCount) {
const chunksSet = chunkSets.get(key);
/** @type {(Set<Chunk> | Chunk)[]} */
const array = [chunksSet];
for (const [count, setArray] of chunkSetsByCount) {
// "equal" is not needed because they would have been merge in the first step
if (count < chunkCombination.size) {
if (count < chunksSet.size) {
for (const set of setArray) {
if (chunkCombination.isSubset(set)) {
if (isSubset(chunksSet, set)) {
array.push(set);

@@ -869,3 +935,8 @@ }

}
combinationsCache.set(chunkCombination, array);
for (const chunk of singleChunkSets) {
if (chunksSet.has(chunk)) {
array.push(chunk);
}
}
combinationsCache.set(key, array);
return array;

@@ -876,3 +947,8 @@ };

const getCombinationsFactory = memoize(() => {
return createGetCombinations(getChunkCombinationsByCount());
const { chunkSetsInGraph, singleChunkSets } = getChunkSetsInGraph();
return createGetCombinations(
chunkSetsInGraph,
singleChunkSets,
getChunkSetsByCount()
);
});

@@ -882,3 +958,9 @@ const getCombinations = key => getCombinationsFactory()(key);

const getExportsCombinationsFactory = memoize(() => {
return createGetCombinations(getExportsChunkCombinationsByCount());
const { chunkSetsInGraph, singleChunkSets } =
getExportsChunkSetsInGraph();
return createGetCombinations(
chunkSetsInGraph,
singleChunkSets,
getExportsChunkSetsByCount()
);
});

@@ -888,11 +970,17 @@ const getExportsCombinations = key =>

/** @type {WeakMap<ChunkCombination, WeakMap<ChunkFilterFunction, ChunkCombination>>} */
/**
* @typedef {Object} SelectedChunksResult
* @property {Chunk[]} chunks the list of chunks
* @property {bigint | Chunk} key a key of the list
*/
/** @type {WeakMap<Set<Chunk> | Chunk, WeakMap<ChunkFilterFunction, SelectedChunksResult>>} */
const selectedChunksCacheByChunksSet = new WeakMap();
/**
* get chunks by applying the filter function to the list
* get list and key by applying the filter function to the list
* It is cached for performance reasons
* @param {ChunkCombination} chunks list of chunks
* @param {Set<Chunk> | Chunk} chunks list of chunks
* @param {ChunkFilterFunction} chunkFilter filter function for chunks
* @returns {ChunkCombination} selected chunks
* @returns {SelectedChunksResult} list and key
*/

@@ -905,12 +993,18 @@ const getSelectedChunks = (chunks, chunkFilter) => {

}
/** @type {ChunkCombination} */
/** @type {SelectedChunksResult} */
let entry2 = entry.get(chunkFilter);
if (entry2 === undefined) {
/** @type {ChunkCombination} */
let selectedChunks = ChunkCombination.empty;
for (const chunk of chunks.chunksIterable) {
if (chunkFilter(chunk))
selectedChunks = selectedChunks.with(chunk);
/** @type {Chunk[]} */
const selectedChunks = [];
if (chunks instanceof Chunk) {
if (chunkFilter(chunks)) selectedChunks.push(chunks);
} else {
for (const chunk of chunks) {
if (chunkFilter(chunk)) selectedChunks.push(chunk);
}
}
entry2 = selectedChunks;
entry2 = {
chunks: selectedChunks,
key: getKey(selectedChunks)
};
entry.set(chunkFilter, entry2);
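
getSelectedChunks caches one SelectedChunksResult per (chunk set, filter function) pair: a WeakMap keyed by the chunk set (or single Chunk) holds an inner WeakMap keyed by the filter function, so cached entries can be garbage-collected once either key is no longer referenced. The shape of that two-level memoization in isolation (illustrative names, not webpack's code):

// Two-level WeakMap memoization, as used above (illustrative names).
const cache = new WeakMap(); // outer key: the chunk set (or single Chunk)

function cachedSelect(chunks, chunkFilter, compute) {
	let byFilter = cache.get(chunks);
	if (byFilter === undefined) {
		byFilter = new WeakMap(); // inner key: the filter function
		cache.set(chunks, byFilter);
	}
	let result = byFilter.get(chunkFilter);
	if (result === undefined) {
		result = compute(chunks, chunkFilter); // e.g. { chunks: [...], key: ... }
		byFilter.set(chunkFilter, result);
	}
	return result;
}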

@@ -934,3 +1028,4 @@ }

* @param {number} cacheGroupIndex the index of the cache group of ordering
* @param {ChunkCombination} selectedChunks chunks selected for this module
* @param {Chunk[]} selectedChunks chunks selected for this module
* @param {bigint | Chunk} selectedChunksKey a key of selectedChunks
* @param {Module} module the current module

@@ -943,10 +1038,11 @@ * @returns {void}

selectedChunks,
selectedChunksKey,
module
) => {
// Break if minimum number of chunks is not reached
if (selectedChunks.size < cacheGroup.minChunks) return;
if (selectedChunks.length < cacheGroup.minChunks) return;
// Determine name for split chunk
const name = cacheGroup.getName(
module,
selectedChunks.getChunks(),
selectedChunks,
cacheGroup.key

@@ -957,3 +1053,7 @@ );

if (existingChunk) {
const parentValidationKey = `${name}|${selectedChunks.debugId}`;
const parentValidationKey = `${name}|${
typeof selectedChunksKey === "bigint"
? selectedChunksKey
: selectedChunksKey.debugId
}`;
const valid = alreadyValidatedParents.get(parentValidationKey);

@@ -967,3 +1067,3 @@ if (valid === false) return;

const queue = new Set();
for (const chunk of selectedChunks.chunksIterable) {
for (const chunk of selectedChunks) {
for (const group of chunk.groupsIterable) {

@@ -1014,3 +1114,5 @@ queue.add(group);

cacheGroup.key +
(name ? ` name:${name}` : ` chunks:${selectedChunks.debugId}`);
(name
? ` name:${name}`
: ` chunks:${keyToString(selectedChunksKey)}`);
// Add module to maps

@@ -1030,5 +1132,5 @@ let info = chunksInfoMap.get(key);

sizes: {},
chunks: ChunkCombination.empty,
chunks: new Set(),
reuseableChunks: new Set(),
chunkCombinations: new Set()
chunksKeys: new Set()
})

@@ -1044,6 +1146,8 @@ );

}
const oldChunksKeysSize = info.chunkCombinations.size;
info.chunkCombinations.add(selectedChunks);
if (oldChunksKeysSize !== info.chunkCombinations.size) {
info.chunks = info.chunks.withAll(selectedChunks);
const oldChunksKeysSize = info.chunksKeys.size;
info.chunksKeys.add(selectedChunksKey);
if (oldChunksKeysSize !== info.chunksKeys.size) {
for (const chunk of selectedChunks) {
info.chunks.add(chunk);
}
}

@@ -1069,5 +1173,24 @@ };

const chunkCombination =
chunkGraph.getModuleChunkCombination(module);
// Prepare some values (usedExports = false)
const getCombs = memoize(() => {
const chunks = chunkGraph.getModuleChunksIterable(module);
const chunksKey = getKey(chunks);
return getCombinations(chunksKey);
});
// Prepare some values (usedExports = true)
const getCombsByUsedExports = memoize(() => {
// fill the groupedByExportsMap
getExportsChunkSetsInGraph();
/** @type {Set<Set<Chunk> | Chunk>} */
const set = new Set();
const groupedByUsedExports = groupedByExportsMap.get(module);
for (const chunks of groupedByUsedExports) {
const chunksKey = getKey(chunks);
for (const comb of getExportsCombinations(chunksKey))
set.add(comb);
}
return set;
});
let cacheGroupIndex = 0;

@@ -1077,40 +1200,14 @@ for (const cacheGroupSource of cacheGroups) {

// Break if minimum number of chunks is not reached
if (chunkCombination.size < cacheGroup.minChunks) continue;
/** @type {Iterable<ChunkCombination>} */
let combs;
if (cacheGroup.usedExports) {
// fill the groupedByExportsMap
getExportsChunkCombinationsInGraph();
/** @type {Set<ChunkCombination>} */
const set = new Set();
const groupedByUsedExports = groupedByExportsMap.get(module);
for (const chunkCombination of groupedByUsedExports) {
const preSelectedChunks = getSelectedChunks(
chunkCombination,
cacheGroup.chunksFilter
);
// Break if minimum number of chunks is not reached
if (preSelectedChunks.size < cacheGroup.minChunks) continue;
for (const comb of getExportsCombinations(preSelectedChunks))
set.add(comb);
}
combs = set;
} else {
const preSelectedChunks = getSelectedChunks(
chunkCombination,
cacheGroup.chunksFilter
);
// Break if minimum number of chunks is not reached
if (preSelectedChunks.size < cacheGroup.minChunks) continue;
combs = getCombinations(preSelectedChunks);
}
const combs = cacheGroup.usedExports
? getCombsByUsedExports()
: getCombs();
// For all combination of chunk selection
for (const selectedChunks of combs) {
for (const chunkCombination of combs) {
// Break if minimum number of chunks is not reached
const count = chunkCombination.size;
const count =
chunkCombination instanceof Chunk ? 1 : chunkCombination.size;
if (count < cacheGroup.minChunks) continue;
// Select chunks by configuration
const { chunks: selectedChunks, key: selectedChunksKey } =
getSelectedChunks(chunkCombination, cacheGroup.chunksFilter);

@@ -1121,2 +1218,3 @@ addModuleToChunksInfoMap(

selectedChunks,
selectedChunksKey,
module

@@ -1213,8 +1311,8 @@ );

newChunk = chunkByName;
const newChunks = item.chunks.without(newChunk);
isExistingChunk = newChunks !== item.chunks;
if (isExistingChunk) item.chunks = newChunks;
const oldSize = item.chunks.size;
item.chunks.delete(newChunk);
isExistingChunk = item.chunks.size !== oldSize;
}
} else if (item.cacheGroup.reuseExistingChunk) {
outer: for (const chunk of item.chunks.chunksIterable) {
outer: for (const chunk of item.chunks) {
if (

@@ -1253,3 +1351,3 @@ chunkGraph.getNumberOfChunkModules(chunk) !==

if (newChunk) {
item.chunks = item.chunks.without(newChunk);
item.chunks.delete(newChunk);
chunkName = undefined;

@@ -1265,3 +1363,3 @@ isExistingChunk = true;

let usedChunks = item.chunks;
const usedChunks = new Set(item.chunks);

@@ -1274,3 +1372,3 @@ // Check if maxRequests condition can be fulfilled

) {
for (const chunk of usedChunks.chunksIterable) {
for (const chunk of usedChunks) {
// respect max requests

@@ -1289,3 +1387,3 @@ const maxRequests = chunk.isOnlyInitial()

) {
usedChunks = usedChunks.without(chunk);
usedChunks.delete(chunk);
}

@@ -1295,7 +1393,7 @@ }

outer: for (const chunk of usedChunks.chunksIterable) {
outer: for (const chunk of usedChunks) {
for (const module of item.modules) {
if (chunkGraph.isModuleInChunk(module, chunk)) continue outer;
}
usedChunks = usedChunks.without(chunk);
usedChunks.delete(chunk);
}

@@ -1305,5 +1403,6 @@

// => readd all modules to the queue, as things could have been changed
if (usedChunks !== item.chunks) {
if (isExistingChunk) usedChunks = usedChunks.with(newChunk);
if (usedChunks.size < item.chunks.size) {
if (isExistingChunk) usedChunks.add(newChunk);
if (usedChunks.size >= item.cacheGroup.minChunks) {
const chunksArr = Array.from(usedChunks);
for (const module of item.modules) {

@@ -1313,3 +1412,4 @@ addModuleToChunksInfoMap(

item.cacheGroupIndex,
usedChunks,
chunksArr,
getKey(usedChunks),
module

@@ -1328,3 +1428,3 @@ );

) {
const [chunk] = usedChunks.chunksIterable;
const [chunk] = usedChunks;
let chunkSizes = Object.create(null);

@@ -1363,3 +1463,3 @@ for (const module of chunkGraph.getChunkModulesIterable(chunk)) {

// Walk through all chunks
for (const chunk of usedChunks.chunksIterable) {
for (const chunk of usedChunks) {
// Add graph connections for splitted chunk

@@ -1394,3 +1494,3 @@ chunk.split(newChunk);

// Remove module from used chunks
for (const chunk of usedChunks.chunksIterable) {
for (const chunk of usedChunks) {
chunkGraph.disconnectChunkAndModule(chunk, module);

@@ -1402,3 +1502,3 @@ }

for (const module of item.modules) {
for (const chunk of usedChunks.chunksIterable) {
for (const chunk of usedChunks) {
chunkGraph.disconnectChunkAndModule(chunk, module);

@@ -1445,3 +1545,3 @@ }

for (const [key, info] of chunksInfoMap) {
if (info.chunks.hasSharedChunks(usedChunks)) {
if (isOverlap(info.chunks, usedChunks)) {
// update modules and total size

@@ -1448,0 +1548,0 @@ // may remove it from the map when < minSize

@@ -8,3 +8,2 @@ /*

const WebpackError = require("../WebpackError");
const { someInIterable } = require("../util/IterableHelpers");

@@ -47,7 +46,3 @@ /** @typedef {import("../ChunkGraph")} ChunkGraph */

if (newHead) {
if (
!someInIterable(chunkGraph.getModuleChunksIterable(newHead), c =>
c.canBeInitial()
)
)
if (!chunkGraph.getModuleChunks(newHead).some(c => c.canBeInitial()))
continue;

@@ -54,0 +49,0 @@ final = false;

{
"name": "webpack",
"version": "5.58.0",
"version": "5.58.1",
"author": "Tobias Koppers @sokra",

@@ -5,0 +5,0 @@ "description": "Packs CommonJs/AMD modules for the browser. Allows to split your codebase into multiple bundles, which can be loaded on demand. Support loaders to preprocess files, i.e. json, jsx, es7, css, less, ... and your custom stuff.",

The diffs of two further files are too large to display.
