Add JS-CATCH-FALLBACK-01 rule and update npm packages

Add PHP-ALIAS-01 rule: prohibit field aliasing in serialization

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
node_modules/webpack/lib/optimize/SplitChunksPlugin.js (generated, vendored): 106 changed lines
@@ -31,6 +31,7 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
 /** @typedef {import("../ChunkGroup")} ChunkGroup */
 /** @typedef {import("../Compiler")} Compiler */
 /** @typedef {import("../Module")} Module */
+/** @typedef {import("../Module").SourceType} SourceType */
 /** @typedef {import("../ModuleGraph")} ModuleGraph */
 /** @typedef {import("../TemplatedPathPlugin").TemplatePath} TemplatePath */
 /** @typedef {import("../util/deterministicGrouping").GroupedItems<Module>} DeterministicGroupingGroupedItemsForModule */
@@ -42,22 +43,26 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
  * @returns {boolean | undefined}
  */

+/** @typedef {number} Priority */
+/** @typedef {number} Size */
+/** @typedef {number} CountOfChunk */
+/** @typedef {number} CountOfRequest */
+
 /**
  * @callback CombineSizeFunction
- * @param {number} a
- * @param {number} b
- * @returns {number}
+ * @param {Size} a
+ * @param {Size} b
+ * @returns {Size}
  */

-/** @typedef {string} SourceType */
 /** @typedef {SourceType[]} SourceTypes */
 /** @typedef {SourceType[]} DefaultSizeTypes */
-/** @typedef {Record<SourceType, number>} SplitChunksSizes */
+/** @typedef {Record<SourceType, Size>} SplitChunksSizes */

 /**
  * @typedef {object} CacheGroupSource
  * @property {string} key
- * @property {number=} priority
+ * @property {Priority=} priority
  * @property {GetNameFn=} getName
  * @property {ChunkFilterFn=} chunksFilter
  * @property {boolean=} enforce
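Reviewer note, not part of the diff: these @typedef aliases are nominal only. A JSDoc `@typedef {number} Size` is a transparent alias, so the checker still treats `Size`, `Priority`, `CountOfChunk`, and `CountOfRequest` as plain `number`; the hunk improves documentation without adding enforcement. A minimal sketch (the file name and function are hypothetical):

// aliases.js; check with: npx tsc --allowJs --checkJs --noEmit aliases.js
/** @typedef {number} Size */
/** @typedef {number} CountOfChunk */

/**
 * @param {Size} size total size shared by several chunks
 * @param {CountOfChunk} count how many chunks share it
 * @returns {Size} size attributed to each chunk
 */
const sizePerChunk = (size, count) => size / count;

// A plain number is accepted wherever Size is expected: the alias is
// documentation, not a branded type.
console.log(sizePerChunk(40000, 2)); // 20000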
@@ -67,9 +72,9 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
  * @property {SplitChunksSizes} enforceSizeThreshold
  * @property {SplitChunksSizes} maxAsyncSize
  * @property {SplitChunksSizes} maxInitialSize
- * @property {number=} minChunks
- * @property {number=} maxAsyncRequests
- * @property {number=} maxInitialRequests
+ * @property {CountOfChunk=} minChunks
+ * @property {CountOfRequest=} maxAsyncRequests
+ * @property {CountOfRequest=} maxInitialRequests
  * @property {TemplatePath=} filename
  * @property {string=} idHint
  * @property {string=} automaticNameDelimiter
@@ -80,7 +85,7 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
 /**
  * @typedef {object} CacheGroup
  * @property {string} key
- * @property {number} priority
+ * @property {Priority} priority
  * @property {GetNameFn=} getName
  * @property {ChunkFilterFn} chunksFilter
  * @property {SplitChunksSizes} minSize
@@ -89,9 +94,9 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
  * @property {SplitChunksSizes} enforceSizeThreshold
  * @property {SplitChunksSizes} maxAsyncSize
  * @property {SplitChunksSizes} maxInitialSize
- * @property {number} minChunks
- * @property {number} maxAsyncRequests
- * @property {number} maxInitialRequests
+ * @property {CountOfChunk} minChunks
+ * @property {CountOfRequest} maxAsyncRequests
+ * @property {CountOfRequest} maxInitialRequests
  * @property {TemplatePath=} filename
  * @property {string} idHint
  * @property {string} automaticNameDelimiter
@@ -143,9 +148,9 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
  * @property {SplitChunksSizes} enforceSizeThreshold
  * @property {SplitChunksSizes} maxInitialSize
  * @property {SplitChunksSizes} maxAsyncSize
- * @property {number} minChunks
- * @property {number} maxAsyncRequests
- * @property {number} maxInitialRequests
+ * @property {CountOfChunk} minChunks
+ * @property {CountOfRequest} maxAsyncRequests
+ * @property {CountOfRequest} maxInitialRequests
  * @property {boolean} hidePathInfo
  * @property {TemplatePath=} filename
  * @property {string} automaticNameDelimiter
@@ -155,15 +160,17 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
  * @property {FallbackCacheGroup} fallbackCacheGroup
  */

+/** @typedef {Set<Chunk>} ChunkSet */
+
 /**
  * @typedef {object} ChunksInfoItem
  * @property {SortableSet<Module>} modules
  * @property {CacheGroup} cacheGroup
  * @property {number} cacheGroupIndex
  * @property {string=} name
- * @property {Record<SourceType, number>} sizes
- * @property {Set<Chunk>} chunks
- * @property {Set<Chunk>} reusableChunks
+ * @property {SplitChunksSizes} sizes
+ * @property {ChunkSet} chunks
+ * @property {ChunkSet} reusableChunks
  * @property {Set<bigint | Chunk>} chunksKeys
  */

@@ -195,7 +202,7 @@ const hashFilename = (name, outputOptions) => {

 /**
  * @param {Chunk} chunk the chunk
- * @returns {number} the number of requests
+ * @returns {CountOfRequest} the number of requests
  */
 const getRequests = (chunk) => {
 	let requests = 0;
@@ -209,7 +216,7 @@ const getRequests = (chunk) => {
  * @template {object} T
  * @template {object} R
  * @param {T} obj obj an object
- * @param {function(T[keyof T], keyof T): T[keyof T]} fn fn
+ * @param {(obj: T[keyof T], key: keyof T) => T[keyof T]} fn fn
  * @returns {T} result
  */
 const mapObject = (obj, fn) => {
@@ -293,7 +300,7 @@ const ALL_CHUNK_FILTER = (_chunk) => true;
  */
 const normalizeSizes = (value, defaultSizeTypes) => {
 	if (typeof value === "number") {
-		/** @type {Record<string, number>} */
+		/** @type {SplitChunksSizes} */
 		const o = {};
 		for (const sizeType of defaultSizeTypes) o[sizeType] = value;
 		return o;
|
||||
* @returns {boolean} true, if there are sizes > 0
|
||||
*/
|
||||
const hasNonZeroSizes = (sizes) => {
|
||||
for (const key of Object.keys(sizes)) {
|
||||
for (const key of /** @type {SourceType[]} */ (Object.keys(sizes))) {
|
||||
if (sizes[key] > 0) return true;
|
||||
}
|
||||
return false;
|
||||
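This hunk introduces the cast pattern repeated through the rest of the diff. `Object.keys` is declared as returning `string[]` regardless of the key type of its argument, so once `SplitChunksSizes` has typed keys, `sizes[key]` no longer type-checks without narrowing; the inline JSDoc cast re-types the keys. A self-contained illustration, assuming a literal-union `SourceType` (the union members are invented for the example):

/** @typedef {"javascript" | "css"} SourceType */
/** @typedef {Record<SourceType, number>} SplitChunksSizes */

/**
 * @param {SplitChunksSizes} sizes the sizes
 * @returns {boolean} true, if there are sizes > 0
 */
const hasNonZeroSizesSketch = (sizes) => {
	// Without the cast, `key` is a plain string and `sizes[key]` is an
	// indexing error; the cast asserts the keys are keys of `sizes`.
	for (const key of /** @type {SourceType[]} */ (Object.keys(sizes))) {
		if (sizes[key] > 0) return true;
	}
	return false;
};

console.log(hasNonZeroSizesSketch({ javascript: 123, css: 0 })); // true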
@@ -334,8 +341,8 @@ const hasNonZeroSizes = (sizes) => {
  * @returns {SplitChunksSizes} the combine sizes
  */
 const combineSizes = (a, b, combine) => {
-	const aKeys = new Set(Object.keys(a));
-	const bKeys = new Set(Object.keys(b));
+	const aKeys = /** @type {Set<SourceType>} */ (new Set(Object.keys(a)));
+	const bKeys = /** @type {Set<SourceType>} */ (new Set(Object.keys(b)));
 	/** @type {SplitChunksSizes} */
 	const result = {};
 	for (const key of aKeys) {
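The casts above keep `combineSizes` iterating typed keys while the merge itself is unchanged: combine where a size type exists on both sides, copy through where it exists on one. A runnable sketch of those semantics (the plain string keys and the `Sketch` name are mine, to keep it minimal):

/**
 * Combine two per-type size records over the union of their keys.
 * @param {Record<string, number>} a first sizes
 * @param {Record<string, number>} b second sizes
 * @param {(a: number, b: number) => number} combine applied where both sides have the type
 * @returns {Record<string, number>} combined sizes
 */
const combineSizesSketch = (a, b, combine) => {
	const aKeys = new Set(Object.keys(a));
	const bKeys = new Set(Object.keys(b));
	/** @type {Record<string, number>} */
	const result = {};
	for (const key of aKeys) {
		result[key] = bKeys.has(key) ? combine(a[key], b[key]) : a[key];
	}
	for (const key of bKeys) {
		if (!aKeys.has(key)) result[key] = b[key]; // present on one side only
	}
	return result;
};

// e.g. keeping the stricter (larger) threshold per type:
// { javascript: 30000, css: 5000 }
console.log(
	combineSizesSketch({ javascript: 30000 }, { javascript: 10000, css: 5000 }, Math.max)
);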
@@ -355,7 +362,7 @@ const combineSizes = (a, b, combine) => {
  * @returns {boolean} true if there are sizes and all existing sizes are at least `minSize`
  */
 const checkMinSize = (sizes, minSize) => {
-	for (const key of Object.keys(minSize)) {
+	for (const key of /** @type {SourceType[]} */ (Object.keys(minSize))) {
 		const size = sizes[key];
 		if (size === undefined || size === 0) continue;
 		if (size < minSize[key]) return false;
@@ -366,11 +373,13 @@ const checkMinSize = (sizes, minSize) => {
 /**
  * @param {SplitChunksSizes} sizes the sizes
  * @param {SplitChunksSizes} minSizeReduction the min sizes
- * @param {number} chunkCount number of chunks
+ * @param {CountOfChunk} chunkCount number of chunks
  * @returns {boolean} true if there are sizes and all existing sizes are at least `minSizeReduction`
  */
 const checkMinSizeReduction = (sizes, minSizeReduction, chunkCount) => {
-	for (const key of Object.keys(minSizeReduction)) {
+	for (const key of /** @type {SourceType[]} */ (
+		Object.keys(minSizeReduction)
+	)) {
 		const size = sizes[key];
 		if (size === undefined || size === 0) continue;
 		if (size * chunkCount < minSizeReduction[key]) return false;
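A note on the arithmetic this hunk touches, as I read it: the candidate's bytes are currently duplicated in every chunk they would be extracted from, so the projected saving per size type is `size * chunkCount`, and the split is rejected when that saving stays below `minSizeReduction`. A small worked sketch (names and numbers are illustrative, not from the commit):

/**
 * @param {Record<string, number>} sizes sizes of the would-be new chunk
 * @param {Record<string, number>} minSizeReduction required saving per type
 * @param {number} chunkCount chunks the modules would be removed from
 * @returns {boolean} true if every present type saves enough
 */
const checkMinSizeReductionSketch = (sizes, minSizeReduction, chunkCount) => {
	for (const key of Object.keys(minSizeReduction)) {
		const size = sizes[key];
		if (size === undefined || size === 0) continue;
		// The bytes exist once per containing chunk today, so the
		// deduplication saving scales with chunkCount.
		if (size * chunkCount < minSizeReduction[key]) return false;
	}
	return true;
};

// 8000 bytes duplicated across 3 chunks saves 24000 >= 20000
console.log(checkMinSizeReductionSketch({ javascript: 8000 }, { javascript: 20000 }, 3)); // true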
@@ -384,8 +393,9 @@ const checkMinSizeReduction = (sizes, minSizeReduction, chunkCount) => {
  * @returns {undefined | SourceTypes} list of size types that are below min size
  */
 const getViolatingMinSizes = (sizes, minSize) => {
+	/** @type {SourceTypes | undefined} */
 	let list;
-	for (const key of Object.keys(minSize)) {
+	for (const key of /** @type {SourceType[]} */ (Object.keys(minSize))) {
 		const size = sizes[key];
 		if (size === undefined || size === 0) continue;
 		if (size < minSize[key]) {
@@ -398,11 +408,11 @@ const getViolatingMinSizes = (sizes, minSize) => {

 /**
  * @param {SplitChunksSizes} sizes the sizes
- * @returns {number} the total size
+ * @returns {Size} the total size
  */
 const totalSize = (sizes) => {
 	let size = 0;
-	for (const key of Object.keys(sizes)) {
+	for (const key of /** @type {SourceType[]} */ (Object.keys(sizes))) {
 		size += sizes[key];
 	}
 	return size;
@@ -887,9 +897,9 @@ module.exports = class SplitChunksPlugin {
 				};

 				const getChunkSetsInGraph = memoize(() => {
-					/** @type {Map<bigint, Set<Chunk>>} */
+					/** @type {Map<bigint, ChunkSet>} */
 					const chunkSetsInGraph = new Map();
-					/** @type {Set<Chunk>} */
+					/** @type {ChunkSet} */
 					const singleChunkSets = new Set();
 					for (const module of compilation.modules) {
 						const chunks = chunkGraph.getModuleChunksIterable(module);
@@ -927,10 +937,12 @@ module.exports = class SplitChunksPlugin {
 				/** @type {Map<Module, Iterable<Chunk[]>>} */
 				const groupedByExportsMap = new Map();

+				/** @typedef {Map<bigint | Chunk, ChunkSet>} ChunkSetsInGraph */
+
 				const getExportsChunkSetsInGraph = memoize(() => {
-					/** @type {Map<bigint | Chunk, Set<Chunk>>} */
+					/** @type {ChunkSetsInGraph} */
 					const chunkSetsInGraph = new Map();
-					/** @type {Set<Chunk>} */
+					/** @type {ChunkSet} */
 					const singleChunkSets = new Set();
 					for (const module of compilation.modules) {
 						const groupedChunks = [...groupChunksByExports(module)];
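The `bigint | Chunk` key type deserves a gloss: a module that lives in exactly one chunk appears to use that `Chunk` object directly as its key, while multi-chunk sets are collapsed to a `bigint` fingerprint so that equal sets share one map entry. The diff does not show how the fingerprint is computed; the sketch below is one standard way to key sets order-independently, entirely my assumption rather than webpack's code:

// Give each chunk-like object a distinct bit, then key a set of chunks
// by OR-ing the bits: equal sets yield equal bigints in any order.
const chunkBits = new Map();
let nextBit = 0n;

const bitFor = (chunk) => {
	let bit = chunkBits.get(chunk);
	if (bit === undefined) {
		bit = 1n << nextBit++;
		chunkBits.set(chunk, bit);
	}
	return bit;
};

/** @param {object[]} chunks @returns {bigint | object} */
const keyFor = (chunks) =>
	chunks.length === 1
		? chunks[0]
		: chunks.reduce((key, chunk) => key | bitFor(chunk), 0n);

const a = { name: "a" };
const b = { name: "b" };
console.log(keyFor([a]) === a); // true: a single chunk keys as itself
console.log(keyFor([a, b]) === keyFor([b, a])); // true: order-independent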
@@ -949,13 +961,13 @@ module.exports = class SplitChunksPlugin {
 					return { chunkSetsInGraph, singleChunkSets };
 				});

-				/** @typedef {Map<number, Set<Chunk>[]>} ChunkSetsByCount */
+				/** @typedef {Map<CountOfChunk, ChunkSet[]>} ChunkSetsByCount */

 				// group these set of chunks by count
 				// to allow to check less sets via isSubset
 				// (only smaller sets can be subset)
 				/**
-				 * @param {IterableIterator<Set<Chunk>>} chunkSets set of sets of chunks
+				 * @param {IterableIterator<ChunkSet>} chunkSets set of sets of chunks
 				 * @returns {ChunkSetsByCount} map of sets of chunks by count
 				 */
 				const groupChunkSetsByCount = (chunkSets) => {
@@ -983,13 +995,13 @@ module.exports = class SplitChunksPlugin {
 					)
 				);

-				/** @typedef {(Set<Chunk> | Chunk)[]} Combinations */
+				/** @typedef {(ChunkSet | Chunk)[]} Combinations */

 				// Create a list of possible combinations
 				/**
-				 * @param {Map<bigint | Chunk, Set<Chunk>>} chunkSets chunk sets
-				 * @param {Set<Chunk>} singleChunkSets single chunks sets
-				 * @param {Map<number, Set<Chunk>[]>} chunkSetsByCount chunk sets by count
+				 * @param {ChunkSetsInGraph} chunkSets chunk sets
+				 * @param {ChunkSet} singleChunkSets single chunks sets
+				 * @param {ChunkSetsByCount} chunkSetsByCount chunk sets by count
 				 * @returns {(key: bigint | Chunk) => Combinations} combinations
 				 */
 				const createGetCombinations = (
@@ -1009,7 +1021,7 @@ module.exports = class SplitChunksPlugin {
 							return result;
 						}
 						const chunksSet =
-							/** @type {Set<Chunk>} */
+							/** @type {ChunkSet} */
 							(chunkSets.get(key));
 						/** @type {Combinations} */
 						const array = [chunksSet];
@@ -1070,13 +1082,13 @@ module.exports = class SplitChunksPlugin {
 				 * @property {bigint | Chunk} key a key of the list
 				 */

-				/** @type {WeakMap<Set<Chunk> | Chunk, WeakMap<ChunkFilterFn, SelectedChunksResult>>} */
+				/** @type {WeakMap<ChunkSet | Chunk, WeakMap<ChunkFilterFn, SelectedChunksResult>>} */
 				const selectedChunksCacheByChunksSet = new WeakMap();

 				/**
 				 * get list and key by applying the filter function to the list
 				 * It is cached for performance reasons
-				 * @param {Set<Chunk> | Chunk} chunks list of chunks
+				 * @param {ChunkSet | Chunk} chunks list of chunks
 				 * @param {ChunkFilterFn} chunkFilter filter function for chunks
 				 * @returns {SelectedChunksResult} list and key
 				 */
@@ -1269,7 +1281,7 @@ module.exports = class SplitChunksPlugin {
 				const getCombsByUsedExports = memoize(() => {
 					// fill the groupedByExportsMap
 					getExportsChunkSetsInGraph();
-					/** @type {Set<Set<Chunk> | Chunk>} */
+					/** @type {Set<ChunkSet | Chunk>} */
 					const set = new Set();
 					const groupedByUsedExports =
 						/** @type {Iterable<Chunk[]>} */
@@ -1703,7 +1715,9 @@ module.exports = class SplitChunksPlugin {
 					if (Object.keys(maxSize).length === 0) {
 						continue;
 					}
-					for (const key of Object.keys(maxSize)) {
+					for (const key of /** @type {SourceType[]} */ (
+						Object.keys(maxSize)
+					)) {
 						const maxSizeValue = maxSize[key];
 						const minSizeValue = minSize[key];
 						if (