Add JS-CATCH-FALLBACK-01 rule and update npm packages
Add PHP-ALIAS-01 rule: prohibit field aliasing in serialization

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
node_modules/webpack/lib/optimize/AggressiveMergingPlugin.js (generated, vendored): 0 changes, Executable file → Normal file

node_modules/webpack/lib/optimize/AggressiveSplittingPlugin.js (generated, vendored): 0 changes, Executable file → Normal file

node_modules/webpack/lib/optimize/ConcatenatedModule.js (generated, vendored): 79 changes, Executable file → Normal file
@@ -16,7 +16,10 @@ const {
 const ConcatenationScope = require("../ConcatenationScope");
 const { UsageState } = require("../ExportsInfo");
 const Module = require("../Module");
-const { JS_TYPES } = require("../ModuleSourceTypesConstants");
+const {
+	JAVASCRIPT_TYPE,
+	JAVASCRIPT_TYPES
+} = require("../ModuleSourceTypeConstants");
 const { JAVASCRIPT_MODULE_TYPE_ESM } = require("../ModuleTypeConstants");
 const RuntimeGlobals = require("../RuntimeGlobals");
 const Template = require("../Template");
@@ -132,8 +135,10 @@ if (!ReferencerClass.prototype.PropertyDefinition) {
  * @property {ExportName} exportName
  */
 
-/** @typedef {ConcatenatedModuleInfo | ExternalModuleInfo } ModuleInfo */
-/** @typedef {ConcatenatedModuleInfo | ExternalModuleInfo | ReferenceToModuleInfo } ModuleInfoOrReference */
+/** @typedef {ConcatenatedModuleInfo | ExternalModuleInfo} ModuleInfo */
+/** @typedef {ConcatenatedModuleInfo | ExternalModuleInfo | ReferenceToModuleInfo} ModuleInfoOrReference */
+
+/** @typedef {Map<string, string>} ExportMap */
 
 /**
  * @typedef {object} ConcatenatedModuleInfo
@@ -148,8 +153,8 @@ if (!ReferencerClass.prototype.PropertyDefinition) {
  * @property {Scope | undefined} globalScope
  * @property {Scope | undefined} moduleScope
  * @property {Map<string, string>} internalNames
- * @property {Map<string, string> | undefined} exportMap
- * @property {Map<string, string> | undefined} rawExportMap
+ * @property {ExportMap | undefined} exportMap
+ * @property {ExportMap | undefined} rawExportMap
  * @property {string=} namespaceExportSymbol
  * @property {string | undefined} namespaceObjectName
  * @property {ConcatenationScope | undefined} concatenationScope
@@ -192,7 +197,7 @@ if (!ReferencerClass.prototype.PropertyDefinition) {
 /**
  * @template T
  * @param {string} property property
- * @param {function(T[keyof T], T[keyof T]): 0 | 1 | -1} comparator comparator
+ * @param {(a: T[keyof T], b: T[keyof T]) => 0 | 1 | -1} comparator comparator
  * @returns {Comparator<T>} comparator
  */
 
@@ -684,7 +689,7 @@ const getFinalName = (
 
 /**
  * @typedef {object} ConcatenateModuleHooks
- * @property {SyncBailHook<[ConcatenatedModule], boolean>} onDemandExportsGeneration
+ * @property {SyncBailHook<[ConcatenatedModule, RuntimeSpec[], string, Record<string, string>], boolean>} onDemandExportsGeneration
  * @property {SyncBailHook<[Partial<ConcatenatedModuleInfo>, ConcatenatedModuleInfo], boolean | void>} concatenatedModuleInfo
  */
 
@@ -734,7 +739,12 @@ class ConcatenatedModule extends Module {
 		let hooks = compilationHooksMap.get(compilation);
 		if (hooks === undefined) {
 			hooks = {
-				onDemandExportsGeneration: new SyncBailHook(["module"]),
+				onDemandExportsGeneration: new SyncBailHook([
+					"module",
+					"runtimes",
+					"exportsFinalName",
+					"exportsSource"
+				]),
 				concatenatedModuleInfo: new SyncBailHook([
 					"updatedInfo",
 					"concatenatedModuleInfo"
@@ -784,7 +794,7 @@ class ConcatenatedModule extends Module {
 	 * @returns {SourceTypes} types available (do not mutate)
 	 */
 	getSourceTypes() {
-		return JS_TYPES;
+		return JAVASCRIPT_TYPES;
 	}
 
 	get modules() {
@@ -1254,6 +1264,7 @@ class ConcatenatedModule extends Module {
 		moduleGraph,
 		chunkGraph,
 		runtime: generationRuntime,
+		runtimes,
 		codeGenerationResults
 	}) {
 		const { concatenatedModuleInfo } = ConcatenatedModule.getCompilationHooks(
@@ -1289,6 +1300,7 @@ class ConcatenatedModule extends Module {
 			moduleGraph,
 			chunkGraph,
 			runtime,
+			runtimes,
 			/** @type {CodeGenerationResults} */
 			(codeGenerationResults),
 			allUsedNames
@@ -1407,7 +1419,7 @@ class ConcatenatedModule extends Module {
 		 * @param {string} name the name to find a new name for
 		 * @param {ConcatenatedModuleInfo} info the info of the module
 		 * @param {Reference[]} references the references to the name
-		 * @returns {string|undefined} the new name or undefined if the name is not found
+		 * @returns {string | undefined} the new name or undefined if the name is not found
 		 */
 		const _findNewName = (name, info, references) => {
 			const { usedNames, alreadyCheckedScopes } = getUsedNamesInScopeInfo(
@@ -1441,7 +1453,7 @@ class ConcatenatedModule extends Module {
 		 * @param {string} name the name to find a new name for
 		 * @param {ConcatenatedModuleInfo} info the info of the module
 		 * @param {Reference[]} references the references to the name
-		 * @returns {string|undefined} the new name or undefined if the name is not found
+		 * @returns {string | undefined} the new name or undefined if the name is not found
 		 */
 		const _findNewNameForSpecifier = (name, info, references) => {
 			const { usedNames: moduleUsedNames, alreadyCheckedScopes } =
@@ -1772,9 +1784,6 @@ class ConcatenatedModule extends Module {
 			);
 		}
 
-		const { onDemandExportsGeneration } =
-			ConcatenatedModule.getCompilationHooks(this.compilation);
-
 		runtimeRequirements.add(RuntimeGlobals.exports);
 		runtimeRequirements.add(RuntimeGlobals.definePropertyGetters);
 
@@ -1788,21 +1797,24 @@
 			);
 		}
 
-		if (onDemandExportsGeneration.call(this)) {
-			/** @type {BuildMeta} */ (this.buildMeta).factoryExportsBinding =
-				"\n// EXPORTS\n" +
-				`${RuntimeGlobals.definePropertyGetters}(${
-					this.exportsArgument
-				}, {${definitions.join(",")}\n});\n`;
-			/** @type {BuildMeta} */ (this.buildMeta).exportsFinalName =
-				exportsFinalName;
-		} else {
-			result.add("\n// EXPORTS\n");
-			result.add(
-				`${RuntimeGlobals.definePropertyGetters}(${
-					this.exportsArgument
-				}, {${definitions.join(",")}\n});\n`
-			);
+		const exportsSource =
+			"\n// EXPORTS\n" +
+			`${RuntimeGlobals.definePropertyGetters}(${this.exportsArgument}, {${definitions.join(
+				","
+			)}\n});\n`;
+
+		const { onDemandExportsGeneration } =
+			ConcatenatedModule.getCompilationHooks(this.compilation);
+
+		if (
+			!onDemandExportsGeneration.call(
+				this,
+				runtimes,
+				exportsSource,
+				exportsFinalName
+			)
+		) {
+			result.add(exportsSource);
 		}
 	}
 
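Not part of the vendored diff itself, but useful for orientation: the hunks above widen the onDemandExportsGeneration SyncBailHook so that a plugin receives the runtimes, the generated exports source, and the final export names, and can bail out of the default result.add(exportsSource) call. The sketch below is an illustration under assumptions: the plugin name is invented, and the callback parameter order mirrors the onDemandExportsGeneration.call(this, runtimes, exportsSource, exportsFinalName) site shown above (the hook registration lists its string names in a different order, so verify against the installed webpack version).

```js
// Illustrative only; not part of this commit. MyExportsPlugin is a made-up name.
const ConcatenatedModule = require("webpack/lib/optimize/ConcatenatedModule");

class MyExportsPlugin {
	apply(compiler) {
		compiler.hooks.thisCompilation.tap("MyExportsPlugin", (compilation) => {
			// getCompilationHooks is the accessor shown in the diff above.
			const hooks = ConcatenatedModule.getCompilationHooks(compilation);
			hooks.onDemandExportsGeneration.tap(
				"MyExportsPlugin",
				(module, runtimes, exportsSource, exportsFinalName) => {
					// Inspect or record the generated "// EXPORTS" source here.
					// Returning a truthy value bails the SyncBailHook, which per
					// the hunk above skips the default result.add(exportsSource).
					return false;
				}
			);
		});
	}
}

module.exports = MyExportsPlugin;
```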
@@ -2008,7 +2020,7 @@ ${defineGetters}`
 
 		/** @type {CodeGenerationResult} */
 		const resultEntry = {
-			sources: new Map([["javascript", new CachedSource(result)]]),
+			sources: new Map([[JAVASCRIPT_TYPE, new CachedSource(result)]]),
 			data,
 			runtimeRequirements
 		};
@@ -2024,6 +2036,7 @@ ${defineGetters}`
 	 * @param {ModuleGraph} moduleGraph moduleGraph
 	 * @param {ChunkGraph} chunkGraph chunkGraph
 	 * @param {RuntimeSpec} runtime runtime
+	 * @param {RuntimeSpec[]} runtimes runtimes
 	 * @param {CodeGenerationResults} codeGenerationResults codeGenerationResults
 	 * @param {Set<string>} usedNames used names
 	 */
@@ -2035,6 +2048,7 @@ ${defineGetters}`
 		moduleGraph,
 		chunkGraph,
 		runtime,
+		runtimes,
 		codeGenerationResults,
 		usedNames
 	) {
@@ -2055,13 +2069,14 @@ ${defineGetters}`
 			moduleGraph,
 			chunkGraph,
 			runtime,
+			runtimes,
 			concatenationScope,
 			codeGenerationResults,
-			sourceTypes: JS_TYPES
+			sourceTypes: JAVASCRIPT_TYPES
 		});
 		const source =
 			/** @type {Source} */
-			(codeGenResult.sources.get("javascript"));
+			(codeGenResult.sources.get(JAVASCRIPT_TYPE));
 		const data = codeGenResult.data;
 		const chunkInitFragments = data && data.get("chunkInitFragments");
 		const code = source.source().toString();

node_modules/webpack/lib/optimize/ModuleConcatenationPlugin.js (generated, vendored): 9 changes

@@ -8,7 +8,7 @@
 const asyncLib = require("neo-async");
 const ChunkGraph = require("../ChunkGraph");
 const ModuleGraph = require("../ModuleGraph");
-const { JS_TYPE } = require("../ModuleSourceTypesConstants");
+const { JAVASCRIPT_TYPE } = require("../ModuleSourceTypeConstants");
 const { STAGE_DEFAULT } = require("../OptimizationStages");
 const HarmonyImportDependency = require("../dependencies/HarmonyImportDependency");
 const { compareModulesByIdentifier } = require("../util/comparators");
@@ -460,7 +460,7 @@ class ModuleConcatenationPlugin {
 					chunkGraph.disconnectChunkAndModule(chunk, m);
 				} else {
 					const newSourceTypes = new Set(sourceTypes);
-					newSourceTypes.delete(JS_TYPE);
+					newSourceTypes.delete(JAVASCRIPT_TYPE);
 					chunkGraph.setChunkModuleSourceTypes(
 						chunk,
 						m,
@@ -865,6 +865,7 @@ class ModuleConcatenationPlugin {
 }
 
 /** @typedef {Module | ((requestShortener: RequestShortener) => string)} Problem */
+/** @typedef {Map<Module, Problem>} Warnings */
 
 class ConcatConfiguration {
 	/**
@@ -877,7 +878,7 @@ class ConcatConfiguration {
 		/** @type {Set<Module>} */
 		this.modules = new Set();
 		this.modules.add(rootModule);
-		/** @type {Map<Module, Problem>} */
+		/** @type {Warnings} */
 		this.warnings = new Map();
 	}
 
@@ -909,7 +910,7 @@ class ConcatConfiguration {
 	}
 
 	/**
-	 * @returns {Map<Module, Problem>} warnings
+	 * @returns {Warnings} warnings
 	 */
 	getWarningsSorted() {
 		return new Map(

node_modules/webpack/lib/optimize/SideEffectsFlagPlugin.js (generated, vendored): 5 changes

@@ -45,7 +45,7 @@ const globToRegexpCache = new WeakMap();
 
 /**
  * @param {string} glob the pattern
- * @param {Map<string, RegExp>} cache the glob to RegExp cache
+ * @param {CacheItem} cache the glob to RegExp cache
  * @returns {RegExp} a regular expression
  */
 const globToRegexp = (glob, cache) => {
@@ -381,6 +381,7 @@ class SideEffectsFlagPlugin {
 					for (const module of modules) {
 						optimizeIncomingConnections(module);
 					}
+					moduleGraph.finishUpdateParent();
 					logger.timeEnd("update dependencies");
 				}
 			);
@@ -391,7 +392,7 @@ class SideEffectsFlagPlugin {
 	/**
 	 * @param {string} moduleName the module name
 	 * @param {SideEffectsFlagValue} flagValue the flag value
-	 * @param {Map<string, RegExp>} cache cache for glob to regexp
+	 * @param {CacheItem} cache cache for glob to regexp
	 * @returns {boolean | undefined} true, when the module has side effects, undefined or false when not
 	 */
 	static moduleHasSideEffects(moduleName, flagValue, cache) {
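As a side note (not part of this commit), the static helper whose JSDoc changes above can be exercised directly. A minimal sketch, assuming the second argument carries a package.json "sideEffects" value (boolean, glob string, or array of globs) and that a plain Map stands in for the CacheItem glob-to-RegExp cache; the paths and globs are invented:

```js
// Illustrative call only; inputs are hypothetical.
const SideEffectsFlagPlugin = require("webpack/lib/optimize/SideEffectsFlagPlugin");

// A Map plays the role of the CacheItem glob cache from the hunk above.
const globCache = new Map();

// flagValue mirrors a package.json "sideEffects" field. Per the updated JSDoc,
// the result is boolean | undefined: truthy when the module is flagged as
// having side effects, false or undefined when it can be treated as pure.
const result = SideEffectsFlagPlugin.moduleHasSideEffects(
	"./src/polyfill.js",
	["./src/*.js", "*.css"],
	globCache
);
console.log(result);
```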

node_modules/webpack/lib/optimize/SplitChunksPlugin.js (generated, vendored): 106 changes

@@ -31,6 +31,7 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
 /** @typedef {import("../ChunkGroup")} ChunkGroup */
 /** @typedef {import("../Compiler")} Compiler */
 /** @typedef {import("../Module")} Module */
+/** @typedef {import("../Module").SourceType} SourceType */
 /** @typedef {import("../ModuleGraph")} ModuleGraph */
 /** @typedef {import("../TemplatedPathPlugin").TemplatePath} TemplatePath */
 /** @typedef {import("../util/deterministicGrouping").GroupedItems<Module>} DeterministicGroupingGroupedItemsForModule */
@@ -42,22 +43,26 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
  * @returns {boolean | undefined}
  */
 
+/** @typedef {number} Priority */
+/** @typedef {number} Size */
+/** @typedef {number} CountOfChunk */
+/** @typedef {number} CountOfRequest */
+
 /**
  * @callback CombineSizeFunction
- * @param {number} a
- * @param {number} b
- * @returns {number}
+ * @param {Size} a
+ * @param {Size} b
+ * @returns {Size}
 */
 
-/** @typedef {string} SourceType */
 /** @typedef {SourceType[]} SourceTypes */
 /** @typedef {SourceType[]} DefaultSizeTypes */
-/** @typedef {Record<SourceType, number>} SplitChunksSizes */
+/** @typedef {Record<SourceType, Size>} SplitChunksSizes */
 
 /**
  * @typedef {object} CacheGroupSource
 * @property {string} key
- * @property {number=} priority
+ * @property {Priority=} priority
 * @property {GetNameFn=} getName
 * @property {ChunkFilterFn=} chunksFilter
 * @property {boolean=} enforce
@@ -67,9 +72,9 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
 * @property {SplitChunksSizes} enforceSizeThreshold
 * @property {SplitChunksSizes} maxAsyncSize
 * @property {SplitChunksSizes} maxInitialSize
- * @property {number=} minChunks
- * @property {number=} maxAsyncRequests
- * @property {number=} maxInitialRequests
+ * @property {CountOfChunk=} minChunks
+ * @property {CountOfRequest=} maxAsyncRequests
+ * @property {CountOfRequest=} maxInitialRequests
 * @property {TemplatePath=} filename
 * @property {string=} idHint
 * @property {string=} automaticNameDelimiter
@@ -80,7 +85,7 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
 /**
 * @typedef {object} CacheGroup
 * @property {string} key
- * @property {number} priority
+ * @property {Priority} priority
 * @property {GetNameFn=} getName
 * @property {ChunkFilterFn} chunksFilter
 * @property {SplitChunksSizes} minSize
@@ -89,9 +94,9 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
 * @property {SplitChunksSizes} enforceSizeThreshold
 * @property {SplitChunksSizes} maxAsyncSize
 * @property {SplitChunksSizes} maxInitialSize
- * @property {number} minChunks
- * @property {number} maxAsyncRequests
- * @property {number} maxInitialRequests
+ * @property {CountOfChunk} minChunks
+ * @property {CountOfRequest} maxAsyncRequests
+ * @property {CountOfRequest} maxInitialRequests
 * @property {TemplatePath=} filename
 * @property {string} idHint
 * @property {string} automaticNameDelimiter
@@ -143,9 +148,9 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
 * @property {SplitChunksSizes} enforceSizeThreshold
 * @property {SplitChunksSizes} maxInitialSize
 * @property {SplitChunksSizes} maxAsyncSize
- * @property {number} minChunks
- * @property {number} maxAsyncRequests
- * @property {number} maxInitialRequests
+ * @property {CountOfChunk} minChunks
+ * @property {CountOfRequest} maxAsyncRequests
+ * @property {CountOfRequest} maxInitialRequests
 * @property {boolean} hidePathInfo
 * @property {TemplatePath=} filename
 * @property {string} automaticNameDelimiter
@@ -155,15 +160,17 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
 * @property {FallbackCacheGroup} fallbackCacheGroup
 */
 
+/** @typedef {Set<Chunk>} ChunkSet */
+
 /**
 * @typedef {object} ChunksInfoItem
 * @property {SortableSet<Module>} modules
 * @property {CacheGroup} cacheGroup
 * @property {number} cacheGroupIndex
 * @property {string=} name
- * @property {Record<SourceType, number>} sizes
- * @property {Set<Chunk>} chunks
- * @property {Set<Chunk>} reusableChunks
+ * @property {SplitChunksSizes} sizes
+ * @property {ChunkSet} chunks
+ * @property {ChunkSet} reusableChunks
 * @property {Set<bigint | Chunk>} chunksKeys
 */
 
@@ -195,7 +202,7 @@ const hashFilename = (name, outputOptions) => {
 
 /**
 * @param {Chunk} chunk the chunk
- * @returns {number} the number of requests
+ * @returns {CountOfRequest} the number of requests
 */
 const getRequests = (chunk) => {
 	let requests = 0;
@@ -209,7 +216,7 @@
 * @template {object} T
 * @template {object} R
 * @param {T} obj obj an object
- * @param {function(T[keyof T], keyof T): T[keyof T]} fn fn
+ * @param {(obj: T[keyof T], key: keyof T) => T[keyof T]} fn fn
 * @returns {T} result
 */
 const mapObject = (obj, fn) => {
@@ -293,7 +300,7 @@ const ALL_CHUNK_FILTER = (_chunk) => true;
 */
 const normalizeSizes = (value, defaultSizeTypes) => {
 	if (typeof value === "number") {
-		/** @type {Record<string, number>} */
+		/** @type {SplitChunksSizes} */
 		const o = {};
 		for (const sizeType of defaultSizeTypes) o[sizeType] = value;
 		return o;
@@ -321,7 +328,7 @@ const mergeSizes = (...sizes) => {
 * @returns {boolean} true, if there are sizes > 0
 */
 const hasNonZeroSizes = (sizes) => {
-	for (const key of Object.keys(sizes)) {
+	for (const key of /** @type {SourceType[]} */ (Object.keys(sizes))) {
 		if (sizes[key] > 0) return true;
 	}
 	return false;
@@ -334,8 +341,8 @@
 * @returns {SplitChunksSizes} the combine sizes
 */
 const combineSizes = (a, b, combine) => {
-	const aKeys = new Set(Object.keys(a));
-	const bKeys = new Set(Object.keys(b));
+	const aKeys = /** @type {Set<SourceType>} */ (new Set(Object.keys(a)));
+	const bKeys = /** @type {Set<SourceType>} */ (new Set(Object.keys(b)));
 	/** @type {SplitChunksSizes} */
 	const result = {};
 	for (const key of aKeys) {
@@ -355,7 +362,7 @@ const combineSizes = (a, b, combine) => {
 * @returns {boolean} true if there are sizes and all existing sizes are at least `minSize`
 */
 const checkMinSize = (sizes, minSize) => {
-	for (const key of Object.keys(minSize)) {
+	for (const key of /** @type {SourceType[]} */ (Object.keys(minSize))) {
 		const size = sizes[key];
 		if (size === undefined || size === 0) continue;
 		if (size < minSize[key]) return false;
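The typedef tightening above spells out what these helpers operate on: a SplitChunksSizes object maps each SourceType (for example "javascript") to a Size in bytes. Below is a standalone sketch of the same per-type minimum check that the checkMinSize loop performs; it is not the plugin's exported API, and the numbers are invented.

```js
// Standalone illustration of the SplitChunksSizes shape and the minSize check.

/** @typedef {Record<string, number>} Sizes */

/** @type {Sizes} */
const sizes = { javascript: 35000, css: 1200 };

/** @type {Sizes} */
const minSize = { javascript: 20000, css: 5000 };

/**
 * Mirrors the loop in checkMinSize: every non-zero size type listed in the
 * minimum must be at least as large as its configured value.
 * @param {Sizes} measured measured sizes per source type
 * @param {Sizes} min configured minimum per source type
 * @returns {boolean} true when no size type violates its minimum
 */
const meetsMinSize = (measured, min) => {
	for (const key of Object.keys(min)) {
		const size = measured[key];
		if (size === undefined || size === 0) continue;
		if (size < min[key]) return false;
	}
	return true;
};

console.log(meetsMinSize(sizes, minSize)); // false here: css is below its minimum
```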
@@ -366,11 +373,13 @@ const checkMinSize = (sizes, minSize) => {
 /**
 * @param {SplitChunksSizes} sizes the sizes
 * @param {SplitChunksSizes} minSizeReduction the min sizes
- * @param {number} chunkCount number of chunks
+ * @param {CountOfChunk} chunkCount number of chunks
 * @returns {boolean} true if there are sizes and all existing sizes are at least `minSizeReduction`
 */
 const checkMinSizeReduction = (sizes, minSizeReduction, chunkCount) => {
-	for (const key of Object.keys(minSizeReduction)) {
+	for (const key of /** @type {SourceType[]} */ (
+		Object.keys(minSizeReduction)
+	)) {
 		const size = sizes[key];
 		if (size === undefined || size === 0) continue;
 		if (size * chunkCount < minSizeReduction[key]) return false;
@@ -384,8 +393,9 @@ const checkMinSizeReduction = (sizes, minSizeReduction, chunkCount) => {
 * @returns {undefined | SourceTypes} list of size types that are below min size
 */
 const getViolatingMinSizes = (sizes, minSize) => {
+	/** @type {SourceTypes | undefined} */
 	let list;
-	for (const key of Object.keys(minSize)) {
+	for (const key of /** @type {SourceType[]} */ (Object.keys(minSize))) {
 		const size = sizes[key];
 		if (size === undefined || size === 0) continue;
 		if (size < minSize[key]) {
@@ -398,11 +408,11 @@
 
 /**
 * @param {SplitChunksSizes} sizes the sizes
- * @returns {number} the total size
+ * @returns {Size} the total size
 */
 const totalSize = (sizes) => {
 	let size = 0;
-	for (const key of Object.keys(sizes)) {
+	for (const key of /** @type {SourceType[]} */ (Object.keys(sizes))) {
 		size += sizes[key];
 	}
 	return size;
@@ -887,9 +897,9 @@ module.exports = class SplitChunksPlugin {
 		};
 
 		const getChunkSetsInGraph = memoize(() => {
-			/** @type {Map<bigint, Set<Chunk>>} */
+			/** @type {Map<bigint, ChunkSet>} */
 			const chunkSetsInGraph = new Map();
-			/** @type {Set<Chunk>} */
+			/** @type {ChunkSet} */
 			const singleChunkSets = new Set();
 			for (const module of compilation.modules) {
 				const chunks = chunkGraph.getModuleChunksIterable(module);
@@ -927,10 +937,12 @@
 		/** @type {Map<Module, Iterable<Chunk[]>>} */
 		const groupedByExportsMap = new Map();
 
+		/** @typedef {Map<bigint | Chunk, ChunkSet>} ChunkSetsInGraph */
+
 		const getExportsChunkSetsInGraph = memoize(() => {
-			/** @type {Map<bigint | Chunk, Set<Chunk>>} */
+			/** @type {ChunkSetsInGraph} */
 			const chunkSetsInGraph = new Map();
-			/** @type {Set<Chunk>} */
+			/** @type {ChunkSet} */
 			const singleChunkSets = new Set();
 			for (const module of compilation.modules) {
 				const groupedChunks = [...groupChunksByExports(module)];
@@ -949,13 +961,13 @@
 			return { chunkSetsInGraph, singleChunkSets };
 		});
 
-		/** @typedef {Map<number, Set<Chunk>[]>} ChunkSetsByCount */
+		/** @typedef {Map<CountOfChunk, ChunkSet[]>} ChunkSetsByCount */
 
 		// group these set of chunks by count
 		// to allow to check less sets via isSubset
 		// (only smaller sets can be subset)
 		/**
-		 * @param {IterableIterator<Set<Chunk>>} chunkSets set of sets of chunks
+		 * @param {IterableIterator<ChunkSet>} chunkSets set of sets of chunks
 		 * @returns {ChunkSetsByCount} map of sets of chunks by count
 		 */
 		const groupChunkSetsByCount = (chunkSets) => {
@@ -983,13 +995,13 @@
 				)
 			);
 
-		/** @typedef {(Set<Chunk> | Chunk)[]} Combinations */
+		/** @typedef {(ChunkSet | Chunk)[]} Combinations */
 
 		// Create a list of possible combinations
 		/**
-		 * @param {Map<bigint | Chunk, Set<Chunk>>} chunkSets chunk sets
-		 * @param {Set<Chunk>} singleChunkSets single chunks sets
-		 * @param {Map<number, Set<Chunk>[]>} chunkSetsByCount chunk sets by count
+		 * @param {ChunkSetsInGraph} chunkSets chunk sets
+		 * @param {ChunkSet} singleChunkSets single chunks sets
+		 * @param {ChunkSetsByCount} chunkSetsByCount chunk sets by count
 		 * @returns {(key: bigint | Chunk) => Combinations} combinations
 		 */
 		const createGetCombinations = (
@@ -1009,7 +1021,7 @@
 				return result;
 			}
 			const chunksSet =
-				/** @type {Set<Chunk>} */
+				/** @type {ChunkSet} */
 				(chunkSets.get(key));
 			/** @type {Combinations} */
 			const array = [chunksSet];
@@ -1070,13 +1082,13 @@
 		 * @property {bigint | Chunk} key a key of the list
 		 */
 
-		/** @type {WeakMap<Set<Chunk> | Chunk, WeakMap<ChunkFilterFn, SelectedChunksResult>>} */
+		/** @type {WeakMap<ChunkSet | Chunk, WeakMap<ChunkFilterFn, SelectedChunksResult>>} */
 		const selectedChunksCacheByChunksSet = new WeakMap();
 
 		/**
		 * get list and key by applying the filter function to the list
		 * It is cached for performance reasons
-		 * @param {Set<Chunk> | Chunk} chunks list of chunks
+		 * @param {ChunkSet | Chunk} chunks list of chunks
		 * @param {ChunkFilterFn} chunkFilter filter function for chunks
		 * @returns {SelectedChunksResult} list and key
		 */
@@ -1269,7 +1281,7 @@
 			const getCombsByUsedExports = memoize(() => {
 				// fill the groupedByExportsMap
 				getExportsChunkSetsInGraph();
-				/** @type {Set<Set<Chunk> | Chunk>} */
+				/** @type {Set<ChunkSet | Chunk>} */
 				const set = new Set();
 				const groupedByUsedExports =
 					/** @type {Iterable<Chunk[]>} */
@@ -1703,7 +1715,9 @@
 				if (Object.keys(maxSize).length === 0) {
 					continue;
 				}
-				for (const key of Object.keys(maxSize)) {
+				for (const key of /** @type {SourceType[]} */ (
+					Object.keys(maxSize)
+				)) {
 					const maxSizeValue = maxSize[key];
 					const minSizeValue = minSize[key];
 					if (