Enhance refactor commands with controller-aware Route() updates and fix code quality violations
Add semantic token highlighting for 'that' variable and comment file references in VS Code extension
Add Phone_Text_Input and Currency_Input components with formatting utilities
Implement client widgets, form standardization, and soft delete functionality
Add modal scroll lock and update documentation
Implement comprehensive modal system with form integration and validation
Fix modal component instantiation using jQuery plugin API
Implement modal system with responsive sizing, queuing, and validation support
Implement form submission with validation, error handling, and loading states
Implement country/state selectors with dynamic data loading and Bootstrap styling
Revert Rsx::Route() highlighting in Blade/PHP files
Target specific PHP scopes for Rsx::Route() highlighting in Blade
Expand injection selector for Rsx::Route() highlighting
Add custom syntax highlighting for Rsx::Route() and Rsx.Route() calls
Update jqhtml packages to v2.2.165
Add bundle path validation for common mistakes (development mode only)
Create Ajax_Select_Input widget and Rsx_Reference_Data controller
Create Country_Select_Input widget with default country support
Initialize Tom Select on Select_Input widgets
Add Tom Select bundle for enhanced select dropdowns
Implement ISO 3166 geographic data system for country/region selection
Implement widget-based form system with disabled state support

🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude <noreply@anthropic.com>
195 node_modules/webpack/lib/FileSystemInfo.js (generated, vendored)
@@ -18,7 +18,6 @@ const makeSerializable = require("./util/makeSerializable");
const memoize = require("./util/memoize");
const processAsyncTree = require("./util/processAsyncTree");

/** @typedef {import("enhanced-resolve").Resolver} Resolver */
/** @typedef {import("enhanced-resolve").ResolveRequest} ResolveRequest */
/** @typedef {import("enhanced-resolve").ResolveFunctionAsync} ResolveFunctionAsync */
/** @typedef {import("./WebpackError")} WebpackError */
@@ -28,8 +27,6 @@ const processAsyncTree = require("./util/processAsyncTree");
/** @typedef {import("../declarations/WebpackOptions").HashFunction} HashFunction */
/** @typedef {import("./util/fs").IStats} IStats */
/** @typedef {import("./util/fs").InputFileSystem} InputFileSystem */
/** @typedef {import("./util/fs").PathLike} PathLike */
/** @typedef {import("./util/fs").StringCallback} StringCallback */
/**
* @template T
* @typedef {import("./util/AsyncQueue").Callback<T>} ProcessorCallback
@@ -48,8 +45,8 @@ let FS_ACCURACY = 2000;

const EMPTY_SET = new Set();

const RBDT_RESOLVE_CJS = 0;
const RBDT_RESOLVE_ESM = 1;
const RBDT_RESOLVE_INITIAL = 0;
const RBDT_RESOLVE_FILE = 1;
const RBDT_RESOLVE_DIRECTORY = 2;
const RBDT_RESOLVE_CJS_FILE = 3;
const RBDT_RESOLVE_CJS_FILE_AS_CHILD = 4;
@@ -59,7 +56,7 @@ const RBDT_FILE = 7;
const RBDT_DIRECTORY_DEPENDENCIES = 8;
const RBDT_FILE_DEPENDENCIES = 9;

/** @typedef {RBDT_RESOLVE_CJS | RBDT_RESOLVE_ESM | RBDT_RESOLVE_DIRECTORY | RBDT_RESOLVE_CJS_FILE | RBDT_RESOLVE_CJS_FILE_AS_CHILD | RBDT_RESOLVE_ESM_FILE | RBDT_DIRECTORY | RBDT_FILE | RBDT_DIRECTORY_DEPENDENCIES | RBDT_FILE_DEPENDENCIES} JobType */
/** @typedef {RBDT_RESOLVE_INITIAL | RBDT_RESOLVE_FILE | RBDT_RESOLVE_DIRECTORY | RBDT_RESOLVE_CJS_FILE | RBDT_RESOLVE_CJS_FILE_AS_CHILD | RBDT_RESOLVE_ESM_FILE | RBDT_DIRECTORY | RBDT_FILE | RBDT_DIRECTORY_DEPENDENCIES | RBDT_FILE_DEPENDENCIES} JobType */

const INVALID = Symbol("invalid");

@@ -168,13 +165,18 @@ class SnapshotIterator {
}
}

/** @typedef {Map<string, TODO> | Set<string> | undefined} SnapshotMap */
/** @typedef {(snapshot: Snapshot) => SnapshotMap[]} GetMapsFunction */
/**
* @template T
* @typedef {(snapshot: Snapshot) => T[]} GetMapsFunction
*/

/**
* @template T
*/
class SnapshotIterable {
/**
* @param {Snapshot} snapshot snapshot
* @param {GetMapsFunction} getMaps get maps function
* @param {GetMapsFunction<T>} getMaps get maps function
*/
constructor(snapshot, getMaps) {
this.snapshot = snapshot;
@@ -185,9 +187,9 @@ class SnapshotIterable {
let state = 0;
/** @type {IterableIterator<string>} */
let it;
/** @type {GetMapsFunction} */
/** @type {GetMapsFunction<T>} */
let getMaps;
/** @type {SnapshotMap[]} */
/** @type {T[]} */
let maps;
/** @type {Snapshot} */
let snapshot;
@@ -206,7 +208,9 @@ class SnapshotIterable {
if (maps.length > 0) {
const map = maps.pop();
if (map !== undefined) {
it = map.keys();
it =
/** @type {Set<EXPECTED_ANY> | Map<string, EXPECTED_ANY>} */
(map).keys();
state = 2;
} else {
break;
@@ -539,8 +543,9 @@ class Snapshot {
}

/**
* @param {GetMapsFunction} getMaps first
* @returns {Iterable<string>} iterable
* @template T
* @param {GetMapsFunction<T>} getMaps first
* @returns {SnapshotIterable<T>} iterable
*/
_createIterable(getMaps) {
return new SnapshotIterable(this, getMaps);
@@ -658,6 +663,9 @@ class SnapshotOptimization {
* @returns {void}
*/
optimize(newSnapshot, capturedFiles) {
if (capturedFiles.size === 0) {
return;
}
/**
* @param {SnapshotOptimizationEntry} entry optimization entry
* @returns {void}
@@ -727,6 +735,7 @@ class SnapshotOptimization {
) {
continue;
}
/** @type {Set<string>} */
const nonSharedFiles = new Set();
const snapshotContent =
/** @type {NonNullable<SnapshotOptimizationEntry["snapshotContent"]>} */
@@ -1203,34 +1212,34 @@ class FileSystemInfo {
processor: this._getManagedItemDirectoryInfo.bind(this)
});
const _unmanagedPaths = [...unmanagedPaths];
this.unmanagedPathsWithSlash =
/** @type {string[]} */
(_unmanagedPaths.filter((p) => typeof p === "string")).map((p) =>
join(fs, p, "_").slice(0, -1)
);
this.unmanagedPathsRegExps =
/** @type {RegExp[]} */
(_unmanagedPaths.filter((p) => typeof p !== "string"));
/** @type {string[]} */
this.unmanagedPathsWithSlash = _unmanagedPaths
.filter((p) => typeof p === "string")
.map((p) => join(fs, p, "_").slice(0, -1));
/** @type {RegExp[]} */
this.unmanagedPathsRegExps = _unmanagedPaths.filter(
(p) => typeof p !== "string"
);

this.managedPaths = [...managedPaths];
this.managedPathsWithSlash =
/** @type {string[]} */
(this.managedPaths.filter((p) => typeof p === "string")).map((p) =>
join(fs, p, "_").slice(0, -1)
);
/** @type {string[]} */
this.managedPathsWithSlash = this.managedPaths
.filter((p) => typeof p === "string")
.map((p) => join(fs, p, "_").slice(0, -1));
/** @type {RegExp[]} */
this.managedPathsRegExps = this.managedPaths.filter(
(p) => typeof p !== "string"
);

this.managedPathsRegExps =
/** @type {RegExp[]} */
(this.managedPaths.filter((p) => typeof p !== "string"));
this.immutablePaths = [...immutablePaths];
this.immutablePathsWithSlash =
/** @type {string[]} */
(this.immutablePaths.filter((p) => typeof p === "string")).map((p) =>
join(fs, p, "_").slice(0, -1)
);
this.immutablePathsRegExps =
/** @type {RegExp[]} */
(this.immutablePaths.filter((p) => typeof p !== "string"));
/** @type {string[]} */
this.immutablePathsWithSlash = this.immutablePaths
.filter((p) => typeof p === "string")
.map((p) => join(fs, p, "_").slice(0, -1));
/** @type {RegExp[]} */
this.immutablePathsRegExps = this.immutablePaths.filter(
(p) => typeof p !== "string"
);

this._cachedDeprecatedFileTimestamps = undefined;
this._cachedDeprecatedContextTimestamps = undefined;
@@ -1614,16 +1623,12 @@ class FileSystemInfo {

/**
* @param {Job} job job
* @returns {`resolve commonjs file ${string}${string}`|`resolve esm file ${string}${string}`|`resolve esm ${string}${string}`|`resolve directory ${string}`|`file ${string}`|`unknown ${string} ${string}`|`resolve commonjs ${string}${string}`|`directory ${string}`|`file dependencies ${string}`|`directory dependencies ${string}`} result
* @returns {string} result
*/
const jobToString = (job) => {
switch (job.type) {
case RBDT_RESOLVE_CJS:
return `resolve commonjs ${job.path}${expectedToString(
job.expected
)}`;
case RBDT_RESOLVE_ESM:
return `resolve esm ${job.path}${expectedToString(job.expected)}`;
case RBDT_RESOLVE_FILE:
return `resolve file ${job.path}${expectedToString(job.expected)}`;
case RBDT_RESOLVE_DIRECTORY:
return `resolve directory ${job.path}`;
case RBDT_RESOLVE_CJS_FILE:
@@ -1665,7 +1670,7 @@ class FileSystemInfo {
deps,
(dep) =>
/** @type {Job} */ ({
type: RBDT_RESOLVE_CJS,
type: RBDT_RESOLVE_INITIAL,
context,
path: dep,
expected: undefined,
@@ -1769,27 +1774,23 @@ class FileSystemInfo {
}
);
};
switch (type) {
case RBDT_RESOLVE_CJS: {
const isDirectory = /[\\/]$/.test(path);
if (isDirectory) {
resolveDirectory(path.slice(0, -1));
} else {
resolveFile(path, "f", resolveCjs);
}
break;
}
case RBDT_RESOLVE_ESM: {
const isDirectory = /[\\/]$/.test(path);
if (isDirectory) {
resolveDirectory(path.slice(0, -1));
} else {
resolveFile(path);
}
const resolvedType =
type === RBDT_RESOLVE_INITIAL
? /[\\/]$/.test(path)
? RBDT_RESOLVE_DIRECTORY
: RBDT_RESOLVE_FILE
: type;
switch (resolvedType) {
case RBDT_RESOLVE_FILE: {
resolveFile(
path,
"f",
/\.mjs$/.test(path) ? resolveEsm : resolveCjs
);
break;
}
case RBDT_RESOLVE_DIRECTORY: {
resolveDirectory(path);
resolveDirectory(RBDT_RESOLVE_INITIAL ? path.slice(0, -1) : path);
break;
}
case RBDT_RESOLVE_CJS_FILE: {
@@ -1867,7 +1868,13 @@ class FileSystemInfo {
// Check commonjs cache for the module
/** @type {NodeModule | undefined} */
const module = require.cache[path];
if (module && Array.isArray(module.children)) {
if (
module &&
Array.isArray(module.children) &&
// https://github.com/nodejs/node/issues/59868
// Force use `es-module-lexer` for mjs
!/\.mjs$/.test(path)
) {
children: for (const child of module.children) {
const childPath = child.filename;
if (childPath) {
@@ -1947,6 +1954,7 @@ class FileSystemInfo {
const context = dirname(this.fs, path);
const source = /** @type {Buffer} */ (content).toString();
const [imports] = lexer.parse(source);
const added = new Set();
for (const imp of imports) {
try {
let dependency;
@@ -1964,9 +1972,11 @@ class FileSystemInfo {
continue;
}

// we should not track Node.js build dependencies
// We should not track Node.js build dependencies
if (dependency.startsWith("node:")) continue;
if (builtinModules.has(dependency)) continue;
// Avoid extra jobs for identical imports
if (added.has(dependency)) continue;

push({
type: RBDT_RESOLVE_ESM_FILE,
@@ -1975,6 +1985,7 @@ class FileSystemInfo {
expected: imp.d > -1 ? false : undefined,
issuer: job
});
added.add(dependency);
} catch (err1) {
logger.warn(
`Parsing of ${path} for build dependencies failed at 'import(${source.slice(
@@ -2336,6 +2347,9 @@ class FileSystemInfo {
* @param {ManagedFiles} capturedFiles captured files
*/
const processCapturedFiles = (capturedFiles) => {
if (capturedFiles.size === 0) {
return;
}
switch (mode) {
case 3:
this._fileTshsOptimization.optimize(snapshot, capturedFiles);
@@ -2424,6 +2438,9 @@ class FileSystemInfo {
* @param {ManagedContexts} capturedDirectories captured directories
*/
const processCapturedDirectories = (capturedDirectories) => {
if (capturedDirectories.size === 0) {
return;
}
switch (mode) {
case 3:
this._contextTshsOptimization.optimize(snapshot, capturedDirectories);
@@ -2569,6 +2586,9 @@ class FileSystemInfo {
* @param {ManagedMissing} capturedMissing captured missing
*/
const processCapturedMissing = (capturedMissing) => {
if (capturedMissing.size === 0) {
return;
}
this._missingExistenceOptimization.optimize(snapshot, capturedMissing);
for (const path of capturedMissing) {
const cache = this._fileTimestamps.get(path);
@@ -3330,7 +3350,7 @@ class FileSystemInfo {

hash.update(/** @type {string | Buffer} */ (content));

const digest = /** @type {string} */ (hash.digest("hex"));
const digest = hash.digest("hex");

this._fileHashes.set(path, digest);

@@ -3341,7 +3361,7 @@ class FileSystemInfo {
/**
* @private
* @param {string} path path
* @param {(err: WebpackError | null, timestampAndHash?: TimestampAndHash) => void} callback callback
* @param {(err: WebpackError | null, timestampAndHash?: TimestampAndHash | string) => void} callback callback
*/
_getFileTimestampAndHash(path, callback) {
/**
@@ -3361,7 +3381,7 @@ class FileSystemInfo {
return callback(null, result);
}
this._fileTshs.set(path, hash);
return callback(null, /** @type {TODO} */ (hash));
return callback(null, hash);
}
this.fileTimestampQueue.add(path, (err, entry) => {
if (err) {
@@ -3598,7 +3618,7 @@ class FileSystemInfo {
}
}

const digest = /** @type {string} */ (hash.digest("hex"));
const digest = hash.digest("hex");
/** @type {ContextFileSystemInfoEntry} */
const result = {
safeTime,
@@ -3661,7 +3681,7 @@ class FileSystemInfo {
null,
(entry.resolved = {
safeTime,
timestampHash: /** @type {string} */ (hash.digest("hex"))
timestampHash: hash.digest("hex")
})
);
}
@@ -3723,7 +3743,7 @@ class FileSystemInfo {

/** @type {ContextHash} */
const result = {
hash: /** @type {string} */ (hash.digest("hex"))
hash: hash.digest("hex")
};
if (symlinks) result.symlinks = symlinks;
return result;
@@ -3770,10 +3790,7 @@ class FileSystemInfo {
for (const h of hashes) {
hash.update(h);
}
callback(
null,
(entry.resolved = /** @type {string} */ (hash.digest("hex")))
);
callback(null, (entry.resolved = hash.digest("hex")));
}
);
}
@@ -3819,22 +3836,20 @@ class FileSystemInfo {
{
path,
fromImmutablePath: () =>
/** @type {ContextTimestampAndHash | null} */ (null),
/** @type {ContextTimestampAndHash | Omit<ContextTimestampAndHash, "safeTime"> | string | null} */ (
null
),
fromManagedItem: (info) => ({
safeTime: 0,
timestampHash: info,
hash: info || ""
}),
fromSymlink: (file, target, callback) => {
callback(
null,
/** @type {TODO} */
({
timestampHash: target,
hash: target,
symlinks: new Set([target])
})
);
callback(null, {
timestampHash: target,
hash: target,
symlinks: new Set([target])
});
},
fromFile: (file, stat, callback) => {
this._getFileTimestampAndHash(file, callback);
@@ -3892,8 +3907,8 @@ class FileSystemInfo {
/** @type {ContextTimestampAndHash} */
const result = {
safeTime,
timestampHash: /** @type {string} */ (tsHash.digest("hex")),
hash: /** @type {string} */ (hash.digest("hex"))
timestampHash: tsHash.digest("hex"),
hash: hash.digest("hex")
};
if (symlinks) result.symlinks = symlinks;
return result;
@@ -3961,8 +3976,8 @@ class FileSystemInfo {
null,
(entry.resolved = {
safeTime,
timestampHash: /** @type {string} */ (tsHash.digest("hex")),
hash: /** @type {string} */ (hash.digest("hex"))
timestampHash: tsHash.digest("hex"),
hash: hash.digest("hex")
})
);
}