Add JS-CATCH-FALLBACK-01 rule and update npm packages

Add PHP-ALIAS-01 rule: prohibit field aliasing in serialization

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
Author: root
Date: 2025-12-23 07:36:18 +00:00
parent 3cc590186a
commit 3ce82a924a
1256 changed files with 6491 additions and 3989 deletions

node_modules/webpack/lib/util/hash/BatchedHash.js (19 changed lines, generated, vendored, Executable file → Normal file)

@@ -6,6 +6,7 @@
"use strict";
const Hash = require("../Hash");
+const { digest, update } = require("./hash-digest");
const MAX_SHORT_STRING = require("./wasm-hash").MAX_SHORT_STRING;
/** @typedef {import("../../../declarations/WebpackOptions").HashDigest} Encoding */
@@ -51,9 +52,9 @@ class BatchedHash extends Hash {
return this;
}
if (this.encoding) {
-this.hash.update(this.string, this.encoding);
+update(this.hash, this.string, this.encoding);
} else {
-this.hash.update(this.string);
+update(this.hash, this.string);
}
this.string = undefined;
}
@@ -66,12 +67,12 @@ class BatchedHash extends Hash {
this.string = data;
this.encoding = inputEncoding;
} else if (inputEncoding) {
-this.hash.update(data, inputEncoding);
+update(this.hash, data, inputEncoding);
} else {
-this.hash.update(data);
+update(this.hash, data);
}
} else {
-this.hash.update(data);
+update(this.hash, data);
}
return this;
}
@@ -95,15 +96,15 @@ class BatchedHash extends Hash {
digest(encoding) {
if (this.string !== undefined) {
if (this.encoding) {
-this.hash.update(this.string, this.encoding);
+update(this.hash, this.string, this.encoding);
} else {
-this.hash.update(this.string);
+update(this.hash, this.string);
}
}
if (!encoding) {
-return this.hash.digest();
+return digest(this.hash);
}
-return this.hash.digest(encoding);
+return digest(this.hash, encoding);
}
}
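
Note on the BatchedHash.js change: the hunks above swap direct this.hash.update(...) / this.hash.digest(...) calls for the update() / digest() helpers exported by the new hash-digest.js added later in this commit, so encoding translation happens in one shared place. Below is a minimal, self-contained sketch of the same buffer-then-flush-through-helpers pattern using Node's built-in crypto; MiniBatchedHash, the simplified helpers, and the 32-character threshold are illustrative stand-ins, not webpack's actual implementation.

// Sketch only (not webpack code): buffer one short string, then flush it through
// helper functions instead of calling the underlying hash object directly.
const crypto = require("crypto");

// Simplified stand-ins for hash-digest.js update()/digest(); the real helpers
// also translate encodings the underlying hash may not support.
const update = (hash, data, encoding) =>
	encoding ? hash.update(data, encoding) : hash.update(data);
const digest = (hash, encoding) => (encoding ? hash.digest(encoding) : hash.digest());

class MiniBatchedHash {
	constructor(hash) {
		this.hash = hash;
		this.string = undefined;
		this.encoding = undefined;
	}
	update(data, inputEncoding) {
		if (this.string !== undefined) {
			// Flush the previously buffered short string before handling new data.
			update(this.hash, this.string, this.encoding);
			this.string = undefined;
		}
		if (typeof data === "string" && data.length < 32) {
			// Defer short strings; 32 is an illustrative threshold (webpack uses MAX_SHORT_STRING).
			this.string = data;
			this.encoding = inputEncoding;
		} else {
			update(this.hash, data, inputEncoding);
		}
		return this;
	}
	digest(encoding) {
		if (this.string !== undefined) update(this.hash, this.string, this.encoding);
		return digest(this.hash, encoding);
	}
}

console.log(new MiniBatchedHash(crypto.createHash("md5")).update("abc").digest("hex"));
// -> 900150983cd24fb0d6963f7d28e17f72 (md5 of "abc")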

node_modules/webpack/lib/util/hash/BulkUpdateHash.js (138 changed lines, generated, vendored, Normal file)

@@ -0,0 +1,138 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Alexander Akait @alexander-akait
*/
"use strict";
const Hash = require("../Hash");
const { digest, update } = require("./hash-digest");
/** @typedef {import("../../../declarations/WebpackOptions").HashDigest} Encoding */
/** @typedef {() => Hash} HashFactory */
const BULK_SIZE = 3;
// We are using an object instead of a Map as this will stay static during the runtime
// so access to it can be optimized by v8
/** @type {{[key: string]: Map<string, string>}} */
const digestCaches = {};
class BulkUpdateHash extends Hash {
/**
* @param {Hash | HashFactory} hashOrFactory function to create a hash
* @param {string=} hashKey key for caching
*/
constructor(hashOrFactory, hashKey) {
super();
this.hashKey = hashKey;
if (typeof hashOrFactory === "function") {
this.hashFactory = hashOrFactory;
this.hash = undefined;
} else {
this.hashFactory = undefined;
this.hash = hashOrFactory;
}
this.buffer = "";
}
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @overload
* @param {string | Buffer} data data
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @overload
* @param {string} data data
* @param {Encoding} inputEncoding data encoding
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string | Buffer} data data
* @param {Encoding=} inputEncoding data encoding
* @returns {Hash} updated hash
*/
update(data, inputEncoding) {
if (
inputEncoding !== undefined ||
typeof data !== "string" ||
data.length > BULK_SIZE
) {
if (this.hash === undefined) {
this.hash = /** @type {HashFactory} */ (this.hashFactory)();
}
if (this.buffer.length > 0) {
update(this.hash, this.buffer);
this.buffer = "";
}
if (typeof data === "string" && inputEncoding) {
update(this.hash, data, inputEncoding);
} else {
update(this.hash, data);
}
} else {
this.buffer += data;
if (this.buffer.length > BULK_SIZE) {
if (this.hash === undefined) {
this.hash = /** @type {HashFactory} */ (this.hashFactory)();
}
update(this.hash, this.buffer);
this.buffer = "";
}
}
return this;
}
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @overload
* @returns {Buffer} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @overload
* @param {Encoding} encoding encoding of the return value
* @returns {string} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {Encoding=} encoding encoding of the return value
* @returns {string | Buffer} digest
*/
digest(encoding) {
let digestCache;
const buffer = this.buffer;
if (this.hash === undefined) {
// short data for hash, we can use caching
const cacheKey = `${this.hashKey}-${encoding}`;
digestCache = digestCaches[cacheKey];
if (digestCache === undefined) {
digestCache = digestCaches[cacheKey] = new Map();
}
const cacheEntry = digestCache.get(buffer);
if (cacheEntry !== undefined) return cacheEntry;
this.hash = /** @type {HashFactory} */ (this.hashFactory)();
}
if (buffer.length > 0) {
update(this.hash, buffer);
}
if (!encoding) {
const result = digest(this.hash, undefined, Boolean(this.hashKey));
if (digestCache !== undefined) {
digestCache.set(buffer, result);
}
return result;
}
const result = digest(this.hash, encoding, Boolean(this.hashKey));
if (digestCache !== undefined) {
digestCache.set(buffer, result);
}
return result;
}
}
module.exports = BulkUpdateHash;
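
Note on BulkUpdateHash.js: short string updates are concatenated into this.buffer until they grow past BULK_SIZE, and if the real hash object is never materialized, digest() serves the result from a per-hashKey cache keyed by the buffered string. A rough sketch of that caching idea with Node's built-in crypto follows; cachedDigest and its parameters are illustrative names, not webpack API.

// Sketch only (not webpack code): cache digests of tiny inputs so the real
// hash object never has to be created for repeated short strings.
const crypto = require("crypto");

// Plain object for the outer level: its key set stays static at runtime,
// which the vendored comment above notes is friendlier to V8 than a Map.
const digestCaches = {};

function cachedDigest(hashKey, algorithm, shortInput, encoding) {
	const cacheKey = `${hashKey}-${encoding}`;
	const cache = (digestCaches[cacheKey] ||= new Map());
	const hit = cache.get(shortInput);
	if (hit !== undefined) return hit; // cache hit: no hash object is created at all
	const result = crypto.createHash(algorithm).update(shortInput).digest(encoding);
	cache.set(shortInput, result);
	return result;
}

console.log(cachedDigest("demo-key", "md5", "abc", "hex")); // computed once
console.log(cachedDigest("demo-key", "md5", "abc", "hex")); // served from the cache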

node_modules/webpack/lib/util/hash/DebugHash.js (75 changed lines, generated, vendored, Executable file)

@@ -0,0 +1,75 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Alexander Akait @alexander-akait
*/
"use strict";
const Hash = require("../Hash");
/** @typedef {import("../../../declarations/WebpackOptions").HashDigest} Encoding */
/* istanbul ignore next */
class DebugHash extends Hash {
constructor() {
super();
this.string = "";
}
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @overload
* @param {string | Buffer} data data
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @overload
* @param {string} data data
* @param {Encoding} inputEncoding data encoding
* @returns {Hash} updated hash
*/
/**
* Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
* @param {string | Buffer} data data
* @param {Encoding=} inputEncoding data encoding
* @returns {Hash} updated hash
*/
update(data, inputEncoding) {
if (typeof data !== "string") data = data.toString("utf8");
const prefix = Buffer.from("@webpack-debug-digest@").toString("hex");
if (data.startsWith(prefix)) {
data = Buffer.from(data.slice(prefix.length), "hex").toString();
}
this.string += `[${data}](${
/** @type {string} */
(
// eslint-disable-next-line unicorn/error-message
new Error().stack
).split("\n", 3)[2]
})\n`;
return this;
}
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @overload
* @returns {Buffer} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @overload
* @param {Encoding} encoding encoding of the return value
* @returns {string} digest
*/
/**
* Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
* @param {Encoding=} encoding encoding of the return value
* @returns {string | Buffer} digest
*/
digest(encoding) {
return Buffer.from(`@webpack-debug-digest@${this.string}`).toString("hex");
}
}
module.exports = DebugHash;
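
Note on DebugHash.js: every update is recorded together with the caller's stack frame, and digest() is simply the accumulated trace prefixed with "@webpack-debug-digest@" and hex-encoded; the prefix check inside update() lets a digest produced by one DebugHash be unwrapped back into readable text when it is fed into another. A small sketch of just that prefix round trip; digestOf and unwrap are illustrative helpers, not part of webpack.

// Sketch only (not webpack code): the magic-prefix trick used by DebugHash.
const MAGIC = "@webpack-debug-digest@";
const prefixHex = Buffer.from(MAGIC).toString("hex");

// Hex encoding is byte-wise, so hex(MAGIC + trace) === hex(MAGIC) + hex(trace),
// which is what makes the startsWith(prefixHex) check in update() work.
const digestOf = (trace) => Buffer.from(MAGIC + trace).toString("hex");
const unwrap = (data) =>
	data.startsWith(prefixHex)
		? Buffer.from(data.slice(prefixHex.length), "hex").toString()
		: data;

const inner = digestOf("[chunk-a](at example.js:1)\n");
console.log(unwrap(inner)); // prints the readable trace again, not hex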

node_modules/webpack/lib/util/hash/hash-digest.js (216 changed lines, generated, vendored, Executable file)

@@ -0,0 +1,216 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Alexander Akait @alexander-akait
*/
"use strict";
/** @typedef {import("../Hash")} Hash */
/** @typedef {import("../../../declarations/WebpackOptions").HashDigest} Encoding */
/** @typedef {"26" | "32" | "36" | "49" | "52" | "58" | "62"} Base */
/* cSpell:disable */
/** @type {Record<Base, string>} */
const ENCODE_TABLE = Object.freeze({
26: "abcdefghijklmnopqrstuvwxyz",
32: "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567",
36: "0123456789abcdefghijklmnopqrstuvwxyz",
49: "abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ",
52: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ",
58: "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz",
62: "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
});
/* cSpell:enable */
const ZERO = BigInt("0");
const EIGHT = BigInt("8");
const FF = BigInt("0xff");
/**
* It encodes octet arrays by doing long divisions on all significant digits in the array, creating a representation of that number in the new base.
* Then for every leading zero in the input (not significant as a number) it will encode as a single leader character.
* This is the first in the alphabet and will decode as 8 bits. The other characters depend upon the base.
* For example, a base58 alphabet packs roughly 5.858 bits per character.
* This means the encoded string 000f (using a base16, 0-f alphabet) will actually decode to 4 bytes unlike a canonical hex encoding which uniformly packs 4 bits into each character.
* While unusual, this does mean that no padding is required, and it works for bases like 43.
* @param {Buffer} buffer buffer
* @param {Base} base base
* @returns {string} encoded buffer
*/
const encode = (buffer, base) => {
if (buffer.length === 0) return "";
const bigIntBase = BigInt(ENCODE_TABLE[base].length);
// Convert buffer to BigInt efficiently using bitwise operations
let value = ZERO;
for (let i = 0; i < buffer.length; i++) {
value = (value << EIGHT) | BigInt(buffer[i]);
}
// Convert to baseX string efficiently using array
const digits = [];
if (value === ZERO) return ENCODE_TABLE[base][0];
while (value > ZERO) {
const remainder = Number(value % bigIntBase);
digits.push(ENCODE_TABLE[base][remainder]);
value /= bigIntBase;
}
return digits.reverse().join("");
};
/**
* @param {string} data string
* @param {Base} base base
* @returns {Buffer} buffer
*/
const decode = (data, base) => {
if (data.length === 0) return Buffer.from("");
const bigIntBase = BigInt(ENCODE_TABLE[base].length);
// Convert the baseX string to a BigInt value
let value = ZERO;
for (let i = 0; i < data.length; i++) {
const digit = ENCODE_TABLE[base].indexOf(data[i]);
if (digit === -1) {
throw new Error(`Invalid character at position ${i}: ${data[i]}`);
}
value = value * bigIntBase + BigInt(digit);
}
// If value is 0, return a single-byte buffer with value 0
if (value === ZERO) {
return Buffer.alloc(1);
}
// Determine buffer size efficiently by counting bytes
let temp = value;
let byteLength = 0;
while (temp > ZERO) {
temp >>= EIGHT;
byteLength++;
}
// Create buffer and fill it from right to left
const buffer = Buffer.alloc(byteLength);
for (let i = byteLength - 1; i >= 0; i--) {
buffer[i] = Number(value & FF);
value >>= EIGHT;
}
return buffer;
};
// Compatibility with the old hash libraries, they can return different structures, so let's stringify them firstly
/**
* @param {string | { toString: (radix: number) => string }} value value
* @param {string} encoding encoding
* @returns {string} string
*/
const toString = (value, encoding) =>
typeof value === "string"
? value
: Buffer.from(value.toString(16), "hex").toString(
/** @type {NodeJS.BufferEncoding} */
(encoding)
);
/**
* @param {Buffer | { toString: (radix: number) => string }} value value
* @returns {Buffer} buffer
*/
const toBuffer = (value) =>
Buffer.isBuffer(value) ? value : Buffer.from(value.toString(16), "hex");
let isBase64URLSupported = false;
try {
isBase64URLSupported = Boolean(Buffer.from("", "base64url"));
} catch (_err) {
// Nothing
}
/**
* @param {Hash} hash hash
* @param {string | Buffer} data data
* @param {Encoding=} encoding encoding of the return value
* @returns {void}
*/
const update = (hash, data, encoding) => {
if (encoding === "base64url" && !isBase64URLSupported) {
const base64String = /** @type {string} */ (data)
.replace(/-/g, "+")
.replace(/_/g, "/");
const buf = Buffer.from(base64String, "base64");
hash.update(buf);
return;
} else if (
typeof data === "string" &&
encoding &&
typeof ENCODE_TABLE[/** @type {Base} */ (encoding.slice(4))] !== "undefined"
) {
const buf = decode(data, /** @type {Base} */ (encoding.slice(4)));
hash.update(buf);
return;
}
if (encoding) {
hash.update(/** @type {string} */ (data), encoding);
} else {
hash.update(data);
}
};
/**
* @overload
* @param {Hash} hash hash
* @returns {Buffer} digest
*/
/**
* @overload
* @param {Hash} hash hash
* @param {undefined} encoding encoding of the return value
* @param {boolean=} isSafe true when we await right types from digest(), otherwise false
* @returns {Buffer} digest
*/
/**
* @overload
* @param {Hash} hash hash
* @param {Encoding} encoding encoding of the return value
* @param {boolean=} isSafe true when we await right types from digest(), otherwise false
* @returns {string} digest
*/
/**
* @param {Hash} hash hash
* @param {Encoding=} encoding encoding of the return value
* @param {boolean=} isSafe true when we await right types from digest(), otherwise false
* @returns {string | Buffer} digest
*/
const digest = (hash, encoding, isSafe) => {
if (typeof encoding === "undefined") {
return isSafe ? hash.digest() : toBuffer(hash.digest());
}
if (encoding === "base64url" && !isBase64URLSupported) {
const digest = isSafe
? hash.digest("base64")
: toString(hash.digest("base64"), "base64");
return digest.replace(/\+/g, "-").replace(/\//g, "_").replace(/[=]+$/, "");
} else if (
typeof ENCODE_TABLE[/** @type {Base} */ (encoding.slice(4))] !== "undefined"
) {
const buf = isSafe ? hash.digest() : toBuffer(hash.digest());
return encode(
buf,
/** @type {Base} */
(encoding.slice(4))
);
}
return isSafe
? hash.digest(encoding)
: toString(hash.digest(encoding), encoding);
};
module.exports.decode = decode;
module.exports.digest = digest;
module.exports.encode = encode;
module.exports.update = update;
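
Note on hash-digest.js: encode() and decode() implement arbitrary base-N encoding by folding the whole buffer into one BigInt and doing repeated division by the alphabet size, while update() and digest() fall back to these (or to a manual base64url translation) whenever the requested encoding is not handled natively by the hash implementation. The following standalone round trip demonstrates the BigInt approach for base 36; encode36/decode36 are illustrative re-implementations, not the exported module functions, and (like the vendored encode) they drop leading zero bytes.

// Sketch only (not webpack code): big-integer base-N round trip, base 36.
const ALPHABET = "0123456789abcdefghijklmnopqrstuvwxyz"; // same alphabet as ENCODE_TABLE[36]

const encode36 = (buffer) => {
	let value = 0n;
	for (const byte of buffer) value = (value << 8n) | BigInt(byte); // bytes -> one big integer
	if (value === 0n) return ALPHABET[0];
	let out = "";
	while (value > 0n) {
		out = ALPHABET[Number(value % 36n)] + out; // long division, least significant digit first
		value /= 36n;
	}
	return out;
};

const decode36 = (text) => {
	let value = 0n;
	for (const ch of text) value = value * 36n + BigInt(ALPHABET.indexOf(ch)); // no validation in this sketch
	const bytes = [];
	while (value > 0n) {
		bytes.unshift(Number(value & 0xffn)); // peel bytes from the low end
		value >>= 8n;
	}
	return Buffer.from(bytes);
};

const buf = Buffer.from("hello");
console.log(encode36(buf));                      // "5pzcszu7"
console.log(decode36(encode36(buf)).toString()); // "hello"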