Fix code quality violations and exclude Manifest from checks

Document application modes (development/debug/production)
Add global file drop handler, order column normalization, SPA hash fix
Serve CDN assets via /_vendor/ URLs instead of merging into bundles
Add production minification with license preservation
Improve JSON formatting for debugging and production optimization
Add CDN asset caching with CSS URL inlining for production builds
Add three-mode system (development, debug, production)
Update Manifest CLAUDE.md to reflect helper class architecture
Refactor Manifest.php into helper classes for better organization
Pre-manifest-refactor checkpoint: Add app_mode documentation

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
root
2026-01-14 10:38:22 +00:00
parent bb9046af1b
commit d523f0f600
2355 changed files with 231384 additions and 32223 deletions


@@ -1,72 +1,49 @@
-var tokenize = require('../tokenizer');
-var TokenStream = require('../common/TokenStream');
-var tokenStream = new TokenStream();
-var astToTokens = {
-    decorator: function(handlers) {
-        var curNode = null;
-        var prev = { len: 0, node: null };
-        var nodes = [prev];
-        var buffer = '';
+import { tokenize } from '../tokenizer/index.js';
+const astToTokens = {
+    decorator(handlers) {
+        const tokens = [];
+        let curNode = null;
         return {
-            children: handlers.children,
-            node: function(node) {
-                var tmp = curNode;
+            ...handlers,
+            node(node) {
+                const tmp = curNode;
                 curNode = node;
                 handlers.node.call(this, node);
                 curNode = tmp;
             },
-            chunk: function(chunk) {
-                buffer += chunk;
-                if (prev.node !== curNode) {
-                    nodes.push({
-                        len: chunk.length,
-                        node: curNode
-                    });
-                } else {
-                    prev.len += chunk.length;
-                }
+            emit(value, type, auto) {
+                tokens.push({
+                    type,
+                    value,
+                    node: auto ? null : curNode
+                });
             },
-            result: function() {
-                return prepareTokens(buffer, nodes);
+            result() {
+                return tokens;
             }
         };
     }
 };
-function prepareTokens(str, nodes) {
-    var tokens = [];
-    var nodesOffset = 0;
-    var nodesIndex = 0;
-    var currentNode = nodes ? nodes[nodesIndex].node : null;
-    tokenize(str, tokenStream);
-    while (!tokenStream.eof) {
-        if (nodes) {
-            while (nodesIndex < nodes.length && nodesOffset + nodes[nodesIndex].len <= tokenStream.tokenStart) {
-                nodesOffset += nodes[nodesIndex++].len;
-                currentNode = nodes[nodesIndex].node;
-            }
-        }
+function stringToTokens(str) {
+    const tokens = [];
+    tokenize(str, (type, start, end) =>
         tokens.push({
-            type: tokenStream.tokenType,
-            value: tokenStream.getTokenValue(),
-            index: tokenStream.tokenIndex, // TODO: remove it, temporary solution
-            balance: tokenStream.balance[tokenStream.tokenIndex], // TODO: remove it, temporary solution
-            node: currentNode
-        });
-        tokenStream.next();
-        // console.log({ ...tokens[tokens.length - 1], node: undefined });
-    }
+            type,
+            value: str.slice(start, end),
+            node: null
+        })
+    );
     return tokens;
 }
-module.exports = function(value, syntax) {
+export default function(value, syntax) {
     if (typeof value === 'string') {
-        return prepareTokens(value, null);
+        return stringToTokens(value);
     }
     return syntax.generate(value, astToTokens);
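
For reference, a minimal sketch (not part of the commit) of how the callback form of tokenize() used by stringToTokens above can be driven directly. The import path and the (type, start, end) callback signature are taken from the diff; dumpTokens is a hypothetical helper added here only for illustration.

// Hypothetical usage sketch, assuming the callback signature shown in the diff;
// not part of the committed file.
import { tokenize } from '../tokenizer/index.js';

function dumpTokens(css) {
    const tokens = [];

    // tokenize() invokes the callback once per CSS token, passing its type code
    // and the [start, end) offsets of the token within the source string
    tokenize(css, (type, start, end) => {
        tokens.push({ type, value: css.slice(start, end) });
    });

    return tokens;
}

// e.g. dumpTokens('1px solid red') yields an array of { type, value } pairs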