Fix code quality violations and exclude Manifest from checks

Document application modes (development/debug/production)
Add global file drop handler, order column normalization, SPA hash fix
Serve CDN assets via /_vendor/ URLs instead of merging into bundles
Add production minification with license preservation
Improve JSON formatting for debugging and production optimization
Add CDN asset caching with CSS URL inlining for production builds
Add three-mode system (development, debug, production)
Update Manifest CLAUDE.md to reflect helper class architecture
Refactor Manifest.php into helper classes for better organization
Pre-manifest-refactor checkpoint: Add app_mode documentation

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
root
2026-01-14 10:38:22 +00:00
parent bb9046af1b
commit d523f0f600
2355 changed files with 231384 additions and 32223 deletions

View File

@@ -1,576 +0,0 @@
//
// list
// ┌──────┐
// ┌──────────────┼─head │
// │ │ tail─┼──────────────┐
// │ └──────┘ │
// ▼ ▼
// item item item item
// ┌──────┐ ┌──────┐ ┌──────┐ ┌──────┐
// null ◀──┼─prev │◀───┼─prev │◀───┼─prev │◀───┼─prev │
// │ next─┼───▶│ next─┼───▶│ next─┼───▶│ next─┼──▶ null
// ├──────┤ ├──────┤ ├──────┤ ├──────┤
// │ data │ │ data │ │ data │ │ data │
// └──────┘ └──────┘ └──────┘ └──────┘
//
// Build a detached list item wrapping the given payload.
// prev/next start out null; List's mutation methods link the item in.
function createItem(data) {
    return {
        prev: null,
        next: null,
        data: data
    };
}
// Acquire a cursor for iterating over `node` (a List) and push it onto
// the list's cursor stack (node.cursor). Cursors are recycled through a
// module-level free list (`cursors`) to avoid allocation churn during
// nested iterations.
function allocateCursor(node, prev, next) {
var cursor;
if (cursors !== null) {
// reuse a cursor from the pool; `cursors` advances to the next free one
cursor = cursors;
cursors = cursors.cursor;
cursor.prev = prev;
cursor.next = next;
cursor.cursor = node.cursor;
} else {
// pool is empty — allocate a fresh cursor
cursor = {
prev: prev,
next: next,
cursor: node.cursor
};
}
node.cursor = cursor;
return cursor;
}
// Pop the most recently allocated cursor off the list's cursor stack and
// return it to the module-level pool. Must be paired with allocateCursor.
function releaseCursor(node) {
var cursor = node.cursor;
node.cursor = cursor.cursor;
cursor.prev = null;
cursor.next = null;
cursor.cursor = cursors;
cursors = cursor;
}
// Module-level free list (pool) of released cursor objects.
var cursors = null;
// Doubly linked list with head/tail pointers and a stack of active
// iteration cursors (see allocateCursor/releaseCursor and the diagram above).
var List = function() {
this.cursor = null;
this.head = null;
this.tail = null;
};
// createItem is exposed both as a static and as an instance method.
List.createItem = createItem;
List.prototype.createItem = createItem;
// Walk the stack of active cursors and retarget any cursor whose
// prev/next currently points at an old item so it points at the new one.
// Mutation methods call this so in-flight iterations stay consistent.
List.prototype.updateCursors = function(prevOld, prevNew, nextOld, nextNew) {
    for (var c = this.cursor; c !== null; c = c.cursor) {
        if (c.prev === prevOld) {
            c.prev = prevNew;
        }
        if (c.next === nextOld) {
            c.next = nextNew;
        }
    }
};
// Number of items in the list; counts by traversal, so O(n).
List.prototype.getSize = function() {
    var count = 0;
    for (var item = this.head; item; item = item.next) {
        count++;
    }
    return count;
};
// Rebuild this list in place from an array, replacing previous content.
// Returns the list for chaining.
List.prototype.fromArray = function(array) {
    var prevItem = null;
    this.head = null;
    for (var i = 0; i < array.length; i++) {
        var newItem = createItem(array[i]);
        if (prevItem === null) {
            // first element becomes the head
            this.head = newItem;
        } else {
            prevItem.next = newItem;
        }
        newItem.prev = prevItem;
        prevItem = newItem;
    }
    // null for an empty input array
    this.tail = prevItem;
    return this;
};
// Collect item payloads into a plain array, head to tail.
List.prototype.toArray = function() {
    var result = [];
    for (var item = this.head; item; item = item.next) {
        result.push(item.data);
    }
    return result;
};
// JSON serialization is the array of payloads.
List.prototype.toJSON = List.prototype.toArray;
// True when the list holds no items.
List.prototype.isEmpty = function() {
    return this.head === null;
};
// Payload of the first/last item (null when the list is empty).
List.prototype.first = function() {
    return this.head && this.head.data;
};
List.prototype.last = function() {
    return this.tail && this.tail.data;
};
// Iterate payloads head→tail, calling fn(data, item, list) with `context`
// as `this` (defaults to the list). Traversal goes through a cursor so it
// remains correct even if fn inserts or removes items mid-iteration.
List.prototype.each = function(fn, context) {
var item;
if (context === undefined) {
context = this;
}
// push cursor
var cursor = allocateCursor(this, null, this.head);
while (cursor.next !== null) {
item = cursor.next;
// advance before calling fn, so fn may safely mutate the list
cursor.next = item.next;
fn.call(context, item.data, item, this);
}
// pop cursor
releaseCursor(this);
};
List.prototype.forEach = List.prototype.each;
// Iterate payloads tail→head; mirror image of each().
List.prototype.eachRight = function(fn, context) {
var item;
if (context === undefined) {
context = this;
}
// push cursor
var cursor = allocateCursor(this, this.tail, null);
while (cursor.prev !== null) {
item = cursor.prev;
cursor.prev = item.prev;
fn.call(context, item.data, item, this);
}
// pop cursor
releaseCursor(this);
};
List.prototype.forEachRight = List.prototype.eachRight;
// Fold payloads head→tail: acc = fn(acc, data, item, list), starting from
// initialValue. Cursor-based, so fn may mutate the list during the fold.
List.prototype.reduce = function(fn, initialValue, context) {
var item;
if (context === undefined) {
context = this;
}
// push cursor
var cursor = allocateCursor(this, null, this.head);
var acc = initialValue;
while (cursor.next !== null) {
item = cursor.next;
cursor.next = item.next;
acc = fn.call(context, acc, item.data, item, this);
}
// pop cursor
releaseCursor(this);
return acc;
};
// Same fold, tail→head.
List.prototype.reduceRight = function(fn, initialValue, context) {
var item;
if (context === undefined) {
context = this;
}
// push cursor
var cursor = allocateCursor(this, this.tail, null);
var acc = initialValue;
while (cursor.prev !== null) {
item = cursor.prev;
cursor.prev = item.prev;
acc = fn.call(context, acc, item.data, item, this);
}
// pop cursor
releaseCursor(this);
return acc;
};
// Iterate forward from `start` (an item, inclusive) until fn returns
// truthy or the list ends. No-op when start is null. Cursor-based, so fn
// may mutate the list during iteration.
List.prototype.nextUntil = function(start, fn, context) {
if (start === null) {
return;
}
var item;
if (context === undefined) {
context = this;
}
// push cursor
var cursor = allocateCursor(this, null, start);
while (cursor.next !== null) {
item = cursor.next;
cursor.next = item.next;
// truthy return value stops the scan
if (fn.call(context, item.data, item, this)) {
break;
}
}
// pop cursor
releaseCursor(this);
};
// Iterate backward from `start` (inclusive) until fn returns truthy.
List.prototype.prevUntil = function(start, fn, context) {
if (start === null) {
return;
}
var item;
if (context === undefined) {
context = this;
}
// push cursor
var cursor = allocateCursor(this, start, null);
while (cursor.prev !== null) {
item = cursor.prev;
cursor.prev = item.prev;
if (fn.call(context, item.data, item, this)) {
break;
}
}
// pop cursor
releaseCursor(this);
};
// Array#some analogue: true as soon as fn(data, item, list) is truthy.
// Plain traversal (no cursor) — fn must not mutate the list here.
List.prototype.some = function(fn, context) {
    if (context === undefined) {
        context = this;
    }
    for (var item = this.head; item !== null; item = item.next) {
        if (fn.call(context, item.data, item, this)) {
            return true;
        }
    }
    return false;
};
// Array#map analogue: new List of fn's return values.
List.prototype.map = function(fn, context) {
    var mapped = new List();
    if (context === undefined) {
        context = this;
    }
    for (var item = this.head; item !== null; item = item.next) {
        mapped.appendData(fn.call(context, item.data, item, this));
    }
    return mapped;
};
// Array#filter analogue: new List of payloads for which fn is truthy.
List.prototype.filter = function(fn, context) {
    var filtered = new List();
    if (context === undefined) {
        context = this;
    }
    for (var item = this.head; item !== null; item = item.next) {
        if (fn.call(context, item.data, item, this)) {
            filtered.appendData(item.data);
        }
    }
    return filtered;
};
// Drop all items. Note: items themselves are not unlinked, and any
// active cursors are left untouched.
List.prototype.clear = function() {
    this.head = null;
    this.tail = null;
};
// Shallow copy: new List with new items sharing the same payloads.
List.prototype.copy = function() {
    var duplicate = new List();
    for (var item = this.head; item !== null; item = item.next) {
        duplicate.insert(createItem(item.data));
    }
    return duplicate;
};
// Link `item` in as the new head; cursors pointing at the old head or at
// the list start are retargeted first. Returns the list for chaining.
List.prototype.prepend = function(item) {
    this.updateCursors(null, item, this.head, item);
    if (this.head === null) {
        // empty list: the new item is also the tail
        this.tail = item;
    } else {
        // old head <- item and item -> old head
        this.head.prev = item;
        item.next = this.head;
    }
    this.head = item;
    return this;
};
// Convenience: wrap data in an item, then prepend.
List.prototype.prependData = function(data) {
    return this.prepend(createItem(data));
};
// append is insert-at-end (insert with no `before`).
List.prototype.append = function(item) {
    return this.insert(item);
};
// Convenience: wrap data in an item, then append.
List.prototype.appendData = function(data) {
    return this.insert(createItem(data));
};
// Insert `item` before the item `before`, or at the end of the list when
// `before` is omitted/null. Throws if `before` claims to be the first
// item but is not this list's head. Returns the list for chaining.
List.prototype.insert = function(item, before) {
if (before !== undefined && before !== null) {
// prev before
// ^
// item
this.updateCursors(before.prev, item, before, item);
if (before.prev === null) {
// insert to the beginning of list
if (this.head !== before) {
throw new Error('before doesn\'t belong to list');
}
// since head points to before therefore list doesn't empty
// no need to check tail
this.head = item;
before.prev = item;
item.next = before;
// retarget cursors that pointed at the list start
this.updateCursors(null, item);
} else {
// insert between two items
before.prev.next = item;
item.prev = before.prev;
before.prev = item;
item.next = before;
}
} else {
// tail
// ^
// item
this.updateCursors(this.tail, item, null, item);
// insert to the ending of the list
if (this.tail !== null) {
// last item -> new item
this.tail.next = item;
// last item <- new item
item.prev = this.tail;
} else {
// if list has no tail, then it also has no head
// in this case head points to new item
this.head = item;
}
// tail always points to new item
this.tail = item;
}
return this;
};
// Convenience: wrap data in an item, then insert before `before`.
List.prototype.insertData = function(data, before) {
return this.insert(createItem(data), before);
};
// Unlink `item` from the list and return it with prev/next reset to null.
// Throws when `item` claims a list boundary but is not this list's
// head/tail. Active cursors pointing at `item` are moved to a neighbor.
List.prototype.remove = function(item) {
// item
// ^
// prev next
this.updateCursors(item, item.prev, item, item.next);
if (item.prev !== null) {
item.prev.next = item.next;
} else {
if (this.head !== item) {
throw new Error('item doesn\'t belong to list');
}
this.head = item.next;
}
if (item.next !== null) {
item.next.prev = item.prev;
} else {
if (this.tail !== item) {
throw new Error('item doesn\'t belong to list');
}
this.tail = item.prev;
}
// fully detach so the item can be reused
item.prev = null;
item.next = null;
return item;
};
// Stack/queue helpers. Unlike appendData/prependData, push and unshift
// return undefined rather than the list.
List.prototype.push = function(data) {
    this.insert(createItem(data));
};
// Remove and return the last item, or undefined when empty.
List.prototype.pop = function() {
    return this.tail !== null ? this.remove(this.tail) : undefined;
};
List.prototype.unshift = function(data) {
    this.prepend(createItem(data));
};
// Remove and return the first item, or undefined when empty.
List.prototype.shift = function() {
    return this.head !== null ? this.remove(this.head) : undefined;
};
// Move all items from `list` to the front of this list; `list` is emptied.
List.prototype.prependList = function(list) {
return this.insertList(list, this.head);
};
// Move all items from `list` to the end of this list; `list` is emptied.
List.prototype.appendList = function(list) {
return this.insertList(list);
};
// Splice every item of `list` into this list before `before` (or at the
// end when `before` is omitted). The source list is emptied; its items
// are re-linked in place rather than copied. Returns this list.
List.prototype.insertList = function(list, before) {
// ignore empty lists
if (list.head === null) {
return this;
}
if (before !== undefined && before !== null) {
this.updateCursors(before.prev, list.tail, before, list.head);
// insert in the middle of dist list
if (before.prev !== null) {
// before.prev <-> list.head
before.prev.next = list.head;
list.head.prev = before.prev;
} else {
this.head = list.head;
}
before.prev = list.tail;
list.tail.next = before;
} else {
this.updateCursors(this.tail, list.tail, null, list.head);
// insert to end of the list
if (this.tail !== null) {
// if destination list has a tail, then it also has a head,
// but head doesn't change
// dest tail -> source head
this.tail.next = list.head;
// dest tail <- source head
list.head.prev = this.tail;
} else {
// if list has no a tail, then it also has no a head
// in this case points head to new item
this.head = list.head;
}
// tail always start point to new item
this.tail = list.tail;
}
// detach everything from the source list
list.head = null;
list.tail = null;
return this;
};
// Replace oldItem with either a single item or a whole List. A List is
// detected by duck-typing on the presence of a `head` property.
List.prototype.replace = function(oldItem, newItemOrList) {
if ('head' in newItemOrList) {
this.insertList(newItemOrList, oldItem);
} else {
this.insert(newItemOrList, oldItem);
}
this.remove(oldItem);
};
module.exports = List;

View File

@@ -1,91 +0,0 @@
var adoptBuffer = require('./adopt-buffer');
var isBOM = require('../tokenizer').isBOM;
// Char codes of line terminators: \n (newline), \f (form feed), \r (return).
var N = 10;
var F = 12;
var R = 13;
// Fill host.lines/host.columns so that index i gives the 1-based line and
// column of source offset i. Buffers come from adoptBuffer (reused when
// large enough). A BOM at offset 0 is skipped via isBOM, and CRLF is
// treated as a single line break.
function computeLinesAndColumns(host, source) {
var sourceLength = source.length;
var lines = adoptBuffer(host.lines, sourceLength); // +1
var line = host.startLine;
var columns = adoptBuffer(host.columns, sourceLength);
var column = host.startColumn;
// isBOM returns the number of code units to skip (0 when no BOM)
var startOffset = source.length > 0 ? isBOM(source.charCodeAt(0)) : 0;
for (var i = startOffset; i < sourceLength; i++) { // -1
var code = source.charCodeAt(i);
lines[i] = line;
columns[i] = column++;
if (code === N || code === R || code === F) {
// consume the \n of a \r\n pair as part of the same break
if (code === R && i + 1 < sourceLength && source.charCodeAt(i + 1) === N) {
i++;
lines[i] = line;
columns[i] = column;
}
line++;
column = 1;
}
}
// sentinel entry at index sourceLength (adoptBuffer allocates headroom)
lines[i] = line;
columns[i] = column;
host.lines = lines;
host.columns = columns;
}
// Maps source offsets to { line, column } locations. Line/column tables
// are computed lazily on first lookup and invalidated by setSource().
var OffsetToLocation = function() {
this.lines = null;
this.columns = null;
this.linesAndColumnsComputed = false;
};
OffsetToLocation.prototype = {
// Attach a source string; startOffset/startLine/startColumn shift the
// reported coordinates (defaults: 0 / 1 / 1).
setSource: function(source, startOffset, startLine, startColumn) {
this.source = source;
this.startOffset = typeof startOffset === 'undefined' ? 0 : startOffset;
this.startLine = typeof startLine === 'undefined' ? 1 : startLine;
this.startColumn = typeof startColumn === 'undefined' ? 1 : startColumn;
this.linesAndColumnsComputed = false;
},
// Compute the offset→line/column tables once per source.
ensureLinesAndColumnsComputed: function() {
if (!this.linesAndColumnsComputed) {
computeLinesAndColumns(this, this.source);
this.linesAndColumnsComputed = true;
}
},
// Location record for a single offset.
getLocation: function(offset, filename) {
this.ensureLinesAndColumnsComputed();
return {
source: filename,
offset: this.startOffset + offset,
line: this.lines[offset],
column: this.columns[offset]
};
},
// Location records for a [start, end] offset pair.
getLocationRange: function(start, end, filename) {
this.ensureLinesAndColumnsComputed();
return {
source: filename,
start: {
offset: this.startOffset + start,
line: this.lines[start],
column: this.columns[start]
},
end: {
offset: this.startOffset + end,
line: this.lines[end],
column: this.columns[end]
}
};
}
};
module.exports = OffsetToLocation;

View File

@@ -1,82 +0,0 @@
var createCustomError = require('../utils/createCustomError');
// Limits for the source fragment rendered in parse error messages:
// lines are clipped to MAX_LINE_LENGTH, long lines are shifted so the
// error column lands near OFFSET_CORRECTION, and tabs are expanded.
var MAX_LINE_LENGTH = 100;
var OFFSET_CORRECTION = 60;
var TAB_REPLACEMENT = ' ';
// Render a numbered excerpt of error.source around error.line with a
// caret line ("----^") pointing at error.column. extraLines is the number
// of context lines shown above and below the error line.
function sourceFragment(error, extraLines) {
// Format lines[start..end) with right-aligned 1-based line numbers.
function processLines(start, end) {
return lines.slice(start, end).map(function(line, idx) {
var num = String(start + idx + 1);
while (num.length < maxNumLength) {
num = ' ' + num;
}
return num + ' |' + line;
}).join('\n');
}
var lines = error.source.split(/\r\n?|\n|\f/);
var line = error.line;
var column = error.column;
var startLine = Math.max(1, line - extraLines) - 1;
var endLine = Math.min(line + extraLines, lines.length + 1);
var maxNumLength = Math.max(4, String(endLine).length) + 1;
var cutLeft = 0;
// column correction according to replaced tab before column
column += (TAB_REPLACEMENT.length - 1) * (lines[line - 1].substr(0, column - 1).match(/\t/g) || []).length;
if (column > MAX_LINE_LENGTH) {
// shift the window left so the caret stays visible
cutLeft = column - OFFSET_CORRECTION + 3;
column = OFFSET_CORRECTION - 2;
}
for (var i = startLine; i <= endLine; i++) {
if (i >= 0 && i < lines.length) {
lines[i] = lines[i].replace(/\t/g, TAB_REPLACEMENT);
// clip the line to the window, marking cuts with an ellipsis
lines[i] =
(cutLeft > 0 && lines[i].length > cutLeft ? '\u2026' : '') +
lines[i].substr(cutLeft, MAX_LINE_LENGTH - 2) +
(lines[i].length > cutLeft + MAX_LINE_LENGTH - 1 ? '\u2026' : '');
}
}
return [
processLines(startLine, line),
new Array(column + maxNumLength + 2).join('-') + '^',
processLines(line, endLine)
].filter(Boolean).join('\n');
}
// Factory for parse errors. Returns an Error (via createCustomError)
// augmented with location info, a lazy formattedMessage getter and a
// sourceFragment(extraLines) helper.
var SyntaxError = function(message, source, offset, line, column) {
var error = createCustomError('SyntaxError', message);
error.source = source;
error.offset = offset;
error.line = line;
error.column = column;
// excerpt of the source around the error location
error.sourceFragment = function(extraLines) {
return sourceFragment(error, isNaN(extraLines) ? 0 : extraLines);
};
// computed on access so the fragment is only built when needed
Object.defineProperty(error, 'formattedMessage', {
get: function() {
return (
'Parse error: ' + error.message + '\n' +
sourceFragment(error, 2)
);
}
});
// for backward capability
error.parseError = {
offset: offset,
line: line,
column: column
};
return error;
};
module.exports = SyntaxError;

View File

@@ -1,219 +0,0 @@
var constants = require('../tokenizer/const');
var TYPE = constants.TYPE;
var NAME = constants.NAME;
var utils = require('../tokenizer/utils');
var cmpStr = utils.cmpStr;
var EOF = TYPE.EOF;
var WHITESPACE = TYPE.WhiteSpace;
var COMMENT = TYPE.Comment;
// Each offsetAndType entry packs a token's end offset into the low
// 24 bits and its type into the high 8 bits.
var OFFSET_MASK = 0x00FFFFFF;
var TYPE_SHIFT = 24;
// Stream of tokens over a tokenized source. offsetAndType holds packed
// end-offset/type pairs (see OFFSET_MASK/TYPE_SHIFT); balance maps a
// token index to its matching bracket-balance edge.
var TokenStream = function() {
this.offsetAndType = null;
this.balance = null;
// NOTE(review): reset() reads this.firstCharOffset, which is not set
// here — presumably external setup assigns it before use; confirm.
this.reset();
};
TokenStream.prototype = {
// Rewind to the state before the first token.
reset: function() {
this.eof = false;
this.tokenIndex = -1;
this.tokenType = 0;
this.tokenStart = this.firstCharOffset;
this.tokenEnd = this.firstCharOffset;
},
// Type of the token `offset` positions ahead, or EOF when out of range.
lookupType: function(offset) {
offset += this.tokenIndex;
if (offset < this.tokenCount) {
return this.offsetAndType[offset] >> TYPE_SHIFT;
}
return EOF;
},
// Start offset of the token `offset` positions ahead (i.e. the end of
// the preceding token), or the source length when out of range.
lookupOffset: function(offset) {
offset += this.tokenIndex;
if (offset < this.tokenCount) {
return this.offsetAndType[offset - 1] & OFFSET_MASK;
}
return this.source.length;
},
// Compare the source text of the token `offset` positions ahead with
// referenceStr using cmpStr; false when out of range.
lookupValue: function(offset, referenceStr) {
offset += this.tokenIndex;
if (offset < this.tokenCount) {
return cmpStr(
this.source,
this.offsetAndType[offset - 1] & OFFSET_MASK,
this.offsetAndType[offset] & OFFSET_MASK,
referenceStr
);
}
return false;
},
// Absolute start offset of the token at tokenIndex.
getTokenStart: function(tokenIndex) {
if (tokenIndex === this.tokenIndex) {
return this.tokenStart;
}
if (tokenIndex > 0) {
return tokenIndex < this.tokenCount
? this.offsetAndType[tokenIndex - 1] & OFFSET_MASK
: this.offsetAndType[this.tokenCount] & OFFSET_MASK;
}
return this.firstCharOffset;
},
// TODO: -> skipUntilBalanced
// Number of tokens from the current position up to where `mode`
// (a callback receiving type/source/offset) signals a stop (1) or a
// stop-after (2), respecting bracket balance via this.balance.
getRawLength: function(startToken, mode) {
var cursor = startToken;
var balanceEnd;
var offset = this.offsetAndType[Math.max(cursor - 1, 0)] & OFFSET_MASK;
var type;
loop:
for (; cursor < this.tokenCount; cursor++) {
balanceEnd = this.balance[cursor];
// stop scanning on balance edge that points to offset before start token
if (balanceEnd < startToken) {
break loop;
}
type = this.offsetAndType[cursor] >> TYPE_SHIFT;
// check token is stop type
switch (mode(type, this.source, offset)) {
case 1:
break loop;
case 2:
cursor++;
break loop;
default:
// fast forward to the end of balanced block
if (this.balance[balanceEnd] === cursor) {
cursor = balanceEnd;
}
offset = this.offsetAndType[cursor] & OFFSET_MASK;
}
}
return cursor - this.tokenIndex;
},
// True when the current token's balance edge lies before `pos`.
isBalanceEdge: function(pos) {
return this.balance[this.tokenIndex] < pos;
},
// True when the token (current, or `offset` ahead) is a Delim token
// whose character code equals `code`.
isDelim: function(code, offset) {
if (offset) {
return (
this.lookupType(offset) === TYPE.Delim &&
this.source.charCodeAt(this.lookupOffset(offset)) === code
);
}
return (
this.tokenType === TYPE.Delim &&
this.source.charCodeAt(this.tokenStart) === code
);
},
// Source text of the current token.
getTokenValue: function() {
return this.source.substring(this.tokenStart, this.tokenEnd);
},
getTokenLength: function() {
return this.tokenEnd - this.tokenStart;
},
// Source text from `start` up to the current token's start.
substrToCursor: function(start) {
return this.source.substring(start, this.tokenStart);
},
// Skip a run of whitespace tokens (if any) in one jump.
skipWS: function() {
for (var i = this.tokenIndex, skipTokenCount = 0; i < this.tokenCount; i++, skipTokenCount++) {
if ((this.offsetAndType[i] >> TYPE_SHIFT) !== WHITESPACE) {
break;
}
}
if (skipTokenCount > 0) {
this.skip(skipTokenCount);
}
},
// Skip whitespace and comments ("SC" = space/comment).
skipSC: function() {
while (this.tokenType === WHITESPACE || this.tokenType === COMMENT) {
this.next();
}
},
// Advance by tokenCount tokens, updating the cached token fields.
skip: function(tokenCount) {
var next = this.tokenIndex + tokenCount;
if (next < this.tokenCount) {
this.tokenIndex = next;
this.tokenStart = this.offsetAndType[next - 1] & OFFSET_MASK;
next = this.offsetAndType[next];
this.tokenType = next >> TYPE_SHIFT;
this.tokenEnd = next & OFFSET_MASK;
} else {
// past the end — let next() establish the EOF state
this.tokenIndex = this.tokenCount;
this.next();
}
},
// Advance to the next token, or enter the EOF state at the end.
next: function() {
var next = this.tokenIndex + 1;
if (next < this.tokenCount) {
this.tokenIndex = next;
this.tokenStart = this.tokenEnd;
next = this.offsetAndType[next];
this.tokenType = next >> TYPE_SHIFT;
this.tokenEnd = next & OFFSET_MASK;
} else {
this.tokenIndex = this.tokenCount;
this.eof = true;
this.tokenType = EOF;
this.tokenStart = this.tokenEnd = this.source.length;
}
},
// Invoke fn(type, start, end, index) for every token without moving
// the stream position.
forEachToken(fn) {
for (var i = 0, offset = this.firstCharOffset; i < this.tokenCount; i++) {
var start = offset;
var item = this.offsetAndType[i];
var end = item & OFFSET_MASK;
var type = item >> TYPE_SHIFT;
offset = end;
fn(type, start, end, i);
}
},
// Debug helper: materialize all tokens as readable records.
dump() {
var tokens = new Array(this.tokenCount);
this.forEachToken((type, start, end, index) => {
tokens[index] = {
idx: index,
type: NAME[type],
chunk: this.source.substring(start, end),
balance: this.balance[index]
};
});
return tokens;
}
};
module.exports = TokenStream;

View File

@@ -1,10 +0,0 @@
var MIN_SIZE = 16 * 1024;
var SafeUint32Array = typeof Uint32Array !== 'undefined' ? Uint32Array : Array; // fallback on Array when TypedArray is not supported
module.exports = function adoptBuffer(buffer, size) {
if (buffer === null || buffer.length < size) {
return new SafeUint32Array(Math.max(size + 1024, MIN_SIZE));
}
return buffer;
};

View File

@@ -1,10 +1,10 @@
var List = require('../common/List');
import { List } from '../utils/List.js';
module.exports = function createConvertors(walk) {
export function createConvertor(walk) {
return {
fromPlainObject: function(ast) {
fromPlainObject(ast) {
walk(ast, {
enter: function(node) {
enter(node) {
if (node.children && node.children instanceof List === false) {
node.children = new List().fromArray(node.children);
}
@@ -13,9 +13,9 @@ module.exports = function createConvertors(walk) {
return ast;
},
toPlainObject: function(ast) {
toPlainObject(ast) {
walk(ast, {
leave: function(node) {
leave(node) {
if (node.children && node.children instanceof List) {
node.children = node.children.toArray();
}

View File

@@ -1,3 +1,4 @@
var createConvertor = require('./create');
import { createConvertor } from './create.js';
import walker from '../walker/index.js';
module.exports = createConvertor(require('../walker'));
export default createConvertor(walker);

6
node_modules/css-tree/lib/data-patch.js generated vendored Normal file
View File

@@ -0,0 +1,6 @@
// Bridge module: loads css-tree's syntax patch JSON through createRequire,
// avoiding reliance on JSON module imports.
import { createRequire } from 'module';
const require = createRequire(import.meta.url);
const patch = require('../data/patch.json');
export default patch;

118
node_modules/css-tree/lib/data.js generated vendored Executable file
View File

@@ -0,0 +1,118 @@
import { createRequire } from 'module';
import patch from './data-patch.js';
const require = createRequire(import.meta.url);
const mdnAtrules = require('mdn-data/css/at-rules.json');
const mdnProperties = require('mdn-data/css/properties.json');
const mdnSyntaxes = require('mdn-data/css/syntaxes.json');
// Object.hasOwn ponyfill for runtimes that predate it.
const hasOwn = Object.hasOwn || ((object, property) => Object.prototype.hasOwnProperty.call(object, property));
// A patch syntax beginning with "|" extends the original syntax instead of replacing it.
const extendSyntax = /^\s*\|\s*/;
// Convert mdn-data at-rule entries into { prelude, descriptors } records
// keyed by at-rule name without the leading "@".
// - prelude: the syntax text between the at-keyword and its block,
//   or null when there is none
// - descriptors: descriptor name -> syntax string, or null when the
//   at-rule defines no descriptors
function preprocessAtrules(dict) {
    const result = Object.create(null);
    for (const [atruleName, atrule] of Object.entries(dict)) {
        let descriptors = null;
        if (atrule.descriptors) {
            descriptors = Object.create(null);
            for (const [name, descriptor] of Object.entries(atrule.descriptors)) {
                descriptors[name] = descriptor.syntax;
            }
        }
        // strip the "{ ... }" block, then capture what follows the at-keyword
        const withoutBlock = atrule.syntax.trim().replace(/\{(.|\s)+\}/, '');
        const prelude = withoutBlock.match(/^@\S+\s+([^;\{]*)/)[1].trim();
        result[atruleName.substr(1)] = {
            prelude: prelude || null,
            descriptors
        };
    }
    return result;
}
// Apply a patch to a syntax dictionary. The result maps each name to its
// syntax string. For names in both dicts: a patch syntax starting with
// "|" is appended to the original, a plain syntax replaces it, and a
// patch entry with no syntax deletes the name. Patch-only names are added
// with any leading "|" stripped.
function patchDictionary(dict, patchDict) {
const result = Object.create(null);
// copy all syntaxes for an original dict
for (const [key, value] of Object.entries(dict)) {
if (value) {
// entries may be { syntax } objects or bare syntax strings
result[key] = value.syntax || value;
}
}
// apply a patch
for (const key of Object.keys(patchDict)) {
if (hasOwn(dict, key)) {
if (patchDict[key].syntax) {
result[key] = extendSyntax.test(patchDict[key].syntax)
? result[key] + ' ' + patchDict[key].syntax.trim()
: patchDict[key].syntax;
} else {
// patch without syntax removes the entry
delete result[key];
}
} else {
if (patchDict[key].syntax) {
result[key] = patchDict[key].syntax.replace(extendSyntax, '');
}
}
}
return result;
}
// Normalize a patch descriptor map so every value is an object with a
// `syntax` property; a bare string is shorthand for { syntax: string }.
// Accepts null/undefined and returns an empty object in that case.
// NOTE(review): "Descritors" (sic) is kept for call-site compatibility.
function preprocessPatchAtrulesDescritors(declarations) {
    return Object.fromEntries(
        Object.entries(declarations || {}).map(([name, value]) => [
            name,
            typeof value === 'string' ? { syntax: value } : value
        ])
    );
}
// Apply a patch to the preprocessed at-rule dictionary. A null patch
// entry removes the at-rule; otherwise the patch may override prelude
// and/or patch the descriptor dictionary. Patch-only at-rules are added.
function patchAtrules(dict, patchDict) {
const result = {};
// copy all syntaxes for an original dict
for (const key in dict) {
if (patchDict[key] === null) {
// explicit null removes the at-rule
continue;
}
const atrulePatch = patchDict[key] || {};
result[key] = {
// an explicitly present prelude in the patch wins, even when null
prelude: key in patchDict && 'prelude' in atrulePatch
? atrulePatch.prelude
: dict[key].prelude || null,
descriptors: patchDictionary(
dict[key].descriptors || {},
preprocessPatchAtrulesDescritors(atrulePatch.descriptors)
)
};
}
// apply a patch
for (const [key, atrulePatch] of Object.entries(patchDict)) {
if (atrulePatch && !hasOwn(dict, key)) {
result[key] = {
prelude: atrulePatch.prelude || null,
descriptors: atrulePatch.descriptors
? patchDictionary({}, preprocessPatchAtrulesDescritors(atrulePatch.descriptors))
: null
};
}
}
return result;
}
// Final data set consumed by the lexer: mdn-data syntaxes, at-rules and
// properties with css-tree's patch applied on top.
export default {
types: patchDictionary(mdnSyntaxes, patch.types),
atrules: patchAtrules(preprocessAtrules(mdnAtrules), patch.atrules),
properties: patchDictionary(mdnProperties, patch.properties)
};

View File

@@ -1,14 +1,12 @@
var createCustomError = require('../utils/createCustomError');
import { createCustomError } from '../utils/create-custom-error.js';
module.exports = function SyntaxError(message, input, offset) {
var error = createCustomError('SyntaxError', message);
error.input = input;
error.offset = offset;
error.rawMessage = message;
error.message = error.rawMessage + '\n' +
' ' + error.input + '\n' +
'--' + new Array((error.offset || error.input.length) + 1).join('-') + '^';
return error;
export function SyntaxError(message, input, offset) {
return Object.assign(createCustomError('SyntaxError', message), {
input,
offset,
rawMessage: message,
message: message + '\n' +
' ' + input + '\n' +
'--' + new Array((offset || input.length) + 1).join('-') + '^'
});
};

View File

@@ -3,27 +3,29 @@ function noop(value) {
}
function generateMultiplier(multiplier) {
if (multiplier.min === 0 && multiplier.max === 0) {
return '*';
const { min, max, comma } = multiplier;
if (min === 0 && max === 0) {
return comma ? '#?' : '*';
}
if (multiplier.min === 0 && multiplier.max === 1) {
if (min === 0 && max === 1) {
return '?';
}
if (multiplier.min === 1 && multiplier.max === 0) {
return multiplier.comma ? '#' : '+';
if (min === 1 && max === 0) {
return comma ? '#' : '+';
}
if (multiplier.min === 1 && multiplier.max === 1) {
if (min === 1 && max === 1) {
return '';
}
return (
(multiplier.comma ? '#' : '') +
(multiplier.min === multiplier.max
? '{' + multiplier.min + '}'
: '{' + multiplier.min + ',' + (multiplier.max !== 0 ? multiplier.max : '') + '}'
(comma ? '#' : '') +
(min === max
? '{' + min + '}'
: '{' + min + ',' + (max !== 0 ? max : '') + '}'
)
);
}
@@ -45,20 +47,20 @@ function generateTypeOpts(node) {
}
function generateSequence(node, decorate, forceBraces, compact) {
var combinator = node.combinator === ' ' || compact ? node.combinator : ' ' + node.combinator + ' ';
var result = node.terms.map(function(term) {
return generate(term, decorate, forceBraces, compact);
}).join(combinator);
const combinator = node.combinator === ' ' || compact ? node.combinator : ' ' + node.combinator + ' ';
const result = node.terms
.map(term => internalGenerate(term, decorate, forceBraces, compact))
.join(combinator);
if (node.explicit || forceBraces) {
result = (compact || result[0] === ',' ? '[' : '[ ') + result + (compact ? ']' : ' ]');
return (compact || result[0] === ',' ? '[' : '[ ') + result + (compact ? ']' : ' ]');
}
return result;
}
function generate(node, decorate, forceBraces, compact) {
var result;
function internalGenerate(node, decorate, forceBraces, compact) {
let result;
switch (node.type) {
case 'Group':
@@ -70,10 +72,14 @@ function generate(node, decorate, forceBraces, compact) {
case 'Multiplier':
// return since node is a composition
return (
generate(node.term, decorate, forceBraces, compact) +
internalGenerate(node.term, decorate, forceBraces, compact) +
decorate(generateMultiplier(node), node)
);
case 'Boolean':
result = '<boolean-expr[' + internalGenerate(node.term, decorate, forceBraces, compact) + ']>';
break;
case 'Type':
result = '<' + node.name + (node.opts ? decorate(generateTypeOpts(node.opts), node.opts) : '') + '>';
break;
@@ -110,10 +116,10 @@ function generate(node, decorate, forceBraces, compact) {
return decorate(result, node);
}
module.exports = function(node, options) {
var decorate = noop;
var forceBraces = false;
var compact = false;
export function generate(node, options) {
let decorate = noop;
let forceBraces = false;
let compact = false;
if (typeof options === 'function') {
decorate = options;
@@ -125,5 +131,5 @@ module.exports = function(node, options) {
}
}
return generate(node, decorate, forceBraces, compact);
return internalGenerate(node, decorate, forceBraces, compact);
};

View File

@@ -1,6 +1,4 @@
module.exports = {
SyntaxError: require('./SyntaxError'),
parse: require('./parse'),
generate: require('./generate'),
walk: require('./walk')
};
export { SyntaxError } from './SyntaxError.js';
export { generate } from './generate.js';
export { parse } from './parse.js';
export { walk } from './walk.js';

View File

@@ -1,116 +1,60 @@
var Tokenizer = require('./tokenizer');
var TAB = 9;
var N = 10;
var F = 12;
var R = 13;
var SPACE = 32;
var EXCLAMATIONMARK = 33; // !
var NUMBERSIGN = 35; // #
var AMPERSAND = 38; // &
var APOSTROPHE = 39; // '
var LEFTPARENTHESIS = 40; // (
var RIGHTPARENTHESIS = 41; // )
var ASTERISK = 42; // *
var PLUSSIGN = 43; // +
var COMMA = 44; // ,
var HYPERMINUS = 45; // -
var LESSTHANSIGN = 60; // <
var GREATERTHANSIGN = 62; // >
var QUESTIONMARK = 63; // ?
var COMMERCIALAT = 64; // @
var LEFTSQUAREBRACKET = 91; // [
var RIGHTSQUAREBRACKET = 93; // ]
var LEFTCURLYBRACKET = 123; // {
var VERTICALLINE = 124; // |
var RIGHTCURLYBRACKET = 125; // }
var INFINITY = 8734; //
var NAME_CHAR = createCharMap(function(ch) {
return /[a-zA-Z0-9\-]/.test(ch);
});
var COMBINATOR_PRECEDENCE = {
import { Scanner } from './scanner.js';
const TAB = 9;
const N = 10;
const F = 12;
const R = 13;
const SPACE = 32;
const EXCLAMATIONMARK = 33; // !
const NUMBERSIGN = 35; // #
const AMPERSAND = 38; // &
const APOSTROPHE = 39; // '
const LEFTPARENTHESIS = 40; // (
const RIGHTPARENTHESIS = 41; // )
const ASTERISK = 42; // *
const PLUSSIGN = 43; // +
const COMMA = 44; // ,
const HYPERMINUS = 45; // -
const LESSTHANSIGN = 60; // <
const GREATERTHANSIGN = 62; // >
const QUESTIONMARK = 63; // ?
const COMMERCIALAT = 64; // @
const LEFTSQUAREBRACKET = 91; // [
const RIGHTSQUAREBRACKET = 93; // ]
const LEFTCURLYBRACKET = 123; // {
const VERTICALLINE = 124; // |
const RIGHTCURLYBRACKET = 125; // }
const INFINITY = 8734; // ∞
const COMBINATOR_PRECEDENCE = {
' ': 1,
'&&': 2,
'||': 3,
'|': 4
};
function createCharMap(fn) {
var array = typeof Uint32Array === 'function' ? new Uint32Array(128) : new Array(128);
for (var i = 0; i < 128; i++) {
array[i] = fn(String.fromCharCode(i)) ? 1 : 0;
}
return array;
}
function readMultiplierRange(scanner) {
let min = null;
let max = null;
function scanSpaces(tokenizer) {
return tokenizer.substringToPos(
tokenizer.findWsEnd(tokenizer.pos)
);
}
scanner.eat(LEFTCURLYBRACKET);
scanner.skipWs();
function scanWord(tokenizer) {
var end = tokenizer.pos;
min = scanner.scanNumber(scanner);
scanner.skipWs();
for (; end < tokenizer.str.length; end++) {
var code = tokenizer.str.charCodeAt(end);
if (code >= 128 || NAME_CHAR[code] === 0) {
break;
}
}
if (scanner.charCode() === COMMA) {
scanner.pos++;
scanner.skipWs();
if (tokenizer.pos === end) {
tokenizer.error('Expect a keyword');
}
return tokenizer.substringToPos(end);
}
function scanNumber(tokenizer) {
var end = tokenizer.pos;
for (; end < tokenizer.str.length; end++) {
var code = tokenizer.str.charCodeAt(end);
if (code < 48 || code > 57) {
break;
}
}
if (tokenizer.pos === end) {
tokenizer.error('Expect a number');
}
return tokenizer.substringToPos(end);
}
function scanString(tokenizer) {
var end = tokenizer.str.indexOf('\'', tokenizer.pos + 1);
if (end === -1) {
tokenizer.pos = tokenizer.str.length;
tokenizer.error('Expect an apostrophe');
}
return tokenizer.substringToPos(end + 1);
}
function readMultiplierRange(tokenizer) {
var min = null;
var max = null;
tokenizer.eat(LEFTCURLYBRACKET);
min = scanNumber(tokenizer);
if (tokenizer.charCode() === COMMA) {
tokenizer.pos++;
if (tokenizer.charCode() !== RIGHTCURLYBRACKET) {
max = scanNumber(tokenizer);
if (scanner.charCode() !== RIGHTCURLYBRACKET) {
max = scanner.scanNumber(scanner);
scanner.skipWs();
}
} else {
max = min;
}
tokenizer.eat(RIGHTCURLYBRACKET);
scanner.eat(RIGHTCURLYBRACKET);
return {
min: Number(min),
@@ -118,13 +62,13 @@ function readMultiplierRange(tokenizer) {
};
}
function readMultiplier(tokenizer) {
var range = null;
var comma = false;
function readMultiplier(scanner) {
let range = null;
let comma = false;
switch (tokenizer.charCode()) {
switch (scanner.charCode()) {
case ASTERISK:
tokenizer.pos++;
scanner.pos++;
range = {
min: 0,
@@ -134,7 +78,7 @@ function readMultiplier(tokenizer) {
break;
case PLUSSIGN:
tokenizer.pos++;
scanner.pos++;
range = {
min: 1,
@@ -144,7 +88,7 @@ function readMultiplier(tokenizer) {
break;
case QUESTIONMARK:
tokenizer.pos++;
scanner.pos++;
range = {
min: 0,
@@ -154,12 +98,22 @@ function readMultiplier(tokenizer) {
break;
case NUMBERSIGN:
tokenizer.pos++;
scanner.pos++;
comma = true;
if (tokenizer.charCode() === LEFTCURLYBRACKET) {
range = readMultiplierRange(tokenizer);
if (scanner.charCode() === LEFTCURLYBRACKET) {
range = readMultiplierRange(scanner);
} else if (scanner.charCode() === QUESTIONMARK) {
// https://www.w3.org/TR/css-values-4/#component-multipliers
// > the # and ? multipliers may be stacked as #?
// In this case just treat "#?" as a single multiplier
// { min: 0, max: 0, comma: true }
scanner.pos++;
range = {
min: 0,
max: 0
};
} else {
range = {
min: 1,
@@ -170,7 +124,7 @@ function readMultiplier(tokenizer) {
break;
case LEFTCURLYBRACKET:
range = readMultiplierRange(tokenizer);
range = readMultiplierRange(scanner);
break;
default:
@@ -179,51 +133,66 @@ function readMultiplier(tokenizer) {
return {
type: 'Multiplier',
comma: comma,
comma,
min: range.min,
max: range.max,
term: null
};
}
function maybeMultiplied(tokenizer, node) {
var multiplier = readMultiplier(tokenizer);
function maybeMultiplied(scanner, node) {
const multiplier = readMultiplier(scanner);
if (multiplier !== null) {
multiplier.term = node;
// https://www.w3.org/TR/css-values-4/#component-multipliers
// > The + and # multipliers may be stacked as +#;
// Represent "+#" as nested multipliers:
// { ...<multiplier #>,
// term: {
// ...<multipler +>,
// term: node
// }
// }
if (scanner.charCode() === NUMBERSIGN &&
scanner.charCodeAt(scanner.pos - 1) === PLUSSIGN) {
return maybeMultiplied(scanner, multiplier);
}
return multiplier;
}
return node;
}
function maybeToken(tokenizer) {
var ch = tokenizer.peek();
function maybeToken(scanner) {
const ch = scanner.peek();
if (ch === '') {
return null;
}
return {
return maybeMultiplied(scanner, {
type: 'Token',
value: ch
};
});
}
function readProperty(tokenizer) {
var name;
function readProperty(scanner) {
let name;
tokenizer.eat(LESSTHANSIGN);
tokenizer.eat(APOSTROPHE);
scanner.eat(LESSTHANSIGN);
scanner.eat(APOSTROPHE);
name = scanWord(tokenizer);
name = scanner.scanWord();
tokenizer.eat(APOSTROPHE);
tokenizer.eat(GREATERTHANSIGN);
scanner.eat(APOSTROPHE);
scanner.eat(GREATERTHANSIGN);
return maybeMultiplied(tokenizer, {
return maybeMultiplied(scanner, {
type: 'Property',
name: name
name
});
}
@@ -234,101 +203,118 @@ function readProperty(tokenizer) {
// range notation—[min,max]—within the angle brackets, after the identifying keyword,
// indicating a closed range between (and including) min and max.
// For example, <integer [0, 10]> indicates an integer between 0 and 10, inclusive.
function readTypeRange(tokenizer) {
function readTypeRange(scanner) {
// use null for Infinity to make AST format JSON serializable/deserializable
var min = null; // -Infinity
var max = null; // Infinity
var sign = 1;
let min = null; // -Infinity
let max = null; // Infinity
let sign = 1;
tokenizer.eat(LEFTSQUAREBRACKET);
scanner.eat(LEFTSQUAREBRACKET);
if (tokenizer.charCode() === HYPERMINUS) {
tokenizer.peek();
if (scanner.charCode() === HYPERMINUS) {
scanner.peek();
sign = -1;
}
if (sign == -1 && tokenizer.charCode() === INFINITY) {
tokenizer.peek();
if (sign == -1 && scanner.charCode() === INFINITY) {
scanner.peek();
} else {
min = sign * Number(scanNumber(tokenizer));
min = sign * Number(scanner.scanNumber(scanner));
if (scanner.isNameCharCode()) {
min += scanner.scanWord();
}
}
scanSpaces(tokenizer);
tokenizer.eat(COMMA);
scanSpaces(tokenizer);
scanner.skipWs();
scanner.eat(COMMA);
scanner.skipWs();
if (tokenizer.charCode() === INFINITY) {
tokenizer.peek();
if (scanner.charCode() === INFINITY) {
scanner.peek();
} else {
sign = 1;
if (tokenizer.charCode() === HYPERMINUS) {
tokenizer.peek();
if (scanner.charCode() === HYPERMINUS) {
scanner.peek();
sign = -1;
}
max = sign * Number(scanNumber(tokenizer));
max = sign * Number(scanner.scanNumber(scanner));
if (scanner.isNameCharCode()) {
max += scanner.scanWord();
}
}
tokenizer.eat(RIGHTSQUAREBRACKET);
// If no range is indicated, either by using the bracketed range notation
// or in the property description, then [−∞,∞] is assumed.
if (min === null && max === null) {
return null;
}
scanner.eat(RIGHTSQUAREBRACKET);
return {
type: 'Range',
min: min,
max: max
min,
max
};
}
function readType(tokenizer) {
var name;
var opts = null;
function readType(scanner) {
let name;
let opts = null;
tokenizer.eat(LESSTHANSIGN);
name = scanWord(tokenizer);
scanner.eat(LESSTHANSIGN);
name = scanner.scanWord();
if (tokenizer.charCode() === LEFTPARENTHESIS &&
tokenizer.nextCharCode() === RIGHTPARENTHESIS) {
tokenizer.pos += 2;
// https://drafts.csswg.org/css-values-5/#boolean
if (name === 'boolean-expr') {
scanner.eat(LEFTSQUAREBRACKET);
const implicitGroup = readImplicitGroup(scanner, RIGHTSQUAREBRACKET);
scanner.eat(RIGHTSQUAREBRACKET);
scanner.eat(GREATERTHANSIGN);
return maybeMultiplied(scanner, {
type: 'Boolean',
term: implicitGroup.terms.length === 1
? implicitGroup.terms[0]
: implicitGroup
});
}
if (scanner.charCode() === LEFTPARENTHESIS &&
scanner.nextCharCode() === RIGHTPARENTHESIS) {
scanner.pos += 2;
name += '()';
}
if (tokenizer.charCodeAt(tokenizer.findWsEnd(tokenizer.pos)) === LEFTSQUAREBRACKET) {
scanSpaces(tokenizer);
opts = readTypeRange(tokenizer);
if (scanner.charCodeAt(scanner.findWsEnd(scanner.pos)) === LEFTSQUAREBRACKET) {
scanner.skipWs();
opts = readTypeRange(scanner);
}
tokenizer.eat(GREATERTHANSIGN);
scanner.eat(GREATERTHANSIGN);
return maybeMultiplied(tokenizer, {
return maybeMultiplied(scanner, {
type: 'Type',
name: name,
opts: opts
name,
opts
});
}
function readKeywordOrFunction(tokenizer) {
var name;
function readKeywordOrFunction(scanner) {
const name = scanner.scanWord();
name = scanWord(tokenizer);
if (tokenizer.charCode() === LEFTPARENTHESIS) {
tokenizer.pos++;
if (scanner.charCode() === LEFTPARENTHESIS) {
scanner.pos++;
return {
type: 'Function',
name: name
name
};
}
return maybeMultiplied(tokenizer, {
return maybeMultiplied(scanner, {
type: 'Keyword',
name: name
name
});
}
@@ -336,21 +322,27 @@ function regroupTerms(terms, combinators) {
function createGroup(terms, combinator) {
return {
type: 'Group',
terms: terms,
combinator: combinator,
terms,
combinator,
disallowEmpty: false,
explicit: false
};
}
combinators = Object.keys(combinators).sort(function(a, b) {
return COMBINATOR_PRECEDENCE[a] - COMBINATOR_PRECEDENCE[b];
});
let combinator;
combinators = Object.keys(combinators)
.sort((a, b) => COMBINATOR_PRECEDENCE[a] - COMBINATOR_PRECEDENCE[b]);
while (combinators.length > 0) {
var combinator = combinators.shift();
for (var i = 0, subgroupStart = 0; i < terms.length; i++) {
var term = terms[i];
combinator = combinators.shift();
let i = 0;
let subgroupStart = 0;
for (; i < terms.length; i++) {
const term = terms[i];
if (term.type === 'Combinator') {
if (term.value === combinator) {
if (subgroupStart === -1) {
@@ -384,20 +376,20 @@ function regroupTerms(terms, combinators) {
return combinator;
}
function readImplicitGroup(tokenizer) {
var terms = [];
var combinators = {};
var token;
var prevToken = null;
var prevTokenPos = tokenizer.pos;
function readImplicitGroup(scanner, stopCharCode) {
const combinators = Object.create(null);
const terms = [];
let token;
let prevToken = null;
let prevTokenPos = scanner.pos;
while (token = peek(tokenizer)) {
while (scanner.charCode() !== stopCharCode && (token = peek(scanner, stopCharCode))) {
if (token.type !== 'Spaces') {
if (token.type === 'Combinator') {
// check for combinator in group beginning and double combinator sequence
if (prevToken === null || prevToken.type === 'Combinator') {
tokenizer.pos = prevTokenPos;
tokenizer.error('Unexpected combinator');
scanner.pos = prevTokenPos;
scanner.error('Unexpected combinator');
}
combinators[token.value] = true;
@@ -411,48 +403,44 @@ function readImplicitGroup(tokenizer) {
terms.push(token);
prevToken = token;
prevTokenPos = tokenizer.pos;
prevTokenPos = scanner.pos;
}
}
// check for combinator in group ending
if (prevToken !== null && prevToken.type === 'Combinator') {
tokenizer.pos -= prevTokenPos;
tokenizer.error('Unexpected combinator');
scanner.pos -= prevTokenPos;
scanner.error('Unexpected combinator');
}
return {
type: 'Group',
terms: terms,
terms,
combinator: regroupTerms(terms, combinators) || ' ',
disallowEmpty: false,
explicit: false
};
}
function readGroup(tokenizer) {
var result;
function readGroup(scanner, stopCharCode) {
let result;
tokenizer.eat(LEFTSQUAREBRACKET);
result = readImplicitGroup(tokenizer);
tokenizer.eat(RIGHTSQUAREBRACKET);
scanner.eat(LEFTSQUAREBRACKET);
result = readImplicitGroup(scanner, stopCharCode);
scanner.eat(RIGHTSQUAREBRACKET);
result.explicit = true;
if (tokenizer.charCode() === EXCLAMATIONMARK) {
tokenizer.pos++;
if (scanner.charCode() === EXCLAMATIONMARK) {
scanner.pos++;
result.disallowEmpty = true;
}
return result;
}
function peek(tokenizer) {
var code = tokenizer.charCode();
if (code < 128 && NAME_CHAR[code] === 1) {
return readKeywordOrFunction(tokenizer);
}
function peek(scanner, stopCharCode) {
let code = scanner.charCode();
switch (code) {
case RIGHTSQUAREBRACKET:
@@ -460,26 +448,24 @@ function peek(tokenizer) {
break;
case LEFTSQUAREBRACKET:
return maybeMultiplied(tokenizer, readGroup(tokenizer));
return maybeMultiplied(scanner, readGroup(scanner, stopCharCode));
case LESSTHANSIGN:
return tokenizer.nextCharCode() === APOSTROPHE
? readProperty(tokenizer)
: readType(tokenizer);
return scanner.nextCharCode() === APOSTROPHE
? readProperty(scanner)
: readType(scanner);
case VERTICALLINE:
return {
type: 'Combinator',
value: tokenizer.substringToPos(
tokenizer.nextCharCode() === VERTICALLINE
? tokenizer.pos + 2
: tokenizer.pos + 1
value: scanner.substringToPos(
scanner.pos + (scanner.nextCharCode() === VERTICALLINE ? 2 : 1)
)
};
case AMPERSAND:
tokenizer.pos++;
tokenizer.eat(AMPERSAND);
scanner.pos++;
scanner.eat(AMPERSAND);
return {
type: 'Combinator',
@@ -487,15 +473,15 @@ function peek(tokenizer) {
};
case COMMA:
tokenizer.pos++;
scanner.pos++;
return {
type: 'Comma'
};
case APOSTROPHE:
return maybeMultiplied(tokenizer, {
return maybeMultiplied(scanner, {
type: 'String',
value: scanString(tokenizer)
value: scanner.scanString()
});
case SPACE:
@@ -505,21 +491,21 @@ function peek(tokenizer) {
case F:
return {
type: 'Spaces',
value: scanSpaces(tokenizer)
value: scanner.scanSpaces()
};
case COMMERCIALAT:
code = tokenizer.nextCharCode();
code = scanner.nextCharCode();
if (code < 128 && NAME_CHAR[code] === 1) {
tokenizer.pos++;
if (scanner.isNameCharCode(code)) {
scanner.pos++;
return {
type: 'AtKeyword',
name: scanWord(tokenizer)
name: scanner.scanWord()
};
}
return maybeToken(tokenizer);
return maybeToken(scanner);
case ASTERISK:
case PLUSSIGN:
@@ -532,37 +518,35 @@ function peek(tokenizer) {
case LEFTCURLYBRACKET:
// LEFTCURLYBRACKET is allowed since mdn/data uses it w/o quoting
// check next char isn't a number, because it's likely a disjoined multiplier
code = tokenizer.nextCharCode();
code = scanner.nextCharCode();
if (code < 48 || code > 57) {
return maybeToken(tokenizer);
return maybeToken(scanner);
}
break;
default:
return maybeToken(tokenizer);
if (scanner.isNameCharCode(code)) {
return readKeywordOrFunction(scanner);
}
return maybeToken(scanner);
}
}
function parse(source) {
var tokenizer = new Tokenizer(source);
var result = readImplicitGroup(tokenizer);
export function parse(source) {
const scanner = new Scanner(source);
const result = readImplicitGroup(scanner);
if (tokenizer.pos !== source.length) {
tokenizer.error('Unexpected input');
if (scanner.pos !== source.length) {
scanner.error('Unexpected input');
}
// reduce redundant groups with single group term
if (result.terms.length === 1 && result.terms[0].type === 'Group') {
result = result.terms[0];
return result.terms[0];
}
return result;
}
// warm up parse to elimitate code branches that never execute
// fix soft deoptimizations (insufficient type feedback)
parse('[a&&<b>#|<\'c\'>*||e() f{2} /,(% g#{1,2} h{2,})]!');
module.exports = parse;
};

109
node_modules/css-tree/lib/definition-syntax/scanner.js generated vendored Normal file
View File

@@ -0,0 +1,109 @@
import { SyntaxError } from './SyntaxError.js';
const TAB = 9;
const N = 10;
const F = 12;
const R = 13;
const SPACE = 32;
const NAME_CHAR = new Uint8Array(128).map((_, idx) =>
/[a-zA-Z0-9\-]/.test(String.fromCharCode(idx)) ? 1 : 0
);
export class Scanner {
constructor(str) {
this.str = str;
this.pos = 0;
}
charCodeAt(pos) {
return pos < this.str.length ? this.str.charCodeAt(pos) : 0;
}
charCode() {
return this.charCodeAt(this.pos);
}
isNameCharCode(code = this.charCode()) {
return code < 128 && NAME_CHAR[code] === 1;
}
nextCharCode() {
return this.charCodeAt(this.pos + 1);
}
nextNonWsCode(pos) {
return this.charCodeAt(this.findWsEnd(pos));
}
skipWs() {
this.pos = this.findWsEnd(this.pos);
}
findWsEnd(pos) {
for (; pos < this.str.length; pos++) {
const code = this.str.charCodeAt(pos);
if (code !== R && code !== N && code !== F && code !== SPACE && code !== TAB) {
break;
}
}
return pos;
}
substringToPos(end) {
return this.str.substring(this.pos, this.pos = end);
}
eat(code) {
if (this.charCode() !== code) {
this.error('Expect `' + String.fromCharCode(code) + '`');
}
this.pos++;
}
peek() {
return this.pos < this.str.length ? this.str.charAt(this.pos++) : '';
}
error(message) {
throw new SyntaxError(message, this.str, this.pos);
}
scanSpaces() {
return this.substringToPos(this.findWsEnd(this.pos));
}
scanWord() {
let end = this.pos;
for (; end < this.str.length; end++) {
const code = this.str.charCodeAt(end);
if (code >= 128 || NAME_CHAR[code] === 0) {
break;
}
}
if (this.pos === end) {
this.error('Expect a keyword');
}
return this.substringToPos(end);
}
scanNumber() {
let end = this.pos;
for (; end < this.str.length; end++) {
const code = this.str.charCodeAt(end);
if (code < 48 || code > 57) {
break;
}
}
if (this.pos === end) {
this.error('Expect a number');
}
return this.substringToPos(end);
}
scanString() {
const end = this.str.indexOf('\'', this.pos + 1);
if (end === -1) {
this.pos = this.str.length;
this.error('Expect an apostrophe');
}
return this.substringToPos(end + 1);
}
};

View File

@@ -1,55 +0,0 @@
var SyntaxError = require('./SyntaxError');
var TAB = 9;
var N = 10;
var F = 12;
var R = 13;
var SPACE = 32;
var Tokenizer = function(str) {
this.str = str;
this.pos = 0;
};
Tokenizer.prototype = {
charCodeAt: function(pos) {
return pos < this.str.length ? this.str.charCodeAt(pos) : 0;
},
charCode: function() {
return this.charCodeAt(this.pos);
},
nextCharCode: function() {
return this.charCodeAt(this.pos + 1);
},
nextNonWsCode: function(pos) {
return this.charCodeAt(this.findWsEnd(pos));
},
findWsEnd: function(pos) {
for (; pos < this.str.length; pos++) {
var code = this.str.charCodeAt(pos);
if (code !== R && code !== N && code !== F && code !== SPACE && code !== TAB) {
break;
}
}
return pos;
},
substringToPos: function(end) {
return this.str.substring(this.pos, this.pos = end);
},
eat: function(code) {
if (this.charCode() !== code) {
this.error('Expect `' + String.fromCharCode(code) + '`');
}
this.pos++;
},
peek: function() {
return this.pos < this.str.length ? this.str.charAt(this.pos++) : '';
},
error: function(message) {
throw new SyntaxError(message, this.str, this.pos);
}
};
module.exports = Tokenizer;

View File

@@ -1,10 +1,10 @@
var noop = function() {};
const noop = function() {};
function ensureFunction(value) {
return typeof value === 'function' ? value : noop;
}
module.exports = function(node, options, context) {
export function walk(node, options, context) {
function walk(node) {
enter.call(context, node);
@@ -14,6 +14,7 @@ module.exports = function(node, options, context) {
break;
case 'Multiplier':
case 'Boolean':
walk(node.term);
break;
@@ -34,8 +35,8 @@ module.exports = function(node, options, context) {
leave.call(context, node);
}
var enter = noop;
var leave = noop;
let enter = noop;
let leave = noop;
if (typeof options === 'function') {
enter = options;

View File

@@ -1,50 +1,70 @@
var sourceMap = require('./sourceMap');
var hasOwnProperty = Object.prototype.hasOwnProperty;
import { tokenize, Delim, WhiteSpace } from '../tokenizer/index.js';
import { generateSourceMap } from './sourceMap.js';
import * as tokenBefore from './token-before.js';
const REVERSESOLIDUS = 0x005c; // U+005C REVERSE SOLIDUS (\)
function processChildren(node, delimeter) {
var list = node.children;
var prev = null;
if (typeof delimeter === 'function') {
let prev = null;
if (typeof delimeter !== 'function') {
list.forEach(this.node, this);
} else {
list.forEach(function(node) {
node.children.forEach(node => {
if (prev !== null) {
delimeter.call(this, prev);
}
this.node(node);
prev = node;
}, this);
});
return;
}
node.children.forEach(this.node, this);
}
module.exports = function createGenerator(config) {
function processNode(node) {
if (hasOwnProperty.call(types, node.type)) {
types[node.type].call(this, node);
} else {
throw new Error('Unknown node type: ' + node.type);
}
}
function processChunk(chunk) {
tokenize(chunk, (type, start, end) => {
this.token(type, chunk.slice(start, end));
});
}
var types = {};
export function createGenerator(config) {
const types = new Map();
if (config.node) {
for (var name in config.node) {
types[name] = config.node[name].generate;
for (let [name, item] of Object.entries(config.node)) {
const fn = item.generate || item;
if (typeof fn === 'function') {
types.set(name, item.generate || item);
}
}
return function(node, options) {
var buffer = '';
var handlers = {
children: processChildren,
node: processNode,
chunk: function(chunk) {
buffer += chunk;
let buffer = '';
let prevCode = 0;
let handlers = {
node(node) {
if (types.has(node.type)) {
types.get(node.type).call(publicApi, node);
} else {
throw new Error('Unknown node type: ' + node.type);
}
},
result: function() {
tokenBefore: tokenBefore.safe,
token(type, value) {
prevCode = this.tokenBefore(prevCode, type, value);
this.emit(value, type, false);
if (type === Delim && value.charCodeAt(0) === REVERSESOLIDUS) {
this.emit('\n', WhiteSpace, true);
}
},
emit(value) {
buffer += value;
},
result() {
return buffer;
}
};
@@ -55,10 +75,21 @@ module.exports = function createGenerator(config) {
}
if (options.sourceMap) {
handlers = sourceMap(handlers);
handlers = generateSourceMap(handlers);
}
if (options.mode in tokenBefore) {
handlers.tokenBefore = tokenBefore[options.mode];
}
}
const publicApi = {
node: (node) => handlers.node(node),
children: processChildren,
token: (type, value) => handlers.token(type, value),
tokenize: processChunk
};
handlers.node(node);
return handlers.result();

View File

@@ -1,4 +1,4 @@
var createGenerator = require('./create');
var config = require('../syntax/config/parser');
import { createGenerator } from './create.js';
import config from '../syntax/config/generator.js';
module.exports = createGenerator(config);
export default createGenerator(config);

View File

@@ -1,36 +1,33 @@
var SourceMapGenerator = require('source-map/lib/source-map-generator').SourceMapGenerator;
var trackNodes = {
Atrule: true,
Selector: true,
Declaration: true
};
import { SourceMapGenerator } from 'source-map-js/lib/source-map-generator.js';
module.exports = function generateSourceMap(handlers) {
var map = new SourceMapGenerator();
var line = 1;
var column = 0;
var generated = {
const trackNodes = new Set(['Atrule', 'Selector', 'Declaration']);
export function generateSourceMap(handlers) {
const map = new SourceMapGenerator();
const generated = {
line: 1,
column: 0
};
var original = {
const original = {
line: 0, // should be zero to add first mapping
column: 0
};
var sourceMappingActive = false;
var activatedGenerated = {
const activatedGenerated = {
line: 1,
column: 0
};
var activatedMapping = {
const activatedMapping = {
generated: activatedGenerated
};
let line = 1;
let column = 0;
let sourceMappingActive = false;
var handlersNode = handlers.node;
const origHandlersNode = handlers.node;
handlers.node = function(node) {
if (node.loc && node.loc.start && trackNodes.hasOwnProperty(node.type)) {
var nodeLine = node.loc.start.line;
var nodeColumn = node.loc.start.column - 1;
if (node.loc && node.loc.start && trackNodes.has(node.type)) {
const nodeLine = node.loc.start.line;
const nodeColumn = node.loc.start.column - 1;
if (original.line !== nodeLine ||
original.column !== nodeColumn) {
@@ -51,24 +48,24 @@ module.exports = function generateSourceMap(handlers) {
sourceMappingActive = true;
map.addMapping({
source: node.loc.source,
original: original,
generated: generated
original,
generated
});
}
}
handlersNode.call(this, node);
origHandlersNode.call(this, node);
if (sourceMappingActive && trackNodes.hasOwnProperty(node.type)) {
if (sourceMappingActive && trackNodes.has(node.type)) {
activatedGenerated.line = line;
activatedGenerated.column = column;
}
};
var handlersChunk = handlers.chunk;
handlers.chunk = function(chunk) {
for (var i = 0; i < chunk.length; i++) {
if (chunk.charCodeAt(i) === 10) { // \n
const origHandlersEmit = handlers.emit;
handlers.emit = function(value, type, auto) {
for (let i = 0; i < value.length; i++) {
if (value.charCodeAt(i) === 10) { // \n
line++;
column = 0;
} else {
@@ -76,18 +73,18 @@ module.exports = function generateSourceMap(handlers) {
}
}
handlersChunk(chunk);
origHandlersEmit(value, type, auto);
};
var handlersResult = handlers.result;
const origHandlersResult = handlers.result;
handlers.result = function() {
if (sourceMappingActive) {
map.addMapping(activatedMapping);
}
return {
css: handlersResult(),
map: map
css: origHandlersResult(),
map
};
};

182
node_modules/css-tree/lib/generator/token-before.js generated vendored Normal file
View File

@@ -0,0 +1,182 @@
import {
WhiteSpace,
Delim,
Ident,
Function as FunctionToken,
Url,
BadUrl,
AtKeyword,
Hash,
Percentage,
Dimension,
Number as NumberToken,
String as StringToken,
Colon,
LeftParenthesis,
RightParenthesis,
CDC
} from '../tokenizer/index.js';
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
const code = (type, value) => {
if (type === Delim) {
type = value;
}
if (typeof type === 'string') {
const charCode = type.charCodeAt(0);
return charCode > 0x7F ? 0x8000 : charCode << 8;
}
return type;
};
// https://www.w3.org/TR/css-syntax-3/#serialization
// The only requirement for serialization is that it must "round-trip" with parsing,
// that is, parsing the stylesheet must produce the same data structures as parsing,
// serializing, and parsing again, except for consecutive <whitespace-token>s,
// which may be collapsed into a single token.
const specPairs = [
[Ident, Ident],
[Ident, FunctionToken],
[Ident, Url],
[Ident, BadUrl],
[Ident, '-'],
[Ident, NumberToken],
[Ident, Percentage],
[Ident, Dimension],
[Ident, CDC],
[Ident, LeftParenthesis],
[AtKeyword, Ident],
[AtKeyword, FunctionToken],
[AtKeyword, Url],
[AtKeyword, BadUrl],
[AtKeyword, '-'],
[AtKeyword, NumberToken],
[AtKeyword, Percentage],
[AtKeyword, Dimension],
[AtKeyword, CDC],
[Hash, Ident],
[Hash, FunctionToken],
[Hash, Url],
[Hash, BadUrl],
[Hash, '-'],
[Hash, NumberToken],
[Hash, Percentage],
[Hash, Dimension],
[Hash, CDC],
[Dimension, Ident],
[Dimension, FunctionToken],
[Dimension, Url],
[Dimension, BadUrl],
[Dimension, '-'],
[Dimension, NumberToken],
[Dimension, Percentage],
[Dimension, Dimension],
[Dimension, CDC],
['#', Ident],
['#', FunctionToken],
['#', Url],
['#', BadUrl],
['#', '-'],
['#', NumberToken],
['#', Percentage],
['#', Dimension],
['#', CDC], // https://github.com/w3c/csswg-drafts/pull/6874
['-', Ident],
['-', FunctionToken],
['-', Url],
['-', BadUrl],
['-', '-'],
['-', NumberToken],
['-', Percentage],
['-', Dimension],
['-', CDC], // https://github.com/w3c/csswg-drafts/pull/6874
[NumberToken, Ident],
[NumberToken, FunctionToken],
[NumberToken, Url],
[NumberToken, BadUrl],
[NumberToken, NumberToken],
[NumberToken, Percentage],
[NumberToken, Dimension],
[NumberToken, '%'],
[NumberToken, CDC], // https://github.com/w3c/csswg-drafts/pull/6874
['@', Ident],
['@', FunctionToken],
['@', Url],
['@', BadUrl],
['@', '-'],
['@', CDC], // https://github.com/w3c/csswg-drafts/pull/6874
['.', NumberToken],
['.', Percentage],
['.', Dimension],
['+', NumberToken],
['+', Percentage],
['+', Dimension],
['/', '*']
];
// validate with scripts/generate-safe
const safePairs = specPairs.concat([
[Ident, Hash],
[Dimension, Hash],
[Hash, Hash],
[AtKeyword, LeftParenthesis],
[AtKeyword, StringToken],
[AtKeyword, Colon],
[Percentage, Percentage],
[Percentage, Dimension],
[Percentage, FunctionToken],
[Percentage, '-'],
[RightParenthesis, Ident],
[RightParenthesis, FunctionToken],
[RightParenthesis, Percentage],
[RightParenthesis, Dimension],
[RightParenthesis, Hash],
[RightParenthesis, '-']
]);
function createMap(pairs) {
const isWhiteSpaceRequired = new Set(
pairs.map(([prev, next]) => (code(prev) << 16 | code(next)))
);
return function(prevCode, type, value) {
const nextCode = code(type, value);
const nextCharCode = value.charCodeAt(0);
const emitWs =
(nextCharCode === HYPHENMINUS &&
type !== Ident &&
type !== FunctionToken &&
type !== CDC) ||
(nextCharCode === PLUSSIGN)
? isWhiteSpaceRequired.has(prevCode << 16 | nextCharCode << 8)
: isWhiteSpaceRequired.has(prevCode << 16 | nextCode);
if (emitWs) {
this.emit(' ', WhiteSpace, true);
}
return nextCode;
};
}
export const spec = createMap(specPairs);
export const safe = createMap(safePairs);

31
node_modules/css-tree/lib/index.js generated vendored
View File

@@ -1 +1,30 @@
module.exports = require('./syntax');
import syntax from './syntax/index.js';
export * from './version.js';
export { default as createSyntax } from './syntax/create.js';
export { List } from './utils/List.js';
export { Lexer } from './lexer/Lexer.js';
export { tokenTypes, tokenNames, TokenStream, OffsetToLocation } from './tokenizer/index.js';
export * as definitionSyntax from './definition-syntax/index.js';
export { clone } from './utils/clone.js';
export * from './utils/names.js';
export * as ident from './utils/ident.js';
export * as string from './utils/string.js';
export * as url from './utils/url.js';
export const {
tokenize,
parse,
generate,
lexer,
createLexer,
walk,
find,
findLast,
findAll,
toPlainObject,
fromPlainObject,
fork
} = syntax;

View File

@@ -1,27 +1,24 @@
var SyntaxReferenceError = require('./error').SyntaxReferenceError;
var SyntaxMatchError = require('./error').SyntaxMatchError;
var names = require('../utils/names');
var generic = require('./generic');
var parse = require('../definition-syntax/parse');
var generate = require('../definition-syntax/generate');
var walk = require('../definition-syntax/walk');
var prepareTokens = require('./prepare-tokens');
var buildMatchGraph = require('./match-graph').buildMatchGraph;
var matchAsTree = require('./match').matchAsTree;
var trace = require('./trace');
var search = require('./search');
var getStructureFromConfig = require('./structure').getStructureFromConfig;
var cssWideKeywords = buildMatchGraph('inherit | initial | unset');
var cssWideKeywordsWithExpression = buildMatchGraph('inherit | initial | unset | <-ms-legacy-expression>');
import { SyntaxReferenceError, SyntaxMatchError } from './error.js';
import * as names from '../utils/names.js';
import { cssWideKeywords } from './generic-const.js';
import { createGenericTypes } from './generic.js';
import * as units from './units.js';
import { parse, generate, walk } from '../definition-syntax/index.js';
import prepareTokens from './prepare-tokens.js';
import { buildMatchGraph } from './match-graph.js';
import { matchAsTree } from './match.js';
import * as trace from './trace.js';
import { matchFragments } from './search.js';
import { getStructureFromConfig } from './structure.js';
function dumpMapSyntax(map, compact, syntaxAsAst) {
var result = {};
const result = {};
for (var name in map) {
for (const name in map) {
if (map[name].syntax) {
result[name] = syntaxAsAst
? map[name].syntax
: generate(map[name].syntax, { compact: compact });
: generate(map[name].syntax, { compact });
}
}
@@ -46,7 +43,7 @@ function dumpAtruleMapSyntax(map, compact, syntaxAsAst) {
}
function valueHasVar(tokens) {
for (var i = 0; i < tokens.length; i++) {
for (let i = 0; i < tokens.length; i++) {
if (tokens[i].value.toLowerCase() === 'var(') {
return true;
}
@@ -55,31 +52,39 @@ function valueHasVar(tokens) {
return false;
}
function buildMatchResult(match, error, iterations) {
function syntaxHasTopLevelCommaMultiplier(syntax) {
const singleTerm = syntax.terms[0];
return (
syntax.explicit === false &&
syntax.terms.length === 1 &&
singleTerm.type === 'Multiplier' &&
singleTerm.comma === true
);
}
function buildMatchResult(matched, error, iterations) {
return {
matched: match,
iterations: iterations,
error: error,
getTrace: trace.getTrace,
isType: trace.isType,
isProperty: trace.isProperty,
isKeyword: trace.isKeyword
matched,
iterations,
error,
...trace
};
}
function matchSyntax(lexer, syntax, value, useCommon) {
var tokens = prepareTokens(value, lexer.syntax);
var result;
function matchSyntax(lexer, syntax, value, useCssWideKeywords) {
const tokens = prepareTokens(value, lexer.syntax);
let result;
if (valueHasVar(tokens)) {
return buildMatchResult(null, new Error('Matching for a tree with var() is not supported'));
}
if (useCommon) {
result = matchAsTree(tokens, lexer.valueCommonSyntax, lexer);
if (useCssWideKeywords) {
result = matchAsTree(tokens, lexer.cssWideKeywordsSyntax, lexer);
}
if (!useCommon || !result.match) {
if (!useCssWideKeywords || !result.match) {
result = matchAsTree(tokens, syntax.match, lexer);
if (!result.match) {
return buildMatchResult(
@@ -93,55 +98,66 @@ function matchSyntax(lexer, syntax, value, useCommon) {
return buildMatchResult(result.match, null, result.iterations);
}
var Lexer = function(config, syntax, structure) {
this.valueCommonSyntax = cssWideKeywords;
this.syntax = syntax;
this.generic = false;
this.atrules = {};
this.properties = {};
this.types = {};
this.structure = structure || getStructureFromConfig(config);
export class Lexer {
constructor(config, syntax, structure) {
this.cssWideKeywords = cssWideKeywords;
this.syntax = syntax;
this.generic = false;
this.units = { ...units };
this.atrules = Object.create(null);
this.properties = Object.create(null);
this.types = Object.create(null);
this.structure = structure || getStructureFromConfig(config);
if (config) {
if (config.types) {
for (var name in config.types) {
this.addType_(name, config.types[name]);
if (config) {
if (config.cssWideKeywords) {
this.cssWideKeywords = config.cssWideKeywords;
}
if (config.units) {
for (const group of Object.keys(units)) {
if (Array.isArray(config.units[group])) {
this.units[group] = config.units[group];
}
}
}
if (config.types) {
for (const [name, type] of Object.entries(config.types)) {
this.addType_(name, type);
}
}
if (config.generic) {
this.generic = true;
for (const [name, value] of Object.entries(createGenericTypes(this.units))) {
this.addType_(name, value);
}
}
if (config.atrules) {
for (const [name, atrule] of Object.entries(config.atrules)) {
this.addAtrule_(name, atrule);
}
}
if (config.properties) {
for (const [name, property] of Object.entries(config.properties)) {
this.addProperty_(name, property);
}
}
}
if (config.generic) {
this.generic = true;
for (var name in generic) {
this.addType_(name, generic[name]);
}
}
if (config.atrules) {
for (var name in config.atrules) {
this.addAtrule_(name, config.atrules[name]);
}
}
if (config.properties) {
for (var name in config.properties) {
this.addProperty_(name, config.properties[name]);
}
}
this.cssWideKeywordsSyntax = buildMatchGraph(this.cssWideKeywords.join(' | '));
}
};
Lexer.prototype = {
structure: {},
checkStructure: function(ast) {
checkStructure(ast) {
function collectWarning(node, message) {
warns.push({
node: node,
message: message
});
warns.push({ node, message });
}
var structure = this.structure;
var warns = [];
const structure = this.structure;
const warns = [];
this.syntax.walk(ast, function(node) {
if (structure.hasOwnProperty(node.type)) {
@@ -152,19 +168,21 @@ Lexer.prototype = {
});
return warns.length ? warns : false;
},
}
createDescriptor: function(syntax, type, name, parent = null) {
var ref = {
type: type,
name: name
createDescriptor(syntax, type, name, parent = null) {
const ref = {
type,
name
};
var descriptor = {
type: type,
name: name,
parent: parent,
const descriptor = {
type,
name,
parent,
serializable: typeof syntax === 'string' || (syntax && typeof syntax.type === 'string'),
syntax: null,
match: null
match: null,
matchRef: null // used for properties when a syntax referenced as <'property'> in other syntax definitions
};
if (typeof syntax === 'function') {
@@ -173,7 +191,7 @@ Lexer.prototype = {
if (typeof syntax === 'string') {
// lazy parsing on first access
Object.defineProperty(descriptor, 'syntax', {
get: function() {
get() {
Object.defineProperty(descriptor, 'syntax', {
value: parse(syntax)
});
@@ -187,7 +205,7 @@ Lexer.prototype = {
// lazy graph build on first access
Object.defineProperty(descriptor, 'match', {
get: function() {
get() {
Object.defineProperty(descriptor, 'match', {
value: buildMatchGraph(descriptor.syntax, ref)
});
@@ -195,11 +213,31 @@ Lexer.prototype = {
return descriptor.match;
}
});
if (type === 'Property') {
Object.defineProperty(descriptor, 'matchRef', {
get() {
const syntax = descriptor.syntax;
const value = syntaxHasTopLevelCommaMultiplier(syntax)
? buildMatchGraph({
...syntax,
terms: [syntax.terms[0].term]
}, ref)
: null;
Object.defineProperty(descriptor, 'matchRef', {
value
});
return value;
}
});
}
}
return descriptor;
},
addAtrule_: function(name, syntax) {
}
addAtrule_(name, syntax) {
if (!syntax) {
return;
}
@@ -209,63 +247,64 @@ Lexer.prototype = {
name: name,
prelude: syntax.prelude ? this.createDescriptor(syntax.prelude, 'AtrulePrelude', name) : null,
descriptors: syntax.descriptors
? Object.keys(syntax.descriptors).reduce((res, descName) => {
res[descName] = this.createDescriptor(syntax.descriptors[descName], 'AtruleDescriptor', descName, name);
return res;
}, {})
? Object.keys(syntax.descriptors).reduce(
(map, descName) => {
map[descName] = this.createDescriptor(syntax.descriptors[descName], 'AtruleDescriptor', descName, name);
return map;
},
Object.create(null)
)
: null
};
},
addProperty_: function(name, syntax) {
}
addProperty_(name, syntax) {
if (!syntax) {
return;
}
this.properties[name] = this.createDescriptor(syntax, 'Property', name);
},
addType_: function(name, syntax) {
}
addType_(name, syntax) {
if (!syntax) {
return;
}
this.types[name] = this.createDescriptor(syntax, 'Type', name);
}
if (syntax === generic['-ms-legacy-expression']) {
this.valueCommonSyntax = cssWideKeywordsWithExpression;
}
},
checkAtruleName: function(atruleName) {
checkAtruleName(atruleName) {
if (!this.getAtrule(atruleName)) {
return new SyntaxReferenceError('Unknown at-rule', '@' + atruleName);
}
},
checkAtrulePrelude: function(atruleName, prelude) {
let error = this.checkAtruleName(atruleName);
}
checkAtrulePrelude(atruleName, prelude) {
const error = this.checkAtruleName(atruleName);
if (error) {
return error;
}
var atrule = this.getAtrule(atruleName);
const atrule = this.getAtrule(atruleName);
if (!atrule.prelude && prelude) {
return new SyntaxError('At-rule `@' + atruleName + '` should not contain a prelude');
}
if (atrule.prelude && !prelude) {
return new SyntaxError('At-rule `@' + atruleName + '` should contain a prelude');
if (!matchSyntax(this, atrule.prelude, '', false).matched) {
return new SyntaxError('At-rule `@' + atruleName + '` should contain a prelude');
}
}
},
checkAtruleDescriptorName: function(atruleName, descriptorName) {
let error = this.checkAtruleName(atruleName);
}
checkAtruleDescriptorName(atruleName, descriptorName) {
const error = this.checkAtruleName(atruleName);
if (error) {
return error;
}
var atrule = this.getAtrule(atruleName);
var descriptor = names.keyword(descriptorName);
const atrule = this.getAtrule(atruleName);
const descriptor = names.keyword(descriptorName);
if (!atrule.descriptors) {
return new SyntaxError('At-rule `@' + atruleName + '` has no known descriptors');
@@ -275,71 +314,71 @@ Lexer.prototype = {
!atrule.descriptors[descriptor.basename]) {
return new SyntaxReferenceError('Unknown at-rule descriptor', descriptorName);
}
},
checkPropertyName: function(propertyName) {
var property = names.property(propertyName);
// don't match syntax for a custom property
if (property.custom) {
return new Error('Lexer matching doesn\'t applicable for custom properties');
}
}
checkPropertyName(propertyName) {
if (!this.getProperty(propertyName)) {
return new SyntaxReferenceError('Unknown property', propertyName);
}
},
}
matchAtrulePrelude: function(atruleName, prelude) {
var error = this.checkAtrulePrelude(atruleName, prelude);
matchAtrulePrelude(atruleName, prelude) {
const error = this.checkAtrulePrelude(atruleName, prelude);
if (error) {
return buildMatchResult(null, error);
}
if (!prelude) {
const atrule = this.getAtrule(atruleName);
if (!atrule.prelude) {
return buildMatchResult(null, null);
}
return matchSyntax(this, this.getAtrule(atruleName).prelude, prelude, false);
},
matchAtruleDescriptor: function(atruleName, descriptorName, value) {
var error = this.checkAtruleDescriptorName(atruleName, descriptorName);
return matchSyntax(this, atrule.prelude, prelude || '', false);
}
matchAtruleDescriptor(atruleName, descriptorName, value) {
const error = this.checkAtruleDescriptorName(atruleName, descriptorName);
if (error) {
return buildMatchResult(null, error);
}
var atrule = this.getAtrule(atruleName);
var descriptor = names.keyword(descriptorName);
const atrule = this.getAtrule(atruleName);
const descriptor = names.keyword(descriptorName);
return matchSyntax(this, atrule.descriptors[descriptor.name] || atrule.descriptors[descriptor.basename], value, false);
},
matchDeclaration: function(node) {
}
matchDeclaration(node) {
if (node.type !== 'Declaration') {
return buildMatchResult(null, new Error('Not a Declaration node'));
}
return this.matchProperty(node.property, node.value);
},
matchProperty: function(propertyName, value) {
var error = this.checkPropertyName(propertyName);
}
matchProperty(propertyName, value) {
// don't match syntax for a custom property at the moment
if (names.property(propertyName).custom) {
return buildMatchResult(null, new Error('Lexer matching doesn\'t applicable for custom properties'));
}
const error = this.checkPropertyName(propertyName);
if (error) {
return buildMatchResult(null, error);
}
return matchSyntax(this, this.getProperty(propertyName), value, true);
},
matchType: function(typeName, value) {
var typeSyntax = this.getType(typeName);
}
matchType(typeName, value) {
const typeSyntax = this.getType(typeName);
if (!typeSyntax) {
return buildMatchResult(null, new SyntaxReferenceError('Unknown type', typeName));
}
return matchSyntax(this, typeSyntax, value, false);
},
match: function(syntax, value) {
}
match(syntax, value) {
if (typeof syntax !== 'string' && (!syntax || !syntax.type)) {
return buildMatchResult(null, new SyntaxReferenceError('Bad syntax'));
}
@@ -349,118 +388,124 @@ Lexer.prototype = {
}
return matchSyntax(this, syntax, value, false);
},
}
findValueFragments: function(propertyName, value, type, name) {
return search.matchFragments(this, value, this.matchProperty(propertyName, value), type, name);
},
findDeclarationValueFragments: function(declaration, type, name) {
return search.matchFragments(this, declaration.value, this.matchDeclaration(declaration), type, name);
},
findAllFragments: function(ast, type, name) {
var result = [];
findValueFragments(propertyName, value, type, name) {
return matchFragments(this, value, this.matchProperty(propertyName, value), type, name);
}
findDeclarationValueFragments(declaration, type, name) {
return matchFragments(this, declaration.value, this.matchDeclaration(declaration), type, name);
}
findAllFragments(ast, type, name) {
const result = [];
this.syntax.walk(ast, {
visit: 'Declaration',
enter: function(declaration) {
enter: (declaration) => {
result.push.apply(result, this.findDeclarationValueFragments(declaration, type, name));
}.bind(this)
}
});
return result;
},
}
getAtrule: function(atruleName, fallbackBasename = true) {
var atrule = names.keyword(atruleName);
var atruleEntry = atrule.vendor && fallbackBasename
getAtrule(atruleName, fallbackBasename = true) {
const atrule = names.keyword(atruleName);
const atruleEntry = atrule.vendor && fallbackBasename
? this.atrules[atrule.name] || this.atrules[atrule.basename]
: this.atrules[atrule.name];
return atruleEntry || null;
},
getAtrulePrelude: function(atruleName, fallbackBasename = true) {
}
getAtrulePrelude(atruleName, fallbackBasename = true) {
const atrule = this.getAtrule(atruleName, fallbackBasename);
return atrule && atrule.prelude || null;
},
getAtruleDescriptor: function(atruleName, name) {
}
getAtruleDescriptor(atruleName, name) {
return this.atrules.hasOwnProperty(atruleName) && this.atrules.declarators
? this.atrules[atruleName].declarators[name] || null
: null;
},
getProperty: function(propertyName, fallbackBasename = true) {
var property = names.property(propertyName);
var propertyEntry = property.vendor && fallbackBasename
}
getProperty(propertyName, fallbackBasename = true) {
const property = names.property(propertyName);
const propertyEntry = property.vendor && fallbackBasename
? this.properties[property.name] || this.properties[property.basename]
: this.properties[property.name];
return propertyEntry || null;
},
getType: function(name) {
return this.types.hasOwnProperty(name) ? this.types[name] : null;
},
}
getType(name) {
return hasOwnProperty.call(this.types, name) ? this.types[name] : null;
}
validate() {
function syntaxRef(name, isType) {
return isType ? `<${name}>` : `<'${name}'>`;
}
validate: function() {
function validate(syntax, name, broken, descriptor) {
if (broken.hasOwnProperty(name)) {
return broken[name];
if (broken.has(name)) {
return broken.get(name);
}
broken[name] = false;
broken.set(name, false);
if (descriptor.syntax !== null) {
walk(descriptor.syntax, function(node) {
if (node.type !== 'Type' && node.type !== 'Property') {
return;
}
var map = node.type === 'Type' ? syntax.types : syntax.properties;
var brokenMap = node.type === 'Type' ? brokenTypes : brokenProperties;
const map = node.type === 'Type' ? syntax.types : syntax.properties;
const brokenMap = node.type === 'Type' ? brokenTypes : brokenProperties;
if (!map.hasOwnProperty(node.name) || validate(syntax, node.name, brokenMap, map[node.name])) {
broken[name] = true;
if (!hasOwnProperty.call(map, node.name)) {
errors.push(`${syntaxRef(name, broken === brokenTypes)} used missed syntax definition ${syntaxRef(node.name, node.type === 'Type')}`);
broken.set(name, true);
} else if (validate(syntax, node.name, brokenMap, map[node.name])) {
errors.push(`${syntaxRef(name, broken === brokenTypes)} used broken syntax definition ${syntaxRef(node.name, node.type === 'Type')}`);
broken.set(name, true);
}
}, this);
}
}
var brokenTypes = {};
var brokenProperties = {};
const errors = [];
let brokenTypes = new Map();
let brokenProperties = new Map();
for (var key in this.types) {
for (const key in this.types) {
validate(this, key, brokenTypes, this.types[key]);
}
for (var key in this.properties) {
for (const key in this.properties) {
validate(this, key, brokenProperties, this.properties[key]);
}
brokenTypes = Object.keys(brokenTypes).filter(function(name) {
return brokenTypes[name];
});
brokenProperties = Object.keys(brokenProperties).filter(function(name) {
return brokenProperties[name];
});
const brokenTypesArray = [...brokenTypes.keys()].filter(name => brokenTypes.get(name));
const brokenPropertiesArray = [...brokenProperties.keys()].filter(name => brokenProperties.get(name));
if (brokenTypes.length || brokenProperties.length) {
if (brokenTypesArray.length || brokenPropertiesArray.length) {
return {
types: brokenTypes,
properties: brokenProperties
errors,
types: brokenTypesArray,
properties: brokenPropertiesArray
};
}
return null;
},
dump: function(syntaxAsAst, pretty) {
}
dump(syntaxAsAst, pretty) {
return {
generic: this.generic,
cssWideKeywords: this.cssWideKeywords,
units: this.units,
types: dumpMapSyntax(this.types, !pretty, syntaxAsAst),
properties: dumpMapSyntax(this.properties, !pretty, syntaxAsAst),
atrules: dumpAtruleMapSyntax(this.atrules, !pretty, syntaxAsAst)
};
},
toString: function() {
}
toString() {
return JSON.stringify(this.dump());
}
};
module.exports = Lexer;

View File

@@ -1,5 +1,6 @@
const createCustomError = require('../utils/createCustomError');
const generate = require('../definition-syntax/generate');
import { createCustomError } from '../utils/create-custom-error.js';
import { generate } from '../definition-syntax/generate.js';
const defaultLoc = { offset: 0, line: 1, column: 1 };
function locateMismatch(matchResult, node) {
@@ -80,7 +81,7 @@ function buildLoc({ offset, line, column }, extra) {
return loc;
}
const SyntaxReferenceError = function(type, referenceName) {
export const SyntaxReferenceError = function(type, referenceName) {
const error = createCustomError(
'SyntaxReferenceError',
type + (referenceName ? ' `' + referenceName + '`' : '')
@@ -91,7 +92,7 @@ const SyntaxReferenceError = function(type, referenceName) {
return error;
};
const SyntaxMatchError = function(message, syntax, node, matchResult) {
export const SyntaxMatchError = function(message, syntax, node, matchResult) {
const error = createCustomError('SyntaxMatchError', message);
const {
css,
@@ -120,8 +121,3 @@ const SyntaxMatchError = function(message, syntax, node, matchResult) {
return error;
};
module.exports = {
SyntaxReferenceError,
SyntaxMatchError
};

View File

@@ -1,25 +1,26 @@
var isDigit = require('../tokenizer').isDigit;
var cmpChar = require('../tokenizer').cmpChar;
var TYPE = require('../tokenizer').TYPE;
import {
isDigit,
cmpChar,
Delim,
WhiteSpace,
Comment,
Ident,
Number as NumberToken,
Dimension
} from '../tokenizer/index.js';
var DELIM = TYPE.Delim;
var WHITESPACE = TYPE.WhiteSpace;
var COMMENT = TYPE.Comment;
var IDENT = TYPE.Ident;
var NUMBER = TYPE.Number;
var DIMENSION = TYPE.Dimension;
var PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
var HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
var N = 0x006E; // U+006E LATIN SMALL LETTER N (n)
var DISALLOW_SIGN = true;
var ALLOW_SIGN = false;
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
const N = 0x006E; // U+006E LATIN SMALL LETTER N (n)
const DISALLOW_SIGN = true;
const ALLOW_SIGN = false;
function isDelim(token, code) {
return token !== null && token.type === DELIM && token.value.charCodeAt(0) === code;
return token !== null && token.type === Delim && token.value.charCodeAt(0) === code;
}
function skipSC(token, offset, getNextToken) {
while (token !== null && (token.type === WHITESPACE || token.type === COMMENT)) {
while (token !== null && (token.type === WhiteSpace || token.type === Comment)) {
token = getNextToken(++offset);
}
@@ -31,7 +32,7 @@ function checkInteger(token, valueOffset, disallowSign, offset) {
return 0;
}
var code = token.value.charCodeAt(valueOffset);
const code = token.value.charCodeAt(valueOffset);
if (code === PLUSSIGN || code === HYPHENMINUS) {
if (disallowSign) {
@@ -54,8 +55,8 @@ function checkInteger(token, valueOffset, disallowSign, offset) {
// ... <signed-integer>
// ... ['+' | '-'] <signless-integer>
function consumeB(token, offset_, getNextToken) {
var sign = false;
var offset = skipSC(token, offset_, getNextToken);
let sign = false;
let offset = skipSC(token, offset_, getNextToken);
token = getNextToken(offset);
@@ -63,13 +64,13 @@ function consumeB(token, offset_, getNextToken) {
return offset_;
}
if (token.type !== NUMBER) {
if (token.type !== NumberToken) {
if (isDelim(token, PLUSSIGN) || isDelim(token, HYPHENMINUS)) {
sign = true;
offset = skipSC(getNextToken(++offset), offset, getNextToken);
token = getNextToken(offset);
if (token === null && token.type !== NUMBER) {
if (token === null || token.type !== NumberToken) {
return 0;
}
} else {
@@ -78,7 +79,7 @@ function consumeB(token, offset_, getNextToken) {
}
if (!sign) {
var code = token.value.charCodeAt(0);
const code = token.value.charCodeAt(0);
if (code !== PLUSSIGN && code !== HYPHENMINUS) {
// Number sign is expected
return 0;
@@ -89,16 +90,16 @@ function consumeB(token, offset_, getNextToken) {
}
// An+B microsyntax https://www.w3.org/TR/css-syntax-3/#anb
module.exports = function anPlusB(token, getNextToken) {
export default function anPlusB(token, getNextToken) {
/* eslint-disable brace-style*/
var offset = 0;
let offset = 0;
if (!token) {
return 0;
}
// <integer>
if (token.type === NUMBER) {
if (token.type === NumberToken) {
return checkInteger(token, 0, ALLOW_SIGN, offset); // b
}
@@ -107,7 +108,7 @@ module.exports = function anPlusB(token, getNextToken) {
// -n ['+' | '-'] <signless-integer>
// -n- <signless-integer>
// <dashndashdigit-ident>
else if (token.type === IDENT && token.value.charCodeAt(0) === HYPHENMINUS) {
else if (token.type === Ident && token.value.charCodeAt(0) === HYPHENMINUS) {
// expect 1st char is N
if (!cmpChar(token.value, 1, N)) {
return 0;
@@ -146,9 +147,9 @@ module.exports = function anPlusB(token, getNextToken) {
// '+'? n ['+' | '-'] <signless-integer>
// '+'? n- <signless-integer>
// '+'? <ndashdigit-ident>
else if (token.type === IDENT || (isDelim(token, PLUSSIGN) && getNextToken(offset + 1).type === IDENT)) {
else if (token.type === Ident || (isDelim(token, PLUSSIGN) && getNextToken(offset + 1).type === Ident)) {
// just ignore a plus
if (token.type !== IDENT) {
if (token.type !== Ident) {
token = getNextToken(++offset);
}
@@ -189,11 +190,12 @@ module.exports = function anPlusB(token, getNextToken) {
// <n-dimension>
// <n-dimension> <signed-integer>
// <n-dimension> ['+' | '-'] <signless-integer>
else if (token.type === DIMENSION) {
var code = token.value.charCodeAt(0);
var sign = code === PLUSSIGN || code === HYPHENMINUS ? 1 : 0;
else if (token.type === Dimension) {
let code = token.value.charCodeAt(0);
let sign = code === PLUSSIGN || code === HYPHENMINUS ? 1 : 0;
let i = sign;
for (var i = sign; i < token.value.length; i++) {
for (; i < token.value.length; i++) {
if (!isDigit(token.value.charCodeAt(i))) {
break;
}

8
node_modules/css-tree/lib/lexer/generic-const.js generated vendored Normal file
View File

@@ -0,0 +1,8 @@
// https://drafts.csswg.org/css-cascade-5/
export const cssWideKeywords = [
'initial',
'inherit',
'unset',
'revert',
'revert-layer'
];

View File

@@ -1,18 +1,19 @@
var isHexDigit = require('../tokenizer').isHexDigit;
var cmpChar = require('../tokenizer').cmpChar;
var TYPE = require('../tokenizer').TYPE;
import {
isHexDigit,
cmpChar,
Ident,
Delim,
Number as NumberToken,
Dimension
} from '../tokenizer/index.js';
var IDENT = TYPE.Ident;
var DELIM = TYPE.Delim;
var NUMBER = TYPE.Number;
var DIMENSION = TYPE.Dimension;
var PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
var HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
var QUESTIONMARK = 0x003F; // U+003F QUESTION MARK (?)
var U = 0x0075; // U+0075 LATIN SMALL LETTER U (u)
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
const QUESTIONMARK = 0x003F; // U+003F QUESTION MARK (?)
const U = 0x0075; // U+0075 LATIN SMALL LETTER U (u)
function isDelim(token, code) {
return token !== null && token.type === DELIM && token.value.charCodeAt(0) === code;
return token !== null && token.type === Delim && token.value.charCodeAt(0) === code;
}
function startsWith(token, code) {
@@ -20,15 +21,14 @@ function startsWith(token, code) {
}
function hexSequence(token, offset, allowDash) {
for (var pos = offset, hexlen = 0; pos < token.value.length; pos++) {
var code = token.value.charCodeAt(pos);
let hexlen = 0;
for (let pos = offset; pos < token.value.length; pos++) {
const code = token.value.charCodeAt(pos);
if (code === HYPHENMINUS && allowDash && hexlen !== 0) {
if (hexSequence(token, offset + hexlen + 1, false) > 0) {
return 6; // dissallow following question marks
}
return 0; // dash at the ending of a hex sequence is not allowed
hexSequence(token, offset + hexlen + 1, false);
return 6; // dissallow following question marks
}
if (!isHexDigit(code)) {
@@ -78,11 +78,11 @@ function withQuestionMarkSequence(consumed, length, getNextToken) {
// u <number-token> <dimension-token> |
// u <number-token> <number-token> |
// u '+' '?'+
module.exports = function urange(token, getNextToken) {
var length = 0;
export default function urange(token, getNextToken) {
let length = 0;
// should start with `u` or `U`
if (token === null || token.type !== IDENT || !cmpChar(token.value, 0, U)) {
if (token === null || token.type !== Ident || !cmpChar(token.value, 0, U)) {
return 0;
}
@@ -99,7 +99,7 @@ module.exports = function urange(token, getNextToken) {
return 0;
}
if (token.type === IDENT) {
if (token.type === Ident) {
// u '+' <ident-token> '?'*
return withQuestionMarkSequence(hexSequence(token, 0, true), ++length, getNextToken);
}
@@ -116,12 +116,8 @@ module.exports = function urange(token, getNextToken) {
// u <number-token> '?'*
// u <number-token> <dimension-token>
// u <number-token> <number-token>
if (token.type === NUMBER) {
if (!startsWith(token, PLUSSIGN)) {
return 0;
}
var consumedHexLength = hexSequence(token, 1, true);
if (token.type === NumberToken) {
const consumedHexLength = hexSequence(token, 1, true);
if (consumedHexLength === 0) {
return 0;
}
@@ -132,7 +128,7 @@ module.exports = function urange(token, getNextToken) {
return length;
}
if (token.type === DIMENSION || token.type === NUMBER) {
if (token.type === Dimension || token.type === NumberToken) {
// u <number-token> <dimension-token>
// u <number-token> <number-token>
if (!startsWith(token, HYPHENMINUS) || !hexSequence(token, 1, false)) {
@@ -147,11 +143,7 @@ module.exports = function urange(token, getNextToken) {
}
// u <dimension-token> '?'*
if (token.type === DIMENSION) {
if (!startsWith(token, PLUSSIGN)) {
return 0;
}
if (token.type === Dimension) {
return withQuestionMarkSequence(hexSequence(token, 1, true), ++length, getNextToken);
}

View File

@@ -1,83 +1,49 @@
var tokenizer = require('../tokenizer');
var isIdentifierStart = tokenizer.isIdentifierStart;
var isHexDigit = tokenizer.isHexDigit;
var isDigit = tokenizer.isDigit;
var cmpStr = tokenizer.cmpStr;
var consumeNumber = tokenizer.consumeNumber;
var TYPE = tokenizer.TYPE;
var anPlusB = require('./generic-an-plus-b');
var urange = require('./generic-urange');
import { cssWideKeywords } from './generic-const.js';
import anPlusB from './generic-an-plus-b.js';
import urange from './generic-urange.js';
import {
isIdentifierStart,
isHexDigit,
isDigit,
cmpStr,
consumeNumber,
var cssWideKeywords = ['unset', 'initial', 'inherit'];
var calcFunctionNames = ['calc(', '-moz-calc(', '-webkit-calc('];
Ident,
Function as FunctionToken,
AtKeyword,
Hash,
String as StringToken,
BadString,
Url,
BadUrl,
Delim,
Number as NumberToken,
Percentage,
Dimension,
WhiteSpace,
CDO,
CDC,
Colon,
Semicolon,
Comma,
LeftSquareBracket,
RightSquareBracket,
LeftParenthesis,
RightParenthesis,
LeftCurlyBracket,
RightCurlyBracket
} from '../tokenizer/index.js';
// https://www.w3.org/TR/css-values-3/#lengths
var LENGTH = {
// absolute length units
'px': true,
'mm': true,
'cm': true,
'in': true,
'pt': true,
'pc': true,
'q': true,
// relative length units
'em': true,
'ex': true,
'ch': true,
'rem': true,
// viewport-percentage lengths
'vh': true,
'vw': true,
'vmin': true,
'vmax': true,
'vm': true
};
var ANGLE = {
'deg': true,
'grad': true,
'rad': true,
'turn': true
};
var TIME = {
's': true,
'ms': true
};
var FREQUENCY = {
'hz': true,
'khz': true
};
// https://www.w3.org/TR/css-values-3/#resolution (https://drafts.csswg.org/css-values/#resolution)
var RESOLUTION = {
'dpi': true,
'dpcm': true,
'dppx': true,
'x': true // https://github.com/w3c/csswg-drafts/issues/461
};
// https://drafts.csswg.org/css-grid/#fr-unit
var FLEX = {
'fr': true
};
// https://www.w3.org/TR/css3-speech/#mixing-props-voice-volume
var DECIBEL = {
'db': true
};
// https://www.w3.org/TR/css3-speech/#voice-props-voice-pitch
var SEMITONES = {
'st': true
};
const calcFunctionNames = ['calc(', '-moz-calc(', '-webkit-calc('];
const balancePair = new Map([
[FunctionToken, RightParenthesis],
[LeftParenthesis, RightParenthesis],
[LeftSquareBracket, RightSquareBracket],
[LeftCurlyBracket, RightCurlyBracket]
]);
// safe char code getter
function charCode(str, index) {
function charCodeAt(str, index) {
return index < str.length ? str.charCodeAt(index) : 0;
}
@@ -86,7 +52,7 @@ function eqStr(actual, expected) {
}
function eqStrAny(actual, expected) {
for (var i = 0; i < expected.length; i++) {
for (let i = 0; i < expected.length; i++) {
if (eqStr(actual, expected[i])) {
return true;
}
@@ -102,14 +68,14 @@ function isPostfixIeHack(str, offset) {
}
return (
str.charCodeAt(offset) === 0x005C && // U+005C REVERSE SOLIDUS (\)
isDigit(str.charCodeAt(offset + 1))
charCodeAt(str, offset) === 0x005C && // U+005C REVERSE SOLIDUS (\)
isDigit(charCodeAt(str, offset + 1))
);
}
function outOfRange(opts, value, numEnd) {
if (opts && opts.type === 'Range') {
var num = Number(
const num = Number(
numEnd !== undefined && numEnd !== value.length
? value.substr(0, numEnd)
: value
@@ -119,11 +85,15 @@ function outOfRange(opts, value, numEnd) {
return true;
}
if (opts.min !== null && num < opts.min) {
// FIXME: when opts.min is a string it's a dimension, skip a range validation
// for now since it requires a type covertation which is not implmented yet
if (opts.min !== null && num < opts.min && typeof opts.min !== 'string') {
return true;
}
if (opts.max !== null && num > opts.max) {
// FIXME: when opts.max is a string it's a dimension, skip a range validation
// for now since it requires a type covertation which is not implmented yet
if (opts.max !== null && num > opts.max && typeof opts.max !== 'string') {
return true;
}
}
@@ -132,16 +102,40 @@ function outOfRange(opts, value, numEnd) {
}
function consumeFunction(token, getNextToken) {
var startIdx = token.index;
var length = 0;
let balanceCloseType = 0;
let balanceStash = [];
let length = 0;
// balanced token consuming
scan:
do {
length++;
switch (token.type) {
case RightCurlyBracket:
case RightParenthesis:
case RightSquareBracket:
if (token.type !== balanceCloseType) {
break scan;
}
if (token.balance <= startIdx) {
break;
balanceCloseType = balanceStash.pop();
if (balanceStash.length === 0) {
length++;
break scan;
}
break;
case FunctionToken:
case LeftParenthesis:
case LeftSquareBracket:
case LeftCurlyBracket:
balanceStash.push(balanceCloseType);
balanceCloseType = balancePair.get(token.type);
break;
}
length++;
} while (token = getNextToken(length));
return length;
@@ -156,7 +150,7 @@ function calc(next) {
return 0;
}
if (token.type === TYPE.Function && eqStrAny(token.value, calcFunctionNames)) {
if (token.type === FunctionToken && eqStrAny(token.value, calcFunctionNames)) {
return consumeFunction(token, getNextToken);
}
@@ -174,18 +168,6 @@ function tokenType(expectedTokenType) {
};
}
function func(name) {
name = name + '(';
return function(token, getNextToken) {
if (token !== null && eqStr(token.value, name)) {
return consumeFunction(token, getNextToken);
}
return 0;
};
}
// =========================
// Complex types
//
@@ -198,11 +180,11 @@ function func(name) {
//
// See also: https://developer.mozilla.org/en-US/docs/Web/CSS/custom-ident
function customIdent(token) {
if (token === null || token.type !== TYPE.Ident) {
if (token === null || token.type !== Ident) {
return 0;
}
var name = token.value.toLowerCase();
const name = token.value.toLowerCase();
// The CSS-wide keywords are not valid <custom-ident>s
if (eqStrAny(name, cssWideKeywords)) {
@@ -223,19 +205,34 @@ function customIdent(token) {
return 1;
}
// https://drafts.csswg.org/css-variables/#typedef-custom-property-name
// A custom property is any property whose name starts with two dashes (U+002D HYPHEN-MINUS), like --foo.
// The <custom-property-name> production corresponds to this: its defined as any valid identifier
// that starts with two dashes, except -- itself, which is reserved for future use by CSS.
// NOTE: Current implementation treat `--` as a valid name since most (all?) major browsers treat it as valid.
function customPropertyName(token) {
// ... defined as any valid identifier
if (token === null || token.type !== TYPE.Ident) {
// https://drafts.csswg.org/css-values-4/#dashed-idents
// The <dashed-ident> production is a <custom-ident>, with all the case-sensitivity that implies,
// with the additional restriction that it must start with two dashes (U+002D HYPHEN-MINUS).
function dashedIdent(token) {
if (token === null || token.type !== Ident) {
return 0;
}
// ... that starts with two dashes (U+002D HYPHEN-MINUS)
if (charCode(token.value, 0) !== 0x002D || charCode(token.value, 1) !== 0x002D) {
// ... must start with two dashes (U+002D HYPHEN-MINUS)
if (charCodeAt(token.value, 0) !== 0x002D || charCodeAt(token.value, 1) !== 0x002D) {
return 0;
}
return 1;
}
// https://drafts.csswg.org/css-variables/#typedef-custom-property-name
// A custom property is any property whose name starts with two dashes (U+002D HYPHEN-MINUS), like --foo.
// The <custom-property-name> production corresponds to this: its defined as any <dashed-ident>
// (a valid identifier that starts with two dashes), except -- itself, which is reserved for future use by CSS.
function customPropertyName(token) {
// ... its defined as any <dashed-ident>
if (!dashedIdent(token)) {
return 0;
}
// ... except -- itself, which is reserved for future use by CSS
if (token.value === '--') {
return 0;
}
@@ -247,19 +244,19 @@ function customPropertyName(token) {
// In other words, a hex color is written as a hash character, "#", followed by some number of digits 0-9 or
// letters a-f (the case of the letters doesnt matter - #00ff00 is identical to #00FF00).
function hexColor(token) {
if (token === null || token.type !== TYPE.Hash) {
if (token === null || token.type !== Hash) {
return 0;
}
var length = token.value.length;
const length = token.value.length;
// valid values (length): #rgb (4), #rgba (5), #rrggbb (7), #rrggbbaa (9)
if (length !== 4 && length !== 5 && length !== 7 && length !== 9) {
return 0;
}
for (var i = 1; i < length; i++) {
if (!isHexDigit(token.value.charCodeAt(i))) {
for (let i = 1; i < length; i++) {
if (!isHexDigit(charCodeAt(token.value, i))) {
return 0;
}
}
@@ -268,11 +265,11 @@ function hexColor(token) {
}
function idSelector(token) {
if (token === null || token.type !== TYPE.Hash) {
if (token === null || token.type !== Hash) {
return 0;
}
if (!isIdentifierStart(charCode(token.value, 1), charCode(token.value, 2), charCode(token.value, 3))) {
if (!isIdentifierStart(charCodeAt(token.value, 1), charCodeAt(token.value, 2), charCodeAt(token.value, 3))) {
return 0;
}
@@ -286,61 +283,57 @@ function declarationValue(token, getNextToken) {
return 0;
}
var length = 0;
var level = 0;
var startIdx = token.index;
let balanceCloseType = 0;
let balanceStash = [];
let length = 0;
// The <declaration-value> production matches any sequence of one or more tokens,
// so long as the sequence ...
// so long as the sequence does not contain ...
scan:
do {
switch (token.type) {
// ... does not contain <bad-string-token>, <bad-url-token>,
case TYPE.BadString:
case TYPE.BadUrl:
// ... <bad-string-token>, <bad-url-token>,
case BadString:
case BadUrl:
break scan;
// ... unmatched <)-token>, <]-token>, or <}-token>,
case TYPE.RightCurlyBracket:
case TYPE.RightParenthesis:
case TYPE.RightSquareBracket:
if (token.balance > token.index || token.balance < startIdx) {
case RightCurlyBracket:
case RightParenthesis:
case RightSquareBracket:
if (token.type !== balanceCloseType) {
break scan;
}
level--;
balanceCloseType = balanceStash.pop();
break;
// ... or top-level <semicolon-token> tokens
case TYPE.Semicolon:
if (level === 0) {
case Semicolon:
if (balanceCloseType === 0) {
break scan;
}
break;
// ... or <delim-token> tokens with a value of "!"
case TYPE.Delim:
if (token.value === '!' && level === 0) {
case Delim:
if (balanceCloseType === 0 && token.value === '!') {
break scan;
}
break;
case TYPE.Function:
case TYPE.LeftParenthesis:
case TYPE.LeftSquareBracket:
case TYPE.LeftCurlyBracket:
level++;
case FunctionToken:
case LeftParenthesis:
case LeftSquareBracket:
case LeftCurlyBracket:
balanceStash.push(balanceCloseType);
balanceCloseType = balancePair.get(token.type);
break;
}
length++;
// until balance closing
if (token.balance <= startIdx) {
break;
}
} while (token = getNextToken(length));
return length;
@@ -355,8 +348,9 @@ function anyValue(token, getNextToken) {
return 0;
}
var startIdx = token.index;
var length = 0;
let balanceCloseType = 0;
let balanceStash = [];
let length = 0;
// The <any-value> production matches any sequence of one or more tokens,
// so long as the sequence ...
@@ -364,27 +358,31 @@ function anyValue(token, getNextToken) {
do {
switch (token.type) {
// ... does not contain <bad-string-token>, <bad-url-token>,
case TYPE.BadString:
case TYPE.BadUrl:
case BadString:
case BadUrl:
break scan;
// ... unmatched <)-token>, <]-token>, or <}-token>,
case TYPE.RightCurlyBracket:
case TYPE.RightParenthesis:
case TYPE.RightSquareBracket:
if (token.balance > token.index || token.balance < startIdx) {
case RightCurlyBracket:
case RightParenthesis:
case RightSquareBracket:
if (token.type !== balanceCloseType) {
break scan;
}
balanceCloseType = balanceStash.pop();
break;
case FunctionToken:
case LeftParenthesis:
case LeftSquareBracket:
case LeftCurlyBracket:
balanceStash.push(balanceCloseType);
balanceCloseType = balancePair.get(token.type);
break;
}
length++;
// until balance closing
if (token.balance <= startIdx) {
break;
}
} while (token = getNextToken(length));
return length;
@@ -395,22 +393,26 @@ function anyValue(token, getNextToken) {
//
function dimension(type) {
if (type) {
type = new Set(type);
}
return function(token, getNextToken, opts) {
if (token === null || token.type !== TYPE.Dimension) {
if (token === null || token.type !== Dimension) {
return 0;
}
var numberEnd = consumeNumber(token.value, 0);
const numberEnd = consumeNumber(token.value, 0);
// check unit
if (type !== null) {
// check for IE postfix hack, i.e. 123px\0 or 123px\9
var reverseSolidusOffset = token.value.indexOf('\\', numberEnd);
var unit = reverseSolidusOffset === -1 || !isPostfixIeHack(token.value, reverseSolidusOffset)
const reverseSolidusOffset = token.value.indexOf('\\', numberEnd);
const unit = reverseSolidusOffset === -1 || !isPostfixIeHack(token.value, reverseSolidusOffset)
? token.value.substr(numberEnd)
: token.value.substring(numberEnd, reverseSolidusOffset);
if (type.hasOwnProperty(unit.toLowerCase()) === false) {
if (type.has(unit.toLowerCase()) === false) {
return 0;
}
}
@@ -432,7 +434,7 @@ function dimension(type) {
// https://drafts.csswg.org/css-values-4/#percentages
function percentage(token, getNextToken, opts) {
// ... corresponds to the <percentage-token> production
if (token === null || token.type !== TYPE.Percentage) {
if (token === null || token.type !== Percentage) {
return 0;
}
@@ -460,7 +462,7 @@ function zero(next) {
}
return function(token, getNextToken, opts) {
if (token !== null && token.type === TYPE.Number) {
if (token !== null && token.type === NumberToken) {
if (Number(token.value) === 0) {
return 1;
}
@@ -479,8 +481,8 @@ function number(token, getNextToken, opts) {
return 0;
}
var numberEnd = consumeNumber(token.value, 0);
var isNumber = numberEnd === token.value.length;
const numberEnd = consumeNumber(token.value, 0);
const isNumber = numberEnd === token.value.length;
if (!isNumber && !isPostfixIeHack(token.value, numberEnd)) {
return 0;
}
@@ -497,17 +499,17 @@ function number(token, getNextToken, opts) {
// https://drafts.csswg.org/css-values-4/#integers
function integer(token, getNextToken, opts) {
// ... corresponds to a subset of the <number-token> production
if (token === null || token.type !== TYPE.Number) {
if (token === null || token.type !== NumberToken) {
return 0;
}
// The first digit of an integer may be immediately preceded by `-` or `+` to indicate the integers sign.
var i = token.value.charCodeAt(0) === 0x002B || // U+002B PLUS SIGN (+)
token.value.charCodeAt(0) === 0x002D ? 1 : 0; // U+002D HYPHEN-MINUS (-)
let i = charCodeAt(token.value, 0) === 0x002B || // U+002B PLUS SIGN (+)
charCodeAt(token.value, 0) === 0x002D ? 1 : 0; // U+002D HYPHEN-MINUS (-)
// When written literally, an integer is one or more decimal digits 0 through 9 ...
for (; i < token.value.length; i++) {
if (!isDigit(token.value.charCodeAt(i))) {
if (!isDigit(charCodeAt(token.value, i))) {
return 0;
}
}
@@ -520,57 +522,39 @@ function integer(token, getNextToken, opts) {
return 1;
}
module.exports = {
// token types
'ident-token': tokenType(TYPE.Ident),
'function-token': tokenType(TYPE.Function),
'at-keyword-token': tokenType(TYPE.AtKeyword),
'hash-token': tokenType(TYPE.Hash),
'string-token': tokenType(TYPE.String),
'bad-string-token': tokenType(TYPE.BadString),
'url-token': tokenType(TYPE.Url),
'bad-url-token': tokenType(TYPE.BadUrl),
'delim-token': tokenType(TYPE.Delim),
'number-token': tokenType(TYPE.Number),
'percentage-token': tokenType(TYPE.Percentage),
'dimension-token': tokenType(TYPE.Dimension),
'whitespace-token': tokenType(TYPE.WhiteSpace),
'CDO-token': tokenType(TYPE.CDO),
'CDC-token': tokenType(TYPE.CDC),
'colon-token': tokenType(TYPE.Colon),
'semicolon-token': tokenType(TYPE.Semicolon),
'comma-token': tokenType(TYPE.Comma),
'[-token': tokenType(TYPE.LeftSquareBracket),
']-token': tokenType(TYPE.RightSquareBracket),
'(-token': tokenType(TYPE.LeftParenthesis),
')-token': tokenType(TYPE.RightParenthesis),
'{-token': tokenType(TYPE.LeftCurlyBracket),
'}-token': tokenType(TYPE.RightCurlyBracket),
// token types
export const tokenTypes = {
'ident-token': tokenType(Ident),
'function-token': tokenType(FunctionToken),
'at-keyword-token': tokenType(AtKeyword),
'hash-token': tokenType(Hash),
'string-token': tokenType(StringToken),
'bad-string-token': tokenType(BadString),
'url-token': tokenType(Url),
'bad-url-token': tokenType(BadUrl),
'delim-token': tokenType(Delim),
'number-token': tokenType(NumberToken),
'percentage-token': tokenType(Percentage),
'dimension-token': tokenType(Dimension),
'whitespace-token': tokenType(WhiteSpace),
'CDO-token': tokenType(CDO),
'CDC-token': tokenType(CDC),
'colon-token': tokenType(Colon),
'semicolon-token': tokenType(Semicolon),
'comma-token': tokenType(Comma),
'[-token': tokenType(LeftSquareBracket),
']-token': tokenType(RightSquareBracket),
'(-token': tokenType(LeftParenthesis),
')-token': tokenType(RightParenthesis),
'{-token': tokenType(LeftCurlyBracket),
'}-token': tokenType(RightCurlyBracket)
};
// token production types
export const productionTypes = {
// token type aliases
'string': tokenType(TYPE.String),
'ident': tokenType(TYPE.Ident),
// complex types
'custom-ident': customIdent,
'custom-property-name': customPropertyName,
'hex-color': hexColor,
'id-selector': idSelector, // element( <id-selector> )
'an-plus-b': anPlusB,
'urange': urange,
'declaration-value': declarationValue,
'any-value': anyValue,
// dimensions
'dimension': calc(dimension(null)),
'angle': calc(dimension(ANGLE)),
'decibel': calc(dimension(DECIBEL)),
'frequency': calc(dimension(FREQUENCY)),
'flex': calc(dimension(FLEX)),
'length': calc(zero(dimension(LENGTH))),
'resolution': calc(dimension(RESOLUTION)),
'semitones': calc(dimension(SEMITONES)),
'time': calc(dimension(TIME)),
'string': tokenType(StringToken),
'ident': tokenType(Ident),
// percentage
'percentage': calc(percentage),
@@ -580,6 +564,59 @@ module.exports = {
'number': calc(number),
'integer': calc(integer),
// old IE stuff
'-ms-legacy-expression': func('expression')
// complex types
'custom-ident': customIdent,
'dashed-ident': dashedIdent,
'custom-property-name': customPropertyName,
'hex-color': hexColor,
'id-selector': idSelector, // element( <id-selector> )
'an-plus-b': anPlusB,
'urange': urange,
'declaration-value': declarationValue,
'any-value': anyValue
};
export const unitGroups = [
'length',
'angle',
'time',
'frequency',
'resolution',
'flex',
'decibel',
'semitones'
];
// dimensions types depend on units set
export function createDemensionTypes(units) {
const {
angle,
decibel,
frequency,
flex,
length,
resolution,
semitones,
time
} = units || {};
return {
'dimension': calc(dimension(null)),
'angle': calc(dimension(angle)),
'decibel': calc(dimension(decibel)),
'frequency': calc(dimension(frequency)),
'flex': calc(dimension(flex)),
'length': calc(zero(dimension(length))),
'resolution': calc(dimension(resolution)),
'semitones': calc(dimension(semitones)),
'time': calc(dimension(time))
};
}
export function createGenericTypes(units) {
return {
...tokenTypes,
...productionTypes,
...createDemensionTypes(units)
};
};

View File

@@ -1,3 +1 @@
module.exports = {
Lexer: require('./Lexer')
};
export { Lexer } from './Lexer.js';

View File

@@ -1,10 +1,11 @@
var parse = require('../definition-syntax/parse');
import { parse } from '../definition-syntax/parse.js';
var MATCH = { type: 'Match' };
var MISMATCH = { type: 'Mismatch' };
var DISALLOW_EMPTY = { type: 'DisallowEmpty' };
var LEFTPARENTHESIS = 40; // (
var RIGHTPARENTHESIS = 41; // )
export const MATCH = { type: 'Match' };
export const MISMATCH = { type: 'Mismatch' };
export const DISALLOW_EMPTY = { type: 'DisallowEmpty' };
const LEFTPARENTHESIS = 40; // (
const RIGHTPARENTHESIS = 41; // )
function createCondition(match, thenBranch, elseBranch) {
// reduce node count
@@ -23,7 +24,7 @@ function createCondition(match, thenBranch, elseBranch) {
return {
type: 'If',
match: match,
match,
then: thenBranch,
else: elseBranch
};
@@ -46,9 +47,38 @@ function isEnumCapatible(term) {
);
}
function groupNode(terms, combinator = ' ', explicit = false) {
return {
type: 'Group',
terms,
combinator,
disallowEmpty: false,
explicit
};
}
function replaceTypeInGraph(node, replacements, visited = new Set()) {
if (!visited.has(node)) {
visited.add(node);
switch (node.type) {
case 'If':
node.match = replaceTypeInGraph(node.match, replacements, visited);
node.then = replaceTypeInGraph(node.then, replacements, visited);
node.else = replaceTypeInGraph(node.else, replacements, visited);
break;
case 'Type':
return replacements[node.name] || node;
}
}
return node;
}
function buildGroupMatchGraph(combinator, terms, atLeastOneTermMatched) {
switch (combinator) {
case ' ':
case ' ': {
// Juxtaposing components means that all of them must occur, in the given order.
//
// a b c
@@ -60,10 +90,10 @@ function buildGroupMatchGraph(combinator, terms, atLeastOneTermMatched) {
// else MISMATCH
// else MISMATCH
// else MISMATCH
var result = MATCH;
let result = MATCH;
for (var i = terms.length - 1; i >= 0; i--) {
var term = terms[i];
for (let i = terms.length - 1; i >= 0; i--) {
const term = terms[i];
result = createCondition(
term,
@@ -73,8 +103,9 @@ function buildGroupMatchGraph(combinator, terms, atLeastOneTermMatched) {
};
return result;
}
case '|':
case '|': {
// A bar (|) separates two or more alternatives: exactly one of them must occur.
//
// a | b | c
@@ -87,11 +118,11 @@ function buildGroupMatchGraph(combinator, terms, atLeastOneTermMatched) {
// then MATCH
// else MISMATCH
var result = MISMATCH;
var map = null;
let result = MISMATCH;
let map = null;
for (var i = terms.length - 1; i >= 0; i--) {
var term = terms[i];
for (let i = terms.length - 1; i >= 0; i--) {
let term = terms[i];
// reduce sequence of keywords into a Enum
if (isEnumCapatible(term)) {
@@ -100,7 +131,7 @@ function buildGroupMatchGraph(combinator, terms, atLeastOneTermMatched) {
result = createCondition(
{
type: 'Enum',
map: map
map
},
MATCH,
result
@@ -108,7 +139,7 @@ function buildGroupMatchGraph(combinator, terms, atLeastOneTermMatched) {
}
if (map !== null) {
var key = (isFunctionType(term.name) ? term.name.slice(0, -1) : term.name).toLowerCase();
const key = (isFunctionType(term.name) ? term.name.slice(0, -1) : term.name).toLowerCase();
if (key in map === false) {
map[key] = term;
continue;
@@ -127,8 +158,9 @@ function buildGroupMatchGraph(combinator, terms, atLeastOneTermMatched) {
};
return result;
}
case '&&':
case '&&': {
// A double ampersand (&&) separates two or more components,
// all of which must occur, in any order.
@@ -137,7 +169,7 @@ function buildGroupMatchGraph(combinator, terms, atLeastOneTermMatched) {
if (terms.length > 5) {
return {
type: 'MatchOnce',
terms: terms,
terms,
all: true
};
}
@@ -165,11 +197,11 @@ function buildGroupMatchGraph(combinator, terms, atLeastOneTermMatched) {
// then MATCH
// else MISMATCH
// else MISMATCH
var result = MISMATCH;
let result = MISMATCH;
for (var i = terms.length - 1; i >= 0; i--) {
var term = terms[i];
var thenClause;
for (let i = terms.length - 1; i >= 0; i--) {
const term = terms[i];
let thenClause;
if (terms.length > 1) {
thenClause = buildGroupMatchGraph(
@@ -191,8 +223,9 @@ function buildGroupMatchGraph(combinator, terms, atLeastOneTermMatched) {
};
return result;
}
case '||':
case '||': {
// A double bar (||) separates two or more options:
// one or more of them must occur, in any order.
@@ -201,7 +234,7 @@ function buildGroupMatchGraph(combinator, terms, atLeastOneTermMatched) {
if (terms.length > 5) {
return {
type: 'MatchOnce',
terms: terms,
terms,
all: false
};
}
@@ -229,11 +262,11 @@ function buildGroupMatchGraph(combinator, terms, atLeastOneTermMatched) {
// then MATCH
// else MATCH
// else MISMATCH
var result = atLeastOneTermMatched ? MATCH : MISMATCH;
let result = atLeastOneTermMatched ? MATCH : MISMATCH;
for (var i = terms.length - 1; i >= 0; i--) {
var term = terms[i];
var thenClause;
for (let i = terms.length - 1; i >= 0; i--) {
const term = terms[i];
let thenClause;
if (terms.length > 1) {
thenClause = buildGroupMatchGraph(
@@ -255,12 +288,13 @@ function buildGroupMatchGraph(combinator, terms, atLeastOneTermMatched) {
};
return result;
}
}
}
function buildMultiplierMatchGraph(node) {
var result = MATCH;
var matchTerm = buildMatchGraph(node.term);
let result = MATCH;
let matchTerm = buildMatchGraphInternal(node.term);
if (node.max === 0) {
// disable repeating of empty match to prevent infinite loop
@@ -293,7 +327,7 @@ function buildMultiplierMatchGraph(node) {
}
} else {
// create a match node chain for [min .. max] interval with optional matches
for (var i = node.min || 1; i <= node.max; i++) {
for (let i = node.min || 1; i <= node.max; i++) {
if (node.comma && result !== MATCH) {
result = createCondition(
{ type: 'Comma', syntax: node },
@@ -323,7 +357,7 @@ function buildMultiplierMatchGraph(node) {
);
} else {
// create a match node chain to collect [0 ... min - 1] required matches
for (var i = 0; i < node.min - 1; i++) {
for (let i = 0; i < node.min - 1; i++) {
if (node.comma && result !== MATCH) {
result = createCondition(
{ type: 'Comma', syntax: node },
@@ -343,7 +377,7 @@ function buildMultiplierMatchGraph(node) {
return result;
}
function buildMatchGraph(node) {
function buildMatchGraphInternal(node) {
if (typeof node === 'function') {
return {
type: 'Generic',
@@ -352,10 +386,10 @@ function buildMatchGraph(node) {
}
switch (node.type) {
case 'Group':
var result = buildGroupMatchGraph(
case 'Group': {
let result = buildGroupMatchGraph(
node.combinator,
node.terms.map(buildMatchGraph),
node.terms.map(buildMatchGraphInternal),
false
);
@@ -368,10 +402,53 @@ function buildMatchGraph(node) {
}
return result;
}
case 'Multiplier':
return buildMultiplierMatchGraph(node);
// https://drafts.csswg.org/css-values-5/#boolean
case 'Boolean': {
const term = buildMatchGraphInternal(node.term);
// <boolean-expr[ <test> ]> = not <boolean-expr-group> | <boolean-expr-group> [ [ and <boolean-expr-group> ]* | [ or <boolean-expr-group> ]* ]
const matchNode = buildMatchGraphInternal(groupNode([
groupNode([
{ type: 'Keyword', name: 'not' },
{ type: 'Type', name: '!boolean-group' }
]),
groupNode([
{ type: 'Type', name: '!boolean-group' },
groupNode([
{ type: 'Multiplier', comma: false, min: 0, max: 0, term: groupNode([
{ type: 'Keyword', name: 'and' },
{ type: 'Type', name: '!boolean-group' }
]) },
{ type: 'Multiplier', comma: false, min: 0, max: 0, term: groupNode([
{ type: 'Keyword', name: 'or' },
{ type: 'Type', name: '!boolean-group' }
]) }
], '|')
])
], '|'));
// <boolean-expr-group> = <test> | ( <boolean-expr[ <test> ]> ) | <general-enclosed>
const booleanGroup = buildMatchGraphInternal(
groupNode([
{ type: 'Type', name: '!term' },
groupNode([
{ type: 'Token', value: '(' },
{ type: 'Type', name: '!self' },
{ type: 'Token', value: ')' }
]),
{ type: 'Type', name: 'general-enclosed' }
], '|')
);
replaceTypeInGraph(booleanGroup, { '!term': term, '!self': matchNode });
replaceTypeInGraph(matchNode, { '!boolean-group': booleanGroup });
return matchNode;
}
case 'Type':
case 'Property':
return {
@@ -436,20 +513,15 @@ function buildMatchGraph(node) {
}
}
module.exports = {
MATCH: MATCH,
MISMATCH: MISMATCH,
DISALLOW_EMPTY: DISALLOW_EMPTY,
buildMatchGraph: function(syntaxTree, ref) {
if (typeof syntaxTree === 'string') {
syntaxTree = parse(syntaxTree);
}
return {
type: 'MatchGraph',
match: buildMatchGraph(syntaxTree),
syntax: ref || null,
source: syntaxTree
};
export function buildMatchGraph(syntaxTree, ref) {
if (typeof syntaxTree === 'string') {
syntaxTree = parse(syntaxTree);
}
};
return {
type: 'MatchGraph',
match: buildMatchGraphInternal(syntaxTree),
syntax: ref || null,
source: syntaxTree
};
}

View File

@@ -1,26 +1,23 @@
var hasOwnProperty = Object.prototype.hasOwnProperty;
var matchGraph = require('./match-graph');
var MATCH = matchGraph.MATCH;
var MISMATCH = matchGraph.MISMATCH;
var DISALLOW_EMPTY = matchGraph.DISALLOW_EMPTY;
var TYPE = require('../tokenizer/const').TYPE;
import { MATCH, MISMATCH, DISALLOW_EMPTY } from './match-graph.js';
import * as TYPE from '../tokenizer/types.js';
var STUB = 0;
var TOKEN = 1;
var OPEN_SYNTAX = 2;
var CLOSE_SYNTAX = 3;
const { hasOwnProperty } = Object.prototype;
const STUB = 0;
const TOKEN = 1;
const OPEN_SYNTAX = 2;
const CLOSE_SYNTAX = 3;
var EXIT_REASON_MATCH = 'Match';
var EXIT_REASON_MISMATCH = 'Mismatch';
var EXIT_REASON_ITERATION_LIMIT = 'Maximum iteration number exceeded (please fill an issue on https://github.com/csstree/csstree/issues)';
const EXIT_REASON_MATCH = 'Match';
const EXIT_REASON_MISMATCH = 'Mismatch';
const EXIT_REASON_ITERATION_LIMIT = 'Maximum iteration number exceeded (please fill an issue on https://github.com/csstree/csstree/issues)';
var ITERATION_LIMIT = 15000;
var totalIterationCount = 0;
const ITERATION_LIMIT = 15000;
export let totalIterationCount = 0;
function reverseList(list) {
var prev = null;
var next = null;
var item = list;
let prev = null;
let next = null;
let item = list;
while (item !== null) {
next = item.prev;
@@ -37,9 +34,9 @@ function areStringsEqualCaseInsensitive(testStr, referenceStr) {
return false;
}
for (var i = 0; i < testStr.length; i++) {
var testCode = testStr.charCodeAt(i);
var referenceCode = referenceStr.charCodeAt(i);
for (let i = 0; i < testStr.length; i++) {
const referenceCode = referenceStr.charCodeAt(i);
let testCode = testStr.charCodeAt(i);
// testCode.toLowerCase() for U+0041 LATIN CAPITAL LETTER A (A) .. U+005A LATIN CAPITAL LETTER Z (Z).
if (testCode >= 0x0041 && testCode <= 0x005A) {
@@ -88,7 +85,7 @@ function isCommaContextEnd(token) {
token.type === TYPE.RightParenthesis ||
token.type === TYPE.RightSquareBracket ||
token.type === TYPE.RightCurlyBracket ||
token.type === TYPE.Delim
(token.type === TYPE.Delim && token.value === '/')
);
}
@@ -101,27 +98,27 @@ function internalMatch(tokens, state, syntaxes) {
}
function getNextToken(offset) {
var nextIndex = tokenIndex + offset;
const nextIndex = tokenIndex + offset;
return nextIndex < tokens.length ? tokens[nextIndex] : null;
}
function stateSnapshotFromSyntax(nextState, prev) {
return {
nextState: nextState,
matchStack: matchStack,
syntaxStack: syntaxStack,
thenStack: thenStack,
tokenIndex: tokenIndex,
prev: prev
nextState,
matchStack,
syntaxStack,
thenStack,
tokenIndex,
prev
};
}
function pushThenStack(nextState) {
thenStack = {
nextState: nextState,
matchStack: matchStack,
syntaxStack: syntaxStack,
nextState,
matchStack,
syntaxStack,
prev: thenStack
};
}
@@ -134,7 +131,7 @@ function internalMatch(tokens, state, syntaxes) {
matchStack = {
type: TOKEN,
syntax: state.syntax,
token: token,
token,
prev: matchStack
};
@@ -176,22 +173,22 @@ function internalMatch(tokens, state, syntaxes) {
syntaxStack = syntaxStack.prev;
}
var syntaxStack = null;
var thenStack = null;
var elseStack = null;
let syntaxStack = null;
let thenStack = null;
let elseStack = null;
// null stashing allowed, nothing stashed
// false stashing disabled, nothing stashed
// anithing else fail stashable syntaxes, some syntax stashed
var syntaxStash = null;
let syntaxStash = null;
var iterationCount = 0; // count iterations and prevent infinite loop
var exitReason = null;
let iterationCount = 0; // count iterations and prevent infinite loop
let exitReason = null;
var token = null;
var tokenIndex = -1;
var longestMatch = 0;
var matchStack = {
let token = null;
let tokenIndex = -1;
let longestMatch = 0;
let matchStack = {
type: STUB,
syntax: null,
token: null,
@@ -202,7 +199,7 @@ function internalMatch(tokens, state, syntaxes) {
while (exitReason === null && ++iterationCount < ITERATION_LIMIT) {
// function mapList(list, fn) {
// var result = [];
// const result = [];
// while (list) {
// result.unshift(fn(list));
// list = list.prev;
@@ -314,8 +311,8 @@ function internalMatch(tokens, state, syntaxes) {
};
break;
case 'MatchOnceBuffer':
var terms = state.syntax.terms;
case 'MatchOnceBuffer': {
const terms = state.syntax.terms;
if (state.index === terms.length) {
// no matches at all or it's required all terms to be matched
@@ -336,7 +333,7 @@ function internalMatch(tokens, state, syntaxes) {
}
for (; state.index < terms.length; state.index++) {
var matchFlag = 1 << state.index;
const matchFlag = 1 << state.index;
if ((state.mask & matchFlag) === 0) {
// IMPORTANT: else stack push must go first,
@@ -354,6 +351,7 @@ function internalMatch(tokens, state, syntaxes) {
}
}
break;
}
case 'AddMatchOnce':
state = {
@@ -366,7 +364,7 @@ function internalMatch(tokens, state, syntaxes) {
case 'Enum':
if (token !== null) {
var name = token.value.toLowerCase();
let name = token.value.toLowerCase();
// drop \0 and \9 hack from keyword name
if (name.indexOf('\\') !== -1) {
@@ -382,9 +380,9 @@ function internalMatch(tokens, state, syntaxes) {
state = MISMATCH;
break;
case 'Generic':
var opts = syntaxStack !== null ? syntaxStack.opts : null;
var lastTokenIndex = tokenIndex + Math.floor(state.fn(token, getNextToken, opts));
case 'Generic': {
const opts = syntaxStack !== null ? syntaxStack.opts : null;
const lastTokenIndex = tokenIndex + Math.floor(state.fn(token, getNextToken, opts));
if (!isNaN(lastTokenIndex) && lastTokenIndex > tokenIndex) {
while (tokenIndex < lastTokenIndex) {
@@ -397,11 +395,12 @@ function internalMatch(tokens, state, syntaxes) {
}
break;
}
case 'Type':
case 'Property':
var syntaxDict = state.type === 'Type' ? 'types' : 'properties';
var dictSyntax = hasOwnProperty.call(syntaxes, syntaxDict) ? syntaxes[syntaxDict][state.name] : null;
case 'Property': {
const syntaxDict = state.type === 'Type' ? 'types' : 'properties';
const dictSyntax = hasOwnProperty.call(syntaxes, syntaxDict) ? syntaxes[syntaxDict][state.name] : null;
if (!dictSyntax || !dictSyntax.match) {
throw new Error(
@@ -414,7 +413,7 @@ function internalMatch(tokens, state, syntaxes) {
// stash a syntax for types with low priority
if (syntaxStash !== false && token !== null && state.type === 'Type') {
var lowPriorityMatching =
const lowPriorityMatching =
// https://drafts.csswg.org/css-values-4/#custom-idents
// When parsing positionally-ambiguous keywords in a property value, a <custom-ident> production
// can only claim the keyword if no other unfulfilled production can claim it.
@@ -436,14 +435,15 @@ function internalMatch(tokens, state, syntaxes) {
}
openSyntax();
state = dictSyntax.match;
state = dictSyntax.matchRef || dictSyntax.match;
break;
}
case 'Keyword':
var name = state.name;
case 'Keyword': {
const name = state.name;
if (token !== null) {
var keywordName = token.value;
let keywordName = token.value;
// drop \0 and \9 hack from keyword name
if (keywordName.indexOf('\\') !== -1) {
@@ -459,6 +459,7 @@ function internalMatch(tokens, state, syntaxes) {
state = MISMATCH;
break;
}
case 'AtKeyword':
case 'Function':
@@ -496,9 +497,10 @@ function internalMatch(tokens, state, syntaxes) {
break;
case 'String':
var string = '';
let string = '';
let lastTokenIndex = tokenIndex;
for (var lastTokenIndex = tokenIndex; lastTokenIndex < tokens.length && string.length < state.value.length; lastTokenIndex++) {
for (; lastTokenIndex < tokens.length && string.length < state.value.length; lastTokenIndex++) {
string += tokens[lastTokenIndex].value;
}
@@ -539,27 +541,24 @@ function internalMatch(tokens, state, syntaxes) {
}
return {
tokens: tokens,
tokens,
reason: exitReason,
iterations: iterationCount,
match: matchStack,
longestMatch: longestMatch
longestMatch
};
}
function matchAsList(tokens, matchGraph, syntaxes) {
var matchResult = internalMatch(tokens, matchGraph, syntaxes || {});
export function matchAsList(tokens, matchGraph, syntaxes) {
const matchResult = internalMatch(tokens, matchGraph, syntaxes || {});
if (matchResult.match !== null) {
var item = reverseList(matchResult.match).prev;
let item = reverseList(matchResult.match).prev;
matchResult.match = [];
while (item !== null) {
switch (item.type) {
case STUB:
break;
case OPEN_SYNTAX:
case CLOSE_SYNTAX:
matchResult.match.push({
@@ -583,19 +582,19 @@ function matchAsList(tokens, matchGraph, syntaxes) {
return matchResult;
}
function matchAsTree(tokens, matchGraph, syntaxes) {
var matchResult = internalMatch(tokens, matchGraph, syntaxes || {});
export function matchAsTree(tokens, matchGraph, syntaxes) {
const matchResult = internalMatch(tokens, matchGraph, syntaxes || {});
if (matchResult.match === null) {
return matchResult;
}
var item = matchResult.match;
var host = matchResult.match = {
let item = matchResult.match;
let host = matchResult.match = {
syntax: matchGraph.syntax || null,
match: []
};
var hostStack = [host];
const hostStack = [host];
// revert a list and start with 2nd item since 1st is a stub item
item = reverseList(item).prev;
@@ -629,11 +628,3 @@ function matchAsTree(tokens, matchGraph, syntaxes) {
return matchResult;
}
module.exports = {
matchAsList: matchAsList,
matchAsTree: matchAsTree,
getTotalIterationCount: function() {
return totalIterationCount;
}
};

View File

@@ -1,72 +1,49 @@
var tokenize = require('../tokenizer');
var TokenStream = require('../common/TokenStream');
var tokenStream = new TokenStream();
var astToTokens = {
decorator: function(handlers) {
var curNode = null;
var prev = { len: 0, node: null };
var nodes = [prev];
var buffer = '';
import { tokenize } from '../tokenizer/index.js';
const astToTokens = {
decorator(handlers) {
const tokens = [];
let curNode = null;
return {
children: handlers.children,
node: function(node) {
var tmp = curNode;
...handlers,
node(node) {
const tmp = curNode;
curNode = node;
handlers.node.call(this, node);
curNode = tmp;
},
chunk: function(chunk) {
buffer += chunk;
if (prev.node !== curNode) {
nodes.push({
len: chunk.length,
node: curNode
});
} else {
prev.len += chunk.length;
}
emit(value, type, auto) {
tokens.push({
type,
value,
node: auto ? null : curNode
});
},
result: function() {
return prepareTokens(buffer, nodes);
result() {
return tokens;
}
};
}
};
function prepareTokens(str, nodes) {
var tokens = [];
var nodesOffset = 0;
var nodesIndex = 0;
var currentNode = nodes ? nodes[nodesIndex].node : null;
tokenize(str, tokenStream);
while (!tokenStream.eof) {
if (nodes) {
while (nodesIndex < nodes.length && nodesOffset + nodes[nodesIndex].len <= tokenStream.tokenStart) {
nodesOffset += nodes[nodesIndex++].len;
currentNode = nodes[nodesIndex].node;
}
}
function stringToTokens(str) {
const tokens = [];
tokenize(str, (type, start, end) =>
tokens.push({
type: tokenStream.tokenType,
value: tokenStream.getTokenValue(),
index: tokenStream.tokenIndex, // TODO: remove it, temporary solution
balance: tokenStream.balance[tokenStream.tokenIndex], // TODO: remove it, temporary solution
node: currentNode
});
tokenStream.next();
// console.log({ ...tokens[tokens.length - 1], node: undefined });
}
type,
value: str.slice(start, end),
node: null
})
);
return tokens;
}
module.exports = function(value, syntax) {
export default function(value, syntax) {
if (typeof value === 'string') {
return prepareTokens(value, null);
return stringToTokens(value);
}
return syntax.generate(value, astToTokens);

View File

@@ -1,4 +1,4 @@
var List = require('../common/List');
import { List } from '../utils/List.js';
function getFirstMatchNode(matchNode) {
if ('node' in matchNode) {
@@ -16,17 +16,17 @@ function getLastMatchNode(matchNode) {
return getLastMatchNode(matchNode.match[matchNode.match.length - 1]);
}
function matchFragments(lexer, ast, match, type, name) {
export function matchFragments(lexer, ast, match, type, name) {
function findFragments(matchNode) {
if (matchNode.syntax !== null &&
matchNode.syntax.type === type &&
matchNode.syntax.name === name) {
var start = getFirstMatchNode(matchNode);
var end = getLastMatchNode(matchNode);
const start = getFirstMatchNode(matchNode);
const end = getLastMatchNode(matchNode);
lexer.syntax.walk(ast, function(node, item, list) {
if (node === start) {
var nodes = new List();
const nodes = new List();
do {
nodes.appendData(item.data);
@@ -40,7 +40,7 @@ function matchFragments(lexer, ast, match, type, name) {
fragments.push({
parent: list,
nodes: nodes
nodes
});
}
});
@@ -51,7 +51,7 @@ function matchFragments(lexer, ast, match, type, name) {
}
}
var fragments = [];
const fragments = [];
if (match.matched !== null) {
findFragments(match.matched);
@@ -59,7 +59,3 @@ function matchFragments(lexer, ast, match, type, name) {
return fragments;
}
module.exports = {
matchFragments: matchFragments
};

View File

@@ -1,5 +1,6 @@
var List = require('../common/List');
var hasOwnProperty = Object.prototype.hasOwnProperty;
import { List } from '../utils/List.js';
const { hasOwnProperty } = Object.prototype;
function isValidNumber(value) {
// Number.isInteger(value) && value >= 0
@@ -26,8 +27,8 @@ function createNodeStructureChecker(type, fields) {
return warn(node, 'Type of node should be an Object');
}
for (var key in node) {
var valid = true;
for (let key in node) {
let valid = true;
if (hasOwnProperty.call(node, key) === false) {
continue;
@@ -54,8 +55,10 @@ function createNodeStructureChecker(type, fields) {
valid = false;
} else if (fields.hasOwnProperty(key)) {
for (var i = 0, valid = false; !valid && i < fields[key].length; i++) {
var fieldType = fields[key][i];
valid = false;
for (let i = 0; !valid && i < fields[key].length; i++) {
const fieldType = fields[key][i];
switch (fieldType) {
case String:
@@ -87,7 +90,7 @@ function createNodeStructureChecker(type, fields) {
}
}
for (var key in fields) {
for (const key in fields) {
if (hasOwnProperty.call(fields, key) &&
hasOwnProperty.call(node, key) === false) {
warn(node, 'Field `' + type + '.' + key + '` is missed');
@@ -96,68 +99,71 @@ function createNodeStructureChecker(type, fields) {
};
}
function genTypesList(fieldTypes, path) {
const docsTypes = [];
for (let i = 0; i < fieldTypes.length; i++) {
const fieldType = fieldTypes[i];
if (fieldType === String || fieldType === Boolean) {
docsTypes.push(fieldType.name.toLowerCase());
} else if (fieldType === null) {
docsTypes.push('null');
} else if (typeof fieldType === 'string') {
docsTypes.push(fieldType);
} else if (Array.isArray(fieldType)) {
docsTypes.push('List<' + (genTypesList(fieldType, path) || 'any') + '>'); // TODO: use type enum
} else {
throw new Error('Wrong value `' + fieldType + '` in `' + path + '` structure definition');
}
}
return docsTypes.join(' | ');
}
function processStructure(name, nodeType) {
var structure = nodeType.structure;
var fields = {
const structure = nodeType.structure;
const fields = {
type: String,
loc: true
};
var docs = {
const docs = {
type: '"' + name + '"'
};
for (var key in structure) {
for (const key in structure) {
if (hasOwnProperty.call(structure, key) === false) {
continue;
}
var docsTypes = [];
var fieldTypes = fields[key] = Array.isArray(structure[key])
const fieldTypes = fields[key] = Array.isArray(structure[key])
? structure[key].slice()
: [structure[key]];
for (var i = 0; i < fieldTypes.length; i++) {
var fieldType = fieldTypes[i];
if (fieldType === String || fieldType === Boolean) {
docsTypes.push(fieldType.name);
} else if (fieldType === null) {
docsTypes.push('null');
} else if (typeof fieldType === 'string') {
docsTypes.push('<' + fieldType + '>');
} else if (Array.isArray(fieldType)) {
docsTypes.push('List'); // TODO: use type enum
} else {
throw new Error('Wrong value `' + fieldType + '` in `' + name + '.' + key + '` structure definition');
}
}
docs[key] = docsTypes.join(' | ');
docs[key] = genTypesList(fieldTypes, name + '.' + key);
}
return {
docs: docs,
docs,
check: createNodeStructureChecker(name, fields)
};
}
module.exports = {
getStructureFromConfig: function(config) {
var structure = {};
export function getStructureFromConfig(config) {
const structure = {};
if (config.node) {
for (var name in config.node) {
if (hasOwnProperty.call(config.node, name)) {
var nodeType = config.node[name];
if (config.node) {
for (const name in config.node) {
if (hasOwnProperty.call(config.node, name)) {
const nodeType = config.node[name];
if (nodeType.structure) {
structure[name] = processStructure(name, nodeType);
} else {
throw new Error('Missed `structure` field in `' + name + '` node type definition');
}
if (nodeType.structure) {
structure[name] = processStructure(name, nodeType);
} else {
throw new Error('Missed `structure` field in `' + name + '` node type definition');
}
}
}
return structure;
}
return structure;
};

View File

@@ -1,4 +1,4 @@
function getTrace(node) {
export function getTrace(node) {
function shouldPutToTrace(syntax) {
if (syntax === null) {
return false;
@@ -14,7 +14,7 @@ function getTrace(node) {
function hasMatch(matchNode) {
if (Array.isArray(matchNode.match)) {
// use for-loop for better perfomance
for (var i = 0; i < matchNode.match.length; i++) {
for (let i = 0; i < matchNode.match.length; i++) {
if (hasMatch(matchNode.match[i])) {
if (shouldPutToTrace(matchNode.syntax)) {
result.unshift(matchNode.syntax);
@@ -34,7 +34,7 @@ function getTrace(node) {
return false;
}
var result = null;
let result = null;
if (this.matched !== null) {
hasMatch(this.matched);
@@ -43,8 +43,20 @@ function getTrace(node) {
return result;
}
export function isType(node, type) {
return testNode(this, node, match => match.type === 'Type' && match.name === type);
}
export function isProperty(node, property) {
return testNode(this, node, match => match.type === 'Property' && match.name === property);
}
export function isKeyword(node) {
return testNode(this, node, match => match.type === 'Keyword');
}
function testNode(match, node, fn) {
var trace = getTrace.call(match, node);
const trace = getTrace.call(match, node);
if (trace === null) {
return false;
@@ -52,28 +64,3 @@ function testNode(match, node, fn) {
return trace.some(fn);
}
function isType(node, type) {
return testNode(this, node, function(matchNode) {
return matchNode.type === 'Type' && matchNode.name === type;
});
}
function isProperty(node, property) {
return testNode(this, node, function(matchNode) {
return matchNode.type === 'Property' && matchNode.name === property;
});
}
function isKeyword(node) {
return testNode(this, node, function(matchNode) {
return matchNode.type === 'Keyword';
});
}
module.exports = {
getTrace: getTrace,
isType: isType,
isProperty: isProperty,
isKeyword: isKeyword
};

27
node_modules/css-tree/lib/lexer/units.js generated vendored Normal file
View File

@@ -0,0 +1,27 @@
export const length = [
// absolute length units https://www.w3.org/TR/css-values-3/#lengths
'cm', 'mm', 'q', 'in', 'pt', 'pc', 'px',
// font-relative length units https://drafts.csswg.org/css-values-4/#font-relative-lengths
'em', 'rem',
'ex', 'rex',
'cap', 'rcap',
'ch', 'rch',
'ic', 'ric',
'lh', 'rlh',
// viewport-percentage lengths https://drafts.csswg.org/css-values-4/#viewport-relative-lengths
'vw', 'svw', 'lvw', 'dvw',
'vh', 'svh', 'lvh', 'dvh',
'vi', 'svi', 'lvi', 'dvi',
'vb', 'svb', 'lvb', 'dvb',
'vmin', 'svmin', 'lvmin', 'dvmin',
'vmax', 'svmax', 'lvmax', 'dvmax',
// container relative lengths https://drafts.csswg.org/css-contain-3/#container-lengths
'cqw', 'cqh', 'cqi', 'cqb', 'cqmin', 'cqmax'
];
export const angle = ['deg', 'grad', 'rad', 'turn']; // https://www.w3.org/TR/css-values-3/#angles
export const time = ['s', 'ms']; // https://www.w3.org/TR/css-values-3/#time
export const frequency = ['hz', 'khz']; // https://www.w3.org/TR/css-values-3/#frequency
export const resolution = ['dpi', 'dpcm', 'dppx', 'x']; // https://www.w3.org/TR/css-values-3/#resolution
export const flex = ['fr']; // https://drafts.csswg.org/css-grid/#fr-unit
export const decibel = ['db']; // https://www.w3.org/TR/css3-speech/#mixing-props-voice-volume
export const semitones = ['st']; // https://www.w3.org/TR/css3-speech/#voice-props-voice-pitch

70
node_modules/css-tree/lib/parser/SyntaxError.js generated vendored Normal file
View File

@@ -0,0 +1,70 @@
import { createCustomError } from '../utils/create-custom-error.js';
const MAX_LINE_LENGTH = 100;
const OFFSET_CORRECTION = 60;
const TAB_REPLACEMENT = ' ';
function sourceFragment({ source, line, column, baseLine, baseColumn }, extraLines) {
function processLines(start, end) {
return lines
.slice(start, end)
.map((line, idx) =>
String(start + idx + 1).padStart(maxNumLength) + ' |' + line
).join('\n');
}
const prelines = '\n'.repeat(Math.max(baseLine - 1, 0));
const precolumns = ' '.repeat(Math.max(baseColumn - 1, 0));
const lines = (prelines + precolumns + source).split(/\r\n?|\n|\f/);
const startLine = Math.max(1, line - extraLines) - 1;
const endLine = Math.min(line + extraLines, lines.length + 1);
const maxNumLength = Math.max(4, String(endLine).length) + 1;
let cutLeft = 0;
// column correction according to replaced tab before column
column += (TAB_REPLACEMENT.length - 1) * (lines[line - 1].substr(0, column - 1).match(/\t/g) || []).length;
if (column > MAX_LINE_LENGTH) {
cutLeft = column - OFFSET_CORRECTION + 3;
column = OFFSET_CORRECTION - 2;
}
for (let i = startLine; i <= endLine; i++) {
if (i >= 0 && i < lines.length) {
lines[i] = lines[i].replace(/\t/g, TAB_REPLACEMENT);
lines[i] =
(cutLeft > 0 && lines[i].length > cutLeft ? '\u2026' : '') +
lines[i].substr(cutLeft, MAX_LINE_LENGTH - 2) +
(lines[i].length > cutLeft + MAX_LINE_LENGTH - 1 ? '\u2026' : '');
}
}
return [
processLines(startLine, line),
new Array(column + maxNumLength + 2).join('-') + '^',
processLines(line, endLine)
].filter(Boolean)
.join('\n')
.replace(/^(\s+\d+\s+\|\n)+/, '')
.replace(/\n(\s+\d+\s+\|)+$/, '');
}
export function SyntaxError(message, source, offset, line, column, baseLine = 1, baseColumn = 1) {
const error = Object.assign(createCustomError('SyntaxError', message), {
source,
offset,
line,
column,
sourceFragment(extraLines) {
return sourceFragment({ source, line, column, baseLine, baseColumn }, isNaN(extraLines) ? 0 : extraLines);
},
get formattedMessage() {
return (
`Parse error: ${message}\n` +
sourceFragment({ source, line, column, baseLine, baseColumn }, 2)
);
}
});
return error;
}

View File

@@ -1,25 +1,33 @@
var OffsetToLocation = require('../common/OffsetToLocation');
var SyntaxError = require('../common/SyntaxError');
var TokenStream = require('../common/TokenStream');
var List = require('../common/List');
var tokenize = require('../tokenizer');
var constants = require('../tokenizer/const');
var { findWhiteSpaceStart, cmpStr } = require('../tokenizer/utils');
var sequence = require('./sequence');
var noop = function() {};
import { List } from '../utils/List.js';
import { SyntaxError } from './SyntaxError.js';
import {
tokenize,
OffsetToLocation,
TokenStream,
tokenNames,
var TYPE = constants.TYPE;
var NAME = constants.NAME;
var WHITESPACE = TYPE.WhiteSpace;
var COMMENT = TYPE.Comment;
var IDENT = TYPE.Ident;
var FUNCTION = TYPE.Function;
var URL = TYPE.Url;
var HASH = TYPE.Hash;
var PERCENTAGE = TYPE.Percentage;
var NUMBER = TYPE.Number;
var NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
var NULL = 0;
consumeNumber,
findWhiteSpaceStart,
cmpChar,
cmpStr,
WhiteSpace,
Comment,
Ident,
Function as FunctionToken,
Url,
Hash,
Percentage,
Number as NumberToken
} from '../tokenizer/index.js';
import { readSequence } from './sequence.js';
const NOOP = () => {};
const EXCLAMATIONMARK = 0x0021; // U+0021 EXCLAMATION MARK (!)
const NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
const SEMICOLON = 0x003B; // U+003B SEMICOLON (;)
const LEFTCURLYBRACKET = 0x007B; // U+007B LEFT CURLY BRACKET ({)
const NULL = 0;
function createParseContext(name) {
return function() {
@@ -27,116 +35,120 @@ function createParseContext(name) {
};
}
function processConfig(config) {
var parserConfig = {
context: {},
scope: {},
atrule: {},
pseudo: {}
};
function fetchParseValues(dict) {
const result = Object.create(null);
if (config.parseContext) {
for (var name in config.parseContext) {
switch (typeof config.parseContext[name]) {
case 'function':
parserConfig.context[name] = config.parseContext[name];
break;
for (const name of Object.keys(dict)) {
const item = dict[name];
const fn = item.parse || item;
case 'string':
parserConfig.context[name] = createParseContext(config.parseContext[name]);
break;
}
if (fn) {
result[name] = fn;
}
}
if (config.scope) {
for (var name in config.scope) {
parserConfig.scope[name] = config.scope[name];
}
}
if (config.atrule) {
for (var name in config.atrule) {
var atrule = config.atrule[name];
if (atrule.parse) {
parserConfig.atrule[name] = atrule.parse;
}
}
}
if (config.pseudo) {
for (var name in config.pseudo) {
var pseudo = config.pseudo[name];
if (pseudo.parse) {
parserConfig.pseudo[name] = pseudo.parse;
}
}
}
if (config.node) {
for (var name in config.node) {
parserConfig[name] = config.node[name].parse;
}
}
return parserConfig;
return result;
}
module.exports = function createParser(config) {
var parser = {
scanner: new TokenStream(),
locationMap: new OffsetToLocation(),
function processConfig(config) {
const parseConfig = {
context: Object.create(null),
features: Object.assign(Object.create(null), config.features),
scope: Object.assign(Object.create(null), config.scope),
atrule: fetchParseValues(config.atrule),
pseudo: fetchParseValues(config.pseudo),
node: fetchParseValues(config.node)
};
filename: '<unknown>',
needPositions: false,
onParseError: noop,
onParseErrorThrow: false,
for (const [name, context] of Object.entries(config.parseContext)) {
switch (typeof context) {
case 'function':
parseConfig.context[name] = context;
break;
case 'string':
parseConfig.context[name] = createParseContext(context);
break;
}
}
return {
config: parseConfig,
...parseConfig,
...parseConfig.node
};
}
export function createParser(config) {
let source = '';
let filename = '<unknown>';
let needPositions = false;
let onParseError = NOOP;
let onParseErrorThrow = false;
const locationMap = new OffsetToLocation();
const parser = Object.assign(new TokenStream(), processConfig(config || {}), {
parseAtrulePrelude: true,
parseRulePrelude: true,
parseValue: true,
parseCustomProperty: false,
readSequence: sequence,
readSequence,
createList: function() {
consumeUntilBalanceEnd: () => 0,
consumeUntilLeftCurlyBracket(code) {
return code === LEFTCURLYBRACKET ? 1 : 0;
},
consumeUntilLeftCurlyBracketOrSemicolon(code) {
return code === LEFTCURLYBRACKET || code === SEMICOLON ? 1 : 0;
},
consumeUntilExclamationMarkOrSemicolon(code) {
return code === EXCLAMATIONMARK || code === SEMICOLON ? 1 : 0;
},
consumeUntilSemicolonIncluded(code) {
return code === SEMICOLON ? 2 : 0;
},
createList() {
return new List();
},
createSingleNodeList: function(node) {
createSingleNodeList(node) {
return new List().appendData(node);
},
getFirstListNode: function(list) {
return list && list.first();
getFirstListNode(list) {
return list && list.first;
},
getLastListNode: function(list) {
return list.last();
getLastListNode(list) {
return list && list.last;
},
parseWithFallback: function(consumer, fallback) {
var startToken = this.scanner.tokenIndex;
parseWithFallback(consumer, fallback) {
const startIndex = this.tokenIndex;
try {
return consumer.call(this);
} catch (e) {
if (this.onParseErrorThrow) {
if (onParseErrorThrow) {
throw e;
}
var fallbackNode = fallback.call(this, startToken);
this.skip(startIndex - this.tokenIndex);
const fallbackNode = fallback.call(this);
this.onParseErrorThrow = true;
this.onParseError(e, fallbackNode);
this.onParseErrorThrow = false;
onParseErrorThrow = true;
onParseError(e, fallbackNode);
onParseErrorThrow = false;
return fallbackNode;
}
},
lookupNonWSType: function(offset) {
lookupNonWSType(offset) {
let type;
do {
var type = this.scanner.lookupType(offset++);
if (type !== WHITESPACE) {
type = this.lookupType(offset++);
if (type !== WhiteSpace && type !== Comment) {
return type;
}
} while (type !== NULL);
@@ -144,145 +156,174 @@ module.exports = function createParser(config) {
return NULL;
},
eat: function(tokenType) {
if (this.scanner.tokenType !== tokenType) {
var offset = this.scanner.tokenStart;
var message = NAME[tokenType] + ' is expected';
charCodeAt(offset) {
return offset >= 0 && offset < source.length ? source.charCodeAt(offset) : 0;
},
substring(offsetStart, offsetEnd) {
return source.substring(offsetStart, offsetEnd);
},
substrToCursor(start) {
return this.source.substring(start, this.tokenStart);
},
cmpChar(offset, charCode) {
return cmpChar(source, offset, charCode);
},
cmpStr(offsetStart, offsetEnd, str) {
return cmpStr(source, offsetStart, offsetEnd, str);
},
consume(tokenType) {
const start = this.tokenStart;
this.eat(tokenType);
return this.substrToCursor(start);
},
consumeFunctionName() {
const name = source.substring(this.tokenStart, this.tokenEnd - 1);
this.eat(FunctionToken);
return name;
},
consumeNumber(type) {
const number = source.substring(this.tokenStart, consumeNumber(source, this.tokenStart));
this.eat(type);
return number;
},
eat(tokenType) {
if (this.tokenType !== tokenType) {
const tokenName = tokenNames[tokenType].slice(0, -6).replace(/-/g, ' ').replace(/^./, m => m.toUpperCase());
let message = `${/[[\](){}]/.test(tokenName) ? `"${tokenName}"` : tokenName} is expected`;
let offset = this.tokenStart;
// tweak message and offset
switch (tokenType) {
case IDENT:
case Ident:
// when identifier is expected but there is a function or url
if (this.scanner.tokenType === FUNCTION || this.scanner.tokenType === URL) {
offset = this.scanner.tokenEnd - 1;
if (this.tokenType === FunctionToken || this.tokenType === Url) {
offset = this.tokenEnd - 1;
message = 'Identifier is expected but function found';
} else {
message = 'Identifier is expected';
}
break;
case HASH:
if (this.scanner.isDelim(NUMBERSIGN)) {
this.scanner.next();
case Hash:
if (this.isDelim(NUMBERSIGN)) {
this.next();
offset++;
message = 'Name is expected';
}
break;
case PERCENTAGE:
if (this.scanner.tokenType === NUMBER) {
offset = this.scanner.tokenEnd;
case Percentage:
if (this.tokenType === NumberToken) {
offset = this.tokenEnd;
message = 'Percent sign is expected';
}
break;
default:
// when test type is part of another token show error for current position + 1
// e.g. eat(HYPHENMINUS) will fail on "-foo", but pointing on "-" is odd
if (this.scanner.source.charCodeAt(this.scanner.tokenStart) === tokenType) {
offset = offset + 1;
}
}
this.error(message, offset);
}
this.scanner.next();
this.next();
},
eatIdent(name) {
if (this.tokenType !== Ident || this.lookupValue(0, name) === false) {
this.error(`Identifier "${name}" is expected`);
}
this.next();
},
eatDelim(code) {
if (!this.isDelim(code)) {
this.error(`Delim "${String.fromCharCode(code)}" is expected`);
}
this.next();
},
consume: function(tokenType) {
var value = this.scanner.getTokenValue();
this.eat(tokenType);
return value;
},
consumeFunctionName: function() {
var name = this.scanner.source.substring(this.scanner.tokenStart, this.scanner.tokenEnd - 1);
this.eat(FUNCTION);
return name;
},
getLocation: function(start, end) {
if (this.needPositions) {
return this.locationMap.getLocationRange(
getLocation(start, end) {
if (needPositions) {
return locationMap.getLocationRange(
start,
end,
this.filename
filename
);
}
return null;
},
getLocationFromList: function(list) {
if (this.needPositions) {
var head = this.getFirstListNode(list);
var tail = this.getLastListNode(list);
return this.locationMap.getLocationRange(
head !== null ? head.loc.start.offset - this.locationMap.startOffset : this.scanner.tokenStart,
tail !== null ? tail.loc.end.offset - this.locationMap.startOffset : this.scanner.tokenStart,
this.filename
getLocationFromList(list) {
if (needPositions) {
const head = this.getFirstListNode(list);
const tail = this.getLastListNode(list);
return locationMap.getLocationRange(
head !== null ? head.loc.start.offset - locationMap.startOffset : this.tokenStart,
tail !== null ? tail.loc.end.offset - locationMap.startOffset : this.tokenStart,
filename
);
}
return null;
},
error: function(message, offset) {
var location = typeof offset !== 'undefined' && offset < this.scanner.source.length
? this.locationMap.getLocation(offset)
: this.scanner.eof
? this.locationMap.getLocation(findWhiteSpaceStart(this.scanner.source, this.scanner.source.length - 1))
: this.locationMap.getLocation(this.scanner.tokenStart);
error(message, offset) {
const location = typeof offset !== 'undefined' && offset < source.length
? locationMap.getLocation(offset)
: this.eof
? locationMap.getLocation(findWhiteSpaceStart(source, source.length - 1))
: locationMap.getLocation(this.tokenStart);
throw new SyntaxError(
message || 'Unexpected input',
this.scanner.source,
source,
location.offset,
location.line,
location.column
location.column,
locationMap.startLine,
locationMap.startColumn
);
}
};
});
config = processConfig(config || {});
for (var key in config) {
parser[key] = config[key];
}
return function(source, options) {
const parse = function(source_, options) {
source = source_;
options = options || {};
var context = options.context || 'default';
var onComment = options.onComment;
var ast;
tokenize(source, parser.scanner);
parser.locationMap.setSource(
parser.setSource(source, tokenize);
locationMap.setSource(
source,
options.offset,
options.line,
options.column
);
parser.filename = options.filename || '<unknown>';
parser.needPositions = Boolean(options.positions);
parser.onParseError = typeof options.onParseError === 'function' ? options.onParseError : noop;
parser.onParseErrorThrow = false;
filename = options.filename || '<unknown>';
needPositions = Boolean(options.positions);
onParseError = typeof options.onParseError === 'function' ? options.onParseError : NOOP;
onParseErrorThrow = false;
parser.parseAtrulePrelude = 'parseAtrulePrelude' in options ? Boolean(options.parseAtrulePrelude) : true;
parser.parseRulePrelude = 'parseRulePrelude' in options ? Boolean(options.parseRulePrelude) : true;
parser.parseValue = 'parseValue' in options ? Boolean(options.parseValue) : true;
parser.parseCustomProperty = 'parseCustomProperty' in options ? Boolean(options.parseCustomProperty) : false;
if (!parser.context.hasOwnProperty(context)) {
const { context = 'default', onComment } = options;
if (context in parser.context === false) {
throw new Error('Unknown context `' + context + '`');
}
if (typeof onComment === 'function') {
parser.scanner.forEachToken((type, start, end) => {
if (type === COMMENT) {
parser.forEachToken((type, start, end) => {
if (type === Comment) {
const loc = parser.getLocation(start, end);
const value = cmpStr(source, end - 2, end, '*/')
? source.slice(start + 2, end - 2)
@@ -293,12 +334,17 @@ module.exports = function createParser(config) {
});
}
ast = parser.context[context].call(parser, options);
const ast = parser.context[context].call(parser, options);
if (!parser.scanner.eof) {
if (!parser.eof) {
parser.error();
}
return ast;
};
return Object.assign(parse, {
SyntaxError,
config: parser.config
});
};

View File

@@ -1,4 +1,4 @@
var createParser = require('./create');
var config = require('../syntax/config/parser');
import { createParser } from './create.js';
import config from '../syntax/config/parser.js';
module.exports = createParser(config);
export default createParser(config);

4
node_modules/css-tree/lib/parser/parse-selector.js generated vendored Normal file
View File

@@ -0,0 +1,4 @@
import { createParser } from './create.js';
import config from '../syntax/config/parser-selector.js';
export default createParser(config);

View File

@@ -1,53 +1,42 @@
var TYPE = require('../tokenizer').TYPE;
var WHITESPACE = TYPE.WhiteSpace;
var COMMENT = TYPE.Comment;
import { WhiteSpace, Comment } from '../tokenizer/index.js';
module.exports = function readSequence(recognizer) {
var children = this.createList();
var child = null;
var context = {
recognizer: recognizer,
space: null,
ignoreWS: false,
ignoreWSAfter: false
export function readSequence(recognizer) {
const children = this.createList();
let space = false;
const context = {
recognizer
};
this.scanner.skipSC();
while (!this.scanner.eof) {
switch (this.scanner.tokenType) {
case COMMENT:
this.scanner.next();
while (!this.eof) {
switch (this.tokenType) {
case Comment:
this.next();
continue;
case WHITESPACE:
if (context.ignoreWS) {
this.scanner.next();
} else {
context.space = this.WhiteSpace();
}
case WhiteSpace:
space = true;
this.next();
continue;
}
child = recognizer.getNode.call(this, context);
let child = recognizer.getNode.call(this, context);
if (child === undefined) {
break;
}
if (context.space !== null) {
children.push(context.space);
context.space = null;
if (space) {
if (recognizer.onWhiteSpace) {
recognizer.onWhiteSpace.call(this, child, children, context);
}
space = false;
}
children.push(child);
}
if (context.ignoreWSAfter) {
context.ignoreWSAfter = false;
context.ignoreWS = true;
} else {
context.ignoreWS = false;
}
if (space && recognizer.onWhiteSpace) {
recognizer.onWhiteSpace.call(this, null, children, context);
}
return children;

28
node_modules/css-tree/lib/syntax/atrule/container.js generated vendored Normal file
View File

@@ -0,0 +1,28 @@
import { Ident } from '../../tokenizer/index.js';
// https://drafts.csswg.org/css-contain-3/#container-rule
// The keywords `none`, `and`, `not`, and `or` are excluded from the <custom-ident> above.
const nonContainerNameKeywords = new Set(['none', 'and', 'not', 'or']);
export default {
parse: {
prelude() {
const children = this.createList();
if (this.tokenType === Ident) {
const name = this.substring(this.tokenStart, this.tokenEnd);
if (!nonContainerNameKeywords.has(name.toLowerCase())) {
children.push(this.Identifier());
}
}
children.push(this.Condition('container'));
return children;
},
block(nested = false) {
return this.Block(nested);
}
}
};

View File

@@ -1,7 +1,7 @@
module.exports = {
export default {
parse: {
prelude: null,
block: function() {
block() {
return this.Block(true);
}
}

View File

@@ -1,25 +1,71 @@
var TYPE = require('../../tokenizer').TYPE;
import {
String as StringToken,
Ident,
Url,
Function as FunctionToken,
LeftParenthesis,
RightParenthesis
} from '../../tokenizer/index.js';
var STRING = TYPE.String;
var IDENT = TYPE.Ident;
var URL = TYPE.Url;
var FUNCTION = TYPE.Function;
var LEFTPARENTHESIS = TYPE.LeftParenthesis;
function parseWithFallback(parse, fallback) {
return this.parseWithFallback(
() => {
try {
return parse.call(this);
} finally {
this.skipSC();
if (this.lookupNonWSType(0) !== RightParenthesis) {
this.error();
}
}
},
fallback || (() => this.Raw(null, true))
);
}
module.exports = {
const parseFunctions = {
layer() {
this.skipSC();
const children = this.createList();
const node = parseWithFallback.call(this, this.Layer);
if (node.type !== 'Raw' || node.value !== '') {
children.push(node);
}
return children;
},
supports() {
this.skipSC();
const children = this.createList();
const node = parseWithFallback.call(
this,
this.Declaration,
() => parseWithFallback.call(this, () => this.Condition('supports'))
);
if (node.type !== 'Raw' || node.value !== '') {
children.push(node);
}
return children;
}
};
export default {
parse: {
prelude: function() {
var children = this.createList();
prelude() {
const children = this.createList();
this.scanner.skipSC();
switch (this.scanner.tokenType) {
case STRING:
switch (this.tokenType) {
case StringToken:
children.push(this.String());
break;
case URL:
case FUNCTION:
case Url:
case FunctionToken:
children.push(this.Url());
break;
@@ -27,9 +73,27 @@ module.exports = {
this.error('String or url() is expected');
}
if (this.lookupNonWSType(0) === IDENT ||
this.lookupNonWSType(0) === LEFTPARENTHESIS) {
children.push(this.WhiteSpace());
this.skipSC();
if (this.tokenType === Ident &&
this.cmpStr(this.tokenStart, this.tokenEnd, 'layer')) {
children.push(this.Identifier());
} else if (
this.tokenType === FunctionToken &&
this.cmpStr(this.tokenStart, this.tokenEnd, 'layer(')
) {
children.push(this.Function(null, parseFunctions));
}
this.skipSC();
if (this.tokenType === FunctionToken &&
this.cmpStr(this.tokenStart, this.tokenEnd, 'supports(')) {
children.push(this.Function(null, parseFunctions));
}
if (this.lookupNonWSType(0) === Ident ||
this.lookupNonWSType(0) === LeftParenthesis) {
children.push(this.MediaQueryList());
}

View File

@@ -1,7 +1,23 @@
module.exports = {
'font-face': require('./font-face'),
'import': require('./import'),
'media': require('./media'),
'page': require('./page'),
'supports': require('./supports')
import container from './container.js';
import fontFace from './font-face.js';
import importAtrule from './import.js';
import layer from './layer.js';
import media from './media.js';
import nest from './nest.js';
import page from './page.js';
import scope from './scope.js';
import startingStyle from './starting-style.js';
import supports from './supports.js';
export default {
container,
'font-face': fontFace,
import: importAtrule,
layer,
media,
nest,
page,
scope,
'starting-style': startingStyle,
supports
};

12
node_modules/css-tree/lib/syntax/atrule/layer.js generated vendored Normal file
View File

@@ -0,0 +1,12 @@
export default {
parse: {
prelude() {
return this.createSingleNodeList(
this.LayerList()
);
},
block() {
return this.Block(false);
}
}
};

View File

@@ -1,12 +1,12 @@
module.exports = {
export default {
parse: {
prelude: function() {
prelude() {
return this.createSingleNodeList(
this.MediaQueryList()
);
},
block: function() {
return this.Block(false);
block(nested = false) {
return this.Block(nested);
}
}
};

12
node_modules/css-tree/lib/syntax/atrule/nest.js generated vendored Normal file
View File

@@ -0,0 +1,12 @@
export default {
parse: {
prelude() {
return this.createSingleNodeList(
this.SelectorList()
);
},
block() {
return this.Block(true);
}
}
};

View File

@@ -1,11 +1,11 @@
module.exports = {
export default {
parse: {
prelude: function() {
prelude() {
return this.createSingleNodeList(
this.SelectorList()
);
},
block: function() {
block() {
return this.Block(true);
}
}

12
node_modules/css-tree/lib/syntax/atrule/scope.js generated vendored Normal file
View File

@@ -0,0 +1,12 @@
export default {
parse: {
prelude() {
return this.createSingleNodeList(
this.Scope()
);
},
block(nested = false) {
return this.Block(nested);
}
}
};

View File

@@ -0,0 +1,8 @@
export default {
parse: {
prelude: null,
block(nested = false) {
return this.Block(nested);
}
}
};

View File

@@ -1,89 +1,12 @@
var TYPE = require('../../tokenizer').TYPE;
var WHITESPACE = TYPE.WhiteSpace;
var COMMENT = TYPE.Comment;
var IDENT = TYPE.Ident;
var FUNCTION = TYPE.Function;
var COLON = TYPE.Colon;
var LEFTPARENTHESIS = TYPE.LeftParenthesis;
function consumeRaw() {
return this.createSingleNodeList(
this.Raw(this.scanner.tokenIndex, null, false)
);
}
function parentheses() {
this.scanner.skipSC();
if (this.scanner.tokenType === IDENT &&
this.lookupNonWSType(1) === COLON) {
return this.createSingleNodeList(
this.Declaration()
);
}
return readSequence.call(this);
}
function readSequence() {
var children = this.createList();
var space = null;
var child;
this.scanner.skipSC();
scan:
while (!this.scanner.eof) {
switch (this.scanner.tokenType) {
case WHITESPACE:
space = this.WhiteSpace();
continue;
case COMMENT:
this.scanner.next();
continue;
case FUNCTION:
child = this.Function(consumeRaw, this.scope.AtrulePrelude);
break;
case IDENT:
child = this.Identifier();
break;
case LEFTPARENTHESIS:
child = this.Parentheses(parentheses, this.scope.AtrulePrelude);
break;
default:
break scan;
}
if (space !== null) {
children.push(space);
space = null;
}
children.push(child);
}
return children;
}
module.exports = {
export default {
parse: {
prelude: function() {
var children = readSequence.call(this);
if (this.getFirstListNode(children) === null) {
this.error('Condition is expected');
}
return children;
prelude() {
return this.createSingleNodeList(
this.Condition('supports')
);
},
block: function() {
return this.Block(false);
block(nested = false) {
return this.Block(nested);
}
}
};

5
node_modules/css-tree/lib/syntax/config/generator.js generated vendored Normal file
View File

@@ -0,0 +1,5 @@
import * as node from '../node/index-generate.js';
export default {
node
};

View File

@@ -1,9 +1,10 @@
var data = require('../../../data');
import { cssWideKeywords } from '../../lexer/generic-const.js';
import definitions from '../../data.js';
import * as node from '../node/index.js';
module.exports = {
export default {
generic: true,
types: data.types,
atrules: data.atrules,
properties: data.properties,
node: require('../node')
cssWideKeywords,
...definitions,
node
};

View File

@@ -1,48 +1,4 @@
const hasOwnProperty = Object.prototype.hasOwnProperty;
const shape = {
generic: true,
types: appendOrAssign,
atrules: {
prelude: appendOrAssignOrNull,
descriptors: appendOrAssignOrNull
},
properties: appendOrAssign,
parseContext: assign,
scope: deepAssign,
atrule: ['parse'],
pseudo: ['parse'],
node: ['name', 'structure', 'parse', 'generate', 'walkContext']
};
function isObject(value) {
return value && value.constructor === Object;
}
function copy(value) {
return isObject(value)
? Object.assign({}, value)
: value;
}
function assign(dest, src) {
return Object.assign(dest, src);
}
function deepAssign(dest, src) {
for (const key in src) {
if (hasOwnProperty.call(src, key)) {
if (isObject(dest[key])) {
deepAssign(dest[key], copy(src[key]));
} else {
dest[key] = copy(src[key]);
}
}
}
return dest;
}
function append(a, b) {
function appendOrSet(a, b) {
if (typeof b === 'string' && /^\s*\|/.test(b)) {
return typeof a === 'string'
? a + b
@@ -52,89 +8,116 @@ function append(a, b) {
return b || null;
}
function appendOrAssign(a, b) {
if (typeof b === 'string') {
return append(a, b);
}
function sliceProps(obj, props) {
const result = Object.create(null);
const result = Object.assign({}, a);
for (let key in b) {
if (hasOwnProperty.call(b, key)) {
result[key] = append(hasOwnProperty.call(a, key) ? a[key] : undefined, b[key]);
for (const [key, value] of Object.entries(obj)) {
if (value) {
result[key] = {};
for (const prop of Object.keys(value)) {
if (props.includes(prop)) {
result[key][prop] = value[prop];
}
}
}
}
return result;
}
function appendOrAssignOrNull(a, b) {
const result = appendOrAssign(a, b);
export default function mix(dest, src) {
const result = { ...dest };
return !isObject(result) || Object.keys(result).length
? result
: null;
}
for (const [prop, value] of Object.entries(src)) {
switch (prop) {
case 'generic':
result[prop] = Boolean(value);
break;
function mix(dest, src, shape) {
for (const key in shape) {
if (hasOwnProperty.call(shape, key) === false) {
continue;
}
case 'cssWideKeywords':
result[prop] = dest[prop]
? [...dest[prop], ...value]
: value || [];
break;
if (shape[key] === true) {
if (key in src) {
if (hasOwnProperty.call(src, key)) {
dest[key] = copy(src[key]);
case 'units':
result[prop] = { ...dest[prop] };
for (const [name, patch] of Object.entries(value)) {
result[prop][name] = Array.isArray(patch) ? patch : [];
}
}
} else if (shape[key]) {
if (typeof shape[key] === 'function') {
const fn = shape[key];
dest[key] = fn({}, dest[key]);
dest[key] = fn(dest[key] || {}, src[key]);
} else if (isObject(shape[key])) {
const result = {};
break;
for (let name in dest[key]) {
result[name] = mix({}, dest[key][name], shape[key]);
}
case 'atrules':
result[prop] = { ...dest[prop] };
for (let name in src[key]) {
result[name] = mix(result[name] || {}, src[key][name], shape[key]);
}
for (const [name, atrule] of Object.entries(value)) {
const exists = result[prop][name] || {};
const current = result[prop][name] = {
prelude: exists.prelude || null,
descriptors: {
...exists.descriptors
}
};
dest[key] = result;
} else if (Array.isArray(shape[key])) {
const res = {};
const innerShape = shape[key].reduce(function(s, k) {
s[k] = true;
return s;
}, {});
if (!atrule) {
continue;
}
for (const [name, value] of Object.entries(dest[key] || {})) {
res[name] = {};
if (value) {
mix(res[name], value, innerShape);
current.prelude = atrule.prelude
? appendOrSet(current.prelude, atrule.prelude)
: current.prelude || null;
for (const [descriptorName, descriptorValue] of Object.entries(atrule.descriptors || {})) {
current.descriptors[descriptorName] = descriptorValue
? appendOrSet(current.descriptors[descriptorName], descriptorValue)
: null;
}
if (!Object.keys(current.descriptors).length) {
current.descriptors = null;
}
}
break;
for (const name in src[key]) {
if (hasOwnProperty.call(src[key], name)) {
if (!res[name]) {
res[name] = {};
}
if (src[key] && src[key][name]) {
mix(res[name], src[key][name], innerShape);
}
}
case 'types':
case 'properties':
result[prop] = { ...dest[prop] };
for (const [name, syntax] of Object.entries(value)) {
result[prop][name] = appendOrSet(result[prop][name], syntax);
}
break;
dest[key] = res;
}
case 'scope':
case 'features':
result[prop] = { ...dest[prop] };
for (const [name, props] of Object.entries(value)) {
result[prop][name] = { ...result[prop][name], ...props };
}
break;
case 'parseContext':
result[prop] = {
...dest[prop],
...value
};
break;
case 'atrule':
case 'pseudo':
result[prop] = {
...dest[prop],
...sliceProps(value, ['parse'])
};
break;
case 'node':
result[prop] = {
...dest[prop],
...sliceProps(value, ['name', 'structure', 'parse', 'generate', 'walkContext'])
};
break;
}
}
return dest;
}
module.exports = (dest, src) => mix(dest, src, shape);
return result;
}

View File

@@ -0,0 +1,15 @@
import { Selector } from '../scope/index.js';
import pseudo from '../pseudo/index.js';
import * as node from '../node/index-parse-selector.js';
export default {
parseContext: {
default: 'SelectorList',
selectorList: 'SelectorList',
selector: 'Selector'
},
scope: { Selector },
atrule: {},
pseudo,
node
};

View File

@@ -1,25 +1,45 @@
module.exports = {
import * as scope from '../scope/index.js';
import atrule from '../atrule/index.js';
import pseudo from '../pseudo/index.js';
import * as node from '../node/index-parse.js';
export default {
parseContext: {
default: 'StyleSheet',
stylesheet: 'StyleSheet',
atrule: 'Atrule',
atrulePrelude: function(options) {
atrulePrelude(options) {
return this.AtrulePrelude(options.atrule ? String(options.atrule) : null);
},
mediaQueryList: 'MediaQueryList',
mediaQuery: 'MediaQuery',
condition(options) {
return this.Condition(options.kind);
},
rule: 'Rule',
selectorList: 'SelectorList',
selector: 'Selector',
block: function() {
block() {
return this.Block(true);
},
declarationList: 'DeclarationList',
declaration: 'Declaration',
value: 'Value'
},
scope: require('../scope'),
atrule: require('../atrule'),
pseudo: require('../pseudo'),
node: require('../node')
features: {
supports: {
selector() {
return this.Selector();
}
},
container: {
style() {
return this.Declaration();
}
}
},
scope,
atrule,
pseudo,
node
};

View File

@@ -1,3 +1,5 @@
module.exports = {
node: require('../node')
import * as node from '../node/index.js';
export default {
node
};

View File

@@ -1,68 +1,48 @@
var List = require('../common/List');
var SyntaxError = require('../common/SyntaxError');
var TokenStream = require('../common/TokenStream');
var Lexer = require('../lexer/Lexer');
var definitionSyntax = require('../definition-syntax');
var tokenize = require('../tokenizer');
var createParser = require('../parser/create');
var createGenerator = require('../generator/create');
var createConvertor = require('../convertor/create');
var createWalker = require('../walker/create');
var clone = require('../utils/clone');
var names = require('../utils/names');
var mix = require('./config/mix');
import { tokenize } from '../tokenizer/index.js';
import { createParser } from '../parser/create.js';
import { createGenerator } from '../generator/create.js';
import { createConvertor } from '../convertor/create.js';
import { createWalker } from '../walker/create.js';
import { Lexer } from '../lexer/Lexer.js';
import mix from './config/mix.js';
function createSyntax(config) {
var parse = createParser(config);
var walk = createWalker(config);
var generate = createGenerator(config);
var convert = createConvertor(walk);
const parse = createParser(config);
const walk = createWalker(config);
const generate = createGenerator(config);
const { fromPlainObject, toPlainObject } = createConvertor(walk);
var syntax = {
List: List,
SyntaxError: SyntaxError,
TokenStream: TokenStream,
Lexer: Lexer,
vendorPrefix: names.vendorPrefix,
keyword: names.keyword,
property: names.property,
isCustomProperty: names.isCustomProperty,
definitionSyntax: definitionSyntax,
const syntax = {
lexer: null,
createLexer: function(config) {
return new Lexer(config, syntax, syntax.lexer.structure);
},
createLexer: config => new Lexer(config, syntax, syntax.lexer.structure),
tokenize: tokenize,
parse: parse,
walk: walk,
generate: generate,
tokenize,
parse,
generate,
walk,
find: walk.find,
findLast: walk.findLast,
findAll: walk.findAll,
clone: clone,
fromPlainObject: convert.fromPlainObject,
toPlainObject: convert.toPlainObject,
fromPlainObject,
toPlainObject,
fork(extension) {
const base = mix({}, config); // copy of config
createSyntax: function(config) {
return createSyntax(mix({}, config));
},
fork: function(extension) {
var base = mix({}, config); // copy of config
return createSyntax(
typeof extension === 'function'
? extension(base, Object.assign)
? extension(base) // TODO: remove Object.assign as second parameter
: mix(base, extension)
);
}
};
syntax.lexer = new Lexer({
generic: true,
generic: config.generic,
cssWideKeywords: config.cssWideKeywords,
units: config.units,
types: config.types,
atrules: config.atrules,
properties: config.properties,
@@ -72,6 +52,4 @@ function createSyntax(config) {
return syntax;
};
exports.create = function(config) {
return createSyntax(mix({}, config));
};
export default config => createSyntax(mix({}, config));

View File

@@ -1,7 +1,7 @@
// legacy IE function
// expression( <any-value> )
module.exports = function() {
export default function() {
return this.createSingleNodeList(
this.Raw(this.scanner.tokenIndex, null, false)
this.Raw(null, false)
);
};
}

View File

@@ -1,31 +1,27 @@
var TYPE = require('../../tokenizer').TYPE;
var rawMode = require('../node/Raw').mode;
var COMMA = TYPE.Comma;
var WHITESPACE = TYPE.WhiteSpace;
import { Comma, WhiteSpace } from '../../tokenizer/index.js';
// var( <ident> , <value>? )
module.exports = function() {
var children = this.createList();
export default function() {
const children = this.createList();
this.scanner.skipSC();
this.skipSC();
// NOTE: Don't check more than a first argument is an ident, rest checks are for lexer
children.push(this.Identifier());
this.scanner.skipSC();
this.skipSC();
if (this.scanner.tokenType === COMMA) {
if (this.tokenType === Comma) {
children.push(this.Operator());
const startIndex = this.scanner.tokenIndex;
const startIndex = this.tokenIndex;
const value = this.parseCustomProperty
? this.Value(null)
: this.Raw(this.scanner.tokenIndex, rawMode.exclamationMarkOrSemicolon, false);
: this.Raw(this.consumeUntilExclamationMarkOrSemicolon, false);
if (value.type === 'Value' && value.children.isEmpty()) {
for (let offset = startIndex - this.scanner.tokenIndex; offset <= 0; offset++) {
if (this.scanner.lookupType(offset) === WHITESPACE) {
if (value.type === 'Value' && value.children.isEmpty) {
for (let offset = startIndex - this.tokenIndex; offset <= 0; offset++) {
if (this.lookupType(offset) === WhiteSpace) {
value.children.appendData({
type: 'WhiteSpace',
loc: null,

View File

@@ -1,21 +1,10 @@
function merge() {
var dest = {};
import createSyntax from './create.js';
import lexerConfig from './config/lexer.js';
import parserConfig from './config/parser.js';
import walkerConfig from './config/walker.js';
for (var i = 0; i < arguments.length; i++) {
var src = arguments[i];
for (var key in src) {
dest[key] = src[key];
}
}
return dest;
}
module.exports = require('./create').create(
merge(
require('./config/lexer'),
require('./config/parser'),
require('./config/walker')
)
);
module.exports.version = require('../../package.json').version;
export default createSyntax({
...lexerConfig,
...parserConfig,
...walkerConfig
});

View File

@@ -1,21 +1,21 @@
var cmpChar = require('../../tokenizer').cmpChar;
var isDigit = require('../../tokenizer').isDigit;
var TYPE = require('../../tokenizer').TYPE;
import {
isDigit,
WhiteSpace,
Comment,
Ident,
Number,
Dimension
} from '../../tokenizer/index.js';
var WHITESPACE = TYPE.WhiteSpace;
var COMMENT = TYPE.Comment;
var IDENT = TYPE.Ident;
var NUMBER = TYPE.Number;
var DIMENSION = TYPE.Dimension;
var PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
var HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
var N = 0x006E; // U+006E LATIN SMALL LETTER N (n)
var DISALLOW_SIGN = true;
var ALLOW_SIGN = false;
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
const N = 0x006E; // U+006E LATIN SMALL LETTER N (n)
const DISALLOW_SIGN = true;
const ALLOW_SIGN = false;
function checkInteger(offset, disallowSign) {
var pos = this.scanner.tokenStart + offset;
var code = this.scanner.source.charCodeAt(pos);
let pos = this.tokenStart + offset;
const code = this.charCodeAt(pos);
if (code === PLUSSIGN || code === HYPHENMINUS) {
if (disallowSign) {
@@ -24,8 +24,8 @@ function checkInteger(offset, disallowSign) {
pos++;
}
for (; pos < this.scanner.tokenEnd; pos++) {
if (!isDigit(this.scanner.source.charCodeAt(pos))) {
for (; pos < this.tokenEnd; pos++) {
if (!isDigit(this.charCodeAt(pos))) {
this.error('Integer is expected', pos);
}
}
@@ -36,8 +36,8 @@ function checkTokenIsInteger(disallowSign) {
}
function expectCharCode(offset, code) {
if (!cmpChar(this.scanner.source, this.scanner.tokenStart + offset, code)) {
var msg = '';
if (!this.cmpChar(this.tokenStart + offset, code)) {
let msg = '';
switch (code) {
case N:
@@ -48,32 +48,32 @@ function expectCharCode(offset, code) {
break;
}
this.error(msg, this.scanner.tokenStart + offset);
this.error(msg, this.tokenStart + offset);
}
}
// ... <signed-integer>
// ... ['+' | '-'] <signless-integer>
function consumeB() {
var offset = 0;
var sign = 0;
var type = this.scanner.tokenType;
let offset = 0;
let sign = 0;
let type = this.tokenType;
while (type === WHITESPACE || type === COMMENT) {
type = this.scanner.lookupType(++offset);
while (type === WhiteSpace || type === Comment) {
type = this.lookupType(++offset);
}
if (type !== NUMBER) {
if (this.scanner.isDelim(PLUSSIGN, offset) ||
this.scanner.isDelim(HYPHENMINUS, offset)) {
sign = this.scanner.isDelim(PLUSSIGN, offset) ? PLUSSIGN : HYPHENMINUS;
if (type !== Number) {
if (this.isDelim(PLUSSIGN, offset) ||
this.isDelim(HYPHENMINUS, offset)) {
sign = this.isDelim(PLUSSIGN, offset) ? PLUSSIGN : HYPHENMINUS;
do {
type = this.scanner.lookupType(++offset);
} while (type === WHITESPACE || type === COMMENT);
type = this.lookupType(++offset);
} while (type === WhiteSpace || type === Comment);
if (type !== NUMBER) {
this.scanner.skip(offset);
if (type !== Number) {
this.skip(offset);
checkTokenIsInteger.call(this, DISALLOW_SIGN);
}
} else {
@@ -82,216 +82,211 @@ function consumeB() {
}
if (offset > 0) {
this.scanner.skip(offset);
this.skip(offset);
}
if (sign === 0) {
type = this.scanner.source.charCodeAt(this.scanner.tokenStart);
type = this.charCodeAt(this.tokenStart);
if (type !== PLUSSIGN && type !== HYPHENMINUS) {
this.error('Number sign is expected');
}
}
checkTokenIsInteger.call(this, sign !== 0);
return sign === HYPHENMINUS ? '-' + this.consume(NUMBER) : this.consume(NUMBER);
return sign === HYPHENMINUS ? '-' + this.consume(Number) : this.consume(Number);
}
// An+B microsyntax https://www.w3.org/TR/css-syntax-3/#anb
module.exports = {
name: 'AnPlusB',
structure: {
a: [String, null],
b: [String, null]
},
parse: function() {
/* eslint-disable brace-style*/
var start = this.scanner.tokenStart;
var a = null;
var b = null;
export const name = 'AnPlusB';
export const structure = {
a: [String, null],
b: [String, null]
};
// <integer>
if (this.scanner.tokenType === NUMBER) {
checkTokenIsInteger.call(this, ALLOW_SIGN);
b = this.consume(NUMBER);
export function parse() {
/* eslint-disable brace-style*/
const start = this.tokenStart;
let a = null;
let b = null;
// <integer>
if (this.tokenType === Number) {
checkTokenIsInteger.call(this, ALLOW_SIGN);
b = this.consume(Number);
}
// -n
// -n <signed-integer>
// -n ['+' | '-'] <signless-integer>
// -n- <signless-integer>
// <dashndashdigit-ident>
else if (this.tokenType === Ident && this.cmpChar(this.tokenStart, HYPHENMINUS)) {
a = '-1';
expectCharCode.call(this, 1, N);
switch (this.tokenEnd - this.tokenStart) {
// -n
// -n <signed-integer>
// -n ['+' | '-'] <signless-integer>
case 2:
this.next();
b = consumeB.call(this);
break;
// -n- <signless-integer>
case 3:
expectCharCode.call(this, 2, HYPHENMINUS);
this.next();
this.skipSC();
checkTokenIsInteger.call(this, DISALLOW_SIGN);
b = '-' + this.consume(Number);
break;
// <dashndashdigit-ident>
default:
expectCharCode.call(this, 2, HYPHENMINUS);
checkInteger.call(this, 3, DISALLOW_SIGN);
this.next();
b = this.substrToCursor(start + 2);
}
}
// '+'? n
// '+'? n <signed-integer>
// '+'? n ['+' | '-'] <signless-integer>
// '+'? n- <signless-integer>
// '+'? <ndashdigit-ident>
else if (this.tokenType === Ident || (this.isDelim(PLUSSIGN) && this.lookupType(1) === Ident)) {
let sign = 0;
a = '1';
// just ignore a plus
if (this.isDelim(PLUSSIGN)) {
sign = 1;
this.next();
}
// -n
// -n <signed-integer>
// -n ['+' | '-'] <signless-integer>
// -n- <signless-integer>
// <dashndashdigit-ident>
else if (this.scanner.tokenType === IDENT && cmpChar(this.scanner.source, this.scanner.tokenStart, HYPHENMINUS)) {
a = '-1';
expectCharCode.call(this, 0, N);
expectCharCode.call(this, 1, N);
switch (this.tokenEnd - this.tokenStart) {
// '+'? n
// '+'? n <signed-integer>
// '+'? n ['+' | '-'] <signless-integer>
case 1:
this.next();
b = consumeB.call(this);
break;
switch (this.scanner.getTokenLength()) {
// -n
// -n <signed-integer>
// -n ['+' | '-'] <signless-integer>
case 2:
this.scanner.next();
b = consumeB.call(this);
break;
// '+'? n- <signless-integer>
case 2:
expectCharCode.call(this, 1, HYPHENMINUS);
// -n- <signless-integer>
case 3:
expectCharCode.call(this, 2, HYPHENMINUS);
this.next();
this.skipSC();
this.scanner.next();
this.scanner.skipSC();
checkTokenIsInteger.call(this, DISALLOW_SIGN);
checkTokenIsInteger.call(this, DISALLOW_SIGN);
b = '-' + this.consume(Number);
break;
b = '-' + this.consume(NUMBER);
break;
// '+'? <ndashdigit-ident>
default:
expectCharCode.call(this, 1, HYPHENMINUS);
checkInteger.call(this, 2, DISALLOW_SIGN);
this.next();
// <dashndashdigit-ident>
default:
expectCharCode.call(this, 2, HYPHENMINUS);
checkInteger.call(this, 3, DISALLOW_SIGN);
this.scanner.next();
b = this.substrToCursor(start + sign + 1);
}
}
b = this.scanner.substrToCursor(start + 2);
// <ndashdigit-dimension>
// <ndash-dimension> <signless-integer>
// <n-dimension>
// <n-dimension> <signed-integer>
// <n-dimension> ['+' | '-'] <signless-integer>
else if (this.tokenType === Dimension) {
const code = this.charCodeAt(this.tokenStart);
const sign = code === PLUSSIGN || code === HYPHENMINUS;
let i = this.tokenStart + sign;
for (; i < this.tokenEnd; i++) {
if (!isDigit(this.charCodeAt(i))) {
break;
}
}
// '+'? n
// '+'? n <signed-integer>
// '+'? n ['+' | '-'] <signless-integer>
// '+'? n- <signless-integer>
// '+'? <ndashdigit-ident>
else if (this.scanner.tokenType === IDENT || (this.scanner.isDelim(PLUSSIGN) && this.scanner.lookupType(1) === IDENT)) {
var sign = 0;
a = '1';
// just ignore a plus
if (this.scanner.isDelim(PLUSSIGN)) {
sign = 1;
this.scanner.next();
}
expectCharCode.call(this, 0, N);
switch (this.scanner.getTokenLength()) {
// '+'? n
// '+'? n <signed-integer>
// '+'? n ['+' | '-'] <signless-integer>
case 1:
this.scanner.next();
b = consumeB.call(this);
break;
// '+'? n- <signless-integer>
case 2:
expectCharCode.call(this, 1, HYPHENMINUS);
this.scanner.next();
this.scanner.skipSC();
checkTokenIsInteger.call(this, DISALLOW_SIGN);
b = '-' + this.consume(NUMBER);
break;
// '+'? <ndashdigit-ident>
default:
expectCharCode.call(this, 1, HYPHENMINUS);
checkInteger.call(this, 2, DISALLOW_SIGN);
this.scanner.next();
b = this.scanner.substrToCursor(start + sign + 1);
}
if (i === this.tokenStart + sign) {
this.error('Integer is expected', this.tokenStart + sign);
}
// <ndashdigit-dimension>
// <ndash-dimension> <signless-integer>
expectCharCode.call(this, i - this.tokenStart, N);
a = this.substring(start, i);
// <n-dimension>
// <n-dimension> <signed-integer>
// <n-dimension> ['+' | '-'] <signless-integer>
else if (this.scanner.tokenType === DIMENSION) {
var code = this.scanner.source.charCodeAt(this.scanner.tokenStart);
var sign = code === PLUSSIGN || code === HYPHENMINUS;
for (var i = this.scanner.tokenStart + sign; i < this.scanner.tokenEnd; i++) {
if (!isDigit(this.scanner.source.charCodeAt(i))) {
break;
}
}
if (i === this.scanner.tokenStart + sign) {
this.error('Integer is expected', this.scanner.tokenStart + sign);
}
expectCharCode.call(this, i - this.scanner.tokenStart, N);
a = this.scanner.source.substring(start, i);
// <n-dimension>
// <n-dimension> <signed-integer>
// <n-dimension> ['+' | '-'] <signless-integer>
if (i + 1 === this.scanner.tokenEnd) {
this.scanner.next();
b = consumeB.call(this);
} else {
expectCharCode.call(this, i - this.scanner.tokenStart + 1, HYPHENMINUS);
// <ndash-dimension> <signless-integer>
if (i + 2 === this.scanner.tokenEnd) {
this.scanner.next();
this.scanner.skipSC();
checkTokenIsInteger.call(this, DISALLOW_SIGN);
b = '-' + this.consume(NUMBER);
}
// <ndashdigit-dimension>
else {
checkInteger.call(this, i - this.scanner.tokenStart + 2, DISALLOW_SIGN);
this.scanner.next();
b = this.scanner.substrToCursor(i + 1);
}
}
if (i + 1 === this.tokenEnd) {
this.next();
b = consumeB.call(this);
} else {
this.error();
}
expectCharCode.call(this, i - this.tokenStart + 1, HYPHENMINUS);
if (a !== null && a.charCodeAt(0) === PLUSSIGN) {
a = a.substr(1);
}
if (b !== null && b.charCodeAt(0) === PLUSSIGN) {
b = b.substr(1);
}
return {
type: 'AnPlusB',
loc: this.getLocation(start, this.scanner.tokenStart),
a: a,
b: b
};
},
generate: function(node) {
var a = node.a !== null && node.a !== undefined;
var b = node.b !== null && node.b !== undefined;
if (a) {
this.chunk(
node.a === '+1' ? '+n' : // eslint-disable-line operator-linebreak, indent
node.a === '1' ? 'n' : // eslint-disable-line operator-linebreak, indent
node.a === '-1' ? '-n' : // eslint-disable-line operator-linebreak, indent
node.a + 'n' // eslint-disable-line operator-linebreak, indent
);
if (b) {
b = String(node.b);
if (b.charAt(0) === '-' || b.charAt(0) === '+') {
this.chunk(b.charAt(0));
this.chunk(b.substr(1));
} else {
this.chunk('+');
this.chunk(b);
}
// <ndash-dimension> <signless-integer>
if (i + 2 === this.tokenEnd) {
this.next();
this.skipSC();
checkTokenIsInteger.call(this, DISALLOW_SIGN);
b = '-' + this.consume(Number);
}
// <ndashdigit-dimension>
else {
checkInteger.call(this, i - this.tokenStart + 2, DISALLOW_SIGN);
this.next();
b = this.substrToCursor(i + 1);
}
} else {
this.chunk(String(node.b));
}
} else {
this.error();
}
};
if (a !== null && a.charCodeAt(0) === PLUSSIGN) {
a = a.substr(1);
}
if (b !== null && b.charCodeAt(0) === PLUSSIGN) {
b = b.substr(1);
}
return {
type: 'AnPlusB',
loc: this.getLocation(start, this.tokenStart),
a,
b
};
}
export function generate(node) {
if (node.a) {
const a =
node.a === '+1' && 'n' ||
node.a === '1' && 'n' ||
node.a === '-1' && '-n' ||
node.a + 'n';
if (node.b) {
const b = node.b[0] === '-' || node.b[0] === '+'
? node.b
: '+' + node.b;
this.tokenize(a + b);
} else {
this.tokenize(a);
}
} else {
this.tokenize(node.b);
}
}

View File

@@ -1,23 +1,22 @@
var TYPE = require('../../tokenizer').TYPE;
var rawMode = require('./Raw').mode;
import {
AtKeyword,
Semicolon,
LeftCurlyBracket,
RightCurlyBracket
} from '../../tokenizer/index.js';
var ATKEYWORD = TYPE.AtKeyword;
var SEMICOLON = TYPE.Semicolon;
var LEFTCURLYBRACKET = TYPE.LeftCurlyBracket;
var RIGHTCURLYBRACKET = TYPE.RightCurlyBracket;
function consumeRaw(startToken) {
return this.Raw(startToken, rawMode.leftCurlyBracketOrSemicolon, true);
function consumeRaw() {
return this.Raw(this.consumeUntilLeftCurlyBracketOrSemicolon, true);
}
function isDeclarationBlockAtrule() {
for (var offset = 1, type; type = this.scanner.lookupType(offset); offset++) {
if (type === RIGHTCURLYBRACKET) {
for (let offset = 1, type; type = this.lookupType(offset); offset++) {
if (type === RightCurlyBracket) {
return true;
}
if (type === LEFTCURLYBRACKET ||
type === ATKEYWORD) {
if (type === LeftCurlyBracket ||
type === AtKeyword) {
return false;
}
}
@@ -25,83 +24,77 @@ function isDeclarationBlockAtrule() {
return false;
}
module.exports = {
name: 'Atrule',
structure: {
name: String,
prelude: ['AtrulePrelude', 'Raw', null],
block: ['Block', null]
},
parse: function() {
var start = this.scanner.tokenStart;
var name;
var nameLowerCase;
var prelude = null;
var block = null;
this.eat(ATKEYWORD);
export const name = 'Atrule';
export const walkContext = 'atrule';
export const structure = {
name: String,
prelude: ['AtrulePrelude', 'Raw', null],
block: ['Block', null]
};
name = this.scanner.substrToCursor(start + 1);
nameLowerCase = name.toLowerCase();
this.scanner.skipSC();
export function parse(isDeclaration = false) {
const start = this.tokenStart;
let name;
let nameLowerCase;
let prelude = null;
let block = null;
// parse prelude
if (this.scanner.eof === false &&
this.scanner.tokenType !== LEFTCURLYBRACKET &&
this.scanner.tokenType !== SEMICOLON) {
if (this.parseAtrulePrelude) {
prelude = this.parseWithFallback(this.AtrulePrelude.bind(this, name), consumeRaw);
this.eat(AtKeyword);
// turn empty AtrulePrelude into null
if (prelude.type === 'AtrulePrelude' && prelude.children.head === null) {
prelude = null;
}
name = this.substrToCursor(start + 1);
nameLowerCase = name.toLowerCase();
this.skipSC();
// parse prelude
if (this.eof === false &&
this.tokenType !== LeftCurlyBracket &&
this.tokenType !== Semicolon) {
if (this.parseAtrulePrelude) {
prelude = this.parseWithFallback(this.AtrulePrelude.bind(this, name, isDeclaration), consumeRaw);
} else {
prelude = consumeRaw.call(this, this.tokenIndex);
}
this.skipSC();
}
switch (this.tokenType) {
case Semicolon:
this.next();
break;
case LeftCurlyBracket:
if (hasOwnProperty.call(this.atrule, nameLowerCase) &&
typeof this.atrule[nameLowerCase].block === 'function') {
block = this.atrule[nameLowerCase].block.call(this, isDeclaration);
} else {
prelude = consumeRaw.call(this, this.scanner.tokenIndex);
// TODO: should consume block content as Raw?
block = this.Block(isDeclarationBlockAtrule.call(this));
}
this.scanner.skipSC();
}
break;
}
switch (this.scanner.tokenType) {
case SEMICOLON:
this.scanner.next();
break;
return {
type: 'Atrule',
loc: this.getLocation(start, this.tokenStart),
name,
prelude,
block
};
}
case LEFTCURLYBRACKET:
if (this.atrule.hasOwnProperty(nameLowerCase) &&
typeof this.atrule[nameLowerCase].block === 'function') {
block = this.atrule[nameLowerCase].block.call(this);
} else {
// TODO: should consume block content as Raw?
block = this.Block(isDeclarationBlockAtrule.call(this));
}
export function generate(node) {
this.token(AtKeyword, '@' + node.name);
break;
}
if (node.prelude !== null) {
this.node(node.prelude);
}
return {
type: 'Atrule',
loc: this.getLocation(start, this.scanner.tokenStart),
name: name,
prelude: prelude,
block: block
};
},
generate: function(node) {
this.chunk('@');
this.chunk(node.name);
if (node.prelude !== null) {
this.chunk(' ');
this.node(node.prelude);
}
if (node.block) {
this.node(node.block);
} else {
this.chunk(';');
}
},
walkContext: 'atrule'
};
if (node.block) {
this.node(node.block);
} else {
this.token(Semicolon, ';');
}
}

View File

@@ -1,51 +1,47 @@
var TYPE = require('../../tokenizer').TYPE;
import {
Semicolon,
LeftCurlyBracket
} from '../../tokenizer/index.js';
var SEMICOLON = TYPE.Semicolon;
var LEFTCURLYBRACKET = TYPE.LeftCurlyBracket;
module.exports = {
name: 'AtrulePrelude',
structure: {
children: [[]]
},
parse: function(name) {
var children = null;
if (name !== null) {
name = name.toLowerCase();
}
this.scanner.skipSC();
if (this.atrule.hasOwnProperty(name) &&
typeof this.atrule[name].prelude === 'function') {
// custom consumer
children = this.atrule[name].prelude.call(this);
} else {
// default consumer
children = this.readSequence(this.scope.AtrulePrelude);
}
this.scanner.skipSC();
if (this.scanner.eof !== true &&
this.scanner.tokenType !== LEFTCURLYBRACKET &&
this.scanner.tokenType !== SEMICOLON) {
this.error('Semicolon or block is expected');
}
if (children === null) {
children = this.createList();
}
return {
type: 'AtrulePrelude',
loc: this.getLocationFromList(children),
children: children
};
},
generate: function(node) {
this.children(node);
},
walkContext: 'atrulePrelude'
export const name = 'AtrulePrelude';
export const walkContext = 'atrulePrelude';
export const structure = {
children: [[]]
};
export function parse(name) {
let children = null;
if (name !== null) {
name = name.toLowerCase();
}
this.skipSC();
if (hasOwnProperty.call(this.atrule, name) &&
typeof this.atrule[name].prelude === 'function') {
// custom consumer
children = this.atrule[name].prelude.call(this);
} else {
// default consumer
children = this.readSequence(this.scope.AtrulePrelude);
}
this.skipSC();
if (this.eof !== true &&
this.tokenType !== LeftCurlyBracket &&
this.tokenType !== Semicolon) {
this.error('Semicolon or block is expected');
}
return {
type: 'AtrulePrelude',
loc: this.getLocationFromList(children),
children
};
}
export function generate(node) {
this.children(node);
}

View File

@@ -1,60 +1,54 @@
var TYPE = require('../../tokenizer').TYPE;
import {
Ident,
String as StringToken,
Delim,
LeftSquareBracket,
RightSquareBracket
} from '../../tokenizer/index.js';
var IDENT = TYPE.Ident;
var STRING = TYPE.String;
var COLON = TYPE.Colon;
var LEFTSQUAREBRACKET = TYPE.LeftSquareBracket;
var RIGHTSQUAREBRACKET = TYPE.RightSquareBracket;
var DOLLARSIGN = 0x0024; // U+0024 DOLLAR SIGN ($)
var ASTERISK = 0x002A; // U+002A ASTERISK (*)
var EQUALSSIGN = 0x003D; // U+003D EQUALS SIGN (=)
var CIRCUMFLEXACCENT = 0x005E; // U+005E (^)
var VERTICALLINE = 0x007C; // U+007C VERTICAL LINE (|)
var TILDE = 0x007E; // U+007E TILDE (~)
const DOLLARSIGN = 0x0024; // U+0024 DOLLAR SIGN ($)
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
const EQUALSSIGN = 0x003D; // U+003D EQUALS SIGN (=)
const CIRCUMFLEXACCENT = 0x005E; // U+005E (^)
const VERTICALLINE = 0x007C; // U+007C VERTICAL LINE (|)
const TILDE = 0x007E; // U+007E TILDE (~)
function getAttributeName() {
if (this.scanner.eof) {
if (this.eof) {
this.error('Unexpected end of input');
}
var start = this.scanner.tokenStart;
var expectIdent = false;
var checkColon = true;
const start = this.tokenStart;
let expectIdent = false;
if (this.scanner.isDelim(ASTERISK)) {
if (this.isDelim(ASTERISK)) {
expectIdent = true;
checkColon = false;
this.scanner.next();
} else if (!this.scanner.isDelim(VERTICALLINE)) {
this.eat(IDENT);
this.next();
} else if (!this.isDelim(VERTICALLINE)) {
this.eat(Ident);
}
if (this.scanner.isDelim(VERTICALLINE)) {
if (this.scanner.source.charCodeAt(this.scanner.tokenStart + 1) !== EQUALSSIGN) {
this.scanner.next();
this.eat(IDENT);
if (this.isDelim(VERTICALLINE)) {
if (this.charCodeAt(this.tokenStart + 1) !== EQUALSSIGN) {
this.next();
this.eat(Ident);
} else if (expectIdent) {
this.error('Identifier is expected', this.scanner.tokenEnd);
this.error('Identifier is expected', this.tokenEnd);
}
} else if (expectIdent) {
this.error('Vertical line is expected');
}
if (checkColon && this.scanner.tokenType === COLON) {
this.scanner.next();
this.eat(IDENT);
}
return {
type: 'Identifier',
loc: this.getLocation(start, this.scanner.tokenStart),
name: this.scanner.substrToCursor(start)
loc: this.getLocation(start, this.tokenStart),
name: this.substrToCursor(start)
};
}
function getOperator() {
var start = this.scanner.tokenStart;
var code = this.scanner.source.charCodeAt(start);
const start = this.tokenStart;
const code = this.charCodeAt(start);
if (code !== EQUALSSIGN && // =
code !== TILDE && // ~=
@@ -66,100 +60,88 @@ function getOperator() {
this.error('Attribute selector (=, ~=, ^=, $=, *=, |=) is expected');
}
this.scanner.next();
this.next();
if (code !== EQUALSSIGN) {
if (!this.scanner.isDelim(EQUALSSIGN)) {
if (!this.isDelim(EQUALSSIGN)) {
this.error('Equal sign is expected');
}
this.scanner.next();
this.next();
}
return this.scanner.substrToCursor(start);
return this.substrToCursor(start);
}
// '[' <wq-name> ']'
// '[' <wq-name> <attr-matcher> [ <string-token> | <ident-token> ] <attr-modifier>? ']'
module.exports = {
name: 'AttributeSelector',
structure: {
name: 'Identifier',
matcher: [String, null],
value: ['String', 'Identifier', null],
flags: [String, null]
},
parse: function() {
var start = this.scanner.tokenStart;
var name;
var matcher = null;
var value = null;
var flags = null;
this.eat(LEFTSQUAREBRACKET);
this.scanner.skipSC();
name = getAttributeName.call(this);
this.scanner.skipSC();
if (this.scanner.tokenType !== RIGHTSQUAREBRACKET) {
// avoid case `[name i]`
if (this.scanner.tokenType !== IDENT) {
matcher = getOperator.call(this);
this.scanner.skipSC();
value = this.scanner.tokenType === STRING
? this.String()
: this.Identifier();
this.scanner.skipSC();
}
// attribute flags
if (this.scanner.tokenType === IDENT) {
flags = this.scanner.getTokenValue();
this.scanner.next();
this.scanner.skipSC();
}
}
this.eat(RIGHTSQUAREBRACKET);
return {
type: 'AttributeSelector',
loc: this.getLocation(start, this.scanner.tokenStart),
name: name,
matcher: matcher,
value: value,
flags: flags
};
},
generate: function(node) {
var flagsPrefix = ' ';
this.chunk('[');
this.node(node.name);
if (node.matcher !== null) {
this.chunk(node.matcher);
if (node.value !== null) {
this.node(node.value);
// space between string and flags is not required
if (node.value.type === 'String') {
flagsPrefix = '';
}
}
}
if (node.flags !== null) {
this.chunk(flagsPrefix);
this.chunk(node.flags);
}
this.chunk(']');
}
// AST node for an attribute selector, e.g. `[name]` or `[name=value i]`.
export const name = 'AttributeSelector';
export const structure = {
name: 'Identifier',
matcher: [String, null],
value: ['String', 'Identifier', null],
flags: [String, null]
};
// '[' <wq-name> ']'
// '[' <wq-name> <attr-matcher> [ <string-token> | <ident-token> ] <attr-modifier>? ']'
export function parse() {
const start = this.tokenStart;
let name;
let matcher = null;
let value = null;
let flags = null;
// opening bracket, then the attribute name
this.eat(LeftSquareBracket);
this.skipSC();
name = getAttributeName.call(this);
this.skipSC();
if (this.tokenType !== RightSquareBracket) {
// avoid case `[name i]`
if (this.tokenType !== Ident) {
matcher = getOperator.call(this);
this.skipSC();
// the matched value is either a string or an identifier
value = this.tokenType === StringToken
? this.String()
: this.Identifier();
this.skipSC();
}
// attribute flags
if (this.tokenType === Ident) {
flags = this.consume(Ident);
this.skipSC();
}
}
this.eat(RightSquareBracket);
return {
type: 'AttributeSelector',
loc: this.getLocation(start, this.tokenStart),
name,
matcher,
value,
flags
};
}
// Serializes an AttributeSelector node back to CSS:
// '[' name [matcher value]? flags? ']'
export function generate(node) {
    const { matcher, value, flags } = node;

    this.token(Delim, '[');
    this.node(node.name);

    if (matcher !== null) {
        this.tokenize(matcher);
        this.node(value);
    }

    if (flags !== null) {
        this.token(Ident, flags);
    }

    this.token(Delim, ']');
}

View File

@@ -1,91 +1,95 @@
var TYPE = require('../../tokenizer').TYPE;
var rawMode = require('./Raw').mode;
import {
WhiteSpace,
Comment,
Semicolon,
AtKeyword,
LeftCurlyBracket,
RightCurlyBracket
} from '../../tokenizer/index.js';
var WHITESPACE = TYPE.WhiteSpace;
var COMMENT = TYPE.Comment;
var SEMICOLON = TYPE.Semicolon;
var ATKEYWORD = TYPE.AtKeyword;
var LEFTCURLYBRACKET = TYPE.LeftCurlyBracket;
var RIGHTCURLYBRACKET = TYPE.RightCurlyBracket;
const AMPERSAND = 0x0026; // U+0026 AMPERSAND (&)
function consumeRaw(startToken) {
return this.Raw(startToken, null, true);
function consumeRaw() {
return this.Raw(null, true);
}
function consumeRule() {
return this.parseWithFallback(this.Rule, consumeRaw);
}
function consumeRawDeclaration(startToken) {
return this.Raw(startToken, rawMode.semicolonIncluded, true);
function consumeRawDeclaration() {
return this.Raw(this.consumeUntilSemicolonIncluded, true);
}
function consumeDeclaration() {
if (this.scanner.tokenType === SEMICOLON) {
return consumeRawDeclaration.call(this, this.scanner.tokenIndex);
if (this.tokenType === Semicolon) {
return consumeRawDeclaration.call(this, this.tokenIndex);
}
var node = this.parseWithFallback(this.Declaration, consumeRawDeclaration);
const node = this.parseWithFallback(this.Declaration, consumeRawDeclaration);
if (this.scanner.tokenType === SEMICOLON) {
this.scanner.next();
if (this.tokenType === Semicolon) {
this.next();
}
return node;
}
module.exports = {
name: 'Block',
structure: {
children: [[
'Atrule',
'Rule',
'Declaration'
]]
},
parse: function(isDeclaration) {
var consumer = isDeclaration ? consumeDeclaration : consumeRule;
var start = this.scanner.tokenStart;
var children = this.createList();
this.eat(LEFTCURLYBRACKET);
scan:
while (!this.scanner.eof) {
switch (this.scanner.tokenType) {
case RIGHTCURLYBRACKET:
break scan;
case WHITESPACE:
case COMMENT:
this.scanner.next();
break;
case ATKEYWORD:
children.push(this.parseWithFallback(this.Atrule, consumeRaw));
break;
default:
children.push(consumer.call(this));
}
}
if (!this.scanner.eof) {
this.eat(RIGHTCURLYBRACKET);
}
return {
type: 'Block',
loc: this.getLocation(start, this.scanner.tokenStart),
children: children
};
},
generate: function(node) {
this.chunk('{');
this.children(node, function(prev) {
if (prev.type === 'Declaration') {
this.chunk(';');
}
});
this.chunk('}');
},
walkContext: 'block'
// AST node for a curly-bracketed block (rule body or at-rule body).
export const name = 'Block';
export const walkContext = 'block';
export const structure = {
children: [[
'Atrule',
'Rule',
'Declaration'
]]
};
// Parses the content between '{' and '}'.
// isStyleBlock=true parses declarations (plus nested at-rules/rules),
// otherwise the block content is parsed as a list of rules.
export function parse(isStyleBlock) {
const consumer = isStyleBlock ? consumeDeclaration : consumeRule;
const start = this.tokenStart;
let children = this.createList();
this.eat(LeftCurlyBracket);
scan:
while (!this.eof) {
switch (this.tokenType) {
case RightCurlyBracket:
break scan;
case WhiteSpace:
case Comment:
this.next();
break;
case AtKeyword:
// at-rules are allowed in both block kinds; fall back to Raw on failure
children.push(this.parseWithFallback(this.Atrule.bind(this, isStyleBlock), consumeRaw));
break;
default:
// a leading "&" inside a style block starts a nested rule
if (isStyleBlock && this.isDelim(AMPERSAND)) {
children.push(consumeRule.call(this));
} else {
children.push(consumer.call(this));
}
}
}
if (!this.eof) {
this.eat(RightCurlyBracket);
}
return {
type: 'Block',
loc: this.getLocation(start, this.tokenStart),
children
};
}
// Serializes a Block node: '{' children '}', terminating each
// declaration child with a semicolon.
export function generate(node) {
    this.token(LeftCurlyBracket, '{');
    this.children(node, (prev) => {
        if (prev.type !== 'Declaration') {
            return;
        }
        this.token(Semicolon, ';');
    });
    this.token(RightCurlyBracket, '}');
}

View File

@@ -1,34 +1,35 @@
var TYPE = require('../../tokenizer').TYPE;
import {
Delim,
LeftSquareBracket,
RightSquareBracket
} from '../../tokenizer/index.js';
var LEFTSQUAREBRACKET = TYPE.LeftSquareBracket;
var RIGHTSQUAREBRACKET = TYPE.RightSquareBracket;
module.exports = {
name: 'Brackets',
structure: {
children: [[]]
},
parse: function(readSequence, recognizer) {
var start = this.scanner.tokenStart;
var children = null;
this.eat(LEFTSQUAREBRACKET);
children = readSequence.call(this, recognizer);
if (!this.scanner.eof) {
this.eat(RIGHTSQUAREBRACKET);
}
return {
type: 'Brackets',
loc: this.getLocation(start, this.scanner.tokenStart),
children: children
};
},
generate: function(node) {
this.chunk('[');
this.children(node);
this.chunk(']');
}
// AST node for a square-bracketed value group.
export const name = 'Brackets';
export const structure = {
    children: [[]]
};

// '[' <sequence> ']'
// The sequence is read with the supplied reader and recognizer;
// the closing bracket is optional at end of input.
export function parse(readSequence, recognizer) {
    const start = this.tokenStart;

    this.eat(LeftSquareBracket);

    const children = readSequence.call(this, recognizer);

    if (!this.eof) {
        this.eat(RightSquareBracket);
    }

    return {
        type: 'Brackets',
        loc: this.getLocation(start, this.tokenStart),
        children
    };
}
// Serializes a Brackets node: '[' children ']'
export function generate(node) {
    const bracket = (ch) => this.token(Delim, ch);

    bracket('[');
    this.children(node);
    bracket(']');
}

View File

@@ -1,19 +1,19 @@
var CDC = require('../../tokenizer').TYPE.CDC;
import { CDC } from '../../tokenizer/index.js';
module.exports = {
name: 'CDC',
structure: [],
parse: function() {
var start = this.scanner.tokenStart;
export const name = 'CDC';
export const structure = [];
this.eat(CDC); // -->
export function parse() {
const start = this.tokenStart;
return {
type: 'CDC',
loc: this.getLocation(start, this.scanner.tokenStart)
};
},
generate: function() {
this.chunk('-->');
}
};
this.eat(CDC); // -->
return {
type: 'CDC',
loc: this.getLocation(start, this.tokenStart)
};
}
export function generate() {
this.token(CDC, '-->');
}

View File

@@ -1,19 +1,19 @@
var CDO = require('../../tokenizer').TYPE.CDO;
import { CDO } from '../../tokenizer/index.js';
module.exports = {
name: 'CDO',
structure: [],
parse: function() {
var start = this.scanner.tokenStart;
export const name = 'CDO';
export const structure = [];
this.eat(CDO); // <!--
export function parse() {
const start = this.tokenStart;
return {
type: 'CDO',
loc: this.getLocation(start, this.scanner.tokenStart)
};
},
generate: function() {
this.chunk('<!--');
}
};
this.eat(CDO); // <!--
return {
type: 'CDO',
loc: this.getLocation(start, this.tokenStart)
};
}
export function generate() {
this.token(CDO, '<!--');
}

View File

@@ -1,29 +1,24 @@
var TYPE = require('../../tokenizer').TYPE;
import { Delim, Ident } from '../../tokenizer/index.js';
var IDENT = TYPE.Ident;
var FULLSTOP = 0x002E; // U+002E FULL STOP (.)
const FULLSTOP = 0x002E; // U+002E FULL STOP (.)
// '.' ident
module.exports = {
name: 'ClassSelector',
structure: {
name: String
},
parse: function() {
if (!this.scanner.isDelim(FULLSTOP)) {
this.error('Full stop is expected');
}
this.scanner.next();
return {
type: 'ClassSelector',
loc: this.getLocation(this.scanner.tokenStart - 1, this.scanner.tokenEnd),
name: this.consume(IDENT)
};
},
generate: function(node) {
this.chunk('.');
this.chunk(node.name);
}
// AST node for a class selector.
export const name = 'ClassSelector';
export const structure = {
    name: String
};

// '.' <ident>
// The location starts one char before the ident to cover the dot.
export function parse() {
    this.eatDelim(FULLSTOP);

    // compute the location before consuming the ident (consume advances the cursor)
    const loc = this.getLocation(this.tokenStart - 1, this.tokenEnd);
    const selectorName = this.consume(Ident);

    return {
        type: 'ClassSelector',
        loc,
        name: selectorName
    };
}
// Serializes a ClassSelector node: '.' name
export function generate(node) {
    const { name: className } = node;

    this.token(Delim, '.');
    this.token(Ident, className);
}

View File

@@ -1,55 +1,54 @@
var TYPE = require('../../tokenizer').TYPE;
import { WhiteSpace, Delim } from '../../tokenizer/index.js';
var IDENT = TYPE.Ident;
var PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
var SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
var GREATERTHANSIGN = 0x003E; // U+003E GREATER-THAN SIGN (>)
var TILDE = 0x007E; // U+007E TILDE (~)
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
const GREATERTHANSIGN = 0x003E; // U+003E GREATER-THAN SIGN (>)
const TILDE = 0x007E; // U+007E TILDE (~)
export const name = 'Combinator';
export const structure = {
name: String
};
// + | > | ~ | /deep/
module.exports = {
name: 'Combinator',
structure: {
name: String
},
parse: function() {
var start = this.scanner.tokenStart;
var code = this.scanner.source.charCodeAt(this.scanner.tokenStart);
export function parse() {
const start = this.tokenStart;
let name;
switch (code) {
case GREATERTHANSIGN:
case PLUSSIGN:
case TILDE:
this.scanner.next();
break;
switch (this.tokenType) {
case WhiteSpace:
name = ' ';
break;
case SOLIDUS:
this.scanner.next();
case Delim:
switch (this.charCodeAt(this.tokenStart)) {
case GREATERTHANSIGN:
case PLUSSIGN:
case TILDE:
this.next();
break;
if (this.scanner.tokenType !== IDENT || this.scanner.lookupValue(0, 'deep') === false) {
this.error('Identifier `deep` is expected');
}
case SOLIDUS:
this.next();
this.eatIdent('deep');
this.eatDelim(SOLIDUS);
break;
this.scanner.next();
default:
this.error('Combinator is expected');
}
if (!this.scanner.isDelim(SOLIDUS)) {
this.error('Solidus is expected');
}
this.scanner.next();
break;
default:
this.error('Combinator is expected');
}
return {
type: 'Combinator',
loc: this.getLocation(start, this.scanner.tokenStart),
name: this.scanner.substrToCursor(start)
};
},
generate: function(node) {
this.chunk(node.name);
name = this.substrToCursor(start);
break;
}
};
return {
type: 'Combinator',
loc: this.getLocation(start, this.tokenStart),
name
};
}
export function generate(node) {
this.tokenize(node.name);
}

View File

@@ -1,36 +1,33 @@
var TYPE = require('../../tokenizer').TYPE;
import { Comment } from '../../tokenizer/index.js';
var COMMENT = TYPE.Comment;
var ASTERISK = 0x002A; // U+002A ASTERISK (*)
var SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
// '/*' .* '*/'
module.exports = {
name: 'Comment',
structure: {
value: String
},
parse: function() {
var start = this.scanner.tokenStart;
var end = this.scanner.tokenEnd;
this.eat(COMMENT);
if ((end - start + 2) >= 2 &&
this.scanner.source.charCodeAt(end - 2) === ASTERISK &&
this.scanner.source.charCodeAt(end - 1) === SOLIDUS) {
end -= 2;
}
return {
type: 'Comment',
loc: this.getLocation(start, this.scanner.tokenStart),
value: this.scanner.source.substring(start + 2, end)
};
},
generate: function(node) {
this.chunk('/*');
this.chunk(node.value);
this.chunk('*/');
}
// AST node for a CSS comment.
export const name = 'Comment';
export const structure = {
value: String
};
// '/*' .* '*/'
// `value` holds the comment text without the surrounding markers.
export function parse() {
const start = this.tokenStart;
let end = this.tokenEnd;
this.eat(Comment);
// strip the trailing "*/" when present (it may be absent at end of input)
if ((end - start + 2) >= 2 &&
this.charCodeAt(end - 2) === ASTERISK &&
this.charCodeAt(end - 1) === SOLIDUS) {
end -= 2;
}
return {
type: 'Comment',
loc: this.getLocation(start, this.tokenStart),
value: this.substring(start + 2, end)
};
}
// Serializes a Comment node, restoring the '/*' and '*/' markers.
export function generate(node) {
    this.token(Comment, `/*${node.value}*/`);
}

123
node_modules/css-tree/lib/syntax/node/Condition.js generated vendored Normal file
View File

@@ -0,0 +1,123 @@
import {
WhiteSpace,
Comment,
Ident,
LeftParenthesis,
RightParenthesis,
Function as FunctionToken,
Colon,
EOF
} from '../../tokenizer/index.js';
// Token types that, seen after an identifier, suggest a plain feature
// (e.g. `(width: 100px)`) rather than a range expression.
const likelyFeatureToken = new Set([Colon, RightParenthesis, EOF]);
export const name = 'Condition';
export const structure = {
kind: String,
children: [[
'Identifier',
'Feature',
'FeatureFunction',
'FeatureRange',
'SupportsDeclaration'
]]
};
// Disambiguates `(name: value)` features from `(a < b)` ranges by
// peeking at the two tokens following the opening parenthesis.
function featureOrRange(kind) {
if (this.lookupTypeNonSC(1) === Ident &&
likelyFeatureToken.has(this.lookupTypeNonSC(2))) {
return this.Feature(kind);
}
return this.FeatureRange(kind);
}
// Per-kind handlers for a parenthesized term inside a condition.
const parentheses = {
media: featureOrRange,
container: featureOrRange,
supports() {
return this.SupportsDeclaration();
}
};
// Parses a media/container/supports condition: a sequence of identifiers
// (and/or/not), features, ranges, feature functions and nested
// parenthesized conditions. Errors if the condition is empty.
export function parse(kind = 'media') {
const children = this.createList();
scan: while (!this.eof) {
switch (this.tokenType) {
case Comment:
case WhiteSpace:
this.next();
continue;
case Ident:
children.push(this.Identifier());
break;
case LeftParenthesis: {
// first try the kind-specific feature/range form...
let term = this.parseWithFallback(
() => parentheses[kind].call(this, kind),
() => null
);
// ...then a nested parenthesized condition, finally GeneralEnclosed
if (!term) {
term = this.parseWithFallback(
() => {
this.eat(LeftParenthesis);
const res = this.Condition(kind);
this.eat(RightParenthesis);
return res;
},
() => {
return this.GeneralEnclosed(kind);
}
);
}
children.push(term);
break;
}
case FunctionToken: {
// a known feature function, or GeneralEnclosed as fallback
let term = this.parseWithFallback(
() => this.FeatureFunction(kind),
() => null
);
if (!term) {
term = this.GeneralEnclosed(kind);
}
children.push(term);
break;
}
default:
break scan;
}
}
if (children.isEmpty) {
this.error('Condition is expected');
}
return {
type: 'Condition',
loc: this.getLocationFromList(children),
kind,
children
};
}
// Serializes a Condition node; nested conditions are re-wrapped in
// parentheses, all other children serialize as-is.
export function generate(node) {
    node.children.forEach((child) => {
        if (child.type !== 'Condition') {
            this.node(child);
            return;
        }

        this.token(LeftParenthesis, '(');
        this.node(child);
        this.token(RightParenthesis, ')');
    });
}

View File

@@ -1,167 +1,163 @@
var isCustomProperty = require('../../utils/names').isCustomProperty;
var TYPE = require('../../tokenizer').TYPE;
var rawMode = require('./Raw').mode;
import { isCustomProperty } from '../../utils/names.js';
import {
Ident,
Hash,
Colon,
Semicolon,
Delim,
WhiteSpace
} from '../../tokenizer/index.js';
var IDENT = TYPE.Ident;
var HASH = TYPE.Hash;
var COLON = TYPE.Colon;
var SEMICOLON = TYPE.Semicolon;
var DELIM = TYPE.Delim;
var WHITESPACE = TYPE.WhiteSpace;
var EXCLAMATIONMARK = 0x0021; // U+0021 EXCLAMATION MARK (!)
var NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
var DOLLARSIGN = 0x0024; // U+0024 DOLLAR SIGN ($)
var AMPERSAND = 0x0026; // U+0026 ANPERSAND (&)
var ASTERISK = 0x002A; // U+002A ASTERISK (*)
var PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
var SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
const EXCLAMATIONMARK = 0x0021; // U+0021 EXCLAMATION MARK (!)
const NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
const DOLLARSIGN = 0x0024; // U+0024 DOLLAR SIGN ($)
const AMPERSAND = 0x0026; // U+0026 AMPERSAND (&)
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
function consumeValueRaw(startToken) {
return this.Raw(startToken, rawMode.exclamationMarkOrSemicolon, true);
function consumeValueRaw() {
return this.Raw(this.consumeUntilExclamationMarkOrSemicolon, true);
}
function consumeCustomPropertyRaw(startToken) {
return this.Raw(startToken, rawMode.exclamationMarkOrSemicolon, false);
function consumeCustomPropertyRaw() {
return this.Raw(this.consumeUntilExclamationMarkOrSemicolon, false);
}
function consumeValue() {
var startValueToken = this.scanner.tokenIndex;
var value = this.Value();
const startValueToken = this.tokenIndex;
const value = this.Value();
if (value.type !== 'Raw' &&
this.scanner.eof === false &&
this.scanner.tokenType !== SEMICOLON &&
this.scanner.isDelim(EXCLAMATIONMARK) === false &&
this.scanner.isBalanceEdge(startValueToken) === false) {
this.eof === false &&
this.tokenType !== Semicolon &&
this.isDelim(EXCLAMATIONMARK) === false &&
this.isBalanceEdge(startValueToken) === false) {
this.error();
}
return value;
}
module.exports = {
name: 'Declaration',
structure: {
important: [Boolean, String],
property: String,
value: ['Value', 'Raw']
},
parse: function() {
var start = this.scanner.tokenStart;
var startToken = this.scanner.tokenIndex;
var property = readProperty.call(this);
var customProperty = isCustomProperty(property);
var parseValue = customProperty ? this.parseCustomProperty : this.parseValue;
var consumeRaw = customProperty ? consumeCustomPropertyRaw : consumeValueRaw;
var important = false;
var value;
this.scanner.skipSC();
this.eat(COLON);
const valueStart = this.scanner.tokenIndex;
if (!customProperty) {
this.scanner.skipSC();
}
if (parseValue) {
value = this.parseWithFallback(consumeValue, consumeRaw);
} else {
value = consumeRaw.call(this, this.scanner.tokenIndex);
}
if (customProperty && value.type === 'Value' && value.children.isEmpty()) {
for (let offset = valueStart - this.scanner.tokenIndex; offset <= 0; offset++) {
if (this.scanner.lookupType(offset) === WHITESPACE) {
value.children.appendData({
type: 'WhiteSpace',
loc: null,
value: ' '
});
break;
}
}
}
if (this.scanner.isDelim(EXCLAMATIONMARK)) {
important = getImportant.call(this);
this.scanner.skipSC();
}
// Do not include semicolon to range per spec
// https://drafts.csswg.org/css-syntax/#declaration-diagram
if (this.scanner.eof === false &&
this.scanner.tokenType !== SEMICOLON &&
this.scanner.isBalanceEdge(startToken) === false) {
this.error();
}
return {
type: 'Declaration',
loc: this.getLocation(start, this.scanner.tokenStart),
important: important,
property: property,
value: value
};
},
generate: function(node) {
this.chunk(node.property);
this.chunk(':');
this.node(node.value);
if (node.important) {
this.chunk(node.important === true ? '!important' : '!' + node.important);
}
},
walkContext: 'declaration'
// AST node for a `property: value !important?` declaration.
export const name = 'Declaration';
export const walkContext = 'declaration';
export const structure = {
important: [Boolean, String],
property: String,
value: ['Value', 'Raw']
};
// Custom properties (--foo) get raw value handling and keep whitespace.
export function parse() {
const start = this.tokenStart;
const startToken = this.tokenIndex;
const property = readProperty.call(this);
const customProperty = isCustomProperty(property);
const parseValue = customProperty ? this.parseCustomProperty : this.parseValue;
const consumeRaw = customProperty ? consumeCustomPropertyRaw : consumeValueRaw;
let important = false;
let value;
this.skipSC();
this.eat(Colon);
const valueStart = this.tokenIndex;
if (!customProperty) {
this.skipSC();
}
if (parseValue) {
value = this.parseWithFallback(consumeValue, consumeRaw);
} else {
value = consumeRaw.call(this, this.tokenIndex);
}
// for a custom property whose parsed value is empty, preserve a single
// whitespace (e.g. `--foo: ;` has a non-empty value per spec)
if (customProperty && value.type === 'Value' && value.children.isEmpty) {
for (let offset = valueStart - this.tokenIndex; offset <= 0; offset++) {
if (this.lookupType(offset) === WhiteSpace) {
value.children.appendData({
type: 'WhiteSpace',
loc: null,
value: ' '
});
break;
}
}
}
if (this.isDelim(EXCLAMATIONMARK)) {
important = getImportant.call(this);
this.skipSC();
}
// Do not include semicolon to range per spec
// https://drafts.csswg.org/css-syntax/#declaration-diagram
if (this.eof === false &&
this.tokenType !== Semicolon &&
this.isBalanceEdge(startToken) === false) {
this.error();
}
return {
type: 'Declaration',
loc: this.getLocation(start, this.tokenStart),
important,
property,
value
};
}
// Serializes a Declaration node: property ':' value ['!' important]?
export function generate(node) {
    this.token(Ident, node.property);
    this.token(Colon, ':');
    this.node(node.value);

    if (!node.important) {
        return;
    }

    // `important` is either `true` or the original ident (e.g. "!ie" hacks)
    const importantIdent = node.important === true ? 'important' : node.important;
    this.token(Delim, '!');
    this.token(Ident, importantIdent);
}
function readProperty() {
var start = this.scanner.tokenStart;
var prefix = 0;
const start = this.tokenStart;
// hacks
if (this.scanner.tokenType === DELIM) {
switch (this.scanner.source.charCodeAt(this.scanner.tokenStart)) {
if (this.tokenType === Delim) {
switch (this.charCodeAt(this.tokenStart)) {
case ASTERISK:
case DOLLARSIGN:
case PLUSSIGN:
case NUMBERSIGN:
case AMPERSAND:
this.scanner.next();
this.next();
break;
// TODO: not sure we should support this hack
case SOLIDUS:
this.scanner.next();
if (this.scanner.isDelim(SOLIDUS)) {
this.scanner.next();
this.next();
if (this.isDelim(SOLIDUS)) {
this.next();
}
break;
}
}
if (prefix) {
this.scanner.skip(prefix);
}
if (this.scanner.tokenType === HASH) {
this.eat(HASH);
if (this.tokenType === Hash) {
this.eat(Hash);
} else {
this.eat(IDENT);
this.eat(Ident);
}
return this.scanner.substrToCursor(start);
return this.substrToCursor(start);
}
// ! ws* important
function getImportant() {
this.eat(DELIM);
this.scanner.skipSC();
this.eat(Delim);
this.skipSC();
var important = this.consume(IDENT);
const important = this.consume(Ident);
// store original value in case it differ from `important`
// for better original source restoring and hacks like `!ie` support

View File

@@ -1,49 +1,62 @@
var TYPE = require('../../tokenizer').TYPE;
var rawMode = require('./Raw').mode;
import {
WhiteSpace,
Comment,
Semicolon,
AtKeyword
} from '../../tokenizer/index.js';
var WHITESPACE = TYPE.WhiteSpace;
var COMMENT = TYPE.Comment;
var SEMICOLON = TYPE.Semicolon;
const AMPERSAND = 0x0026; // U+0026 AMPERSAND (&)
function consumeRaw(startToken) {
return this.Raw(startToken, rawMode.semicolonIncluded, true);
function consumeRaw() {
return this.Raw(this.consumeUntilSemicolonIncluded, true);
}
module.exports = {
name: 'DeclarationList',
structure: {
children: [[
'Declaration'
]]
},
parse: function() {
var children = this.createList();
scan:
while (!this.scanner.eof) {
switch (this.scanner.tokenType) {
case WHITESPACE:
case COMMENT:
case SEMICOLON:
this.scanner.next();
break;
default:
children.push(this.parseWithFallback(this.Declaration, consumeRaw));
}
}
return {
type: 'DeclarationList',
loc: this.getLocationFromList(children),
children: children
};
},
generate: function(node) {
this.children(node, function(prev) {
if (prev.type === 'Declaration') {
this.chunk(';');
}
});
}
// AST node for a list of declarations (e.g. a style attribute),
// also allowing nested at-rules and `&`-prefixed nested rules.
export const name = 'DeclarationList';
export const structure = {
children: [[
'Declaration',
'Atrule',
'Rule'
]]
};
export function parse() {
const children = this.createList();
scan:
while (!this.eof) {
switch (this.tokenType) {
case WhiteSpace:
case Comment:
case Semicolon:
this.next();
break;
case AtKeyword:
// parse the at-rule in "style block" mode; fall back to Raw on failure
children.push(this.parseWithFallback(this.Atrule.bind(this, true), consumeRaw));
break;
default:
// "&" starts a nested rule, anything else is a declaration
if (this.isDelim(AMPERSAND)) {
children.push(this.parseWithFallback(this.Rule, consumeRaw));
} else {
children.push(this.parseWithFallback(this.Declaration, consumeRaw));
}
}
}
return {
type: 'DeclarationList',
loc: this.getLocationFromList(children),
children
};
}
// Serializes a DeclarationList, adding ';' after every declaration child.
export function generate(node) {
    const separate = (prev) => {
        if (prev.type === 'Declaration') {
            this.token(Semicolon, ';');
        }
    };

    this.children(node, separate);
}

View File

@@ -1,29 +1,23 @@
var consumeNumber = require('../../tokenizer/utils').consumeNumber;
var TYPE = require('../../tokenizer').TYPE;
import { Dimension } from '../../tokenizer/index.js';
var DIMENSION = TYPE.Dimension;
module.exports = {
name: 'Dimension',
structure: {
value: String,
unit: String
},
parse: function() {
var start = this.scanner.tokenStart;
var numberEnd = consumeNumber(this.scanner.source, start);
this.eat(DIMENSION);
return {
type: 'Dimension',
loc: this.getLocation(start, this.scanner.tokenStart),
value: this.scanner.source.substring(start, numberEnd),
unit: this.scanner.source.substring(numberEnd, this.scanner.tokenStart)
};
},
generate: function(node) {
this.chunk(node.value);
this.chunk(node.unit);
}
// AST node for a dimension token, e.g. "12px".
export const name = 'Dimension';
export const structure = {
    value: String,
    unit: String
};

// <dimension-token>: numeric part followed by a unit.
export function parse() {
    const start = this.tokenStart;
    const numberPart = this.consumeNumber(Dimension);

    return {
        type: 'Dimension',
        loc: this.getLocation(start, this.tokenStart),
        value: numberPart,
        // everything after the number up to the cursor is the unit
        unit: this.substring(start + numberPart.length, this.tokenStart)
    };
}
// Serializes a Dimension node as a single token, e.g. "12px".
export function generate(node) {
    this.token(Dimension, `${node.value}${node.unit}`);
}

103
node_modules/css-tree/lib/syntax/node/Feature.js generated vendored Normal file
View File

@@ -0,0 +1,103 @@
import {
Ident,
Number,
Dimension,
Function as FunctionToken,
LeftParenthesis,
RightParenthesis,
Colon,
Delim
} from '../../tokenizer/index.js';
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
// AST node for a plain feature: `( name )` or `( name : value )`.
export const name = 'Feature';
export const structure = {
kind: String,
name: String,
value: ['Identifier', 'Number', 'Dimension', 'Ratio', 'Function', null]
};
// `kind` tags the owning condition type (e.g. media/container/supports).
export function parse(kind) {
const start = this.tokenStart;
let name;
let value = null;
this.eat(LeftParenthesis);
this.skipSC();
name = this.consume(Ident);
this.skipSC();
if (this.tokenType !== RightParenthesis) {
this.eat(Colon);
this.skipSC();
switch (this.tokenType) {
case Number:
// a number followed by a delim is parsed as a ratio, e.g. 16/9
if (this.lookupNonWSType(1) === Delim) {
value = this.Ratio();
} else {
value = this.Number();
}
break;
case Dimension:
value = this.Dimension();
break;
case Ident:
value = this.Identifier();
break;
case FunctionToken:
value = this.parseWithFallback(
() => {
const res = this.Function(this.readSequence, this.scope.Value);
this.skipSC();
// a solidus after the function means the value is actually
// a ratio (e.g. calc(1)/2) — force the Ratio fallback
if (this.isDelim(SOLIDUS)) {
this.error();
}
return res;
},
() => {
return this.Ratio();
}
);
break;
default:
this.error('Number, dimension, ratio or identifier is expected');
}
this.skipSC();
}
if (!this.eof) {
this.eat(RightParenthesis);
}
return {
type: 'Feature',
loc: this.getLocation(start, this.tokenStart),
kind,
name,
value
};
}
// Serializes a Feature node: '(' name [':' value]? ')'
export function generate(node) {
    this.token(LeftParenthesis, '(');
    this.token(Ident, node.name);

    const hasValue = node.value !== null;
    if (hasValue) {
        this.token(Colon, ':');
        this.node(node.value);
    }

    this.token(RightParenthesis, ')');
}

View File

@@ -0,0 +1,63 @@
import {
Function as FunctionToken,
RightParenthesis
} from '../../tokenizer/index.js';
// AST node for a functional feature, e.g. `style(...)` / `selector(...)`.
export const name = 'FeatureFunction';
export const structure = {
    kind: String,
    feature: String,
    value: ['Declaration', 'Selector']
};

// Looks up the value parser registered for `name()` under the given kind
// in this.features; reports a parse error when no such feature is known.
function getFeatureParser(kind, name) {
    const kindFeatures = this.features[kind] || {};
    const featureParser = kindFeatures[name];

    if (typeof featureParser !== 'function') {
        this.error(`Unknown feature ${name}()`);
    }

    return featureParser;
}
// Parses a functional feature `name( ... )`. The value parser is looked
// up by kind and lowercased function name; if it fails, the content is
// preserved as a Raw node.
export function parse(kind = 'unknown') {
const start = this.tokenStart;
const functionName = this.consumeFunctionName();
const valueParser = getFeatureParser.call(this, kind, functionName.toLowerCase());
this.skipSC();
const value = this.parseWithFallback(
() => {
const startValueToken = this.tokenIndex;
const value = valueParser.call(this);
// the value must end exactly at the function's closing parenthesis
if (this.eof === false &&
this.isBalanceEdge(startValueToken) === false) {
this.error();
}
return value;
},
() => this.Raw(null, false)
);
if (!this.eof) {
this.eat(RightParenthesis);
}
return {
type: 'FeatureFunction',
loc: this.getLocation(start, this.tokenStart),
kind,
feature: functionName,
value
};
}
// Serializes a FeatureFunction node: feature '(' value ')'
export function generate(node) {
    this.token(FunctionToken, `${node.feature}(`);
    this.node(node.value);
    this.token(RightParenthesis, ')');
}

133
node_modules/css-tree/lib/syntax/node/FeatureRange.js generated vendored Normal file
View File

@@ -0,0 +1,133 @@
import {
Ident,
Number,
Dimension,
Function as FunctionToken,
LeftParenthesis,
RightParenthesis
} from '../../tokenizer/index.js';
const SOLIDUS = 0x002F;          // U+002F SOLIDUS (/)
const LESSTHANSIGN = 0x003C;     // U+003C LESS-THAN SIGN (<)
const EQUALSSIGN = 0x003D;       // U+003D EQUALS SIGN (=)
const GREATERTHANSIGN = 0x003E;  // U+003E GREATER-THAN SIGN (>)

// AST node for a range feature, e.g. `(400px <= width <= 700px)`.
export const name = 'FeatureRange';
export const structure = {
    kind: String,
    left: ['Identifier', 'Number', 'Dimension', 'Ratio', 'Function'],
    leftComparison: String,
    middle: ['Identifier', 'Number', 'Dimension', 'Ratio', 'Function'],
    rightComparison: [String, null],
    right: ['Identifier', 'Number', 'Dimension', 'Ratio', 'Function', null]
};

// Reads one term of a range: <number>, <ratio>, <dimension>, <ident>
// or a function (retried as a ratio when followed by a solidus).
function readTerm() {
    this.skipSC();

    switch (this.tokenType) {
        case Number:
            // a number followed by a solidus is a ratio, e.g. `16/9`
            if (this.isDelim(SOLIDUS, this.lookupOffsetNonSC(1))) {
                return this.Ratio();
            } else {
                return this.Number();
            }

        case Dimension:
            return this.Dimension();

        case Ident:
            return this.Identifier();

        case FunctionToken:
            return this.parseWithFallback(
                () => {
                    const res = this.Function(this.readSequence, this.scope.Value);

                    this.skipSC();

                    // a solidus right after the function means the term is
                    // actually a ratio — fail over to the Ratio fallback
                    if (this.isDelim(SOLIDUS)) {
                        this.error();
                    }

                    return res;
                },
                () => {
                    return this.Ratio();
                }
            );

        default:
            this.error('Number, dimension, ratio or identifier is expected');
    }
}

// Reads a comparison operator ("<", ">", "=", "<=" or ">="), consuming
// its token(s) and returning the operator as a string.
function readComparison(expectColon) {
    this.skipSC();

    if (this.isDelim(LESSTHANSIGN) ||
        this.isDelim(GREATERTHANSIGN)) {
        const value = this.source[this.tokenStart];

        this.next();

        // "<=" / ">="
        if (this.isDelim(EQUALSSIGN)) {
            this.next();
            return value + '=';
        }

        return value;
    }

    if (this.isDelim(EQUALSSIGN)) {
        // FIX: the lone "=" was previously returned without being consumed,
        // leaving the tokenizer stuck on the delim so the following
        // readTerm() failed for every `name = value` range (e.g. `(width = 600px)`)
        this.next();
        return '=';
    }

    this.error(`Expected ${expectColon ? '":", ' : ''}"<", ">", "=" or ")"`);
}
// Parses `( term cmp term [cmp term]? )`, e.g. `(400px <= width <= 700px)`.
// The second comparison/term pair is optional.
export function parse(kind = 'unknown') {
const start = this.tokenStart;
this.skipSC();
this.eat(LeftParenthesis);
const left = readTerm.call(this);
// after an identifier the error message also suggests ":" (feature form)
const leftComparison = readComparison.call(this, left.type === 'Identifier');
const middle = readTerm.call(this);
let rightComparison = null;
let right = null;
if (this.lookupNonWSType(0) !== RightParenthesis) {
rightComparison = readComparison.call(this);
right = readTerm.call(this);
}
this.skipSC();
this.eat(RightParenthesis);
return {
type: 'FeatureRange',
loc: this.getLocation(start, this.tokenStart),
kind,
left,
leftComparison,
middle,
rightComparison,
right
};
}
// Serializes a FeatureRange node:
// '(' left cmp middle [cmp right]? ')'
export function generate(node) {
    const { left, leftComparison, middle, rightComparison, right } = node;

    this.token(LeftParenthesis, '(');
    this.node(left);
    this.tokenize(leftComparison);
    this.node(middle);

    if (right) {
        this.tokenize(rightComparison);
        this.node(right);
    }

    this.token(RightParenthesis, ')');
}

View File

@@ -1,40 +1,41 @@
var TYPE = require('../../tokenizer').TYPE;
import {
Function as FunctionToken,
RightParenthesis
} from '../../tokenizer/index.js';
var RIGHTPARENTHESIS = TYPE.RightParenthesis;
export const name = 'Function';
export const walkContext = 'function';
export const structure = {
name: String,
children: [[]]
};
// <function-token> <sequence> )
module.exports = {
name: 'Function',
structure: {
name: String,
children: [[]]
},
parse: function(readSequence, recognizer) {
var start = this.scanner.tokenStart;
var name = this.consumeFunctionName();
var nameLowerCase = name.toLowerCase();
var children;
export function parse(readSequence, recognizer) {
const start = this.tokenStart;
const name = this.consumeFunctionName();
const nameLowerCase = name.toLowerCase();
let children;
children = recognizer.hasOwnProperty(nameLowerCase)
? recognizer[nameLowerCase].call(this, recognizer)
: readSequence.call(this, recognizer);
children = recognizer.hasOwnProperty(nameLowerCase)
? recognizer[nameLowerCase].call(this, recognizer)
: readSequence.call(this, recognizer);
if (!this.scanner.eof) {
this.eat(RIGHTPARENTHESIS);
}
if (!this.eof) {
this.eat(RightParenthesis);
}
return {
type: 'Function',
loc: this.getLocation(start, this.scanner.tokenStart),
name: name,
children: children
};
},
generate: function(node) {
this.chunk(node.name);
this.chunk('(');
this.children(node);
this.chunk(')');
},
walkContext: 'function'
};
return {
type: 'Function',
loc: this.getLocation(start, this.tokenStart),
name,
children
};
}
export function generate(node) {
this.token(FunctionToken, node.name + '(');
this.children(node);
this.token(RightParenthesis, ')');
}

View File

@@ -0,0 +1,66 @@
import {
Function as FunctionToken,
LeftParenthesis,
RightParenthesis
} from '../../tokenizer/index.js';
// Fallback node for unrecognized parenthesized or functional terms
// inside a condition; content is kept as a value sequence or Raw.
export const name = 'GeneralEnclosed';
export const structure = {
kind: String,
function: [String, null],
children: [[]]
};
// <function-token> <any-value> )
// ( <any-value> )
export function parse(kind) {
const start = this.tokenStart;
let functionName = null;
if (this.tokenType === FunctionToken) {
functionName = this.consumeFunctionName();
} else {
this.eat(LeftParenthesis);
}
const children = this.parseWithFallback(
() => {
const startValueToken = this.tokenIndex;
const children = this.readSequence(this.scope.Value);
// the content must end at the matching closing parenthesis
if (this.eof === false &&
this.isBalanceEdge(startValueToken) === false) {
this.error();
}
return children;
},
() => this.createSingleNodeList(
this.Raw(null, false)
)
);
if (!this.eof) {
this.eat(RightParenthesis);
}
return {
type: 'GeneralEnclosed',
loc: this.getLocation(start, this.tokenStart),
kind,
function: functionName,
children
};
}
// Serializes a GeneralEnclosed node: either `fn( children )` or `( children )`.
export function generate(node) {
    const opener = node.function
        ? () => this.token(FunctionToken, `${node.function}(`)
        : () => this.token(LeftParenthesis, '(');

    opener();
    this.children(node);
    this.token(RightParenthesis, ')');
}

View File

@@ -1,26 +1,23 @@
var TYPE = require('../../tokenizer').TYPE;
var HASH = TYPE.Hash;
import { Hash } from '../../tokenizer/index.js';
// '#' ident
module.exports = {
name: 'Hash',
structure: {
value: String
},
parse: function() {
var start = this.scanner.tokenStart;
this.eat(HASH);
return {
type: 'Hash',
loc: this.getLocation(start, this.scanner.tokenStart),
value: this.scanner.substrToCursor(start + 1)
};
},
generate: function(node) {
this.chunk('#');
this.chunk(node.value);
}
// '#' ident
// Node descriptor for <hash-token> values. css-tree node modules export
// exactly: name, structure, parse, generate (plus optional walkContext).
// Removed: `export const xxx = 'XXX';` — a meaningless placeholder export
// that is not part of the node-module contract and is dead code.
export const name = 'Hash';
export const structure = {
    value: String
};
// Parses a <hash-token> into a Hash node. The stored value excludes
// the leading '#'.
export function parse() {
    const start = this.tokenStart;
    this.eat(Hash);
    const loc = this.getLocation(start, this.tokenStart);
    // Skip the '#' at `start`; only the text after it is kept
    const value = this.substrToCursor(start + 1);
    return { type: 'Hash', loc, value };
}
// Serializes a Hash node back to source form: '#' followed by its value.
export function generate(node) {
    this.token(Hash, '#' + node.value);
}

View File

@@ -1,27 +1,26 @@
var TYPE = require('../../tokenizer').TYPE;
import { Hash, Delim } from '../../tokenizer/index.js';
var HASH = TYPE.Hash;
// <hash-token>
module.exports = {
name: 'IdSelector',
structure: {
name: String
},
parse: function() {
var start = this.scanner.tokenStart;
// TODO: check value is an ident
this.eat(HASH);
return {
type: 'IdSelector',
loc: this.getLocation(start, this.scanner.tokenStart),
name: this.scanner.substrToCursor(start + 1)
};
},
generate: function(node) {
this.chunk('#');
this.chunk(node.name);
}
export const name = 'IdSelector';
export const structure = {
name: String
};
// Parses an id selector (`#ident`) from a <hash-token>.
export function parse() {
    const start = this.tokenStart;
    // TODO: check value is an ident
    this.eat(Hash);
    const loc = this.getLocation(start, this.tokenStart);
    // Strip the leading '#'; only the identifier text is stored
    const selectorName = this.substrToCursor(start + 1);
    return { type: 'IdSelector', loc, name: selectorName };
}
// Serializes an IdSelector as '#' + name, emitted as a single Delim token.
export function generate(node) {
    // Using Delim instead of Hash is a hack to avoid for a whitespace between ident and id-selector
    // in safe mode (e.g. "a#id"), because IE11 doesn't allow a sequence <ident-token> <hash-token>
    // without a whitespace in values (e.g. "1px solid#000")
    this.token(Delim, '#' + node.name);
}

View File

@@ -1,20 +1,18 @@
var TYPE = require('../../tokenizer').TYPE;
import { Ident } from '../../tokenizer/index.js';
var IDENT = TYPE.Ident;
module.exports = {
name: 'Identifier',
structure: {
name: String
},
parse: function() {
return {
type: 'Identifier',
loc: this.getLocation(this.scanner.tokenStart, this.scanner.tokenEnd),
name: this.consume(IDENT)
};
},
generate: function(node) {
this.chunk(node.name);
}
export const name = 'Identifier';
export const structure = {
name: String
};
// Parses a single <ident-token> into an Identifier node.
export function parse() {
    // Capture the token span before consume() advances the scanner
    const loc = this.getLocation(this.tokenStart, this.tokenEnd);
    return {
        type: 'Identifier',
        loc,
        name: this.consume(Ident)
    };
}
// Serializes the identifier as a single <ident-token>.
export function generate(node) {
    this.token(Ident, node.name);
}

28
node_modules/css-tree/lib/syntax/node/Layer.js generated vendored Normal file
View File

@@ -0,0 +1,28 @@
import { Ident, Delim } from '../../tokenizer/index.js';
const FULLSTOP = 0x002E; // U+002E FULL STOP (.)
export const name = 'Layer';
export const structure = {
name: String
};
// Parses a cascade layer name: a dot-separated chain of identifiers,
// e.g. `framework.base` (used by @layer / @import layer()).
export function parse() {
    const start = this.tokenStart;
    const segments = [this.consume(Ident)];
    // Each '.' delimiter must be followed by another ident segment
    while (this.isDelim(FULLSTOP)) {
        this.eat(Delim);
        segments.push(this.consume(Ident));
    }
    return {
        type: 'Layer',
        loc: this.getLocation(start, this.tokenStart),
        name: segments.join('.')
    };
}
// Re-tokenizes the dotted layer name (e.g. `a.b`) back into output tokens.
export function generate(node) {
    this.tokenize(node.name);
}

36
node_modules/css-tree/lib/syntax/node/LayerList.js generated vendored Normal file
View File

@@ -0,0 +1,36 @@
import { Comma } from '../../tokenizer/index.js';
export const name = 'LayerList';
export const structure = {
children: [[
'Layer'
]]
};
// Parses a comma-separated list of Layer nodes.
export function parse() {
    const children = this.createList();
    this.skipSC();
    while (true) {
        if (this.eof) {
            break;
        }
        children.push(this.Layer());
        // Continue only when the next significant token is a comma separator
        if (this.lookupTypeNonSC(0) !== Comma) {
            break;
        }
        this.skipSC();
        this.next(); // consume ','
        this.skipSC();
    }
    return {
        type: 'LayerList',
        loc: this.getLocationFromList(children),
        children
    };
}
// Layers are serialized as a comma-separated list.
export function generate(node) {
    this.children(node, () => this.token(Comma, ','));
}

View File

@@ -1,76 +0,0 @@
var TYPE = require('../../tokenizer').TYPE;
var IDENT = TYPE.Ident;
var NUMBER = TYPE.Number;
var DIMENSION = TYPE.Dimension;
var LEFTPARENTHESIS = TYPE.LeftParenthesis;
var RIGHTPARENTHESIS = TYPE.RightParenthesis;
var COLON = TYPE.Colon;
var DELIM = TYPE.Delim;
module.exports = {
name: 'MediaFeature',
structure: {
name: String,
value: ['Identifier', 'Number', 'Dimension', 'Ratio', null]
},
parse: function() {
var start = this.scanner.tokenStart;
var name;
var value = null;
this.eat(LEFTPARENTHESIS);
this.scanner.skipSC();
name = this.consume(IDENT);
this.scanner.skipSC();
if (this.scanner.tokenType !== RIGHTPARENTHESIS) {
this.eat(COLON);
this.scanner.skipSC();
switch (this.scanner.tokenType) {
case NUMBER:
if (this.lookupNonWSType(1) === DELIM) {
value = this.Ratio();
} else {
value = this.Number();
}
break;
case DIMENSION:
value = this.Dimension();
break;
case IDENT:
value = this.Identifier();
break;
default:
this.error('Number, dimension, ratio or identifier is expected');
}
this.scanner.skipSC();
}
this.eat(RIGHTPARENTHESIS);
return {
type: 'MediaFeature',
loc: this.getLocation(start, this.scanner.tokenStart),
name: name,
value: value
};
},
generate: function(node) {
this.chunk('(');
this.chunk(node.name);
if (node.value !== null) {
this.chunk(':');
this.node(node.value);
}
this.chunk(')');
}
};

View File

@@ -1,68 +1,102 @@
var TYPE = require('../../tokenizer').TYPE;
import {
Comma,
EOF,
Ident,
LeftCurlyBracket,
LeftParenthesis,
Function as FunctionToken,
Semicolon
} from '../../tokenizer/index.js';
var WHITESPACE = TYPE.WhiteSpace;
var COMMENT = TYPE.Comment;
var IDENT = TYPE.Ident;
var LEFTPARENTHESIS = TYPE.LeftParenthesis;
module.exports = {
name: 'MediaQuery',
structure: {
children: [[
'Identifier',
'MediaFeature',
'WhiteSpace'
]]
},
parse: function() {
this.scanner.skipSC();
var children = this.createList();
var child = null;
var space = null;
scan:
while (!this.scanner.eof) {
switch (this.scanner.tokenType) {
case COMMENT:
this.scanner.next();
continue;
case WHITESPACE:
space = this.WhiteSpace();
continue;
case IDENT:
child = this.Identifier();
break;
case LEFTPARENTHESIS:
child = this.MediaFeature();
break;
default:
break scan;
}
if (space !== null) {
children.push(space);
space = null;
}
children.push(child);
}
if (child === null) {
this.error('Identifier or parenthesis is expected');
}
return {
type: 'MediaQuery',
loc: this.getLocationFromList(children),
children: children
};
},
generate: function(node) {
this.children(node);
}
export const name = 'MediaQuery';
export const structure = {
modifier: [String, null],
mediaType: [String, null],
condition: ['Condition', null]
};
// Parses one media query per Media Queries Level 4:
//   [ not | only ]? <media-type> [ and <media-condition-without-or> ]?
// | <media-condition>
// Produces { modifier, mediaType, condition }, any of which may be null.
export function parse() {
    const start = this.tokenStart;
    let modifier = null;
    let mediaType = null;
    let condition = null;
    this.skipSC();
    // An ident NOT followed by '(' starts the media-type form; an ident
    // followed by '(' would be a function-like condition instead.
    if (this.tokenType === Ident && this.lookupTypeNonSC(1) !== LeftParenthesis) {
        // [ not | only ]? <media-type>
        const ident = this.consume(Ident);
        const identLowerCase = ident.toLowerCase();
        if (identLowerCase === 'not' || identLowerCase === 'only') {
            this.skipSC();
            modifier = identLowerCase;
            mediaType = this.consume(Ident);
        } else {
            mediaType = ident;
        }
        // After the media type, only `and <condition>` or an end-of-query
        // token is allowed.
        switch (this.lookupTypeNonSC(0)) {
            case Ident: {
                // and <media-condition-without-or>
                this.skipSC();
                this.eatIdent('and');
                condition = this.Condition('media');
                break;
            }
            case LeftCurlyBracket:
            case Semicolon:
            case Comma:
            case EOF:
                break;
            default:
                this.error('Identifier or parenthesis is expected');
        }
    } else {
        switch (this.tokenType) {
            case Ident:
            case LeftParenthesis:
            case FunctionToken: {
                // <media-condition>
                condition = this.Condition('media');
                break;
            }
            case LeftCurlyBracket:
            case Semicolon:
            case EOF:
                // An empty query is permitted here; all fields stay null.
                break;
            default:
                this.error('Identifier or parenthesis is expected');
        }
    }
    return {
        type: 'MediaQuery',
        loc: this.getLocation(start, this.tokenStart),
        modifier,
        mediaType,
        condition
    };
}
// Serializes a MediaQuery: `[modifier] mediaType [and condition]`
// or a bare condition when no media type is present.
export function generate(node) {
    // Condition-only query: no media type at all
    if (!node.mediaType) {
        if (node.condition) {
            this.node(node.condition);
        }
        return;
    }
    if (node.modifier) {
        this.token(Ident, node.modifier);
    }
    this.token(Ident, node.mediaType);
    if (node.condition) {
        this.token(Ident, 'and');
        this.node(node.condition);
    }
}

View File

@@ -1,36 +1,34 @@
var COMMA = require('../../tokenizer').TYPE.Comma;
import { Comma } from '../../tokenizer/index.js';
module.exports = {
name: 'MediaQueryList',
structure: {
children: [[
'MediaQuery'
]]
},
parse: function(relative) {
var children = this.createList();
export const name = 'MediaQueryList';
export const structure = {
children: [[
'MediaQuery'
]]
};
this.scanner.skipSC();
export function parse() {
const children = this.createList();
while (!this.scanner.eof) {
children.push(this.MediaQuery(relative));
this.skipSC();
if (this.scanner.tokenType !== COMMA) {
break;
}
while (!this.eof) {
children.push(this.MediaQuery());
this.scanner.next();
if (this.tokenType !== Comma) {
break;
}
return {
type: 'MediaQueryList',
loc: this.getLocationFromList(children),
children: children
};
},
generate: function(node) {
this.children(node, function() {
this.chunk(',');
});
this.next();
}
};
return {
type: 'MediaQueryList',
loc: this.getLocationFromList(children),
children
};
}
// Media queries are serialized as a comma-separated list.
export function generate(node) {
    this.children(node, () => this.token(Comma, ','));
}

View File

@@ -0,0 +1,22 @@
import { Delim } from '../../tokenizer/index.js';
const AMPERSAND = 0x0026; // U+0026 AMPERSAND (&)
export const name = 'NestingSelector';
export const structure = {
};
// Parses the CSS nesting selector: the single delimiter `&`.
export function parse() {
    const start = this.tokenStart;
    this.eatDelim(AMPERSAND);
    return {
        type: 'NestingSelector',
        loc: this.getLocation(start, this.tokenStart)
    };
}
// The nesting selector always serializes to a literal '&'.
export function generate() {
    this.token(Delim, '&');
}

View File

@@ -1,51 +1,47 @@
module.exports = {
name: 'Nth',
structure: {
nth: ['AnPlusB', 'Identifier'],
selector: ['SelectorList', null]
},
parse: function(allowOfClause) {
this.scanner.skipSC();
import { Ident } from '../../tokenizer/index.js';
var start = this.scanner.tokenStart;
var end = start;
var selector = null;
var query;
if (this.scanner.lookupValue(0, 'odd') || this.scanner.lookupValue(0, 'even')) {
query = this.Identifier();
} else {
query = this.AnPlusB();
}
this.scanner.skipSC();
if (allowOfClause && this.scanner.lookupValue(0, 'of')) {
this.scanner.next();
selector = this.SelectorList();
if (this.needPositions) {
end = this.getLastListNode(selector.children).loc.end.offset;
}
} else {
if (this.needPositions) {
end = query.loc.end.offset;
}
}
return {
type: 'Nth',
loc: this.getLocation(start, end),
nth: query,
selector: selector
};
},
generate: function(node) {
this.node(node.nth);
if (node.selector !== null) {
this.chunk(' of ');
this.node(node.selector);
}
}
export const name = 'Nth';
export const structure = {
nth: ['AnPlusB', 'Identifier'],
selector: ['SelectorList', null]
};
// Parses an <nth> argument (e.g. `2n+1`, `odd`, `even`), optionally
// followed by an `of <selector-list>` clause.
export function parse() {
    this.skipSC();
    const start = this.tokenStart;
    // `odd`/`even` keywords parse as identifiers; anything else as An+B
    const isKeyword = this.lookupValue(0, 'odd') || this.lookupValue(0, 'even');
    const nth = isKeyword ? this.Identifier() : this.AnPlusB();
    let end = this.tokenStart;
    let selector = null;
    this.skipSC();
    if (this.lookupValue(0, 'of')) {
        this.next(); // consume `of`
        selector = this.SelectorList();
        end = this.tokenStart;
    }
    return {
        type: 'Nth',
        loc: this.getLocation(start, end),
        nth,
        selector
    };
}
// Serializes the nth expression, then an optional `of <selector-list>`.
export function generate(node) {
    this.node(node.nth);
    if (node.selector !== null) {
        // e.g. `:nth-child(2n of .item)`
        this.token(Ident, 'of');
        this.node(node.selector);
    }
}

View File

@@ -1,18 +1,18 @@
var NUMBER = require('../../tokenizer').TYPE.Number;
import { Number as NumberToken } from '../../tokenizer/index.js';
module.exports = {
name: 'Number',
structure: {
value: String
},
parse: function() {
return {
type: 'Number',
loc: this.getLocation(this.scanner.tokenStart, this.scanner.tokenEnd),
value: this.consume(NUMBER)
};
},
generate: function(node) {
this.chunk(node.value);
}
export const name = 'Number';
export const structure = {
value: String
};
// Parses a single <number-token> into a Number node; the value is kept
// as its source string.
export function parse() {
    // Capture the token span before consume() advances the scanner
    const loc = this.getLocation(this.tokenStart, this.tokenEnd);
    return {
        type: 'Number',
        loc,
        value: this.consume(NumberToken)
    };
}
// Serializes the stored value as a single <number-token>.
export function generate(node) {
    this.token(NumberToken, node.value);
}

View File

@@ -1,21 +1,21 @@
// '/' | '*' | ',' | ':' | '+' | '-'
module.exports = {
name: 'Operator',
structure: {
value: String
},
parse: function() {
var start = this.scanner.tokenStart;
this.scanner.next();
return {
type: 'Operator',
loc: this.getLocation(start, this.scanner.tokenStart),
value: this.scanner.substrToCursor(start)
};
},
generate: function(node) {
this.chunk(node.value);
}
export const name = 'Operator';
export const structure = {
value: String
};
// Parses one operator token ('/', '*', ',', ':', '+', '-') by consuming
// whatever single token comes next and storing its source text.
export function parse() {
    const start = this.tokenStart;
    this.next();
    const loc = this.getLocation(start, this.tokenStart);
    return { type: 'Operator', loc, value: this.substrToCursor(start) };
}
// Re-tokenizes the stored operator text rather than emitting a fixed
// token type (operator values vary: '/', '*', ',', ':', '+', '-').
export function generate(node) {
    this.tokenize(node.value);
}

View File

@@ -1,34 +1,34 @@
var TYPE = require('../../tokenizer').TYPE;
import {
LeftParenthesis,
RightParenthesis
} from '../../tokenizer/index.js';
var LEFTPARENTHESIS = TYPE.LeftParenthesis;
var RIGHTPARENTHESIS = TYPE.RightParenthesis;
module.exports = {
name: 'Parentheses',
structure: {
children: [[]]
},
parse: function(readSequence, recognizer) {
var start = this.scanner.tokenStart;
var children = null;
this.eat(LEFTPARENTHESIS);
children = readSequence.call(this, recognizer);
if (!this.scanner.eof) {
this.eat(RIGHTPARENTHESIS);
}
return {
type: 'Parentheses',
loc: this.getLocation(start, this.scanner.tokenStart),
children: children
};
},
generate: function(node) {
this.chunk('(');
this.children(node);
this.chunk(')');
}
export const name = 'Parentheses';
export const structure = {
children: [[]]
};
// Parses a parenthesized group. The inner content is read by the
// caller-supplied `readSequence` using the given `recognizer` scope.
export function parse(readSequence, recognizer) {
    const start = this.tokenStart;
    this.eat(LeftParenthesis);
    const children = readSequence.call(this, recognizer);
    // The closing ')' may be missing at end of input; tolerate that
    if (!this.eof) {
        this.eat(RightParenthesis);
    }
    return {
        type: 'Parentheses',
        loc: this.getLocation(start, this.tokenStart),
        children
    };
}
// Wraps the children back in literal parentheses.
export function generate(node) {
    this.token(LeftParenthesis, '(');
    this.children(node);
    this.token(RightParenthesis, ')');
}

View File

@@ -1,27 +1,18 @@
var consumeNumber = require('../../tokenizer/utils').consumeNumber;
var TYPE = require('../../tokenizer').TYPE;
import { Percentage } from '../../tokenizer/index.js';
var PERCENTAGE = TYPE.Percentage;
module.exports = {
name: 'Percentage',
structure: {
value: String
},
parse: function() {
var start = this.scanner.tokenStart;
var numberEnd = consumeNumber(this.scanner.source, start);
this.eat(PERCENTAGE);
return {
type: 'Percentage',
loc: this.getLocation(start, this.scanner.tokenStart),
value: this.scanner.source.substring(start, numberEnd)
};
},
generate: function(node) {
this.chunk(node.value);
this.chunk('%');
}
export const name = 'Percentage';
export const structure = {
value: String
};
// Parses a <percentage-token>; the stored value is the numeric part
// without the trailing '%'.
export function parse() {
    // Capture the token span before consumeNumber() advances the scanner
    const loc = this.getLocation(this.tokenStart, this.tokenEnd);
    return {
        type: 'Percentage',
        loc,
        value: this.consumeNumber(Percentage)
    };
}
// The stored value excludes the '%' sign, so append it on output.
export function generate(node) {
    this.token(Percentage, node.value + '%');
}

View File

@@ -1,61 +1,65 @@
var TYPE = require('../../tokenizer').TYPE;
import {
Ident,
Function as FunctionToken,
Colon,
RightParenthesis
} from '../../tokenizer/index.js';
var IDENT = TYPE.Ident;
var FUNCTION = TYPE.Function;
var COLON = TYPE.Colon;
var RIGHTPARENTHESIS = TYPE.RightParenthesis;
export const name = 'PseudoClassSelector';
export const walkContext = 'function';
export const structure = {
name: String,
children: [['Raw'], null]
};
// : [ <ident> | <function-token> <any-value>? ) ]
module.exports = {
name: 'PseudoClassSelector',
structure: {
name: String,
children: [['Raw'], null]
},
parse: function() {
var start = this.scanner.tokenStart;
var children = null;
var name;
var nameLowerCase;
export function parse() {
const start = this.tokenStart;
let children = null;
let name;
let nameLowerCase;
this.eat(COLON);
this.eat(Colon);
if (this.scanner.tokenType === FUNCTION) {
name = this.consumeFunctionName();
nameLowerCase = name.toLowerCase();
if (this.tokenType === FunctionToken) {
name = this.consumeFunctionName();
nameLowerCase = name.toLowerCase();
if (this.pseudo.hasOwnProperty(nameLowerCase)) {
this.scanner.skipSC();
children = this.pseudo[nameLowerCase].call(this);
this.scanner.skipSC();
} else {
children = this.createList();
children.push(
this.Raw(this.scanner.tokenIndex, null, false)
);
}
this.eat(RIGHTPARENTHESIS);
if (this.lookupNonWSType(0) == RightParenthesis) {
children = this.createList();
} else if (hasOwnProperty.call(this.pseudo, nameLowerCase)) {
this.skipSC();
children = this.pseudo[nameLowerCase].call(this);
this.skipSC();
} else {
name = this.consume(IDENT);
children = this.createList();
children.push(
this.Raw(null, false)
);
}
return {
type: 'PseudoClassSelector',
loc: this.getLocation(start, this.scanner.tokenStart),
name: name,
children: children
};
},
generate: function(node) {
this.chunk(':');
this.chunk(node.name);
this.eat(RightParenthesis);
} else {
name = this.consume(Ident);
}
if (node.children !== null) {
this.chunk('(');
this.children(node);
this.chunk(')');
}
},
walkContext: 'function'
};
return {
type: 'PseudoClassSelector',
loc: this.getLocation(start, this.tokenStart),
name,
children
};
}
// Serializes a pseudo-class: `:name` for the plain form, or
// `:name(children)` for the functional form.
export function generate(node) {
    this.token(Colon, ':');
    // A null child list means the plain (non-functional) form
    if (node.children === null) {
        this.token(Ident, node.name);
        return;
    }
    this.token(FunctionToken, `${node.name}(`);
    this.children(node);
    this.token(RightParenthesis, ')');
}

View File

@@ -1,62 +1,66 @@
var TYPE = require('../../tokenizer').TYPE;
import {
Ident,
Function as FunctionToken,
Colon,
RightParenthesis
} from '../../tokenizer/index.js';
var IDENT = TYPE.Ident;
var FUNCTION = TYPE.Function;
var COLON = TYPE.Colon;
var RIGHTPARENTHESIS = TYPE.RightParenthesis;
export const name = 'PseudoElementSelector';
export const walkContext = 'function';
export const structure = {
name: String,
children: [['Raw'], null]
};
// :: [ <ident> | <function-token> <any-value>? ) ]
module.exports = {
name: 'PseudoElementSelector',
structure: {
name: String,
children: [['Raw'], null]
},
parse: function() {
var start = this.scanner.tokenStart;
var children = null;
var name;
var nameLowerCase;
export function parse() {
const start = this.tokenStart;
let children = null;
let name;
let nameLowerCase;
this.eat(COLON);
this.eat(COLON);
this.eat(Colon);
this.eat(Colon);
if (this.scanner.tokenType === FUNCTION) {
name = this.consumeFunctionName();
nameLowerCase = name.toLowerCase();
if (this.tokenType === FunctionToken) {
name = this.consumeFunctionName();
nameLowerCase = name.toLowerCase();
if (this.pseudo.hasOwnProperty(nameLowerCase)) {
this.scanner.skipSC();
children = this.pseudo[nameLowerCase].call(this);
this.scanner.skipSC();
} else {
children = this.createList();
children.push(
this.Raw(this.scanner.tokenIndex, null, false)
);
}
this.eat(RIGHTPARENTHESIS);
if (this.lookupNonWSType(0) == RightParenthesis) {
children = this.createList();
} else if (hasOwnProperty.call(this.pseudo, nameLowerCase)) {
this.skipSC();
children = this.pseudo[nameLowerCase].call(this);
this.skipSC();
} else {
name = this.consume(IDENT);
children = this.createList();
children.push(
this.Raw(null, false)
);
}
return {
type: 'PseudoElementSelector',
loc: this.getLocation(start, this.scanner.tokenStart),
name: name,
children: children
};
},
generate: function(node) {
this.chunk('::');
this.chunk(node.name);
this.eat(RightParenthesis);
} else {
name = this.consume(Ident);
}
if (node.children !== null) {
this.chunk('(');
this.children(node);
this.chunk(')');
}
},
walkContext: 'function'
};
return {
type: 'PseudoElementSelector',
loc: this.getLocation(start, this.tokenStart),
name,
children
};
}
// Serializes a pseudo-element: `::name` for the plain form, or
// `::name(children)` for the functional form.
export function generate(node) {
    // Pseudo-elements use a double-colon prefix
    this.token(Colon, ':');
    this.token(Colon, ':');
    if (node.children === null) {
        this.token(Ident, node.name);
        return;
    }
    this.token(FunctionToken, `${node.name}(`);
    this.children(node);
    this.token(RightParenthesis, ')');
}

View File

@@ -1,66 +1,68 @@
var isDigit = require('../../tokenizer').isDigit;
var TYPE = require('../../tokenizer').TYPE;
import {
Delim,
Number as NumberToken,
Function as FunctionToken
} from '../../tokenizer/index.js';
var NUMBER = TYPE.Number;
var DELIM = TYPE.Delim;
var SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
var FULLSTOP = 0x002E; // U+002E FULL STOP (.)
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
// Terms of <ratio> should be a positive numbers (not zero or negative)
// (see https://drafts.csswg.org/mediaqueries-3/#values)
// However, -o-min-device-pixel-ratio takes fractional values as a ratio's term
// and this is using by various sites. Therefore we relax checking on parse
// to test a term is unsigned number without an exponent part.
// Additional checking may be applied on lexer validation.
function consumeNumber() {
this.scanner.skipWS();
// Media Queries Level 3 defines terms of <ratio> as a positive (not zero or negative)
// integers (see https://drafts.csswg.org/mediaqueries-3/#values)
// However, Media Queries Level 4 removes any definition of values
// (see https://drafts.csswg.org/mediaqueries-4/#values) and refers to
// CSS Values and Units for detail. In CSS Values and Units Level 4 a <ratio>
// definition was added (see https://drafts.csswg.org/css-values-4/#ratios) which
// defines ratio as "<number [0,∞]> [ / <number [0,∞]> ]?" and based on it
// any constrains on terms were removed. Parser also doesn't test numbers
// in any way to make possible for linting and fixing them by the tools using CSSTree.
// An additional syntax examination may be applied by a lexer.
function consumeTerm() {
this.skipSC();
var value = this.consume(NUMBER);
switch (this.tokenType) {
case NumberToken:
return this.Number();
for (var i = 0; i < value.length; i++) {
var code = value.charCodeAt(i);
if (!isDigit(code) && code !== FULLSTOP) {
this.error('Unsigned number is expected', this.scanner.tokenStart - value.length + i);
}
case FunctionToken:
return this.Function(this.readSequence, this.scope.Value);
default:
this.error('Number of function is expected');
}
if (Number(value) === 0) {
this.error('Zero number is not allowed', this.scanner.tokenStart - value.length);
}
return value;
}
// <positive-integer> S* '/' S* <positive-integer>
module.exports = {
name: 'Ratio',
structure: {
left: String,
right: String
},
parse: function() {
var start = this.scanner.tokenStart;
var left = consumeNumber.call(this);
var right;
this.scanner.skipWS();
if (!this.scanner.isDelim(SOLIDUS)) {
this.error('Solidus is expected');
}
this.eat(DELIM);
right = consumeNumber.call(this);
return {
type: 'Ratio',
loc: this.getLocation(start, this.scanner.tokenStart),
left: left,
right: right
};
},
generate: function(node) {
this.chunk(node.left);
this.chunk('/');
this.chunk(node.right);
}
export const name = 'Ratio';
export const structure = {
left: ['Number', 'Function'],
right: ['Number', 'Function', null]
};
// <number [0,∞]> [ / <number [0,∞]> ]?
// Parses a <ratio>; the right-hand term is optional and only follows
// a solidus delimiter.
export function parse() {
    const start = this.tokenStart;
    const left = consumeTerm.call(this);
    this.skipSC();
    const hasRight = this.isDelim(SOLIDUS);
    if (hasRight) {
        this.eatDelim(SOLIDUS);
    }
    const right = hasRight ? consumeTerm.call(this) : null;
    return {
        type: 'Ratio',
        loc: this.getLocation(start, this.tokenStart),
        left,
        right
    };
}
// Serializes `<left> / <right>`; a missing right-hand term is emitted
// as the default `1` (so a ratio parsed from `5` serializes as `5/1`).
export function generate(node) {
    this.node(node.left);
    this.token(Delim, '/');
    if (node.right) {
        this.node(node.right);
    } else {
        // FIX: was `this.node(NumberToken, 1)` — `node()` expects an AST
        // node, not a token-type constant; emit a literal number token.
        this.token(NumberToken, '1');
    }
}

View File

@@ -1,87 +1,41 @@
var tokenizer = require('../../tokenizer');
var TYPE = tokenizer.TYPE;
var WhiteSpace = TYPE.WhiteSpace;
var Semicolon = TYPE.Semicolon;
var LeftCurlyBracket = TYPE.LeftCurlyBracket;
var Delim = TYPE.Delim;
var EXCLAMATIONMARK = 0x0021; // U+0021 EXCLAMATION MARK (!)
import { WhiteSpace } from '../../tokenizer/index.js';
function getOffsetExcludeWS() {
if (this.scanner.tokenIndex > 0) {
if (this.scanner.lookupType(-1) === WhiteSpace) {
return this.scanner.tokenIndex > 1
? this.scanner.getTokenStart(this.scanner.tokenIndex - 1)
: this.scanner.firstCharOffset;
if (this.tokenIndex > 0) {
if (this.lookupType(-1) === WhiteSpace) {
return this.tokenIndex > 1
? this.getTokenStart(this.tokenIndex - 1)
: this.firstCharOffset;
}
}
return this.scanner.tokenStart;
return this.tokenStart;
}
// 0, 0, false
function balanceEnd() {
return 0;
}
// LEFTCURLYBRACKET, 0, false
function leftCurlyBracket(tokenType) {
return tokenType === LeftCurlyBracket ? 1 : 0;
}
// LEFTCURLYBRACKET, SEMICOLON, false
function leftCurlyBracketOrSemicolon(tokenType) {
return tokenType === LeftCurlyBracket || tokenType === Semicolon ? 1 : 0;
}
// EXCLAMATIONMARK, SEMICOLON, false
function exclamationMarkOrSemicolon(tokenType, source, offset) {
if (tokenType === Delim && source.charCodeAt(offset) === EXCLAMATIONMARK) {
return 1;
}
return tokenType === Semicolon ? 1 : 0;
}
// 0, SEMICOLON, true
function semicolonIncluded(tokenType) {
return tokenType === Semicolon ? 2 : 0;
}
module.exports = {
name: 'Raw',
structure: {
value: String
},
parse: function(startToken, mode, excludeWhiteSpace) {
var startOffset = this.scanner.getTokenStart(startToken);
var endOffset;
this.scanner.skip(
this.scanner.getRawLength(startToken, mode || balanceEnd)
);
if (excludeWhiteSpace && this.scanner.tokenStart > startOffset) {
endOffset = getOffsetExcludeWS.call(this);
} else {
endOffset = this.scanner.tokenStart;
}
return {
type: 'Raw',
loc: this.getLocation(startOffset, endOffset),
value: this.scanner.source.substring(startOffset, endOffset)
};
},
generate: function(node) {
this.chunk(node.value);
},
mode: {
default: balanceEnd,
leftCurlyBracket: leftCurlyBracket,
leftCurlyBracketOrSemicolon: leftCurlyBracketOrSemicolon,
exclamationMarkOrSemicolon: exclamationMarkOrSemicolon,
semicolonIncluded: semicolonIncluded
}
export const name = 'Raw';
export const structure = {
value: String
};
// Captures a span of source as-is (a Raw node). Tokens are consumed
// until `consumeUntil` signals a stop (or balance end by default);
// trailing whitespace may optionally be excluded from the span.
export function parse(consumeUntil, excludeWhiteSpace) {
    const startOffset = this.getTokenStart(this.tokenIndex);
    this.skipUntilBalanced(this.tokenIndex, consumeUntil || this.consumeUntilBalanceEnd);
    const trimTrailingWS = excludeWhiteSpace && this.tokenStart > startOffset;
    const endOffset = trimTrailingWS
        ? getOffsetExcludeWS.call(this)
        : this.tokenStart;
    return {
        type: 'Raw',
        loc: this.getLocation(startOffset, endOffset),
        value: this.substring(startOffset, endOffset)
    };
}
// The raw value is emitted by re-tokenizing the stored source text.
export function generate(node) {
    this.tokenize(node.value);
}

View File

@@ -1,54 +1,51 @@
var TYPE = require('../../tokenizer').TYPE;
var rawMode = require('./Raw').mode;
import { LeftCurlyBracket } from '../../tokenizer/index.js';
var LEFTCURLYBRACKET = TYPE.LeftCurlyBracket;
function consumeRaw(startToken) {
return this.Raw(startToken, rawMode.leftCurlyBracket, true);
function consumeRaw() {
return this.Raw(this.consumeUntilLeftCurlyBracket, true);
}
function consumePrelude() {
var prelude = this.SelectorList();
const prelude = this.SelectorList();
if (prelude.type !== 'Raw' &&
this.scanner.eof === false &&
this.scanner.tokenType !== LEFTCURLYBRACKET) {
this.eof === false &&
this.tokenType !== LeftCurlyBracket) {
this.error();
}
return prelude;
}
module.exports = {
name: 'Rule',
structure: {
prelude: ['SelectorList', 'Raw'],
block: ['Block']
},
parse: function() {
var startToken = this.scanner.tokenIndex;
var startOffset = this.scanner.tokenStart;
var prelude;
var block;
if (this.parseRulePrelude) {
prelude = this.parseWithFallback(consumePrelude, consumeRaw);
} else {
prelude = consumeRaw.call(this, startToken);
}
block = this.Block(true);
return {
type: 'Rule',
loc: this.getLocation(startOffset, this.scanner.tokenStart),
prelude: prelude,
block: block
};
},
generate: function(node) {
this.node(node.prelude);
this.node(node.block);
},
walkContext: 'rule'
export const name = 'Rule';
export const walkContext = 'rule';
export const structure = {
prelude: ['SelectorList', 'Raw'],
block: ['Block']
};
// Parses a style rule: a prelude (selector list, or Raw when prelude
// parsing is disabled/fails) followed by a declaration block.
export function parse() {
    const startToken = this.tokenIndex;
    const startOffset = this.tokenStart;
    let prelude;
    let block;
    if (this.parseRulePrelude) {
        // Try a real SelectorList first; fall back to Raw on failure
        prelude = this.parseWithFallback(consumePrelude, consumeRaw);
    } else {
        // NOTE(review): consumeRaw takes no parameters in this version;
        // the `startToken` argument looks vestigial — confirm against
        // upstream before removing.
        prelude = consumeRaw.call(this, startToken);
    }
    block = this.Block(true);
    return {
        type: 'Rule',
        loc: this.getLocation(startOffset, this.tokenStart),
        prelude,
        block
    };
}
// A rule serializes as its prelude followed by its block.
export function generate(node) {
    this.node(node.prelude);
    this.node(node.block);
}

Some files were not shown because too many files have changed in this diff Show More