node_modules: upgrade

Dawid Dziurla
2025-06-14 23:18:18 +02:00
parent 948e4a4fb8
commit de40b1f21f
268 changed files with 2150 additions and 3858 deletions

View File

@@ -18,7 +18,7 @@ function resolveAsScalar(token, strict = true, onError) {
case 'double-quoted-scalar':
return resolveFlowScalar(token, strict, _onError);
case 'block-scalar':
-return resolveBlockScalar(token, strict, _onError);
+return resolveBlockScalar({ options: { strict } }, token, _onError);
}
}
return null;
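
Only the resolveBlockScalar call changes here: the strict flag is now passed wrapped in an options object as the first argument. For orientation, resolveAsScalar is one of the yaml package's CST helpers; the following is a minimal usage sketch assuming the documented Parser and CST exports — the input, variable names and logged shape are illustrative, not taken from this diff.

    import { Parser, CST } from 'yaml'

    // 'msg' maps to a literal block scalar.
    const src = 'msg: |\n  hello\n  world\n'
    for (const token of new Parser().parse(src)) {
      if (token.type !== 'document') continue
      // document -> block-map -> first pair -> value token (the block scalar)
      const blockScalar = token.value.items[0].value
      // roughly { value: 'hello\nworld\n', type: 'BLOCK_LITERAL', comment: '', range: [...] }
      console.log(CST.resolveAsScalar(blockScalar, true))
    }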

View File

@@ -79,11 +79,11 @@ function isEmpty(ch) {
return false;
}
}
-const hexDigits = '0123456789ABCDEFabcdef'.split('');
-const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split('');
-const invalidFlowScalarChars = ',[]{}'.split('');
-const invalidAnchorChars = ' ,[]{}\n\r\t'.split('');
-const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);
+const hexDigits = new Set('0123456789ABCDEFabcdef');
+const tagChars = new Set("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()");
+const flowIndicatorChars = new Set(',[]{}');
+const invalidAnchorChars = new Set(' ,[]{}\n\r\t');
+const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.has(ch);
/**
* Splits an input string into lexical tokens, i.e. smaller strings that are
* easily identifiable by `tokens.tokenType()`.
@@ -149,6 +149,8 @@ class Lexer {
*/
*lex(source, incomplete = false) {
if (source) {
+if (typeof source !== 'string')
+throw TypeError('source is not a string');
this.buffer = this.buffer ? this.buffer + source : source;
this.lineEndPos = null;
}
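
The new guard makes Lexer.lex fail fast on non-string input with the TypeError shown above. A small sketch against the public Lexer API (sample input is illustrative):

    import { Lexer } from 'yaml'

    // Lexer.lex() is a generator of token strings.
    console.log([...new Lexer().lex('key: value\n')])

    // Anything that is not a string, e.g. a Buffer, now fails fast:
    // new Lexer().lex(Buffer.from('key: value\n')).next()
    //   -> TypeError: source is not a string
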
@@ -248,11 +250,16 @@ class Lexer {
}
if (line[0] === '%') {
let dirEnd = line.length;
-const cs = line.indexOf('#');
-if (cs !== -1) {
+let cs = line.indexOf('#');
+while (cs !== -1) {
const ch = line[cs - 1];
-if (ch === ' ' || ch === '\t')
+if (ch === ' ' || ch === '\t') {
dirEnd = cs - 1;
+break;
+}
+else {
+cs = line.indexOf('#', cs + 1);
+}
}
while (true) {
const ch = line[dirEnd - 1];
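
The old code only looked at the first '#' on a %-directive line and ignored it unless preceded by a space or tab; the loop now keeps scanning so that a later, whitespace-preceded '#' still terminates the directive as a comment. A hedged illustration — '#' is a legal URI character inside a tag prefix; the input, the directives.tags lookup and the expected value are assumptions on our part:

    import { parseDocument } from 'yaml'

    // The first '#' is part of the tag prefix URI; only the second one,
    // which follows whitespace, starts the comment.
    const src = '%TAG !e! tag:example.com,2000:app/#frag  # directive comment\n--- value\n'
    const doc = parseDocument(src)
    console.log(doc.directives.tags['!e!'])
    // expected: 'tag:example.com,2000:app/#frag' (comment text no longer swallowed)
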
@@ -283,15 +290,11 @@ class Lexer {
if (!this.atEnd && !this.hasChars(4))
return this.setNext('line-start');
const s = this.peek(3);
-if (s === '---' && isEmpty(this.charAt(3))) {
+if ((s === '---' || s === '...') && isEmpty(this.charAt(3))) {
yield* this.pushCount(3);
this.indentValue = 0;
this.indentNext = 0;
-return 'doc';
-}
-else if (s === '...' && isEmpty(this.charAt(3))) {
-yield* this.pushCount(3);
-return 'stream';
+return s === '---' ? 'doc' : 'stream';
}
}
this.indentValue = yield* this.pushSpaces(false);
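
The '---' and '...' branches are merged, so the document-end marker now also resets indentValue and indentNext before returning 'stream'. For reference, both markers in a multi-document stream (illustrative input, standard parseAllDocuments API):

    import { parseAllDocuments } from 'yaml'

    // '...' closes the first document, '---' opens the next one.
    const src = 'a: 1\n...\n---\nb: 2\n'
    console.log(parseAllDocuments(src).map((d) => d.toJS()))
    // roughly: [ { a: 1 }, { b: 2 } ]
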
@@ -518,8 +521,10 @@ class Lexer {
if (indent >= this.indentNext) {
if (this.blockScalarIndent === -1)
this.indentNext = indent;
-else
-this.indentNext += this.blockScalarIndent;
+else {
+this.indentNext =
+this.blockScalarIndent + (this.indentNext === 0 ? 1 : this.indentNext);
+}
do {
const cs = this.continueScalar(nl + 1);
if (cs === -1)
@@ -532,14 +537,25 @@ class Lexer {
nl = this.buffer.length;
}
}
-if (!this.blockScalarKeep) {
+// Trailing insufficiently indented tabs are invalid.
+// To catch that during parsing, we include them in the block scalar value.
+let i = nl + 1;
+ch = this.buffer[i];
+while (ch === ' ')
+ch = this.buffer[++i];
+if (ch === '\t') {
+while (ch === '\t' || ch === ' ' || ch === '\r' || ch === '\n')
+ch = this.buffer[++i];
+nl = i - 1;
+}
+else if (!this.blockScalarKeep) {
do {
let i = nl - 1;
let ch = this.buffer[i];
if (ch === '\r')
ch = this.buffer[--i];
const lastChar = i; // Drop the line if last char not more indented
-while (ch === ' ' || ch === '\t')
+while (ch === ' ')
ch = this.buffer[--i];
if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar)
nl = i;
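
Two behavioural tweaks to block-scalar scanning: with an explicit indentation indicator, indentNext is now computed as the indicator plus the current indentNext (treating a remaining 0 as 1) instead of being accumulated with +=, and trailing tab-indented lines are kept inside the scalar token so the parser can reject them later, per the comment above. A quick way to observe the token boundaries — the input is ours and the output is deliberately not asserted:

    import { Lexer } from 'yaml'

    // A literal block scalar with an explicit indentation indicator,
    // followed by a tab-indented trailing line.
    const src = 'key: |2\n   some text\n\t\n'
    for (const token of new Lexer().lex(src)) console.log(JSON.stringify(token))
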
@@ -559,7 +575,7 @@ class Lexer {
while ((ch = this.buffer[++i])) {
if (ch === ':') {
const next = this.buffer[i + 1];
-if (isEmpty(next) || (inFlow && next === ','))
+if (isEmpty(next) || (inFlow && flowIndicatorChars.has(next)))
break;
end = i;
}
@@ -574,7 +590,7 @@ class Lexer {
else
end = i;
}
-if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))
+if (next === '#' || (inFlow && flowIndicatorChars.has(next)))
break;
if (ch === '\n') {
const cs = this.continueScalar(i + 1);
@@ -584,7 +600,7 @@ class Lexer {
}
}
else {
-if (inFlow && invalidFlowScalarChars.includes(ch))
+if (inFlow && flowIndicatorChars.has(ch))
break;
end = i;
}
@@ -629,7 +645,7 @@ class Lexer {
case ':': {
const inFlow = this.flowLevel > 0;
const ch1 = this.charAt(1);
-if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) {
+if (isEmpty(ch1) || (inFlow && flowIndicatorChars.has(ch1))) {
if (!inFlow)
this.indentNext = this.indentValue + 1;
else if (this.flowKey)
@@ -654,11 +670,11 @@ class Lexer {
let i = this.pos + 1;
let ch = this.buffer[i];
while (ch) {
-if (tagChars.includes(ch))
+if (tagChars.has(ch))
ch = this.buffer[++i];
else if (ch === '%' &&
-hexDigits.includes(this.buffer[i + 1]) &&
-hexDigits.includes(this.buffer[i + 2])) {
+hexDigits.has(this.buffer[i + 1]) &&
+hexDigits.has(this.buffer[i + 2])) {
ch = this.buffer[(i += 3)];
}
else

View File

@@ -304,7 +304,7 @@ class Parser {
}
else {
Object.assign(it, { key: token, sep: [] });
-this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');
+this.onKeyLine = !it.explicitKey;
return;
}
break;
@@ -513,7 +513,10 @@ class Parser {
return;
}
if (this.indent >= map.indent) {
-const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep;
+const atMapIndent = !this.onKeyLine && this.indent === map.indent;
+const atNextItem = atMapIndent &&
+(it.sep || it.explicitKey) &&
+this.type !== 'seq-item-ind';
// For empty nodes, assign newline-separated not indented empty tokens to following node
let start = [];
if (atNextItem && it.sep && !it.value) {
@@ -553,25 +556,26 @@ class Parser {
}
return;
case 'explicit-key-ind':
-if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) {
+if (!it.sep && !it.explicitKey) {
it.start.push(this.sourceToken);
+it.explicitKey = true;
}
else if (atNextItem || it.value) {
start.push(this.sourceToken);
-map.items.push({ start });
+map.items.push({ start, explicitKey: true });
}
else {
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
-items: [{ start: [this.sourceToken] }]
+items: [{ start: [this.sourceToken], explicitKey: true }]
});
}
this.onKeyLine = true;
return;
case 'map-value-ind':
-if (includesToken(it.start, 'explicit-key-ind')) {
+if (it.explicitKey) {
if (!it.sep) {
if (includesToken(it.start, 'newline')) {
Object.assign(it, { key: null, sep: [this.sourceToken] });
@@ -604,7 +608,9 @@ class Parser {
const sep = it.sep;
sep.push(this.sourceToken);
// @ts-expect-error type guard is wrong here
-delete it.key, delete it.sep;
+delete it.key;
+// @ts-expect-error type guard is wrong here
+delete it.sep;
this.stack.push({
type: 'block-map',
offset: this.offset,
@@ -662,9 +668,20 @@ class Parser {
default: {
const bv = this.startBlockValue(map);
if (bv) {
-if (atNextItem &&
-bv.type !== 'block-seq' &&
-includesToken(it.start, 'explicit-key-ind')) {
+if (bv.type === 'block-seq') {
+if (!it.explicitKey &&
+it.sep &&
+!includesToken(it.sep, 'newline')) {
+yield* this.pop({
+type: 'error',
+offset: this.offset,
+message: 'Unexpected block-seq-ind on same line with key',
+source: this.source
+});
+return;
+}
+}
+else if (atMapIndent) {
map.items.push({ start });
}
this.stack.push(bv);
@@ -885,7 +902,7 @@ class Parser {
type: 'block-map',
offset: this.offset,
indent: this.indent,
-items: [{ start }]
+items: [{ start, explicitKey: true }]
};
}
case 'map-value-ind': {
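
Taken together, the Parser hunks above replace the includesToken(it.start, 'explicit-key-ind') scans with an explicitKey flag on block-map items and add an error for a block-sequence indicator on the same line as a key. A hedged sketch of both behaviours, assuming the package's documented parse/parseDocument entry points; inputs and expected shapes are ours:

    import { parse, parseDocument } from 'yaml'

    // Explicit-key syntax: '?' introduces the key, ':' the value.
    console.log(parse('? [a, complex, key]\n: value\n', { mapAsMap: true }))
    // roughly: Map { [ 'a', 'complex', 'key' ] => 'value' }

    // A '-' opening a block sequence on the same line as a key should now
    // surface the error message added in the hunk above.
    const bad = parseDocument('key: - a\n- b\n')
    console.log(bad.errors.map((e) => e.message))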