mirror of https://github.com/dawidd6/action-ansible-playbook.git (synced 2025-12-09 22:30:41 -07:00)
node_modules: upgrade

node_modules/yaml/dist/parse/cst-scalar.d.ts (generated, vendored): 8 lines changed
@@ -1,7 +1,7 @@
-import { ErrorCode } from '../errors.js';
-import { Range } from '../nodes/Node.js';
-import type { Scalar } from '../nodes/Scalar.js';
-import type { BlockScalar, FlowScalar, SourceToken, Token } from './cst.js';
+import type { ErrorCode } from '../errors';
+import type { Range } from '../nodes/Node';
+import type { Scalar } from '../nodes/Scalar';
+import type { BlockScalar, FlowScalar, SourceToken, Token } from './cst';
 /**
  * If `token` is a CST flow or block scalar, determine its string value and a few other attributes.
  * Otherwise, return `null`.
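
These .d.ts changes swap runtime-style imports for type-only, extensionless ones. An `import type` declaration is erased at compile time, so the emitted JavaScript carries no require() for the named module. A minimal sketch of the distinction, with illustrative file and member names:

    // a.ts: exports only a type, nothing at runtime
    export type ErrorCode = 'BAD_INDENT' | 'UNEXPECTED_TOKEN';

    // b.ts: type-only import; the compiled b.js contains no require('./a')
    import type { ErrorCode } from './a';

    export function describe(code: ErrorCode): string {
        return `YAML parse error: ${code}`;
    }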

node_modules/yaml/dist/parse/cst-scalar.js (generated, vendored): 2 lines changed
@@ -20,7 +20,7 @@ function resolveAsScalar(token, strict = true, onError) {
             case 'double-quoted-scalar':
                 return resolveFlowScalar.resolveFlowScalar(token, strict, _onError);
             case 'block-scalar':
-                return resolveBlockScalar.resolveBlockScalar(token, strict, _onError);
+                return resolveBlockScalar.resolveBlockScalar({ options: { strict } }, token, _onError);
         }
     }
     return null;
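
The vendored resolveBlockScalar now takes a context object carrying the parse options as its first argument instead of a bare strict flag. Callers normally reach this code through the public CST.resolveAsScalar helper, which applies the calling convention internally. A sketch of that public path, assuming the yaml v2 API; the logged values are what I would expect, not something verified by this diff:

    import { Parser, CST } from 'yaml';

    const [doc] = Array.from(new Parser().parse('msg: |\n  hello\n'));
    CST.visit(doc as CST.Document, ({ value }) => {
        if (CST.isScalar(value)) {
            const resolved = CST.resolveAsScalar(value, true, () => {});
            console.log(resolved?.type, JSON.stringify(resolved?.value));
            // expected: BLOCK_LITERAL "hello\n"
        }
    });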

node_modules/yaml/dist/parse/cst-stringify.d.ts (generated, vendored): 2 lines changed
@@ -1,4 +1,4 @@
-import type { CollectionItem, Token } from './cst.js';
+import type { CollectionItem, Token } from './cst';
 /**
  * Stringify a CST document, token, or collection item
  *

node_modules/yaml/dist/parse/cst-visit.d.ts (generated, vendored): 4 lines changed
@@ -1,4 +1,4 @@
-import type { CollectionItem, Document } from './cst.js';
+import type { BlockMap, BlockSequence, CollectionItem, Document, FlowCollection } from './cst';
 export type VisitPath = readonly ['key' | 'value', number][];
 export type Visitor = (item: CollectionItem, path: VisitPath) => number | symbol | Visitor | void;
 /**
@@ -35,5 +35,5 @@ export declare namespace visit {
     var SKIP: symbol;
     var REMOVE: symbol;
     var itemAtPath: (cst: Document | CollectionItem, path: VisitPath) => CollectionItem | undefined;
-    var parentCollection: (cst: Document | CollectionItem, path: VisitPath) => import("./cst.js").BlockMap | import("./cst.js").BlockSequence | import("./cst.js").FlowCollection;
+    var parentCollection: (cst: Document | CollectionItem, path: VisitPath) => BlockMap | BlockSequence | FlowCollection;
 }

node_modules/yaml/dist/parse/cst.d.ts (generated, vendored): 11 lines changed
@@ -1,6 +1,7 @@
-export { createScalarToken, resolveAsScalar, setScalarValue } from './cst-scalar.js';
-export { stringify } from './cst-stringify.js';
-export { visit, Visitor, VisitPath } from './cst-visit.js';
+export { createScalarToken, resolveAsScalar, setScalarValue } from './cst-scalar';
+export { stringify } from './cst-stringify';
+export type { Visitor, VisitPath } from './cst-visit';
+export { visit } from './cst-visit';
 export interface SourceToken {
     type: 'byte-order-mark' | 'doc-mode' | 'doc-start' | 'space' | 'comment' | 'newline' | 'directive-line' | 'anchor' | 'tag' | 'seq-item-ind' | 'explicit-key-ind' | 'map-value-ind' | 'flow-map-start' | 'flow-map-end' | 'flow-seq-start' | 'flow-seq-end' | 'flow-error-end' | 'comma' | 'block-scalar-header';
     offset: number;
@@ -51,11 +52,13 @@ export interface BlockMap {
     indent: number;
     items: Array<{
         start: SourceToken[];
+        explicitKey?: true;
         key?: never;
         sep?: never;
         value?: never;
     } | {
         start: SourceToken[];
+        explicitKey?: true;
         key: Token | null;
         sep: SourceToken[];
         value?: Token;
@@ -99,7 +102,7 @@ export declare const SCALAR = "\u001F";
 /** @returns `true` if `token` is a flow or block collection */
 export declare const isCollection: (token: Token | null | undefined) => token is BlockMap | BlockSequence | FlowCollection;
 /** @returns `true` if `token` is a flow or block scalar; not an alias */
-export declare const isScalar: (token: Token | null | undefined) => token is BlockScalar | FlowScalar;
+export declare const isScalar: (token: Token | null | undefined) => token is FlowScalar | BlockScalar;
 /** Get a printable representation of a lexer token */
 export declare function prettyToken(token: string): string;
 /** Identify the type of a lexer token. May return `null` for unknown tokens. */
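
Re-exporting Visitor and VisitPath via `export type` instead of the former mixed `export { visit, Visitor, VisitPath }` makes their type-only nature explicit. That matters under TypeScript's isolatedModules (and verbatimModuleSyntax), where each file is transpiled in isolation and the compiler must know, without inspecting the other module, whether a re-export can be dropped from the emitted JS. Sketch with illustrative file names:

    // visit-utils.ts
    export function visit(): void {}                              // a runtime value
    export type VisitPath = readonly ['key' | 'value', number][]; // a pure type

    // index.ts
    export { visit } from './visit-utils';          // kept in the emitted index.js
    export type { VisitPath } from './visit-utils'; // erased at compile time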

node_modules/yaml/dist/parse/lexer.d.ts (generated, vendored): 2 lines changed
@@ -59,7 +59,7 @@ export declare class Lexer {
      *
      * @returns A generator of lexical tokens
      */
-    lex(source: string, incomplete?: boolean): Generator<string, void, unknown>;
+    lex(source: string, incomplete?: boolean): Generator<string, void>;
     private atLineEnd;
     private charAt;
     private continueScalar;
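
Dropping the explicit third type argument is cosmetic: Generator's TNext parameter has a default in the TypeScript lib definitions, so `Generator<string, void>` and `Generator<string, void, unknown>` are mutually assignable. A compile-time check of that equivalence:

    type Old = Generator<string, void, unknown>;
    type New = Generator<string, void>;

    // Compiles only if Old and New are assignable in both directions.
    const sameType: Old extends New ? (New extends Old ? true : false) : false = true;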

node_modules/yaml/dist/parse/lexer.js (generated, vendored): 66 lines changed
@@ -81,11 +81,11 @@ function isEmpty(ch) {
             return false;
     }
 }
-const hexDigits = '0123456789ABCDEFabcdef'.split('');
-const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split('');
-const invalidFlowScalarChars = ',[]{}'.split('');
-const invalidAnchorChars = ' ,[]{}\n\r\t'.split('');
-const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);
+const hexDigits = new Set('0123456789ABCDEFabcdef');
+const tagChars = new Set("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()");
+const flowIndicatorChars = new Set(',[]{}');
+const invalidAnchorChars = new Set(' ,[]{}\n\r\t');
+const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.has(ch);
 /**
  * Splits an input string into lexical tokens, i.e. smaller strings that are
  * easily identifiable by `tokens.tokenType()`.
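
The character-class tables move from arrays built with split('') to Sets: Set.prototype.has is an O(1) hash lookup, while Array.prototype.includes scans linearly, and the lexer consults these tables for a large share of input characters. The rename of invalidFlowScalarChars to flowIndicatorChars also describes what the characters are rather than one place they are rejected. The equivalence in miniature:

    const asArray = ',[]{}'.split(''); // [',', '[', ']', '{', '}'], O(n) lookup
    const asSet = new Set(',[]{}');    // same members, O(1) average lookup

    console.log(asArray.includes('{'), asSet.has('{')); // true true
    console.log(asArray.includes('x'), asSet.has('x')); // false false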
@@ -151,6 +151,8 @@ class Lexer {
      */
     *lex(source, incomplete = false) {
         if (source) {
+            if (typeof source !== 'string')
+                throw TypeError('source is not a string');
             this.buffer = this.buffer ? this.buffer + source : source;
             this.lineEndPos = null;
         }
@@ -250,11 +252,16 @@ class Lexer {
         }
         if (line[0] === '%') {
             let dirEnd = line.length;
-            const cs = line.indexOf('#');
-            if (cs !== -1) {
+            let cs = line.indexOf('#');
+            while (cs !== -1) {
                 const ch = line[cs - 1];
-                if (ch === ' ' || ch === '\t')
+                if (ch === ' ' || ch === '\t') {
                     dirEnd = cs - 1;
+                    break;
+                }
+                else {
+                    cs = line.indexOf('#', cs + 1);
+                }
             }
             while (true) {
                 const ch = line[dirEnd - 1];
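
The comment scan in %-directive lines becomes a loop because a '#' not preceded by a space or tab does not start a comment. The old code inspected only the first '#' and gave up, so a genuine comment marker later on the same line was missed and stayed inside the directive. A sketch of an input where the loop matters (hypothetical tag prefix; the trailing comment should now be excluded from the directive-line token):

    import { Lexer } from 'yaml';

    // The first '#' has no space before it, so it belongs to the tag prefix;
    // the second one, after a space, starts a real comment.
    const src = '%TAG !e! tag:example.com,2000:app#x # comment\n---\n';
    for (const token of new Lexer().lex(src))
        console.log(JSON.stringify(token));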
@@ -285,15 +292,11 @@ class Lexer {
             if (!this.atEnd && !this.hasChars(4))
                 return this.setNext('line-start');
             const s = this.peek(3);
-            if (s === '---' && isEmpty(this.charAt(3))) {
+            if ((s === '---' || s === '...') && isEmpty(this.charAt(3))) {
                 yield* this.pushCount(3);
                 this.indentValue = 0;
                 this.indentNext = 0;
-                return 'doc';
-            }
-            else if (s === '...' && isEmpty(this.charAt(3))) {
-                yield* this.pushCount(3);
-                return 'stream';
+                return s === '---' ? 'doc' : 'stream';
             }
         }
         this.indentValue = yield* this.pushSpaces(false);
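
The separate '---' and '...' branches collapse into one; both are three-character markers recognized at the start of a line, and as a side effect the lexer now resets indentValue and indentNext for '...' as well. A quick way to watch the two markers come out of the lexer:

    import { Lexer } from 'yaml';

    for (const token of new Lexer().lex('---\na: 1\n...\n'))
        console.log(JSON.stringify(token));
    // the output includes "---" (doc-start) and "..." (doc-end) as standalone tokens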
@@ -520,8 +523,10 @@ class Lexer {
         if (indent >= this.indentNext) {
             if (this.blockScalarIndent === -1)
                 this.indentNext = indent;
-            else
-                this.indentNext += this.blockScalarIndent;
+            else {
+                this.indentNext =
+                    this.blockScalarIndent + (this.indentNext === 0 ? 1 : this.indentNext);
+            }
             do {
                 const cs = this.continueScalar(nl + 1);
                 if (cs === -1)
@@ -534,14 +539,25 @@ class Lexer {
                 nl = this.buffer.length;
             }
         }
-        if (!this.blockScalarKeep) {
+        // Trailing insufficiently indented tabs are invalid.
+        // To catch that during parsing, we include them in the block scalar value.
+        let i = nl + 1;
+        ch = this.buffer[i];
+        while (ch === ' ')
+            ch = this.buffer[++i];
+        if (ch === '\t') {
+            while (ch === '\t' || ch === ' ' || ch === '\r' || ch === '\n')
+                ch = this.buffer[++i];
+            nl = i - 1;
+        }
+        else if (!this.blockScalarKeep) {
             do {
                 let i = nl - 1;
                 let ch = this.buffer[i];
                 if (ch === '\r')
                     ch = this.buffer[--i];
                 const lastChar = i; // Drop the line if last char not more indented
-                while (ch === ' ' || ch === '\t')
+                while (ch === ' ')
                     ch = this.buffer[--i];
                 if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar)
                     nl = i;
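
Per the new comment, a trailing run of blank-looking lines that starts with an insufficiently indented tab is kept inside the block scalar's source range, so the parser can flag it instead of the lexer silently dropping those lines. A sketch of an input that should exercise this; the exact error code is the parser's business, but I would expect an error rather than a clean parse:

    import { parseDocument } from 'yaml';

    // Block scalar indented by two spaces, followed by a line holding only a tab.
    const doc = parseDocument('key: |\n  text\n\t\n');
    console.log(doc.errors.map(e => `${e.code}: ${e.message}`));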
@@ -561,7 +577,7 @@ class Lexer {
         while ((ch = this.buffer[++i])) {
             if (ch === ':') {
                 const next = this.buffer[i + 1];
-                if (isEmpty(next) || (inFlow && next === ','))
+                if (isEmpty(next) || (inFlow && flowIndicatorChars.has(next)))
                     break;
                 end = i;
             }
@@ -576,7 +592,7 @@ class Lexer {
                     else
                         end = i;
                 }
-                if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))
+                if (next === '#' || (inFlow && flowIndicatorChars.has(next)))
                     break;
                 if (ch === '\n') {
                     const cs = this.continueScalar(i + 1);
@@ -586,7 +602,7 @@ class Lexer {
                 }
             }
             else {
-                if (inFlow && invalidFlowScalarChars.includes(ch))
+                if (inFlow && flowIndicatorChars.has(ch))
                     break;
                 end = i;
             }
@@ -631,7 +647,7 @@ class Lexer {
             case ':': {
                 const inFlow = this.flowLevel > 0;
                 const ch1 = this.charAt(1);
-                if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) {
+                if (isEmpty(ch1) || (inFlow && flowIndicatorChars.has(ch1))) {
                     if (!inFlow)
                         this.indentNext = this.indentValue + 1;
                     else if (this.flowKey)
@@ -656,11 +672,11 @@ class Lexer {
         let i = this.pos + 1;
         let ch = this.buffer[i];
         while (ch) {
-            if (tagChars.includes(ch))
+            if (tagChars.has(ch))
                 ch = this.buffer[++i];
             else if (ch === '%' &&
-                hexDigits.includes(this.buffer[i + 1]) &&
-                hexDigits.includes(this.buffer[i + 2])) {
+                hexDigits.has(this.buffer[i + 1]) &&
+                hexDigits.has(this.buffer[i + 2])) {
                 ch = this.buffer[(i += 3)];
             }
             else

node_modules/yaml/dist/parse/parser.d.ts (generated, vendored): 8 lines changed
@@ -1,4 +1,4 @@
-import { Token } from './cst.js';
+import type { Token } from './cst';
 /**
  * A YAML concrete syntax tree (CST) parser
  *
@@ -57,14 +57,14 @@ export declare class Parser {
      *
      * @returns A generator of tokens representing each directive, document, and other structure.
      */
-    parse(source: string, incomplete?: boolean): Generator<Token, void, unknown>;
+    parse(source: string, incomplete?: boolean): Generator<Token, void>;
     /**
      * Advance the parser by the `source` of one lexical token.
      */
-    next(source: string): Generator<Token, void, unknown>;
+    next(source: string): Generator<Token, void>;
     private lexer;
     /** Call at end of input to push out any remaining constructions */
-    end(): Generator<Token, void, unknown>;
+    end(): Generator<Token, void>;
     private get sourceToken();
     private step;
     private peek;

node_modules/yaml/dist/parse/parser.js (generated, vendored): 42 lines changed
@@ -1,5 +1,6 @@
 'use strict';
 
+var node_process = require('process');
 var cst = require('./cst.js');
 var lexer = require('./lexer.js');
 
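
The functional change here is loading the process built-in explicitly rather than reading the global. Under Node the behavior is identical, but an explicit require('process') resolves more reliably under bundlers and runtimes that provide process as a module instead of a global. In miniature:

    import * as node_process from 'process';

    // Same check the parser performs before logging each token:
    if (node_process.env.LOG_TOKENS)
        console.log('token logging enabled');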
@@ -166,7 +167,7 @@ class Parser {
      */
     *next(source) {
         this.source = source;
-        if (process.env.LOG_TOKENS)
+        if (node_process.env.LOG_TOKENS)
             console.log('|', cst.prettyToken(source));
         if (this.atScalar) {
             this.atScalar = false;
@@ -308,7 +309,7 @@ class Parser {
                 }
                 else {
                     Object.assign(it, { key: token, sep: [] });
-                    this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');
+                    this.onKeyLine = !it.explicitKey;
                     return;
                 }
                 break;
@@ -517,7 +518,10 @@ class Parser {
             return;
         }
         if (this.indent >= map.indent) {
-            const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep;
+            const atMapIndent = !this.onKeyLine && this.indent === map.indent;
+            const atNextItem = atMapIndent &&
+                (it.sep || it.explicitKey) &&
+                this.type !== 'seq-item-ind';
             // For empty nodes, assign newline-separated not indented empty tokens to following node
             let start = [];
             if (atNextItem && it.sep && !it.value) {
@@ -557,25 +561,26 @@ class Parser {
                 }
                 return;
             case 'explicit-key-ind':
-                if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) {
+                if (!it.sep && !it.explicitKey) {
                     it.start.push(this.sourceToken);
+                    it.explicitKey = true;
                 }
                 else if (atNextItem || it.value) {
                     start.push(this.sourceToken);
-                    map.items.push({ start });
+                    map.items.push({ start, explicitKey: true });
                 }
                 else {
                     this.stack.push({
                         type: 'block-map',
                         offset: this.offset,
                         indent: this.indent,
-                        items: [{ start: [this.sourceToken] }]
+                        items: [{ start: [this.sourceToken], explicitKey: true }]
                     });
                 }
                 this.onKeyLine = true;
                 return;
             case 'map-value-ind':
-                if (includesToken(it.start, 'explicit-key-ind')) {
+                if (it.explicitKey) {
                     if (!it.sep) {
                         if (includesToken(it.start, 'newline')) {
                             Object.assign(it, { key: null, sep: [this.sourceToken] });
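
The recurring edit in this file replaces scans of it.start for an 'explicit-key-ind' source token with the new explicitKey flag that cst.d.ts added to block-map items above. An explicit key is one written with the '?' indicator; a sketch of how the flag should show up on the CST with the upgraded parser:

    import { Parser } from 'yaml';

    const src = '? [a, b]\n: value\n';
    for (const token of new Parser().parse(src)) {
        if (token.type === 'document' && token.value?.type === 'block-map')
            console.log(token.value.items[0].explicitKey); // true
    }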
@@ -608,7 +613,9 @@ class Parser {
                     const sep = it.sep;
                     sep.push(this.sourceToken);
                     // @ts-expect-error type guard is wrong here
-                    delete it.key, delete it.sep;
+                    delete it.key;
+                    // @ts-expect-error type guard is wrong here
+                    delete it.sep;
                     this.stack.push({
                         type: 'block-map',
                         offset: this.offset,
@@ -666,9 +673,20 @@ class Parser {
             default: {
                 const bv = this.startBlockValue(map);
                 if (bv) {
-                    if (atNextItem &&
-                        bv.type !== 'block-seq' &&
-                        includesToken(it.start, 'explicit-key-ind')) {
+                    if (bv.type === 'block-seq') {
+                        if (!it.explicitKey &&
+                            it.sep &&
+                            !includesToken(it.sep, 'newline')) {
+                            yield* this.pop({
+                                type: 'error',
+                                offset: this.offset,
+                                message: 'Unexpected block-seq-ind on same line with key',
+                                source: this.source
+                            });
+                            return;
+                        }
+                    }
+                    else if (atMapIndent) {
                         map.items.push({ start });
                     }
                     this.stack.push(bv);
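
The restructured default case adds a guard that rejects a block sequence indicator on the same line as a mapping key, emitting an explicit error token instead of mis-nesting the sequence. A sketch of an input that should now trip it (the message text comes from the diff; how it surfaces in doc.errors is an assumption):

    import { parseDocument } from 'yaml';

    const doc = parseDocument('key: - a\n');
    console.log(doc.errors.map(e => e.message));
    // expected to include: 'Unexpected block-seq-ind on same line with key'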
@@ -889,7 +907,7 @@ class Parser {
                 type: 'block-map',
                 offset: this.offset,
                 indent: this.indent,
-                items: [{ start }]
+                items: [{ start, explicitKey: true }]
             };
         }
         case 'map-value-ind': {