node_modules: upgrade

Author: Dawid Dziurla
Date: 2025-06-14 23:18:18 +02:00
parent 948e4a4fb8
commit de40b1f21f
268 changed files with 2150 additions and 3858 deletions

node_modules/yaml/README.md (generated, vendored): 81 lines changed

@@ -24,60 +24,103 @@ To install:
```sh
npm install yaml
# or
deno add jsr:@eemeli/yaml
```
**Note:** These docs are for `yaml@2`. For v1, see the [v1.10.0 tag](https://github.com/eemeli/yaml/tree/v1.10.0) for the source and [eemeli.org/yaml/v1](https://eemeli.org/yaml/v1/) for the documentation.
The development and maintenance of this library is [sponsored](https://github.com/sponsors/eemeli) by:
<p align="center" width="100%">
<a href="https://www.scipress.io/"
><img
width="150"
align="top"
src="https://eemeli.org/yaml/images/scipress.svg"
alt="Scipress"
/></a>
&nbsp; &nbsp;
<a href="https://manifest.build/"
><img
width="150"
align="top"
src="https://eemeli.org/yaml/images/manifest.svg"
alt="Manifest"
/></a>
</p>
## API Overview
The API provided by `yaml` has three layers, depending on how deep you need to go: [Parse & Stringify](https://eemeli.org/yaml/#parse-amp-stringify), [Documents](https://eemeli.org/yaml/#documents), and the underlying [Lexer/Parser/Composer](https://eemeli.org/yaml/#parsing-yaml).
The first has the simplest API and "just works"; the second gets you all the bells and whistles supported by the library, along with a decent [AST](https://eemeli.org/yaml/#content-nodes); and the third lets you get progressively closer to the YAML source, if that's your thing.
```js
import { parse, stringify } from 'yaml'
// or
import YAML from 'yaml'
// or
const YAML = require('yaml')
```
A [command-line tool](https://eemeli.org/yaml/#command-line-tool) is also included.
### Parse & Stringify
```js
import { parse, stringify } from 'yaml'
```
- [`parse(str, reviver?, options?): value`](https://eemeli.org/yaml/#yaml-parse)
- [`stringify(value, replacer?, options?): string`](https://eemeli.org/yaml/#yaml-stringify)
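For quick reference, a minimal sketch of the two functions (assuming `yaml@2` defaults):
```js
import { parse, stringify } from 'yaml'

parse('a: 1\nb: [2, 3]')
// → { a: 1, b: [ 2, 3 ] }

stringify({ a: 1, b: [2, 3] })
// → 'a: 1\nb:\n  - 2\n  - 3\n'
```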
### Documents
<!-- prettier-ignore -->
```js
import {
Document,
isDocument,
parseAllDocuments,
parseDocument
} from 'yaml'
```
- [`Document`](https://eemeli.org/yaml/#documents)
- [`constructor(value, replacer?, options?)`](https://eemeli.org/yaml/#creating-documents)
- [`#anchors`](https://eemeli.org/yaml/#working-with-anchors)
- [`#contents`](https://eemeli.org/yaml/#content-nodes)
- [`#directives`](https://eemeli.org/yaml/#stream-directives)
- [`#errors`](https://eemeli.org/yaml/#errors)
- [`#warnings`](https://eemeli.org/yaml/#errors)
- [`isDocument(foo): boolean`](https://eemeli.org/yaml/#identifying-nodes)
- [`isDocument(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types)
- [`parseAllDocuments(str, options?): Document[]`](https://eemeli.org/yaml/#parsing-documents)
- [`parseDocument(str, options?): Document`](https://eemeli.org/yaml/#parsing-documents)
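A small usage sketch for the Document layer (method names as listed above; output shapes assume default options):
```js
import { parseDocument } from 'yaml'

const doc = parseDocument('a: 1\nb: [2, 3]')
doc.errors      // [] (parse problems are collected here, not thrown)
doc.get('a')    // 1
doc.set('a', 42)
doc.toJS()      // { a: 42, b: [ 2, 3 ] }
doc.toString()  // 'a: 42\nb: [ 2, 3 ]\n'
```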
### Content Nodes
- [`isAlias(foo): boolean`](https://eemeli.org/yaml/#identifying-nodes)
- [`isCollection(foo): boolean`](https://eemeli.org/yaml/#identifying-nodes)
- [`isMap(foo): boolean`](https://eemeli.org/yaml/#identifying-nodes)
- [`isNode(foo): boolean`](https://eemeli.org/yaml/#identifying-nodes)
- [`isPair(foo): boolean`](https://eemeli.org/yaml/#identifying-nodes)
- [`isScalar(foo): boolean`](https://eemeli.org/yaml/#identifying-nodes)
- [`isSeq(foo): boolean`](https://eemeli.org/yaml/#identifying-nodes)
<!-- prettier-ignore -->
```js
import {
isAlias, isCollection, isMap, isNode,
isPair, isScalar, isSeq, Scalar,
visit, visitAsync, YAMLMap, YAMLSeq
} from 'yaml'
```
- [`isAlias(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types)
- [`isCollection(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types)
- [`isMap(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types)
- [`isNode(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types)
- [`isPair(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types)
- [`isScalar(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types)
- [`isSeq(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types)
- [`new Scalar(value)`](https://eemeli.org/yaml/#scalar-values)
- [`new YAMLMap()`](https://eemeli.org/yaml/#collections)
- [`new YAMLSeq()`](https://eemeli.org/yaml/#collections)
- [`doc.createAlias(node, name?): Alias`](https://eemeli.org/yaml/#working-with-anchors)
- [`doc.createAlias(node, name?): Alias`](https://eemeli.org/yaml/#creating-nodes)
- [`doc.createNode(value, options?): Node`](https://eemeli.org/yaml/#creating-nodes)
- [`doc.createPair(key, value): Pair`](https://eemeli.org/yaml/#creating-nodes)
- [`visit(node, visitor)`](https://eemeli.org/yaml/#modifying-nodes)
- [`visit(node, visitor)`](https://eemeli.org/yaml/#finding-and-modifying-nodes)
- [`visitAsync(node, visitor)`](https://eemeli.org/yaml/#finding-and-modifying-nodes)
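The node predicates and `visit` combine naturally; a short sketch, assuming a document parsed with default options:
```js
import { parseDocument, visit } from 'yaml'

const doc = parseDocument('a: 1\nb: [2, 3]')

// Walk the tree and increment every numeric scalar in place
visit(doc, {
  Scalar(key, node) {
    if (typeof node.value === 'number') node.value += 1
  }
})
doc.toString() // 'a: 2\nb: [ 3, 4 ]\n' (flow style is preserved)
```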
### Parsing YAML
```js
import { Composer, Lexer, Parser } from 'yaml'
```
- [`new Lexer().lex(src)`](https://eemeli.org/yaml/#lexer)
- [`new Parser(onNewLine?).parse(src)`](https://eemeli.org/yaml/#parser)
- [`new Composer(options?).compose(tokens)`](https://eemeli.org/yaml/#composer)
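These lower layers compose into the same result as `parseDocument`; a sketch of feeding parser tokens into the composer:
```js
import { Composer, Lexer, Parser } from 'yaml'

const src = 'a: 1\nb: [2, 3]\n'

// Lexical tokens are plain strings
for (const token of new Lexer().lex(src)) console.log(JSON.stringify(token))

// CST tokens from the parser are composed into Document objects
const tokens = new Parser().parse(src)
for (const doc of new Composer().compose(tokens)) console.log(doc.toJS())
// → { a: 1, b: [ 2, 3 ] }
```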
@@ -143,5 +186,5 @@ YAML.stringify({ number: 3, plain: 'string', block: 'two\nlines\n' })
Browser testing provided by:
<a href="https://www.browserstack.com/open-source">
<img width=200 src="https://eemeli.org/yaml/images/browserstack.svg" />
<img width=200 src="https://eemeli.org/yaml/images/browserstack.svg" alt="BrowserStack" />
</a>

node_modules/yaml/bin.mjs (generated, vendored, executable file): 11 lines changed

@@ -0,0 +1,11 @@
#!/usr/bin/env node
import { UserError, cli, help } from './dist/cli.mjs'
cli(process.stdin, error => {
if (error instanceof UserError) {
if (error.code === UserError.ARGS) console.error(`${help}\n`)
console.error(error.message)
process.exitCode = error.code
} else if (error) throw error
})

View File

@@ -23,10 +23,23 @@ function resolveCollection(CN, ctx, token, onError, tagName, tag) {
coll.tag = tagName;
return coll;
}
function composeCollection(CN, ctx, token, tagToken, onError) {
function composeCollection(CN, ctx, token, props, onError) {
const tagToken = props.tag;
const tagName = !tagToken
? null
: ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));
if (token.type === 'block-seq') {
const { anchor, newlineAfterProp: nl } = props;
const lastProp = anchor && tagToken
? anchor.offset > tagToken.offset
? anchor
: tagToken
: (anchor ?? tagToken);
if (lastProp && (!nl || nl.offset < lastProp.offset)) {
const message = 'Missing newline after block sequence props';
onError(lastProp, 'MISSING_CHAR', message);
}
}
const expType = token.type === 'block-map'
? 'map'
: token.type === 'block-seq'
@@ -40,8 +53,7 @@ function composeCollection(CN, ctx, token, tagToken, onError) {
!tagName ||
tagName === '!' ||
(tagName === YAMLMap.tagName && expType === 'map') ||
(tagName === YAMLSeq.tagName && expType === 'seq') ||
!expType) {
(tagName === YAMLSeq.tagName && expType === 'seq')) {
return resolveCollection(CN, ctx, token, onError, tagName);
}
let tag = ctx.schema.tags.find(t => t.tag === tagName && t.collection === expType);
@@ -52,8 +64,8 @@ function composeCollection(CN, ctx, token, tagToken, onError) {
tag = kt;
}
else {
if (kt?.collection) {
onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection}`, true);
if (kt) {
onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection ?? 'scalar'}`, true);
}
else {
onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);

View File

@@ -7,6 +7,7 @@ function composeDoc(options, directives, { offset, start, value, end }, onError)
const opts = Object.assign({ _directives: directives }, options);
const doc = new Document(undefined, opts);
const ctx = {
atKey: false,
atRoot: true,
directives: doc.directives,
options: doc.options,
@@ -17,6 +18,7 @@ function composeDoc(options, directives, { offset, start, value, end }, onError)
next: value ?? end?.[0],
offset,
onError,
parentIndent: 0,
startOnNewline: true
});
if (props.found) {

View File

@@ -1,4 +1,5 @@
import { Alias } from '../nodes/Alias.js';
import { isScalar } from '../nodes/identity.js';
import { composeCollection } from './compose-collection.js';
import { composeScalar } from './compose-scalar.js';
import { resolveEnd } from './resolve-end.js';
@@ -6,6 +7,7 @@ import { emptyScalarPosition } from './util-empty-scalar-position.js';
const CN = { composeNode, composeEmptyNode };
function composeNode(ctx, token, props, onError) {
const atKey = ctx.atKey;
const { spaceBefore, comment, anchor, tag } = props;
let node;
let isSrcToken = true;
@@ -26,7 +28,7 @@ function composeNode(ctx, token, props, onError) {
case 'block-map':
case 'block-seq':
case 'flow-collection':
node = composeCollection(CN, ctx, token, tag, onError);
node = composeCollection(CN, ctx, token, props, onError);
if (anchor)
node.anchor = anchor.source.substring(1);
break;
@@ -41,6 +43,14 @@ function composeNode(ctx, token, props, onError) {
}
if (anchor && node.anchor === '')
onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
if (atKey &&
ctx.options.stringKeys &&
(!isScalar(node) ||
typeof node.value !== 'string' ||
(node.tag && node.tag !== 'tag:yaml.org,2002:str'))) {
const msg = 'With stringKeys, all keys must be strings';
onError(tag ?? token, 'NON_STRING_KEY', msg);
}
if (spaceBefore)
node.spaceBefore = true;
if (comment) {

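The `stringKeys` guard added above is driven by a new document option; a rough sketch of its observable effect (option and error-code names taken from this diff, exact behaviour not verified here):

```js
import { parse } from 'yaml'

// Keys are composed with the plain string tag, so a numeric-looking key
// stays a string even when maps are returned as Map instances.
parse('1: one', { stringKeys: true, mapAsMap: true })
// → Map { '1' => 'one' }  (without stringKeys the key would be the number 1)

// Non-scalar keys (e.g. a flow map used as a key) are reported as NON_STRING_KEY errors.
```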
View File

@@ -1,20 +1,25 @@
import { SCALAR, isScalar } from '../nodes/identity.js';
import { isScalar, SCALAR } from '../nodes/identity.js';
import { Scalar } from '../nodes/Scalar.js';
import { resolveBlockScalar } from './resolve-block-scalar.js';
import { resolveFlowScalar } from './resolve-flow-scalar.js';
function composeScalar(ctx, token, tagToken, onError) {
const { value, type, comment, range } = token.type === 'block-scalar'
? resolveBlockScalar(token, ctx.options.strict, onError)
? resolveBlockScalar(ctx, token, onError)
: resolveFlowScalar(token, ctx.options.strict, onError);
const tagName = tagToken
? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))
: null;
const tag = tagToken && tagName
? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)
: token.type === 'scalar'
? findScalarTagByTest(ctx, value, token, onError)
: ctx.schema[SCALAR];
let tag;
if (ctx.options.stringKeys && ctx.atKey) {
tag = ctx.schema[SCALAR];
}
else if (tagName)
tag = findScalarTagByName(ctx.schema, value, tagName, tagToken, onError);
else if (token.type === 'scalar')
tag = findScalarTagByTest(ctx, value, token, onError);
else
tag = ctx.schema[SCALAR];
let scalar;
try {
const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options);
@@ -62,8 +67,9 @@ function findScalarTagByName(schema, value, tagName, tagToken, onError) {
onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');
return schema[SCALAR];
}
function findScalarTagByTest({ directives, schema }, value, token, onError) {
const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[SCALAR];
function findScalarTagByTest({ atKey, directives, schema }, value, token, onError) {
const tag = schema.tags.find(tag => (tag.default === true || (atKey && tag.default === 'key')) &&
tag.test?.test(value)) || schema[SCALAR];
if (schema.compat) {
const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ??
schema[SCALAR];

View File

@@ -21,6 +21,7 @@ function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, ta
next: key ?? sep?.[0],
offset,
onError,
parentIndent: bm.indent,
startOnNewline: true
});
const implicitKey = !keyProps.found;
@@ -41,7 +42,7 @@ function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, ta
}
continue;
}
if (keyProps.hasNewlineAfterProp || containsNewline(key)) {
if (keyProps.newlineAfterProp || containsNewline(key)) {
onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');
}
}
@@ -49,12 +50,14 @@ function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, ta
onError(offset, 'BAD_INDENT', startColMsg);
}
// key value
ctx.atKey = true;
const keyStart = keyProps.end;
const keyNode = key
? composeNode(ctx, key, keyProps, onError)
: composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);
if (ctx.schema.compat)
flowIndentCheck(bm.indent, key, onError);
ctx.atKey = false;
if (mapIncludes(ctx, map.items, keyNode))
onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
// value properties
@@ -63,6 +66,7 @@ function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, ta
next: value,
offset: keyNode.range[2],
onError,
parentIndent: bm.indent,
startOnNewline: !key || key.type === 'block-scalar'
});
offset = valueProps.end;

View File

@@ -1,8 +1,8 @@
import { Scalar } from '../nodes/Scalar.js';
function resolveBlockScalar(scalar, strict, onError) {
function resolveBlockScalar(ctx, scalar, onError) {
const start = scalar.offset;
const header = parseBlockScalarHeader(scalar, strict, onError);
const header = parseBlockScalarHeader(scalar, ctx.options.strict, onError);
if (!header)
return { value: '', type: null, comment: '', range: [start, start, start] };
const type = header.mode === '>' ? Scalar.BLOCK_FOLDED : Scalar.BLOCK_LITERAL;
@@ -44,6 +44,10 @@ function resolveBlockScalar(scalar, strict, onError) {
if (header.indent === 0)
trimIndent = indent.length;
contentStart = i;
if (trimIndent === 0 && !ctx.atRoot) {
const message = 'Block scalar values in collections must be indented';
onError(offset, 'BAD_INDENT', message);
}
break;
}
offset += indent.length + content.length + 1;

View File

@@ -7,6 +7,8 @@ function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, ta
const seq = new NodeClass(ctx.schema);
if (ctx.atRoot)
ctx.atRoot = false;
if (ctx.atKey)
ctx.atKey = false;
let offset = bs.offset;
let commentEnd = null;
for (const { start, value } of bs.items) {
@@ -15,6 +17,7 @@ function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, ta
next: value,
offset,
onError,
parentIndent: bs.indent,
startOnNewline: true
});
if (!props.found) {

View File

@@ -18,6 +18,8 @@ function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onErr
const atRoot = ctx.atRoot;
if (atRoot)
ctx.atRoot = false;
if (ctx.atKey)
ctx.atKey = false;
let offset = fc.offset + fc.start.source.length;
for (let i = 0; i < fc.items.length; ++i) {
const collItem = fc.items[i];
@@ -28,6 +30,7 @@ function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onErr
next: key ?? sep?.[0],
offset,
onError,
parentIndent: fc.indent,
startOnNewline: false
});
if (!props.found) {
@@ -96,12 +99,14 @@ function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onErr
else {
// item is a key+value pair
// key value
ctx.atKey = true;
const keyStart = props.end;
const keyNode = key
? composeNode(ctx, key, props, onError)
: composeEmptyNode(ctx, keyStart, start, null, props, onError);
if (isBlock(key))
onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);
ctx.atKey = false;
// value properties
const valueProps = resolveProps(sep ?? [], {
flow: fcName,
@@ -109,6 +114,7 @@ function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onErr
next: value,
offset: keyNode.range[2],
onError,
parentIndent: fc.indent,
startOnNewline: false
});
if (valueProps.found) {
@@ -161,6 +167,8 @@ function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onErr
const map = new YAMLMap(ctx.schema);
map.flow = true;
map.items.push(pair);
const endRange = (valueNode ?? keyNode).range;
map.range = [keyNode.range[0], endRange[1], endRange[2]];
coll.items.push(map);
}
offset = valueNode ? valueNode.range[2] : valueProps.end;

View File

@@ -84,7 +84,7 @@ function foldLines(source) {
first = new RegExp('(.*?)(?<![ \t])[ \t]*\r?\n', 'sy');
line = new RegExp('[ \t]*(.*?)(?:(?<![ \t])[ \t]*)?\r?\n', 'sy');
}
catch (_) {
catch {
first = /(.*?)[ \t]*\r?\n/sy;
line = /[ \t]*(.*?)[ \t]*\r?\n/sy;
}
@@ -189,19 +189,19 @@ function foldNewline(source, offset) {
return { fold, offset };
}
const escapeCodes = {
'0': '\0',
a: '\x07',
b: '\b',
e: '\x1b',
f: '\f',
n: '\n',
r: '\r',
t: '\t',
v: '\v',
N: '\u0085',
_: '\u00a0',
L: '\u2028',
P: '\u2029',
'0': '\0', // null character
a: '\x07', // bell character
b: '\b', // backspace
e: '\x1b', // escape character
f: '\f', // form feed
n: '\n', // line feed
r: '\r', // carriage return
t: '\t', // horizontal tab
v: '\v', // vertical tab
N: '\u0085', // Unicode next line
_: '\u00a0', // Unicode non-breaking space
L: '\u2028', // Unicode line separator
P: '\u2029', // Unicode paragraph separator
' ': ' ',
'"': '"',
'/': '/',

View File

@@ -1,14 +1,15 @@
function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) {
function resolveProps(tokens, { flow, indicator, next, offset, onError, parentIndent, startOnNewline }) {
let spaceBefore = false;
let atNewline = startOnNewline;
let hasSpace = startOnNewline;
let comment = '';
let commentSep = '';
let hasNewline = false;
let hasNewlineAfterProp = false;
let reqSpace = false;
let tab = null;
let anchor = null;
let tag = null;
let newlineAfterProp = null;
let comma = null;
let found = null;
let start = null;
@@ -20,16 +21,22 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
reqSpace = false;
}
if (tab) {
if (atNewline && token.type !== 'comment' && token.type !== 'newline') {
onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
}
tab = null;
}
switch (token.type) {
case 'space':
// At the doc level, tabs at line start may be parsed
// as leading white space rather than indentation.
// In a flow collection, only the parser handles indent.
if (!flow &&
atNewline &&
indicator !== 'doc-start' &&
token.source[0] === '\t')
onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
(indicator !== 'doc-start' || next?.type !== 'flow-collection') &&
token.source.includes('\t')) {
tab = token;
}
hasSpace = true;
break;
case 'comment': {
@@ -48,7 +55,7 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
if (atNewline) {
if (comment)
comment += token.source;
else
else if (!found || indicator !== 'seq-item-ind')
spaceBefore = true;
}
else
@@ -56,7 +63,7 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
atNewline = true;
hasNewline = true;
if (anchor || tag)
hasNewlineAfterProp = true;
newlineAfterProp = token;
hasSpace = true;
break;
case 'anchor':
@@ -65,8 +72,7 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
if (token.source.endsWith(':'))
onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);
anchor = token;
if (start === null)
start = token.offset;
start ?? (start = token.offset);
atNewline = false;
hasSpace = false;
reqSpace = true;
@@ -75,8 +81,7 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
if (tag)
onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');
tag = token;
if (start === null)
start = token.offset;
start ?? (start = token.offset);
atNewline = false;
hasSpace = false;
reqSpace = true;
@@ -89,7 +94,8 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
if (found)
onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`);
found = token;
atNewline = false;
atNewline =
indicator === 'seq-item-ind' || indicator === 'explicit-key-ind';
hasSpace = false;
break;
case 'comma':
@@ -115,17 +121,23 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
next.type !== 'space' &&
next.type !== 'newline' &&
next.type !== 'comma' &&
(next.type !== 'scalar' || next.source !== ''))
(next.type !== 'scalar' || next.source !== '')) {
onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
}
if (tab &&
((atNewline && tab.indent <= parentIndent) ||
next?.type === 'block-map' ||
next?.type === 'block-seq'))
onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
return {
comma,
found,
spaceBefore,
comment,
hasNewline,
hasNewlineAfterProp,
anchor,
tag,
newlineAfterProp,
end,
start: start ?? end
};

View File

@@ -1,7 +1,6 @@
function emptyScalarPosition(offset, before, pos) {
if (before) {
if (pos === null)
pos = before.length;
pos ?? (pos = before.length);
for (let i = pos - 1; i >= 0; --i) {
let st = before[i];
switch (st.type) {

View File

@@ -6,11 +6,7 @@ function mapIncludes(ctx, items, search) {
return false;
const isEqual = typeof uniqueKeys === 'function'
? uniqueKeys
: (a, b) => a === b ||
(isScalar(a) &&
isScalar(b) &&
a.value === b.value &&
!(a.value === '<<' && ctx.schema.merge));
: (a, b) => a === b || (isScalar(a) && isScalar(b) && a.value === b.value);
return items.some(pair => isEqual(pair.key, search));
}

View File

@@ -35,6 +35,7 @@ class Document {
logLevel: 'warn',
prettyErrors: true,
strict: true,
stringKeys: false,
uniqueKeys: true,
version: '1.2'
}, options);
@@ -258,7 +259,7 @@ class Document {
this.directives.yaml.version = '1.1';
else
this.directives = new Directives({ version: '1.1' });
opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };
opt = { resolveKnownTags: false, schema: 'yaml-1.1' };
break;
case '1.2':
case 'next':
@@ -266,7 +267,7 @@ class Document {
this.directives.yaml.version = version;
else
this.directives = new Directives({ version });
opt = { merge: false, resolveKnownTags: true, schema: 'core' };
opt = { resolveKnownTags: true, schema: 'core' };
break;
case null:
if (this.directives)

View File

@@ -39,8 +39,7 @@ function createNodeAnchors(doc, prefix) {
return {
onAnchor: (source) => {
aliasObjects.push(source);
if (!prevAnchors)
prevAnchors = anchorNames(doc);
prevAnchors ?? (prevAnchors = anchorNames(doc));
const anchor = findNewAnchor(prefix, prevAnchors);
prevAnchors.add(anchor);
return anchor;

View File

@@ -11,6 +11,7 @@ function applyReviver(reviver, obj, key, val) {
for (let i = 0, len = val.length; i < len; ++i) {
const v0 = val[i];
const v1 = applyReviver(reviver, val, String(i), v0);
// eslint-disable-next-line @typescript-eslint/no-array-delete
if (v1 === undefined)
delete val[i];
else if (v1 !== v0)

View File

@@ -38,8 +38,7 @@ function createNode(value, tagName, ctx) {
if (aliasDuplicateObjects && value && typeof value === 'object') {
ref = sourceObjects.get(value);
if (ref) {
if (!ref.anchor)
ref.anchor = onAnchor(value);
ref.anchor ?? (ref.anchor = onAnchor(value));
return new Alias(ref.anchor);
}
else {

View File

@@ -4,12 +4,7 @@ function debug(logLevel, ...messages) {
}
function warn(logLevel, warning) {
if (logLevel === 'debug' || logLevel === 'warn') {
// https://github.com/typescript-eslint/typescript-eslint/issues/7478
// eslint-disable-next-line @typescript-eslint/prefer-optional-chain
if (typeof process !== 'undefined' && process.emitWarning)
process.emitWarning(warning);
else
console.warn(warning);
console.warn(warning);
}
}

View File

@@ -1,21 +0,0 @@
/******************************************************************************
Copyright (c) Microsoft Corporation.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */
function __classPrivateFieldGet(receiver, state, kind, f) {
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
}
export { __classPrivateFieldGet };

View File

@@ -1,6 +1,6 @@
import { anchorIsValid } from '../doc/anchors.js';
import { visit } from '../visit.js';
import { ALIAS, isAlias, isCollection, isPair } from './identity.js';
import { ALIAS, isAlias, isCollection, isPair, hasAnchor } from './identity.js';
import { NodeBase } from './Node.js';
import { toJS } from './toJS.js';
@@ -18,23 +18,36 @@ class Alias extends NodeBase {
* Resolve the value of this alias within `doc`, finding the last
* instance of the `source` anchor before this node.
*/
resolve(doc) {
resolve(doc, ctx) {
let nodes;
if (ctx?.aliasResolveCache) {
nodes = ctx.aliasResolveCache;
}
else {
nodes = [];
visit(doc, {
Node: (_key, node) => {
if (isAlias(node) || hasAnchor(node))
nodes.push(node);
}
});
if (ctx)
ctx.aliasResolveCache = nodes;
}
let found = undefined;
visit(doc, {
Node: (_key, node) => {
if (node === this)
return visit.BREAK;
if (node.anchor === this.source)
found = node;
}
});
for (const node of nodes) {
if (node === this)
break;
if (node.anchor === this.source)
found = node;
}
return found;
}
toJSON(_arg, ctx) {
if (!ctx)
return { source: this.source };
const { anchors, doc, maxAliasCount } = ctx;
const source = this.resolve(doc);
const source = this.resolve(doc, ctx);
if (!source) {
const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
throw new ReferenceError(msg);

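The cached `resolve()` above only changes how aliases are looked up internally; from the public API, anchor and alias resolution still looks like this (a small sketch):

```js
import { parse, parseDocument } from 'yaml'

// *x resolves to the last &x anchor defined before the alias
parse('a: &x 1\nb: *x')                 // → { a: 1, b: 1 }

// At the Document layer the alias node keeps only the anchor name
// and is resolved against the document when converted to JS
parseDocument('a: &x 1\nb: *x').toJS()  // → { a: 1, b: 1 }
```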
View File

@@ -143,6 +143,5 @@ class Collection extends NodeBase {
}
}
}
Collection.maxFlowStringSingleLineLength = 60;
export { Collection, collectionFromPath, isEmptyPath };

View File

@@ -1,7 +1,7 @@
import { stringifyCollection } from '../stringify/stringifyCollection.js';
import { addPairToJSMap } from './addPairToJSMap.js';
import { Collection } from './Collection.js';
import { isPair, isScalar, MAP } from './identity.js';
import { MAP, isPair, isScalar } from './identity.js';
import { Pair, createPair } from './Pair.js';
import { isScalarValue } from './Scalar.js';

View File

@@ -1,22 +1,15 @@
import { warn } from '../log.js';
import { isMergeKey, addMergeToJSMap } from '../schema/yaml-1.1/merge.js';
import { createStringifyContext } from '../stringify/stringify.js';
import { isAlias, isSeq, isScalar, isMap, isNode } from './identity.js';
import { Scalar } from './Scalar.js';
import { isNode } from './identity.js';
import { toJS } from './toJS.js';
const MERGE_KEY = '<<';
function addPairToJSMap(ctx, map, { key, value }) {
if (ctx?.doc.schema.merge && isMergeKey(key)) {
value = isAlias(value) ? value.resolve(ctx.doc) : value;
if (isSeq(value))
for (const it of value.items)
mergeToJSMap(ctx, map, it);
else if (Array.isArray(value))
for (const it of value)
mergeToJSMap(ctx, map, it);
else
mergeToJSMap(ctx, map, value);
}
if (isNode(key) && key.addToJSMap)
key.addToJSMap(ctx, map, value);
// TODO: Should drop this special case for bare << handling
else if (isMergeKey(ctx, key))
addMergeToJSMap(ctx, map, value);
else {
const jsKey = toJS(key, '', ctx);
if (map instanceof Map) {
@@ -41,44 +34,10 @@ function addPairToJSMap(ctx, map, { key, value }) {
}
return map;
}
const isMergeKey = (key) => key === MERGE_KEY ||
(isScalar(key) &&
key.value === MERGE_KEY &&
(!key.type || key.type === Scalar.PLAIN));
// If the value associated with a merge key is a single mapping node, each of
// its key/value pairs is inserted into the current mapping, unless the key
// already exists in it. If the value associated with the merge key is a
// sequence, then this sequence is expected to contain mapping nodes and each
// of these nodes is merged in turn according to its order in the sequence.
// Keys in mapping nodes earlier in the sequence override keys specified in
// later mapping nodes. -- http://yaml.org/type/merge.html
function mergeToJSMap(ctx, map, value) {
const source = ctx && isAlias(value) ? value.resolve(ctx.doc) : value;
if (!isMap(source))
throw new Error('Merge sources must be maps or map aliases');
const srcMap = source.toJSON(null, ctx, Map);
for (const [key, value] of srcMap) {
if (map instanceof Map) {
if (!map.has(key))
map.set(key, value);
}
else if (map instanceof Set) {
map.add(key);
}
else if (!Object.prototype.hasOwnProperty.call(map, key)) {
Object.defineProperty(map, key, {
value,
writable: true,
enumerable: true,
configurable: true
});
}
}
return map;
}
function stringifyKey(key, jsKey, ctx) {
if (jsKey === null)
return '';
// eslint-disable-next-line @typescript-eslint/no-base-to-string
if (typeof jsKey !== 'object')
return String(jsKey);
if (isNode(key) && ctx?.doc) {

View File

@@ -18,7 +18,7 @@ function resolveAsScalar(token, strict = true, onError) {
case 'double-quoted-scalar':
return resolveFlowScalar(token, strict, _onError);
case 'block-scalar':
return resolveBlockScalar(token, strict, _onError);
return resolveBlockScalar({ options: { strict } }, token, _onError);
}
}
return null;

View File

@@ -79,11 +79,11 @@ function isEmpty(ch) {
return false;
}
}
const hexDigits = '0123456789ABCDEFabcdef'.split('');
const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split('');
const invalidFlowScalarChars = ',[]{}'.split('');
const invalidAnchorChars = ' ,[]{}\n\r\t'.split('');
const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);
const hexDigits = new Set('0123456789ABCDEFabcdef');
const tagChars = new Set("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()");
const flowIndicatorChars = new Set(',[]{}');
const invalidAnchorChars = new Set(' ,[]{}\n\r\t');
const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.has(ch);
/**
* Splits an input string into lexical tokens, i.e. smaller strings that are
* easily identifiable by `tokens.tokenType()`.
@@ -149,6 +149,8 @@ class Lexer {
*/
*lex(source, incomplete = false) {
if (source) {
if (typeof source !== 'string')
throw TypeError('source is not a string');
this.buffer = this.buffer ? this.buffer + source : source;
this.lineEndPos = null;
}
@@ -248,11 +250,16 @@ class Lexer {
}
if (line[0] === '%') {
let dirEnd = line.length;
const cs = line.indexOf('#');
if (cs !== -1) {
let cs = line.indexOf('#');
while (cs !== -1) {
const ch = line[cs - 1];
if (ch === ' ' || ch === '\t')
if (ch === ' ' || ch === '\t') {
dirEnd = cs - 1;
break;
}
else {
cs = line.indexOf('#', cs + 1);
}
}
while (true) {
const ch = line[dirEnd - 1];
@@ -283,15 +290,11 @@ class Lexer {
if (!this.atEnd && !this.hasChars(4))
return this.setNext('line-start');
const s = this.peek(3);
if (s === '---' && isEmpty(this.charAt(3))) {
if ((s === '---' || s === '...') && isEmpty(this.charAt(3))) {
yield* this.pushCount(3);
this.indentValue = 0;
this.indentNext = 0;
return 'doc';
}
else if (s === '...' && isEmpty(this.charAt(3))) {
yield* this.pushCount(3);
return 'stream';
return s === '---' ? 'doc' : 'stream';
}
}
this.indentValue = yield* this.pushSpaces(false);
@@ -518,8 +521,10 @@ class Lexer {
if (indent >= this.indentNext) {
if (this.blockScalarIndent === -1)
this.indentNext = indent;
else
this.indentNext += this.blockScalarIndent;
else {
this.indentNext =
this.blockScalarIndent + (this.indentNext === 0 ? 1 : this.indentNext);
}
do {
const cs = this.continueScalar(nl + 1);
if (cs === -1)
@@ -532,14 +537,25 @@ class Lexer {
nl = this.buffer.length;
}
}
if (!this.blockScalarKeep) {
// Trailing insufficiently indented tabs are invalid.
// To catch that during parsing, we include them in the block scalar value.
let i = nl + 1;
ch = this.buffer[i];
while (ch === ' ')
ch = this.buffer[++i];
if (ch === '\t') {
while (ch === '\t' || ch === ' ' || ch === '\r' || ch === '\n')
ch = this.buffer[++i];
nl = i - 1;
}
else if (!this.blockScalarKeep) {
do {
let i = nl - 1;
let ch = this.buffer[i];
if (ch === '\r')
ch = this.buffer[--i];
const lastChar = i; // Drop the line if last char not more indented
while (ch === ' ' || ch === '\t')
while (ch === ' ')
ch = this.buffer[--i];
if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar)
nl = i;
@@ -559,7 +575,7 @@ class Lexer {
while ((ch = this.buffer[++i])) {
if (ch === ':') {
const next = this.buffer[i + 1];
if (isEmpty(next) || (inFlow && next === ','))
if (isEmpty(next) || (inFlow && flowIndicatorChars.has(next)))
break;
end = i;
}
@@ -574,7 +590,7 @@ class Lexer {
else
end = i;
}
if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))
if (next === '#' || (inFlow && flowIndicatorChars.has(next)))
break;
if (ch === '\n') {
const cs = this.continueScalar(i + 1);
@@ -584,7 +600,7 @@ class Lexer {
}
}
else {
if (inFlow && invalidFlowScalarChars.includes(ch))
if (inFlow && flowIndicatorChars.has(ch))
break;
end = i;
}
@@ -629,7 +645,7 @@ class Lexer {
case ':': {
const inFlow = this.flowLevel > 0;
const ch1 = this.charAt(1);
if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) {
if (isEmpty(ch1) || (inFlow && flowIndicatorChars.has(ch1))) {
if (!inFlow)
this.indentNext = this.indentValue + 1;
else if (this.flowKey)
@@ -654,11 +670,11 @@ class Lexer {
let i = this.pos + 1;
let ch = this.buffer[i];
while (ch) {
if (tagChars.includes(ch))
if (tagChars.has(ch))
ch = this.buffer[++i];
else if (ch === '%' &&
hexDigits.includes(this.buffer[i + 1]) &&
hexDigits.includes(this.buffer[i + 2])) {
hexDigits.has(this.buffer[i + 1]) &&
hexDigits.has(this.buffer[i + 2])) {
ch = this.buffer[(i += 3)];
}
else

View File

@@ -304,7 +304,7 @@ class Parser {
}
else {
Object.assign(it, { key: token, sep: [] });
this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');
this.onKeyLine = !it.explicitKey;
return;
}
break;
@@ -513,7 +513,10 @@ class Parser {
return;
}
if (this.indent >= map.indent) {
const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep;
const atMapIndent = !this.onKeyLine && this.indent === map.indent;
const atNextItem = atMapIndent &&
(it.sep || it.explicitKey) &&
this.type !== 'seq-item-ind';
// For empty nodes, assign newline-separated not indented empty tokens to following node
let start = [];
if (atNextItem && it.sep && !it.value) {
@@ -553,25 +556,26 @@ class Parser {
}
return;
case 'explicit-key-ind':
if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) {
if (!it.sep && !it.explicitKey) {
it.start.push(this.sourceToken);
it.explicitKey = true;
}
else if (atNextItem || it.value) {
start.push(this.sourceToken);
map.items.push({ start });
map.items.push({ start, explicitKey: true });
}
else {
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start: [this.sourceToken] }]
items: [{ start: [this.sourceToken], explicitKey: true }]
});
}
this.onKeyLine = true;
return;
case 'map-value-ind':
if (includesToken(it.start, 'explicit-key-ind')) {
if (it.explicitKey) {
if (!it.sep) {
if (includesToken(it.start, 'newline')) {
Object.assign(it, { key: null, sep: [this.sourceToken] });
@@ -604,7 +608,9 @@ class Parser {
const sep = it.sep;
sep.push(this.sourceToken);
// @ts-expect-error type guard is wrong here
delete it.key, delete it.sep;
delete it.key;
// @ts-expect-error type guard is wrong here
delete it.sep;
this.stack.push({
type: 'block-map',
offset: this.offset,
@@ -662,9 +668,20 @@ class Parser {
default: {
const bv = this.startBlockValue(map);
if (bv) {
if (atNextItem &&
bv.type !== 'block-seq' &&
includesToken(it.start, 'explicit-key-ind')) {
if (bv.type === 'block-seq') {
if (!it.explicitKey &&
it.sep &&
!includesToken(it.sep, 'newline')) {
yield* this.pop({
type: 'error',
offset: this.offset,
message: 'Unexpected block-seq-ind on same line with key',
source: this.source
});
return;
}
}
else if (atMapIndent) {
map.items.push({ start });
}
this.stack.push(bv);
@@ -885,7 +902,7 @@ class Parser {
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start }]
items: [{ start, explicitKey: true }]
};
}
case 'map-value-ind': {

View File

@@ -2,6 +2,7 @@ import { Composer } from './compose/composer.js';
import { Document } from './doc/Document.js';
import { prettifyError, YAMLParseError } from './errors.js';
import { warn } from './log.js';
import { isDocument } from './nodes/identity.js';
import { LineCounter } from './parse/line-counter.js';
import { Parser } from './parse/parser.js';
@@ -93,6 +94,8 @@ function stringify(value, replacer, options) {
if (!keepUndefined)
return undefined;
}
if (isDocument(value) && !_replacer)
return value.toString(options);
return new Document(value, _replacer, options).toString(options);
}

View File

@@ -12,10 +12,9 @@ class Schema {
: compat
? getTags(null, compat)
: null;
this.merge = !!merge;
this.name = (typeof schema === 'string' && schema) || 'core';
this.knownTags = resolveKnownTags ? coreKnownTags : {};
this.tags = getTags(customTags, this.name);
this.tags = getTags(customTags, this.name, merge);
this.toStringOptions = toStringDefaults ?? null;
Object.defineProperty(this, MAP, { value: map });
Object.defineProperty(this, SCALAR, { value: string });

View File

@@ -5,7 +5,7 @@ const floatNaN = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^(?:[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN))$/,
test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/,
resolve: str => str.slice(-3).toLowerCase() === 'nan'
? NaN
: str[0] === '-'

View File

@@ -27,7 +27,7 @@ const jsonScalars = [
identify: value => typeof value === 'boolean',
default: true,
tag: 'tag:yaml.org,2002:bool',
test: /^true|false$/,
test: /^true$|^false$/,
resolve: str => str === 'true',
stringify: stringifyJSON
},

View File

@@ -3,16 +3,17 @@ import { nullTag } from './common/null.js';
import { seq } from './common/seq.js';
import { string } from './common/string.js';
import { boolTag } from './core/bool.js';
import { float, floatExp, floatNaN } from './core/float.js';
import { int, intHex, intOct } from './core/int.js';
import { floatNaN, floatExp, float } from './core/float.js';
import { intOct, intHex, int } from './core/int.js';
import { schema } from './core/schema.js';
import { schema as schema$1 } from './json/schema.js';
import { binary } from './yaml-1.1/binary.js';
import { merge } from './yaml-1.1/merge.js';
import { omap } from './yaml-1.1/omap.js';
import { pairs } from './yaml-1.1/pairs.js';
import { schema as schema$2 } from './yaml-1.1/schema.js';
import { set } from './yaml-1.1/set.js';
import { timestamp, floatTime, intTime } from './yaml-1.1/timestamp.js';
import { timestamp, intTime, floatTime } from './yaml-1.1/timestamp.js';
const schemas = new Map([
['core', schema],
@@ -33,6 +34,7 @@ const tagsByName = {
intOct,
intTime,
map,
merge,
null: nullTag,
omap,
pairs,
@@ -42,13 +44,20 @@ const tagsByName = {
};
const coreKnownTags = {
'tag:yaml.org,2002:binary': binary,
'tag:yaml.org,2002:merge': merge,
'tag:yaml.org,2002:omap': omap,
'tag:yaml.org,2002:pairs': pairs,
'tag:yaml.org,2002:set': set,
'tag:yaml.org,2002:timestamp': timestamp
};
function getTags(customTags, schemaName) {
let tags = schemas.get(schemaName);
function getTags(customTags, schemaName, addMergeTag) {
const schemaTags = schemas.get(schemaName);
if (schemaTags && !customTags) {
return addMergeTag && !schemaTags.includes(merge)
? schemaTags.concat(merge)
: schemaTags.slice();
}
let tags = schemaTags;
if (!tags) {
if (Array.isArray(customTags))
tags = [];
@@ -67,17 +76,21 @@ function getTags(customTags, schemaName) {
else if (typeof customTags === 'function') {
tags = customTags(tags.slice());
}
return tags.map(tag => {
if (typeof tag !== 'string')
return tag;
const tagObj = tagsByName[tag];
if (tagObj)
return tagObj;
const keys = Object.keys(tagsByName)
.map(key => JSON.stringify(key))
.join(', ');
throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`);
});
if (addMergeTag)
tags = tags.concat(merge);
return tags.reduce((tags, tag) => {
const tagObj = typeof tag === 'string' ? tagsByName[tag] : tag;
if (!tagObj) {
const tagName = JSON.stringify(tag);
const keys = Object.keys(tagsByName)
.map(key => JSON.stringify(key))
.join(', ');
throw new Error(`Unknown custom tag ${tagName}; use one of ${keys}`);
}
if (!tags.includes(tagObj))
tags.push(tagObj);
return tags;
}, []);
}
export { coreKnownTags, getTags };

View File

@@ -2,7 +2,7 @@ import { Scalar } from '../../nodes/Scalar.js';
import { stringifyString } from '../../stringify/stringifyString.js';
const binary = {
identify: value => value instanceof Uint8Array,
identify: value => value instanceof Uint8Array, // Buffer inherits from Uint8Array
default: false,
tag: 'tag:yaml.org,2002:binary',
/**
@@ -14,10 +14,7 @@ const binary = {
* document.querySelector('#photo').src = URL.createObjectURL(blob)
*/
resolve(src, onError) {
if (typeof Buffer === 'function') {
return Buffer.from(src, 'base64');
}
else if (typeof atob === 'function') {
if (typeof atob === 'function') {
// On IE 11, atob() can't handle newlines
const str = atob(src.replace(/[\n\r]/g, ''));
const buffer = new Uint8Array(str.length);
@@ -31,15 +28,11 @@ const binary = {
}
},
stringify({ comment, type, value }, ctx, onComment, onChompKeep) {
if (!value)
return '';
const buf = value; // checked earlier by binary.identify()
let str;
if (typeof Buffer === 'function') {
str =
buf instanceof Buffer
? buf.toString('base64')
: Buffer.from(buf.buffer).toString('base64');
}
else if (typeof btoa === 'function') {
if (typeof btoa === 'function') {
let s = '';
for (let i = 0; i < buf.length; ++i)
s += String.fromCharCode(buf[i]);
@@ -48,8 +41,7 @@ const binary = {
else {
throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');
}
if (!type)
type = Scalar.BLOCK_LITERAL;
type ?? (type = Scalar.BLOCK_LITERAL);
if (type !== Scalar.QUOTE_DOUBLE) {
const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);
const n = Math.ceil(str.length / lineWidth);

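With the Buffer-specific branch removed above, `!!binary` now resolves through `atob` to a `Uint8Array` in every environment; a small sketch (default options resolve the known `!!binary` tag):

```js
import { parse } from 'yaml'

parse('!!binary "aGVsbG8="')
// → Uint8Array(5) [ 104, 101, 108, 108, 111 ]  (the bytes of 'hello')
```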
View File

@@ -18,7 +18,7 @@ const falseTag = {
identify: value => value === false,
default: true,
tag: 'tag:yaml.org,2002:bool',
test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i,
test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/,
resolve: () => new Scalar(false),
stringify: boolStringify
};

View File

@@ -5,7 +5,7 @@ const floatNaN = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN)$/,
test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/,
resolve: (str) => str.slice(-3).toLowerCase() === 'nan'
? NaN
: str[0] === '-'

View File

@@ -0,0 +1,64 @@
import { isScalar, isAlias, isSeq, isMap } from '../../nodes/identity.js';
import { Scalar } from '../../nodes/Scalar.js';
// If the value associated with a merge key is a single mapping node, each of
// its key/value pairs is inserted into the current mapping, unless the key
// already exists in it. If the value associated with the merge key is a
// sequence, then this sequence is expected to contain mapping nodes and each
// of these nodes is merged in turn according to its order in the sequence.
// Keys in mapping nodes earlier in the sequence override keys specified in
// later mapping nodes. -- http://yaml.org/type/merge.html
const MERGE_KEY = '<<';
const merge = {
identify: value => value === MERGE_KEY ||
(typeof value === 'symbol' && value.description === MERGE_KEY),
default: 'key',
tag: 'tag:yaml.org,2002:merge',
test: /^<<$/,
resolve: () => Object.assign(new Scalar(Symbol(MERGE_KEY)), {
addToJSMap: addMergeToJSMap
}),
stringify: () => MERGE_KEY
};
const isMergeKey = (ctx, key) => (merge.identify(key) ||
(isScalar(key) &&
(!key.type || key.type === Scalar.PLAIN) &&
merge.identify(key.value))) &&
ctx?.doc.schema.tags.some(tag => tag.tag === merge.tag && tag.default);
function addMergeToJSMap(ctx, map, value) {
value = ctx && isAlias(value) ? value.resolve(ctx.doc) : value;
if (isSeq(value))
for (const it of value.items)
mergeValue(ctx, map, it);
else if (Array.isArray(value))
for (const it of value)
mergeValue(ctx, map, it);
else
mergeValue(ctx, map, value);
}
function mergeValue(ctx, map, value) {
const source = ctx && isAlias(value) ? value.resolve(ctx.doc) : value;
if (!isMap(source))
throw new Error('Merge sources must be maps or map aliases');
const srcMap = source.toJSON(null, ctx, Map);
for (const [key, value] of srcMap) {
if (map instanceof Map) {
if (!map.has(key))
map.set(key, value);
}
else if (map instanceof Set) {
map.add(key);
}
else if (!Object.prototype.hasOwnProperty.call(map, key)) {
Object.defineProperty(map, key, {
value,
writable: true,
enumerable: true,
configurable: true
});
}
}
return map;
}
export { addMergeToJSMap, isMergeKey, merge };
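The merge-key semantics described in the comment above can be exercised from the public API; a sketch assuming the YAML 1.1 schema (which this upgrade wires the `merge` tag into), or the `merge: true` option for the default schema:

```js
import { parse } from 'yaml'

const src = `
base: &base { x: 1, y: 2 }
derived:
  <<: *base
  y: 3
`

// Keys set directly on the mapping win over keys pulled in via <<
parse(src, { version: '1.1' })
// → { base: { x: 1, y: 2 }, derived: { x: 1, y: 3 } }

// parse(src, { merge: true }) enables the same behaviour with the default 1.2 schema
```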

View File

@@ -1,5 +1,5 @@
import { isSeq, isPair, isMap } from '../../nodes/identity.js';
import { Pair, createPair } from '../../nodes/Pair.js';
import { createPair, Pair } from '../../nodes/Pair.js';
import { Scalar } from '../../nodes/Scalar.js';
import { YAMLSeq } from '../../nodes/YAMLSeq.js';

View File

@@ -6,6 +6,7 @@ import { binary } from './binary.js';
import { trueTag, falseTag } from './bool.js';
import { floatNaN, floatExp, float } from './float.js';
import { intBin, intOct, int, intHex } from './int.js';
import { merge } from './merge.js';
import { omap } from './omap.js';
import { pairs } from './pairs.js';
import { set } from './set.js';
@@ -26,6 +27,7 @@ const schema = [
floatExp,
float,
binary,
merge,
omap,
pairs,
set,

View File

@@ -95,7 +95,7 @@ const timestamp = {
}
return new Date(date);
},
stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '')
stringify: ({ value }) => value?.toISOString().replace(/(T00:00:00)?\.000Z$/, '') ?? ''
};
export { floatTime, intTime, timestamp };

View File

@@ -9,6 +9,8 @@ const FOLD_QUOTED = 'quoted';
function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {
if (!lineWidth || lineWidth < 0)
return text;
if (lineWidth < minContentWidth)
minContentWidth = 0;
const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);
if (text.length <= endStep)
return text;
@@ -28,7 +30,7 @@ function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth =
let escStart = -1;
let escEnd = -1;
if (mode === FOLD_BLOCK) {
i = consumeMoreIndentedLines(text, i);
i = consumeMoreIndentedLines(text, i, indent.length);
if (i !== -1)
end = i + endStep;
}
@@ -52,8 +54,8 @@ function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth =
}
if (ch === '\n') {
if (mode === FOLD_BLOCK)
i = consumeMoreIndentedLines(text, i);
end = i + endStep;
i = consumeMoreIndentedLines(text, i, indent.length);
end = i + indent.length + endStep;
split = undefined;
}
else {
@@ -121,15 +123,24 @@ function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth =
* Presumes `i + 1` is at the start of a line
* @returns index of last newline in more-indented block
*/
function consumeMoreIndentedLines(text, i) {
let ch = text[i + 1];
function consumeMoreIndentedLines(text, i, indent) {
let end = i;
let start = i + 1;
let ch = text[start];
while (ch === ' ' || ch === '\t') {
do {
ch = text[(i += 1)];
} while (ch && ch !== '\n');
ch = text[i + 1];
if (i < start + indent) {
ch = text[++i];
}
else {
do {
ch = text[++i];
} while (ch && ch !== '\n');
end = i;
start = i + 1;
ch = text[start];
}
}
return i;
return end;
}
export { FOLD_BLOCK, FOLD_FLOW, FOLD_QUOTED, foldFlowLines };

View File

@@ -54,7 +54,12 @@ function getTagObject(tags, item) {
let obj;
if (isScalar(item)) {
obj = item.value;
const match = tags.filter(t => t.identify?.(obj));
let match = tags.filter(t => t.identify?.(obj));
if (match.length > 1) {
const testMatch = match.filter(t => t.test);
if (testMatch.length > 0)
match = testMatch;
}
tagObj =
match.find(t => t.format === item.format) ?? match.find(t => !t.format);
}
@@ -63,7 +68,7 @@ function getTagObject(tags, item) {
tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);
}
if (!tagObj) {
const name = obj?.constructor?.name ?? typeof obj;
const name = obj?.constructor?.name ?? (obj === null ? 'null' : typeof obj);
throw new Error(`Tag not resolved for ${name} value`);
}
return tagObj;
@@ -78,7 +83,7 @@ function stringifyProps(node, tagObj, { anchors, doc }) {
anchors.add(anchor);
props.push(`&${anchor}`);
}
const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag;
const tag = node.tag ?? (tagObj.default ? null : tagObj.tag);
if (tag)
props.push(doc.directives.tagString(tag));
return props.join(' ');
@@ -104,8 +109,7 @@ function stringify(item, ctx, onComment, onChompKeep) {
const node = isNode(item)
? item
: ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });
if (!tagObj)
tagObj = getTagObject(ctx.doc.schema.tags, node);
tagObj ?? (tagObj = getTagObject(ctx.doc.schema.tags, node));
const props = stringifyProps(node, tagObj, ctx);
if (props.length > 0)
ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1;

View File

@@ -1,4 +1,3 @@
import { Collection } from '../nodes/Collection.js';
import { isNode, isPair } from '../nodes/identity.js';
import { stringify } from './stringify.js';
import { lineComment, indentComment } from './stringifyComment.js';
@@ -59,7 +58,7 @@ function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, fl
onChompKeep();
return str;
}
function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) {
function stringifyFlowCollection({ items }, ctx, { flowChars, itemIndent }) {
const { indent, indentStep, flowCollectionPadding: fcPadding, options: { commentString } } = ctx;
itemIndent += indentStep;
const itemCtx = Object.assign({}, ctx, {
@@ -112,32 +111,25 @@ function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemInden
lines.push(str);
linesAtValue = lines.length;
}
let str;
const { start, end } = flowChars;
if (lines.length === 0) {
str = start + end;
return start + end;
}
else {
if (!reqNewline) {
const len = lines.reduce((sum, line) => sum + line.length + 2, 2);
reqNewline = len > Collection.maxFlowStringSingleLineLength;
reqNewline = ctx.options.lineWidth > 0 && len > ctx.options.lineWidth;
}
if (reqNewline) {
str = start;
let str = start;
for (const line of lines)
str += line ? `\n${indentStep}${indent}${line}` : '\n';
str += `\n${indent}${end}`;
return `${str}\n${indent}${end}`;
}
else {
str = `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`;
return `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`;
}
}
if (comment) {
str += lineComment(str, indent, commentString(comment));
if (onComment)
onComment();
}
return str;
}
function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {
if (comment && chompKeep)

View File

@@ -10,7 +10,7 @@ function stringifyPair({ key, value }, ctx, onComment, onChompKeep) {
if (keyComment) {
throw new Error('With simple keys, key nodes cannot have comments');
}
if (isCollection(key)) {
if (isCollection(key) || (!isNode(key) && typeof key === 'object')) {
const msg = 'With simple keys, collection cannot be used as a key value';
throw new Error(msg);
}

View File

@@ -1,5 +1,5 @@
import { Scalar } from '../nodes/Scalar.js';
import { foldFlowLines, FOLD_QUOTED, FOLD_FLOW, FOLD_BLOCK } from './foldFlowLines.js';
import { foldFlowLines, FOLD_FLOW, FOLD_QUOTED, FOLD_BLOCK } from './foldFlowLines.js';
const getFoldOptions = (ctx, isBlock) => ({
indentAtStart: isBlock ? ctx.indent.length : ctx.indentAtStart,
@@ -219,23 +219,32 @@ function blockString({ comment, type, value }, ctx, onComment, onChompKeep) {
start = start.replace(/\n+/g, `$&${indent}`);
}
const indentSize = indent ? '2' : '1'; // root is at -1
let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp;
// Leading | or > is added later
let header = (startWithSpace ? indentSize : '') + chomp;
if (comment) {
header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' '));
if (onComment)
onComment();
}
if (literal) {
value = value.replace(/\n+/g, `$&${indent}`);
return `${header}\n${indent}${start}${value}${end}`;
if (!literal) {
const foldedValue = value
.replace(/\n+/g, '\n$&')
.replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded
// ^ more-ind. ^ empty ^ capture next empty lines only at end of indent
.replace(/\n+/g, `$&${indent}`);
let literalFallback = false;
const foldOptions = getFoldOptions(ctx, true);
if (blockQuote !== 'folded' && type !== Scalar.BLOCK_FOLDED) {
foldOptions.onOverflow = () => {
literalFallback = true;
};
}
const body = foldFlowLines(`${start}${foldedValue}${end}`, indent, FOLD_BLOCK, foldOptions);
if (!literalFallback)
return `>${header}\n${indent}${body}`;
}
value = value
.replace(/\n+/g, '\n$&')
.replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded
// ^ more-ind. ^ empty ^ capture next empty lines only at end of indent
.replace(/\n+/g, `$&${indent}`);
const body = foldFlowLines(`${start}${value}${end}`, indent, FOLD_BLOCK, getFoldOptions(ctx, true));
return `${header}\n${indent}${body}`;
value = value.replace(/\n+/g, `$&${indent}`);
return `|${header}\n${indent}${start}${value}${end}`;
}
function plainString(item, ctx, onComment, onChompKeep) {
const { type, value } = item;
@@ -244,10 +253,9 @@ function plainString(item, ctx, onComment, onChompKeep) {
(inFlow && /[[\]{},]/.test(value))) {
return quotedString(value, ctx);
}
if (!value ||
/^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) {
if (/^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) {
// not allowed:
// - empty string, '-' or '?'
// - '-' or '?'
// - start with an indicator character (except [?:-]) or /[?-] /
// - '\n ', ': ' or ' \n' anywhere
// - '#' not preceded by a non-space char

View File

@@ -1,8 +1,8 @@
export { createNode } from './doc/createNode.js';
export { debug, warn } from './log.js';
export { createPair } from './nodes/Pair.js';
export { findPair } from './nodes/YAMLMap.js';
export { toJS } from './nodes/toJS.js';
export { findPair } from './nodes/YAMLMap.js';
export { map as mapTag } from './schema/common/map.js';
export { seq as seqTag } from './schema/common/seq.js';
export { string as stringTag } from './schema/common/string.js';

node_modules/yaml/dist/cli.d.ts (generated, vendored, normal file): 8 lines changed

@@ -0,0 +1,8 @@
export declare const help = "yaml: A command-line YAML processor and inspector\n\nReads stdin and writes output to stdout and errors & warnings to stderr.\n\nUsage:\n yaml Process a YAML stream, outputting it as YAML\n yaml cst Parse the CST of a YAML stream\n yaml lex Parse the lexical tokens of a YAML stream\n yaml valid Validate a YAML stream, returning 0 on success\n\nOptions:\n --help, -h Show this message.\n --json, -j Output JSON.\n --indent 2 Output pretty-printed data, indented by the given number of spaces.\n --merge, -m Enable support for \"<<\" merge keys.\n\nAdditional options for bare \"yaml\" command:\n --doc, -d Output pretty-printed JS Document objects.\n --single, -1 Require the input to consist of a single YAML document.\n --strict, -s Stop on errors.\n --visit, -v Apply a visitor to each document (requires a path to import)\n --yaml 1.1 Set the YAML version. (default: 1.2)";
export declare class UserError extends Error {
static ARGS: number;
static SINGLE: number;
code: number;
constructor(code: number, message: string);
}
export declare function cli(stdin: NodeJS.ReadableStream, done: (error?: Error) => void, argv?: string[]): Promise<void>;
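The declaration above matches how `bin.mjs` drives the CLI; a hypothetical sketch of calling it programmatically (the `yaml/dist/cli.mjs` import path simply mirrors the relative path used by `bin.mjs` and may not be a stable public entry point):

```js
import { cli, UserError } from 'yaml/dist/cli.mjs' // assumed path, see note above

// Same error handling as bin.mjs, but with an explicit argv
// instead of the process arguments.
cli(process.stdin, error => {
  if (error instanceof UserError) process.exitCode = error.code
  else if (error) throw error
}, ['valid', '--strict'])
```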

node_modules/yaml/dist/cli.mjs (generated, vendored, normal file): 201 lines changed

@@ -0,0 +1,201 @@
import { resolve } from 'path';
import { parseArgs } from 'util';
import { prettyToken } from './parse/cst.js';
import { Lexer } from './parse/lexer.js';
import { Parser } from './parse/parser.js';
import { Composer } from './compose/composer.js';
import { LineCounter } from './parse/line-counter.js';
import { prettifyError } from './errors.js';
import { visit } from './visit.js';
const help = `\
yaml: A command-line YAML processor and inspector

Reads stdin and writes output to stdout and errors & warnings to stderr.

Usage:
yaml Process a YAML stream, outputting it as YAML
yaml cst Parse the CST of a YAML stream
yaml lex Parse the lexical tokens of a YAML stream
yaml valid Validate a YAML stream, returning 0 on success

Options:
--help, -h Show this message.
--json, -j Output JSON.
--indent 2 Output pretty-printed data, indented by the given number of spaces.
--merge, -m Enable support for "<<" merge keys.

Additional options for bare "yaml" command:
--doc, -d Output pretty-printed JS Document objects.
--single, -1 Require the input to consist of a single YAML document.
--strict, -s Stop on errors.
--visit, -v Apply a visitor to each document (requires a path to import)
--yaml 1.1 Set the YAML version. (default: 1.2)`;
class UserError extends Error {
constructor(code, message) {
super(`Error: ${message}`);
this.code = code;
}
}
UserError.ARGS = 2;
UserError.SINGLE = 3;
async function cli(stdin, done, argv) {
let args;
try {
args = parseArgs({
args: argv,
allowPositionals: true,
options: {
doc: { type: 'boolean', short: 'd' },
help: { type: 'boolean', short: 'h' },
indent: { type: 'string', short: 'i' },
merge: { type: 'boolean', short: 'm' },
json: { type: 'boolean', short: 'j' },
single: { type: 'boolean', short: '1' },
strict: { type: 'boolean', short: 's' },
visit: { type: 'string', short: 'v' },
yaml: { type: 'string', default: '1.2' }
}
});
}
catch (error) {
return done(new UserError(UserError.ARGS, error.message));
}
const { positionals: [mode], values: opt } = args;
let indent = Number(opt.indent);
stdin.setEncoding('utf-8');
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
switch (opt.help || mode) {
/* istanbul ignore next */
case true: // --help
console.log(help);
break;
case 'lex': {
const lexer = new Lexer();
const data = [];
const add = (tok) => {
if (opt.json)
data.push(tok);
else
console.log(prettyToken(tok));
};
stdin.on('data', (chunk) => {
for (const tok of lexer.lex(chunk, true))
add(tok);
});
stdin.on('end', () => {
for (const tok of lexer.lex('', false))
add(tok);
if (opt.json)
console.log(JSON.stringify(data, null, indent));
done();
});
break;
}
case 'cst': {
const parser = new Parser();
const data = [];
const add = (tok) => {
if (opt.json)
data.push(tok);
else
console.dir(tok, { depth: null });
};
stdin.on('data', (chunk) => {
for (const tok of parser.parse(chunk, true))
add(tok);
});
stdin.on('end', () => {
for (const tok of parser.parse('', false))
add(tok);
if (opt.json)
console.log(JSON.stringify(data, null, indent));
done();
});
break;
}
case undefined:
case 'valid': {
const lineCounter = new LineCounter();
const parser = new Parser(lineCounter.addNewLine);
// @ts-expect-error Version is validated at runtime
const composer = new Composer({ version: opt.yaml, merge: opt.merge });
const visitor = opt.visit
? (await import(resolve(opt.visit))).default
: null;
let source = '';
let hasDoc = false;
let reqDocEnd = false;
const data = [];
const add = (doc) => {
if (hasDoc && opt.single) {
return done(new UserError(UserError.SINGLE, 'Input stream contains multiple documents'));
}
for (const error of doc.errors) {
prettifyError(source, lineCounter)(error);
if (opt.strict || mode === 'valid')
return done(error);
console.error(error);
}
for (const warning of doc.warnings) {
prettifyError(source, lineCounter)(warning);
console.error(warning);
}
if (visitor)
visit(doc, visitor);
if (mode === 'valid')
doc.toJS();
else if (opt.json)
data.push(doc);
else if (opt.doc) {
Object.defineProperties(doc, {
options: { enumerable: false },
schema: { enumerable: false }
});
console.dir(doc, { depth: null });
}
else {
if (reqDocEnd)
console.log('...');
try {
indent || (indent = 2);
const str = doc.toString({ indent });
console.log(str.endsWith('\n') ? str.slice(0, -1) : str);
}
catch (error) {
done(error);
}
}
hasDoc = true;
reqDocEnd = !doc.directives?.docEnd;
};
stdin.on('data', (chunk) => {
source += chunk;
for (const tok of parser.parse(chunk, true)) {
for (const doc of composer.next(tok))
add(doc);
}
});
stdin.on('end', () => {
for (const tok of parser.parse('', false)) {
for (const doc of composer.next(tok))
add(doc);
}
for (const doc of composer.end(false))
add(doc);
if (opt.single && !hasDoc) {
return done(new UserError(UserError.SINGLE, 'Input stream contained no documents'));
}
if (mode !== 'valid' && opt.json) {
console.log(JSON.stringify(opt.single ? data[0] : data, null, indent));
}
done();
});
break;
}
default:
done(new UserError(UserError.ARGS, `Unknown command: ${JSON.stringify(mode)}`));
}
}
export { UserError, cli, help };
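The new `cli.mjs` above exports `help`, `UserError` and `cli(stdin, done, argv?)`, matching the declaration in `cli.d.ts` earlier in this diff. As a minimal sketch (not part of the diff), this is roughly how the exported `cli()` could be driven from a small Node script; the `yaml/dist/cli.mjs` import path and the sample flags are assumptions taken only from the code shown here, not from the package's documented entry points:

```js
import { cli } from 'yaml/dist/cli.mjs'

// Feed process.stdin through the CLI logic, asking for JSON output
// indented by two spaces (flags as listed in the help text above).
cli(
  process.stdin,
  error => {
    if (error) {
      console.error(error.message)
      process.exitCode = error.code ?? 1 // UserError instances carry a numeric code
    }
  },
  ['--json', '--indent', '2']
)
```

In normal use the published `yaml` binary performs this wiring itself; the sketch only shows how the exported pieces fit together.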


@@ -1,5 +1,11 @@
import type { ParsedNode } from '../nodes/Node.js';
import type { BlockMap, BlockSequence, FlowCollection, SourceToken } from '../parse/cst.js';
import type { ComposeContext, ComposeNode } from './compose-node.js';
import type { ComposeErrorHandler } from './composer.js';
export declare function composeCollection(CN: ComposeNode, ctx: ComposeContext, token: BlockMap | BlockSequence | FlowCollection, tagToken: SourceToken | null, onError: ComposeErrorHandler): ParsedNode;
import type { ParsedNode } from '../nodes/Node';
import type { BlockMap, BlockSequence, FlowCollection, SourceToken } from '../parse/cst';
import type { ComposeContext, ComposeNode } from './compose-node';
import type { ComposeErrorHandler } from './composer';
interface Props {
anchor: SourceToken | null;
tag: SourceToken | null;
newlineAfterProp: SourceToken | null;
}
export declare function composeCollection(CN: ComposeNode, ctx: ComposeContext, token: BlockMap | BlockSequence | FlowCollection, props: Props, onError: ComposeErrorHandler): ParsedNode;
export {};


@@ -25,10 +25,23 @@ function resolveCollection(CN, ctx, token, onError, tagName, tag) {
coll.tag = tagName;
return coll;
}
function composeCollection(CN, ctx, token, tagToken, onError) {
function composeCollection(CN, ctx, token, props, onError) {
const tagToken = props.tag;
const tagName = !tagToken
? null
: ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));
if (token.type === 'block-seq') {
const { anchor, newlineAfterProp: nl } = props;
const lastProp = anchor && tagToken
? anchor.offset > tagToken.offset
? anchor
: tagToken
: (anchor ?? tagToken);
if (lastProp && (!nl || nl.offset < lastProp.offset)) {
const message = 'Missing newline after block sequence props';
onError(lastProp, 'MISSING_CHAR', message);
}
}
const expType = token.type === 'block-map'
? 'map'
: token.type === 'block-seq'
@@ -42,8 +55,7 @@ function composeCollection(CN, ctx, token, tagToken, onError) {
!tagName ||
tagName === '!' ||
(tagName === YAMLMap.YAMLMap.tagName && expType === 'map') ||
(tagName === YAMLSeq.YAMLSeq.tagName && expType === 'seq') ||
!expType) {
(tagName === YAMLSeq.YAMLSeq.tagName && expType === 'seq')) {
return resolveCollection(CN, ctx, token, onError, tagName);
}
let tag = ctx.schema.tags.find(t => t.tag === tagName && t.collection === expType);
@@ -54,8 +66,8 @@ function composeCollection(CN, ctx, token, tagToken, onError) {
tag = kt;
}
else {
if (kt?.collection) {
onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection}`, true);
if (kt) {
onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection ?? 'scalar'}`, true);
}
else {
onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);


@@ -1,7 +1,7 @@
import type { Directives } from '../doc/directives.js';
import { Document } from '../doc/Document.js';
import type { ParsedNode } from '../nodes/Node.js';
import type { DocumentOptions, ParseOptions, SchemaOptions } from '../options.js';
import type * as CST from '../parse/cst.js';
import type { ComposeErrorHandler } from './composer.js';
import type { Directives } from '../doc/directives';
import { Document } from '../doc/Document';
import type { ParsedNode } from '../nodes/Node';
import type { DocumentOptions, ParseOptions, SchemaOptions } from '../options';
import type * as CST from '../parse/cst';
import type { ComposeErrorHandler } from './composer';
export declare function composeDoc<Contents extends ParsedNode = ParsedNode, Strict extends boolean = true>(options: ParseOptions & DocumentOptions & SchemaOptions, directives: Directives, { offset, start, value, end }: CST.Document, onError: ComposeErrorHandler): Document.Parsed<Contents, Strict>;


@@ -9,6 +9,7 @@ function composeDoc(options, directives, { offset, start, value, end }, onError)
const opts = Object.assign({ _directives: directives }, options);
const doc = new Document.Document(undefined, opts);
const ctx = {
atKey: false,
atRoot: true,
directives: doc.directives,
options: doc.options,
@@ -19,6 +20,7 @@ function composeDoc(options, directives, { offset, start, value, end }, onError)
next: value ?? end?.[0],
offset,
onError,
parentIndent: 0,
startOnNewline: true
});
if (props.found) {


@@ -1,10 +1,11 @@
import type { Directives } from '../doc/directives.js';
import type { ParsedNode } from '../nodes/Node.js';
import type { ParseOptions } from '../options.js';
import type { SourceToken, Token } from '../parse/cst.js';
import type { Schema } from '../schema/Schema.js';
import type { ComposeErrorHandler } from './composer.js';
import type { Directives } from '../doc/directives';
import type { ParsedNode } from '../nodes/Node';
import type { ParseOptions } from '../options';
import type { SourceToken, Token } from '../parse/cst';
import type { Schema } from '../schema/Schema';
import type { ComposeErrorHandler } from './composer';
export interface ComposeContext {
atKey: boolean;
atRoot: boolean;
directives: Directives;
options: Readonly<Required<Omit<ParseOptions, 'lineCounter'>>>;
@@ -15,6 +16,7 @@ interface Props {
comment: string;
anchor: SourceToken | null;
tag: SourceToken | null;
newlineAfterProp: SourceToken | null;
end: number;
}
declare const CN: {
@@ -23,5 +25,5 @@ declare const CN: {
};
export type ComposeNode = typeof CN;
export declare function composeNode(ctx: ComposeContext, token: Token, props: Props, onError: ComposeErrorHandler): ParsedNode;
export declare function composeEmptyNode(ctx: ComposeContext, offset: number, before: Token[] | undefined, pos: number | null, { spaceBefore, comment, anchor, tag, end }: Props, onError: ComposeErrorHandler): import("../index.js").Scalar.Parsed;
export declare function composeEmptyNode(ctx: ComposeContext, offset: number, before: Token[] | undefined, pos: number | null, { spaceBefore, comment, anchor, tag, end }: Props, onError: ComposeErrorHandler): import('../index').Scalar.Parsed;
export {};


@@ -1,6 +1,7 @@
'use strict';
var Alias = require('../nodes/Alias.js');
var identity = require('../nodes/identity.js');
var composeCollection = require('./compose-collection.js');
var composeScalar = require('./compose-scalar.js');
var resolveEnd = require('./resolve-end.js');
@@ -8,6 +9,7 @@ var utilEmptyScalarPosition = require('./util-empty-scalar-position.js');
const CN = { composeNode, composeEmptyNode };
function composeNode(ctx, token, props, onError) {
const atKey = ctx.atKey;
const { spaceBefore, comment, anchor, tag } = props;
let node;
let isSrcToken = true;
@@ -28,7 +30,7 @@ function composeNode(ctx, token, props, onError) {
case 'block-map':
case 'block-seq':
case 'flow-collection':
node = composeCollection.composeCollection(CN, ctx, token, tag, onError);
node = composeCollection.composeCollection(CN, ctx, token, props, onError);
if (anchor)
node.anchor = anchor.source.substring(1);
break;
@@ -43,6 +45,14 @@ function composeNode(ctx, token, props, onError) {
}
if (anchor && node.anchor === '')
onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
if (atKey &&
ctx.options.stringKeys &&
(!identity.isScalar(node) ||
typeof node.value !== 'string' ||
(node.tag && node.tag !== 'tag:yaml.org,2002:str'))) {
const msg = 'With stringKeys, all keys must be strings';
onError(tag ?? token, 'NON_STRING_KEY', msg);
}
if (spaceBefore)
node.spaceBefore = true;
if (comment) {


@@ -1,5 +1,5 @@
import { Scalar } from '../nodes/Scalar.js';
import type { BlockScalar, FlowScalar, SourceToken } from '../parse/cst.js';
import type { ComposeContext } from './compose-node.js';
import type { ComposeErrorHandler } from './composer.js';
import { Scalar } from '../nodes/Scalar';
import type { BlockScalar, FlowScalar, SourceToken } from '../parse/cst';
import type { ComposeContext } from './compose-node';
import type { ComposeErrorHandler } from './composer';
export declare function composeScalar(ctx: ComposeContext, token: FlowScalar | BlockScalar, tagToken: SourceToken | null, onError: ComposeErrorHandler): Scalar.Parsed;


@@ -7,16 +7,21 @@ var resolveFlowScalar = require('./resolve-flow-scalar.js');
function composeScalar(ctx, token, tagToken, onError) {
const { value, type, comment, range } = token.type === 'block-scalar'
? resolveBlockScalar.resolveBlockScalar(token, ctx.options.strict, onError)
? resolveBlockScalar.resolveBlockScalar(ctx, token, onError)
: resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError);
const tagName = tagToken
? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))
: null;
const tag = tagToken && tagName
? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)
: token.type === 'scalar'
? findScalarTagByTest(ctx, value, token, onError)
: ctx.schema[identity.SCALAR];
let tag;
if (ctx.options.stringKeys && ctx.atKey) {
tag = ctx.schema[identity.SCALAR];
}
else if (tagName)
tag = findScalarTagByName(ctx.schema, value, tagName, tagToken, onError);
else if (token.type === 'scalar')
tag = findScalarTagByTest(ctx, value, token, onError);
else
tag = ctx.schema[identity.SCALAR];
let scalar;
try {
const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options);
@@ -64,8 +69,9 @@ function findScalarTagByName(schema, value, tagName, tagToken, onError) {
onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');
return schema[identity.SCALAR];
}
function findScalarTagByTest({ directives, schema }, value, token, onError) {
const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[identity.SCALAR];
function findScalarTagByTest({ atKey, directives, schema }, value, token, onError) {
const tag = schema.tags.find(tag => (tag.default === true || (atKey && tag.default === 'key')) &&
tag.test?.test(value)) || schema[identity.SCALAR];
if (schema.compat) {
const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ??
schema[identity.SCALAR];
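Together with the `stringKeys: false` default and the `NON_STRING_KEY` error code added elsewhere in this diff, the `compose-scalar.js` changes above let a parse force every mapping key to resolve as a plain string. A rough sketch of the observable difference, assuming `stringKeys` is accepted alongside the other parse options (the option name comes from this diff; the printed shapes are approximate):

```js
import { parse } from 'yaml'

const src = '1: one\ntrue: yes\n'

// Default: keys resolve through the schema, so with mapAsMap they keep
// their scalar types (number 1, boolean true).
console.log(parse(src, { mapAsMap: true }))
// → Map(2) { 1 => 'one', true => 'yes' }

// With stringKeys, keys are always resolved with the plain string tag.
console.log(parse(src, { mapAsMap: true, stringKeys: true }))
// → Map(2) { '1' => 'one', 'true' => 'yes' }
```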


@@ -1,9 +1,10 @@
import { Directives } from '../doc/directives.js';
import { Document } from '../doc/Document.js';
import { ErrorCode, YAMLParseError, YAMLWarning } from '../errors.js';
import type { ParsedNode, Range } from '../nodes/Node.js';
import type { DocumentOptions, ParseOptions, SchemaOptions } from '../options.js';
import type { Token } from '../parse/cst.js';
import { Directives } from '../doc/directives';
import { Document } from '../doc/Document';
import type { ErrorCode } from '../errors';
import { YAMLParseError, YAMLWarning } from '../errors';
import type { ParsedNode, Range } from '../nodes/Node';
import type { DocumentOptions, ParseOptions, SchemaOptions } from '../options';
import type { Token } from '../parse/cst';
type ErrorSource = number | [number, number] | Range | {
offset: number;
source?: string;


@@ -1,5 +1,6 @@
'use strict';
var node_process = require('process');
var directives = require('../doc/directives.js');
var Document = require('../doc/Document.js');
var errors = require('../errors.js');
@@ -133,7 +134,7 @@ class Composer {
}
/** Advance the composer by one CST token. */
*next(token) {
if (process.env.LOG_STREAM)
if (node_process.env.LOG_STREAM)
console.dir(token, { depth: null });
switch (token.type) {
case 'directive':


@@ -1,7 +1,6 @@
import type { ParsedNode } from '../nodes/Node.js';
import { YAMLMap } from '../nodes/YAMLMap.js';
import type { BlockMap } from '../parse/cst.js';
import { CollectionTag } from '../schema/types.js';
import type { ComposeContext, ComposeNode } from './compose-node.js';
import type { ComposeErrorHandler } from './composer.js';
export declare function resolveBlockMap({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, bm: BlockMap, onError: ComposeErrorHandler, tag?: CollectionTag): YAMLMap.Parsed<ParsedNode, ParsedNode | null>;
import { YAMLMap } from '../nodes/YAMLMap';
import type { BlockMap } from '../parse/cst';
import type { CollectionTag } from '../schema/types';
import type { ComposeContext, ComposeNode } from './compose-node';
import type { ComposeErrorHandler } from './composer';
export declare function resolveBlockMap({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, bm: BlockMap, onError: ComposeErrorHandler, tag?: CollectionTag): YAMLMap.Parsed;


@@ -23,6 +23,7 @@ function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, ta
next: key ?? sep?.[0],
offset,
onError,
parentIndent: bm.indent,
startOnNewline: true
});
const implicitKey = !keyProps.found;
@@ -43,7 +44,7 @@ function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, ta
}
continue;
}
if (keyProps.hasNewlineAfterProp || utilContainsNewline.containsNewline(key)) {
if (keyProps.newlineAfterProp || utilContainsNewline.containsNewline(key)) {
onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');
}
}
@@ -51,12 +52,14 @@ function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, ta
onError(offset, 'BAD_INDENT', startColMsg);
}
// key value
ctx.atKey = true;
const keyStart = keyProps.end;
const keyNode = key
? composeNode(ctx, key, keyProps, onError)
: composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);
if (ctx.schema.compat)
utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError);
ctx.atKey = false;
if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))
onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
// value properties
@@ -65,6 +68,7 @@ function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, ta
next: value,
offset: keyNode.range[2],
onError,
parentIndent: bm.indent,
startOnNewline: !key || key.type === 'block-scalar'
});
offset = valueProps.end;


@@ -1,8 +1,9 @@
import { Range } from '../nodes/Node.js';
import { Scalar } from '../nodes/Scalar.js';
import type { BlockScalar } from '../parse/cst.js';
import type { ComposeErrorHandler } from './composer.js';
export declare function resolveBlockScalar(scalar: BlockScalar, strict: boolean, onError: ComposeErrorHandler): {
import type { Range } from '../nodes/Node';
import { Scalar } from '../nodes/Scalar';
import type { BlockScalar } from '../parse/cst';
import type { ComposeContext } from './compose-node';
import type { ComposeErrorHandler } from './composer';
export declare function resolveBlockScalar(ctx: ComposeContext, scalar: BlockScalar, onError: ComposeErrorHandler): {
value: string;
type: Scalar.BLOCK_FOLDED | Scalar.BLOCK_LITERAL | null;
comment: string;


@@ -2,9 +2,9 @@
var Scalar = require('../nodes/Scalar.js');
function resolveBlockScalar(scalar, strict, onError) {
function resolveBlockScalar(ctx, scalar, onError) {
const start = scalar.offset;
const header = parseBlockScalarHeader(scalar, strict, onError);
const header = parseBlockScalarHeader(scalar, ctx.options.strict, onError);
if (!header)
return { value: '', type: null, comment: '', range: [start, start, start] };
const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL;
@@ -46,6 +46,10 @@ function resolveBlockScalar(scalar, strict, onError) {
if (header.indent === 0)
trimIndent = indent.length;
contentStart = i;
if (trimIndent === 0 && !ctx.atRoot) {
const message = 'Block scalar values in collections must be indented';
onError(offset, 'BAD_INDENT', message);
}
break;
}
offset += indent.length + content.length + 1;


@@ -1,6 +1,6 @@
import { YAMLSeq } from '../nodes/YAMLSeq.js';
import type { BlockSequence } from '../parse/cst.js';
import { CollectionTag } from '../schema/types.js';
import type { ComposeContext, ComposeNode } from './compose-node.js';
import type { ComposeErrorHandler } from './composer.js';
export declare function resolveBlockSeq({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, bs: BlockSequence, onError: ComposeErrorHandler, tag?: CollectionTag): YAMLSeq.Parsed<import("../index.js").ParsedNode>;
import { YAMLSeq } from '../nodes/YAMLSeq';
import type { BlockSequence } from '../parse/cst';
import type { CollectionTag } from '../schema/types';
import type { ComposeContext, ComposeNode } from './compose-node';
import type { ComposeErrorHandler } from './composer';
export declare function resolveBlockSeq({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, bs: BlockSequence, onError: ComposeErrorHandler, tag?: CollectionTag): YAMLSeq.Parsed;


@@ -9,6 +9,8 @@ function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, ta
const seq = new NodeClass(ctx.schema);
if (ctx.atRoot)
ctx.atRoot = false;
if (ctx.atKey)
ctx.atKey = false;
let offset = bs.offset;
let commentEnd = null;
for (const { start, value } of bs.items) {
@@ -17,6 +19,7 @@ function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, ta
next: value,
offset,
onError,
parentIndent: bs.indent,
startOnNewline: true
});
if (!props.found) {


@@ -1,5 +1,5 @@
import type { SourceToken } from '../parse/cst.js';
import type { ComposeErrorHandler } from './composer.js';
import type { SourceToken } from '../parse/cst';
import type { ComposeErrorHandler } from './composer';
export declare function resolveEnd(end: SourceToken[] | undefined, offset: number, reqSpace: boolean, onError: ComposeErrorHandler): {
comment: string;
offset: number;


@@ -1,7 +1,7 @@
import { YAMLMap } from '../nodes/YAMLMap.js';
import { YAMLSeq } from '../nodes/YAMLSeq.js';
import type { FlowCollection } from '../parse/cst.js';
import { CollectionTag } from '../schema/types.js';
import type { ComposeContext, ComposeNode } from './compose-node.js';
import type { ComposeErrorHandler } from './composer.js';
export declare function resolveFlowCollection({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, fc: FlowCollection, onError: ComposeErrorHandler, tag?: CollectionTag): YAMLMap.Parsed<import("../index.js").ParsedNode, import("../index.js").ParsedNode | null> | YAMLSeq.Parsed<import("../index.js").ParsedNode>;
import { YAMLMap } from '../nodes/YAMLMap';
import { YAMLSeq } from '../nodes/YAMLSeq';
import type { FlowCollection } from '../parse/cst';
import type { CollectionTag } from '../schema/types';
import type { ComposeContext, ComposeNode } from './compose-node';
import type { ComposeErrorHandler } from './composer';
export declare function resolveFlowCollection({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, fc: FlowCollection, onError: ComposeErrorHandler, tag?: CollectionTag): YAMLMap.Parsed<import('../index').ParsedNode, import('../index').ParsedNode | null> | YAMLSeq.Parsed<import('../index').ParsedNode>;


@@ -20,6 +20,8 @@ function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onErr
const atRoot = ctx.atRoot;
if (atRoot)
ctx.atRoot = false;
if (ctx.atKey)
ctx.atKey = false;
let offset = fc.offset + fc.start.source.length;
for (let i = 0; i < fc.items.length; ++i) {
const collItem = fc.items[i];
@@ -30,6 +32,7 @@ function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onErr
next: key ?? sep?.[0],
offset,
onError,
parentIndent: fc.indent,
startOnNewline: false
});
if (!props.found) {
@@ -98,12 +101,14 @@ function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onErr
else {
// item is a key+value pair
// key value
ctx.atKey = true;
const keyStart = props.end;
const keyNode = key
? composeNode(ctx, key, props, onError)
: composeEmptyNode(ctx, keyStart, start, null, props, onError);
if (isBlock(key))
onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);
ctx.atKey = false;
// value properties
const valueProps = resolveProps.resolveProps(sep ?? [], {
flow: fcName,
@@ -111,6 +116,7 @@ function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onErr
next: value,
offset: keyNode.range[2],
onError,
parentIndent: fc.indent,
startOnNewline: false
});
if (valueProps.found) {
@@ -163,6 +169,8 @@ function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onErr
const map = new YAMLMap.YAMLMap(ctx.schema);
map.flow = true;
map.items.push(pair);
const endRange = (valueNode ?? keyNode).range;
map.range = [keyNode.range[0], endRange[1], endRange[2]];
coll.items.push(map);
}
offset = valueNode ? valueNode.range[2] : valueProps.end;


@@ -1,7 +1,7 @@
import { Range } from '../nodes/Node.js';
import { Scalar } from '../nodes/Scalar.js';
import type { FlowScalar } from '../parse/cst.js';
import type { ComposeErrorHandler } from './composer.js';
import type { Range } from '../nodes/Node';
import { Scalar } from '../nodes/Scalar';
import type { FlowScalar } from '../parse/cst';
import type { ComposeErrorHandler } from './composer';
export declare function resolveFlowScalar(scalar: FlowScalar, strict: boolean, onError: ComposeErrorHandler): {
value: string;
type: Scalar.PLAIN | Scalar.QUOTE_DOUBLE | Scalar.QUOTE_SINGLE | null;


@@ -86,7 +86,7 @@ function foldLines(source) {
first = new RegExp('(.*?)(?<![ \t])[ \t]*\r?\n', 'sy');
line = new RegExp('[ \t]*(.*?)(?:(?<![ \t])[ \t]*)?\r?\n', 'sy');
}
catch (_) {
catch {
first = /(.*?)[ \t]*\r?\n/sy;
line = /[ \t]*(.*?)[ \t]*\r?\n/sy;
}
@@ -191,19 +191,19 @@ function foldNewline(source, offset) {
return { fold, offset };
}
const escapeCodes = {
'0': '\0',
a: '\x07',
b: '\b',
e: '\x1b',
f: '\f',
n: '\n',
r: '\r',
t: '\t',
v: '\v',
N: '\u0085',
_: '\u00a0',
L: '\u2028',
P: '\u2029',
'0': '\0', // null character
a: '\x07', // bell character
b: '\b', // backspace
e: '\x1b', // escape character
f: '\f', // form feed
n: '\n', // line feed
r: '\r', // carriage return
t: '\t', // horizontal tab
v: '\v', // vertical tab
N: '\u0085', // Unicode next line
_: '\u00a0', // Unicode non-breaking space
L: '\u2028', // Unicode line separator
P: '\u2029', // Unicode paragraph separator
' ': ' ',
'"': '"',
'/': '/',


@@ -1,22 +1,23 @@
import type { SourceToken, Token } from '../parse/cst.js';
import type { ComposeErrorHandler } from './composer.js';
import type { SourceToken, Token } from '../parse/cst';
import type { ComposeErrorHandler } from './composer';
export interface ResolvePropsArg {
flow?: 'flow map' | 'flow sequence';
indicator: 'doc-start' | 'explicit-key-ind' | 'map-value-ind' | 'seq-item-ind';
next: Token | null | undefined;
offset: number;
onError: ComposeErrorHandler;
parentIndent: number;
startOnNewline: boolean;
}
export declare function resolveProps(tokens: SourceToken[], { flow, indicator, next, offset, onError, startOnNewline }: ResolvePropsArg): {
export declare function resolveProps(tokens: SourceToken[], { flow, indicator, next, offset, onError, parentIndent, startOnNewline }: ResolvePropsArg): {
comma: SourceToken | null;
found: SourceToken | null;
spaceBefore: boolean;
comment: string;
hasNewline: boolean;
hasNewlineAfterProp: boolean;
anchor: SourceToken | null;
tag: SourceToken | null;
newlineAfterProp: SourceToken | null;
end: number;
start: number;
};


@@ -1,16 +1,17 @@
'use strict';
function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) {
function resolveProps(tokens, { flow, indicator, next, offset, onError, parentIndent, startOnNewline }) {
let spaceBefore = false;
let atNewline = startOnNewline;
let hasSpace = startOnNewline;
let comment = '';
let commentSep = '';
let hasNewline = false;
let hasNewlineAfterProp = false;
let reqSpace = false;
let tab = null;
let anchor = null;
let tag = null;
let newlineAfterProp = null;
let comma = null;
let found = null;
let start = null;
@@ -22,16 +23,22 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
reqSpace = false;
}
if (tab) {
if (atNewline && token.type !== 'comment' && token.type !== 'newline') {
onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
}
tab = null;
}
switch (token.type) {
case 'space':
// At the doc level, tabs at line start may be parsed
// as leading white space rather than indentation.
// In a flow collection, only the parser handles indent.
if (!flow &&
atNewline &&
indicator !== 'doc-start' &&
token.source[0] === '\t')
onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
(indicator !== 'doc-start' || next?.type !== 'flow-collection') &&
token.source.includes('\t')) {
tab = token;
}
hasSpace = true;
break;
case 'comment': {
@@ -50,7 +57,7 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
if (atNewline) {
if (comment)
comment += token.source;
else
else if (!found || indicator !== 'seq-item-ind')
spaceBefore = true;
}
else
@@ -58,7 +65,7 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
atNewline = true;
hasNewline = true;
if (anchor || tag)
hasNewlineAfterProp = true;
newlineAfterProp = token;
hasSpace = true;
break;
case 'anchor':
@@ -67,8 +74,7 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
if (token.source.endsWith(':'))
onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);
anchor = token;
if (start === null)
start = token.offset;
start ?? (start = token.offset);
atNewline = false;
hasSpace = false;
reqSpace = true;
@@ -77,8 +83,7 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
if (tag)
onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');
tag = token;
if (start === null)
start = token.offset;
start ?? (start = token.offset);
atNewline = false;
hasSpace = false;
reqSpace = true;
@@ -91,7 +96,8 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
if (found)
onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`);
found = token;
atNewline = false;
atNewline =
indicator === 'seq-item-ind' || indicator === 'explicit-key-ind';
hasSpace = false;
break;
case 'comma':
@@ -117,17 +123,23 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
next.type !== 'space' &&
next.type !== 'newline' &&
next.type !== 'comma' &&
(next.type !== 'scalar' || next.source !== ''))
(next.type !== 'scalar' || next.source !== '')) {
onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
}
if (tab &&
((atNewline && tab.indent <= parentIndent) ||
next?.type === 'block-map' ||
next?.type === 'block-seq'))
onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
return {
comma,
found,
spaceBefore,
comment,
hasNewline,
hasNewlineAfterProp,
anchor,
tag,
newlineAfterProp,
end,
start: start ?? end
};


@@ -1,2 +1,2 @@
import type { Token } from '../parse/cst.js';
import type { Token } from '../parse/cst';
export declare function containsNewline(key: Token | null | undefined): boolean | null;


@@ -1,2 +1,2 @@
import type { Token } from '../parse/cst.js';
import type { Token } from '../parse/cst';
export declare function emptyScalarPosition(offset: number, before: Token[] | undefined, pos: number | null): number;


@@ -2,8 +2,7 @@
function emptyScalarPosition(offset, before, pos) {
if (before) {
if (pos === null)
pos = before.length;
pos ?? (pos = before.length);
for (let i = pos - 1; i >= 0; --i) {
let st = before[i];
switch (st.type) {


@@ -1,3 +1,3 @@
import { Token } from '../parse/cst';
import { ComposeErrorHandler } from './composer';
import type { Token } from '../parse/cst';
import type { ComposeErrorHandler } from './composer';
export declare function flowIndentCheck(indent: number, fc: Token | null | undefined, onError: ComposeErrorHandler): void;


@@ -1,4 +1,4 @@
import type { ParsedNode } from '../nodes/Node.js';
import type { Pair } from '../nodes/Pair.js';
import type { ComposeContext } from './compose-node.js';
import type { ParsedNode } from '../nodes/Node';
import type { Pair } from '../nodes/Pair';
import type { ComposeContext } from './compose-node';
export declare function mapIncludes(ctx: ComposeContext, items: Pair<ParsedNode>[], search: ParsedNode): boolean;


@@ -8,11 +8,7 @@ function mapIncludes(ctx, items, search) {
return false;
const isEqual = typeof uniqueKeys === 'function'
? uniqueKeys
: (a, b) => a === b ||
(identity.isScalar(a) &&
identity.isScalar(b) &&
a.value === b.value &&
!(a.value === '<<' && ctx.schema.merge));
: (a, b) => a === b || (identity.isScalar(a) && identity.isScalar(b) && a.value === b.value);
return items.some(pair => isEqual(pair.key, search));
}


@@ -1,14 +1,14 @@
import type { YAMLError, YAMLWarning } from '../errors.js';
import { Alias } from '../nodes/Alias.js';
import { NODE_TYPE } from '../nodes/identity.js';
import type { Node, NodeType, ParsedNode, Range } from '../nodes/Node.js';
import { Pair } from '../nodes/Pair.js';
import type { Scalar } from '../nodes/Scalar.js';
import type { YAMLMap } from '../nodes/YAMLMap.js';
import type { YAMLSeq } from '../nodes/YAMLSeq.js';
import type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from '../options.js';
import { Schema } from '../schema/Schema.js';
import { Directives } from './directives.js';
import type { YAMLError, YAMLWarning } from '../errors';
import { Alias } from '../nodes/Alias';
import { NODE_TYPE } from '../nodes/identity';
import type { Node, NodeType, ParsedNode, Range } from '../nodes/Node';
import { Pair } from '../nodes/Pair';
import type { Scalar } from '../nodes/Scalar';
import type { YAMLMap } from '../nodes/YAMLMap';
import type { YAMLSeq } from '../nodes/YAMLSeq';
import type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from '../options';
import { Schema } from '../schema/Schema';
import { Directives } from './directives';
export type Replacer = any[] | ((key: any, value: any) => unknown);
export declare namespace Document {
/** @ts-ignore The typing of directives fails in TS <= 4.2 */


@@ -37,6 +37,7 @@ class Document {
logLevel: 'warn',
prettyErrors: true,
strict: true,
stringKeys: false,
uniqueKeys: true,
version: '1.2'
}, options);
@@ -260,7 +261,7 @@ class Document {
this.directives.yaml.version = '1.1';
else
this.directives = new directives.Directives({ version: '1.1' });
opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };
opt = { resolveKnownTags: false, schema: 'yaml-1.1' };
break;
case '1.2':
case 'next':
@@ -268,7 +269,7 @@ class Document {
this.directives.yaml.version = version;
else
this.directives = new directives.Directives({ version });
opt = { merge: false, resolveKnownTags: true, schema: 'core' };
opt = { resolveKnownTags: true, schema: 'core' };
break;
case null:
if (this.directives)


@@ -1,5 +1,5 @@
import type { Node } from '../nodes/Node.js';
import type { Document } from './Document.js';
import type { Node } from '../nodes/Node';
import type { Document } from './Document';
/**
* Verify that the input string is a valid anchor.
*


@@ -41,8 +41,7 @@ function createNodeAnchors(doc, prefix) {
return {
onAnchor: (source) => {
aliasObjects.push(source);
if (!prevAnchors)
prevAnchors = anchorNames(doc);
prevAnchors ?? (prevAnchors = anchorNames(doc));
const anchor = findNewAnchor(prefix, prevAnchors);
prevAnchors.add(anchor);
return anchor;


@@ -13,6 +13,7 @@ function applyReviver(reviver, obj, key, val) {
for (let i = 0, len = val.length; i < len; ++i) {
const v0 = val[i];
const v1 = applyReviver(reviver, val, String(i), v0);
// eslint-disable-next-line @typescript-eslint/no-array-delete
if (v1 === undefined)
delete val[i];
else if (v1 !== v0)


@@ -1,7 +1,7 @@
import type { Node } from '../nodes/Node.js';
import type { Schema } from '../schema/Schema.js';
import type { CollectionTag, ScalarTag } from '../schema/types.js';
import type { Replacer } from './Document.js';
import type { Node } from '../nodes/Node';
import type { Schema } from '../schema/Schema';
import type { CollectionTag, ScalarTag } from '../schema/types';
import type { Replacer } from './Document';
export interface CreateNodeContext {
aliasDuplicateObjects: boolean;
keepUndefined: boolean;


@@ -40,8 +40,7 @@ function createNode(value, tagName, ctx) {
if (aliasDuplicateObjects && value && typeof value === 'object') {
ref = sourceObjects.get(value);
if (ref) {
if (!ref.anchor)
ref.anchor = onAnchor(value);
ref.anchor ?? (ref.anchor = onAnchor(value));
return new Alias.Alias(ref.anchor);
}
else {


@@ -1,4 +1,4 @@
import type { Document } from './Document.js';
import type { Document } from './Document';
export declare class Directives {
static defaultYaml: Directives['yaml'];
static defaultTags: Directives['tags'];

2
node_modules/yaml/dist/errors.d.ts generated vendored

@@ -1,5 +1,5 @@
import type { LineCounter } from './parse/line-counter';
export type ErrorCode = 'ALIAS_PROPS' | 'BAD_ALIAS' | 'BAD_DIRECTIVE' | 'BAD_DQ_ESCAPE' | 'BAD_INDENT' | 'BAD_PROP_ORDER' | 'BAD_SCALAR_START' | 'BLOCK_AS_IMPLICIT_KEY' | 'BLOCK_IN_FLOW' | 'DUPLICATE_KEY' | 'IMPOSSIBLE' | 'KEY_OVER_1024_CHARS' | 'MISSING_CHAR' | 'MULTILINE_IMPLICIT_KEY' | 'MULTIPLE_ANCHORS' | 'MULTIPLE_DOCS' | 'MULTIPLE_TAGS' | 'TAB_AS_INDENT' | 'TAG_RESOLVE_FAILED' | 'UNEXPECTED_TOKEN' | 'BAD_COLLECTION_TYPE';
export type ErrorCode = 'ALIAS_PROPS' | 'BAD_ALIAS' | 'BAD_DIRECTIVE' | 'BAD_DQ_ESCAPE' | 'BAD_INDENT' | 'BAD_PROP_ORDER' | 'BAD_SCALAR_START' | 'BLOCK_AS_IMPLICIT_KEY' | 'BLOCK_IN_FLOW' | 'DUPLICATE_KEY' | 'IMPOSSIBLE' | 'KEY_OVER_1024_CHARS' | 'MISSING_CHAR' | 'MULTILINE_IMPLICIT_KEY' | 'MULTIPLE_ANCHORS' | 'MULTIPLE_DOCS' | 'MULTIPLE_TAGS' | 'NON_STRING_KEY' | 'TAB_AS_INDENT' | 'TAG_RESOLVE_FAILED' | 'UNEXPECTED_TOKEN' | 'BAD_COLLECTION_TYPE';
export type LinePos = {
line: number;
col: number;

39
node_modules/yaml/dist/index.d.ts generated vendored

@@ -1,22 +1,25 @@
export { Composer } from './compose/composer.js';
export { Document } from './doc/Document.js';
export { Schema } from './schema/Schema.js';
export { ErrorCode, YAMLError, YAMLParseError, YAMLWarning } from './errors.js';
export { Alias } from './nodes/Alias.js';
export { isAlias, isCollection, isDocument, isMap, isNode, isPair, isScalar, isSeq } from './nodes/identity.js';
export { Node, ParsedNode, Range } from './nodes/Node.js';
export { Pair } from './nodes/Pair.js';
export { Scalar } from './nodes/Scalar.js';
export { YAMLMap } from './nodes/YAMLMap.js';
export { YAMLSeq } from './nodes/YAMLSeq.js';
export type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from './options.js';
export * as CST from './parse/cst.js';
export { Lexer } from './parse/lexer.js';
export { LineCounter } from './parse/line-counter.js';
export { Parser } from './parse/parser.js';
export { EmptyStream, parse, parseAllDocuments, parseDocument, stringify } from './public-api.js';
export { Composer } from './compose/composer';
export { Document } from './doc/Document';
export { Schema } from './schema/Schema';
export type { ErrorCode } from './errors';
export { YAMLError, YAMLParseError, YAMLWarning } from './errors';
export { Alias } from './nodes/Alias';
export { isAlias, isCollection, isDocument, isMap, isNode, isPair, isScalar, isSeq } from './nodes/identity';
export type { Node, ParsedNode, Range } from './nodes/Node';
export { Pair } from './nodes/Pair';
export { Scalar } from './nodes/Scalar';
export { YAMLMap } from './nodes/YAMLMap';
export { YAMLSeq } from './nodes/YAMLSeq';
export type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from './options';
export * as CST from './parse/cst';
export { Lexer } from './parse/lexer';
export { LineCounter } from './parse/line-counter';
export { Parser } from './parse/parser';
export type { EmptyStream } from './public-api';
export { parse, parseAllDocuments, parseDocument, stringify } from './public-api';
export type { TagId, Tags } from './schema/tags';
export type { CollectionTag, ScalarTag } from './schema/types';
export type { YAMLOMap } from './schema/yaml-1.1/omap';
export type { YAMLSet } from './schema/yaml-1.1/set';
export { asyncVisitor, asyncVisitorFn, visit, visitAsync, visitor, visitorFn } from './visit.js';
export type { asyncVisitor, asyncVisitorFn, visitor, visitorFn } from './visit';
export { visit, visitAsync } from './visit';

8
node_modules/yaml/dist/log.js generated vendored

@@ -1,15 +1,15 @@
'use strict';
var node_process = require('process');
function debug(logLevel, ...messages) {
if (logLevel === 'debug')
console.log(...messages);
}
function warn(logLevel, warning) {
if (logLevel === 'debug' || logLevel === 'warn') {
// https://github.com/typescript-eslint/typescript-eslint/issues/7478
// eslint-disable-next-line @typescript-eslint/prefer-optional-chain
if (typeof process !== 'undefined' && process.emitWarning)
process.emitWarning(warning);
if (typeof node_process.emitWarning === 'function')
node_process.emitWarning(warning);
else
console.warn(warning);
}


@@ -1,11 +1,12 @@
import type { Document } from '../doc/Document.js';
import type { FlowScalar } from '../parse/cst.js';
import type { StringifyContext } from '../stringify/stringify.js';
import { NodeBase, Range } from './Node.js';
import type { Document } from '../doc/Document';
import type { FlowScalar } from '../parse/cst';
import type { StringifyContext } from '../stringify/stringify';
import type { Range } from './Node';
import { NodeBase } from './Node';
import type { Scalar } from './Scalar';
import { ToJSContext } from './toJS.js';
import type { YAMLMap } from './YAMLMap.js';
import type { YAMLSeq } from './YAMLSeq.js';
import type { ToJSContext } from './toJS';
import type { YAMLMap } from './YAMLMap';
import type { YAMLSeq } from './YAMLSeq';
export declare namespace Alias {
interface Parsed extends Alias {
range: Range;
@@ -22,7 +23,7 @@ export declare class Alias extends NodeBase {
* Resolve the value of this alias within `doc`, finding the last
* instance of the `source` anchor before this node.
*/
resolve(doc: Document): Scalar | YAMLMap | YAMLSeq | undefined;
toJSON(_arg?: unknown, ctx?: ToJSContext): {} | null;
resolve(doc: Document, ctx?: ToJSContext): Scalar | YAMLMap | YAMLSeq | undefined;
toJSON(_arg?: unknown, ctx?: ToJSContext): unknown;
toString(ctx?: StringifyContext, _onComment?: () => void, _onChompKeep?: () => void): string;
}


@@ -20,23 +20,36 @@ class Alias extends Node.NodeBase {
* Resolve the value of this alias within `doc`, finding the last
* instance of the `source` anchor before this node.
*/
resolve(doc) {
resolve(doc, ctx) {
let nodes;
if (ctx?.aliasResolveCache) {
nodes = ctx.aliasResolveCache;
}
else {
nodes = [];
visit.visit(doc, {
Node: (_key, node) => {
if (identity.isAlias(node) || identity.hasAnchor(node))
nodes.push(node);
}
});
if (ctx)
ctx.aliasResolveCache = nodes;
}
let found = undefined;
visit.visit(doc, {
Node: (_key, node) => {
if (node === this)
return visit.visit.BREAK;
if (node.anchor === this.source)
found = node;
}
});
for (const node of nodes) {
if (node === this)
break;
if (node.anchor === this.source)
found = node;
}
return found;
}
toJSON(_arg, ctx) {
if (!ctx)
return { source: this.source };
const { anchors, doc, maxAliasCount } = ctx;
const source = this.resolve(doc);
const source = this.resolve(doc, ctx);
if (!source) {
const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
throw new ReferenceError(msg);
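The `resolve()` change above adds an optional `ToJSContext` argument so that repeated alias resolutions during `toJS()` can share the cached list of anchor and alias nodes; calls without a context keep the previous behaviour. A small sketch of the public-facing use, assuming a document with one anchor and one alias:

```js
import { parseDocument, isAlias } from 'yaml'

const doc = parseDocument('a: &x 42\nb: *x\n')
const node = doc.get('b', true) // the Alias node for *x

if (isAlias(node)) {
  // Without a context, resolve() walks the document as before.
  const target = node.resolve(doc)
  console.log(target?.value) // → 42
}
```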


@@ -1,10 +1,9 @@
import type { Schema } from '../schema/Schema.js';
import { NODE_TYPE } from './identity.js';
import { NodeBase } from './Node.js';
export declare function collectionFromPath(schema: Schema, path: unknown[], value: unknown): import("./Node.js").Node;
import type { Schema } from '../schema/Schema';
import { NODE_TYPE } from './identity';
import { NodeBase } from './Node';
export declare function collectionFromPath(schema: Schema, path: unknown[], value: unknown): import('./Node').Node;
export declare const isEmptyPath: (path: Iterable<unknown> | null | undefined) => path is null | undefined;
export declare abstract class Collection extends NodeBase {
static maxFlowStringSingleLineLength: number;
schema: Schema | undefined;
[NODE_TYPE]: symbol;
items: unknown[];


@@ -145,7 +145,6 @@ class Collection extends Node.NodeBase {
}
}
}
Collection.maxFlowStringSingleLineLength = 60;
exports.Collection = Collection;
exports.collectionFromPath = collectionFromPath;


@@ -1,12 +1,13 @@
import type { Document } from '../doc/Document.js';
import type { ToJSOptions } from '../options.js';
import { Token } from '../parse/cst.js';
import type { StringifyContext } from '../stringify/stringify.js';
import type { Alias } from './Alias.js';
import { NODE_TYPE } from './identity.js';
import type { Scalar } from './Scalar.js';
import type { YAMLMap } from './YAMLMap.js';
import type { YAMLSeq } from './YAMLSeq.js';
import type { Document } from '../doc/Document';
import type { ToJSOptions } from '../options';
import type { Token } from '../parse/cst';
import type { StringifyContext } from '../stringify/stringify';
import type { Alias } from './Alias';
import { NODE_TYPE } from './identity';
import type { Scalar } from './Scalar';
import type { ToJSContext } from './toJS';
import type { MapLike, YAMLMap } from './YAMLMap';
import type { YAMLSeq } from './YAMLSeq';
export type Node<T = unknown> = Alias | Scalar<T> | YAMLMap<unknown, T> | YAMLSeq<T>;
/** Utility type mapper */
export type NodeType<T> = T extends string | number | bigint | boolean | null | undefined ? Scalar<T> : T extends Date ? Scalar<string | Date> : T extends Array<any> ? YAMLSeq<NodeType<T[number]>> : T extends {
@@ -15,6 +16,7 @@ export type NodeType<T> = T extends string | number | bigint | boolean | null |
[key: number]: any;
} ? YAMLMap<NodeType<keyof T>, NodeType<T[keyof T]>> : Node;
export type ParsedNode = Alias.Parsed | Scalar.Parsed | YAMLMap.Parsed | YAMLSeq.Parsed;
/** `[start, value-end, node-end]` */
export type Range = [number, number, number];
export declare abstract class NodeBase {
readonly [NODE_TYPE]: symbol;
@@ -35,6 +37,11 @@ export declare abstract class NodeBase {
srcToken?: Token;
/** A fully qualified tag, if required */
tag?: string;
/**
* Customize the way that a key-value pair is resolved.
* Used for YAML 1.1 !!merge << handling.
*/
addToJSMap?: (ctx: ToJSContext | undefined, map: MapLike, value: unknown) => void;
/** A plain JS representation of this node */
abstract toJSON(): any;
abstract toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
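The new optional `addToJSMap` member on `NodeBase` is the hook that the reworked `!!merge` support (later in this diff) attaches to merge keys. Purely as a hypothetical illustration of the hook's shape, not a documented extension point, a key node could override how its pair lands in the JS result; `toJS` is taken from the `yaml/util` exports shown near the start of this section:

```js
import { parseDocument } from 'yaml'
import { toJS } from 'yaml/util'

const doc = parseDocument('FOO: 1\nBAR: 2\n')

// Hypothetical: lower-case every key while building the JS object.
for (const pair of doc.contents.items) {
  const key = pair.key
  key.addToJSMap = (ctx, map, value) => {
    map[String(key.value).toLowerCase()] = toJS(value, '', ctx)
  }
}

console.log(doc.toJS()) // → { foo: 1, bar: 2 }
```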


@@ -1,11 +1,12 @@
import { CreateNodeContext } from '../doc/createNode.js';
import type { CollectionItem } from '../parse/cst.js';
import type { Schema } from '../schema/Schema.js';
import type { StringifyContext } from '../stringify/stringify.js';
import { addPairToJSMap } from './addPairToJSMap.js';
import { NODE_TYPE } from './identity.js';
import type { ToJSContext } from './toJS.js';
export declare function createPair(key: unknown, value: unknown, ctx: CreateNodeContext): Pair<import("./Node.js").Node, import("./YAMLMap.js").YAMLMap<unknown, unknown> | import("./Scalar.js").Scalar<unknown> | import("./Alias.js").Alias | import("./YAMLSeq.js").YAMLSeq<unknown>>;
import type { CreateNodeContext } from '../doc/createNode';
import type { CollectionItem } from '../parse/cst';
import type { Schema } from '../schema/Schema';
import type { StringifyContext } from '../stringify/stringify';
import { addPairToJSMap } from './addPairToJSMap';
import { NODE_TYPE } from './identity';
import type { Node } from './Node';
import type { ToJSContext } from './toJS';
export declare function createPair(key: unknown, value: unknown, ctx: CreateNodeContext): Pair<Node, Node>;
export declare class Pair<K = unknown, V = unknown> {
readonly [NODE_TYPE]: symbol;
/** Always Node or null when parsed, but can be set to anything. */


@@ -1,6 +1,7 @@
import type { BlockScalar, FlowScalar } from '../parse/cst.js';
import { NodeBase, Range } from './Node.js';
import { ToJSContext } from './toJS.js';
import type { BlockScalar, FlowScalar } from '../parse/cst';
import type { Range } from './Node';
import { NodeBase } from './Node';
import type { ToJSContext } from './toJS';
export declare const isScalarValue: (value: unknown) => boolean;
export declare namespace Scalar {
interface Parsed extends Scalar {


@@ -1,12 +1,12 @@
import type { BlockMap, FlowCollection } from '../parse/cst.js';
import type { Schema } from '../schema/Schema.js';
import type { StringifyContext } from '../stringify/stringify.js';
import { CreateNodeContext } from '../util.js';
import { Collection } from './Collection.js';
import type { ParsedNode, Range } from './Node.js';
import { Pair } from './Pair.js';
import { Scalar } from './Scalar.js';
import type { ToJSContext } from './toJS.js';
import type { BlockMap, FlowCollection } from '../parse/cst';
import type { Schema } from '../schema/Schema';
import type { StringifyContext } from '../stringify/stringify';
import type { CreateNodeContext } from '../util';
import { Collection } from './Collection';
import type { ParsedNode, Range } from './Node';
import { Pair } from './Pair';
import type { Scalar } from './Scalar';
import type { ToJSContext } from './toJS';
export type MapLike = Map<unknown, unknown> | Set<unknown> | Record<string | number | symbol, unknown>;
export declare function findPair<K = unknown, V = unknown>(items: Iterable<Pair<K, V>>, key: unknown): Pair<K, V> | undefined;
export declare namespace YAMLMap {
@@ -24,7 +24,7 @@ export declare class YAMLMap<K = unknown, V = unknown> extends Collection {
* A generic collection parsing method that can be extended
* to other node classes that inherit from YAMLMap
*/
static from(schema: Schema, obj: unknown, ctx: CreateNodeContext): YAMLMap<unknown, unknown>;
static from(schema: Schema, obj: unknown, ctx: CreateNodeContext): YAMLMap;
/**
* Adds a value to the collection.
*


@@ -1,12 +1,12 @@
import { CreateNodeContext } from '../doc/createNode.js';
import type { BlockSequence, FlowCollection } from '../parse/cst.js';
import type { Schema } from '../schema/Schema.js';
import type { StringifyContext } from '../stringify/stringify.js';
import { Collection } from './Collection.js';
import type { ParsedNode, Range } from './Node.js';
import type { Pair } from './Pair.js';
import { Scalar } from './Scalar.js';
import { ToJSContext } from './toJS.js';
import type { CreateNodeContext } from '../doc/createNode';
import type { BlockSequence, FlowCollection } from '../parse/cst';
import type { Schema } from '../schema/Schema';
import type { StringifyContext } from '../stringify/stringify';
import { Collection } from './Collection';
import type { ParsedNode, Range } from './Node';
import type { Pair } from './Pair';
import type { Scalar } from './Scalar';
import type { ToJSContext } from './toJS';
export declare namespace YAMLSeq {
interface Parsed<T extends ParsedNode | Pair<ParsedNode, ParsedNode | null> = ParsedNode> extends YAMLSeq<T> {
items: T[];
@@ -56,5 +56,5 @@ export declare class YAMLSeq<T = unknown> extends Collection {
set(key: unknown, value: T): void;
toJSON(_?: unknown, ctx?: ToJSContext): unknown[];
toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
static from(schema: Schema, obj: unknown, ctx: CreateNodeContext): YAMLSeq<unknown>;
static from(schema: Schema, obj: unknown, ctx: CreateNodeContext): YAMLSeq;
}


@@ -1,4 +1,4 @@
import type { Pair } from './Pair.js';
import { ToJSContext } from './toJS.js';
import type { MapLike } from './YAMLMap.js';
import type { Pair } from './Pair';
import type { ToJSContext } from './toJS';
import type { MapLike } from './YAMLMap';
export declare function addPairToJSMap(ctx: ToJSContext | undefined, map: MapLike, { key, value }: Pair): MapLike;


@@ -1,24 +1,17 @@
'use strict';
var log = require('../log.js');
var merge = require('../schema/yaml-1.1/merge.js');
var stringify = require('../stringify/stringify.js');
var identity = require('./identity.js');
var Scalar = require('./Scalar.js');
var toJS = require('./toJS.js');
const MERGE_KEY = '<<';
function addPairToJSMap(ctx, map, { key, value }) {
if (ctx?.doc.schema.merge && isMergeKey(key)) {
value = identity.isAlias(value) ? value.resolve(ctx.doc) : value;
if (identity.isSeq(value))
for (const it of value.items)
mergeToJSMap(ctx, map, it);
else if (Array.isArray(value))
for (const it of value)
mergeToJSMap(ctx, map, it);
else
mergeToJSMap(ctx, map, value);
}
if (identity.isNode(key) && key.addToJSMap)
key.addToJSMap(ctx, map, value);
// TODO: Should drop this special case for bare << handling
else if (merge.isMergeKey(ctx, key))
merge.addMergeToJSMap(ctx, map, value);
else {
const jsKey = toJS.toJS(key, '', ctx);
if (map instanceof Map) {
@@ -43,44 +36,10 @@ function addPairToJSMap(ctx, map, { key, value }) {
}
return map;
}
const isMergeKey = (key) => key === MERGE_KEY ||
(identity.isScalar(key) &&
key.value === MERGE_KEY &&
(!key.type || key.type === Scalar.Scalar.PLAIN));
// If the value associated with a merge key is a single mapping node, each of
// its key/value pairs is inserted into the current mapping, unless the key
// already exists in it. If the value associated with the merge key is a
// sequence, then this sequence is expected to contain mapping nodes and each
// of these nodes is merged in turn according to its order in the sequence.
// Keys in mapping nodes earlier in the sequence override keys specified in
// later mapping nodes. -- http://yaml.org/type/merge.html
function mergeToJSMap(ctx, map, value) {
const source = ctx && identity.isAlias(value) ? value.resolve(ctx.doc) : value;
if (!identity.isMap(source))
throw new Error('Merge sources must be maps or map aliases');
const srcMap = source.toJSON(null, ctx, Map);
for (const [key, value] of srcMap) {
if (map instanceof Map) {
if (!map.has(key))
map.set(key, value);
}
else if (map instanceof Set) {
map.add(key);
}
else if (!Object.prototype.hasOwnProperty.call(map, key)) {
Object.defineProperty(map, key, {
value,
writable: true,
enumerable: true,
configurable: true
});
}
}
return map;
}
function stringifyKey(key, jsKey, ctx) {
if (jsKey === null)
return '';
// eslint-disable-next-line @typescript-eslint/no-base-to-string
if (typeof jsKey !== 'object')
return String(jsKey);
if (identity.isNode(key) && ctx?.doc) {
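With the hunks above, bare `<<` handling moves out of `addPairToJSMap` and into an `addToJSMap` hook supplied by the merge tag (`schema/yaml-1.1/merge.js`), with `isMergeKey`/`addMergeToJSMap` covering the remaining special case. The observable behaviour described in the removed comment block should be unchanged; a quick sketch, assuming `merge: true` is still accepted as a parse option, as the CLI's `--merge` flag and the `Composer({ merge })` call earlier in this diff suggest:

```js
import { parse } from 'yaml'

const src = `
base: &base { x: 1, y: 2 }
derived:
  <<: *base
  y: 3
`

// Pairs from *base are folded into `derived`; keys written out explicitly
// in `derived` take precedence over merged ones.
console.log(parse(src, { merge: true }).derived)
// → { x: 1, y: 3 }
```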


@@ -1,10 +1,10 @@
import type { Document } from '../doc/Document.js';
import type { Alias } from './Alias.js';
import type { Node } from './Node.js';
import type { Pair } from './Pair.js';
import type { Scalar } from './Scalar.js';
import type { YAMLMap } from './YAMLMap.js';
import type { YAMLSeq } from './YAMLSeq.js';
import type { Document } from '../doc/Document';
import type { Alias } from './Alias';
import type { Node } from './Node';
import type { Pair } from './Pair';
import type { Scalar } from './Scalar';
import type { YAMLMap } from './YAMLMap';
import type { YAMLSeq } from './YAMLSeq';
export declare const ALIAS: unique symbol;
export declare const DOC: unique symbol;
export declare const MAP: unique symbol;
@@ -13,7 +13,7 @@ export declare const SCALAR: unique symbol;
export declare const SEQ: unique symbol;
export declare const NODE_TYPE: unique symbol;
export declare const isAlias: (node: any) => node is Alias;
export declare const isDocument: <T extends Node = Node>(node: any) => node is Document<T, true>;
export declare const isDocument: <T extends Node = Node>(node: any) => node is Document<T>;
export declare const isMap: <K = unknown, V = unknown>(node: any) => node is YAMLMap<K, V>;
export declare const isPair: <K = unknown, V = unknown>(node: any) => node is Pair<K, V>;
export declare const isScalar: <T = unknown>(node: any) => node is Scalar<T>;


@@ -1,5 +1,5 @@
import type { Document } from '../doc/Document.js';
import type { Node } from './Node.js';
import type { Document } from '../doc/Document';
import type { Node } from './Node';
export interface AnchorData {
aliasCount: number;
count: number;
@@ -7,6 +7,8 @@ export interface AnchorData {
}
export interface ToJSContext {
anchors: Map<Node, AnchorData>;
/** Cached anchor and alias nodes in the order they occur in the document */
aliasResolveCache?: Node[];
doc: Document<Node, boolean>;
keep: boolean;
mapAsMap: boolean;

Some files were not shown because too many files have changed in this diff.