node_modules: upgrade

Dawid Dziurla
2025-06-14 23:18:18 +02:00
parent 948e4a4fb8
commit de40b1f21f
268 changed files with 2150 additions and 3858 deletions

View File

@@ -23,10 +23,23 @@ function resolveCollection(CN, ctx, token, onError, tagName, tag) {
coll.tag = tagName;
return coll;
}
function composeCollection(CN, ctx, token, tagToken, onError) {
function composeCollection(CN, ctx, token, props, onError) {
const tagToken = props.tag;
const tagName = !tagToken
? null
: ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));
if (token.type === 'block-seq') {
const { anchor, newlineAfterProp: nl } = props;
const lastProp = anchor && tagToken
? anchor.offset > tagToken.offset
? anchor
: tagToken
: (anchor ?? tagToken);
if (lastProp && (!nl || nl.offset < lastProp.offset)) {
const message = 'Missing newline after block sequence props';
onError(lastProp, 'MISSING_CHAR', message);
}
}
const expType = token.type === 'block-map'
? 'map'
: token.type === 'block-seq'
@@ -40,8 +53,7 @@ function composeCollection(CN, ctx, token, tagToken, onError) {
!tagName ||
tagName === '!' ||
(tagName === YAMLMap.tagName && expType === 'map') ||
(tagName === YAMLSeq.tagName && expType === 'seq') ||
!expType) {
(tagName === YAMLSeq.tagName && expType === 'seq')) {
return resolveCollection(CN, ctx, token, onError, tagName);
}
let tag = ctx.schema.tags.find(t => t.tag === tagName && t.collection === expType);
@@ -52,8 +64,8 @@ function composeCollection(CN, ctx, token, tagToken, onError) {
tag = kt;
}
else {
if (kt?.collection) {
onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection}`, true);
if (kt) {
onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection ?? 'scalar'}`, true);
}
else {
onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);
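
composeCollection now receives the full props object so it can verify that an anchor or tag attached to a block sequence is followed by a newline before the first `- ` entry. A minimal sketch of an input that this check appears intended to flag — this is my reading of the new code, not taken from the diff, and it assumes these vendored files are the `yaml` npm package with its public parseDocument export:

// hypothetical usage sketch; the triggering input is an assumption
import { parseDocument } from 'yaml'

const doc = parseDocument('&items - a\n- b\n')
// with the new check, doc.errors should contain a MISSING_CHAR error:
// "Missing newline after block sequence props"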

View File

@@ -7,6 +7,7 @@ function composeDoc(options, directives, { offset, start, value, end }, onError)
const opts = Object.assign({ _directives: directives }, options);
const doc = new Document(undefined, opts);
const ctx = {
atKey: false,
atRoot: true,
directives: doc.directives,
options: doc.options,
@@ -17,6 +18,7 @@ function composeDoc(options, directives, { offset, start, value, end }, onError)
next: value ?? end?.[0],
offset,
onError,
parentIndent: 0,
startOnNewline: true
});
if (props.found) {

View File

@@ -1,4 +1,5 @@
import { Alias } from '../nodes/Alias.js';
import { isScalar } from '../nodes/identity.js';
import { composeCollection } from './compose-collection.js';
import { composeScalar } from './compose-scalar.js';
import { resolveEnd } from './resolve-end.js';
@@ -6,6 +7,7 @@ import { emptyScalarPosition } from './util-empty-scalar-position.js';
const CN = { composeNode, composeEmptyNode };
function composeNode(ctx, token, props, onError) {
const atKey = ctx.atKey;
const { spaceBefore, comment, anchor, tag } = props;
let node;
let isSrcToken = true;
@@ -26,7 +28,7 @@ function composeNode(ctx, token, props, onError) {
case 'block-map':
case 'block-seq':
case 'flow-collection':
node = composeCollection(CN, ctx, token, tag, onError);
node = composeCollection(CN, ctx, token, props, onError);
if (anchor)
node.anchor = anchor.source.substring(1);
break;
@@ -41,6 +43,14 @@ function composeNode(ctx, token, props, onError) {
}
if (anchor && node.anchor === '')
onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
if (atKey &&
ctx.options.stringKeys &&
(!isScalar(node) ||
typeof node.value !== 'string' ||
(node.tag && node.tag !== 'tag:yaml.org,2002:str'))) {
const msg = 'With stringKeys, all keys must be strings';
onError(tag ?? token, 'NON_STRING_KEY', msg);
}
if (spaceBefore)
node.spaceBefore = true;
if (comment) {
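
The new atKey flag and the stringKeys option work together: while a mapping key is being composed, any key that does not resolve to a plain string is reported. A short sketch of how this surfaces through the public API (the option and error names come from this diff; the exact outputs are my assumption):

import { parseDocument } from 'yaml'

// a flow collection used as a key is not a string, so it is reported
const bad = parseDocument('{ [x]: 2 }', { stringKeys: true })
// bad.errors should include a NON_STRING_KEY error:
// "With stringKeys, all keys must be strings"

// scalar keys are forced to resolve with the string tag
const ok = parseDocument('1: one', { stringKeys: true })
ok.toJS({ mapAsMap: true })  // should give Map { '1' => 'one' } rather than Map { 1 => 'one' }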

View File

@@ -1,20 +1,25 @@
import { SCALAR, isScalar } from '../nodes/identity.js';
import { isScalar, SCALAR } from '../nodes/identity.js';
import { Scalar } from '../nodes/Scalar.js';
import { resolveBlockScalar } from './resolve-block-scalar.js';
import { resolveFlowScalar } from './resolve-flow-scalar.js';
function composeScalar(ctx, token, tagToken, onError) {
const { value, type, comment, range } = token.type === 'block-scalar'
? resolveBlockScalar(token, ctx.options.strict, onError)
? resolveBlockScalar(ctx, token, onError)
: resolveFlowScalar(token, ctx.options.strict, onError);
const tagName = tagToken
? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))
: null;
const tag = tagToken && tagName
? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)
: token.type === 'scalar'
? findScalarTagByTest(ctx, value, token, onError)
: ctx.schema[SCALAR];
let tag;
if (ctx.options.stringKeys && ctx.atKey) {
tag = ctx.schema[SCALAR];
}
else if (tagName)
tag = findScalarTagByName(ctx.schema, value, tagName, tagToken, onError);
else if (token.type === 'scalar')
tag = findScalarTagByTest(ctx, value, token, onError);
else
tag = ctx.schema[SCALAR];
let scalar;
try {
const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options);
@@ -62,8 +67,9 @@ function findScalarTagByName(schema, value, tagName, tagToken, onError) {
onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');
return schema[SCALAR];
}
function findScalarTagByTest({ directives, schema }, value, token, onError) {
const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[SCALAR];
function findScalarTagByTest({ atKey, directives, schema }, value, token, onError) {
const tag = schema.tags.find(tag => (tag.default === true || (atKey && tag.default === 'key')) &&
tag.test?.test(value)) || schema[SCALAR];
if (schema.compat) {
const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ??
schema[SCALAR];

View File

@@ -21,6 +21,7 @@ function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, ta
next: key ?? sep?.[0],
offset,
onError,
parentIndent: bm.indent,
startOnNewline: true
});
const implicitKey = !keyProps.found;
@@ -41,7 +42,7 @@ function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, ta
}
continue;
}
if (keyProps.hasNewlineAfterProp || containsNewline(key)) {
if (keyProps.newlineAfterProp || containsNewline(key)) {
onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');
}
}
@@ -49,12 +50,14 @@ function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, ta
onError(offset, 'BAD_INDENT', startColMsg);
}
// key value
ctx.atKey = true;
const keyStart = keyProps.end;
const keyNode = key
? composeNode(ctx, key, keyProps, onError)
: composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);
if (ctx.schema.compat)
flowIndentCheck(bm.indent, key, onError);
ctx.atKey = false;
if (mapIncludes(ctx, map.items, keyNode))
onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
// value properties
@@ -63,6 +66,7 @@ function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, ta
next: value,
offset: keyNode.range[2],
onError,
parentIndent: bm.indent,
startOnNewline: !key || key.type === 'block-scalar'
});
offset = valueProps.end;

View File

@@ -1,8 +1,8 @@
import { Scalar } from '../nodes/Scalar.js';
function resolveBlockScalar(scalar, strict, onError) {
function resolveBlockScalar(ctx, scalar, onError) {
const start = scalar.offset;
const header = parseBlockScalarHeader(scalar, strict, onError);
const header = parseBlockScalarHeader(scalar, ctx.options.strict, onError);
if (!header)
return { value: '', type: null, comment: '', range: [start, start, start] };
const type = header.mode === '>' ? Scalar.BLOCK_FOLDED : Scalar.BLOCK_LITERAL;
@@ -44,6 +44,10 @@ function resolveBlockScalar(scalar, strict, onError) {
if (header.indent === 0)
trimIndent = indent.length;
contentStart = i;
if (trimIndent === 0 && !ctx.atRoot) {
const message = 'Block scalar values in collections must be indented';
onError(offset, 'BAD_INDENT', message);
}
break;
}
offset += indent.length + content.length + 1;

View File

@@ -7,6 +7,8 @@ function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, ta
const seq = new NodeClass(ctx.schema);
if (ctx.atRoot)
ctx.atRoot = false;
if (ctx.atKey)
ctx.atKey = false;
let offset = bs.offset;
let commentEnd = null;
for (const { start, value } of bs.items) {
@@ -15,6 +17,7 @@ function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, ta
next: value,
offset,
onError,
parentIndent: bs.indent,
startOnNewline: true
});
if (!props.found) {

View File

@@ -18,6 +18,8 @@ function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onErr
const atRoot = ctx.atRoot;
if (atRoot)
ctx.atRoot = false;
if (ctx.atKey)
ctx.atKey = false;
let offset = fc.offset + fc.start.source.length;
for (let i = 0; i < fc.items.length; ++i) {
const collItem = fc.items[i];
@@ -28,6 +30,7 @@ function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onErr
next: key ?? sep?.[0],
offset,
onError,
parentIndent: fc.indent,
startOnNewline: false
});
if (!props.found) {
@@ -96,12 +99,14 @@ function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onErr
else {
// item is a key+value pair
// key value
ctx.atKey = true;
const keyStart = props.end;
const keyNode = key
? composeNode(ctx, key, props, onError)
: composeEmptyNode(ctx, keyStart, start, null, props, onError);
if (isBlock(key))
onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);
ctx.atKey = false;
// value properties
const valueProps = resolveProps(sep ?? [], {
flow: fcName,
@@ -109,6 +114,7 @@ function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onErr
next: value,
offset: keyNode.range[2],
onError,
parentIndent: fc.indent,
startOnNewline: false
});
if (valueProps.found) {
@@ -161,6 +167,8 @@ function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onErr
const map = new YAMLMap(ctx.schema);
map.flow = true;
map.items.push(pair);
const endRange = (valueNode ?? keyNode).range;
map.range = [keyNode.range[0], endRange[1], endRange[2]];
coll.items.push(map);
}
offset = valueNode ? valueNode.range[2] : valueProps.end;

View File

@@ -84,7 +84,7 @@ function foldLines(source) {
first = new RegExp('(.*?)(?<![ \t])[ \t]*\r?\n', 'sy');
line = new RegExp('[ \t]*(.*?)(?:(?<![ \t])[ \t]*)?\r?\n', 'sy');
}
catch (_) {
catch {
first = /(.*?)[ \t]*\r?\n/sy;
line = /[ \t]*(.*?)[ \t]*\r?\n/sy;
}
@@ -189,19 +189,19 @@ function foldNewline(source, offset) {
return { fold, offset };
}
const escapeCodes = {
'0': '\0',
a: '\x07',
b: '\b',
e: '\x1b',
f: '\f',
n: '\n',
r: '\r',
t: '\t',
v: '\v',
N: '\u0085',
_: '\u00a0',
L: '\u2028',
P: '\u2029',
'0': '\0', // null character
a: '\x07', // bell character
b: '\b', // backspace
e: '\x1b', // escape character
f: '\f', // form feed
n: '\n', // line feed
r: '\r', // carriage return
t: '\t', // horizontal tab
v: '\v', // vertical tab
N: '\u0085', // Unicode next line
_: '\u00a0', // Unicode non-breaking space
L: '\u2028', // Unicode line separator
P: '\u2029', // Unicode paragraph separator
' ': ' ',
'"': '"',
'/': '/',

View File

@@ -1,14 +1,15 @@
function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) {
function resolveProps(tokens, { flow, indicator, next, offset, onError, parentIndent, startOnNewline }) {
let spaceBefore = false;
let atNewline = startOnNewline;
let hasSpace = startOnNewline;
let comment = '';
let commentSep = '';
let hasNewline = false;
let hasNewlineAfterProp = false;
let reqSpace = false;
let tab = null;
let anchor = null;
let tag = null;
let newlineAfterProp = null;
let comma = null;
let found = null;
let start = null;
@@ -20,16 +21,22 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
reqSpace = false;
}
if (tab) {
if (atNewline && token.type !== 'comment' && token.type !== 'newline') {
onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
}
tab = null;
}
switch (token.type) {
case 'space':
// At the doc level, tabs at line start may be parsed
// as leading white space rather than indentation.
// In a flow collection, only the parser handles indent.
if (!flow &&
atNewline &&
indicator !== 'doc-start' &&
token.source[0] === '\t')
onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
(indicator !== 'doc-start' || next?.type !== 'flow-collection') &&
token.source.includes('\t')) {
tab = token;
}
hasSpace = true;
break;
case 'comment': {
@@ -48,7 +55,7 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
if (atNewline) {
if (comment)
comment += token.source;
else
else if (!found || indicator !== 'seq-item-ind')
spaceBefore = true;
}
else
@@ -56,7 +63,7 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
atNewline = true;
hasNewline = true;
if (anchor || tag)
hasNewlineAfterProp = true;
newlineAfterProp = token;
hasSpace = true;
break;
case 'anchor':
@@ -65,8 +72,7 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
if (token.source.endsWith(':'))
onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);
anchor = token;
if (start === null)
start = token.offset;
start ?? (start = token.offset);
atNewline = false;
hasSpace = false;
reqSpace = true;
@@ -75,8 +81,7 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
if (tag)
onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');
tag = token;
if (start === null)
start = token.offset;
start ?? (start = token.offset);
atNewline = false;
hasSpace = false;
reqSpace = true;
@@ -89,7 +94,8 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
if (found)
onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`);
found = token;
atNewline = false;
atNewline =
indicator === 'seq-item-ind' || indicator === 'explicit-key-ind';
hasSpace = false;
break;
case 'comma':
@@ -115,17 +121,23 @@ function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnN
next.type !== 'space' &&
next.type !== 'newline' &&
next.type !== 'comma' &&
(next.type !== 'scalar' || next.source !== ''))
(next.type !== 'scalar' || next.source !== '')) {
onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
}
if (tab &&
((atNewline && tab.indent <= parentIndent) ||
next?.type === 'block-map' ||
next?.type === 'block-seq'))
onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
return {
comma,
found,
spaceBefore,
comment,
hasNewline,
hasNewlineAfterProp,
anchor,
tag,
newlineAfterProp,
end,
start: start ?? end
};

View File

@@ -1,7 +1,6 @@
function emptyScalarPosition(offset, before, pos) {
if (before) {
if (pos === null)
pos = before.length;
pos ?? (pos = before.length);
for (let i = pos - 1; i >= 0; --i) {
let st = before[i];
switch (st.type) {

View File

@@ -6,11 +6,7 @@ function mapIncludes(ctx, items, search) {
return false;
const isEqual = typeof uniqueKeys === 'function'
? uniqueKeys
: (a, b) => a === b ||
(isScalar(a) &&
isScalar(b) &&
a.value === b.value &&
!(a.value === '<<' && ctx.schema.merge));
: (a, b) => a === b || (isScalar(a) && isScalar(b) && a.value === b.value);
return items.some(pair => isEqual(pair.key, search));
}

View File

@@ -35,6 +35,7 @@ class Document {
logLevel: 'warn',
prettyErrors: true,
strict: true,
stringKeys: false,
uniqueKeys: true,
version: '1.2'
}, options);
@@ -258,7 +259,7 @@ class Document {
this.directives.yaml.version = '1.1';
else
this.directives = new Directives({ version: '1.1' });
opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };
opt = { resolveKnownTags: false, schema: 'yaml-1.1' };
break;
case '1.2':
case 'next':
@@ -266,7 +267,7 @@ class Document {
this.directives.yaml.version = version;
else
this.directives = new Directives({ version });
opt = { merge: false, resolveKnownTags: true, schema: 'core' };
opt = { resolveKnownTags: true, schema: 'core' };
break;
case null:
if (this.directives)

View File

@@ -39,8 +39,7 @@ function createNodeAnchors(doc, prefix) {
return {
onAnchor: (source) => {
aliasObjects.push(source);
if (!prevAnchors)
prevAnchors = anchorNames(doc);
prevAnchors ?? (prevAnchors = anchorNames(doc));
const anchor = findNewAnchor(prefix, prevAnchors);
prevAnchors.add(anchor);
return anchor;

View File

@@ -11,6 +11,7 @@ function applyReviver(reviver, obj, key, val) {
for (let i = 0, len = val.length; i < len; ++i) {
const v0 = val[i];
const v1 = applyReviver(reviver, val, String(i), v0);
// eslint-disable-next-line @typescript-eslint/no-array-delete
if (v1 === undefined)
delete val[i];
else if (v1 !== v0)

View File

@@ -38,8 +38,7 @@ function createNode(value, tagName, ctx) {
if (aliasDuplicateObjects && value && typeof value === 'object') {
ref = sourceObjects.get(value);
if (ref) {
if (!ref.anchor)
ref.anchor = onAnchor(value);
ref.anchor ?? (ref.anchor = onAnchor(value));
return new Alias(ref.anchor);
}
else {

View File

@@ -4,12 +4,7 @@ function debug(logLevel, ...messages) {
}
function warn(logLevel, warning) {
if (logLevel === 'debug' || logLevel === 'warn') {
// https://github.com/typescript-eslint/typescript-eslint/issues/7478
// eslint-disable-next-line @typescript-eslint/prefer-optional-chain
if (typeof process !== 'undefined' && process.emitWarning)
process.emitWarning(warning);
else
console.warn(warning);
console.warn(warning);
}
}

View File

@@ -1,21 +0,0 @@
/******************************************************************************
Copyright (c) Microsoft Corporation.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */
function __classPrivateFieldGet(receiver, state, kind, f) {
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
}
export { __classPrivateFieldGet };

View File

@@ -1,6 +1,6 @@
import { anchorIsValid } from '../doc/anchors.js';
import { visit } from '../visit.js';
import { ALIAS, isAlias, isCollection, isPair } from './identity.js';
import { ALIAS, isAlias, isCollection, isPair, hasAnchor } from './identity.js';
import { NodeBase } from './Node.js';
import { toJS } from './toJS.js';
@@ -18,23 +18,36 @@ class Alias extends NodeBase {
* Resolve the value of this alias within `doc`, finding the last
* instance of the `source` anchor before this node.
*/
resolve(doc) {
resolve(doc, ctx) {
let nodes;
if (ctx?.aliasResolveCache) {
nodes = ctx.aliasResolveCache;
}
else {
nodes = [];
visit(doc, {
Node: (_key, node) => {
if (isAlias(node) || hasAnchor(node))
nodes.push(node);
}
});
if (ctx)
ctx.aliasResolveCache = nodes;
}
let found = undefined;
visit(doc, {
Node: (_key, node) => {
if (node === this)
return visit.BREAK;
if (node.anchor === this.source)
found = node;
}
});
for (const node of nodes) {
if (node === this)
break;
if (node.anchor === this.source)
found = node;
}
return found;
}
toJSON(_arg, ctx) {
if (!ctx)
return { source: this.source };
const { anchors, doc, maxAliasCount } = ctx;
const source = this.resolve(doc);
const source = this.resolve(doc, ctx);
if (!source) {
const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
throw new ReferenceError(msg);
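
Alias resolution during toJSON() now collects the anchored and alias nodes once and stores them on the context as aliasResolveCache, instead of re-walking the whole document for every alias. A rough usage-level illustration; the behaviour described in the comments is my inference from this diff:

import { parseDocument } from 'yaml'

const doc = parseDocument('a: &x 1\nb: [*x, *x, *x]\n')
doc.toJS()  // each *x reuses the cached node list; only the first alias pays for a full visit()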

View File

@@ -143,6 +143,5 @@ class Collection extends NodeBase {
}
}
}
Collection.maxFlowStringSingleLineLength = 60;
export { Collection, collectionFromPath, isEmptyPath };

View File

@@ -1,7 +1,7 @@
import { stringifyCollection } from '../stringify/stringifyCollection.js';
import { addPairToJSMap } from './addPairToJSMap.js';
import { Collection } from './Collection.js';
import { isPair, isScalar, MAP } from './identity.js';
import { MAP, isPair, isScalar } from './identity.js';
import { Pair, createPair } from './Pair.js';
import { isScalarValue } from './Scalar.js';

View File

@@ -1,22 +1,15 @@
import { warn } from '../log.js';
import { isMergeKey, addMergeToJSMap } from '../schema/yaml-1.1/merge.js';
import { createStringifyContext } from '../stringify/stringify.js';
import { isAlias, isSeq, isScalar, isMap, isNode } from './identity.js';
import { Scalar } from './Scalar.js';
import { isNode } from './identity.js';
import { toJS } from './toJS.js';
const MERGE_KEY = '<<';
function addPairToJSMap(ctx, map, { key, value }) {
if (ctx?.doc.schema.merge && isMergeKey(key)) {
value = isAlias(value) ? value.resolve(ctx.doc) : value;
if (isSeq(value))
for (const it of value.items)
mergeToJSMap(ctx, map, it);
else if (Array.isArray(value))
for (const it of value)
mergeToJSMap(ctx, map, it);
else
mergeToJSMap(ctx, map, value);
}
if (isNode(key) && key.addToJSMap)
key.addToJSMap(ctx, map, value);
// TODO: Should drop this special case for bare << handling
else if (isMergeKey(ctx, key))
addMergeToJSMap(ctx, map, value);
else {
const jsKey = toJS(key, '', ctx);
if (map instanceof Map) {
@@ -41,44 +34,10 @@ function addPairToJSMap(ctx, map, { key, value }) {
}
return map;
}
const isMergeKey = (key) => key === MERGE_KEY ||
(isScalar(key) &&
key.value === MERGE_KEY &&
(!key.type || key.type === Scalar.PLAIN));
// If the value associated with a merge key is a single mapping node, each of
// its key/value pairs is inserted into the current mapping, unless the key
// already exists in it. If the value associated with the merge key is a
// sequence, then this sequence is expected to contain mapping nodes and each
// of these nodes is merged in turn according to its order in the sequence.
// Keys in mapping nodes earlier in the sequence override keys specified in
// later mapping nodes. -- http://yaml.org/type/merge.html
function mergeToJSMap(ctx, map, value) {
const source = ctx && isAlias(value) ? value.resolve(ctx.doc) : value;
if (!isMap(source))
throw new Error('Merge sources must be maps or map aliases');
const srcMap = source.toJSON(null, ctx, Map);
for (const [key, value] of srcMap) {
if (map instanceof Map) {
if (!map.has(key))
map.set(key, value);
}
else if (map instanceof Set) {
map.add(key);
}
else if (!Object.prototype.hasOwnProperty.call(map, key)) {
Object.defineProperty(map, key, {
value,
writable: true,
enumerable: true,
configurable: true
});
}
}
return map;
}
function stringifyKey(key, jsKey, ctx) {
if (jsKey === null)
return '';
// eslint-disable-next-line @typescript-eslint/no-base-to-string
if (typeof jsKey !== 'object')
return String(jsKey);
if (isNode(key) && ctx?.doc) {

View File

@@ -18,7 +18,7 @@ function resolveAsScalar(token, strict = true, onError) {
case 'double-quoted-scalar':
return resolveFlowScalar(token, strict, _onError);
case 'block-scalar':
return resolveBlockScalar(token, strict, _onError);
return resolveBlockScalar({ options: { strict } }, token, _onError);
}
}
return null;

View File

@@ -79,11 +79,11 @@ function isEmpty(ch) {
return false;
}
}
const hexDigits = '0123456789ABCDEFabcdef'.split('');
const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split('');
const invalidFlowScalarChars = ',[]{}'.split('');
const invalidAnchorChars = ' ,[]{}\n\r\t'.split('');
const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);
const hexDigits = new Set('0123456789ABCDEFabcdef');
const tagChars = new Set("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()");
const flowIndicatorChars = new Set(',[]{}');
const invalidAnchorChars = new Set(' ,[]{}\n\r\t');
const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.has(ch);
/**
* Splits an input string into lexical tokens, i.e. smaller strings that are
* easily identifiable by `tokens.tokenType()`.
@@ -149,6 +149,8 @@ class Lexer {
*/
*lex(source, incomplete = false) {
if (source) {
if (typeof source !== 'string')
throw TypeError('source is not a string');
this.buffer = this.buffer ? this.buffer + source : source;
this.lineEndPos = null;
}
@@ -248,11 +250,16 @@ class Lexer {
}
if (line[0] === '%') {
let dirEnd = line.length;
const cs = line.indexOf('#');
if (cs !== -1) {
let cs = line.indexOf('#');
while (cs !== -1) {
const ch = line[cs - 1];
if (ch === ' ' || ch === '\t')
if (ch === ' ' || ch === '\t') {
dirEnd = cs - 1;
break;
}
else {
cs = line.indexOf('#', cs + 1);
}
}
while (true) {
const ch = line[dirEnd - 1];
@@ -283,15 +290,11 @@ class Lexer {
if (!this.atEnd && !this.hasChars(4))
return this.setNext('line-start');
const s = this.peek(3);
if (s === '---' && isEmpty(this.charAt(3))) {
if ((s === '---' || s === '...') && isEmpty(this.charAt(3))) {
yield* this.pushCount(3);
this.indentValue = 0;
this.indentNext = 0;
return 'doc';
}
else if (s === '...' && isEmpty(this.charAt(3))) {
yield* this.pushCount(3);
return 'stream';
return s === '---' ? 'doc' : 'stream';
}
}
this.indentValue = yield* this.pushSpaces(false);
@@ -518,8 +521,10 @@ class Lexer {
if (indent >= this.indentNext) {
if (this.blockScalarIndent === -1)
this.indentNext = indent;
else
this.indentNext += this.blockScalarIndent;
else {
this.indentNext =
this.blockScalarIndent + (this.indentNext === 0 ? 1 : this.indentNext);
}
do {
const cs = this.continueScalar(nl + 1);
if (cs === -1)
@@ -532,14 +537,25 @@ class Lexer {
nl = this.buffer.length;
}
}
if (!this.blockScalarKeep) {
// Trailing insufficiently indented tabs are invalid.
// To catch that during parsing, we include them in the block scalar value.
let i = nl + 1;
ch = this.buffer[i];
while (ch === ' ')
ch = this.buffer[++i];
if (ch === '\t') {
while (ch === '\t' || ch === ' ' || ch === '\r' || ch === '\n')
ch = this.buffer[++i];
nl = i - 1;
}
else if (!this.blockScalarKeep) {
do {
let i = nl - 1;
let ch = this.buffer[i];
if (ch === '\r')
ch = this.buffer[--i];
const lastChar = i; // Drop the line if last char not more indented
while (ch === ' ' || ch === '\t')
while (ch === ' ')
ch = this.buffer[--i];
if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar)
nl = i;
@@ -559,7 +575,7 @@ class Lexer {
while ((ch = this.buffer[++i])) {
if (ch === ':') {
const next = this.buffer[i + 1];
if (isEmpty(next) || (inFlow && next === ','))
if (isEmpty(next) || (inFlow && flowIndicatorChars.has(next)))
break;
end = i;
}
@@ -574,7 +590,7 @@ class Lexer {
else
end = i;
}
if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))
if (next === '#' || (inFlow && flowIndicatorChars.has(next)))
break;
if (ch === '\n') {
const cs = this.continueScalar(i + 1);
@@ -584,7 +600,7 @@ class Lexer {
}
}
else {
if (inFlow && invalidFlowScalarChars.includes(ch))
if (inFlow && flowIndicatorChars.has(ch))
break;
end = i;
}
@@ -629,7 +645,7 @@ class Lexer {
case ':': {
const inFlow = this.flowLevel > 0;
const ch1 = this.charAt(1);
if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) {
if (isEmpty(ch1) || (inFlow && flowIndicatorChars.has(ch1))) {
if (!inFlow)
this.indentNext = this.indentValue + 1;
else if (this.flowKey)
@@ -654,11 +670,11 @@ class Lexer {
let i = this.pos + 1;
let ch = this.buffer[i];
while (ch) {
if (tagChars.includes(ch))
if (tagChars.has(ch))
ch = this.buffer[++i];
else if (ch === '%' &&
hexDigits.includes(this.buffer[i + 1]) &&
hexDigits.includes(this.buffer[i + 2])) {
hexDigits.has(this.buffer[i + 1]) &&
hexDigits.has(this.buffer[i + 2])) {
ch = this.buffer[(i += 3)];
}
else

View File

@@ -304,7 +304,7 @@ class Parser {
}
else {
Object.assign(it, { key: token, sep: [] });
this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');
this.onKeyLine = !it.explicitKey;
return;
}
break;
@@ -513,7 +513,10 @@ class Parser {
return;
}
if (this.indent >= map.indent) {
const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep;
const atMapIndent = !this.onKeyLine && this.indent === map.indent;
const atNextItem = atMapIndent &&
(it.sep || it.explicitKey) &&
this.type !== 'seq-item-ind';
// For empty nodes, assign newline-separated not indented empty tokens to following node
let start = [];
if (atNextItem && it.sep && !it.value) {
@@ -553,25 +556,26 @@ class Parser {
}
return;
case 'explicit-key-ind':
if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) {
if (!it.sep && !it.explicitKey) {
it.start.push(this.sourceToken);
it.explicitKey = true;
}
else if (atNextItem || it.value) {
start.push(this.sourceToken);
map.items.push({ start });
map.items.push({ start, explicitKey: true });
}
else {
this.stack.push({
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start: [this.sourceToken] }]
items: [{ start: [this.sourceToken], explicitKey: true }]
});
}
this.onKeyLine = true;
return;
case 'map-value-ind':
if (includesToken(it.start, 'explicit-key-ind')) {
if (it.explicitKey) {
if (!it.sep) {
if (includesToken(it.start, 'newline')) {
Object.assign(it, { key: null, sep: [this.sourceToken] });
@@ -604,7 +608,9 @@ class Parser {
const sep = it.sep;
sep.push(this.sourceToken);
// @ts-expect-error type guard is wrong here
delete it.key, delete it.sep;
delete it.key;
// @ts-expect-error type guard is wrong here
delete it.sep;
this.stack.push({
type: 'block-map',
offset: this.offset,
@@ -662,9 +668,20 @@ class Parser {
default: {
const bv = this.startBlockValue(map);
if (bv) {
if (atNextItem &&
bv.type !== 'block-seq' &&
includesToken(it.start, 'explicit-key-ind')) {
if (bv.type === 'block-seq') {
if (!it.explicitKey &&
it.sep &&
!includesToken(it.sep, 'newline')) {
yield* this.pop({
type: 'error',
offset: this.offset,
message: 'Unexpected block-seq-ind on same line with key',
source: this.source
});
return;
}
}
else if (atMapIndent) {
map.items.push({ start });
}
this.stack.push(bv);
@@ -885,7 +902,7 @@ class Parser {
type: 'block-map',
offset: this.offset,
indent: this.indent,
items: [{ start }]
items: [{ start, explicitKey: true }]
};
}
case 'map-value-ind': {

View File

@@ -2,6 +2,7 @@ import { Composer } from './compose/composer.js';
import { Document } from './doc/Document.js';
import { prettifyError, YAMLParseError } from './errors.js';
import { warn } from './log.js';
import { isDocument } from './nodes/identity.js';
import { LineCounter } from './parse/line-counter.js';
import { Parser } from './parse/parser.js';
@@ -93,6 +94,8 @@ function stringify(value, replacer, options) {
if (!keepUndefined)
return undefined;
}
if (isDocument(value) && !_replacer)
return value.toString(options);
return new Document(value, _replacer, options).toString(options);
}
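
stringify() now short-circuits when it is handed an existing Document and no replacer, delegating to the document's own toString() instead of wrapping it in a new Document first. A minimal sketch, assuming the public parseDocument/stringify exports of this package:

import { parseDocument, stringify } from 'yaml'

const doc = parseDocument('a: 1\n')
stringify(doc)  // now equivalent to doc.toString(), since no replacer was given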

View File

@@ -12,10 +12,9 @@ class Schema {
: compat
? getTags(null, compat)
: null;
this.merge = !!merge;
this.name = (typeof schema === 'string' && schema) || 'core';
this.knownTags = resolveKnownTags ? coreKnownTags : {};
this.tags = getTags(customTags, this.name);
this.tags = getTags(customTags, this.name, merge);
this.toStringOptions = toStringDefaults ?? null;
Object.defineProperty(this, MAP, { value: map });
Object.defineProperty(this, SCALAR, { value: string });

View File

@@ -5,7 +5,7 @@ const floatNaN = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^(?:[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN))$/,
test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/,
resolve: str => str.slice(-3).toLowerCase() === 'nan'
? NaN
: str[0] === '-'
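
The tightened floatNaN test (changed identically here for the core schema and further below for yaml-1.1) only accepts a leading sign before .inf, so a signed NaN no longer resolves as a float. A hedged before/after sketch; the return values are my reading of the new regular expression, not output from the diff:

import { parse } from 'yaml'

parse('.nan')   // NaN, as before
parse('-.nan')  // should now be the plain string '-.nan', since the sign is only allowed before .inf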

View File

@@ -27,7 +27,7 @@ const jsonScalars = [
identify: value => typeof value === 'boolean',
default: true,
tag: 'tag:yaml.org,2002:bool',
test: /^true|false$/,
test: /^true$|^false$/,
resolve: str => str === 'true',
stringify: stringifyJSON
},

View File

@@ -3,16 +3,17 @@ import { nullTag } from './common/null.js';
import { seq } from './common/seq.js';
import { string } from './common/string.js';
import { boolTag } from './core/bool.js';
import { float, floatExp, floatNaN } from './core/float.js';
import { int, intHex, intOct } from './core/int.js';
import { floatNaN, floatExp, float } from './core/float.js';
import { intOct, intHex, int } from './core/int.js';
import { schema } from './core/schema.js';
import { schema as schema$1 } from './json/schema.js';
import { binary } from './yaml-1.1/binary.js';
import { merge } from './yaml-1.1/merge.js';
import { omap } from './yaml-1.1/omap.js';
import { pairs } from './yaml-1.1/pairs.js';
import { schema as schema$2 } from './yaml-1.1/schema.js';
import { set } from './yaml-1.1/set.js';
import { timestamp, floatTime, intTime } from './yaml-1.1/timestamp.js';
import { timestamp, intTime, floatTime } from './yaml-1.1/timestamp.js';
const schemas = new Map([
['core', schema],
@@ -33,6 +34,7 @@ const tagsByName = {
intOct,
intTime,
map,
merge,
null: nullTag,
omap,
pairs,
@@ -42,13 +44,20 @@ const tagsByName = {
};
const coreKnownTags = {
'tag:yaml.org,2002:binary': binary,
'tag:yaml.org,2002:merge': merge,
'tag:yaml.org,2002:omap': omap,
'tag:yaml.org,2002:pairs': pairs,
'tag:yaml.org,2002:set': set,
'tag:yaml.org,2002:timestamp': timestamp
};
function getTags(customTags, schemaName) {
let tags = schemas.get(schemaName);
function getTags(customTags, schemaName, addMergeTag) {
const schemaTags = schemas.get(schemaName);
if (schemaTags && !customTags) {
return addMergeTag && !schemaTags.includes(merge)
? schemaTags.concat(merge)
: schemaTags.slice();
}
let tags = schemaTags;
if (!tags) {
if (Array.isArray(customTags))
tags = [];
@@ -67,17 +76,21 @@ function getTags(customTags, schemaName) {
else if (typeof customTags === 'function') {
tags = customTags(tags.slice());
}
return tags.map(tag => {
if (typeof tag !== 'string')
return tag;
const tagObj = tagsByName[tag];
if (tagObj)
return tagObj;
const keys = Object.keys(tagsByName)
.map(key => JSON.stringify(key))
.join(', ');
throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`);
});
if (addMergeTag)
tags = tags.concat(merge);
return tags.reduce((tags, tag) => {
const tagObj = typeof tag === 'string' ? tagsByName[tag] : tag;
if (!tagObj) {
const tagName = JSON.stringify(tag);
const keys = Object.keys(tagsByName)
.map(key => JSON.stringify(key))
.join(', ');
throw new Error(`Unknown custom tag ${tagName}; use one of ${keys}`);
}
if (!tags.includes(tagObj))
tags.push(tagObj);
return tags;
}, []);
}
export { coreKnownTags, getTags };

View File

@@ -2,7 +2,7 @@ import { Scalar } from '../../nodes/Scalar.js';
import { stringifyString } from '../../stringify/stringifyString.js';
const binary = {
identify: value => value instanceof Uint8Array,
identify: value => value instanceof Uint8Array, // Buffer inherits from Uint8Array
default: false,
tag: 'tag:yaml.org,2002:binary',
/**
@@ -14,10 +14,7 @@ const binary = {
* document.querySelector('#photo').src = URL.createObjectURL(blob)
*/
resolve(src, onError) {
if (typeof Buffer === 'function') {
return Buffer.from(src, 'base64');
}
else if (typeof atob === 'function') {
if (typeof atob === 'function') {
// On IE 11, atob() can't handle newlines
const str = atob(src.replace(/[\n\r]/g, ''));
const buffer = new Uint8Array(str.length);
@@ -31,15 +28,11 @@ const binary = {
}
},
stringify({ comment, type, value }, ctx, onComment, onChompKeep) {
if (!value)
return '';
const buf = value; // checked earlier by binary.identify()
let str;
if (typeof Buffer === 'function') {
str =
buf instanceof Buffer
? buf.toString('base64')
: Buffer.from(buf.buffer).toString('base64');
}
else if (typeof btoa === 'function') {
if (typeof btoa === 'function') {
let s = '';
for (let i = 0; i < buf.length; ++i)
s += String.fromCharCode(buf[i]);
@@ -48,8 +41,7 @@ const binary = {
else {
throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');
}
if (!type)
type = Scalar.BLOCK_LITERAL;
type ?? (type = Scalar.BLOCK_LITERAL);
if (type !== Scalar.QUOTE_DOUBLE) {
const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);
const n = Math.ceil(str.length / lineWidth);

View File

@@ -18,7 +18,7 @@ const falseTag = {
identify: value => value === false,
default: true,
tag: 'tag:yaml.org,2002:bool',
test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i,
test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/,
resolve: () => new Scalar(false),
stringify: boolStringify
};

View File

@@ -5,7 +5,7 @@ const floatNaN = {
identify: value => typeof value === 'number',
default: true,
tag: 'tag:yaml.org,2002:float',
test: /^[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN)$/,
test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/,
resolve: (str) => str.slice(-3).toLowerCase() === 'nan'
? NaN
: str[0] === '-'

View File

@@ -0,0 +1,64 @@
import { isScalar, isAlias, isSeq, isMap } from '../../nodes/identity.js';
import { Scalar } from '../../nodes/Scalar.js';
// If the value associated with a merge key is a single mapping node, each of
// its key/value pairs is inserted into the current mapping, unless the key
// already exists in it. If the value associated with the merge key is a
// sequence, then this sequence is expected to contain mapping nodes and each
// of these nodes is merged in turn according to its order in the sequence.
// Keys in mapping nodes earlier in the sequence override keys specified in
// later mapping nodes. -- http://yaml.org/type/merge.html
const MERGE_KEY = '<<';
const merge = {
identify: value => value === MERGE_KEY ||
(typeof value === 'symbol' && value.description === MERGE_KEY),
default: 'key',
tag: 'tag:yaml.org,2002:merge',
test: /^<<$/,
resolve: () => Object.assign(new Scalar(Symbol(MERGE_KEY)), {
addToJSMap: addMergeToJSMap
}),
stringify: () => MERGE_KEY
};
const isMergeKey = (ctx, key) => (merge.identify(key) ||
(isScalar(key) &&
(!key.type || key.type === Scalar.PLAIN) &&
merge.identify(key.value))) &&
ctx?.doc.schema.tags.some(tag => tag.tag === merge.tag && tag.default);
function addMergeToJSMap(ctx, map, value) {
value = ctx && isAlias(value) ? value.resolve(ctx.doc) : value;
if (isSeq(value))
for (const it of value.items)
mergeValue(ctx, map, it);
else if (Array.isArray(value))
for (const it of value)
mergeValue(ctx, map, it);
else
mergeValue(ctx, map, value);
}
function mergeValue(ctx, map, value) {
const source = ctx && isAlias(value) ? value.resolve(ctx.doc) : value;
if (!isMap(source))
throw new Error('Merge sources must be maps or map aliases');
const srcMap = source.toJSON(null, ctx, Map);
for (const [key, value] of srcMap) {
if (map instanceof Map) {
if (!map.has(key))
map.set(key, value);
}
else if (map instanceof Set) {
map.add(key);
}
else if (!Object.prototype.hasOwnProperty.call(map, key)) {
Object.defineProperty(map, key, {
value,
writable: true,
enumerable: true,
configurable: true
});
}
}
return map;
}
export { addMergeToJSMap, isMergeKey, merge };
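
Merge-key (<<) handling has moved from a schema-level flag into this dedicated tag with default: 'key'; it is part of the yaml-1.1 schema and can be added to the core schema on demand. A hedged usage sketch — the merge, version and customTags option names are assumed to still be accepted by the public parse API:

import { parse } from 'yaml'

const src = 'base: &b { x: 1 }\nderived:\n  <<: *b\n  y: 2\n'
parse(src, { version: '1.1' })         // the merge tag is included in the 1.1 schema
parse(src, { merge: true })            // opts the core (1.2) schema into the merge tag
parse(src, { customTags: ['merge'] })  // equivalent, via the tag name registered above
// all three should yield { base: { x: 1 }, derived: { x: 1, y: 2 } }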

View File

@@ -1,5 +1,5 @@
import { isSeq, isPair, isMap } from '../../nodes/identity.js';
import { Pair, createPair } from '../../nodes/Pair.js';
import { createPair, Pair } from '../../nodes/Pair.js';
import { Scalar } from '../../nodes/Scalar.js';
import { YAMLSeq } from '../../nodes/YAMLSeq.js';

View File

@@ -6,6 +6,7 @@ import { binary } from './binary.js';
import { trueTag, falseTag } from './bool.js';
import { floatNaN, floatExp, float } from './float.js';
import { intBin, intOct, int, intHex } from './int.js';
import { merge } from './merge.js';
import { omap } from './omap.js';
import { pairs } from './pairs.js';
import { set } from './set.js';
@@ -26,6 +27,7 @@ const schema = [
floatExp,
float,
binary,
merge,
omap,
pairs,
set,

View File

@@ -95,7 +95,7 @@ const timestamp = {
}
return new Date(date);
},
stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '')
stringify: ({ value }) => value?.toISOString().replace(/(T00:00:00)?\.000Z$/, '') ?? ''
};
export { floatTime, intTime, timestamp };

View File

@@ -9,6 +9,8 @@ const FOLD_QUOTED = 'quoted';
function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {
if (!lineWidth || lineWidth < 0)
return text;
if (lineWidth < minContentWidth)
minContentWidth = 0;
const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);
if (text.length <= endStep)
return text;
@@ -28,7 +30,7 @@ function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth =
let escStart = -1;
let escEnd = -1;
if (mode === FOLD_BLOCK) {
i = consumeMoreIndentedLines(text, i);
i = consumeMoreIndentedLines(text, i, indent.length);
if (i !== -1)
end = i + endStep;
}
@@ -52,8 +54,8 @@ function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth =
}
if (ch === '\n') {
if (mode === FOLD_BLOCK)
i = consumeMoreIndentedLines(text, i);
end = i + endStep;
i = consumeMoreIndentedLines(text, i, indent.length);
end = i + indent.length + endStep;
split = undefined;
}
else {
@@ -121,15 +123,24 @@ function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth =
* Presumes `i + 1` is at the start of a line
* @returns index of last newline in more-indented block
*/
function consumeMoreIndentedLines(text, i) {
let ch = text[i + 1];
function consumeMoreIndentedLines(text, i, indent) {
let end = i;
let start = i + 1;
let ch = text[start];
while (ch === ' ' || ch === '\t') {
do {
ch = text[(i += 1)];
} while (ch && ch !== '\n');
ch = text[i + 1];
if (i < start + indent) {
ch = text[++i];
}
else {
do {
ch = text[++i];
} while (ch && ch !== '\n');
end = i;
start = i + 1;
ch = text[start];
}
}
return i;
return end;
}
export { FOLD_BLOCK, FOLD_FLOW, FOLD_QUOTED, foldFlowLines };

View File

@@ -54,7 +54,12 @@ function getTagObject(tags, item) {
let obj;
if (isScalar(item)) {
obj = item.value;
const match = tags.filter(t => t.identify?.(obj));
let match = tags.filter(t => t.identify?.(obj));
if (match.length > 1) {
const testMatch = match.filter(t => t.test);
if (testMatch.length > 0)
match = testMatch;
}
tagObj =
match.find(t => t.format === item.format) ?? match.find(t => !t.format);
}
@@ -63,7 +68,7 @@ function getTagObject(tags, item) {
tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);
}
if (!tagObj) {
const name = obj?.constructor?.name ?? typeof obj;
const name = obj?.constructor?.name ?? (obj === null ? 'null' : typeof obj);
throw new Error(`Tag not resolved for ${name} value`);
}
return tagObj;
@@ -78,7 +83,7 @@ function stringifyProps(node, tagObj, { anchors, doc }) {
anchors.add(anchor);
props.push(`&${anchor}`);
}
const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag;
const tag = node.tag ?? (tagObj.default ? null : tagObj.tag);
if (tag)
props.push(doc.directives.tagString(tag));
return props.join(' ');
@@ -104,8 +109,7 @@ function stringify(item, ctx, onComment, onChompKeep) {
const node = isNode(item)
? item
: ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });
if (!tagObj)
tagObj = getTagObject(ctx.doc.schema.tags, node);
tagObj ?? (tagObj = getTagObject(ctx.doc.schema.tags, node));
const props = stringifyProps(node, tagObj, ctx);
if (props.length > 0)
ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1;

View File

@@ -1,4 +1,3 @@
import { Collection } from '../nodes/Collection.js';
import { isNode, isPair } from '../nodes/identity.js';
import { stringify } from './stringify.js';
import { lineComment, indentComment } from './stringifyComment.js';
@@ -59,7 +58,7 @@ function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, fl
onChompKeep();
return str;
}
function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) {
function stringifyFlowCollection({ items }, ctx, { flowChars, itemIndent }) {
const { indent, indentStep, flowCollectionPadding: fcPadding, options: { commentString } } = ctx;
itemIndent += indentStep;
const itemCtx = Object.assign({}, ctx, {
@@ -112,32 +111,25 @@ function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemInden
lines.push(str);
linesAtValue = lines.length;
}
let str;
const { start, end } = flowChars;
if (lines.length === 0) {
str = start + end;
return start + end;
}
else {
if (!reqNewline) {
const len = lines.reduce((sum, line) => sum + line.length + 2, 2);
reqNewline = len > Collection.maxFlowStringSingleLineLength;
reqNewline = ctx.options.lineWidth > 0 && len > ctx.options.lineWidth;
}
if (reqNewline) {
str = start;
let str = start;
for (const line of lines)
str += line ? `\n${indentStep}${indent}${line}` : '\n';
str += `\n${indent}${end}`;
return `${str}\n${indent}${end}`;
}
else {
str = `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`;
return `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`;
}
}
if (comment) {
str += lineComment(str, indent, commentString(comment));
if (onComment)
onComment();
}
return str;
}
function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {
if (comment && chompKeep)

View File

@@ -10,7 +10,7 @@ function stringifyPair({ key, value }, ctx, onComment, onChompKeep) {
if (keyComment) {
throw new Error('With simple keys, key nodes cannot have comments');
}
if (isCollection(key)) {
if (isCollection(key) || (!isNode(key) && typeof key === 'object')) {
const msg = 'With simple keys, collection cannot be used as a key value';
throw new Error(msg);
}

View File

@@ -1,5 +1,5 @@
import { Scalar } from '../nodes/Scalar.js';
import { foldFlowLines, FOLD_QUOTED, FOLD_FLOW, FOLD_BLOCK } from './foldFlowLines.js';
import { foldFlowLines, FOLD_FLOW, FOLD_QUOTED, FOLD_BLOCK } from './foldFlowLines.js';
const getFoldOptions = (ctx, isBlock) => ({
indentAtStart: isBlock ? ctx.indent.length : ctx.indentAtStart,
@@ -219,23 +219,32 @@ function blockString({ comment, type, value }, ctx, onComment, onChompKeep) {
start = start.replace(/\n+/g, `$&${indent}`);
}
const indentSize = indent ? '2' : '1'; // root is at -1
let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp;
// Leading | or > is added later
let header = (startWithSpace ? indentSize : '') + chomp;
if (comment) {
header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' '));
if (onComment)
onComment();
}
if (literal) {
value = value.replace(/\n+/g, `$&${indent}`);
return `${header}\n${indent}${start}${value}${end}`;
if (!literal) {
const foldedValue = value
.replace(/\n+/g, '\n$&')
.replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded
// ^ more-ind. ^ empty ^ capture next empty lines only at end of indent
.replace(/\n+/g, `$&${indent}`);
let literalFallback = false;
const foldOptions = getFoldOptions(ctx, true);
if (blockQuote !== 'folded' && type !== Scalar.BLOCK_FOLDED) {
foldOptions.onOverflow = () => {
literalFallback = true;
};
}
const body = foldFlowLines(`${start}${foldedValue}${end}`, indent, FOLD_BLOCK, foldOptions);
if (!literalFallback)
return `>${header}\n${indent}${body}`;
}
value = value
.replace(/\n+/g, '\n$&')
.replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded
// ^ more-ind. ^ empty ^ capture next empty lines only at end of indent
.replace(/\n+/g, `$&${indent}`);
const body = foldFlowLines(`${start}${value}${end}`, indent, FOLD_BLOCK, getFoldOptions(ctx, true));
return `${header}\n${indent}${body}`;
value = value.replace(/\n+/g, `$&${indent}`);
return `|${header}\n${indent}${start}${value}${end}`;
}
function plainString(item, ctx, onComment, onChompKeep) {
const { type, value } = item;
@@ -244,10 +253,9 @@ function plainString(item, ctx, onComment, onChompKeep) {
(inFlow && /[[\]{},]/.test(value))) {
return quotedString(value, ctx);
}
if (!value ||
/^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) {
if (/^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) {
// not allowed:
// - empty string, '-' or '?'
// - '-' or '?'
// - start with an indicator character (except [?:-]) or /[?-] /
// - '\n ', ': ' or ' \n' anywhere
// - '#' not preceded by a non-space char

View File

@@ -1,8 +1,8 @@
export { createNode } from './doc/createNode.js';
export { debug, warn } from './log.js';
export { createPair } from './nodes/Pair.js';
export { findPair } from './nodes/YAMLMap.js';
export { toJS } from './nodes/toJS.js';
export { findPair } from './nodes/YAMLMap.js';
export { map as mapTag } from './schema/common/map.js';
export { seq as seqTag } from './schema/common/seq.js';
export { string as stringTag } from './schema/common/string.js';