7728 lines
1.0 MiB
JavaScript
7728 lines
1.0 MiB
JavaScript
|
'use strict';
|
||
|
|
||
|
var obsidian = require('obsidian');
|
||
|
|
||
|
// Fixed-arity wrapper factory: returns five functions whose declared
// arity runs from 1 to 5, each forwarding every actual argument to fn.
// currify uses the declared arity (fn.length) so a curried function
// reports how many arguments it still expects.
const f = (fn) => [
    /*eslint no-unused-vars: 0*/
    function (a) {
        return fn(...arguments);
    },
    function (a, b) {
        return fn(...arguments);
    },
    function (a, b, c) {
        return fn(...arguments);
    },
    function (a, b, c, d) {
        return fn(...arguments);
    },
    function (a, b, c, d, e) {
        return fn(...arguments);
    },
];

// Curry fn: keep collecting arguments across calls until fn.length of
// them have arrived, then invoke fn with all of them at once.
const currify = (fn, ...args) => {
    check(fn);

    if (args.length >= fn.length)
        return fn(...args);

    const again = (...rest) => currify(fn, ...args, ...rest);

    // Pick the wrapper whose declared arity reflects the number of
    // still-missing arguments; fall back to `again` itself when more
    // than five arguments remain outstanding.
    const missing = fn.length - args.length - 1;
    return f(again)[missing] || again;
};

var currify_1 = currify;

// Fail fast with a clear message when fn is not callable.
function check(fn) {
    if (typeof fn !== 'function')
        throw Error('fn should be function!');
}
|
||
|
|
||
|
// Getter/setter closure over a single value:
//   store()      -> current value
//   store(next)  -> stores `next` and returns it
var fullstore = (initial) => {
    const box = {value: initial};

    return (...args) => {
        if (!args.length)
            return box.value;

        const [next] = args;
        box.value = next;

        return next;
    };
};
|
||
|
|
||
|
// Look up a previously created element by its data-name attribute.
const query$1 = (a) => document.querySelector(`[data-name="${a}"]`);

// Curried helpers used by createElement below.
const setAttribute = currify_1((el, obj, name) => el.setAttribute(name, obj[name]));
const set$1 = currify_1((el, obj, name) => el[name] = obj[name]);
const not = currify_1((f, a) => !f(a));
// camelCase option keys become element properties; all-lowercase keys
// become attributes. Strict comparison — both operands are strings.
const isCamelCase = (a) => a !== a.toLowerCase();
|
||
|
|
||
|
// Create a DOM element, or return an existing one with the same dataName.
//
// options:
//   dataName  - stored in the element's data-name attribute and used
//               for the uniqueness lookup
//   notAppend - when truthy, do not append the element to `parent`
//   parent    - node to append to (default: document.body)
//   uniq      - when true (default), reuse an existing element with the
//               same dataName instead of creating a new one
//   ...rest   - camelCase keys are assigned as element properties;
//               all-lowercase keys are set via setAttribute
var createElement = (name, options = {}) => {
    const {
        dataName,
        notAppend,
        parent = document.body,
        uniq = true,
        ...restOptions
    } = options;

    const elFound = isElementPresent(dataName);

    if (uniq && elFound)
        return elFound;

    const el = document.createElement(name);

    if (dataName)
        el.dataset.name = dataName;

    // Property-style options (e.g. innerHTML, className)...
    Object.keys(restOptions)
        .filter(isCamelCase)
        .map(set$1(el, options));

    // ...and attribute-style options (e.g. style).
    Object.keys(restOptions)
        .filter(not(isCamelCase))
        .map(setAttribute(el, options));

    if (!notAppend)
        parent.appendChild(el);

    return el;
};
|
||
|
|
||
|
var isElementPresent_1 = isElementPresent;

/** Look up an already-created element by data-name; undefined when no name given. */
function isElementPresent(dataName) {
    return dataName ? query$1(dataName) : undefined;
}

createElement.isElementPresent = isElementPresent_1;
|
||
|
|
||
|
// Shared keydown handler, curried so each dialog binds its own state.
const keyDown = currify_1(keyDown_);

// Default button set for prompt/confirm dialogs.
const BUTTON_OK_CANCEL = {
    ok: 'OK',
    cancel: 'Cancel',
};

// Monotonically increasing z-index so newer dialogs stack on top.
const zIndex = fullstore(100);
|
||
|
|
||
|
// Show a dialog with a single text (or password) input.
// Resolves with the entered value; rejects on cancel (see showDialog).
var prompt = (title, msg, value = '', options) => {
    const type = getType(options);
    // Escape double quotes so the value survives being embedded in the
    // HTML attribute below — a literal quote would otherwise terminate
    // the value="..." attribute and break (or inject into) the markup.
    const val = String(value)
        .replace(/"/g, '&quot;');

    const valueStr = `<input type="${ type }" value="${ val }" data-name="js-input">`;
    const buttons = getButtons(options) || BUTTON_OK_CANCEL;

    return showDialog(title, msg, valueStr, buttons, options);
};
|
||
|
|
||
|
/** Show an OK/Cancel confirmation dialog (no input field). */
var confirm = (title, msg, options) => {
    return showDialog(title, msg, '', getButtons(options) || BUTTON_OK_CANCEL, options);
};
|
||
|
|
||
|
// Show a progress dialog with a bar, a percentage counter and an Abort
// button.  Returns the showDialog promise, extended with
// setProgress(count) and remove().
var progress = (title, message, options) => {
    const valueStr = `
        <progress value="0" data-name="js-progress" class="progress" max="100"></progress>
        <span data-name="js-counter">0%</span>
    `;

    const buttons = {
        cancel: 'Abort',
    };

    const promise = showDialog(title, message, valueStr, buttons, options);
    const {ok, dialog} = promise;
    // showDialog stashes the promise's resolve function in the `ok`
    // fullstore; capture it so setProgress can resolve at 100%.
    const resolve = ok();

    // Focus the Abort button.
    find(dialog, ['cancel']).map((el) => {
        el.focus();
    });

    Object.assign(promise, {
        setProgress(count) {
            const [elProgress] = find(dialog, ['progress']);
            const [elCounter] = find(dialog, ['counter']);

            elProgress.value = count;
            elCounter.textContent = `${count}%`;

            // Reaching 100% closes the dialog and resolves the promise.
            if (count === 100) {
                remove(dialog);
                resolve();
            }
        },

        remove() {
            remove(dialog);
        },
    });

    return promise;
};
|
||
|
|
||
|
// Extract the caller-supplied button map, or null when absent.
function getButtons(options = {}) {
    const {buttons} = options;
    return buttons || null;
}
|
||
|
|
||
|
// Input type for prompt dialogs: only 'password' is honored;
// everything else falls back to 'text'.
function getType(options = {}) {
    return options.type === 'password' ? 'password' : 'text';
}
|
||
|
|
||
|
// Build the dialog's inner HTML.  `msg` has newlines converted to <br>;
// `value` is a raw HTML snippet (input field, progress bar, ...).
// NOTE(review): title and msg are interpolated without escaping —
// callers must not pass untrusted HTML here.
function getTemplate(title, msg, value, buttons) {
    const encodedMsg = msg.replace(/\n/g, '<br>');

    return `<div class="page">
        <div data-name="js-close" class="close-button"></div>
        <header>${ title }</header>
        <div class="content-area">${ encodedMsg }${ value }</div>
        <div class="action-area">
            <div class="button-strip">
                ${parseButtons(buttons)}
            </div>
        </div>
    </div>`;
}
|
||
|
|
||
|
// Render `buttons` ({name: label}) as <button> elements; each gets a
// data-name of "js-<name>" so find()/closeDialog can locate it later.
function parseButtons(buttons) {
    const names = Object.keys(buttons);
    const parse = currify_1((buttons, name, i) => `<button
        tabindex=${i}
        data-name="js-${name.toLowerCase()}">
        ${buttons[name]}
    </button>`);

    return names
        .map(parse(buttons))
        .join('');
}
|
||
|
|
||
|
// Core dialog builder shared by prompt/confirm/progress.
//
// Returns a Promise — resolved on OK, rejected with an empty Error on
// cancel/close (unless options.cancel === false, in which case cancel
// closes silently) — decorated with the dialog element and the `ok`
// fullstore that holds the promise's resolve function.
function showDialog(title, msg, value, buttons, options) {
    // fullstores used to smuggle resolve/reject out of the executor.
    const ok = fullstore();
    const cancel = fullstore();

    const closeButtons = [
        'cancel',
        'close',
        'ok',
    ];

    const promise = new Promise((resolve, reject) => {
        const noCancel = options && options.cancel === false;
        const empty = () => {};
        const rejectError = () => reject(Error());

        ok(resolve);
        cancel(noCancel ? empty : rejectError);
    });

    const innerHTML = getTemplate(title, msg, value, buttons);

    const dialog = createElement('div', {
        innerHTML,
        className: 'smalltalk',
        // Each new dialog stacks above the previous one.
        style: `z-index: ${zIndex(zIndex() + 1)}`,
    });

    // Focus the OK button, then the input when present (last wins).
    for (const el of find(dialog, ['ok', 'input']))
        el.focus();

    // Pre-select the input's content so typing replaces it.
    // NOTE(review): `value` here is the HTML snippet, not the input's
    // value, so the selection length looks off — confirm upstream.
    for (const el of find(dialog, ['input'])) {
        el.setSelectionRange(0, value.length);
    }

    addListenerAll('click', dialog, closeButtons, (event) => {
        closeDialog(event.target, dialog, ok(), cancel());
    });

    // Keep clicks inside the dialog from leaking to the page, and
    // restore focus afterwards.
    for (const event of ['click', 'contextmenu'])
        dialog.addEventListener(event, (e) => {
            e.stopPropagation();
            for (const el of find(dialog, ['ok', 'input']))
                el.focus();
        });

    dialog.addEventListener('keydown', keyDown(dialog, ok(), cancel()));

    return Object.assign(promise, {
        dialog,
        ok,
    });
}
|
||
|
|
||
|
// Dialog-wide keydown handler (curried via keyDown with dialog/ok/cancel).
function keyDown_(dialog, ok, cancel, event) {
    const KEY = {
        ENTER : 13,
        ESC : 27,
        TAB : 9,
        LEFT : 37,
        UP : 38,
        RIGHT : 39,
        DOWN : 40,
    };

    const {keyCode} = event;
    const el = event.target;

    const namesAll = ['ok', 'cancel', 'input'];
    // Logical names of the focusable elements actually present.
    const names = find(dialog, namesAll)
        .map(getDataName);

    switch(keyCode) {
    case KEY.ENTER:
        // Enter activates whichever element is focused (OK confirms,
        // Cancel/close rejects — see closeDialog).
        closeDialog(el, dialog, ok, cancel);
        event.preventDefault();
        break;

    case KEY.ESC:
        remove(dialog);
        cancel();
        break;

    case KEY.TAB:
        // Shift+Tab advances twice: with three focusable elements that
        // is one step backwards (mod 3).
        // NOTE(review): with only two focusable elements this is a
        // no-op — confirm this matches the intended behavior.
        if (event.shiftKey)
            tab(dialog, names);

        tab(dialog, names);
        event.preventDefault();
        break;

    default:
        // Arrow keys toggle focus between the OK and Cancel buttons.
        ['left', 'right', 'up', 'down'].filter((name) => {
            return keyCode === KEY[name.toUpperCase()];
        }).forEach(() => {
            changeButtonFocus(dialog, names);
        });

        break;
    }

    event.stopPropagation();
}
|
||
|
|
||
|
// "js-ok" -> "ok": strip the js- prefix from an element's data-name.
function getDataName(el) {
    const dataName = el.getAttribute('data-name');
    return dataName.replace('js-', '');
}
|
||
|
|
||
|
// The "other" dialog button: cancel -> ok, anything else -> cancel.
const getName = (activeName) => activeName === 'cancel' ? 'ok' : 'cancel';
|
||
|
|
||
|
// Move focus between the OK and Cancel buttons (arrow-key navigation).
// Does nothing when an input is focused, when only one element is
// focusable, or when focus is not currently on a button.
function changeButtonFocus(dialog, names) {
    const active = document.activeElement;
    const activeName = getDataName(active);
    const isButton = /ok|cancel/.test(activeName);
    const count = names.length - 1;

    if (activeName === 'input' || !count || !isButton)
        return;

    const name = getName(activeName);

    for (const el of find(dialog, [name])) {
        el.focus();
    }
}
|
||
|
|
||
|
// Next focus index, wrapping from the last element back to the first.
const getIndex = (count, index) => index === count ? 0 : index + 1;
|
||
|
|
||
|
// Move focus to the next focusable element in `names`, wrapping around.
function tab(dialog, names) {
    const active = document.activeElement;
    const activeName = getDataName(active);
    const count = names.length - 1;

    // indexOf yields -1 when focus is outside the known set, which
    // getIndex maps to the first element.
    const activeIndex = names.indexOf(activeName);
    const index = getIndex(count, activeIndex);

    const name = names[index];

    for (const el of find(dialog, [name]))
        el.focus();
}
|
||
|
|
||
|
// Resolve or reject the dialog depending on which element was activated.
function closeDialog(el, dialog, ok, cancel) {
    const name = el
        .getAttribute('data-name')
        .replace('js-', '');

    // Close button and Cancel both reject.
    if (/close|cancel/.test(name)) {
        cancel();
        remove(dialog);
        return;
    }

    // Resolve with the input's value — null when the dialog has no
    // input (the reduce simply yields its initial value then).
    const value = find(dialog, ['input'])
        .reduce((value, el) => el.value, null);

    ok(value);
    remove(dialog);
}
|
||
|
|
||
|
// Curried dialog-scoped selector for elements named "js-<name>".
const query = currify_1((element, name) => element.querySelector(`[data-name="js-${ name }"]`));

// Resolve logical names ('ok', 'input', ...) to the matching elements
// inside `element`, dropping names that are not present.
function find(element, names) {
    const elements = names
        .map(query(element))
        .filter(Boolean);

    return elements;
}
|
||
|
|
||
|
// Attach `fn` as an `event` listener on every named element found in `parent`.
function addListenerAll(event, parent, elements, fn) {
    find(parent, elements)
        .forEach((el) => el.addEventListener(event, fn));
}
|
||
|
|
||
|
// Detach the dialog from the DOM; a no-op when already detached.
function remove(dialog) {
    const parent = dialog.parentElement;

    if (parent)
        parent.removeChild(dialog);
}
|
||
|
|
||
|
// OO wrapper around the progress() dialog that tracks user abortion
// and drives the bar while asynchronously iterating a collection.
class Progress {

    constructor(title, message) {
        this.progress = progress(title, message);
        // Cancellation rejects the dialog promise with an empty Error;
        // treat that as an abort flag and only log unexpected errors.
        this.progress.catch(e => {
            this.aborted = true;
            if (e && (e.constructor !== Error || e.message !== "")) console.error(e);
        });
        this.dialog = this.progress.dialog;
        this.aborted = false;
    }

    // Await func(item, index, collection, this) for every item, updating
    // the bar proportionally; stops early when aborted.  The dialog is
    // always removed, even if func throws.
    async forEach(collection, func) {
        try {
            if (this.aborted)
                return;
            // Integer accumulator: each item contributes 100/range percent,
            // carried as a remainder to avoid floating-point drift.
            let processed = 0, range = collection.length, accum = 0, pct = 0;
            for (const item of collection) {
                await func(item, processed++, collection, this);
                if (this.aborted)
                    return;
                accum += 100;
                if (accum > range) {
                    const remainder = accum % range, step = (accum - remainder) / range;
                    this.progress.setProgress(pct += step);
                    accum = remainder;
                }
            }
            // Ensure the bar visibly completes (also resolves the dialog).
            if (pct < 100)
                this.progress.setProgress(100);
            return this;
        } finally {
            this.progress.remove();
        }
    }

    // Live accessors for the dialog's header text...
    set title(text) { this.dialog.querySelector("header").textContent = text; }
    get title() { return this.dialog.querySelector("header").textContent; }

    // ...and for the message (first child node of the content area).
    set message(text) {
        this.dialog.querySelector(".content-area").childNodes[0].textContent = text;
    }

    get message() {
        return this.dialog.querySelector(".content-area").childNodes[0].textContent;
    }
}
|
||
|
|
||
|
// Prompt repeatedly until the entered text matches `regex` (anchored
// with ^...$), showing a Notice for each invalid entry.  Rejects (via
// the underlying prompt promise) when the user cancels.
async function validatedInput(title, message, value = "", regex = ".*", what = "entry") {
    while (true) {
        const input = prompt(title, message, value);
        // NOTE(review): Element.find() is an Obsidian DOM extension,
        // not a standard DOM method — this assumes an Obsidian runtime.
        const inputField = input.dialog.find("input");
        const isValid = (t) => new RegExp(`^${regex}$`).test(t);

        // Put the caret at the end and wire up live validity feedback.
        inputField.setSelectionRange(value.length, value.length);
        inputField.pattern = regex;
        inputField.oninput = () => inputField.setAttribute("aria-invalid", !isValid(inputField.value));

        const result = await input;
        if (isValid(result)) return result;

        new obsidian.Notice(`"${result}" is not a valid ${what}`);
    }
}
|
||
|
|
||
|
// A valid tag: '#' followed by one or more characters that are not
// whitespace, backslash, or the excluded ASCII/Unicode punctuation.
const tagBody = /^#[^\u2000-\u206F\u2E00-\u2E7F'!"#$%&()*+,.:;<=>?@^`{|}~\[\]\\\s]+$/;

class Tag {
    constructor(name) {
        const hashed = Tag.toTag(name);
        const lower = hashed.toLowerCase();
        const prefix = lower + "/";

        this.tag = hashed;
        this.canonical = lower;
        this.canonical_prefix = prefix;
        this.name = hashed.slice(1);

        // Case-insensitive match of this tag itself or any child tag.
        this.matches = function (text) {
            const lc = text.toLowerCase();
            return lc == lower || lc.startsWith(prefix);
        };
    }

    toString() { return this.tag; }

    static isTag(s) { return tagBody.test(s); }

    /** Normalize to exactly one leading '#'. */
    static toTag(name) {
        while (name.startsWith("##")) name = name.slice(1);
        return name.startsWith("#") ? name : "#" + name;
    }

    /** Lowercased, '#'-prefixed form of `name`. */
    static canonical(name) {
        return Tag.toTag(name).toLowerCase();
    }
}
|
||
|
|
||
|
// Rewrites occurrences of `fromTag` (and its bare name without '#') to
// `toTag` in strings, tag arrays and frontmatter values, memoizing the
// result for each spelling encountered.
class Replacement {

    constructor(fromTag, toTag) {
        // Prototype-less memo of already-computed replacements, seeded
        // with the exact tag and bare-name forms.
        const cache = Object.assign(
            Object.create(null), {
                [fromTag.tag]: toTag.tag,
                [fromTag.name]: toTag.name,
            }
        );

        // Splice toTag over fromTag at offset `pos` in `text`.
        this.inString = function(text, pos = 0) {
            return text.slice(0, pos) + toTag.tag + text.slice(pos + fromTag.tag.length);
        };

        // Replace matching tags within an array of values.
        //   skipOdd - odd entries are separators from a split(); skip them
        //   isAlias - entries are aliases; only rewrite literal '#' tags
        this.inArray = (tags, skipOdd, isAlias) => {
            return tags.map((t, i) => {
                if (skipOdd && (i & 1)) return t; // leave odd entries (separators) alone
                // Obsidian allows spaces as separators within array elements
                if (!t) return t;
                // Skip non-tag parts
                if (isAlias) {
                    if (!t.startsWith("#") || !Tag.isTag(t)) return t;
                } else if (/[ ,\n]/.test(t)) {
                    // Split on separators, recurse, stitch back together.
                    return this.inArray(t.split(/([, \n]+)/), true).join("");
                }
                if (cache[t]) return cache[t];
                const lc = t.toLowerCase();
                // NOTE(review): an exact match differing only in case
                // (e.g. "#FROM" for "#From") falls through unchanged
                // below — confirm this is intended.
                if (cache[lc]) {
                    return cache[t] = cache[lc];
                } else if (lc.startsWith(fromTag.canonical_prefix)) {
                    // Hashed child tag, e.g. "#from/sub".
                    return cache[t] = cache[lc] = this.inString(t);
                } else if (("#" + lc).startsWith(fromTag.canonical_prefix)) {
                    // Bare (un-hashed) child tag, e.g. "from/sub".
                    return cache[t] = cache[lc] = this.inString("#" + t).slice(1);
                }
                return cache[t] = cache[lc] = t;
            });
        };

        // If renaming would merge two previously distinct tags, return
        // the colliding [old, new] pair; otherwise undefined.
        this.willMergeTags = function (tagNames) {
            // Renaming to change case doesn't lose info, so ignore it
            if (fromTag.canonical === toTag.canonical) return;

            const existing = new Set(tagNames.map(s => s.toLowerCase()));

            for (const tagName of tagNames.filter(fromTag.matches)) {
                const changed = this.inString(tagName);
                if (existing.has(changed.toLowerCase()))
                    return [new Tag(tagName), new Tag(changed)];
            }

        };
    }
}
|
||
|
|
||
|
// yaml library internals: node kinds are branded onto objects through a
// shared, globally-registered symbol (NODE_TYPE).
const ALIAS = Symbol.for('yaml.alias');
const DOC = Symbol.for('yaml.document');
const MAP = Symbol.for('yaml.map');
const PAIR = Symbol.for('yaml.pair');
const SCALAR$1 = Symbol.for('yaml.scalar');
const SEQ = Symbol.for('yaml.seq');
const NODE_TYPE = Symbol.for('yaml.node.type');

// True when `node` is an object branded with the given kind symbol.
const hasNodeType = (node, type) => !!node && typeof node === 'object' && node[NODE_TYPE] === type;

const isAlias = (node) => hasNodeType(node, ALIAS);
const isDocument = (node) => hasNodeType(node, DOC);
const isMap = (node) => hasNodeType(node, MAP);
const isPair = (node) => hasNodeType(node, PAIR);
const isScalar = (node) => hasNodeType(node, SCALAR$1);
const isSeq = (node) => hasNodeType(node, SEQ);

// Maps and sequences are the two collection kinds.
function isCollection(node) {
    if (node && typeof node === 'object') {
        const type = node[NODE_TYPE];
        if (type === MAP || type === SEQ)
            return true;
    }
    return false;
}

// Value nodes: alias, map, scalar or seq (pairs and documents excluded).
function isNode(node) {
    if (node && typeof node === 'object') {
        const type = node[NODE_TYPE];
        if (type === ALIAS || type === MAP || type === SCALAR$1 || type === SEQ)
            return true;
    }
    return false;
}

const hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor;
|
||
|
// Common base for yaml AST nodes; brands instances with their kind via
// the non-enumerable NODE_TYPE symbol property.
class NodeBase {
    constructor(type) {
        Object.defineProperty(this, NODE_TYPE, { value: type });
    }
    /** Create a copy of this node. */
    clone() {
        // Shallow copy preserving the prototype and all own property
        // descriptors (including the NODE_TYPE brand).
        const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
        if (this.range)
            copy.range = this.range.slice();
        return copy;
    }
}
|
||
|
|
||
|
// Traversal control signals returned by visitor callbacks.
const BREAK = Symbol('break visit');
const SKIP = Symbol('skip children');
const REMOVE = Symbol('remove node');
/**
 * Apply a visitor to an AST node or document.
 *
 * Walks through the tree (depth-first) starting from `node`, calling a
 * `visitor` function with three arguments:
 *   - `key`: For sequence values and map `Pair`, the node's index in the
 *     collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
 *     `null` for the root node.
 *   - `node`: The current node.
 *   - `path`: The ancestry of the current node.
 *
 * The return value of the visitor may be used to control the traversal:
 *   - `undefined` (default): Do nothing and continue
 *   - `visit.SKIP`: Do not visit the children of this node, continue with next
 *     sibling
 *   - `visit.BREAK`: Terminate traversal completely
 *   - `visit.REMOVE`: Remove the current node, then continue with the next one
 *   - `Node`: Replace the current node, then continue by visiting it
 *   - `number`: While iterating the items of a sequence or map, set the index
 *     of the next step. This is useful especially if the index of the current
 *     node has changed.
 *
 * If `visitor` is a single function, it will be called with all values
 * encountered in the tree, including e.g. `null` values. Alternatively,
 * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
 * `Alias` and `Scalar` node. To define the same visitor function for more than
 * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
 * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
 * specific defined one will be used for each node.
 */
function visit(node, visitor) {
    // Expand aggregate targets (Node/Value/Collection) into concrete
    // per-kind callbacks; later Object.assign sources win, so explicit
    // per-kind entries in `visitor` override the aggregates.
    if (typeof visitor === 'object' &&
        (visitor.Collection || visitor.Node || visitor.Value)) {
        visitor = Object.assign({
            Alias: visitor.Node,
            Map: visitor.Node,
            Scalar: visitor.Node,
            Seq: visitor.Node
        }, visitor.Value && {
            Map: visitor.Value,
            Scalar: visitor.Value,
            Seq: visitor.Value
        }, visitor.Collection && {
            Map: visitor.Collection,
            Seq: visitor.Collection
        }, visitor);
    }
    if (isDocument(node)) {
        const cd = _visit(null, node.contents, visitor, Object.freeze([node]));
        if (cd === REMOVE)
            node.contents = null;
    }
    else
        _visit(null, node, visitor, Object.freeze([]));
}
// Without the `as symbol` casts, TS declares these in the `visit`
// namespace using `var`, but then complains about that because
// `unique symbol` must be `const`.
/** Terminate visit traversal completely */
visit.BREAK = BREAK;
/** Do not visit the children of the current node */
visit.SKIP = SKIP;
/** Remove the current node */
visit.REMOVE = REMOVE;
|
||
|
// Recursive worker for visit(): invoke the kind-specific callback,
// apply the control value it returns, then descend into children.
function _visit(key, node, visitor, path) {
    let ctrl = undefined;
    if (typeof visitor === 'function')
        ctrl = visitor(key, node, path);
    else if (isMap(node)) {
        if (visitor.Map)
            ctrl = visitor.Map(key, node, path);
    }
    else if (isSeq(node)) {
        if (visitor.Seq)
            ctrl = visitor.Seq(key, node, path);
    }
    else if (isPair(node)) {
        if (visitor.Pair)
            ctrl = visitor.Pair(key, node, path);
    }
    else if (isScalar(node)) {
        if (visitor.Scalar)
            ctrl = visitor.Scalar(key, node, path);
    }
    else if (isAlias(node)) {
        if (visitor.Alias)
            ctrl = visitor.Alias(key, node, path);
    }
    // A returned node/pair replaces the current one in its parent, and
    // the replacement is then itself visited.
    if (isNode(ctrl) || isPair(ctrl)) {
        const parent = path[path.length - 1];
        if (isCollection(parent)) {
            parent.items[key] = ctrl;
        }
        else if (isPair(parent)) {
            if (key === 'key')
                parent.key = ctrl;
            else
                parent.value = ctrl;
        }
        else if (isDocument(parent)) {
            parent.contents = ctrl;
        }
        else {
            const pt = isAlias(parent) ? 'alias' : 'scalar';
            throw new Error(`Cannot replace node with ${pt} parent`);
        }
        return _visit(key, ctrl, visitor, path);
    }
    // Any symbol control value (BREAK/SKIP/REMOVE) suppresses descent.
    if (typeof ctrl !== 'symbol') {
        if (isCollection(node)) {
            path = Object.freeze(path.concat(node));
            for (let i = 0; i < node.items.length; ++i) {
                const ci = _visit(i, node.items[i], visitor, path);
                // A numeric control value sets the next iteration index.
                if (typeof ci === 'number')
                    i = ci - 1;
                else if (ci === BREAK)
                    return BREAK;
                else if (ci === REMOVE) {
                    node.items.splice(i, 1);
                    i -= 1;
                }
            }
        }
        else if (isPair(node)) {
            path = Object.freeze(path.concat(node));
            const ck = _visit('key', node.key, visitor, path);
            if (ck === BREAK)
                return BREAK;
            else if (ck === REMOVE)
                node.key = null;
            const cv = _visit('value', node.value, visitor, path);
            if (cv === BREAK)
                return BREAK;
            else if (cv === REMOVE)
                node.value = null;
        }
    }
    return ctrl;
}
|
||
|
|
||
|
// Percent-escapes for characters that are significant in YAML syntax
// and must not appear literally in a shortened tag name.
const escapeChars = {
    '!': '%21',
    ',': '%2C',
    '[': '%5B',
    ']': '%5D',
    '{': '%7B',
    '}': '%7D'
};

// Escape every occurrence of the characters above in a tag name.
const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, (match) => escapeChars[match]);
|
||
|
// Parses, tracks and stringifies a document's %YAML / %TAG directives.
class Directives {
    constructor(yaml, tags) {
        /**
         * The directives-end/doc-start marker `---`. If `null`, a marker may still be
         * included in the document's stringified representation.
         */
        this.marker = null;
        this.yaml = Object.assign({}, Directives.defaultYaml, yaml);
        this.tags = Object.assign({}, Directives.defaultTags, tags);
    }
    clone() {
        const copy = new Directives(this.yaml, this.tags);
        copy.marker = this.marker;
        return copy;
    }
    /**
     * During parsing, get a Directives instance for the current document and
     * update the stream state according to the current version's spec.
     */
    atDocument() {
        const res = new Directives(this.yaml, this.tags);
        switch (this.yaml.version) {
            case '1.1':
                // 1.1: directives persist; reset lazily at the next add().
                this.atNextDocument = true;
                break;
            case '1.2':
                // 1.2: every document starts from the default directives.
                this.atNextDocument = false;
                this.yaml = {
                    explicit: Directives.defaultYaml.explicit,
                    version: '1.2'
                };
                this.tags = Object.assign({}, Directives.defaultTags);
                break;
        }
        return res;
    }
    /**
     * @param onError - May be called even if the action was successful
     * @returns `true` on success
     */
    add(line, onError) {
        if (this.atNextDocument) {
            this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };
            this.tags = Object.assign({}, Directives.defaultTags);
            this.atNextDocument = false;
        }
        const parts = line.trim().split(/[ \t]+/);
        const name = parts.shift();
        switch (name) {
            case '%TAG': {
                if (parts.length !== 2) {
                    onError(0, '%TAG directive should contain exactly two parts');
                    // Extra parts are tolerated; missing ones are fatal.
                    if (parts.length < 2)
                        return false;
                }
                const [handle, prefix] = parts;
                this.tags[handle] = prefix;
                return true;
            }
            case '%YAML': {
                this.yaml.explicit = true;
                if (parts.length < 1) {
                    onError(0, '%YAML directive should contain exactly one part');
                    return false;
                }
                const [version] = parts;
                if (version === '1.1' || version === '1.2') {
                    this.yaml.version = version;
                    return true;
                }
                else {
                    onError(6, `Unsupported YAML version ${version}`, true);
                    return false;
                }
            }
            default:
                onError(0, `Unknown directive ${name}`, true);
                return false;
        }
    }
    /**
     * Resolves a tag, matching handles to those defined in %TAG directives.
     *
     * @returns Resolved tag, which may also be the non-specific tag `'!'` or a
     *   `'!local'` tag, or `null` if unresolvable.
     */
    tagName(source, onError) {
        if (source === '!')
            return '!'; // non-specific tag
        if (source[0] !== '!') {
            onError(`Not a valid tag: ${source}`);
            return null;
        }
        // Verbatim form: !<tag:example.com,2000:foo>
        if (source[1] === '<') {
            const verbatim = source.slice(2, -1);
            if (verbatim === '!' || verbatim === '!!') {
                onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);
                return null;
            }
            if (source[source.length - 1] !== '>')
                onError('Verbatim tags must end with a >');
            return verbatim;
        }
        // Shorthand form: split into handle ('!', '!!' or '!name!') + suffix.
        const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/);
        if (!suffix)
            onError(`The ${source} tag has no suffix`);
        const prefix = this.tags[handle];
        if (prefix)
            return prefix + decodeURIComponent(suffix);
        if (handle === '!')
            return source; // local tag
        onError(`Could not resolve tag: ${source}`);
        return null;
    }
    /**
     * Given a fully resolved tag, returns its printable string form,
     * taking into account current tag prefixes and defaults.
     */
    tagString(tag) {
        for (const [handle, prefix] of Object.entries(this.tags)) {
            if (tag.startsWith(prefix))
                return handle + escapeTagName(tag.substring(prefix.length));
        }
        return tag[0] === '!' ? tag : `!<${tag}>`;
    }
    // Stringify directive lines for `doc`; %TAG lines are only emitted
    // when the document actually uses a tag that the prefix shortens.
    toString(doc) {
        const lines = this.yaml.explicit
            ? [`%YAML ${this.yaml.version || '1.2'}`]
            : [];
        const tagEntries = Object.entries(this.tags);
        let tagNames;
        if (doc && tagEntries.length > 0 && isNode(doc.contents)) {
            // Collect all tags actually present in the document.
            const tags = {};
            visit(doc.contents, (_key, node) => {
                if (isNode(node) && node.tag)
                    tags[node.tag] = true;
            });
            tagNames = Object.keys(tags);
        }
        else
            tagNames = [];
        for (const [handle, prefix] of tagEntries) {
            // The default '!!' handle never needs declaring.
            if (handle === '!!' && prefix === 'tag:yaml.org,2002:')
                continue;
            if (!doc || tagNames.some(tn => tn.startsWith(prefix)))
                lines.push(`%TAG ${handle} ${prefix}`);
        }
        return lines.join('\n');
    }
}
Directives.defaultYaml = { explicit: false, version: '1.2' };
Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' };
|
||
|
|
||
|
/**
 * Verify that the input string is a valid anchor.
 *
 * Will throw on errors.
 */
function anchorIsValid(anchor) {
    // Anchors may not contain control characters, whitespace, or the
    // flow-syntax characters , [ ] { }.
    if (!/[\x00-\x19\s,[\]{}]/.test(anchor))
        return true;

    const sa = JSON.stringify(anchor);
    const msg = `Anchor must not contain whitespace or control characters: ${sa}`;
    throw new Error(msg);
}
|
||
|
// Collect every anchor name currently set on any value node under `root`.
function anchorNames(root) {
    const anchors = new Set();
    visit(root, {
        Value(_key, node) {
            if (node.anchor)
                anchors.add(node.anchor);
        }
    });
    return anchors;
}
|
||
|
/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */
function findNewAnchor(prefix, exclude) {
    let i = 1;
    while (exclude.has(`${prefix}${i}`))
        i += 1;
    return `${prefix}${i}`;
}
|
||
|
// Anchor bookkeeping used while creating nodes from JS values.
// `sourceObjects` maps source JS objects to {anchor, node} refs;
// onAnchor() hands out fresh anchor names not yet used in `doc`.
function createNodeAnchors(doc, prefix) {
    const aliasObjects = [];
    const sourceObjects = new Map();
    let prevAnchors = null;
    return {
        onAnchor(source) {
            aliasObjects.push(source);
            // Lazily snapshot the anchors already present in the document.
            if (!prevAnchors)
                prevAnchors = anchorNames(doc);
            const anchor = findNewAnchor(prefix, prevAnchors);
            prevAnchors.add(anchor);
            return anchor;
        },
        /**
         * With circular references, the source node is only resolved after all
         * of its child nodes are. This is why anchors are set only after all of
         * the nodes have been created.
         */
        setAnchors() {
            for (const source of aliasObjects) {
                const ref = sourceObjects.get(source);
                if (typeof ref === 'object' &&
                    ref.anchor &&
                    (isScalar(ref.node) || isCollection(ref.node))) {
                    ref.node.anchor = ref.anchor;
                }
                else {
                    const error = new Error('Failed to resolve repeated object (this should not happen)');
                    error.source = source;
                    throw error;
                }
            }
        },
        sourceObjects
    };
}
|
||
|
|
||
|
// A yaml `*anchor` alias node; resolves against the last matching
// `&anchor` that appears before it in the document.
class Alias extends NodeBase {
    constructor(source) {
        super(ALIAS);
        this.source = source;
        // Aliases can never carry a tag of their own.
        Object.defineProperty(this, 'tag', {
            set() {
                throw new Error('Alias nodes cannot have tags');
            }
        });
    }
    /**
     * Resolve the value of this alias within `doc`, finding the last
     * instance of the `source` anchor before this node.
     */
    resolve(doc) {
        let found = undefined;
        visit(doc, {
            Node: (_key, node) => {
                if (node === this)
                    return visit.BREAK;
                if (node.anchor === this.source)
                    found = node;
            }
        });
        return found;
    }
    toJSON(_arg, ctx) {
        if (!ctx)
            return { source: this.source };
        const { anchors, doc, maxAliasCount } = ctx;
        const source = this.resolve(doc);
        if (!source) {
            const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
            throw new ReferenceError(msg);
        }
        const data = anchors.get(source);
        /* istanbul ignore if */
        if (!data || data.res === undefined) {
            const msg = 'This should not happen: Alias anchor was not resolved?';
            throw new ReferenceError(msg);
        }
        // Guard against "billion laughs" style expansion: bail out when
        // the combined alias count exceeds the configured budget.
        if (maxAliasCount >= 0) {
            data.count += 1;
            if (data.aliasCount === 0)
                data.aliasCount = getAliasCount(doc, source, anchors);
            if (data.count * data.aliasCount > maxAliasCount) {
                const msg = 'Excessive alias count indicates a resource exhaustion attack';
                throw new ReferenceError(msg);
            }
        }
        return data.res;
    }
    toString(ctx, _onComment, _onChompKeep) {
        const src = `*${this.source}`;
        if (ctx) {
            anchorIsValid(this.source);
            if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) {
                const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
                throw new Error(msg);
            }
            // An implicit key position requires a trailing space.
            if (ctx.implicitKey)
                return `${src} `;
        }
        return src;
    }
}
|
||
|
// Estimate how many nodes an alias to `node` would expand to, so that
// Alias#toJSON can guard against resource-exhaustion (billion-laughs) input.
function getAliasCount(doc, node, anchors) {
    if (isAlias(node)) {
        const source = node.resolve(doc);
        const anchor = anchors && source && anchors.get(source);
        return anchor ? anchor.count * anchor.aliasCount : 0;
    }
    if (isCollection(node)) {
        // A collection costs as much as its most expensive item.
        let max = 0;
        for (const item of node.items) {
            const c = getAliasCount(doc, item, anchors);
            if (c > max)
                max = c;
        }
        return max;
    }
    if (isPair(node)) {
        const keyCount = getAliasCount(doc, node.key, anchors);
        const valueCount = getAliasCount(doc, node.value, anchors);
        return Math.max(keyCount, valueCount);
    }
    return 1;
}
|
||
|
|
||
|
/**
 * Recursively convert any node or its contents to native JavaScript
 *
 * @param value - The input value
 * @param arg - If `value` defines a `toJSON()` method, use this
 *   as its first argument
 * @param ctx - Conversion context, originally set in Document#toJS(). If
 *   `{ keep: true }` is not set, output should be suitable for JSON
 *   stringification.
 */
function toJS(value, arg, ctx) {
    if (Array.isArray(value))
        return value.map((item, i) => toJS(item, String(i), ctx));
    if (value && typeof value.toJSON === 'function') {
        if (!ctx || !hasAnchor(value))
            return value.toJSON(arg, ctx);
        // Anchored nodes register their converted result in ctx.anchors so
        // that aliases can reuse it (and circular references can resolve).
        const anchorData = { aliasCount: 0, count: 1, res: undefined };
        ctx.anchors.set(value, anchorData);
        ctx.onCreate = converted => {
            anchorData.res = converted;
            delete ctx.onCreate;
        };
        const converted = value.toJSON(arg, ctx);
        if (ctx.onCreate)
            ctx.onCreate(converted);
        return converted;
    }
    // BigInt is not JSON-representable; coerce unless keeping types.
    if (typeof value === 'bigint' && !(ctx && ctx.keep))
        return Number(value);
    return value;
}
|
||
|
|
||
|
// True for values representable as a plain scalar (not an object or a
// function); all falsy values count as scalars.
const isScalarValue = (value) => {
    if (!value)
        return true;
    const t = typeof value;
    return t !== 'function' && t !== 'object';
};
|
||
|
/** A YAML scalar node, wrapping a single JS value. */
class Scalar extends NodeBase {
    constructor(value) {
        super(SCALAR$1);
        this.value = value;
    }
    toJSON(arg, ctx) {
        if (ctx && ctx.keep)
            return this.value;
        return toJS(this.value, arg, ctx);
    }
    toString() {
        return String(this.value);
    }
}
|
||
|
// Allowed values for Scalar#type, selecting the scalar style used when the
// node is stringified (block folded/literal, plain, or quoted).
Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED';
Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL';
Scalar.PLAIN = 'PLAIN';
Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';
Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE';
|
||
|
|
||
|
// Canonical prefix that the `!!` tag shorthand expands to.
const defaultTagPrefix = 'tag:yaml.org,2002:';
// Select the tag object to use for `value`. With an explicit `tagName`,
// prefer a format-less tag with that name (falling back to the first match);
// otherwise pick the first format-less tag whose `identify` accepts the value.
function findTagObject(value, tagName, tags) {
    if (tagName) {
        const named = tags.filter(t => t.tag === tagName);
        const tagObj = named.find(t => !t.format) || named[0];
        if (!tagObj)
            throw new Error(`Tag ${tagName} not found`);
        return tagObj;
    }
    return tags.find(t => t.identify && t.identify(value) && !t.format);
}
|
||
|
// Build a YAML node from a native JS value, picking an appropriate tag from
// ctx.schema and (when ctx.aliasDuplicateObjects is set) turning repeated
// object references into anchor/alias pairs.
function createNode(value, tagName, ctx) {
    var _a, _b;
    // Unwrap documents & pass through existing nodes unchanged.
    if (isDocument(value))
        value = value.contents;
    if (isNode(value))
        return value;
    // A bare Pair is wrapped in a single-entry map.
    if (isPair(value)) {
        const map = (_b = (_a = ctx.schema[MAP]).createNode) === null || _b === void 0 ? void 0 : _b.call(_a, ctx.schema, null, ctx);
        map.items.push(value);
        return map;
    }
    // Boxed primitives are unwrapped, matching JSON.stringify behaviour.
    if (value instanceof String ||
        value instanceof Number ||
        value instanceof Boolean ||
        (typeof BigInt === 'function' && value instanceof BigInt) // not supported everywhere
    ) {
        // https://tc39.es/ecma262/#sec-serializejsonproperty
        value = value.valueOf();
    }
    const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx;
    // Detect duplicate references to the same object & use Alias nodes for all
    // after first. The `ref` wrapper allows for circular references to resolve.
    let ref = undefined;
    if (aliasDuplicateObjects && value && typeof value === 'object') {
        ref = sourceObjects.get(value);
        if (ref) {
            if (!ref.anchor)
                ref.anchor = onAnchor(value);
            return new Alias(ref.anchor);
        }
        else {
            ref = { anchor: null, node: null };
            sourceObjects.set(value, ref);
        }
    }
    // Expand the `!!` shorthand to the canonical YAML 1.2 tag prefix.
    if (tagName && tagName.startsWith('!!'))
        tagName = defaultTagPrefix + tagName.slice(2);
    let tagObj = findTagObject(value, tagName, schema.tags);
    if (!tagObj) {
        if (value && typeof value.toJSON === 'function')
            value = value.toJSON();
        if (!value || typeof value !== 'object') {
            const node = new Scalar(value);
            if (ref)
                ref.node = node;
            return node;
        }
        // No matching tag: fall back to a map, or a seq for iterables.
        tagObj =
            value instanceof Map
                ? schema[MAP]
                : Symbol.iterator in Object(value)
                    ? schema[SEQ]
                    : schema[MAP];
    }
    if (onTagObj) {
        onTagObj(tagObj);
        delete ctx.onTagObj;
    }
    const node = (tagObj === null || tagObj === void 0 ? void 0 : tagObj.createNode)
        ? tagObj.createNode(ctx.schema, value, ctx)
        : new Scalar(value);
    if (tagName)
        node.tag = tagName;
    if (ref)
        ref.node = node;
    return node;
}
|
||
|
|
||
|
// Build a nested collection skeleton for `path`, with `value` at the leaf.
// Non-negative integer keys produce (sparse) arrays, i.e. seqs; every other
// key produces a single-entry map.
function collectionFromPath(schema, path, value) {
    const root = Array.from(path).reduceRight((acc, k) => {
        if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {
            const seq = [];
            seq[k] = acc;
            return seq;
        }
        return new Map([[k, acc]]);
    }, value);
    return createNode(root, undefined, {
        aliasDuplicateObjects: false,
        keepUndefined: false,
        onAnchor: () => {
            // No duplicate-object aliasing is requested above, so anchors
            // should never be created here.
            throw new Error('This should not happen, please report a bug.');
        },
        schema,
        sourceObjects: new Map()
    });
}
|
||
|
// null, undefined, or an empty non-string iterable (e.g. [])
const isEmptyPath = (path) => {
    if (path == null)
        return true;
    return typeof path === 'object' && !!path[Symbol.iterator]().next().done;
};
|
||
|
/**
 * Abstract base class for YAML collections (maps and sequences); provides
 * the recursive path-based accessors (`addIn`, `deleteIn`, `getIn`, `hasIn`,
 * `setIn`) on top of the subclass's `add`/`delete`/`get`/`has`/`set`.
 */
class Collection extends NodeBase {
    constructor(type, schema) {
        super(type);
        // `schema` is kept non-enumerable so it is skipped by generic object
        // iteration (e.g. when cloning via property descriptors below).
        Object.defineProperty(this, 'schema', {
            value: schema,
            configurable: true,
            enumerable: false,
            writable: true
        });
    }
    /**
     * Create a copy of this collection.
     *
     * @param schema - If defined, overwrites the original's schema
     */
    clone(schema) {
        const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
        if (schema)
            copy.schema = schema;
        // Child nodes & pairs are cloned recursively; other items are shared.
        copy.items = copy.items.map(it => isNode(it) || isPair(it) ? it.clone(schema) : it);
        if (this.range)
            copy.range = this.range.slice();
        return copy;
    }
    /**
     * Adds a value to the collection. For `!!map` and `!!omap` the value must
     * be a Pair instance or a `{ key, value }` object, which may not have a key
     * that already exists in the map.
     */
    addIn(path, value) {
        if (isEmptyPath(path))
            this.add(value);
        else {
            const [key, ...rest] = path;
            const node = this.get(key, true);
            if (isCollection(node))
                node.addIn(rest, value);
            else if (node === undefined && this.schema)
                // Missing intermediate collections are created as needed.
                this.set(key, collectionFromPath(this.schema, rest, value));
            else
                throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
        }
    }
    /**
     * Removes a value from the collection.
     * @returns `true` if the item was found and removed.
     */
    deleteIn(path) {
        const [key, ...rest] = path;
        if (rest.length === 0)
            return this.delete(key);
        const node = this.get(key, true);
        if (isCollection(node))
            return node.deleteIn(rest);
        else
            throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
    }
    /**
     * Returns item at `key`, or `undefined` if not found. By default unwraps
     * scalar values from their surrounding node; to disable set `keepScalar` to
     * `true` (collections are always returned intact).
     */
    getIn(path, keepScalar) {
        const [key, ...rest] = path;
        const node = this.get(key, true);
        if (rest.length === 0)
            return !keepScalar && isScalar(node) ? node.value : node;
        else
            return isCollection(node) ? node.getIn(rest, keepScalar) : undefined;
    }
    // True if every item is a Pair with a null value; with `allowScalar`,
    // a bare Scalar wrapping null (with no comments or tag) also counts.
    hasAllNullValues(allowScalar) {
        return this.items.every(node => {
            if (!isPair(node))
                return false;
            const n = node.value;
            return (n == null ||
                (allowScalar &&
                    isScalar(n) &&
                    n.value == null &&
                    !n.commentBefore &&
                    !n.comment &&
                    !n.tag));
        });
    }
    /**
     * Checks if the collection includes a value with the key `key`.
     */
    hasIn(path) {
        const [key, ...rest] = path;
        if (rest.length === 0)
            return this.has(key);
        const node = this.get(key, true);
        return isCollection(node) ? node.hasIn(rest) : false;
    }
    /**
     * Sets a value in this collection. For `!!set`, `value` needs to be a
     * boolean to add/remove the item from the set.
     */
    setIn(path, value) {
        const [key, ...rest] = path;
        if (rest.length === 0) {
            this.set(key, value);
        }
        else {
            const node = this.get(key, true);
            if (isCollection(node))
                node.setIn(rest, value);
            else if (node === undefined && this.schema)
                // Missing intermediate collections are created as needed.
                this.set(key, collectionFromPath(this.schema, rest, value));
            else
                throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
        }
    }
}
|
||
|
// Flow collections stringified to at most this many characters are kept on
// a single line.
Collection.maxFlowStringSingleLineLength = 60;
|
||
|
|
||
|
/**
 * Stringifies a comment.
 *
 * Empty comment lines are left empty,
 * lines consisting of a single space are replaced by `#`,
 * and all other lines are prefixed with a `#`.
 */
const stringifyComment = (str) => str
    .split('\n')
    .map(line => (line === '' ? '' : line === ' ' ? '#' : `#${line}`))
    .join('\n');
|
||
|
// Prefix every non-blank line of a (multi-line) comment with `indent`.
// A comment consisting only of newlines loses its first newline.
function indentComment(comment, indent) {
    if (/^\n+$/.test(comment))
        return comment.substring(1);
    if (!indent)
        return comment;
    return comment.replace(/^(?! *$)/gm, indent);
}
|
||
|
// Append `comment` to `str`: multi-line comments go on their own indented
// lines; single-line comments follow on the same line, space-separated.
const lineComment = (str, indent, comment) => {
    if (comment.includes('\n'))
        return '\n' + indentComment(comment, indent);
    return (str.endsWith(' ') ? '' : ' ') + comment;
};
|
||
|
|
||
|
// Folding modes for foldFlowLines(): flow scalars, block scalars, and
// double-quoted scalars (which must not fold inside escape sequences).
const FOLD_FLOW = 'flow';
const FOLD_BLOCK = 'block';
const FOLD_QUOTED = 'quoted';
|
||
|
/**
 * Tries to keep input at up to `lineWidth` characters, splitting only on spaces
 * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are
 * terminated with `\n` and started with `indent`.
 */
function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {
    // A zero/negative lineWidth disables folding entirely.
    if (!lineWidth || lineWidth < 0)
        return text;
    const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);
    if (text.length <= endStep)
        return text;
    const folds = []; // indices in `text` at which a fold is inserted
    const escapedFolds = {}; // folds that land on a `\`-escape (quoted mode)
    let end = lineWidth - indent.length; // index at which the current line overflows
    if (typeof indentAtStart === 'number') {
        if (indentAtStart > lineWidth - Math.max(2, minContentWidth))
            // First line is too short to fit any content: fold immediately.
            folds.push(0);
        else
            end = lineWidth - indentAtStart;
    }
    let split = undefined; // latest index at which the current line may fold
    let prev = undefined;
    let overflow = false;
    let i = -1;
    let escStart = -1;
    let escEnd = -1;
    if (mode === FOLD_BLOCK) {
        // Leading more-indented lines of a block scalar are never folded.
        i = consumeMoreIndentedLines(text, i);
        if (i !== -1)
            end = i + endStep;
    }
    for (let ch; (ch = text[(i += 1)]);) {
        if (mode === FOLD_QUOTED && ch === '\\') {
            // Skip over the whole escape sequence; folding inside one would
            // corrupt it.
            escStart = i;
            switch (text[i + 1]) {
                case 'x':
                    i += 3;
                    break;
                case 'u':
                    i += 5;
                    break;
                case 'U':
                    i += 9;
                    break;
                default:
                    i += 1;
            }
            escEnd = i;
        }
        if (ch === '\n') {
            if (mode === FOLD_BLOCK)
                i = consumeMoreIndentedLines(text, i);
            end = i + endStep;
            split = undefined;
        }
        else {
            if (ch === ' ' &&
                prev &&
                prev !== ' ' &&
                prev !== '\n' &&
                prev !== '\t') {
                // space surrounded by non-space can be replaced with newline + indent
                const next = text[i + 1];
                if (next && next !== ' ' && next !== '\n' && next !== '\t')
                    split = i;
            }
            if (i >= end) {
                if (split) {
                    folds.push(split);
                    end = split + endStep;
                    split = undefined;
                }
                else if (mode === FOLD_QUOTED) {
                    // white-space collected at end may stretch past lineWidth
                    while (prev === ' ' || prev === '\t') {
                        prev = ch;
                        ch = text[(i += 1)];
                        overflow = true;
                    }
                    // Account for newline escape, but don't break preceding escape
                    const j = i > escEnd + 1 ? i - 2 : escStart - 1;
                    // Bail out if lineWidth & minContentWidth are shorter than an escape string
                    if (escapedFolds[j])
                        return text;
                    folds.push(j);
                    escapedFolds[j] = true;
                    end = j + endStep;
                    split = undefined;
                }
                else {
                    overflow = true;
                }
            }
        }
        prev = ch;
    }
    if (overflow && onOverflow)
        onOverflow();
    if (folds.length === 0)
        return text;
    if (onFold)
        onFold();
    // Rebuild the text with `\n` + indent at each fold point; the character
    // at the fold itself (a space, or the escaped char in quoted mode) is
    // consumed by the fold.
    let res = text.slice(0, folds[0]);
    for (let i = 0; i < folds.length; ++i) {
        const fold = folds[i];
        const end = folds[i + 1] || text.length;
        if (fold === 0)
            res = `\n${indent}${text.slice(0, end)}`;
        else {
            if (mode === FOLD_QUOTED && escapedFolds[fold])
                res += `${text[fold]}\\`;
            res += `\n${indent}${text.slice(fold + 1, end)}`;
        }
    }
    return res;
}
|
||
|
/**
|
||
|
* Presumes `i + 1` is at the start of a line
|
||
|
* @returns index of last newline in more-indented block
|
||
|
*/
|
||
|
function consumeMoreIndentedLines(text, i) {
|
||
|
let ch = text[i + 1];
|
||
|
while (ch === ' ' || ch === '\t') {
|
||
|
do {
|
||
|
ch = text[(i += 1)];
|
||
|
} while (ch && ch !== '\n');
|
||
|
ch = text[i + 1];
|
||
|
}
|
||
|
return i;
|
||
|
}
|
||
|
|
||
|
// Extract the options foldFlowLines() needs from a stringify context.
const getFoldOptions = (ctx) => {
    const { lineWidth, minContentWidth } = ctx.options;
    return { indentAtStart: ctx.indentAtStart, lineWidth, minContentWidth };
};
|
||
|
// Also checks for lines starting with %, as parsing the output as YAML 1.1 will
// presume that's starting a new document.
const containsDocumentMarker = (str) => str.search(/^(%|---|\.\.\.)/m) !== -1;
|
||
|
// True if any line of `str` is longer than `lineWidth - indentLength`.
// A zero or negative lineWidth disables the check.
function lineLengthOverLimit(str, lineWidth, indentLength) {
    if (!lineWidth || lineWidth < 0)
        return false;
    const limit = lineWidth - indentLength;
    const strLen = str.length;
    if (strLen <= limit)
        return false;
    let start = 0;
    for (let i = 0; i < strLen; ++i) {
        if (str[i] !== '\n')
            continue;
        if (i - start > limit)
            return true;
        start = i + 1;
        // Everything after this newline fits: no later line can overflow.
        if (strLen - start <= limit)
            return false;
    }
    return true;
}
|
||
|
// Render `value` as a double-quoted YAML scalar: start from the JSON string,
// rewrite \uXXXX escapes into shorter YAML forms, optionally break on literal
// newlines, and fold long lines unless used as an implicit key.
function doubleQuotedString(value, ctx) {
    const json = JSON.stringify(value);
    if (ctx.options.doubleQuotedAsJSON)
        return json;
    const { implicitKey } = ctx;
    const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength;
    const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
    let str = '';
    let start = 0; // start of the not-yet-copied tail of `json`
    for (let i = 0, ch = json[i]; ch; ch = json[++i]) {
        if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') {
            // space before newline needs to be escaped to not be folded
            str += json.slice(start, i) + '\\ ';
            i += 1;
            start = i;
            ch = '\\';
        }
        if (ch === '\\')
            switch (json[i + 1]) {
                case 'u':
                    {
                        // Replace \uXXXX with the shorter YAML equivalent
                        // where one exists (\0, \a, \v, \e, \N, \_, \L, \P,
                        // or \xXX for other Latin-1 code points).
                        str += json.slice(start, i);
                        const code = json.substr(i + 2, 4);
                        switch (code) {
                            case '0000':
                                str += '\\0';
                                break;
                            case '0007':
                                str += '\\a';
                                break;
                            case '000b':
                                str += '\\v';
                                break;
                            case '001b':
                                str += '\\e';
                                break;
                            case '0085':
                                str += '\\N';
                                break;
                            case '00a0':
                                str += '\\_';
                                break;
                            case '2028':
                                str += '\\L';
                                break;
                            case '2029':
                                str += '\\P';
                                break;
                            default:
                                if (code.substr(0, 2) === '00')
                                    str += '\\x' + code.substr(2);
                                else
                                    str += json.substr(i, 6);
                        }
                        i += 5;
                        start = i + 1;
                    }
                    break;
                case 'n':
                    // Short strings, implicit keys & newline-before-closing-quote
                    // keep the \n escape; otherwise emit a real line break.
                    if (implicitKey ||
                        json[i + 2] === '"' ||
                        json.length < minMultiLineLength) {
                        i += 1;
                    }
                    else {
                        // folding will eat first newline
                        str += json.slice(start, i) + '\n\n';
                        while (json[i + 2] === '\\' &&
                            json[i + 3] === 'n' &&
                            json[i + 4] !== '"') {
                            str += '\n';
                            i += 2;
                        }
                        str += indent;
                        // space after newline needs to be escaped to not be folded
                        if (json[i + 2] === ' ')
                            str += '\\';
                        i += 1;
                        start = i + 1;
                    }
                    break;
                default:
                    i += 1;
            }
    }
    str = start ? str + json.slice(start) : json;
    return implicitKey
        ? str
        : foldFlowLines(str, indent, FOLD_QUOTED, getFoldOptions(ctx));
}
|
||
|
// Render `value` as a single-quoted YAML scalar, falling back to double
// quotes for values single quoting cannot represent.
function singleQuotedString(value, ctx) {
    const cannotSingleQuote = ctx.options.singleQuote === false ||
        (ctx.implicitKey && value.includes('\n')) ||
        /[ \t]\n|\n[ \t]/.test(value); // single quoted string can't have leading or trailing whitespace around newline
    if (cannotSingleQuote)
        return doubleQuotedString(value, ctx);
    const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
    const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'";
    if (ctx.implicitKey)
        return res;
    return foldFlowLines(res, indent, FOLD_FLOW, getFoldOptions(ctx));
}
|
||
|
// Choose between single- and double-quoted styles for `value`, preferring
// whichever avoids having to escape quote characters the value contains.
function quotedString(value, ctx) {
    const { singleQuote } = ctx.options;
    if (singleQuote === false)
        return doubleQuotedString(value, ctx);
    const hasDouble = value.includes('"');
    const hasSingle = value.includes("'");
    let quote;
    if (hasDouble && !hasSingle)
        quote = singleQuotedString;
    else if (hasSingle && !hasDouble)
        quote = doubleQuotedString;
    else
        quote = singleQuote ? singleQuotedString : doubleQuotedString;
    return quote(value, ctx);
}
|
||
|
// Render `value` as a YAML block scalar (literal `|` or folded `>`),
// computing the header's chomping & indentation indicators, and falling back
// to quoting for values block style cannot represent.
function blockString({ comment, type, value }, ctx, onComment, onChompKeep) {
    const { blockQuote, commentString, lineWidth } = ctx.options;
    // 1. Block can't end in whitespace unless the last line is non-empty.
    // 2. Strings consisting of only whitespace are best rendered explicitly.
    if (!blockQuote || /\n[\t ]+$/.test(value) || /^\s*$/.test(value)) {
        return quotedString(value, ctx);
    }
    const indent = ctx.indent ||
        (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : '');
    // literal (|) keeps newlines verbatim; folded (>) folds long lines.
    const literal = blockQuote === 'literal'
        ? true
        : blockQuote === 'folded' || type === Scalar.BLOCK_FOLDED
            ? false
            : type === Scalar.BLOCK_LITERAL
                ? true
                : !lineLengthOverLimit(value, lineWidth, indent.length);
    if (!value)
        return literal ? '|\n' : '>\n';
    // determine chomping from whitespace at value end
    let chomp;
    let endStart;
    for (endStart = value.length; endStart > 0; --endStart) {
        const ch = value[endStart - 1];
        if (ch !== '\n' && ch !== '\t' && ch !== ' ')
            break;
    }
    let end = value.substring(endStart);
    const endNlPos = end.indexOf('\n');
    if (endNlPos === -1) {
        chomp = '-'; // strip
    }
    else if (value === end || endNlPos !== end.length - 1) {
        chomp = '+'; // keep
        if (onChompKeep)
            onChompKeep();
    }
    else {
        chomp = ''; // clip
    }
    if (end) {
        value = value.slice(0, -end.length);
        if (end[end.length - 1] === '\n')
            end = end.slice(0, -1);
        end = end.replace(/\n+(?!\n|$)/g, `$&${indent}`);
    }
    // determine indent indicator from whitespace at value start
    let startWithSpace = false;
    let startEnd;
    let startNlPos = -1;
    for (startEnd = 0; startEnd < value.length; ++startEnd) {
        const ch = value[startEnd];
        if (ch === ' ')
            startWithSpace = true;
        else if (ch === '\n')
            startNlPos = startEnd;
        else
            break;
    }
    let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd);
    if (start) {
        value = value.substring(start.length);
        start = start.replace(/\n+/g, `$&${indent}`);
    }
    const indentSize = indent ? '2' : '1'; // root is at -1
    let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp;
    if (comment) {
        // The header comment must fit on the header line.
        header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' '));
        if (onComment)
            onComment();
    }
    if (literal) {
        value = value.replace(/\n+/g, `$&${indent}`);
        return `${header}\n${indent}${start}${value}${end}`;
    }
    value = value
        .replace(/\n+/g, '\n$&')
        .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded
        // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent
        .replace(/\n+/g, `$&${indent}`);
    const body = foldFlowLines(`${start}${value}${end}`, indent, FOLD_BLOCK, getFoldOptions(ctx));
    return `${header}\n${indent}${body}`;
}
|
||
|
// Render `value` as a plain (unquoted) YAML scalar where possible, falling
// back to quoted or block styles for values plain style cannot represent.
function plainString(item, ctx, onComment, onChompKeep) {
    const { type, value } = item;
    const { actualString, implicitKey, indent, inFlow } = ctx;
    // Flow-collection indicator characters (and newlines in implicit keys)
    // force quoting.
    if ((implicitKey && /[\n[\]{},]/.test(value)) ||
        (inFlow && /[[\]{},]/.test(value))) {
        return quotedString(value, ctx);
    }
    if (!value ||
        /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) {
        // not allowed:
        // - empty string, '-' or '?'
        // - start with an indicator character (except [?:-]) or /[?-] /
        // - '\n ', ': ' or ' \n' anywhere
        // - '#' not preceded by a non-space char
        // - end with ' ' or ':'
        return implicitKey || inFlow || value.indexOf('\n') === -1
            ? quotedString(value, ctx)
            : blockString(item, ctx, onComment, onChompKeep);
    }
    if (!implicitKey &&
        !inFlow &&
        type !== Scalar.PLAIN &&
        value.indexOf('\n') !== -1) {
        // Where allowed & type not set explicitly, prefer block style for multiline strings
        return blockString(item, ctx, onComment, onChompKeep);
    }
    if (indent === '' && containsDocumentMarker(value)) {
        // A document marker at the top level would end the document, so
        // force the value onto an indented block.
        ctx.forceBlockIndent = true;
        return blockString(item, ctx, onComment, onChompKeep);
    }
    const str = value.replace(/\n+/g, `$&\n${indent}`);
    // Verify that output will be parsed as a string, as e.g. plain numbers and
    // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'),
    // and others in v1.1.
    if (actualString) {
        const test = (tag) => { var _a; return tag.default && tag.tag !== 'tag:yaml.org,2002:str' && ((_a = tag.test) === null || _a === void 0 ? void 0 : _a.test(str)); };
        const { compat, tags } = ctx.doc.schema;
        if (tags.some(test) || (compat === null || compat === void 0 ? void 0 : compat.some(test)))
            return quotedString(value, ctx);
    }
    return implicitKey
        ? str
        : foldFlowLines(str, indent, FOLD_FLOW, getFoldOptions(ctx));
}
|
||
|
// Stringify a scalar node, honouring its requested `type` where possible and
// falling back to the context's configured default string style otherwise.
function stringifyString(item, ctx, onComment, onChompKeep) {
    const { implicitKey, inFlow } = ctx;
    const ss = typeof item.value === 'string'
        ? item
        : Object.assign({}, item, { value: String(item.value) });
    let { type } = item;
    // Control characters & unpaired surrogates can only be represented in
    // double-quoted scalars.
    if (type !== Scalar.QUOTE_DOUBLE &&
        /[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value)) {
        type = Scalar.QUOTE_DOUBLE;
    }
    const _stringify = (_type) => {
        switch (_type) {
            case Scalar.BLOCK_FOLDED:
            case Scalar.BLOCK_LITERAL:
                // blocks are not valid inside flow containers
                return implicitKey || inFlow
                    ? quotedString(ss.value, ctx)
                    : blockString(ss, ctx, onComment, onChompKeep);
            case Scalar.QUOTE_DOUBLE:
                return doubleQuotedString(ss.value, ctx);
            case Scalar.QUOTE_SINGLE:
                return singleQuotedString(ss.value, ctx);
            case Scalar.PLAIN:
                return plainString(ss, ctx, onComment, onChompKeep);
            default:
                return null;
        }
    };
    let res = _stringify(type);
    if (res === null) {
        // Unknown or unset type: fall back to the configured default style.
        const { defaultKeyType, defaultStringType } = ctx.options;
        const t = (implicitKey && defaultKeyType) || defaultStringType;
        res = _stringify(t);
        if (res === null)
            throw new Error(`Unsupported default string type ${t}`);
    }
    return res;
}
|
||
|
|
||
|
// Build the context object threaded through all stringifier functions,
// merging built-in defaults with schema-level and call-level options.
function createStringifyContext(doc, options) {
    const opt = Object.assign({
        blockQuote: true,
        commentString: stringifyComment,
        defaultKeyType: null,
        defaultStringType: 'PLAIN',
        directives: null,
        doubleQuotedAsJSON: false,
        doubleQuotedMinMultiLineLength: 40,
        falseStr: 'false',
        indentSeq: true,
        lineWidth: 80,
        minContentWidth: 20,
        nullStr: 'null',
        simpleKeys: false,
        singleQuote: null,
        trueStr: 'true',
        verifyAliasOrder: true
    }, doc.schema.toStringOptions, options);
    // collectionStyle 'block'/'flow' forces a style for all collections;
    // anything else (inFlow === null) lets each collection decide.
    let inFlow;
    if (opt.collectionStyle === 'block')
        inFlow = false;
    else if (opt.collectionStyle === 'flow')
        inFlow = true;
    else
        inFlow = null;
    return {
        anchors: new Set(),
        doc,
        indent: '',
        indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ',
        inFlow,
        options: opt
    };
}
|
||
|
// Find the tag object to use when stringifying `item`: an explicit node tag
// wins; otherwise scalars are matched by value via `identify`, and
// collections by their node class.
function getTagObject(tags, item) {
    if (item.tag) {
        const named = tags.filter(t => t.tag === item.tag);
        if (named.length > 0)
            return named.find(t => t.format === item.format) || named[0];
    }
    if (isScalar(item)) {
        const obj = item.value;
        const match = tags.filter(t => t.identify && t.identify(obj));
        const tagObj = match.find(t => t.format === item.format) || match.find(t => !t.format);
        if (!tagObj) {
            const name = obj && obj.constructor ? obj.constructor.name : typeof obj;
            throw new Error(`Tag not resolved for ${name} value`);
        }
        return tagObj;
    }
    const tagObj = tags.find(t => t.nodeClass && item instanceof t.nodeClass);
    if (!tagObj) {
        const name = item && item.constructor ? item.constructor.name : typeof item;
        throw new Error(`Tag not resolved for ${name} value`);
    }
    return tagObj;
}
|
||
|
// Stringify a node's properties (anchor & tag). Needs to be called before
// the value stringifier to allow for circular anchor refs.
function stringifyProps(node, tagObj, { anchors, doc }) {
    // Without directives (i.e. JSON output) nodes carry no props.
    if (!doc.directives)
        return '';
    const parts = [];
    const anchor = (isScalar(node) || isCollection(node)) && node.anchor;
    if (anchor && anchorIsValid(anchor)) {
        anchors.add(anchor);
        parts.push(`&${anchor}`);
    }
    // A default tag is left implicit.
    const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag;
    if (tag)
        parts.push(doc.directives.tagString(tag));
    return parts.join(' ');
}
|
||
|
// Stringify any item (node, pair, alias or plain JS value), prefixed with
// its props (anchor & tag) where present.
function stringify$1(item, ctx, onComment, onChompKeep) {
    var _a;
    if (isPair(item))
        return item.toString(ctx, onComment, onChompKeep);
    if (isAlias(item)) {
        if (ctx.doc.directives)
            return item.toString(ctx);
        // Without directives (JSON output) aliases cannot be emitted, so
        // resolve them in place, guarding against circular structures.
        if ((_a = ctx.resolvedAliases) === null || _a === void 0 ? void 0 : _a.has(item)) {
            throw new TypeError(`Cannot stringify circular structure without alias nodes`);
        }
        else {
            if (ctx.resolvedAliases)
                ctx.resolvedAliases.add(item);
            else
                ctx.resolvedAliases = new Set([item]);
            item = item.resolve(ctx.doc);
        }
    }
    let tagObj = undefined;
    const node = isNode(item)
        ? item
        : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });
    if (!tagObj)
        tagObj = getTagObject(ctx.doc.schema.tags, node);
    // Props must be stringified before the value to allow circular anchor refs.
    const props = stringifyProps(node, tagObj, ctx);
    if (props.length > 0)
        ctx.indentAtStart = (ctx.indentAtStart || 0) + props.length + 1;
    const str = typeof tagObj.stringify === 'function'
        ? tagObj.stringify(node, ctx, onComment, onChompKeep)
        : isScalar(node)
            ? stringifyString(node, ctx, onComment, onChompKeep)
            : node.toString(ctx, onComment, onChompKeep);
    if (!props)
        return str;
    // Scalars & flow collections stay on the same line as their props;
    // block collections start on the next line.
    return isScalar(node) || str[0] === '{' || str[0] === '['
        ? `${props} ${str}`
        : `${props}\n${ctx.indent}${str}`;
}
|
||
|
|
||
|
// Stringify one map entry: the key (explicit `? key` or implicit `key:`)
// followed by its value, weaving in any comments attached to either.
function stringifyPair({ key, value }, ctx, onComment, onChompKeep) {
    const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx;
    let keyComment = (isNode(key) && key.comment) || null;
    if (simpleKeys) {
        if (keyComment) {
            throw new Error('With simple keys, key nodes cannot have comments');
        }
        if (isCollection(key)) {
            const msg = 'With simple keys, collection cannot be used as a key value';
            throw new Error(msg);
        }
    }
    // Explicit-key (`? key`) syntax is required for empty keys, collection
    // keys, block-scalar keys & non-node object keys.
    let explicitKey = !simpleKeys &&
        (!key ||
            (keyComment && value == null && !ctx.inFlow) ||
            isCollection(key) ||
            (isScalar(key)
                ? key.type === Scalar.BLOCK_FOLDED || key.type === Scalar.BLOCK_LITERAL
                : typeof key === 'object'));
    ctx = Object.assign({}, ctx, {
        allNullValues: false,
        implicitKey: !explicitKey && (simpleKeys || !allNullValues),
        indent: indent + indentStep
    });
    let keyCommentDone = false;
    let chompKeep = false;
    let str = stringify$1(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true));
    // Per the YAML spec, implicit keys are limited to 1024 characters.
    if (!explicitKey && !ctx.inFlow && str.length > 1024) {
        if (simpleKeys)
            throw new Error('With simple keys, single line scalar must not span more than 1024 characters');
        explicitKey = true;
    }
    if (ctx.inFlow) {
        if (allNullValues || value == null) {
            if (keyCommentDone && onComment)
                onComment();
            return explicitKey ? `? ${str}` : str;
        }
    }
    else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) {
        // Null-valued entries are rendered as a bare explicit key.
        str = `? ${str}`;
        if (keyComment && !keyCommentDone) {
            str += lineComment(str, ctx.indent, commentString(keyComment));
        }
        else if (chompKeep && onChompKeep)
            onChompKeep();
        return str;
    }
    if (keyCommentDone)
        keyComment = null;
    if (explicitKey) {
        if (keyComment)
            str += lineComment(str, ctx.indent, commentString(keyComment));
        str = `? ${str}\n${indent}:`;
    }
    else {
        str = `${str}:`;
        if (keyComment)
            str += lineComment(str, ctx.indent, commentString(keyComment));
    }
    // Blank line & comment-before content between the `:` and the value.
    let vcb = '';
    let valueComment = null;
    if (isNode(value)) {
        if (value.spaceBefore)
            vcb = '\n';
        if (value.commentBefore) {
            const cs = commentString(value.commentBefore);
            vcb += `\n${indentComment(cs, ctx.indent)}`;
        }
        valueComment = value.comment;
    }
    else if (value && typeof value === 'object') {
        value = doc.createNode(value);
    }
    ctx.implicitKey = false;
    if (!explicitKey && !keyComment && isScalar(value))
        ctx.indentAtStart = str.length + 1;
    chompKeep = false;
    if (!indentSeq &&
        indentStep.length >= 2 &&
        !ctx.inFlow &&
        !explicitKey &&
        isSeq(value) &&
        !value.flow &&
        !value.tag &&
        !value.anchor) {
        // If indentSeq === false, consider '- ' as part of indentation where possible
        ctx.indent = ctx.indent.substr(2);
    }
    let valueCommentDone = false;
    const valueStr = stringify$1(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true));
    // Whitespace separating the `:` from the value.
    let ws = ' ';
    if (vcb || keyComment) {
        ws = valueStr === '' && !ctx.inFlow ? vcb : `${vcb}\n${ctx.indent}`;
    }
    else if (!explicitKey && isCollection(value)) {
        // Block collections begin on their own line.
        const flow = valueStr[0] === '[' || valueStr[0] === '{';
        if (!flow || valueStr.includes('\n'))
            ws = `\n${ctx.indent}`;
    }
    else if (valueStr === '' || valueStr[0] === '\n')
        ws = '';
    str += ws + valueStr;
    if (ctx.inFlow) {
        if (valueCommentDone && onComment)
            onComment();
    }
    else if (valueComment && !valueCommentDone) {
        str += lineComment(str, ctx.indent, commentString(valueComment));
    }
    else if (chompKeep && onChompKeep) {
        onChompKeep();
    }
    return str;
}
|
||
|
|
||
|
/**
 * Emit a warning, respecting the configured log level.
 *
 * @param {string} logLevel - Only 'debug' and 'warn' let warnings through.
 * @param {string | Error} warning - The warning to emit.
 */
function warn(logLevel, warning) {
    if (logLevel !== 'debug' && logLevel !== 'warn')
        return;
    // Prefer Node's warning channel; fall back to the console in browsers.
    if (typeof process !== 'undefined' && process.emitWarning) {
        process.emitWarning(warning);
    } else {
        console.warn(warning);
    }
}
|
||
|
|
||
|
const MERGE_KEY = '<<';
/**
 * Add one key/value pair to a JS container (`Map`, `Set`, or plain object)
 * being built from a YAML mapping.
 *
 * When the schema enables merge keys and `key` is `<<`, the value's map
 * entries are merged in instead of adding a literal `<<` entry.
 * Returns the same container, for chaining.
 */
function addPairToJSMap(ctx, map, { key, value }) {
    if (ctx && ctx.doc.schema.merge && isMergeKey(key)) {
        // `<<` merge: resolve an alias value, then merge one or many maps.
        const src = isAlias(value) ? value.resolve(ctx.doc) : value;
        if (isSeq(src)) {
            for (const item of src.items)
                mergeToJSMap(ctx, map, item);
        } else if (Array.isArray(src)) {
            for (const item of src)
                mergeToJSMap(ctx, map, item);
        } else {
            mergeToJSMap(ctx, map, src);
        }
        return map;
    }
    const jsKey = toJS(key, '', ctx);
    if (map instanceof Map) {
        map.set(jsKey, toJS(value, jsKey, ctx));
    } else if (map instanceof Set) {
        map.add(jsKey);
    } else {
        const stringKey = stringifyKey(key, jsKey, ctx);
        const jsValue = toJS(value, stringKey, ctx);
        if (stringKey in map) {
            // Redefine so even non-writable inherited properties are replaced.
            Object.defineProperty(map, stringKey, {
                value: jsValue,
                writable: true,
                enumerable: true,
                configurable: true
            });
        } else {
            map[stringKey] = jsValue;
        }
    }
    return map;
}
|
||
|
/**
 * True if `key` is the YAML merge key: either the raw string '<<' or a
 * plain (untyped) Scalar whose value is '<<'.
 */
const isMergeKey = (key) => {
    if (key === MERGE_KEY)
        return true;
    return (
        isScalar(key) &&
        key.value === MERGE_KEY &&
        (!key.type || key.type === Scalar.PLAIN)
    );
};
|
||
|
// If the value associated with a merge key is a single mapping node, each of
// its key/value pairs is inserted into the current mapping, unless the key
// already exists in it. If the value associated with the merge key is a
// sequence, then this sequence is expected to contain mapping nodes and each
// of these nodes is merged in turn according to its order in the sequence.
// Keys in mapping nodes earlier in the sequence override keys specified in
// later mapping nodes. -- http://yaml.org/type/merge.html
function mergeToJSMap(ctx, map, value) {
    const source = ctx && isAlias(value) ? value.resolve(ctx.doc) : value;
    if (!isMap(source))
        throw new Error('Merge sources must be maps or map aliases');
    const srcMap = source.toJSON(null, ctx, Map);
    for (const [key, entry] of srcMap) {
        if (map instanceof Map) {
            // Existing keys win over merged-in ones.
            if (!map.has(key))
                map.set(key, entry);
        } else if (map instanceof Set) {
            map.add(key);
        } else if (!Object.prototype.hasOwnProperty.call(map, key)) {
            Object.defineProperty(map, key, {
                value: entry,
                writable: true,
                enumerable: true,
                configurable: true
            });
        }
    }
    return map;
}
|
||
|
/**
 * Produce a string property key for a plain-object output map.
 *
 * Scalar JS keys stringify directly; node keys with collection/object values
 * are stringified as flow YAML (warning once, since information is lost).
 */
function stringifyKey(key, jsKey, ctx) {
    if (jsKey === null)
        return '';
    if (typeof jsKey !== 'object')
        return String(jsKey);
    if (isNode(key) && ctx && ctx.doc) {
        const strCtx = createStringifyContext(ctx.doc, {});
        strCtx.anchors = new Set();
        for (const node of ctx.anchors.keys())
            strCtx.anchors.add(node.anchor);
        strCtx.inFlow = true;
        strCtx.inStringifyKey = true;
        const strKey = key.toString(strCtx);
        if (!ctx.mapKeyWarned) {
            // Warn only once per document; truncate long keys in the message.
            let jsonStr = JSON.stringify(strKey);
            if (jsonStr.length > 40)
                jsonStr = jsonStr.substring(0, 36) + '..."';
            warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`);
            ctx.mapKeyWarned = true;
        }
        return strKey;
    }
    return JSON.stringify(jsKey);
}
|
||
|
|
||
|
/**
 * Wrap arbitrary `key` / `value` inputs as YAML nodes and return them as a
 * new Pair.
 */
function createPair(key, value, ctx) {
    return new Pair(createNode(key, undefined, ctx), createNode(value, undefined, ctx));
}
|
||
|
/**
 * A key/value pair within a YAML mapping. Not itself a node, but carries the
 * PAIR node-type marker so `isPair()` can recognise it.
 */
class Pair {
    constructor(key, value = null) {
        // Hidden, non-enumerable type tag used by the is* type guards.
        Object.defineProperty(this, NODE_TYPE, { value: PAIR });
        this.key = key;
        this.value = value;
    }
    /** Deep-ish copy: node keys/values are cloned, other values are shared. */
    clone(schema) {
        const key = isNode(this.key) ? this.key.clone(schema) : this.key;
        const value = isNode(this.value) ? this.value.clone(schema) : this.value;
        return new Pair(key, value);
    }
    /** Convert to a single-entry Map or plain object, per `ctx.mapAsMap`. */
    toJSON(_, ctx) {
        const pair = ctx && ctx.mapAsMap ? new Map() : {};
        return addPairToJSMap(ctx, pair, this);
    }
    toString(ctx, onComment, onChompKeep) {
        if (ctx && ctx.doc)
            return stringifyPair(this, ctx, onComment, onChompKeep);
        return JSON.stringify(this);
    }
}
|
||
|
|
||
|
/**
 * `yaml` defines document-specific options in three places: as an argument of
 * parse, create and stringify calls, in the values of `YAML.defaultOptions`,
 * and in the version-dependent `YAML.Document.defaults` object. Values set in
 * `YAML.defaultOptions` override version-dependent defaults, and argument
 * options override both.
 */
const defaultOptions = {
    intAsBigInt: false, // parse integers as JS numbers, not BigInt
    keepSourceTokens: false, // do not attach CST tokens to parsed nodes
    logLevel: 'warn', // 'warn'/'debug' surface warnings; others suppress them
    prettyErrors: true, // add line/column details to error messages
    strict: true, // report recoverable parse issues as errors
    uniqueKeys: true, // error on duplicate mapping keys
    version: '1.2' // default YAML spec version
};
|
||
|
|
||
|
/**
 * Stringify a map or seq, dispatching on flow vs block style. An explicit
 * `ctx.inFlow` (when not null/undefined) overrides the collection's own
 * `flow` flag. (This is the transpiled form of `ctx.inFlow ?? collection.flow`.)
 */
function stringifyCollection(collection, ctx, options) {
    const inFlow = ctx.inFlow;
    const flow = inFlow !== null && inFlow !== undefined ? inFlow : collection.flow;
    return flow
        ? stringifyFlowCollection(collection, ctx, options)
        : stringifyBlockCollection(collection, ctx, options);
}
|
||
|
/**
 * Render a collection in block style, one item per line, each prefixed by
 * `blockItemPrefix` ('' for maps, '- ' for seqs).
 *
 * @param onComment - Called if the collection's own trailing `comment` was
 *   written into the output.
 * @param onChompKeep - Called if the last item set the chomp-keep flag and no
 *   trailing comment was written.
 */
function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) {
    const { indent, options: { commentString } } = ctx;
    // Items render one level deeper than the collection itself.
    const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null });
    let chompKeep = false; // flag for the preceding node's status
    const lines = [];
    for (let i = 0; i < items.length; ++i) {
        const item = items[i];
        let comment = null; // per-item comment; intentionally shadows the collection comment
        if (isNode(item)) {
            // Blank line before the item, unless it follows a keep-chomped scalar.
            if (!chompKeep && item.spaceBefore)
                lines.push('');
            addCommentBefore(ctx, lines, item.commentBefore, chompKeep);
            if (item.comment)
                comment = item.comment;
        }
        else if (isPair(item)) {
            // For pairs, leading space/comments come from the key node.
            const ik = isNode(item.key) ? item.key : null;
            if (ik) {
                if (!chompKeep && ik.spaceBefore)
                    lines.push('');
                addCommentBefore(ctx, lines, ik.commentBefore, chompKeep);
            }
        }
        chompKeep = false;
        // The callbacks clear `comment` if the item printed it itself, and set
        // `chompKeep` if the item ended with a keep-chomped block scalar.
        let str = stringify$1(item, itemCtx, () => (comment = null), () => (chompKeep = true));
        if (comment)
            str += lineComment(str, itemIndent, commentString(comment));
        if (chompKeep && comment)
            chompKeep = false;
        lines.push(blockItemPrefix + str);
    }
    let str;
    if (lines.length === 0) {
        // An empty block collection still prints in flow form: {} or [].
        str = flowChars.start + flowChars.end;
    }
    else {
        str = lines[0];
        for (let i = 1; i < lines.length; ++i) {
            const line = lines[i];
            // Blank entries (from spaceBefore) become bare newlines, not indented ones.
            str += line ? `\n${indent}${line}` : '\n';
        }
    }
    if (comment) {
        str += '\n' + indentComment(commentString(comment), indent);
        if (onComment)
            onComment();
    }
    else if (chompKeep && onChompKeep)
        onChompKeep();
    return str;
}
|
||
|
/**
 * Render a collection in flow style: `{ a: 1 }` / `[1, 2]` on one line, or
 * spread across lines when comments, nested newlines, or the configured
 * single-line length limit require it.
 *
 * @param onComment - Called if the collection's own trailing `comment` was
 *   written into the output.
 */
function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) {
    const { indent, indentStep, options: { commentString } } = ctx;
    itemIndent += indentStep;
    const itemCtx = Object.assign({}, ctx, {
        indent: itemIndent,
        inFlow: true,
        type: null
    });
    let reqNewline = false; // must the flow collection span multiple lines?
    let linesAtValue = 0;
    const lines = [];
    for (let i = 0; i < items.length; ++i) {
        const item = items[i];
        let comment = null; // per-item comment; intentionally shadows the collection comment
        if (isNode(item)) {
            if (item.spaceBefore)
                lines.push('');
            addCommentBefore(ctx, lines, item.commentBefore, false);
            if (item.comment)
                comment = item.comment;
        }
        else if (isPair(item)) {
            const ik = isNode(item.key) ? item.key : null;
            if (ik) {
                if (ik.spaceBefore)
                    lines.push('');
                addCommentBefore(ctx, lines, ik.commentBefore, false);
                // A comment on the key forces multi-line output.
                if (ik.comment)
                    reqNewline = true;
            }
            const iv = isNode(item.value) ? item.value : null;
            if (iv) {
                if (iv.comment)
                    comment = iv.comment;
                if (iv.commentBefore)
                    reqNewline = true;
            }
            else if (item.value == null && ik && ik.comment) {
                // Key-only pair: the key's comment trails the item.
                comment = ik.comment;
            }
        }
        if (comment)
            reqNewline = true;
        let str = stringify$1(item, itemCtx, () => (comment = null));
        if (i < items.length - 1)
            str += ',';
        if (comment)
            str += lineComment(str, itemIndent, commentString(comment));
        if (!reqNewline && (lines.length > linesAtValue || str.includes('\n')))
            reqNewline = true;
        lines.push(str);
        linesAtValue = lines.length;
    }
    let str;
    const { start, end } = flowChars;
    if (lines.length === 0) {
        str = start + end;
    }
    else {
        if (!reqNewline) {
            // Each entry costs "item, "; fall back to multi-line past the limit.
            const len = lines.reduce((sum, line) => sum + line.length + 2, 2);
            reqNewline = len > Collection.maxFlowStringSingleLineLength;
        }
        if (reqNewline) {
            str = start;
            for (const line of lines)
                str += line ? `\n${indentStep}${indent}${line}` : '\n';
            str += `\n${indent}${end}`;
        }
        else {
            str = `${start} ${lines.join(' ')} ${end}`;
        }
    }
    if (comment) {
        // Fix: argument order was (str, commentString(comment), indent), which
        // passed the comment text as the indent; every other lineComment call
        // site in this file uses (str, indent, commentText).
        str += lineComment(str, indent, commentString(comment));
        if (onComment)
            onComment();
    }
    return str;
}
|
||
|
/**
 * Push an indented comment line onto `lines`, if `comment` is non-empty.
 * When the preceding node ended with a keep-chomped block scalar
 * (`chompKeep`), leading blank lines are stripped from the comment first.
 */
function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {
    if (!comment)
        return;
    const text = chompKeep ? comment.replace(/^\n+/, '') : comment;
    if (!text)
        return;
    const ic = indentComment(commentString(text), indent);
    lines.push(ic.trimStart()); // Avoid double indent on first line
}
|
||
|
|
||
|
/**
 * Find the Pair in `items` whose key matches `key`, comparing by identity
 * and by unwrapped scalar value. Returns `undefined` if none matches.
 */
function findPair(items, key) {
    const k = isScalar(key) ? key.value : key;
    for (const item of items) {
        if (!isPair(item))
            continue;
        if (item.key === key || item.key === k)
            return item;
        if (isScalar(item.key) && item.key.value === k)
            return item;
    }
    return undefined;
}
|
||
|
/**
 * A YAML mapping (`!!map`) collection; `items` holds `Pair` entries.
 */
class YAMLMap extends Collection {
    constructor(schema) {
        super(MAP, schema);
        this.items = [];
    }
    static get tagName() {
        return 'tag:yaml.org,2002:map';
    }
    /**
     * Adds a value to the collection.
     *
     * @param overwrite - If not set `true`, using a key that is already in the
     * collection will throw. Otherwise, overwrites the previous value.
     */
    add(pair, overwrite) {
        let _pair;
        if (isPair(pair))
            _pair = pair;
        else if (!pair || typeof pair !== 'object' || !('key' in pair)) {
            // In TypeScript, this never happens.
            _pair = new Pair(pair, pair.value);
        }
        else
            _pair = new Pair(pair.key, pair.value);
        const prev = findPair(this.items, _pair.key);
        const sortEntries = this.schema && this.schema.sortMapEntries;
        if (prev) {
            if (!overwrite)
                throw new Error(`Key ${_pair.key} already set`);
            // For scalars, keep the old node & its comments and anchors
            if (isScalar(prev.value) && isScalarValue(_pair.value))
                prev.value.value = _pair.value;
            else
                prev.value = _pair.value;
        }
        else if (sortEntries) {
            // Insert at the first position that keeps the map sorted.
            const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);
            if (i === -1)
                this.items.push(_pair);
            else
                this.items.splice(i, 0, _pair);
        }
        else {
            this.items.push(_pair);
        }
    }
    /** Removes the pair matching `key`; returns `true` if one was removed. */
    delete(key) {
        const it = findPair(this.items, key);
        if (!it)
            return false;
        const del = this.items.splice(this.items.indexOf(it), 1);
        return del.length > 0;
    }
    /**
     * Returns the value for `key`, or `undefined` if not found. Scalar values
     * are unwrapped unless `keepScalar` is true.
     */
    get(key, keepScalar) {
        const it = findPair(this.items, key);
        const node = it && it.value;
        return !keepScalar && isScalar(node) ? node.value : node;
    }
    /** True if a pair with a matching key exists. */
    has(key) {
        return !!findPair(this.items, key);
    }
    /** Sets `key` to `value`, overwriting any previous entry. */
    set(key, value) {
        this.add(new Pair(key, value), true);
    }
    /**
     * @param ctx - Conversion context, originally set in Document#toJS()
     * @param {Class} Type - If set, forces the returned collection type
     * @returns Instance of Type, Map, or Object
     */
    toJSON(_, ctx, Type) {
        const map = Type ? new Type() : ctx && ctx.mapAsMap ? new Map() : {};
        if (ctx && ctx.onCreate)
            ctx.onCreate(map);
        for (const item of this.items)
            addPairToJSMap(ctx, map, item);
        return map;
    }
    toString(ctx, onComment, onChompKeep) {
        if (!ctx)
            return JSON.stringify(this);
        // Map stringification requires every item to be a Pair.
        for (const item of this.items) {
            if (!isPair(item))
                throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);
        }
        // Mark the context when every value is null, so pair stringification
        // can use the compact key-only form.
        if (!ctx.allNullValues && this.hasAllNullValues(false))
            ctx = Object.assign({}, ctx, { allNullValues: true });
        return stringifyCollection(this, ctx, {
            blockItemPrefix: '',
            flowChars: { start: '{', end: '}' },
            itemIndent: ctx.indent || '',
            onChompKeep,
            onComment
        });
    }
}
|
||
|
|
||
|
/**
 * Build a YAMLMap node from a JS Map or plain object, applying the
 * JSON.stringify-style `replacer` and honouring `keepUndefined`.
 */
function createMap(schema, obj, ctx) {
    const { keepUndefined, replacer } = ctx;
    const result = new YAMLMap(schema);
    const add = (key, value) => {
        if (typeof replacer === 'function') {
            value = replacer.call(obj, key, value);
        } else if (Array.isArray(replacer) && !replacer.includes(key)) {
            // Array replacer acts as an allow-list of keys.
            return;
        }
        if (value !== undefined || keepUndefined)
            result.items.push(createPair(key, value, ctx));
    };
    if (obj instanceof Map) {
        for (const [key, value] of obj)
            add(key, value);
    } else if (obj && typeof obj === 'object') {
        for (const key of Object.keys(obj))
            add(key, obj[key]);
    }
    if (typeof schema.sortMapEntries === 'function')
        result.items.sort(schema.sortMapEntries);
    return result;
}
|
||
|
// Tag definition for the generic mapping type (`!!map`).
const map = {
    collection: 'map',
    createNode: createMap,
    default: true, // applied to untagged mapping nodes
    nodeClass: YAMLMap,
    tag: 'tag:yaml.org,2002:map',
    resolve(map, onError) {
        // Report a mismatched node type, but return it unchanged so
        // processing can continue.
        if (!isMap(map))
            onError('Expected a mapping for this tag');
        return map;
    }
};
|
||
|
|
||
|
/**
 * A YAML sequence (`!!seq`) collection, with items addressed by
 * non-negative integer index.
 */
class YAMLSeq extends Collection {
    constructor(schema) {
        super(SEQ, schema);
        this.items = [];
    }
    static get tagName() {
        return 'tag:yaml.org,2002:seq';
    }
    /** Appends `value` to the sequence. */
    add(value) {
        this.items.push(value);
    }
    /**
     * Removes a value from the collection.
     *
     * `key` must contain a representation of an integer for this to succeed;
     * it may be wrapped in a `Scalar`.
     *
     * @returns `true` if the item was found and removed.
     */
    delete(key) {
        const idx = asItemIndex(key);
        if (typeof idx !== 'number')
            return false;
        return this.items.splice(idx, 1).length > 0;
    }
    /**
     * Returns the item at `key`, or `undefined` if not found. Scalar items
     * are unwrapped to their value unless `keepScalar` is true; collections
     * are always returned intact.
     *
     * `key` must contain a representation of an integer; it may be wrapped
     * in a `Scalar`.
     */
    get(key, keepScalar) {
        const idx = asItemIndex(key);
        if (typeof idx !== 'number')
            return undefined;
        const item = this.items[idx];
        if (!keepScalar && isScalar(item))
            return item.value;
        return item;
    }
    /**
     * Checks whether an item exists at index `key`.
     *
     * `key` must contain a representation of an integer; it may be wrapped
     * in a `Scalar`.
     */
    has(key) {
        const idx = asItemIndex(key);
        return typeof idx === 'number' && idx < this.items.length;
    }
    /**
     * Sets the item at index `key` to `value`. If the existing item is a
     * Scalar and `value` is a plain scalar value, the node is kept and only
     * its value replaced (preserving comments and anchors).
     *
     * Throws if `key` does not contain a representation of an integer.
     */
    set(key, value) {
        const idx = asItemIndex(key);
        if (typeof idx !== 'number')
            throw new Error(`Expected a valid index, not ${key}.`);
        const prev = this.items[idx];
        if (isScalar(prev) && isScalarValue(value))
            prev.value = value;
        else
            this.items[idx] = value;
    }
    /** Converts the sequence to a plain JS array. */
    toJSON(_, ctx) {
        const seq = [];
        if (ctx && ctx.onCreate)
            ctx.onCreate(seq);
        for (let i = 0; i < this.items.length; ++i)
            seq.push(toJS(this.items[i], String(i), ctx));
        return seq;
    }
    toString(ctx, onComment, onChompKeep) {
        if (!ctx)
            return JSON.stringify(this);
        return stringifyCollection(this, ctx, {
            blockItemPrefix: '- ',
            flowChars: { start: '[', end: ']' },
            itemIndent: (ctx.indent || '') + '  ',
            onChompKeep,
            onComment
        });
    }
}
|
||
|
/**
 * Normalise `key` (possibly a Scalar, possibly a numeric string) into a
 * non-negative integer index, or `null` if it cannot be one.
 */
function asItemIndex(key) {
    let idx = isScalar(key) ? key.value : key;
    // Non-empty numeric strings are accepted as indices.
    if (idx && typeof idx === 'string')
        idx = Number(idx);
    if (typeof idx !== 'number' || !Number.isInteger(idx) || idx < 0)
        return null;
    return idx;
}
|
||
|
|
||
|
/**
 * Build a YAMLSeq node from any iterable, applying the JSON.stringify-style
 * `replacer` (Sets pass the value itself as the replacer key).
 */
function createSeq(schema, obj, ctx) {
    const { replacer } = ctx;
    const result = new YAMLSeq(schema);
    // Non-iterable inputs yield an empty sequence.
    if (obj && Symbol.iterator in Object(obj)) {
        let index = 0;
        for (let item of obj) {
            if (typeof replacer === 'function') {
                const key = obj instanceof Set ? item : String(index++);
                item = replacer.call(obj, key, item);
            }
            result.items.push(createNode(item, undefined, ctx));
        }
    }
    return result;
}
|
||
|
// Tag definition for the generic sequence type (`!!seq`).
const seq = {
    collection: 'seq',
    createNode: createSeq,
    default: true, // applied to untagged sequence nodes
    nodeClass: YAMLSeq,
    tag: 'tag:yaml.org,2002:seq',
    resolve(seq, onError) {
        // Report a mismatched node type, but return it unchanged.
        if (!isSeq(seq))
            onError('Expected a sequence for this tag');
        return seq;
    }
};
|
||
|
|
||
|
// Tag definition for plain strings (`!!str`), the fallback scalar type.
const string = {
    identify: value => typeof value === 'string',
    default: true,
    tag: 'tag:yaml.org,2002:str',
    resolve: str => str, // the source text is already the value
    stringify(item, ctx, onComment, onChompKeep) {
        // actualString enables the quoting/escaping rules for real strings.
        ctx = Object.assign({ actualString: true }, ctx);
        return stringifyString(item, ctx, onComment, onChompKeep);
    }
};
|
||
|
|
||
|
// Tag definition for null (`!!null`): `~`, `null`/`Null`/`NULL`, or empty.
const nullTag = {
    identify: value => value == null, // matches both null and undefined
    createNode: () => new Scalar(null),
    default: true,
    tag: 'tag:yaml.org,2002:null',
    test: /^(?:~|[Nn]ull|NULL)?$/,
    resolve: () => new Scalar(null),
    // Keep the original spelling when it is still a valid null; otherwise
    // fall back to the configured null string.
    stringify: ({ source }, ctx) => source && nullTag.test.test(source) ? source : ctx.options.nullStr
};
|
||
|
|
||
|
// Tag definition for booleans (`!!bool`): true/True/TRUE, false/False/FALSE.
const boolTag = {
    identify: value => typeof value === 'boolean',
    default: true,
    tag: 'tag:yaml.org,2002:bool',
    test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,
    resolve: str => new Scalar(str[0] === 't' || str[0] === 'T'),
    stringify({ source, value }, ctx) {
        // Keep the original spelling if it still matches the node's value.
        if (source && boolTag.test.test(source)) {
            const sv = source[0] === 't' || source[0] === 'T';
            if (value === sv)
                return source;
        }
        return value ? ctx.options.trueStr : ctx.options.falseStr;
    }
};
|
||
|
|
||
|
/**
 * Render a numeric scalar as YAML source text.
 *
 * BigInts stringify exactly; non-finite numbers use the core-schema
 * spellings (.nan/.inf/-.inf); floats are padded with trailing zeroes to
 * honour `minFractionDigits` (so e.g. `1.50` round-trips).
 */
function stringifyNumber({ format, minFractionDigits, tag, value }) {
    if (typeof value === 'bigint')
        return String(value);
    const num = typeof value === 'number' ? value : Number(value);
    if (!isFinite(num))
        return isNaN(num) ? '.nan' : num < 0 ? '-.inf' : '.inf';
    let text = JSON.stringify(value);
    const padFraction = !format &&
        minFractionDigits &&
        (!tag || tag === 'tag:yaml.org,2002:float') &&
        /^\d/.test(text);
    if (padFraction) {
        let dot = text.indexOf('.');
        if (dot < 0) {
            dot = text.length;
            text += '.';
        }
        // Append zeroes until minFractionDigits digits follow the dot.
        const missing = minFractionDigits - (text.length - dot - 1);
        if (missing > 0)
            text += '0'.repeat(missing);
    }
    return text;
}
|
||
|
|
||
|
// Core-schema non-finite floats: .nan, .inf and -.inf (with case variants).
const floatNaN$1 = {
    identify: value => typeof value === 'number',
    default: true,
    tag: 'tag:yaml.org,2002:float',
    test: /^(?:[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN))$/,
    // The last three characters distinguish nan from inf; a leading '-'
    // selects negative infinity.
    resolve: str => str.slice(-3).toLowerCase() === 'nan'
        ? NaN
        : str[0] === '-'
            ? Number.NEGATIVE_INFINITY
            : Number.POSITIVE_INFINITY,
    stringify: stringifyNumber
};
|
||
|
// Core-schema floats in exponential notation (e.g. `1.2e3`).
const floatExp$1 = {
    identify: value => typeof value === 'number',
    default: true,
    tag: 'tag:yaml.org,2002:float',
    format: 'EXP',
    test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/,
    resolve: str => parseFloat(str),
    stringify(node) {
        const num = Number(node.value);
        // toExponential cannot render non-finite values; fall back for those.
        return isFinite(num) ? num.toExponential() : stringifyNumber(node);
    }
};
|
||
|
// Core-schema plain decimal floats (e.g. `1.5`, `.5`).
const float$1 = {
    identify: value => typeof value === 'number',
    default: true,
    tag: 'tag:yaml.org,2002:float',
    test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/,
    resolve(str) {
        const node = new Scalar(parseFloat(str));
        const dot = str.indexOf('.');
        // Record the fraction width when the source ends in 0, so that e.g.
        // `1.50` round-trips with its trailing zero.
        if (dot !== -1 && str[str.length - 1] === '0')
            node.minFractionDigits = str.length - dot - 1;
        return node;
    },
    stringify: stringifyNumber
};
|
||
|
|
||
|
// True for values representable as YAML integers (bigint or integral number).
const intIdentify$2 = (value) => typeof value === 'bigint' || Number.isInteger(value);
/**
 * Parse integer source text. BigInt() understands the 0o/0x prefixes itself,
 * so it receives the full source; parseInt needs the prefix stripped
 * (`offset`) and an explicit `radix`.
 */
const intResolve$1 = (str, offset, radix, { intAsBigInt }) => {
    if (intAsBigInt)
        return BigInt(str);
    return parseInt(str.substring(offset), radix);
};
/**
 * Render a non-negative integer in the given radix with its prefix
 * (e.g. '0x'); anything else falls back to plain number stringification.
 */
function intStringify$1(node, radix, prefix) {
    const { value } = node;
    if (intIdentify$2(value) && value >= 0)
        return prefix + value.toString(radix);
    return stringifyNumber(node);
}
|
||
|
// Core-schema octal integers (e.g. `0o17`).
const intOct$1 = {
    identify: value => intIdentify$2(value) && value >= 0,
    default: true,
    tag: 'tag:yaml.org,2002:int',
    format: 'OCT',
    test: /^0o[0-7]+$/,
    resolve: (str, _onError, opt) => intResolve$1(str, 2, 8, opt), // offset 2 skips "0o"
    stringify: node => intStringify$1(node, 8, '0o')
};
// Core-schema decimal integers.
const int$1 = {
    identify: intIdentify$2,
    default: true,
    tag: 'tag:yaml.org,2002:int',
    test: /^[-+]?[0-9]+$/,
    resolve: (str, _onError, opt) => intResolve$1(str, 0, 10, opt),
    stringify: stringifyNumber
};
// Core-schema hexadecimal integers (e.g. `0xff`).
const intHex$1 = {
    identify: value => intIdentify$2(value) && value >= 0,
    default: true,
    tag: 'tag:yaml.org,2002:int',
    format: 'HEX',
    test: /^0x[0-9a-fA-F]+$/,
    resolve: (str, _onError, opt) => intResolve$1(str, 2, 16, opt), // offset 2 skips "0x"
    stringify: node => intStringify$1(node, 16, '0x')
};
|
||
|
|
||
|
// The YAML 1.2 core schema. Order matters: the first tag whose `test`
// matches wins when resolving an untagged plain scalar.
const schema$2 = [
    map,
    seq,
    string,
    nullTag,
    boolTag,
    intOct$1,
    int$1,
    intHex$1,
    floatNaN$1,
    floatExp$1,
    float$1
];
|
||
|
|
||
|
/** True for values representable as YAML integers: bigint or integral number. */
function intIdentify$1(value) {
    if (typeof value === 'bigint')
        return true;
    return Number.isInteger(value);
}
|
||
|
// Stringify any scalar value via its JSON representation.
const stringifyJSON = ({ value }) => JSON.stringify(value);
// Scalar tags for the strict JSON schema: tighter test patterns than the
// core schema, and all values stringify with JSON.stringify.
const jsonScalars = [
    {
        identify: value => typeof value === 'string',
        default: true,
        tag: 'tag:yaml.org,2002:str',
        resolve: str => str,
        stringify: stringifyJSON
    },
    {
        identify: value => value == null,
        createNode: () => new Scalar(null),
        default: true,
        tag: 'tag:yaml.org,2002:null',
        test: /^null$/,
        resolve: () => null,
        stringify: stringifyJSON
    },
    {
        identify: value => typeof value === 'boolean',
        default: true,
        tag: 'tag:yaml.org,2002:bool',
        test: /^true|false$/,
        resolve: str => str === 'true',
        stringify: stringifyJSON
    },
    {
        identify: intIdentify$1,
        default: true,
        tag: 'tag:yaml.org,2002:int',
        test: /^-?(?:0|[1-9][0-9]*)$/,
        resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10),
        // BigInt values are not JSON-serialisable, so they use toString().
        stringify: ({ value }) => intIdentify$1(value) ? value.toString() : JSON.stringify(value)
    },
    {
        identify: value => typeof value === 'number',
        default: true,
        tag: 'tag:yaml.org,2002:float',
        test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,
        resolve: str => parseFloat(str),
        stringify: stringifyJSON
    }
];
// Catch-all for the JSON schema: any unresolved plain scalar is reported as
// an error, but its source text is kept as a string value.
const jsonError = {
    default: true,
    tag: '',
    test: /^/,
    resolve(str, onError) {
        onError(`Unresolved plain scalar ${JSON.stringify(str)}`);
        return str;
    }
};
const schema$1 = [map, seq].concat(jsonScalars, jsonError);
|
||
|
|
||
|
// Tag definition for `!!binary`: base64-encoded binary data.
const binary = {
    identify: value => value instanceof Uint8Array, // Buffer extends Uint8Array
    default: false,
    tag: 'tag:yaml.org,2002:binary',
    /**
     * Returns a Buffer in node and an Uint8Array in browsers
     *
     * To use the resulting buffer as an image, you'll want to do something like:
     *
     *   const blob = new Blob([buffer], { type: 'image/jpeg' })
     *   document.querySelector('#photo').src = URL.createObjectURL(blob)
     */
    resolve(src, onError) {
        if (typeof Buffer === 'function') {
            return Buffer.from(src, 'base64');
        }
        else if (typeof atob === 'function') {
            // On IE 11, atob() can't handle newlines
            const str = atob(src.replace(/[\n\r]/g, ''));
            const buffer = new Uint8Array(str.length);
            for (let i = 0; i < str.length; ++i)
                buffer[i] = str.charCodeAt(i);
            return buffer;
        }
        else {
            onError('This environment does not support reading binary tags; either Buffer or atob is required');
            return src;
        }
    },
    stringify({ comment, type, value }, ctx, onComment, onChompKeep) {
        const buf = value; // checked earlier by binary.identify()
        let str;
        if (typeof Buffer === 'function') {
            str =
                buf instanceof Buffer
                    ? buf.toString('base64')
                    : Buffer.from(buf.buffer).toString('base64');
        }
        else if (typeof btoa === 'function') {
            // Build a latin-1 string byte by byte for btoa.
            let s = '';
            for (let i = 0; i < buf.length; ++i)
                s += String.fromCharCode(buf[i]);
            str = btoa(s);
        }
        else {
            throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');
        }
        // Default to a literal block scalar for binary output.
        if (!type)
            type = Scalar.BLOCK_LITERAL;
        if (type !== Scalar.QUOTE_DOUBLE) {
            // Wrap the base64 text to the configured line width.
            const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);
            const n = Math.ceil(str.length / lineWidth);
            const lines = new Array(n);
            for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {
                lines[i] = str.substr(o, lineWidth);
            }
            str = lines.join(type === Scalar.BLOCK_LITERAL ? '\n' : ' ');
        }
        return stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep);
    }
};
|
||
|
|
||
|
/**
 * Resolve a `!!pairs` (or `!!omap`) sequence: each item must be a Pair, or a
 * single-entry map that is converted into one. Malformed items are reported
 * via `onError` but processing continues.
 */
function resolvePairs(seq, onError) {
    if (isSeq(seq)) {
        for (let i = 0; i < seq.items.length; ++i) {
            let item = seq.items[i];
            if (isPair(item))
                continue;
            else if (isMap(item)) {
                if (item.items.length > 1)
                    onError('Each pair must have its own sequence indicator');
                // An empty map still yields a pair, with a null key.
                const pair = item.items[0] || new Pair(new Scalar(null));
                // Carry the wrapping map's comments over to the pair.
                if (item.commentBefore)
                    pair.key.commentBefore = pair.key.commentBefore
                        ? `${item.commentBefore}\n${pair.key.commentBefore}`
                        : item.commentBefore;
                if (item.comment) {
                    const cn = pair.value || pair.key;
                    cn.comment = cn.comment
                        ? `${item.comment}\n${cn.comment}`
                        : item.comment;
                }
                item = pair;
            }
            // Any other node becomes a key-only pair.
            seq.items[i] = isPair(item) ? item : new Pair(item);
        }
    }
    else
        onError('Expected a sequence for this tag');
    return seq;
}
|
||
|
/**
 * Build a `!!pairs` sequence node from an iterable of [key, value] tuples or
 * single-entry { key: value } objects; any other item becomes a key-only
 * pair. Applies the JSON.stringify-style `replacer` if given.
 */
function createPairs(schema, iterable, ctx) {
    const { replacer } = ctx;
    const pairs = new YAMLSeq(schema);
    pairs.tag = 'tag:yaml.org,2002:pairs';
    let i = 0;
    if (iterable && Symbol.iterator in Object(iterable)) {
        for (let it of iterable) {
            if (typeof replacer === 'function')
                it = replacer.call(iterable, String(i++), it);
            let key, value;
            if (Array.isArray(it)) {
                if (it.length !== 2)
                    throw new TypeError(`Expected [key, value] tuple: ${it}`);
                [key, value] = it;
            }
            else if (it && it instanceof Object) {
                const keys = Object.keys(it);
                if (keys.length !== 1)
                    throw new TypeError(`Expected { key: value } tuple: ${it}`);
                key = keys[0];
                value = it[key];
            }
            else {
                // Primitive items become key-only pairs (value stays undefined).
                key = it;
            }
            pairs.items.push(createPair(key, value, ctx));
        }
    }
    return pairs;
}
|
||
|
// Tag definition for `!!pairs`: a sequence of key/value pairs that, unlike
// `!!omap`, may contain duplicate keys.
const pairs = {
    collection: 'seq',
    default: false, // only applied when explicitly tagged
    tag: 'tag:yaml.org,2002:pairs',
    resolve: resolvePairs,
    createNode: createPairs
};
|
||
|
|
||
|
// A `!!omap` node: serialized as a sequence of single-pair maps, but exposing
// the map-like accessors of YAMLMap bound onto a YAMLSeq instance.
class YAMLOMap extends YAMLSeq {
    constructor() {
        super();
        // Borrow the accessors from YAMLMap so the omap can be used like a
        // mapping even though its underlying representation is a sequence.
        this.add = YAMLMap.prototype.add.bind(this);
        this.delete = YAMLMap.prototype.delete.bind(this);
        this.get = YAMLMap.prototype.get.bind(this);
        this.has = YAMLMap.prototype.has.bind(this);
        this.set = YAMLMap.prototype.set.bind(this);
        this.tag = YAMLOMap.tag;
    }
    /**
     * If `ctx` is given, the return type is actually `Map<unknown, unknown>`,
     * but TypeScript won't allow widening the signature of a child method.
     */
    toJSON(_, ctx) {
        if (!ctx)
            return super.toJSON(_);
        const map = new Map();
        if (ctx && ctx.onCreate)
            ctx.onCreate(map);
        for (const pair of this.items) {
            let key, value;
            if (isPair(pair)) {
                key = toJS(pair.key, '', ctx);
                value = toJS(pair.value, key, ctx);
            }
            else {
                // A non-pair item becomes a key with an undefined value.
                key = toJS(pair, '', ctx);
            }
            if (map.has(key))
                throw new Error('Ordered maps must not include duplicate keys');
            map.set(key, value);
        }
        return map;
    }
}
YAMLOMap.tag = 'tag:yaml.org,2002:omap';
|
||
|
// Tag definition for the YAML 1.1 `!!omap` collection: an ordered mapping
// with unique keys, represented as a sequence of single-pair maps.
const omap = {
    collection: 'seq',
    identify: value => value instanceof Map,
    nodeClass: YAMLOMap,
    default: false,
    tag: 'tag:yaml.org,2002:omap',
    resolve(seq, onError) {
        const pairs = resolvePairs(seq, onError);
        const seenKeys = [];
        for (const { key } of pairs.items) {
            if (isScalar(key)) {
                // Only scalar keys are checked for duplicates here;
                // collection keys are not compared.
                if (seenKeys.includes(key.value)) {
                    onError(`Ordered maps must not include duplicate keys: ${key.value}`);
                }
                else {
                    seenKeys.push(key.value);
                }
            }
        }
        return Object.assign(new YAMLOMap(), pairs);
    },
    createNode(schema, iterable, ctx) {
        const pairs = createPairs(schema, iterable, ctx);
        const omap = new YAMLOMap();
        omap.items = pairs.items;
        return omap;
    }
};
|
||
|
|
||
|
/**
 * Stringify a boolean scalar, preserving its original source text when that
 * text is still a valid YAML 1.1 boolean of the same polarity; otherwise
 * falls back to the configured `trueStr`/`falseStr` option.
 */
function boolStringify({ value, source }, ctx) {
    const boolObj = value ? trueTag : falseTag;
    if (source && boolObj.test.test(source))
        return source;
    return value ? ctx.options.trueStr : ctx.options.falseStr;
}
const trueTag = {
    identify: value => value === true,
    default: true,
    tag: 'tag:yaml.org,2002:bool',
    test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,
    resolve: () => new Scalar(true),
    stringify: boolStringify
};
const falseTag = {
    identify: value => value === false,
    default: true,
    tag: 'tag:yaml.org,2002:bool',
    // No `i` flag: YAML 1.1 enumerates the exact casings (n, N, no, No, NO,
    // false, False, FALSE, off, Off, OFF). The previous case-insensitive
    // regex wrongly accepted forms like `nO`, and was inconsistent with
    // trueTag above.
    test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/,
    resolve: () => new Scalar(false),
    stringify: boolStringify
};
|
||
|
|
||
|
// `!!float` scalar handling for the YAML 1.1 schema.
// Not-a-number / infinity forms: .nan, .inf, -.inf (three casings each).
const floatNaN = {
    identify: value => typeof value === 'number',
    default: true,
    tag: 'tag:yaml.org,2002:float',
    test: /^[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN)$/,
    resolve: (str) => str.slice(-3).toLowerCase() === 'nan'
        ? NaN
        : str[0] === '-'
            ? Number.NEGATIVE_INFINITY
            : Number.POSITIVE_INFINITY,
    stringify: stringifyNumber
};
// Exponential notation; `_` digit separators are allowed and stripped.
const floatExp = {
    identify: value => typeof value === 'number',
    default: true,
    tag: 'tag:yaml.org,2002:float',
    format: 'EXP',
    test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/,
    resolve: (str) => parseFloat(str.replace(/_/g, '')),
    stringify(node) {
        const num = Number(node.value);
        return isFinite(num) ? num.toExponential() : stringifyNumber(node);
    }
};
// Plain decimal floats, e.g. `3.14` or `-0.5`.
const float = {
    identify: value => typeof value === 'number',
    default: true,
    tag: 'tag:yaml.org,2002:float',
    test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/,
    resolve(str) {
        const node = new Scalar(parseFloat(str.replace(/_/g, '')));
        const dot = str.indexOf('.');
        if (dot !== -1) {
            const f = str.substring(dot + 1).replace(/_/g, '');
            // Remember trailing zeroes so e.g. `1.20` round-trips unchanged.
            if (f[f.length - 1] === '0')
                node.minFractionDigits = f.length;
        }
        return node;
    },
    stringify: stringifyNumber
};
|
||
|
|
||
|
/** True for values representable as a YAML `!!int`: bigints and integers. */
const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);
/**
 * Parse an integer scalar in the given radix.
 *
 * `offset` is the index of the first digit (past any `0b`/`0`/`0x` prefix;
 * a leading sign is skipped here). `_` digit separators are stripped. When
 * `intAsBigInt` is set, a BigInt is returned instead of a number.
 */
function intResolve(str, offset, radix, { intAsBigInt }) {
    const sign = str[0];
    const negative = sign === '-';
    if (negative || sign === '+')
        offset += 1;
    const digits = str.substring(offset).replace(/_/g, '');
    if (intAsBigInt) {
        // BigInt() needs an explicit base prefix for non-decimal literals.
        const prefixes = { 2: '0b', 8: '0o', 16: '0x' };
        const abs = BigInt((prefixes[radix] || '') + digits);
        return negative ? -abs : abs;
    }
    const abs = parseInt(digits, radix);
    return negative ? -abs : abs;
}
/**
 * Stringify an integer scalar in the given radix with the given prefix
 * (e.g. `0x`), keeping the sign in front of the prefix. Non-integer values
 * fall back to the generic number stringifier.
 */
function intStringify(node, radix, prefix) {
    const { value } = node;
    if (!intIdentify(value))
        return stringifyNumber(node);
    const digits = value.toString(radix);
    return value < 0 ? `-${prefix}${digits.slice(1)}` : `${prefix}${digits}`;
}
|
||
|
// `!!int` tag variants for the YAML 1.1 schema. Each variant records its
// source `format` so the original base is preserved when re-stringified.
const intBin = {
    identify: intIdentify,
    default: true,
    tag: 'tag:yaml.org,2002:int',
    format: 'BIN',
    test: /^[-+]?0b[0-1_]+$/,
    // offset 2 skips the `0b` prefix (intResolve skips the sign itself).
    resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt),
    stringify: node => intStringify(node, 2, '0b')
};
const intOct = {
    identify: intIdentify,
    default: true,
    tag: 'tag:yaml.org,2002:int',
    format: 'OCT',
    test: /^[-+]?0[0-7_]+$/,
    // offset 1 skips the leading `0` of the octal form.
    resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt),
    stringify: node => intStringify(node, 8, '0')
};
const int = {
    identify: intIdentify,
    default: true,
    tag: 'tag:yaml.org,2002:int',
    test: /^[-+]?[0-9][0-9_]*$/,
    resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),
    stringify: stringifyNumber
};
const intHex = {
    identify: intIdentify,
    default: true,
    tag: 'tag:yaml.org,2002:int',
    format: 'HEX',
    test: /^[-+]?0x[0-9a-fA-F_]+$/,
    // offset 2 skips the `0x` prefix.
    resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),
    stringify: node => intStringify(node, 16, '0x')
};
|
||
|
|
||
|
// A `!!set` node: a mapping in which every value is null, used to represent
// a collection of unique keys.
class YAMLSet extends YAMLMap {
    constructor(schema) {
        super(schema);
        this.tag = YAMLSet.tag;
    }
    /**
     * Add a key to the set; no-op when an equal key is already present.
     * Accepts a Pair, a `{ key, value: null }`-shaped object, or a bare key.
     */
    add(key) {
        let pair;
        if (isPair(key))
            pair = key;
        else if (typeof key === 'object' &&
            'key' in key &&
            'value' in key &&
            key.value === null)
            pair = new Pair(key.key, null);
        else
            pair = new Pair(key, null);
        const prev = findPair(this.items, pair.key);
        if (!prev)
            this.items.push(pair);
    }
    /**
     * Returns the stored key. By default unwraps scalar keys to their raw
     * value; pass `keepPair` to get the whole Pair instead.
     */
    get(key, keepPair) {
        const pair = findPair(this.items, key);
        return !keepPair && isPair(pair)
            ? isScalar(pair.key)
                ? pair.key.value
                : pair.key
            : pair;
    }
    /**
     * Membership toggle: `value` must be a boolean — true adds the key,
     * false removes it.
     */
    set(key, value) {
        if (typeof value !== 'boolean')
            throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);
        const prev = findPair(this.items, key);
        if (prev && !value) {
            this.items.splice(this.items.indexOf(prev), 1);
        }
        else if (!prev && value) {
            this.items.push(new Pair(key));
        }
    }
    toJSON(_, ctx) {
        return super.toJSON(_, ctx, Set);
    }
    toString(ctx, onComment, onChompKeep) {
        if (!ctx)
            return JSON.stringify(this);
        // A valid set has only null values; anything else cannot be emitted.
        if (this.hasAllNullValues(true))
            return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep);
        else
            throw new Error('Set items must all have null values');
    }
}
YAMLSet.tag = 'tag:yaml.org,2002:set';
|
||
|
// Tag definition for the YAML 1.1 `!!set` collection.
const set = {
    collection: 'map',
    identify: value => value instanceof Set,
    nodeClass: YAMLSet,
    default: false,
    tag: 'tag:yaml.org,2002:set',
    resolve(map, onError) {
        if (isMap(map)) {
            if (map.hasAllNullValues(true))
                return Object.assign(new YAMLSet(), map);
            else
                onError('Set items must all have null values');
        }
        else
            onError('Expected a mapping for this tag');
        // On error the original node is returned unchanged.
        return map;
    },
    createNode(schema, iterable, ctx) {
        const { replacer } = ctx;
        const set = new YAMLSet(schema);
        if (iterable && Symbol.iterator in Object(iterable))
            for (let value of iterable) {
                if (typeof replacer === 'function')
                    value = replacer.call(iterable, value, value);
                set.items.push(createPair(value, null, ctx));
            }
        return set;
    }
};
|
||
|
|
||
|
/**
 * Parse a base-60 (sexagesimal) scalar such as `1:23:45` into a number, or
 * into a BigInt when `asBigInt` is set. A leading `-`/`+` sign and `_`
 * digit separators are handled.
 *
 * Internal types handle bigint as number, because TS can't figure it out.
 */
function parseSexagesimal(str, asBigInt) {
    const sign = str[0];
    const body = sign === '-' || sign === '+' ? str.substring(1) : str;
    // All arithmetic goes through num() so number and bigint never mix.
    const num = (n) => asBigInt ? BigInt(n) : Number(n);
    let total = num(0);
    for (const part of body.replace(/_/g, '').split(':')) {
        total = total * num(60) + num(part);
    }
    return sign === '-' ? num(-1) * total : total;
}
|
||
|
/**
 * hhhh:mm:ss.sss
 *
 * Stringify a numeric (or bigint) scalar in base-60 form with at least one
 * `:` separator, e.g. `90` -> `01:30`.
 *
 * Internal types handle bigint as number, because TS can't figure it out.
 */
function stringifySexagesimal(node) {
    let { value } = node;
    let num = (n) => n;
    if (typeof value === 'bigint')
        num = n => BigInt(n);
    else if (isNaN(value) || !isFinite(value))
        return stringifyNumber(node);
    let sign = '';
    if (value < 0) {
        sign = '-';
        value *= num(-1);
    }
    const sixty = num(60);
    const parts = [value % sixty]; // seconds, including ms
    if (value >= 60) {
        value = (value - parts[0]) / sixty;
        parts.unshift(value % sixty); // minutes
        if (value >= 60) {
            value = (value - parts[0]) / sixty;
            parts.unshift(value); // hours
        }
    }
    else {
        parts.unshift(0); // at least one : is required
    }
    const pad2 = (n) => (n < 10 ? '0' + String(n) : String(n));
    return (sign +
        parts
            .map(pad2)
            .join(':')
            .replace(/000000\d*$/, '') // % 60 may introduce error
    );
}
|
||
|
// Sexagesimal (base-60) integer, e.g. `3:25:45` — YAML 1.1 `!!int` TIME format.
const intTime = {
    identify: value => typeof value === 'bigint' || Number.isInteger(value),
    default: true,
    tag: 'tag:yaml.org,2002:int',
    format: 'TIME',
    test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/,
    resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt),
    stringify: stringifySexagesimal
};
// Sexagesimal float, e.g. `3:25:45.5` — YAML 1.1 `!!float` TIME format.
const floatTime = {
    identify: value => typeof value === 'number',
    default: true,
    tag: 'tag:yaml.org,2002:float',
    format: 'TIME',
    test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/,
    resolve: str => parseSexagesimal(str, false),
    stringify: stringifySexagesimal
};
const timestamp = {
    identify: value => value instanceof Date,
    default: true,
    tag: 'tag:yaml.org,2002:timestamp',
    // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part
    // may be omitted altogether, resulting in a date format. In such a case, the time part is
    // assumed to be 00:00:00Z (start of day, UTC).
    test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd
        '(?:' + // time is optional
        '(?:t|T|[ \\t]+)' + // t | T | whitespace
        '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?
        '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30
        ')?$'),
    resolve(str) {
        const match = str.match(timestamp.test);
        if (!match)
            throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd');
        const [, year, month, day, hour, minute, second] = match.map(Number);
        // Fractional seconds (match[7]) are truncated to millisecond precision.
        const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0;
        let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec);
        const tz = match[8];
        if (tz && tz !== 'Z') {
            let d = parseSexagesimal(tz, false);
            // A bare small offset like `+5` means hours, not minutes.
            if (Math.abs(d) < 30)
                d *= 60;
            date -= 60000 * d;
        }
        return new Date(date);
    },
    stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '')
};
|
||
|
|
||
|
// The YAML 1.1 schema. Order matters: the first tag whose `test` matches is
// used when resolving plain scalars.
const schema = [
    map,
    seq,
    string,
    nullTag,
    trueTag,
    falseTag,
    intBin,
    intOct,
    int,
    intHex,
    floatNaN,
    floatExp,
    float,
    binary,
    omap,
    pairs,
    set,
    intTime,
    floatTime,
    timestamp
];
|
||
|
|
||
|
// Built-in schemas addressable by name via the `schema` document option.
const schemas = new Map([
    ['core', schema$2],
    ['failsafe', [map, seq, string]],
    ['json', schema$1],
    ['yaml11', schema],
    ['yaml-1.1', schema]
]);
// Individual tags addressable by short name in the `customTags` option.
const tagsByName = {
    binary,
    bool: boolTag,
    float: float$1,
    floatExp: floatExp$1,
    floatNaN: floatNaN$1,
    floatTime,
    int: int$1,
    intHex: intHex$1,
    intOct: intOct$1,
    intTime,
    map,
    null: nullTag,
    omap,
    pairs,
    seq,
    set,
    timestamp
};
// Tags resolved by their explicit URI even when not part of the active
// schema (used when the `resolveKnownTags` option is enabled).
const coreKnownTags = {
    'tag:yaml.org,2002:binary': binary,
    'tag:yaml.org,2002:omap': omap,
    'tag:yaml.org,2002:pairs': pairs,
    'tag:yaml.org,2002:set': set,
    'tag:yaml.org,2002:timestamp': timestamp
};
|
||
|
/**
 * Resolve the effective tag list for a schema.
 *
 * Starts from the named built-in schema (or an empty list when only a
 * `customTags` array is given), then appends/transforms with `customTags`
 * (array of tags or short names, or a function over the base list). Throws
 * for an unknown schema name or an unknown tag short name.
 */
function getTags(customTags, schemaName) {
    let tags = schemas.get(schemaName);
    if (!tags) {
        if (!Array.isArray(customTags)) {
            const known = Array.from(schemas.keys())
                .filter(key => key !== 'yaml11')
                .map(key => JSON.stringify(key))
                .join(', ');
            throw new Error(`Unknown schema "${schemaName}"; use one of ${known} or define customTags array`);
        }
        tags = [];
    }
    if (Array.isArray(customTags)) {
        for (const tag of customTags)
            tags = tags.concat(tag);
    }
    else if (typeof customTags === 'function') {
        tags = customTags(tags.slice());
    }
    // Replace any short-name strings with the actual tag objects.
    return tags.map(tag => {
        if (typeof tag !== 'string')
            return tag;
        const tagObj = tagsByName[tag];
        if (!tagObj) {
            const known = Object.keys(tagsByName)
                .map(key => JSON.stringify(key))
                .join(', ');
            throw new Error(`Unknown custom tag "${tag}"; use one of ${known}`);
        }
        return tagObj;
    });
}
|
||
|
|
||
|
// Default key comparator used when `sortMapEntries: true` is given.
const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;
// Resolved schema: the ordered tag list plus collection/scalar fallbacks and
// stringification defaults for a document.
class Schema {
    constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) {
        // `compat` may be a tag array or a schema name; null disables it.
        this.compat = Array.isArray(compat)
            ? getTags(compat, 'compat')
            : compat
                ? getTags(null, compat)
                : null;
        this.merge = !!merge;
        this.name = (typeof schema === 'string' && schema) || 'core';
        this.knownTags = resolveKnownTags ? coreKnownTags : {};
        this.tags = getTags(customTags, this.name);
        this.toStringOptions = toStringDefaults || null;
        // Non-enumerable fallback tags for the default collection/scalar kinds.
        Object.defineProperty(this, MAP, { value: map });
        Object.defineProperty(this, SCALAR$1, { value: string });
        Object.defineProperty(this, SEQ, { value: seq });
        // Used by createMap()
        this.sortMapEntries =
            sortMapEntries === true ? sortMapEntriesByKey : sortMapEntries || null;
    }
    /** Shallow copy of the schema, with an independent `tags` array. */
    clone() {
        const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this));
        copy.tags = this.tags.slice();
        return copy;
    }
}
|
||
|
|
||
|
/**
 * Render a Document as a YAML string: directives (with a `---` marker when
 * needed), the stringified contents, and any document-level comments,
 * joined with newlines.
 */
function stringifyDocument(doc, options) {
    const lines = [];
    let hasDirectives = options.directives === true;
    if (options.directives !== false && doc.directives) {
        const dir = doc.directives.toString(doc);
        if (dir) {
            lines.push(dir);
            hasDirectives = true;
        }
        else if (doc.directives.marker)
            hasDirectives = true;
    }
    if (hasDirectives)
        lines.push('---');
    const ctx = createStringifyContext(doc, options);
    const { commentString } = ctx.options;
    if (doc.commentBefore) {
        // Keep a blank line between the leading comment and any directives.
        if (lines.length !== 1)
            lines.unshift('');
        const cs = commentString(doc.commentBefore);
        lines.unshift(indentComment(cs, ''));
    }
    let chompKeep = false;
    let contentComment = null;
    if (doc.contents) {
        if (isNode(doc.contents)) {
            if (doc.contents.spaceBefore && hasDirectives)
                lines.push('');
            if (doc.contents.commentBefore) {
                const cs = commentString(doc.contents.commentBefore);
                lines.push(indentComment(cs, ''));
            }
            // top-level block scalars need to be indented if followed by a comment
            ctx.forceBlockIndent = !!doc.comment;
            contentComment = doc.contents.comment;
        }
        // onChompKeep tracks a kept final newline (`|+`) so the trailing
        // comment spacing can be adjusted below.
        const onChompKeep = contentComment ? undefined : () => (chompKeep = true);
        let body = stringify$1(doc.contents, ctx, () => (contentComment = null), onChompKeep);
        if (contentComment)
            body += lineComment(body, '', commentString(contentComment));
        if ((body[0] === '|' || body[0] === '>') &&
            lines[lines.length - 1] === '---') {
            // Top-level block scalars with a preceding doc marker ought to use the
            // same line for their header.
            lines[lines.length - 1] = `--- ${body}`;
        }
        else
            lines.push(body);
    }
    else {
        lines.push(stringify$1(doc.contents, ctx));
    }
    let dc = doc.comment;
    if (dc && chompKeep)
        dc = dc.replace(/^\n+/, '');
    if (dc) {
        // Separate the trailing document comment with a blank line unless a
        // kept chomp already provided one.
        if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '')
            lines.push('');
        lines.push(indentComment(commentString(dc), ''));
    }
    return lines.join('\n') + '\n';
}
|
||
|
|
||
|
/**
 * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,
 * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the
 * 2021 edition: https://tc39.es/ecma262/#sec-json.parse
 *
 * Includes extensions for handling Map and Set objects.
 */
function applyReviver(reviver, obj, key, val) {
    // Primitives have no children; hand them straight to the reviver.
    if (!val || typeof val !== 'object')
        return reviver.call(obj, key, val);
    if (Array.isArray(val)) {
        for (let i = 0, len = val.length; i < len; ++i) {
            const prev = val[i];
            const next = applyReviver(reviver, val, String(i), prev);
            if (next === undefined)
                delete val[i]; // leave a hole, as JSON.parse does
            else if (next !== prev)
                val[i] = next;
        }
    }
    else if (val instanceof Map) {
        // Snapshot keys so in-place deletion doesn't upset iteration.
        for (const k of Array.from(val.keys())) {
            const prev = val.get(k);
            const next = applyReviver(reviver, val, k, prev);
            if (next === undefined)
                val.delete(k);
            else if (next !== prev)
                val.set(k, next);
        }
    }
    else if (val instanceof Set) {
        for (const prev of Array.from(val)) {
            const next = applyReviver(reviver, val, prev, prev);
            if (next === undefined)
                val.delete(prev);
            else if (next !== prev) {
                val.delete(prev);
                val.add(next);
            }
        }
    }
    else {
        for (const [k, prev] of Object.entries(val)) {
            const next = applyReviver(reviver, val, k, prev);
            if (next === undefined)
                delete val[k];
            else if (next !== prev)
                val[k] = next;
        }
    }
    return reviver.call(obj, key, val);
}
|
||
|
|
||
|
// A single YAML document: directives, contents node tree, comments, and
// parse errors/warnings, plus the Schema used to (de)serialize it.
class Document {
    constructor(value, replacer, options) {
        /** A comment before this Document */
        this.commentBefore = null;
        /** A comment immediately after this Document */
        this.comment = null;
        /** Errors encountered during parsing. */
        this.errors = [];
        /** Warnings encountered during parsing. */
        this.warnings = [];
        Object.defineProperty(this, NODE_TYPE, { value: DOC });
        let _replacer = null;
        if (typeof replacer === 'function' || Array.isArray(replacer)) {
            _replacer = replacer;
        }
        else if (options === undefined && replacer) {
            // Two-argument call: (value, options) — shift arguments.
            options = replacer;
            replacer = undefined;
        }
        const opt = Object.assign({}, defaultOptions, options);
        this.options = opt;
        let { version } = opt;
        // An explicit %YAML directive overrides the version option.
        if (options === null || options === void 0 ? void 0 : options.directives) {
            this.directives = options.directives.atDocument();
            if (this.directives.yaml.explicit)
                version = this.directives.yaml.version;
        }
        else
            this.directives = new Directives({ version });
        this.setSchema(version, options);
        if (value === undefined)
            this.contents = null;
        else {
            this.contents = this.createNode(value, _replacer, options);
        }
    }
    /**
     * Create a deep copy of this Document and its contents.
     *
     * Custom Node values that inherit from `Object` still refer to their original instances.
     */
    clone() {
        const copy = Object.create(Document.prototype, {
            [NODE_TYPE]: { value: DOC }
        });
        copy.commentBefore = this.commentBefore;
        copy.comment = this.comment;
        copy.errors = this.errors.slice();
        copy.warnings = this.warnings.slice();
        copy.options = Object.assign({}, this.options);
        if (this.directives)
            copy.directives = this.directives.clone();
        copy.schema = this.schema.clone();
        copy.contents = isNode(this.contents)
            ? this.contents.clone(copy.schema)
            : this.contents;
        if (this.range)
            copy.range = this.range.slice();
        return copy;
    }
    /** Adds a value to the document. */
    add(value) {
        if (assertCollection(this.contents))
            this.contents.add(value);
    }
    /** Adds a value to the document. */
    addIn(path, value) {
        if (assertCollection(this.contents))
            this.contents.addIn(path, value);
    }
    /**
     * Create a new `Alias` node, ensuring that the target `node` has the required anchor.
     *
     * If `node` already has an anchor, `name` is ignored.
     * Otherwise, the `node.anchor` value will be set to `name`,
     * or if an anchor with that name is already present in the document,
     * `name` will be used as a prefix for a new unique anchor.
     * If `name` is undefined, the generated anchor will use 'a' as a prefix.
     */
    createAlias(node, name) {
        if (!node.anchor) {
            const prev = anchorNames(this);
            node.anchor =
                !name || prev.has(name) ? findNewAnchor(name || 'a', prev) : name;
        }
        return new Alias(node.anchor);
    }
    /**
     * Wrap `value` in the document's node tree, applying an optional
     * JSON.stringify-style `replacer` (function or key allow-list).
     */
    createNode(value, replacer, options) {
        let _replacer = undefined;
        if (typeof replacer === 'function') {
            value = replacer.call({ '': value }, '', value);
            _replacer = replacer;
        }
        else if (Array.isArray(replacer)) {
            // Per JSON.stringify, numeric entries in the allow-list also match
            // their string forms.
            const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;
            const asStr = replacer.filter(keyToStr).map(String);
            if (asStr.length > 0)
                replacer = replacer.concat(asStr);
            _replacer = replacer;
        }
        else if (options === undefined && replacer) {
            // Two-argument call: (value, options) — shift arguments.
            options = replacer;
            replacer = undefined;
        }
        const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options || {};
        const { onAnchor, setAnchors, sourceObjects } = createNodeAnchors(this, anchorPrefix || 'a');
        const ctx = {
            aliasDuplicateObjects: aliasDuplicateObjects !== null && aliasDuplicateObjects !== void 0 ? aliasDuplicateObjects : true,
            keepUndefined: keepUndefined !== null && keepUndefined !== void 0 ? keepUndefined : false,
            onAnchor,
            onTagObj,
            replacer: _replacer,
            schema: this.schema,
            sourceObjects
        };
        const node = createNode(value, tag, ctx);
        if (flow && isCollection(node))
            node.flow = true;
        // Assign anchor names only after the whole tree has been built.
        setAnchors();
        return node;
    }
    /**
     * Convert a key and a value into a `Pair` using the current schema,
     * recursively wrapping all values as `Scalar` or `Collection` nodes.
     */
    createPair(key, value, options = {}) {
        const k = this.createNode(key, null, options);
        const v = this.createNode(value, null, options);
        return new Pair(k, v);
    }
    /**
     * Removes a value from the document.
     * @returns `true` if the item was found and removed.
     */
    delete(key) {
        return assertCollection(this.contents) ? this.contents.delete(key) : false;
    }
    /**
     * Removes a value from the document.
     * @returns `true` if the item was found and removed.
     */
    deleteIn(path) {
        // An empty path addresses the document contents as a whole.
        if (isEmptyPath(path)) {
            if (this.contents == null)
                return false;
            this.contents = null;
            return true;
        }
        return assertCollection(this.contents)
            ? this.contents.deleteIn(path)
            : false;
    }
    /**
     * Returns item at `key`, or `undefined` if not found. By default unwraps
     * scalar values from their surrounding node; to disable set `keepScalar` to
     * `true` (collections are always returned intact).
     */
    get(key, keepScalar) {
        return isCollection(this.contents)
            ? this.contents.get(key, keepScalar)
            : undefined;
    }
    /**
     * Returns item at `path`, or `undefined` if not found. By default unwraps
     * scalar values from their surrounding node; to disable set `keepScalar` to
     * `true` (collections are always returned intact).
     */
    getIn(path, keepScalar) {
        if (isEmptyPath(path))
            return !keepScalar && isScalar(this.contents)
                ? this.contents.value
                : this.contents;
        return isCollection(this.contents)
            ? this.contents.getIn(path, keepScalar)
            : undefined;
    }
    /**
     * Checks if the document includes a value with the key `key`.
     */
    has(key) {
        return isCollection(this.contents) ? this.contents.has(key) : false;
    }
    /**
     * Checks if the document includes a value at `path`.
     */
    hasIn(path) {
        if (isEmptyPath(path))
            return this.contents !== undefined;
        return isCollection(this.contents) ? this.contents.hasIn(path) : false;
    }
    /**
     * Sets a value in this document. For `!!set`, `value` needs to be a
     * boolean to add/remove the item from the set.
     */
    set(key, value) {
        if (this.contents == null) {
            this.contents = collectionFromPath(this.schema, [key], value);
        }
        else if (assertCollection(this.contents)) {
            this.contents.set(key, value);
        }
    }
    /**
     * Sets a value in this document. For `!!set`, `value` needs to be a
     * boolean to add/remove the item from the set.
     */
    setIn(path, value) {
        if (isEmptyPath(path))
            this.contents = value;
        else if (this.contents == null) {
            this.contents = collectionFromPath(this.schema, Array.from(path), value);
        }
        else if (assertCollection(this.contents)) {
            this.contents.setIn(path, value);
        }
    }
    /**
     * Change the YAML version and schema used by the document.
     * A `null` version disables support for directives, explicit tags, anchors, and aliases.
     * It also requires the `schema` option to be given as a `Schema` instance value.
     *
     * Overrides all previously set schema options.
     */
    setSchema(version, options = {}) {
        if (typeof version === 'number')
            version = String(version);
        let opt;
        switch (version) {
            case '1.1':
                if (this.directives)
                    this.directives.yaml.version = '1.1';
                else
                    this.directives = new Directives({ version: '1.1' });
                opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };
                break;
            case '1.2':
                if (this.directives)
                    this.directives.yaml.version = '1.2';
                else
                    this.directives = new Directives({ version: '1.2' });
                opt = { merge: false, resolveKnownTags: true, schema: 'core' };
                break;
            case null:
                if (this.directives)
                    delete this.directives;
                opt = null;
                break;
            default: {
                const sv = JSON.stringify(version);
                throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`);
            }
        }
        // Not using `instanceof Schema` to allow for duck typing
        if (options.schema instanceof Object)
            this.schema = options.schema;
        else if (opt)
            this.schema = new Schema(Object.assign(opt, options));
        else
            throw new Error(`With a null YAML version, the { schema: Schema } option is required`);
    }
    // json & jsonArg are only used from toJSON()
    toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {
        const ctx = {
            anchors: new Map(),
            doc: this,
            keep: !json,
            mapAsMap: mapAsMap === true,
            mapKeyWarned: false,
            maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100,
            stringify: stringify$1
        };
        const res = toJS(this.contents, jsonArg || '', ctx);
        if (typeof onAnchor === 'function')
            for (const { count, res } of ctx.anchors.values())
                onAnchor(res, count);
        return typeof reviver === 'function'
            ? applyReviver(reviver, { '': res }, '', res)
            : res;
    }
    /**
     * A JSON representation of the document `contents`.
     *
     * @param jsonArg Used by `JSON.stringify` to indicate the array index or
     *   property name.
     */
    toJSON(jsonArg, onAnchor) {
        return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });
    }
    /** A YAML representation of the document. */
    toString(options = {}) {
        if (this.errors.length > 0)
            throw new Error('Document with errors cannot be stringified');
        if ('indent' in options &&
            (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {
            const s = JSON.stringify(options.indent);
            throw new Error(`"indent" option must be a positive integer, not ${s}`);
        }
        return stringifyDocument(this, options);
    }
}
|
||
|
/**
 * Guard used by Document mutators: returns true when `contents` is a YAML
 * collection, and throws otherwise.
 */
function assertCollection(contents) {
    if (!isCollection(contents))
        throw new Error('Expected a YAML collection as document contents');
    return true;
}
|
||
|
|
||
|
// Base class for YAML parse errors and warnings. `pos` is a [start, end]
// offset pair into the source string; a `linePos` field may be added later
// by prettifyError().
class YAMLError extends Error {
    constructor(name, pos, code, message) {
        super();
        this.name = name;
        this.code = code;
        this.message = message;
        this.pos = pos;
    }
}
// A fatal problem encountered while parsing a YAML document.
class YAMLParseError extends YAMLError {
    constructor(pos, code, message) {
        super('YAMLParseError', pos, code, message);
    }
}
// A non-fatal problem encountered while parsing a YAML document.
class YAMLWarning extends YAMLError {
    constructor(pos, code, message) {
        super('YAMLWarning', pos, code, message);
    }
}
|
||
|
/**
 * Curried helper that augments a YAMLError in place with human-readable
 * position info: appends "at line L, column C" to the message and, when
 * possible, a source excerpt with a ^-pointer under the offending span.
 * `lc` is a line counter exposing `lineStarts` and `linePos(offset)`.
 */
const prettifyError = (src, lc) => (error) => {
    if (error.pos[0] === -1)
        return; // no position information available
    error.linePos = error.pos.map(pos => lc.linePos(pos));
    const { line, col } = error.linePos[0];
    error.message += ` at line ${line}, column ${col}`;
    let ci = col - 1;
    let lineStr = src
        .substring(lc.lineStarts[line - 1], lc.lineStarts[line])
        .replace(/[\n\r]+$/, '');
    // Trim to max 80 chars, keeping col position near the middle
    if (ci >= 60 && lineStr.length > 80) {
        const trimStart = Math.min(ci - 39, lineStr.length - 79);
        lineStr = '…' + lineStr.substring(trimStart);
        ci -= trimStart - 1;
    }
    if (lineStr.length > 80)
        lineStr = lineStr.substring(0, 79) + '…';
    // Include previous line in context if pointing at line start
    if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) {
        // Regexp won't match if start is trimmed
        let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);
        if (prev.length > 80)
            prev = prev.substring(0, 79) + '…\n';
        lineStr = prev + lineStr;
    }
    if (/[^ ]/.test(lineStr)) {
        // Widen the pointer to cover the error span when it ends on this line.
        let count = 1;
        const end = error.linePos[1];
        if (end && end.line === line && end.col > col) {
            count = Math.min(end.col - col, 80 - ci);
        }
        const pointer = ' '.repeat(ci) + '^'.repeat(count);
        error.message += `:\n\n${lineStr}\n${pointer}\n`;
    }
};
|
||
|
|
||
|
/**
 * Scan the property tokens that precede a node (anchors, tags, comments,
 * whitespace, the item indicator) and collect them into a single record.
 *
 * @param tokens The CST tokens preceding the node's own value.
 * @param flow Name of the enclosing flow collection ('flow map' /
 *   'flow sequence'), or falsy outside flow context.
 * @param indicator Token type that marks the item ('seq-item-ind',
 *   'explicit-key-ind', 'map-value-ind', ...); captured as `found`.
 * @param next The token that follows `tokens`, used for the trailing
 *   "space after tag/anchor" check.
 * @param offset Fallback end offset when `tokens` is empty.
 * @param onError Error callback `(source, code, message)`.
 * @param startOnNewline Whether the scan starts at the beginning of a line.
 * @returns `{ comma, found, spaceBefore, comment, hasNewline, anchor, tag,
 *   end, start }` where `start`/`end` are source offsets.
 */
function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) {
    let spaceBefore = false;
    let atNewline = startOnNewline;
    let hasSpace = startOnNewline;
    let comment = '';
    let commentSep = '';
    let hasNewline = false;
    // Set after an anchor/tag: the next token must be whitespace.
    let reqSpace = false;
    let anchor = null;
    let tag = null;
    let comma = null;
    let found = null;
    let start = null;
    for (const token of tokens) {
        if (reqSpace) {
            if (token.type !== 'space' &&
                token.type !== 'newline' &&
                token.type !== 'comma')
                onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
            reqSpace = false;
        }
        switch (token.type) {
            case 'space':
                // At the doc level, tabs at line start may be parsed
                // as leading white space rather than indentation.
                // In a flow collection, only the parser handles indent.
                if (!flow &&
                    atNewline &&
                    indicator !== 'doc-start' &&
                    token.source[0] === '\t')
                    onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
                hasSpace = true;
                break;
            case 'comment': {
                if (!hasSpace)
                    onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
                // An empty comment ('#') is kept as a single space.
                const cb = token.source.substring(1) || ' ';
                if (!comment)
                    comment = cb;
                else
                    comment += commentSep + cb;
                commentSep = '';
                atNewline = false;
                break;
            }
            case 'newline':
                if (atNewline) {
                    // A blank line: extend the comment, or record a leading
                    // empty line before the node.
                    if (comment)
                        comment += token.source;
                    else
                        spaceBefore = true;
                }
                else
                    commentSep += token.source;
                atNewline = true;
                hasNewline = true;
                hasSpace = true;
                break;
            case 'anchor':
                if (anchor)
                    onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor');
                anchor = token;
                if (start === null)
                    start = token.offset;
                atNewline = false;
                hasSpace = false;
                reqSpace = true;
                break;
            case 'tag': {
                if (tag)
                    onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');
                tag = token;
                if (start === null)
                    start = token.offset;
                atNewline = false;
                hasSpace = false;
                reqSpace = true;
                break;
            }
            case indicator:
                // Could here handle preceding comments differently
                if (anchor || tag)
                    onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`);
                if (found)
                    onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow || 'collection'}`);
                found = token;
                atNewline = false;
                hasSpace = false;
                break;
            case 'comma':
                if (flow) {
                    if (comma)
                        onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`);
                    comma = token;
                    atNewline = false;
                    hasSpace = false;
                    break;
                }
                // else fallthrough
            default:
                onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`);
                atNewline = false;
                hasSpace = false;
        }
    }
    const last = tokens[tokens.length - 1];
    const end = last ? last.offset + last.source.length : offset;
    // A trailing anchor/tag still needs whitespace before the next token,
    // except before an empty scalar.
    if (reqSpace &&
        next &&
        next.type !== 'space' &&
        next.type !== 'newline' &&
        next.type !== 'comma' &&
        (next.type !== 'scalar' || next.source !== ''))
        onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
    return {
        comma,
        found,
        spaceBefore,
        comment,
        hasNewline,
        anchor,
        tag,
        end,
        start: start !== null && start !== void 0 ? start : end
    };
}
|
||
|
|
||
|
/**
 * Determine whether a CST key token spans more than one line, which makes
 * it invalid as an implicit key.
 *
 * @param key A CST token, or a falsy value.
 * @returns `null` when `key` is falsy; `true` when the token (or, for flow
 *   collections, any nested item) contains a newline; `false` otherwise.
 *   Unknown token types conservatively report `true`.
 */
function containsNewline(key) {
    if (!key)
        return null;
    // Shared check: does a (possibly absent) token list hold a newline?
    const hasNewlineToken = (tokens) => Boolean(tokens) && tokens.some(tok => tok.type === 'newline');
    switch (key.type) {
        case 'alias':
        case 'scalar':
        case 'double-quoted-scalar':
        case 'single-quoted-scalar':
            return key.source.includes('\n') || hasNewlineToken(key.end);
        case 'flow-collection':
            return key.items.some(item => hasNewlineToken(item.start) ||
                hasNewlineToken(item.sep) ||
                containsNewline(item.key) ||
                containsNewline(item.value));
        default:
            return true;
    }
}
|
||
|
|
||
|
/**
 * Compat check: warn when a flow collection's closing bracket sits at the
 * same indent as its parent block collection instead of being more
 * indented.
 *
 * @param indent Indent level of the parent block collection.
 * @param fc Candidate token; ignored unless it is a 'flow-collection'.
 * @param onError Error callback; invoked with `warn = true`.
 */
function flowIndentCheck(indent, fc, onError) {
    if (!fc || fc.type !== 'flow-collection')
        return;
    const closer = fc.end[0];
    const isCloseBracket = closer.source === ']' || closer.source === '}';
    if (closer.indent === indent && isCloseBracket && containsNewline(fc)) {
        const msg = 'Flow end indicator should be more indented than parent';
        onError(closer, 'BAD_INDENT', msg, true);
    }
}
|
||
|
|
||
|
/**
 * Check whether `search` matches the key of any existing pair in `items`,
 * honouring the `uniqueKeys` option.
 *
 * @param ctx Compose context; reads `ctx.options.uniqueKeys` and
 *   `ctx.schema.merge`.
 * @param items Existing `Pair` entries of the map being built.
 * @param search Candidate key node.
 * @returns `false` when `uniqueKeys` is disabled; otherwise whether a
 *   matching key already exists.
 */
function mapIncludes(ctx, items, search) {
    const { uniqueKeys } = ctx.options;
    if (uniqueKeys === false)
        return false;
    let isEqual;
    if (typeof uniqueKeys === 'function') {
        // User-supplied comparator.
        isEqual = uniqueKeys;
    }
    else {
        // Default: identity, or equal scalar values — except the '<<'
        // merge key when the schema enables merging.
        isEqual = (a, b) => {
            if (a === b)
                return true;
            return (isScalar(a) &&
                isScalar(b) &&
                a.value === b.value &&
                !(a.value === '<<' && ctx.schema.merge));
        };
    }
    return items.some(pair => isEqual(pair.key, search));
}
|
||
|
|
||
|
const startColMsg = 'All mapping items must start at the same column';
/**
 * Compose a block mapping from its CST token into a YAMLMap.
 *
 * @param composeNode / composeEmptyNode Node composers (passed as an
 *   object to break a circular dependency).
 * @param ctx Compose context (schema, options, atRoot flag).
 * @param bm The 'block-map' CST token.
 * @param onError Error callback `(source, code, message)`.
 * @returns A YAMLMap with `range` set to `[start, end, end]`.
 */
function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError) {
    var _a;
    const map = new YAMLMap(ctx.schema);
    if (ctx.atRoot)
        ctx.atRoot = false;
    let offset = bm.offset;
    for (const collItem of bm.items) {
        const { start, key, sep, value } = collItem;
        // key properties
        const keyProps = resolveProps(start, {
            indicator: 'explicit-key-ind',
            next: key || (sep === null || sep === void 0 ? void 0 : sep[0]),
            offset,
            onError,
            startOnNewline: true
        });
        // No '?' indicator found → this is an implicit ("key: value") entry.
        const implicitKey = !keyProps.found;
        if (implicitKey) {
            if (key) {
                if (key.type === 'block-seq')
                    onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key');
                else if ('indent' in key && key.indent !== bm.indent)
                    onError(offset, 'BAD_INDENT', startColMsg);
            }
            // Bare comment line with no key/sep: attach comment to the map.
            if (!keyProps.anchor && !keyProps.tag && !sep) {
                // TODO: assert being at last item?
                if (keyProps.comment) {
                    if (map.comment)
                        map.comment += '\n' + keyProps.comment;
                    else
                        map.comment = keyProps.comment;
                }
                continue;
            }
        }
        else if (((_a = keyProps.found) === null || _a === void 0 ? void 0 : _a.indent) !== bm.indent)
            onError(offset, 'BAD_INDENT', startColMsg);
        if (implicitKey && containsNewline(key))
            onError(key, // checked by containsNewline()
            'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');
        // key value
        const keyStart = keyProps.end;
        const keyNode = key
            ? composeNode(ctx, key, keyProps, onError)
            : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);
        if (ctx.schema.compat)
            flowIndentCheck(bm.indent, key, onError);
        if (mapIncludes(ctx, map.items, keyNode))
            onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
        // value properties
        const valueProps = resolveProps(sep || [], {
            indicator: 'map-value-ind',
            next: value,
            offset: keyNode.range[2],
            onError,
            startOnNewline: !key || key.type === 'block-scalar'
        });
        offset = valueProps.end;
        if (valueProps.found) {
            if (implicitKey) {
                // "key: {nested: map}" on one line is not valid YAML.
                if ((value === null || value === void 0 ? void 0 : value.type) === 'block-map' && !valueProps.hasNewline)
                    onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings');
                // YAML limits implicit keys to 1024 characters.
                if (ctx.options.strict &&
                    keyProps.start < valueProps.found.offset - 1024)
                    onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');
            }
            // value value
            const valueNode = value
                ? composeNode(ctx, value, valueProps, onError)
                : composeEmptyNode(ctx, offset, sep, null, valueProps, onError);
            if (ctx.schema.compat)
                flowIndentCheck(bm.indent, value, onError);
            offset = valueNode.range[2];
            const pair = new Pair(keyNode, valueNode);
            if (ctx.options.keepSourceTokens)
                pair.srcToken = collItem;
            map.items.push(pair);
        }
        else {
            // key with no value
            if (implicitKey)
                onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values');
            if (valueProps.comment) {
                if (keyNode.comment)
                    keyNode.comment += '\n' + valueProps.comment;
                else
                    keyNode.comment = valueProps.comment;
            }
            const pair = new Pair(keyNode);
            if (ctx.options.keepSourceTokens)
                pair.srcToken = collItem;
            map.items.push(pair);
        }
    }
    map.range = [bm.offset, offset, offset];
    return map;
}
|
||
|
|
||
|
/**
 * Compose a block sequence from its CST token into a YAMLSeq.
 *
 * @param composeNode / composeEmptyNode Node composers.
 * @param ctx Compose context (schema, options, atRoot flag).
 * @param bs The 'block-seq' CST token.
 * @param onError Error callback `(source, code, message)`.
 * @returns A YAMLSeq with `range` set to `[start, end, end]`.
 */
function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError) {
    const seq = new YAMLSeq(ctx.schema);
    if (ctx.atRoot)
        ctx.atRoot = false;
    let offset = bs.offset;
    for (const { start, value } of bs.items) {
        const props = resolveProps(start, {
            indicator: 'seq-item-ind',
            next: value,
            offset,
            onError,
            startOnNewline: true
        });
        offset = props.end;
        if (!props.found) {
            // No '-' indicator for this item.
            if (props.anchor || props.tag || value) {
                if (value && value.type === 'block-seq')
                    onError(offset, 'BAD_INDENT', 'All sequence items must start at the same column');
                else
                    onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator');
            }
            else {
                // TODO: assert being at last item?
                // Trailing bare comment: attach it to the sequence.
                if (props.comment)
                    seq.comment = props.comment;
                continue;
            }
        }
        const node = value
            ? composeNode(ctx, value, props, onError)
            : composeEmptyNode(ctx, offset, start, null, props, onError);
        if (ctx.schema.compat)
            flowIndentCheck(bs.indent, value, onError);
        offset = node.range[2];
        seq.items.push(node);
    }
    seq.range = [bs.offset, offset, offset];
    return seq;
}
|
||
|
|
||
|
/**
 * Collect trailing comments from a node's end tokens and advance the
 * source offset over them.
 *
 * @param end Token list following a node's value (may be undefined).
 * @param offset Source offset at which `end` begins.
 * @param reqSpace When true, a comment must be preceded by whitespace.
 * @param onError Error callback `(token, code, message)`.
 * @returns `{ comment, offset }` — the gathered comment text and the
 *   offset past all consumed tokens.
 */
function resolveEnd(end, offset, reqSpace, onError) {
    let comment = '';
    if (end) {
        let sawSpace = false;
        // Newlines between comment lines, applied lazily when the next
        // comment arrives.
        let pendingSep = '';
        for (const token of end) {
            const { source, type } = token;
            switch (type) {
                case 'space':
                    sawSpace = true;
                    break;
                case 'comment': {
                    if (reqSpace && !sawSpace)
                        onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
                    // An empty comment ('#') is kept as a single space.
                    const body = source.substring(1) || ' ';
                    comment = comment ? comment + pendingSep + body : body;
                    pendingSep = '';
                    break;
                }
                case 'newline':
                    if (comment)
                        pendingSep += source;
                    sawSpace = true;
                    break;
                default:
                    onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`);
            }
            offset += source.length;
        }
    }
    return { comment, offset };
}
|
||
|
|
||
|
const blockMsg = 'Block collections are not allowed within flow collections';
// A block map/seq token may not appear inside a flow collection.
const isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq');
/**
 * Compose a flow collection ('{...}' or '[...]') from its CST token into a
 * YAMLMap or YAMLSeq.
 *
 * Flow sequence items that are key/value pairs are wrapped in single-pair
 * YAMLMaps. Also validates commas, comments, block-in-flow nesting, and
 * the closing bracket.
 *
 * @param composeNode / composeEmptyNode Node composers.
 * @param ctx Compose context (schema, options, atRoot flag).
 * @param fc The 'flow-collection' CST token.
 * @param onError Error callback `(source, code, message)`.
 */
function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError) {
    const isMap = fc.start.source === '{';
    const fcName = isMap ? 'flow map' : 'flow sequence';
    const coll = isMap
        ? new YAMLMap(ctx.schema)
        : new YAMLSeq(ctx.schema);
    coll.flow = true;
    const atRoot = ctx.atRoot;
    if (atRoot)
        ctx.atRoot = false;
    let offset = fc.offset + fc.start.source.length;
    for (let i = 0; i < fc.items.length; ++i) {
        const collItem = fc.items[i];
        const { start, key, sep, value } = collItem;
        const props = resolveProps(start, {
            flow: fcName,
            indicator: 'explicit-key-ind',
            next: key || (sep === null || sep === void 0 ? void 0 : sep[0]),
            offset,
            onError,
            startOnNewline: false
        });
        if (!props.found) {
            // Entirely empty item: only a comma and/or comment.
            if (!props.anchor && !props.tag && !sep && !value) {
                if (i === 0 && props.comma)
                    onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
                else if (i < fc.items.length - 1)
                    onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`);
                if (props.comment) {
                    if (coll.comment)
                        coll.comment += '\n' + props.comment;
                    else
                        coll.comment = props.comment;
                }
                offset = props.end;
                continue;
            }
            if (!isMap && ctx.options.strict && containsNewline(key))
                onError(key, // checked by containsNewline()
                'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
        }
        if (i === 0) {
            if (props.comma)
                onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
        }
        else {
            if (!props.comma)
                onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`);
            if (props.comment) {
                // A comment between the comma and this item belongs to the
                // previous item; peel it off the collected comment text.
                let prevItemComment = '';
                loop: for (const st of start) {
                    switch (st.type) {
                        case 'comma':
                        case 'space':
                            break;
                        case 'comment':
                            prevItemComment = st.source.substring(1);
                            break loop;
                        default:
                            break loop;
                    }
                }
                if (prevItemComment) {
                    let prev = coll.items[coll.items.length - 1];
                    if (isPair(prev))
                        prev = prev.value || prev.key;
                    if (prev.comment)
                        prev.comment += '\n' + prevItemComment;
                    else
                        prev.comment = prevItemComment;
                    props.comment = props.comment.substring(prevItemComment.length + 1);
                }
            }
        }
        if (!isMap && !sep && !props.found) {
            // item is a value in a seq
            // → key & sep are empty, start does not include ? or :
            const valueNode = value
                ? composeNode(ctx, value, props, onError)
                : composeEmptyNode(ctx, props.end, sep, null, props, onError);
            coll.items.push(valueNode);
            offset = valueNode.range[2];
            if (isBlock(value))
                onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
        }
        else {
            // item is a key+value pair
            // key value
            const keyStart = props.end;
            const keyNode = key
                ? composeNode(ctx, key, props, onError)
                : composeEmptyNode(ctx, keyStart, start, null, props, onError);
            if (isBlock(key))
                onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);
            // value properties
            const valueProps = resolveProps(sep || [], {
                flow: fcName,
                indicator: 'map-value-ind',
                next: value,
                offset: keyNode.range[2],
                onError,
                startOnNewline: false
            });
            if (valueProps.found) {
                if (!isMap && !props.found && ctx.options.strict) {
                    // An implicit flow-seq pair key may not span lines
                    // before its ':' indicator.
                    if (sep)
                        for (const st of sep) {
                            if (st === valueProps.found)
                                break;
                            if (st.type === 'newline') {
                                onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
                                break;
                            }
                        }
                    // YAML limits implicit keys to 1024 characters.
                    if (props.start < valueProps.found.offset - 1024)
                        onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');
                }
            }
            else if (value) {
                if ('source' in value && value.source && value.source[0] === ':')
                    onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`);
                else
                    onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`);
            }
            // value value
            const valueNode = value
                ? composeNode(ctx, value, valueProps, onError)
                : valueProps.found
                    ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError)
                    : null;
            if (valueNode) {
                if (isBlock(value))
                    onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
            }
            else if (valueProps.comment) {
                if (keyNode.comment)
                    keyNode.comment += '\n' + valueProps.comment;
                else
                    keyNode.comment = valueProps.comment;
            }
            const pair = new Pair(keyNode, valueNode);
            if (ctx.options.keepSourceTokens)
                pair.srcToken = collItem;
            if (isMap) {
                const map = coll;
                if (mapIncludes(ctx, map.items, keyNode))
                    onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
                map.items.push(pair);
            }
            else {
                // In a flow seq, a pair becomes a single-pair map item.
                const map = new YAMLMap(ctx.schema);
                map.flow = true;
                map.items.push(pair);
                coll.items.push(map);
            }
            offset = valueNode ? valueNode.range[2] : valueProps.end;
        }
    }
    // Validate the closing bracket and any trailing comments.
    const expectedEnd = isMap ? '}' : ']';
    const [ce, ...ee] = fc.end;
    let cePos = offset;
    if (ce && ce.source === expectedEnd)
        cePos = ce.offset + ce.source.length;
    else {
        const name = fcName[0].toUpperCase() + fcName.substring(1);
        const msg = atRoot
            ? `${name} must end with a ${expectedEnd}`
            : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`;
        onError(offset, atRoot ? 'MISSING_CHAR' : 'BAD_INDENT', msg);
        if (ce && ce.source.length !== 1)
            ee.unshift(ce);
    }
    if (ee.length > 0) {
        const end = resolveEnd(ee, cePos, ctx.options.strict, onError);
        if (end.comment) {
            if (coll.comment)
                coll.comment += '\n' + end.comment;
            else
                coll.comment = end.comment;
        }
        coll.range = [fc.offset, cePos, end.offset];
    }
    else {
        coll.range = [fc.offset, cePos, cePos];
    }
    return coll;
}
|
||
|
|
||
|
/**
 * Compose a collection token (block map/seq or flow collection) into a
 * node, then apply an explicit tag, if any.
 *
 * @param CN `{ composeNode, composeEmptyNode }` composers.
 * @param ctx Compose context (directives, schema, options).
 * @param token CST token: 'block-map', 'block-seq' or 'flow-collection'.
 * @param tagToken Optional explicit tag token preceding the collection.
 * @param onError Error callback `(source, code, message[, warn])`.
 * @returns The composed collection, or a node produced by a custom tag's
 *   `resolve()` (wrapped in a Scalar if it is not already a node).
 */
function composeCollection(CN, ctx, token, tagToken, onError) {
    let coll;
    switch (token.type) {
        case 'block-map': {
            coll = resolveBlockMap(CN, ctx, token, onError);
            break;
        }
        case 'block-seq': {
            coll = resolveBlockSeq(CN, ctx, token, onError);
            break;
        }
        case 'flow-collection': {
            coll = resolveFlowCollection(CN, ctx, token, onError);
            break;
        }
    }
    if (!tagToken)
        return coll;
    const tagName = ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));
    if (!tagName)
        return coll;
    // Cast needed due to: https://github.com/Microsoft/TypeScript/issues/3841
    const Coll = coll.constructor;
    // '!' or the collection's own default tag: keep the composed node.
    if (tagName === '!' || tagName === Coll.tagName) {
        coll.tag = Coll.tagName;
        return coll;
    }
    const expType = isMap(coll) ? 'map' : 'seq';
    let tag = ctx.schema.tags.find(t => t.collection === expType && t.tag === tagName);
    if (!tag) {
        // Fall back to known-but-inactive tags, registering them
        // (non-default) on the schema for later stringification.
        const kt = ctx.schema.knownTags[tagName];
        if (kt && kt.collection === expType) {
            ctx.schema.tags.push(Object.assign({}, kt, { default: false }));
            tag = kt;
        }
        else {
            // Unresolvable tag: warn, keep the tag string on the node.
            onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);
            coll.tag = tagName;
            return coll;
        }
    }
    const res = tag.resolve(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options);
    const node = isNode(res)
        ? res
        : new Scalar(res);
    node.range = coll.range;
    node.tag = tagName;
    if (tag === null || tag === void 0 ? void 0 : tag.format)
        node.format = tag.format;
    return node;
}
|
||
|
|
||
|
/**
 * Resolve a block scalar ('|' literal or '>' folded) token into its string
 * value, applying the header's indentation and chomping indicators.
 *
 * @param scalar The 'block-scalar' CST token.
 * @param strict Whether strict-mode checks apply (passed to header parse).
 * @param onError Error callback `(source, code, message)`.
 * @returns `{ value, type, comment, range }`; `type` is
 *   Scalar.BLOCK_LITERAL or Scalar.BLOCK_FOLDED (null on a bad header).
 */
function resolveBlockScalar(scalar, strict, onError) {
    const start = scalar.offset;
    const header = parseBlockScalarHeader(scalar, strict, onError);
    if (!header)
        return { value: '', type: null, comment: '', range: [start, start, start] };
    const type = header.mode === '>' ? Scalar.BLOCK_FOLDED : Scalar.BLOCK_LITERAL;
    const lines = scalar.source ? splitLines(scalar.source) : [];
    // determine the end of content & start of chomping
    let chompStart = lines.length;
    for (let i = lines.length - 1; i >= 0; --i) {
        const content = lines[i][1];
        if (content === '' || content === '\r')
            chompStart = i;
        else
            break;
    }
    // shortcut for empty contents
    if (!scalar.source || chompStart === 0) {
        // '+' keep-chomping preserves the trailing newlines even when the
        // scalar has no content.
        const value = header.chomp === '+' ? '\n'.repeat(Math.max(0, lines.length - 1)) : '';
        let end = start + header.length;
        if (scalar.source)
            end += scalar.source.length;
        return { value, type, comment: header.comment, range: [start, end, end] };
    }
    // find the indentation level to trim from start
    let trimIndent = scalar.indent + header.indent;
    let offset = scalar.offset + header.length;
    let contentStart = 0;
    for (let i = 0; i < chompStart; ++i) {
        const [indent, content] = lines[i];
        if (content === '' || content === '\r') {
            // Leading empty lines may widen the auto-detected indent.
            if (header.indent === 0 && indent.length > trimIndent)
                trimIndent = indent.length;
        }
        else {
            if (indent.length < trimIndent) {
                const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';
                onError(offset + indent.length, 'MISSING_CHAR', message);
            }
            // Without an explicit indicator, the first content line fixes
            // the indent.
            if (header.indent === 0)
                trimIndent = indent.length;
            contentStart = i;
            break;
        }
        offset += indent.length + content.length + 1;
    }
    let value = '';
    let sep = '';
    let prevMoreIndented = false;
    // leading whitespace is kept intact
    for (let i = 0; i < contentStart; ++i)
        value += lines[i][0].slice(trimIndent) + '\n';
    for (let i = contentStart; i < chompStart; ++i) {
        let [indent, content] = lines[i];
        offset += indent.length + content.length + 1;
        const crlf = content[content.length - 1] === '\r';
        if (crlf)
            content = content.slice(0, -1);
        /* istanbul ignore if already caught in lexer */
        if (content && indent.length < trimIndent) {
            const src = header.indent
                ? 'explicit indentation indicator'
                : 'first line';
            const message = `Block scalar lines must not be less indented than their ${src}`;
            onError(offset - content.length - (crlf ? 2 : 1), 'BAD_INDENT', message);
            indent = '';
        }
        if (type === Scalar.BLOCK_LITERAL) {
            // Literal style: every line break is preserved.
            value += sep + indent.slice(trimIndent) + content;
            sep = '\n';
        }
        else if (indent.length > trimIndent || content[0] === '\t') {
            // more-indented content within a folded block
            if (sep === ' ')
                sep = '\n';
            else if (!prevMoreIndented && sep === '\n')
                sep = '\n\n';
            value += sep + indent.slice(trimIndent) + content;
            sep = '\n';
            prevMoreIndented = true;
        }
        else if (content === '') {
            // empty line
            if (sep === '\n')
                value += '\n';
            else
                sep = '\n';
        }
        else {
            // Folded style: a single line break becomes a space.
            value += sep + content;
            sep = ' ';
            prevMoreIndented = false;
        }
    }
    // Apply the chomping indicator to trailing newlines.
    switch (header.chomp) {
        case '-':
            // strip: drop all trailing newlines.
            break;
        case '+':
            // keep: retain every trailing newline.
            for (let i = chompStart; i < lines.length; ++i)
                value += '\n' + lines[i][0].slice(trimIndent);
            if (value[value.length - 1] !== '\n')
                value += '\n';
            break;
        default:
            // clip (default): keep exactly one trailing newline.
            value += '\n';
    }
    const end = start + header.length + scalar.source.length;
    return { value, type, comment: header.comment, range: [start, end, end] };
}
|
||
|
/**
 * Parse a block scalar's header line: the '|' or '>' mode character, the
 * optional chomping ('-'/'+') and indentation (1-9) indicators, and any
 * trailing spaces/comment/newline tokens.
 *
 * @returns `{ mode, indent, chomp, comment, length }` where `length` is
 *   the total source length of the header (used to locate the content),
 *   or `null` if the first prop is not a block-scalar-header.
 */
function parseBlockScalarHeader({ offset, props }, strict, onError) {
    /* istanbul ignore if should not happen */
    if (props[0].type !== 'block-scalar-header') {
        onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found');
        return null;
    }
    const { source } = props[0];
    const mode = source[0];
    let indent = 0;
    let chomp = '';
    let error = -1;
    // Indicators may appear in either order ('|2-' or '|-2'); anything
    // else after the first of each kind is reported once.
    for (let i = 1; i < source.length; ++i) {
        const ch = source[i];
        if (!chomp && (ch === '-' || ch === '+'))
            chomp = ch;
        else {
            const n = Number(ch);
            if (!indent && n)
                indent = n;
            else if (error === -1)
                error = offset + i;
        }
    }
    if (error !== -1)
        onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`);
    let hasSpace = false;
    let comment = '';
    let length = source.length;
    // Consume the rest of the header line: spaces, a trailing comment,
    // and the line-ending newline all count towards `length`.
    for (let i = 1; i < props.length; ++i) {
        const token = props[i];
        switch (token.type) {
            case 'space':
                hasSpace = true;
            // fallthrough
            case 'newline':
                length += token.source.length;
                break;
            case 'comment':
                if (strict && !hasSpace) {
                    const message = 'Comments must be separated from other tokens by white space characters';
                    onError(token, 'MISSING_CHAR', message);
                }
                length += token.source.length;
                comment = token.source.substring(1);
                break;
            case 'error':
                onError(token, 'UNEXPECTED_TOKEN', token.message);
                length += token.source.length;
                break;
            /* istanbul ignore next should not happen */
            default: {
                const message = `Unexpected token in block scalar header: ${token.type}`;
                onError(token, 'UNEXPECTED_TOKEN', message);
                const ts = token.source;
                if (ts && typeof ts === 'string')
                    length += ts.length;
            }
        }
    }
    return { mode, indent, chomp, comment, length };
}
|
||
|
/**
 * Split block scalar source into lines.
 *
 * @returns Array of lines split up as `[indent, content]` tuples, where
 *   `indent` is the line's leading spaces.
 */
function splitLines(source) {
    // The capture group keeps each line's leading spaces in the split
    // output at odd indices, with the remaining content following it.
    const parts = source.split(/\n( *)/);
    const head = parts[0];
    const headMatch = /^( *)/.exec(head);
    const result = [];
    if (headMatch && headMatch[1])
        result.push([headMatch[1], head.slice(headMatch[1].length)]);
    else
        result.push(['', head]);
    for (let i = 1; i < parts.length; i += 2)
        result.push([parts[i], parts[i + 1]]);
    return result;
}
|
||
|
|
||
|
/**
 * Resolve a flow scalar token (plain, single- or double-quoted) into its
 * string value, and collect any trailing comment via resolveEnd().
 *
 * @param scalar CST token with `offset`, `type`, `source` and `end`.
 * @param strict Whether strict-mode comment-separation checks apply.
 * @param onError Error callback `(source, code, message)`.
 * @returns `{ value, type, comment, range }`; `type` is the corresponding
 *   Scalar.* constant, or null for an unexpected token type.
 */
function resolveFlowScalar(scalar, strict, onError) {
    const { offset, type, source, end } = scalar;
    let _type;
    let value;
    // Value parsers report positions relative to the scalar's start.
    const _onError = (rel, code, msg) => onError(offset + rel, code, msg);
    switch (type) {
        case 'scalar':
            _type = Scalar.PLAIN;
            value = plainValue(source, _onError);
            break;
        case 'single-quoted-scalar':
            _type = Scalar.QUOTE_SINGLE;
            value = singleQuotedValue(source, _onError);
            break;
        case 'double-quoted-scalar':
            _type = Scalar.QUOTE_DOUBLE;
            value = doubleQuotedValue(source, _onError);
            break;
        /* istanbul ignore next should not happen */
        default:
            onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`);
            return {
                value: '',
                type: null,
                comment: '',
                range: [offset, offset + source.length, offset + source.length]
            };
    }
    const valueEnd = offset + source.length;
    const re = resolveEnd(end, valueEnd, strict, onError);
    return {
        value,
        type: _type,
        comment: re.comment,
        range: [offset, valueEnd, re.offset]
    };
}
|
||
|
/**
 * Resolve a plain (unquoted) scalar's source into its folded value,
 * reporting characters that may not start a plain scalar.
 *
 * @param source Raw scalar source text.
 * @param onError Error callback `(relOffset, code, message)`.
 */
function plainValue(source, onError) {
    const first = source[0];
    let badChar = '';
    /* istanbul ignore if should not happen */
    if (first === '\t')
        badChar = 'a tab character';
    else if (first === ',')
        badChar = 'flow indicator character ,';
    else if (first === '%')
        badChar = 'directive indicator character %';
    else if (first === '|' || first === '>')
        badChar = `block scalar indicator ${first}`;
    else if (first === '@' || first === '`')
        badChar = `reserved character ${first}`;
    if (badChar)
        onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`);
    return foldLines(source);
}
|
||
|
/**
 * Resolve a single-quoted scalar's source into its value: strip the
 * surrounding quotes, fold line breaks, and un-escape doubled quotes.
 *
 * @param source Raw scalar source, including the surrounding quotes.
 * @param onError Error callback `(relOffset, code, message)`.
 */
function singleQuotedValue(source, onError) {
    const properlyClosed = source.length > 1 && source[source.length - 1] === "'";
    if (!properlyClosed)
        onError(source.length, 'MISSING_CHAR', "Missing closing 'quote");
    // In single-quoted style, '' is the only escape, meaning a literal '.
    return foldLines(source.slice(1, -1)).replace(/''/g, "'");
}
|
||
|
/**
 * Apply YAML flow-scalar line folding: trailing whitespace on each line is
 * trimmed, a single line break becomes a space, and N consecutive breaks
 * become N-1 newlines.
 *
 * Uses sticky ('y') regexes, so `lastIndex` must be threaded between
 * `first`, `line` and `last` to continue scanning where the previous
 * match ended.
 */
function foldLines(source) {
    /**
     * The negative lookbehind here and in the `re` RegExp is to
     * prevent causing a polynomial search time in certain cases.
     *
     * The try-catch is for Safari, which doesn't support this yet:
     * https://caniuse.com/js-regexp-lookbehind
     */
    let first, line;
    try {
        first = new RegExp('(.*?)(?<![ \t])[ \t]*\r?\n', 'sy');
        line = new RegExp('[ \t]*(.*?)(?:(?<![ \t])[ \t]*)?\r?\n', 'sy');
    }
    catch (_) {
        // Fallback without lookbehind; functionally equivalent here.
        first = /(.*?)[ \t]*\r?\n/sy;
        line = /[ \t]*(.*?)[ \t]*\r?\n/sy;
    }
    let match = first.exec(source);
    // Single-line source: nothing to fold.
    if (!match)
        return source;
    let res = match[1];
    let sep = ' ';
    let pos = first.lastIndex;
    line.lastIndex = pos;
    while ((match = line.exec(source))) {
        if (match[1] === '') {
            // Empty line: the first one upgrades the separator to '\n',
            // each further one appends a literal newline.
            if (sep === '\n')
                res += sep;
            else
                sep = '\n';
        }
        else {
            res += sep + match[1];
            sep = ' ';
        }
        pos = line.lastIndex;
    }
    // The final line has no trailing newline; trim its leading whitespace.
    const last = /[ \t]*(.*)/sy;
    last.lastIndex = pos;
    match = last.exec(source);
    return res + sep + ((match && match[1]) || '');
}
|
||
|
/**
 * Resolve a double-quoted scalar's source into its value: handle escape
 * sequences, fold unescaped newlines, and trim trailing whitespace before
 * line breaks.
 *
 * @param source Raw scalar source, including the surrounding quotes.
 * @param onError Error callback `(relOffset, code, message)`.
 */
function doubleQuotedValue(source, onError) {
    let res = '';
    // Skip the opening quote; stop before the closing quote.
    for (let i = 1; i < source.length - 1; ++i) {
        const ch = source[i];
        // Treat CRLF as a single newline (handled on the '\n').
        if (ch === '\r' && source[i + 1] === '\n')
            continue;
        if (ch === '\n') {
            const { fold, offset } = foldNewline(source, i);
            res += fold;
            i = offset;
        }
        else if (ch === '\\') {
            let next = source[++i];
            // Single-character escapes ('n', 't', '0', ...) via lookup.
            const cc = escapeCodes[next];
            if (cc)
                res += cc;
            else if (next === '\n') {
                // skip escaped newlines, but still trim the following line
                next = source[i + 1];
                while (next === ' ' || next === '\t')
                    next = source[++i + 1];
            }
            else if (next === '\r' && source[i + 1] === '\n') {
                // skip escaped CRLF newlines, but still trim the following line
                next = source[++i + 1];
                while (next === ' ' || next === '\t')
                    next = source[++i + 1];
            }
            else if (next === 'x' || next === 'u' || next === 'U') {
                // \xNN, \uNNNN, \UNNNNNNNN hex character escapes.
                const length = { x: 2, u: 4, U: 8 }[next];
                res += parseCharCode(source, i + 1, length, onError);
                i += length;
            }
            else {
                // Unknown escape: report it but keep the raw characters.
                const raw = source.substr(i - 1, 2);
                onError(i - 1, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
                res += raw;
            }
        }
        else if (ch === ' ' || ch === '\t') {
            // trim trailing whitespace
            const wsStart = i;
            let next = source[i + 1];
            while (next === ' ' || next === '\t')
                next = source[++i + 1];
            // Keep the whitespace run unless it precedes a line break.
            if (next !== '\n' && !(next === '\r' && source[i + 2] === '\n'))
                res += i > wsStart ? source.slice(wsStart, i + 1) : ch;
        }
        else {
            res += ch;
        }
    }
    if (source[source.length - 1] !== '"' || source.length === 1)
        onError(source.length, 'MISSING_CHAR', 'Missing closing "quote');
    return res;
}
|
||
|
/**
 * Fold a single newline into a space, multiple newlines to N - 1 newlines.
 * Presumes `source[offset] === '\n'`.
 *
 * @returns The folded text and the offset of the last consumed character.
 */
function foldNewline(source, offset) {
    let fold = '';
    for (;;) {
        const ch = source[offset + 1];
        const blank = ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r';
        if (!blank)
            break;
        // A lone \r that is not part of a CRLF pair terminates the fold.
        if (ch === '\r' && source[offset + 2] !== '\n')
            break;
        if (ch === '\n')
            fold += '\n';
        offset += 1;
    }
    // No further newlines found: a single break folds to one space.
    return { fold: fold || ' ', offset };
}
|
||
|
// Escape sequences recognised in double-quoted scalars, keyed by the
// character that follows the backslash. Used by doubleQuotedValue above.
const escapeCodes = {
    '0': '\0',
    a: '\x07', // bell
    b: '\b',
    e: '\x1b', // escape
    f: '\f',
    n: '\n',
    r: '\r',
    t: '\t',
    v: '\v',
    N: '\u0085', // next line
    _: '\u00a0', // non-breaking space
    L: '\u2028', // line separator
    P: '\u2029', // paragraph separator
    ' ': ' ',
    '"': '"',
    '/': '/',
    '\\': '\\',
    '\t': '\t'
};
|
||
|
/**
 * Parse the hex digits of a \x, \u or \U escape and return the decoded
 * character. On malformed input the raw escape text is returned and a
 * BAD_DQ_ESCAPE error is reported via `onError`.
 */
function parseCharCode(source, offset, length, onError) {
    const digits = source.substr(offset, length);
    const valid = digits.length === length && /^[0-9a-fA-F]+$/.test(digits);
    if (!valid) {
        // Include the backslash and escape letter in the reported text.
        const raw = source.substr(offset - 2, length + 2);
        onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
        return raw;
    }
    return String.fromCodePoint(parseInt(digits, 16));
}
|
||
|
|
||
|
/**
 * Compose a scalar token (plain, quoted, or block) into a Scalar node,
 * resolving its value through the schema tag selected by an explicit tag
 * name, a matching default-tag test, or the plain-scalar fallback.
 */
function composeScalar(ctx, token, tagToken, onError) {
    // Resolve the raw string value, scalar type, trailing comment and range.
    const { value, type, comment, range } = token.type === 'block-scalar'
        ? resolveBlockScalar(token, ctx.options.strict, onError)
        : resolveFlowScalar(token, ctx.options.strict, onError);
    const tagName = tagToken
        ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))
        : null;
    // An explicit tag name wins; plain scalars may match a default tag's
    // test; quoted/block scalars always resolve with the string tag.
    const tag = tagToken && tagName
        ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)
        : token.type === 'scalar'
            ? findScalarTagByTest(ctx, value, token, onError)
            : ctx.schema[SCALAR$1];
    let scalar;
    try {
        const res = tag.resolve(value, msg => onError(tagToken || token, 'TAG_RESOLVE_FAILED', msg), ctx.options);
        scalar = isScalar(res) ? res : new Scalar(res);
    }
    catch (error) {
        // A throwing tag resolver degrades to an error + the raw string value.
        const msg = error instanceof Error ? error.message : String(error);
        onError(tagToken || token, 'TAG_RESOLVE_FAILED', msg);
        scalar = new Scalar(value);
    }
    scalar.range = range;
    scalar.source = value;
    if (type)
        scalar.type = type;
    if (tagName)
        scalar.tag = tagName;
    if (tag.format)
        scalar.format = tag.format;
    if (comment)
        scalar.comment = comment;
    return scalar;
}
|
||
|
/**
 * Find the schema tag matching an explicit tag name on a scalar token.
 * Falls back to the schema's plain-scalar tag (reporting
 * TAG_RESOLVE_FAILED) when the name cannot be resolved.
 */
function findScalarTagByName(schema, value, tagName, tagToken, onError) {
    // '!' is the non-specific tag: always resolve as a plain scalar.
    if (tagName === '!')
        return schema[SCALAR$1];
    const withTest = [];
    for (const tag of schema.tags) {
        if (tag.collection || tag.tag !== tagName)
            continue;
        if (!(tag.default && tag.test))
            return tag;
        withTest.push(tag);
    }
    // Among default tags with a test, prefer the first whose test matches.
    for (const tag of withTest) {
        const re = tag.test;
        if (re && re.test(value))
            return tag;
    }
    const kt = schema.knownTags[tagName];
    if (kt && !kt.collection) {
        // Ensure that the known tag is available for stringifying,
        // but does not get used by default.
        schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));
        return kt;
    }
    onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');
    return schema[SCALAR$1];
}
|
||
|
/**
 * Find the default schema tag whose test matches a plain scalar's value.
 * When a compat schema is configured and would resolve differently, a
 * TAG_RESOLVE_FAILED warning is emitted.
 */
function findScalarTagByTest({ directives, schema }, value, token, onError) {
    const matches = (tag) => {
        const re = tag.test;
        return Boolean(tag.default && re && re.test(value));
    };
    const tag = schema.tags.find(matches) || schema[SCALAR$1];
    if (schema.compat) {
        const compat = schema.compat.find(matches) || schema[SCALAR$1];
        if (tag.tag !== compat.tag) {
            const ts = directives.tagString(tag.tag);
            const cs = directives.tagString(compat.tag);
            const msg = `Value may be parsed as either ${ts} or ${cs}`;
            onError(token, 'TAG_RESOLVE_FAILED', msg, true);
        }
    }
    return tag;
}
|
||
|
|
||
|
/**
 * Determine the source offset of an empty scalar node by walking back
 * over trailing space/comment/newline tokens in `before` (up to `pos`,
 * or all of them when `pos` is null).
 */
function emptyScalarPosition(offset, before, pos) {
    if (!before)
        return offset;
    let i = pos === null ? before.length : pos;
    while (--i >= 0) {
        const st = before[i];
        const t = st.type;
        if (t === 'space' || t === 'comment' || t === 'newline') {
            offset -= st.source.length;
            continue;
        }
        // Technically, an empty scalar is immediately after the last non-empty
        // node, but it's more useful to place it after any whitespace.
        let next = before[++i];
        while (next && next.type === 'space') {
            offset += next.source.length;
            next = before[++i];
        }
        break;
    }
    return offset;
}
|
||
|
|
||
|
const CN = { composeNode, composeEmptyNode };
/**
 * Compose a CST token into a document node, attaching the props (anchor,
 * tag, comments) that the caller resolved from the preceding tokens.
 *
 * @param ctx - Composition context ({ atRoot, directives, options, schema }).
 * @param token - The CST token to compose.
 * @param props - Node properties resolved by resolveProps().
 * @param onError - Error callback (token/offset, code, message).
 * @throws {Error} On a token type that can never appear in node position.
 */
function composeNode(ctx, token, props, onError) {
    const { spaceBefore, comment, anchor, tag } = props;
    let node;
    switch (token.type) {
        case 'alias':
            node = composeAlias(ctx, token, onError);
            if (anchor || tag)
                onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties');
            break;
        case 'scalar':
        case 'single-quoted-scalar':
        case 'double-quoted-scalar':
        case 'block-scalar':
            node = composeScalar(ctx, token, tag, onError);
            if (anchor)
                node.anchor = anchor.source.substring(1);
            break;
        case 'block-map':
        case 'block-seq':
        case 'flow-collection':
            node = composeCollection(CN, ctx, token, tag, onError);
            if (anchor)
                node.anchor = anchor.source.substring(1);
            break;
        default:
            // Fixed: typo in the error message ("Unsupporten") and removed a
            // stray console.log(token) left over from debugging.
            throw new Error(`Unsupported token type: ${token.type}`);
    }
    if (anchor && node.anchor === '')
        onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
    if (spaceBefore)
        node.spaceBefore = true;
    if (comment) {
        // An empty plain scalar is a placeholder: the comment belongs to it
        // directly rather than before it.
        if (token.type === 'scalar' && token.source === '')
            node.comment = comment;
        else
            node.commentBefore = comment;
    }
    if (ctx.options.keepSourceTokens)
        node.srcToken = token;
    return node;
}
|
||
|
/**
 * Compose an empty scalar node at the position implied by the preceding
 * tokens, applying any resolved props (anchor, comment, tag).
 */
function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag }, onError) {
    // A synthetic zero-width plain-scalar token standing in for the value.
    const placeholder = {
        type: 'scalar',
        offset: emptyScalarPosition(offset, before, pos),
        indent: -1,
        source: ''
    };
    const node = composeScalar(ctx, placeholder, tag, onError);
    if (anchor) {
        node.anchor = anchor.source.substring(1);
        if (node.anchor === '')
            onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
    }
    if (spaceBefore)
        node.spaceBefore = true;
    if (comment)
        node.comment = comment;
    return node;
}
|
||
|
/**
 * Compose an alias token (`*anchor`) into an Alias node, resolving its
 * range and any trailing comment.
 */
function composeAlias({ options }, { offset, source, end }, onError) {
    // Drop the leading '*' to get the anchor name.
    const alias = new Alias(source.substring(1));
    if (alias.source === '')
        onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string');
    const valueEnd = offset + source.length;
    const re = resolveEnd(end, valueEnd, options.strict, onError);
    alias.range = [offset, valueEnd, re.offset];
    if (re.comment)
        alias.comment = re.comment;
    return alias;
}
|
||
|
|
||
|
/**
 * Compose a CST document token into a Document, resolving the
 * directives-end marker, the contents and any trailing comment.
 *
 * @param options - Parse options; merged with `directives` for the Document.
 * @param directives - Active %YAML/%TAG directives for this document.
 */
function composeDoc(options, directives, { offset, start, value, end }, onError) {
    const opts = Object.assign({ directives }, options);
    const doc = new Document(undefined, opts);
    const ctx = {
        atRoot: true,
        directives: doc.directives,
        options: doc.options,
        schema: doc.schema
    };
    // Resolve props (anchor/tag/comments/'---') preceding the contents.
    const props = resolveProps(start, {
        indicator: 'doc-start',
        next: value || (end === null || end === void 0 ? void 0 : end[0]),
        offset,
        onError,
        startOnNewline: true
    });
    if (props.found) {
        doc.directives.marker = true;
        // A block collection may not share the '---' marker's line.
        if (value &&
            (value.type === 'block-map' || value.type === 'block-seq') &&
            !props.hasNewline)
            onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker');
    }
    // An absent value composes to an empty scalar placed after the props.
    doc.contents = value
        ? composeNode(ctx, value, props, onError)
        : composeEmptyNode(ctx, props.end, start, null, props, onError);
    const contentEnd = doc.contents.range[2];
    const re = resolveEnd(end, contentEnd, false, onError);
    if (re.comment)
        doc.comment = re.comment;
    doc.range = [offset, contentEnd, re.offset];
    return doc;
}
|
||
|
|
||
|
/**
 * Normalise an error source (an offset number, a [start, end] pair, or a
 * token with offset/source) into a [start, end] offset pair.
 */
function getErrorPos(src) {
    if (typeof src === 'number')
        return [src, src + 1];
    if (Array.isArray(src))
        return src.length === 2 ? src : [src[0], src[1]];
    // A token: its width is its source text length, defaulting to 1.
    const { offset, source } = src;
    const width = typeof source === 'string' ? source.length : 1;
    return [offset, offset + width];
}
|
||
|
/**
 * Join the comment lines collected before a document into a single comment
 * string, tracking whether the final comment followed an empty line.
 */
function parsePrelude(prelude) {
    let comment = '';
    let atComment = false;
    let afterEmptyLine = false;
    for (let i = 0; i < prelude.length; ++i) {
        const source = prelude[i];
        const first = source[0];
        if (first === '#') {
            // Blank comment lines ('#') contribute a single space.
            const sep = comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n';
            comment += sep + (source.substring(1) || ' ');
            atComment = true;
            afterEmptyLine = false;
        }
        else if (first === '%') {
            // A directive line: skip its trailing entry unless the next
            // entry is itself a comment.
            const next = prelude[i + 1];
            if (!next || next[0] !== '#')
                i += 1;
            atComment = false;
        }
        else {
            // This may be wrong after doc-end, but in that case it doesn't matter
            if (!atComment)
                afterEmptyLine = true;
            atComment = false;
        }
    }
    return { comment, afterEmptyLine };
}
|
||
|
/**
 * Compose a stream of CST nodes into a stream of YAML Documents.
 *
 * ```ts
 * import { Composer, Parser } from 'yaml'
 *
 * const src: string = ...
 * const tokens = new Parser().parse(src)
 * const docs = new Composer().compose(tokens)
 * ```
 */
class Composer {
    constructor(options = {}) {
        // The document currently being collected; yielded once complete.
        this.doc = null;
        // True while consuming %-directive lines before a document starts.
        this.atDirectives = false;
        // comment/newline/directive sources seen before the next document.
        this.prelude = [];
        // Errors & warnings for content not yet attached to a document.
        this.errors = [];
        this.warnings = [];
        this.onError = (source, code, message, warning) => {
            const pos = getErrorPos(source);
            if (warning)
                this.warnings.push(new YAMLWarning(pos, code, message));
            else
                this.errors.push(new YAMLParseError(pos, code, message));
        };
        this.directives = new Directives({
            version: options.version || defaultOptions.version
        });
        this.options = options;
    }
    /**
     * Attach the collected prelude comments and any pending errors/warnings
     * to `doc`. With `afterDoc`, the prelude belongs after the document's
     * contents (e.g. following a '...' doc-end marker).
     */
    decorate(doc, afterDoc) {
        const { comment, afterEmptyLine } = parsePrelude(this.prelude);
        //console.log({ dc: doc.comment, prelude, comment })
        if (comment) {
            const dc = doc.contents;
            if (afterDoc) {
                doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment;
            }
            else if (afterEmptyLine || doc.directives.marker || !dc) {
                doc.commentBefore = comment;
            }
            else if (isCollection(dc) && !dc.flow && dc.items.length > 0) {
                // Attach the comment before the first item (or its key) of a
                // non-empty block collection.
                let it = dc.items[0];
                if (isPair(it))
                    it = it.key;
                const cb = it.commentBefore;
                it.commentBefore = cb ? `${comment}\n${cb}` : comment;
            }
            else {
                const cb = dc.commentBefore;
                dc.commentBefore = cb ? `${comment}\n${cb}` : comment;
            }
        }
        if (afterDoc) {
            Array.prototype.push.apply(doc.errors, this.errors);
            Array.prototype.push.apply(doc.warnings, this.warnings);
        }
        else {
            doc.errors = this.errors;
            doc.warnings = this.warnings;
        }
        // Reset per-document collection state.
        this.prelude = [];
        this.errors = [];
        this.warnings = [];
    }
    /**
     * Current stream status information.
     *
     * Mostly useful at the end of input for an empty stream.
     */
    streamInfo() {
        return {
            comment: parsePrelude(this.prelude).comment,
            directives: this.directives,
            errors: this.errors,
            warnings: this.warnings
        };
    }
    /**
     * Compose tokens into documents.
     *
     * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
     * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
     */
    *compose(tokens, forceDoc = false, endOffset = -1) {
        for (const token of tokens)
            yield* this.next(token);
        yield* this.end(forceDoc, endOffset);
    }
    /** Advance the composer by one CST token. */
    *next(token) {
        switch (token.type) {
            case 'directive':
                this.directives.add(token.source, (offset, message, warning) => {
                    const pos = getErrorPos(token);
                    pos[0] += offset;
                    this.onError(pos, 'BAD_DIRECTIVE', message, warning);
                });
                this.prelude.push(token.source);
                this.atDirectives = true;
                break;
            case 'document': {
                const doc = composeDoc(this.options, this.directives, token, this.onError);
                if (this.atDirectives && !doc.directives.marker)
                    this.onError(token, 'MISSING_CHAR', 'Missing directives-end indicator line');
                this.decorate(doc, false);
                // The previous document is yielded only now, so that a
                // doc-end token (below) can still amend it.
                if (this.doc)
                    yield this.doc;
                this.doc = doc;
                this.atDirectives = false;
                break;
            }
            case 'byte-order-mark':
            case 'space':
                break;
            case 'comment':
            case 'newline':
                this.prelude.push(token.source);
                break;
            case 'error': {
                const msg = token.source
                    ? `${token.message}: ${JSON.stringify(token.source)}`
                    : token.message;
                const error = new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg);
                if (this.atDirectives || !this.doc)
                    this.errors.push(error);
                else
                    this.doc.errors.push(error);
                break;
            }
            case 'doc-end': {
                if (!this.doc) {
                    const msg = 'Unexpected doc-end without preceding document';
                    this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg));
                    break;
                }
                const end = resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);
                this.decorate(this.doc, true);
                if (end.comment) {
                    const dc = this.doc.comment;
                    this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment;
                }
                this.doc.range[2] = end.offset;
                break;
            }
            default:
                this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`));
        }
    }
    /**
     * Call at end of input to yield any remaining document.
     *
     * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
     * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
     */
    *end(forceDoc = false, endOffset = -1) {
        if (this.doc) {
            this.decorate(this.doc, true);
            yield this.doc;
            this.doc = null;
        }
        else if (forceDoc) {
            // Synthesise an empty document carrying the prelude & errors.
            const opts = Object.assign({ directives: this.directives }, this.options);
            const doc = new Document(undefined, opts);
            if (this.atDirectives)
                this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line');
            doc.range = [0, endOffset, endOffset];
            this.decorate(doc, false);
            yield doc;
        }
    }
}
|
||
|
|
||
|
/**
 * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.
 *
 * Best efforts are made to retain any comments previously associated with the `token`,
 * though all contents within a collection's `items` will be overwritten.
 *
 * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
 * as this function does not support any schema operations and won't check for such conflicts.
 *
 * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key.
 * @param value The string representation of the value, which will have its content properly indented.
 * @param context.afterKey In most cases, values after a key should have an additional level of indentation.
 * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
 * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.
 * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
 */
function setScalarValue(token, value, context = {}) {
    let { afterKey = false, implicitKey = false, inFlow = false, type } = context;
    let indent = 'indent' in token ? token.indent : null;
    // Values after a key are indented one level (2 spaces) deeper.
    if (afterKey && typeof indent === 'number')
        indent += 2;
    // Without an explicit type, keep the token's current scalar style.
    if (!type)
        switch (token.type) {
            case 'single-quoted-scalar':
                type = 'QUOTE_SINGLE';
                break;
            case 'double-quoted-scalar':
                type = 'QUOTE_DOUBLE';
                break;
            case 'block-scalar': {
                const header = token.props[0];
                if (header.type !== 'block-scalar-header')
                    throw new Error('Invalid block scalar header');
                // '>' marks a folded block scalar, '|' a literal one.
                type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';
                break;
            }
            default:
                type = 'PLAIN';
        }
    const source = stringifyString({ type, value }, {
        implicitKey: implicitKey || indent === null,
        indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '',
        inFlow,
        options: { blockQuote: true, lineWidth: -1 }
    });
    // Dispatch on the first character of the stringified form, which
    // identifies the scalar style stringifyString actually chose (it may
    // differ from the requested `type`).
    switch (source[0]) {
        case '|':
        case '>':
            setBlockScalarValue(token, source);
            break;
        case '"':
            setFlowScalarValue(token, source, 'double-quoted-scalar');
            break;
        case "'":
            setFlowScalarValue(token, source, 'single-quoted-scalar');
            break;
        default:
            setFlowScalarValue(token, source, 'scalar');
    }
}
|
||
|
/**
 * Overwrite `token` in place with a block scalar whose stringified form is
 * `source` (header line + body). Reuses the existing header token when the
 * token is already a block scalar; otherwise rebuilds the token.
 */
function setBlockScalarValue(token, source) {
    const nl = source.indexOf('\n');
    const head = source.substring(0, nl);
    const body = source.substring(nl + 1) + '\n';
    if (token.type === 'block-scalar') {
        const header = token.props[0];
        if (header.type !== 'block-scalar-header')
            throw new Error('Invalid block scalar header');
        header.source = head;
        token.source = body;
        return;
    }
    const { offset } = token;
    const indent = 'indent' in token ? token.indent : -1;
    const props = [{ type: 'block-scalar-header', offset, indent, source: head }];
    // Carry over trailing space/comment tokens; ensure a final newline.
    const endsWithNewline = addEndtoBlockProps(props, 'end' in token ? token.end : undefined);
    if (!endsWithNewline)
        props.push({ type: 'newline', offset: -1, indent, source: '\n' });
    // Strip every stale field, keeping only the stable identity (type/offset).
    for (const key of Object.keys(token))
        if (key !== 'type' && key !== 'offset')
            delete token[key];
    Object.assign(token, { type: 'block-scalar', indent, props, source: body });
}
|
||
|
/**
 * Copy trailing space/comment/newline tokens from `end` into `props`,
 * stopping after the first newline.
 *
 * @returns `true` if last token is a newline
 */
function addEndtoBlockProps(props, end) {
    if (!end)
        return false;
    for (const st of end) {
        if (st.type === 'space' || st.type === 'comment') {
            props.push(st);
        }
        else if (st.type === 'newline') {
            props.push(st);
            return true;
        }
    }
    return false;
}
|
||
|
/**
 * Overwrite `token` in place with a flow scalar of the given `type`
 * ('scalar', 'single-quoted-scalar' or 'double-quoted-scalar') whose
 * stringified form is `source`, preserving trailing space/comment/newline
 * end tokens where possible.
 */
function setFlowScalarValue(token, source, type) {
    const prev = token.type;
    if (prev === 'scalar' ||
        prev === 'double-quoted-scalar' ||
        prev === 'single-quoted-scalar') {
        // Already a flow scalar: just swap type and source.
        token.type = type;
        token.source = source;
    }
    else if (prev === 'block-scalar') {
        // Keep everything after the header as end tokens, shifting their
        // offsets by the change in the value's stringified length.
        const end = token.props.slice(1);
        let shift = source.length;
        if (token.props[0].type === 'block-scalar-header')
            shift -= token.props[0].source.length;
        for (const tok of end)
            tok.offset += shift;
        delete token.props;
        Object.assign(token, { type, source, end });
    }
    else if (prev === 'block-map' || prev === 'block-seq') {
        // Replace the collection with the scalar, terminated by a newline.
        const nl = {
            type: 'newline',
            offset: token.offset + source.length,
            indent: token.indent,
            source: '\n'
        };
        delete token.items;
        Object.assign(token, { type, source, end: [nl] });
    }
    else {
        const indent = 'indent' in token ? token.indent : -1;
        const keep = (st) => st.type === 'space' || st.type === 'comment' || st.type === 'newline';
        const end = 'end' in token && Array.isArray(token.end) ? token.end.filter(keep) : [];
        // Strip stale fields, keeping only the stable identity (type/offset).
        for (const key of Object.keys(token))
            if (key !== 'type' && key !== 'offset')
                delete token[key];
        Object.assign(token, { type, indent, source, end });
    }
}
|
||
|
|
||
|
/**
 * Stringify a CST document, token, or collection item
 *
 * Fair warning: This applies no validation whatsoever, and
 * simply concatenates the sources in their logical order.
 */
const stringify = (cst) => 'type' in cst ? stringifyToken(cst) : stringifyItem(cst);
/** Reassemble a single CST token (recursing through its parts) into source text. */
function stringifyToken(token) {
    switch (token.type) {
        case 'block-scalar':
            // Header props (header, comments, newline) precede the body.
            return token.props.map((tok) => stringifyToken(tok)).join('') + token.source;
        case 'block-map':
        case 'block-seq':
            return token.items.map((item) => stringifyItem(item)).join('');
        case 'flow-collection': {
            const inner = token.items.map((item) => stringifyItem(item)).join('');
            const tail = token.end.map((st) => st.source).join('');
            return token.start.source + inner + tail;
        }
        case 'document': {
            let res = stringifyItem(token);
            if (token.end)
                for (const st of token.end)
                    res += st.source;
            return res;
        }
        default: {
            // Plain tokens: own source plus any trailing end tokens.
            let res = token.source;
            if ('end' in token && token.end)
                for (const st of token.end)
                    res += st.source;
            return res;
        }
    }
}
|
||
|
/** Concatenate the sources of a collection item's start/key/sep/value parts. */
function stringifyItem({ start, key, sep, value }) {
    const parts = [];
    for (const st of start)
        parts.push(st.source);
    if (key)
        parts.push(stringifyToken(key));
    if (sep)
        for (const st of sep)
            parts.push(st.source);
    if (value)
        parts.push(stringifyToken(value));
    return parts.join('');
}
|
||
|
|
||
|
/** The byte order mark */
const BOM = '\u{FEFF}';
/** Start of doc-mode */
const DOCUMENT = '\x02'; // C0: Start of Text
/** Unexpected end of flow-mode */
const FLOW_END = '\x18'; // C0: Cancel
/** Next token is a scalar value */
const SCALAR = '\x1f'; // C0: Unit Separator
/** Identify the type of a lexer token. May return `null` for unknown tokens. */
function tokenType(source) {
    // Exact-match tokens: control characters, indicators and line breaks.
    switch (source) {
        case BOM:
            return 'byte-order-mark';
        case DOCUMENT:
            return 'doc-mode';
        case FLOW_END:
            return 'flow-error-end';
        case SCALAR:
            return 'scalar';
        case '---':
            return 'doc-start';
        case '...':
            return 'doc-end';
        case '':
        case '\n':
        case '\r\n':
            return 'newline';
        case '-':
            return 'seq-item-ind';
        case '?':
            return 'explicit-key-ind';
        case ':':
            return 'map-value-ind';
        case '{':
            return 'flow-map-start';
        case '}':
            return 'flow-map-end';
        case '[':
            return 'flow-seq-start';
        case ']':
            return 'flow-seq-end';
        case ',':
            return 'comma';
    }
    // Longer tokens are classified by their first character.
    const byFirstChar = {
        ' ': 'space',
        '\t': 'space',
        '#': 'comment',
        '%': 'directive-line',
        '*': 'alias',
        '&': 'anchor',
        '!': 'tag',
        "'": 'single-quoted-scalar',
        '"': 'double-quoted-scalar',
        '|': 'block-scalar-header',
        '>': 'block-scalar-header'
    };
    return byFirstChar[source[0]] || null;
}
|
||
|
|
||
|
/*
|
||
|
START -> stream
|
||
|
|
||
|
stream
|
||
|
directive -> line-end -> stream
|
||
|
indent + line-end -> stream
|
||
|
[else] -> line-start
|
||
|
|
||
|
line-end
|
||
|
comment -> line-end
|
||
|
newline -> .
|
||
|
input-end -> END
|
||
|
|
||
|
line-start
|
||
|
doc-start -> doc
|
||
|
doc-end -> stream
|
||
|
[else] -> indent -> block-start
|
||
|
|
||
|
block-start
|
||
|
seq-item-start -> block-start
|
||
|
explicit-key-start -> block-start
|
||
|
map-value-start -> block-start
|
||
|
[else] -> doc
|
||
|
|
||
|
doc
|
||
|
line-end -> line-start
|
||
|
spaces -> doc
|
||
|
anchor -> doc
|
||
|
tag -> doc
|
||
|
flow-start -> flow -> doc
|
||
|
flow-end -> error -> doc
|
||
|
seq-item-start -> error -> doc
|
||
|
explicit-key-start -> error -> doc
|
||
|
map-value-start -> doc
|
||
|
alias -> doc
|
||
|
quote-start -> quoted-scalar -> doc
|
||
|
block-scalar-header -> line-end -> block-scalar(min) -> line-start
|
||
|
[else] -> plain-scalar(false, min) -> doc
|
||
|
|
||
|
flow
|
||
|
line-end -> flow
|
||
|
spaces -> flow
|
||
|
anchor -> flow
|
||
|
tag -> flow
|
||
|
flow-start -> flow -> flow
|
||
|
flow-end -> .
|
||
|
seq-item-start -> error -> flow
|
||
|
explicit-key-start -> flow
|
||
|
map-value-start -> flow
|
||
|
alias -> flow
|
||
|
quote-start -> quoted-scalar -> flow
|
||
|
comma -> flow
|
||
|
[else] -> plain-scalar(true, 0) -> flow
|
||
|
|
||
|
quoted-scalar
|
||
|
quote-end -> .
|
||
|
[else] -> quoted-scalar
|
||
|
|
||
|
block-scalar(min)
|
||
|
newline + peek(indent < min) -> .
|
||
|
[else] -> block-scalar(min)
|
||
|
|
||
|
plain-scalar(is-flow, min)
|
||
|
scalar-end(is-flow) -> .
|
||
|
peek(newline + (indent < min)) -> .
|
||
|
[else] -> plain-scalar(min)
|
||
|
*/
|
||
|
/**
 * `true` for characters that terminate a plain scalar line: spaces, tabs,
 * line breaks, or end of input (undefined).
 */
function isEmpty(ch) {
    return (ch === undefined ||
        ch === ' ' ||
        ch === '\n' ||
        ch === '\r' ||
        ch === '\t');
}
|
||
|
// Character classes used by the lexer below.
const hexDigits = [...'0123456789ABCDEFabcdef'];
const tagChars = [..."0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()"];
const invalidFlowScalarChars = [...',[]{}'];
const invalidAnchorChars = [...' ,[]{}\n\r\t'];
/** An anchor name ends at whitespace, flow indicators, or end of input. */
const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);
|
||
|
/**
|
||
|
* Splits an input string into lexical tokens, i.e. smaller strings that are
|
||
|
* easily identifiable by `tokens.tokenType()`.
|
||
|
*
|
||
|
* Lexing starts always in a "stream" context. Incomplete input may be buffered
|
||
|
* until a complete token can be emitted.
|
||
|
*
|
||
|
* In addition to slices of the original input, the following control characters
|
||
|
* may also be emitted:
|
||
|
*
|
||
|
* - `\x02` (Start of Text): A document starts with the next token
|
||
|
* - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error)
|
||
|
* - `\x1f` (Unit Separator): Next token is a scalar value
|
||
|
* - `\u{FEFF}` (Byte order mark): Emitted separately outside documents
|
||
|
*/
|
||
|
class Lexer {
    constructor() {
        /**
         * Flag indicating whether the end of the current buffer marks the end of
         * all input
         */
        this.atEnd = false;
        /**
         * Explicit indent set in block scalar header, as an offset from the current
         * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not
         * explicitly set.
         */
        this.blockScalarIndent = -1;
        /**
         * Block scalars that include a + (keep) chomping indicator in their header
         * include trailing empty lines, which are otherwise excluded from the
         * scalar's contents.
         */
        this.blockScalarKeep = false;
        /** Current input */
        this.buffer = '';
        /**
         * Flag noting whether the map value indicator : can immediately follow this
         * node within a flow context.
         */
        this.flowKey = false;
        /** Count of surrounding flow collection levels. */
        this.flowLevel = 0;
        /**
         * Minimum level of indentation required for next lines to be parsed as a
         * part of the current scalar value.
         */
        this.indentNext = 0;
        /** Indentation level of the current line. */
        this.indentValue = 0;
        /** Position of the next \n character. */
        this.lineEndPos = null;
        /** Stores the state of the lexer if reaching the end of incomplete input */
        this.next = null;
        /** A pointer to `buffer`; the current position of the lexer. */
        this.pos = 0;
    }
    /**
     * Generate YAML tokens from the `source` string. If `incomplete`,
     * a part of the last line may be left as a buffer for the next call.
     *
     * @returns A generator of lexical tokens
     */
    *lex(source, incomplete = false) {
        if (source) {
            // Append to any leftover buffer from a previous incomplete call.
            this.buffer = this.buffer ? this.buffer + source : source;
            this.lineEndPos = null;
        }
        this.atEnd = !incomplete;
        // Resume from a previously saved state, or start a fresh stream.
        let next = this.next || 'stream';
        while (next && (incomplete || this.hasChars(1)))
            next = yield* this.parseNext(next);
    }
    // True if only spaces/tabs remain before a comment, newline, or end of buffer.
    atLineEnd() {
        let i = this.pos;
        let ch = this.buffer[i];
        while (ch === ' ' || ch === '\t')
            ch = this.buffer[++i];
        if (!ch || ch === '#' || ch === '\n')
            return true;
        if (ch === '\r')
            return this.buffer[i + 1] === '\n';
        return false;
    }
    // Character at offset `n` from the current position (may be undefined).
    charAt(n) {
        return this.buffer[this.pos + n];
    }
    // Given `offset` pointing just past a newline, return the index at which the
    // current scalar's content continues, or -1 if the scalar ends here
    // (unindent, or a `---`/`...` document marker at the top level).
    continueScalar(offset) {
        let ch = this.buffer[offset];
        if (this.indentNext > 0) {
            let indent = 0;
            while (ch === ' ')
                ch = this.buffer[++indent + offset];
            if (ch === '\r') {
                const next = this.buffer[indent + offset + 1];
                if (next === '\n' || (!next && !this.atEnd))
                    return offset + indent + 1;
            }
            // An empty line, a sufficiently indented line, or a possibly
            // incomplete buffer all continue the scalar.
            return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd)
                ? offset + indent
                : -1;
        }
        if (ch === '-' || ch === '.') {
            const dt = this.buffer.substr(offset, 3);
            if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))
                return -1;
        }
        return offset;
    }
    // Current line from `pos` up to (but excluding) the line terminator.
    // Returns null if the line may be incomplete (no \n yet and !atEnd).
    getLine() {
        // `lineEndPos` caches the next \n position across calls; invalidate it
        // when it is stale (before `pos`) or unset.
        let end = this.lineEndPos;
        if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {
            end = this.buffer.indexOf('\n', this.pos);
            this.lineEndPos = end;
        }
        if (end === -1)
            return this.atEnd ? this.buffer.substring(this.pos) : null;
        if (this.buffer[end - 1] === '\r')
            end -= 1;
        return this.buffer.substring(this.pos, end);
    }
    // True if at least `n` characters remain from the current position.
    hasChars(n) {
        return this.pos + n <= this.buffer.length;
    }
    // Save lexer state for the next `lex()` call: drop consumed input and
    // remember which parse state to resume in. Always returns null so callers
    // can `return this.setNext(...)` to stop the parse loop.
    setNext(state) {
        this.buffer = this.buffer.substring(this.pos);
        this.pos = 0;
        this.lineEndPos = null;
        this.next = state;
        return null;
    }
    // The next `n` characters without advancing the position.
    peek(n) {
        return this.buffer.substr(this.pos, n);
    }
    // Dispatch to the parser for the given state; returns the next state.
    *parseNext(next) {
        switch (next) {
            case 'stream':
                return yield* this.parseStream();
            case 'line-start':
                return yield* this.parseLineStart();
            case 'block-start':
                return yield* this.parseBlockStart();
            case 'doc':
                return yield* this.parseDocument();
            case 'flow':
                return yield* this.parseFlowCollection();
            case 'quoted-scalar':
                return yield* this.parseQuotedScalar();
            case 'block-scalar':
                return yield* this.parseBlockScalar();
            case 'plain-scalar':
                return yield* this.parsePlainScalar();
        }
    }
    // Top-level state: handles BOM, %directives, and blank/comment lines
    // before the start of a document.
    *parseStream() {
        let line = this.getLine();
        if (line === null)
            return this.setNext('stream');
        if (line[0] === BOM) {
            yield* this.pushCount(1);
            line = line.substring(1);
        }
        if (line[0] === '%') {
            // Directive line; exclude a trailing comment and trailing spaces
            // from the directive's own source.
            let dirEnd = line.length;
            const cs = line.indexOf('#');
            if (cs !== -1) {
                const ch = line[cs - 1];
                if (ch === ' ' || ch === '\t')
                    dirEnd = cs - 1;
            }
            while (true) {
                const ch = line[dirEnd - 1];
                if (ch === ' ' || ch === '\t')
                    dirEnd -= 1;
                else
                    break;
            }
            const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));
            yield* this.pushCount(line.length - n); // possible comment
            // NOTE(review): `pushNewline` is a generator, so calling it without
            // `yield*` is a no-op here; the newline is instead consumed by the
            // `atLineEnd()` branch on the next 'stream' pass — confirm intended.
            this.pushNewline();
            return 'stream';
        }
        if (this.atLineEnd()) {
            // Blank or comment-only line before the document begins.
            const sp = yield* this.pushSpaces(true);
            yield* this.pushCount(line.length - sp);
            yield* this.pushNewline();
            return 'stream';
        }
        // Real content: emit the document-start marker and parse its first line.
        yield DOCUMENT;
        return yield* this.parseLineStart();
    }
    // Start of a line inside a document: handles `---`/`...` markers and
    // measures the line's indentation.
    *parseLineStart() {
        const ch = this.charAt(0);
        if (!ch && !this.atEnd)
            return this.setNext('line-start');
        if (ch === '-' || ch === '.') {
            // Need 4 chars to safely distinguish `---`/`...` plus a separator.
            if (!this.atEnd && !this.hasChars(4))
                return this.setNext('line-start');
            const s = this.peek(3);
            if (s === '---' && isEmpty(this.charAt(3))) {
                yield* this.pushCount(3);
                this.indentValue = 0;
                this.indentNext = 0;
                return 'doc';
            }
            else if (s === '...' && isEmpty(this.charAt(3))) {
                yield* this.pushCount(3);
                return 'stream';
            }
        }
        this.indentValue = yield* this.pushSpaces(false);
        if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))
            this.indentNext = this.indentValue;
        return yield* this.parseBlockStart();
    }
    // Consume leading block collection indicators (`- `, `? `, `: `) and track
    // the indentation they contribute.
    *parseBlockStart() {
        const [ch0, ch1] = this.peek(2);
        if (!ch1 && !this.atEnd)
            return this.setNext('block-start');
        if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) {
            const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));
            this.indentNext = this.indentValue + 1;
            this.indentValue += n;
            return yield* this.parseBlockStart();
        }
        return 'doc';
    }
    // Inside a document in block context: dispatch on the first significant
    // character after any node properties (anchor/tag) and indicators.
    *parseDocument() {
        yield* this.pushSpaces(true);
        const line = this.getLine();
        if (line === null)
            return this.setNext('doc');
        let n = yield* this.pushIndicators();
        switch (line[n]) {
            case '#':
                yield* this.pushCount(line.length - n);
            // fallthrough
            case undefined:
                yield* this.pushNewline();
                return yield* this.parseLineStart();
            case '{':
            case '[':
                yield* this.pushCount(1);
                this.flowKey = false;
                this.flowLevel = 1;
                return 'flow';
            case '}':
            case ']':
                // this is an error
                yield* this.pushCount(1);
                return 'doc';
            case '*':
                yield* this.pushUntil(isNotAnchorChar);
                return 'doc';
            case '"':
            case "'":
                return yield* this.parseQuotedScalar();
            case '|':
            case '>':
                n += yield* this.parseBlockScalarHeader();
                n += yield* this.pushSpaces(true);
                yield* this.pushCount(line.length - n);
                yield* this.pushNewline();
                return yield* this.parseBlockScalar();
            default:
                return yield* this.parsePlainScalar();
        }
    }
    // Inside `{...}` / `[...]`: tokenize one flow-collection line.
    *parseFlowCollection() {
        let nl, sp;
        let indent = -1;
        // Skip any run of newlines + indentation, remembering the indent of the
        // last line started.
        do {
            nl = yield* this.pushNewline();
            sp = yield* this.pushSpaces(true);
            if (nl > 0)
                this.indentValue = indent = sp;
        } while (nl + sp > 0);
        const line = this.getLine();
        if (line === null)
            return this.setNext('flow');
        if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||
            (indent === 0 &&
                (line.startsWith('---') || line.startsWith('...')) &&
                isEmpty(line[3]))) {
            // Allowing for the terminal ] or } at the same (rather than greater)
            // indent level as the initial [ or { is technically invalid, but
            // failing here would be surprising to users.
            const atFlowEndMarker = indent === this.indentNext - 1 &&
                this.flowLevel === 1 &&
                (line[0] === ']' || line[0] === '}');
            if (!atFlowEndMarker) {
                // this is an error
                this.flowLevel = 0;
                yield FLOW_END;
                return yield* this.parseLineStart();
            }
        }
        let n = 0;
        while (line[n] === ',') {
            n += yield* this.pushCount(1);
            n += yield* this.pushSpaces(true);
            this.flowKey = false;
        }
        n += yield* this.pushIndicators();
        switch (line[n]) {
            case undefined:
                return 'flow';
            case '#':
                yield* this.pushCount(line.length - n);
                return 'flow';
            case '{':
            case '[':
                yield* this.pushCount(1);
                this.flowKey = false;
                this.flowLevel += 1;
                return 'flow';
            case '}':
            case ']':
                yield* this.pushCount(1);
                // A closed collection may itself be a key: `{a: {b: c}: d}`.
                this.flowKey = true;
                this.flowLevel -= 1;
                return this.flowLevel ? 'flow' : 'doc';
            case '*':
                yield* this.pushUntil(isNotAnchorChar);
                return 'flow';
            case '"':
            case "'":
                this.flowKey = true;
                return yield* this.parseQuotedScalar();
            case ':': {
                const next = this.charAt(1);
                if (this.flowKey || isEmpty(next) || next === ',') {
                    this.flowKey = false;
                    yield* this.pushCount(1);
                    yield* this.pushSpaces(true);
                    return 'flow';
                }
            }
            // fallthrough
            default:
                this.flowKey = false;
                return yield* this.parsePlainScalar();
        }
    }
    // Single- or double-quoted scalar, possibly spanning multiple lines.
    *parseQuotedScalar() {
        const quote = this.charAt(0);
        let end = this.buffer.indexOf(quote, this.pos + 1);
        if (quote === "'") {
            // '' is an escaped quote inside a single-quoted scalar.
            while (end !== -1 && this.buffer[end + 1] === "'")
                end = this.buffer.indexOf("'", end + 2);
        }
        else {
            // double-quote
            // A closing " preceded by an odd number of backslashes is escaped.
            while (end !== -1) {
                let n = 0;
                while (this.buffer[end - 1 - n] === '\\')
                    n += 1;
                if (n % 2 === 0)
                    break;
                end = this.buffer.indexOf('"', end + 1);
            }
        }
        // Only looking for newlines within the quotes
        const qb = this.buffer.substring(0, end);
        let nl = qb.indexOf('\n', this.pos);
        if (nl !== -1) {
            // Each continuation line must satisfy the scalar's indent rules.
            while (nl !== -1) {
                const cs = this.continueScalar(nl + 1);
                if (cs === -1)
                    break;
                nl = qb.indexOf('\n', cs);
            }
            if (nl !== -1) {
                // this is an error caused by an unexpected unindent
                end = nl - (qb[nl - 1] === '\r' ? 2 : 1);
            }
        }
        if (end === -1) {
            if (!this.atEnd)
                return this.setNext('quoted-scalar');
            end = this.buffer.length;
        }
        yield* this.pushToIndex(end + 1, false);
        return this.flowLevel ? 'flow' : 'doc';
    }
    // The `|`/`>` header with optional chomping (+/-) and indent (1-9) modifiers.
    *parseBlockScalarHeader() {
        this.blockScalarIndent = -1;
        this.blockScalarKeep = false;
        let i = this.pos;
        while (true) {
            const ch = this.buffer[++i];
            if (ch === '+')
                this.blockScalarKeep = true;
            else if (ch > '0' && ch <= '9') // '0' is not a valid indent indicator
                this.blockScalarIndent = Number(ch) - 1;
            else if (ch !== '-')
                break;
        }
        return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');
    }
    // Body of a block (literal/folded) scalar, after its header line.
    *parseBlockScalar() {
        let nl = this.pos - 1; // may be -1 if this.pos === 0
        let indent = 0;
        let ch;
        // Scan leading empty lines to find the first content line's indent.
        loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {
            switch (ch) {
                case ' ':
                    indent += 1;
                    break;
                case '\n':
                    nl = i;
                    indent = 0;
                    break;
                case '\r': {
                    const next = this.buffer[i + 1];
                    if (!next && !this.atEnd)
                        return this.setNext('block-scalar');
                    if (next === '\n')
                        break;
                } // fallthrough
                default:
                    break loop;
            }
        }
        if (!ch && !this.atEnd)
            return this.setNext('block-scalar');
        if (indent >= this.indentNext) {
            // Lock in the content indent: auto-detected, or header-specified.
            if (this.blockScalarIndent === -1)
                this.indentNext = indent;
            else
                this.indentNext += this.blockScalarIndent;
            do {
                const cs = this.continueScalar(nl + 1);
                if (cs === -1)
                    break;
                nl = this.buffer.indexOf('\n', cs);
            } while (nl !== -1);
            if (nl === -1) {
                if (!this.atEnd)
                    return this.setNext('block-scalar');
                nl = this.buffer.length;
            }
        }
        if (!this.blockScalarKeep) {
            // Strip trailing empty lines (default clip/strip chomping).
            do {
                let i = nl - 1;
                let ch = this.buffer[i];
                if (ch === '\r')
                    ch = this.buffer[--i];
                while (ch === ' ' || ch === '\t')
                    ch = this.buffer[--i];
                if (ch === '\n' && i >= this.pos)
                    nl = i;
                else
                    break;
            } while (true);
        }
        yield SCALAR;
        yield* this.pushToIndex(nl + 1, true);
        return yield* this.parseLineStart();
    }
    // Unquoted scalar; end detection differs between flow and block contexts.
    *parsePlainScalar() {
        const inFlow = this.flowLevel > 0;
        let end = this.pos - 1; // last index of significant content
        let i = this.pos - 1;
        let ch;
        while ((ch = this.buffer[++i])) {
            if (ch === ':') {
                const next = this.buffer[i + 1];
                if (isEmpty(next) || (inFlow && next === ','))
                    break;
                end = i;
            }
            else if (isEmpty(ch)) {
                let next = this.buffer[i + 1];
                if (ch === '\r') {
                    if (next === '\n') {
                        i += 1;
                        ch = '\n';
                        next = this.buffer[i + 1];
                    }
                    else
                        end = i;
                }
                if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))
                    break;
                if (ch === '\n') {
                    const cs = this.continueScalar(i + 1);
                    if (cs === -1)
                        break;
                    i = Math.max(i, cs - 2); // to advance, but still account for ' #'
                }
            }
            else {
                if (inFlow && invalidFlowScalarChars.includes(ch))
                    break;
                end = i;
            }
        }
        if (!ch && !this.atEnd)
            return this.setNext('plain-scalar');
        yield SCALAR;
        yield* this.pushToIndex(end + 1, true);
        return inFlow ? 'flow' : 'doc';
    }
    // Emit the next `n` characters as one token; returns the count emitted.
    *pushCount(n) {
        if (n > 0) {
            yield this.buffer.substr(this.pos, n);
            this.pos += n;
            return n;
        }
        return 0;
    }
    // Emit everything up to buffer index `i` as one token; `allowEmpty` emits
    // '' even when nothing is consumed (used for empty scalars).
    *pushToIndex(i, allowEmpty) {
        const s = this.buffer.slice(this.pos, i);
        if (s) {
            yield s;
            this.pos += s.length;
            return s.length;
        }
        else if (allowEmpty)
            yield '';
        return 0;
    }
    // Recursively emit node properties (tags, anchors) and key/value/seq
    // indicators; returns the total character count consumed.
    *pushIndicators() {
        switch (this.charAt(0)) {
            case '!':
                return ((yield* this.pushTag()) +
                    (yield* this.pushSpaces(true)) +
                    (yield* this.pushIndicators()));
            case '&':
                return ((yield* this.pushUntil(isNotAnchorChar)) +
                    (yield* this.pushSpaces(true)) +
                    (yield* this.pushIndicators()));
            case ':':
            case '?': // this is an error outside flow collections
            case '-': // this is an error
                if (isEmpty(this.charAt(1))) {
                    if (this.flowLevel === 0)
                        this.indentNext = this.indentValue + 1;
                    else if (this.flowKey)
                        this.flowKey = false;
                    return ((yield* this.pushCount(1)) +
                        (yield* this.pushSpaces(true)) +
                        (yield* this.pushIndicators()));
                }
        }
        return 0;
    }
    // Emit a `!...` tag token, in either verbatim `!<...>` or shorthand form.
    *pushTag() {
        if (this.charAt(1) === '<') {
            let i = this.pos + 2;
            let ch = this.buffer[i];
            while (!isEmpty(ch) && ch !== '>')
                ch = this.buffer[++i];
            return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false);
        }
        else {
            let i = this.pos + 1;
            let ch = this.buffer[i];
            while (ch) {
                if (tagChars.includes(ch))
                    ch = this.buffer[++i];
                else if (ch === '%' &&
                    hexDigits.includes(this.buffer[i + 1]) &&
                    hexDigits.includes(this.buffer[i + 2])) {
                    // %xx percent-escape
                    ch = this.buffer[(i += 3)];
                }
                else
                    break;
            }
            return yield* this.pushToIndex(i, false);
        }
    }
    // Emit a \n or \r\n token if one is at the current position.
    *pushNewline() {
        const ch = this.buffer[this.pos];
        if (ch === '\n')
            return yield* this.pushCount(1);
        else if (ch === '\r' && this.charAt(1) === '\n')
            return yield* this.pushCount(2);
        else
            return 0;
    }
    // Emit a run of spaces (and tabs, if `allowTabs`); returns the run length.
    *pushSpaces(allowTabs) {
        let i = this.pos - 1;
        let ch;
        do {
            ch = this.buffer[++i];
        } while (ch === ' ' || (allowTabs && ch === '\t'));
        const n = i - this.pos;
        if (n > 0) {
            yield this.buffer.substr(this.pos, n);
            this.pos = i;
        }
        return n;
    }
    // Emit characters up to (excluding) the first one for which `test` is true.
    *pushUntil(test) {
        let i = this.pos;
        let ch = this.buffer[i];
        while (!test(ch))
            ch = this.buffer[++i];
        return yield* this.pushToIndex(i, false);
    }
}
|
||
|
|
||
|
/**
 * Tracks newline offsets seen during parsing, so that any absolute offset
 * into the input can be efficiently mapped to a one-indexed
 * `{ line, col }` position.
 */
class LineCounter {
    constructor() {
        this.lineStarts = [];
        /**
         * Record the offset at which a new line begins. Offsets must be added
         * in ascending order; otherwise call `lineCounter.lineStarts.sort()`
         * before calling `linePos()`.
         */
        this.addNewLine = (offset) => this.lineStarts.push(offset);
        /**
         * Binary-search for the 1-indexed { line, col } position of `offset`.
         * A result with `line === 0` means `addNewLine` has never been called,
         * or `offset` precedes the first known line start.
         */
        this.linePos = (offset) => {
            // Find the leftmost recorded line start that is >= offset.
            let lo = 0;
            let hi = this.lineStarts.length;
            while (lo < hi) {
                const mid = (lo + hi) >> 1; // Math.floor((lo + hi) / 2)
                if (this.lineStarts[mid] < offset)
                    lo = mid + 1;
                else
                    hi = mid;
            }
            // Exactly at a line start: first column of that line.
            if (this.lineStarts[lo] === offset)
                return { line: lo + 1, col: 1 };
            // Before every known line start.
            if (lo === 0)
                return { line: 0, col: offset };
            const lineStart = this.lineStarts[lo - 1];
            return { line: lo, col: offset - lineStart + 1 };
        };
    }
}
|
||
|
|
||
|
/** Whether any token in `list` has the given `type`. */
function includesToken(list, type) {
    return list.some(token => token.type === type);
}
|
||
|
/** Whether `list` contains any token other than spaces, comments, or newlines. */
function includesNonEmpty(list) {
    for (const token of list) {
        const t = token.type;
        if (t !== 'space' && t !== 'comment' && t !== 'newline')
            return true;
    }
    return false;
}
|
||
|
/** Whether `token` is a node kind that may appear as a flow-context value. */
function isFlowToken(token) {
    const flowTypes = [
        'alias',
        'scalar',
        'single-quoted-scalar',
        'double-quoted-scalar',
        'flow-collection'
    ];
    // token may be null/undefined; anything without a matching type is not flow.
    return token != null && flowTypes.includes(token.type);
}
|
||
|
/** Returns the props array preceding the position where a new node would attach to `parent`. */
function getPrevProps(parent) {
    if (parent.type === 'document')
        return parent.start;
    if (parent.type === 'block-map') {
        const lastItem = parent.items[parent.items.length - 1];
        // Props follow the key separator when one exists, else the item start.
        return lastItem.sep || lastItem.start;
    }
    if (parent.type === 'block-seq')
        return parent.items[parent.items.length - 1].start;
    /* istanbul ignore next should not happen */
    return [];
}
|
||
|
/**
 * Splices off (and returns) the trailing props of `prev` that belong to the
 * *next* key: everything after the last structural marker, skipping any
 * spaces that immediately follow it. Note: May modify input array.
 */
function getFirstKeyStartProps(prev) {
    if (prev.length === 0)
        return [];
    const markers = new Set([
        'doc-start',
        'explicit-key-ind',
        'map-value-ind',
        'seq-item-ind',
        'newline'
    ]);
    // Walk backwards to the last marker token (or -1 if there is none).
    let i = prev.length - 1;
    while (i >= 0 && !markers.has(prev[i].type))
        i -= 1;
    // Skip the marker itself plus any spaces directly after it.
    i += 1;
    while (i < prev.length && prev[i].type === 'space')
        i += 1;
    return prev.splice(i, prev.length);
}
|
||
|
/**
 * Note: May modify input array.
 * In a flow sequence, an item parsed with a separator but no value and no
 * explicit key/value indicators was mis-read as a map key; reinterpret it
 * as a plain sequence value and re-attach its separator tokens.
 */
function fixFlowSeqItems(fc) {
    // Only flow sequences need this fix; flow maps are left untouched.
    if (fc.start.type === 'flow-seq-start') {
        for (const it of fc.items) {
            if (it.sep &&
                !it.value &&
                !includesToken(it.start, 'explicit-key-ind') &&
                !includesToken(it.sep, 'map-value-ind')) {
                // Promote the mis-parsed key to be the item's value.
                if (it.key)
                    it.value = it.key;
                delete it.key;
                // Move the separator tokens onto the value's `end` props when
                // the value is a flow token, otherwise onto the item's start.
                if (isFlowToken(it.value)) {
                    if (it.value.end)
                        Array.prototype.push.apply(it.value.end, it.sep);
                    else
                        it.value.end = it.sep;
                }
                else
                    Array.prototype.push.apply(it.start, it.sep);
                delete it.sep;
            }
        }
    }
}
|
||
|
/**
|
||
|
* A YAML concrete syntax tree (CST) parser
|
||
|
*
|
||
|
* ```ts
|
||
|
* const src: string = ...
|
||
|
* for (const token of new Parser().parse(src)) {
|
||
|
* // token: Token
|
||
|
* }
|
||
|
* ```
|
||
|
*
|
||
|
* To use the parser with a user-provided lexer:
|
||
|
*
|
||
|
* ```ts
|
||
|
* function* parse(source: string, lexer: Lexer) {
|
||
|
* const parser = new Parser()
|
||
|
* for (const lexeme of lexer.lex(source))
|
||
|
* yield* parser.next(lexeme)
|
||
|
* yield* parser.end()
|
||
|
* }
|
||
|
*
|
||
|
* const src: string = ...
|
||
|
* const lexer = new Lexer()
|
||
|
* for (const token of parse(src, lexer)) {
|
||
|
* // token: Token
|
||
|
* }
|
||
|
* ```
|
||
|
*/
|
||
|
class Parser {
|
||
|
/**
|
||
|
* @param onNewLine - If defined, called separately with the start position of
|
||
|
* each new line (in `parse()`, including the start of input).
|
||
|
*/
|
||
|
constructor(onNewLine) {
|
||
|
/** If true, space and sequence indicators count as indentation */
|
||
|
this.atNewLine = true;
|
||
|
/** If true, next token is a scalar value */
|
||
|
this.atScalar = false;
|
||
|
/** Current indentation level */
|
||
|
this.indent = 0;
|
||
|
/** Current offset since the start of parsing */
|
||
|
this.offset = 0;
|
||
|
/** On the same line with a block map key */
|
||
|
this.onKeyLine = false;
|
||
|
/** Top indicates the node that's currently being built */
|
||
|
this.stack = [];
|
||
|
/** The source of the current token, set in parse() */
|
||
|
this.source = '';
|
||
|
/** The type of the current token, set in parse() */
|
||
|
this.type = '';
|
||
|
// Must be defined after `next()`
|
||
|
this.lexer = new Lexer();
|
||
|
this.onNewLine = onNewLine;
|
||
|
}
|
||
|
/**
|
||
|
* Parse `source` as a YAML stream.
|
||
|
* If `incomplete`, a part of the last line may be left as a buffer for the next call.
|
||
|
*
|
||
|
* Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.
|
||
|
*
|
||
|
* @returns A generator of tokens representing each directive, document, and other structure.
|
||
|
*/
|
||
|
*parse(source, incomplete = false) {
|
||
|
if (this.onNewLine && this.offset === 0)
|
||
|
this.onNewLine(0);
|
||
|
for (const lexeme of this.lexer.lex(source, incomplete))
|
||
|
yield* this.next(lexeme);
|
||
|
if (!incomplete)
|
||
|
yield* this.end();
|
||
|
}
|
||
|
/**
|
||
|
* Advance the parser by the `source` of one lexical token.
|
||
|
*/
|
||
|
*next(source) {
|
||
|
this.source = source;
|
||
|
if (this.atScalar) {
|
||
|
this.atScalar = false;
|
||
|
yield* this.step();
|
||
|
this.offset += source.length;
|
||
|
return;
|
||
|
}
|
||
|
const type = tokenType(source);
|
||
|
if (!type) {
|
||
|
const message = `Not a YAML token: ${source}`;
|
||
|
yield* this.pop({ type: 'error', offset: this.offset, message, source });
|
||
|
this.offset += source.length;
|
||
|
}
|
||
|
else if (type === 'scalar') {
|
||
|
this.atNewLine = false;
|
||
|
this.atScalar = true;
|
||
|
this.type = 'scalar';
|
||
|
}
|
||
|
else {
|
||
|
this.type = type;
|
||
|
yield* this.step();
|
||
|
switch (type) {
|
||
|
case 'newline':
|
||
|
this.atNewLine = true;
|
||
|
this.indent = 0;
|
||
|
if (this.onNewLine)
|
||
|
this.onNewLine(this.offset + source.length);
|
||
|
break;
|
||
|
case 'space':
|
||
|
if (this.atNewLine && source[0] === ' ')
|
||
|
this.indent += source.length;
|
||
|
break;
|
||
|
case 'explicit-key-ind':
|
||
|
case 'map-value-ind':
|
||
|
case 'seq-item-ind':
|
||
|
if (this.atNewLine)
|
||
|
this.indent += source.length;
|
||
|
break;
|
||
|
case 'doc-mode':
|
||
|
case 'flow-error-end':
|
||
|
return;
|
||
|
default:
|
||
|
this.atNewLine = false;
|
||
|
}
|
||
|
this.offset += source.length;
|
||
|
}
|
||
|
}
|
||
|
/** Call at end of input to push out any remaining constructions */
|
||
|
*end() {
|
||
|
while (this.stack.length > 0)
|
||
|
yield* this.pop();
|
||
|
}
|
||
|
get sourceToken() {
|
||
|
const st = {
|
||
|
type: this.type,
|
||
|
offset: this.offset,
|
||
|
indent: this.indent,
|
||
|
source: this.source
|
||
|
};
|
||
|
return st;
|
||
|
}
|
||
|
*step() {
|
||
|
const top = this.peek(1);
|
||
|
if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {
|
||
|
while (this.stack.length > 0)
|
||
|
yield* this.pop();
|
||
|
this.stack.push({
|
||
|
type: 'doc-end',
|
||
|
offset: this.offset,
|
||
|
source: this.source
|
||
|
});
|
||
|
return;
|
||
|
}
|
||
|
if (!top)
|
||
|
return yield* this.stream();
|
||
|
switch (top.type) {
|
||
|
case 'document':
|
||
|
return yield* this.document(top);
|
||
|
case 'alias':
|
||
|
case 'scalar':
|
||
|
case 'single-quoted-scalar':
|
||
|
case 'double-quoted-scalar':
|
||
|
return yield* this.scalar(top);
|
||
|
case 'block-scalar':
|
||
|
return yield* this.blockScalar(top);
|
||
|
case 'block-map':
|
||
|
return yield* this.blockMap(top);
|
||
|
case 'block-seq':
|
||
|
return yield* this.blockSequence(top);
|
||
|
case 'flow-collection':
|
||
|
return yield* this.flowCollection(top);
|
||
|
case 'doc-end':
|
||
|
return yield* this.documentEnd(top);
|
||
|
}
|
||
|
/* istanbul ignore next should not happen */
|
||
|
yield* this.pop();
|
||
|
}
|
||
|
peek(n) {
|
||
|
return this.stack[this.stack.length - n];
|
||
|
}
|
||
|
*pop(error) {
|
||
|
const token = error || this.stack.pop();
|
||
|
/* istanbul ignore if should not happen */
|
||
|
if (!token) {
|
||
|
const message = 'Tried to pop an empty stack';
|
||
|
yield { type: 'error', offset: this.offset, source: '', message };
|
||
|
}
|
||
|
else if (this.stack.length === 0) {
|
||
|
yield token;
|
||
|
}
|
||
|
else {
|
||
|
const top = this.peek(1);
|
||
|
if (token.type === 'block-scalar') {
|
||
|
// Block scalars use their parent rather than header indent
|
||
|
token.indent = 'indent' in top ? top.indent : 0;
|
||
|
}
|
||
|
else if (token.type === 'flow-collection' && top.type === 'document') {
|
||
|
// Ignore all indent for top-level flow collections
|
||
|
token.indent = 0;
|
||
|
}
|
||
|
if (token.type === 'flow-collection')
|
||
|
fixFlowSeqItems(token);
|
||
|
switch (top.type) {
|
||
|
case 'document':
|
||
|
top.value = token;
|
||
|
break;
|
||
|
case 'block-scalar':
|
||
|
top.props.push(token); // error
|
||
|
break;
|
||
|
case 'block-map': {
|
||
|
const it = top.items[top.items.length - 1];
|
||
|
if (it.value) {
|
||
|
top.items.push({ start: [], key: token, sep: [] });
|
||
|
this.onKeyLine = true;
|
||
|
return;
|
||
|
}
|
||
|
else if (it.sep) {
|
||
|
it.value = token;
|
||
|
}
|
||
|
else {
|
||
|
Object.assign(it, { key: token, sep: [] });
|
||
|
this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');
|
||
|
return;
|
||
|
}
|
||
|
break;
|
||
|
}
|
||
|
case 'block-seq': {
|
||
|
const it = top.items[top.items.length - 1];
|
||
|
if (it.value)
|
||
|
top.items.push({ start: [], value: token });
|
||
|
else
|
||
|
it.value = token;
|
||
|
break;
|
||
|
}
|
||
|
case 'flow-collection': {
|
||
|
const it = top.items[top.items.length - 1];
|
||
|
if (!it || it.value)
|
||
|
top.items.push({ start: [], key: token, sep: [] });
|
||
|
else if (it.sep)
|
||
|
it.value = token;
|
||
|
else
|
||
|
Object.assign(it, { key: token, sep: [] });
|
||
|
return;
|
||
|
}
|
||
|
/* istanbul ignore next should not happen */
|
||
|
default:
|
||
|
yield* this.pop();
|
||
|
yield* this.pop(token);
|
||
|
}
|
||
|
if ((top.type === 'document' ||
|
||
|
top.type === 'block-map' ||
|
||
|
top.type === 'block-seq') &&
|
||
|
(token.type === 'block-map' || token.type === 'block-seq')) {
|
||
|
const last = token.items[token.items.length - 1];
|
||
|
if (last &&
|
||
|
!last.sep &&
|
||
|
!last.value &&
|
||
|
last.start.length > 0 &&
|
||
|
!includesNonEmpty(last.start) &&
|
||
|
(token.indent === 0 ||
|
||
|
last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {
|
||
|
if (top.type === 'document')
|
||
|
top.end = last.start;
|
||
|
else
|
||
|
top.items.push({ start: last.start });
|
||
|
token.items.splice(-1, 1);
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
*stream() {
|
||
|
switch (this.type) {
|
||
|
case 'directive-line':
|
||
|
yield { type: 'directive', offset: this.offset, source: this.source };
|
||
|
return;
|
||
|
case 'byte-order-mark':
|
||
|
case 'space':
|
||
|
case 'comment':
|
||
|
case 'newline':
|
||
|
yield this.sourceToken;
|
||
|
return;
|
||
|
case 'doc-mode':
|
||
|
case 'doc-start': {
|
||
|
const doc = {
|
||
|
type: 'document',
|
||
|
offset: this.offset,
|
||
|
start: []
|
||
|
};
|
||
|
if (this.type === 'doc-start')
|
||
|
doc.start.push(this.sourceToken);
|
||
|
this.stack.push(doc);
|
||
|
return;
|
||
|
}
|
||
|
}
|
||
|
yield {
|
||
|
type: 'error',
|
||
|
offset: this.offset,
|
||
|
message: `Unexpected ${this.type} token in YAML stream`,
|
||
|
source: this.source
|
||
|
};
|
||
|
}
|
||
|
*document(doc) {
|
||
|
if (doc.value)
|
||
|
return yield* this.lineEnd(doc);
|
||
|
switch (this.type) {
|
||
|
case 'doc-start': {
|
||
|
if (includesNonEmpty(doc.start)) {
|
||
|
yield* this.pop();
|
||
|
yield* this.step();
|
||
|
}
|
||
|
else
|
||
|
doc.start.push(this.sourceToken);
|
||
|
return;
|
||
|
}
|
||
|
case 'anchor':
|
||
|
case 'tag':
|
||
|
case 'space':
|
||
|
case 'comment':
|
||
|
case 'newline':
|
||
|
doc.start.push(this.sourceToken);
|
||
|
return;
|
||
|
}
|
||
|
const bv = this.startBlockValue(doc);
|
||
|
if (bv)
|
||
|
this.stack.push(bv);
|
||
|
else {
|
||
|
yield {
|
||
|
type: 'error',
|
||
|
offset: this.offset,
|
||
|
message: `Unexpected ${this.type} token in YAML document`,
|
||
|
source: this.source
|
||
|
};
|
||
|
}
|
||
|
}
|
||
|
*scalar(scalar) {
|
||
|
if (this.type === 'map-value-ind') {
|
||
|
const prev = getPrevProps(this.peek(2));
|
||
|
const start = getFirstKeyStartProps(prev);
|
||
|
let sep;
|
||
|
if (scalar.end) {
|
||
|
sep = scalar.end;
|
||
|
sep.push(this.sourceToken);
|
||
|
delete scalar.end;
|
||
|
}
|
||
|
else
|
||
|
sep = [this.sourceToken];
|
||
|
const map = {
|
||
|
type: 'block-map',
|
||
|
offset: scalar.offset,
|
||
|
indent: scalar.indent,
|
||
|
items: [{ start, key: scalar, sep }]
|
||
|
};
|
||
|
this.onKeyLine = true;
|
||
|
this.stack[this.stack.length - 1] = map;
|
||
|
}
|
||
|
else
|
||
|
yield* this.lineEnd(scalar);
|
||
|
}
|
||
|
*blockScalar(scalar) {
|
||
|
switch (this.type) {
|
||
|
case 'space':
|
||
|
case 'comment':
|
||
|
case 'newline':
|
||
|
scalar.props.push(this.sourceToken);
|
||
|
return;
|
||
|
case 'scalar':
|
||
|
scalar.source = this.source;
|
||
|
// block-scalar source includes trailing newline
|
||
|
this.atNewLine = true;
|
||
|
this.indent = 0;
|
||
|
if (this.onNewLine) {
|
||
|
let nl = this.source.indexOf('\n') + 1;
|
||
|
while (nl !== 0) {
|
||
|
this.onNewLine(this.offset + nl);
|
||
|
nl = this.source.indexOf('\n', nl) + 1;
|
||
|
}
|
||
|
}
|
||
|
yield* this.pop();
|
||
|
break;
|
||
|
/* istanbul ignore next should not happen */
|
||
|
default:
|
||
|
yield* this.pop();
|
||
|
yield* this.step();
|
||
|
}
|
||
|
}
|
||
|
/**
 * Feed the current lexer token into the block-map CST node on top of the
 * parser stack, mutating `map.items` in place.
 *
 * Generator: yields completed tokens indirectly via `this.pop()` / `this.step()`
 * when the token cannot belong to this map (e.g. outdented content).
 *
 * Item shape (see pushes below): { start: Token[], key?, sep?: Token[], value? }.
 */
*blockMap(map) {
    var _a;
    const it = map.items[map.items.length - 1];
    // it.sep is true-ish if pair already has key or : separator
    switch (this.type) {
        case 'newline':
            this.onKeyLine = false;
            if (it.value) {
                // A trailing comment on the value keeps its newline; otherwise
                // the newline opens a fresh (empty) item.
                const end = 'end' in it.value ? it.value.end : undefined;
                const last = Array.isArray(end) ? end[end.length - 1] : undefined;
                if ((last === null || last === void 0 ? void 0 : last.type) === 'comment')
                    end === null || end === void 0 ? void 0 : end.push(this.sourceToken);
                else
                    map.items.push({ start: [this.sourceToken] });
            }
            else if (it.sep)
                it.sep.push(this.sourceToken);
            else
                it.start.push(this.sourceToken);
            return;
        case 'space':
        case 'comment':
            if (it.value)
                map.items.push({ start: [this.sourceToken] });
            else if (it.sep)
                it.sep.push(this.sourceToken);
            else {
                if (this.atIndentedComment(it.start, map.indent)) {
                    // Comment indented deeper than the map belongs to the
                    // previous item's value; fold this item back into it.
                    const prev = map.items[map.items.length - 2];
                    const end = (_a = prev === null || prev === void 0 ? void 0 : prev.value) === null || _a === void 0 ? void 0 : _a.end;
                    if (Array.isArray(end)) {
                        Array.prototype.push.apply(end, it.start);
                        end.push(this.sourceToken);
                        map.items.pop();
                        return;
                    }
                }
                it.start.push(this.sourceToken);
            }
            return;
    }
    if (this.indent >= map.indent) {
        // True when this token begins a new map entry at the same indent level.
        const atNextItem = !this.onKeyLine &&
            this.indent === map.indent &&
            (it.sep || includesNonEmpty(it.start));
        switch (this.type) {
            case 'anchor':
            case 'tag':
                if (atNextItem || it.value) {
                    map.items.push({ start: [this.sourceToken] });
                    this.onKeyLine = true;
                }
                else if (it.sep)
                    it.sep.push(this.sourceToken);
                else
                    it.start.push(this.sourceToken);
                return;
            case 'explicit-key-ind':
                // '?' indicator: attach, start a new item, or (if nested) a new map.
                if (!it.sep && !includesToken(it.start, 'explicit-key-ind'))
                    it.start.push(this.sourceToken);
                else if (atNextItem || it.value)
                    map.items.push({ start: [this.sourceToken] });
                else
                    this.stack.push({
                        type: 'block-map',
                        offset: this.offset,
                        indent: this.indent,
                        items: [{ start: [this.sourceToken] }]
                    });
                this.onKeyLine = true;
                return;
            case 'map-value-ind':
                // ':' indicator — several placements depending on current item state.
                if (!it.sep)
                    Object.assign(it, { key: null, sep: [this.sourceToken] });
                else if (it.value ||
                    (atNextItem && !includesToken(it.start, 'explicit-key-ind')))
                    map.items.push({ start: [], key: null, sep: [this.sourceToken] });
                else if (includesToken(it.sep, 'map-value-ind'))
                    // Second ':' on one entry starts a nested block-map.
                    this.stack.push({
                        type: 'block-map',
                        offset: this.offset,
                        indent: this.indent,
                        items: [{ start: [], key: null, sep: [this.sourceToken] }]
                    });
                else if (includesToken(it.start, 'explicit-key-ind') &&
                    isFlowToken(it.key) &&
                    !includesToken(it.sep, 'newline')) {
                    // '? key : value' on one line: move key into a nested map.
                    const start = getFirstKeyStartProps(it.start);
                    const key = it.key;
                    const sep = it.sep;
                    sep.push(this.sourceToken);
                    // @ts-ignore type guard is wrong here
                    delete it.key, delete it.sep;
                    this.stack.push({
                        type: 'block-map',
                        offset: this.offset,
                        indent: this.indent,
                        items: [{ start, key, sep }]
                    });
                }
                else
                    it.sep.push(this.sourceToken);
                this.onKeyLine = true;
                return;
            case 'alias':
            case 'scalar':
            case 'single-quoted-scalar':
            case 'double-quoted-scalar': {
                // A flow scalar is either this entry's key or a value to descend into.
                const fs = this.flowScalar(this.type);
                if (atNextItem || it.value) {
                    map.items.push({ start: [], key: fs, sep: [] });
                    this.onKeyLine = true;
                }
                else if (it.sep) {
                    this.stack.push(fs);
                }
                else {
                    Object.assign(it, { key: fs, sep: [] });
                    this.onKeyLine = true;
                }
                return;
            }
            default: {
                // Any other token may open a nested block value (seq, map, block scalar...).
                const bv = this.startBlockValue(map);
                if (bv) {
                    if (atNextItem &&
                        bv.type !== 'block-seq' &&
                        includesToken(it.start, 'explicit-key-ind'))
                        map.items.push({ start: [] });
                    this.stack.push(bv);
                    return;
                }
            }
        }
    }
    // Token does not belong to this map: close it and re-dispatch the token.
    yield* this.pop();
    yield* this.step();
}
|
||
|
/**
 * Feed the current lexer token into the block-sequence CST node on top of
 * the parser stack, mutating `seq.items` in place.
 *
 * Generator: falls through to `this.pop()` / `this.step()` when the token
 * cannot belong to this sequence (wrong indent, or a non-sequence token).
 */
*blockSequence(seq) {
    var _a;
    const it = seq.items[seq.items.length - 1];
    switch (this.type) {
        case 'newline':
            if (it.value) {
                // Keep the newline with a trailing comment, else open a new item.
                const end = 'end' in it.value ? it.value.end : undefined;
                const last = Array.isArray(end) ? end[end.length - 1] : undefined;
                if ((last === null || last === void 0 ? void 0 : last.type) === 'comment')
                    end === null || end === void 0 ? void 0 : end.push(this.sourceToken);
                else
                    seq.items.push({ start: [this.sourceToken] });
            }
            else
                it.start.push(this.sourceToken);
            return;
        case 'space':
        case 'comment':
            if (it.value)
                seq.items.push({ start: [this.sourceToken] });
            else {
                if (this.atIndentedComment(it.start, seq.indent)) {
                    // Deeply indented comment belongs to the previous item's
                    // value; fold the current (comment-only) item back into it.
                    const prev = seq.items[seq.items.length - 2];
                    const end = (_a = prev === null || prev === void 0 ? void 0 : prev.value) === null || _a === void 0 ? void 0 : _a.end;
                    if (Array.isArray(end)) {
                        Array.prototype.push.apply(end, it.start);
                        end.push(this.sourceToken);
                        seq.items.pop();
                        return;
                    }
                }
                it.start.push(this.sourceToken);
            }
            return;
        case 'anchor':
        case 'tag':
            // Node properties only attach when indented past the sequence marker.
            if (it.value || this.indent <= seq.indent)
                break;
            it.start.push(this.sourceToken);
            return;
        case 'seq-item-ind':
            // '-' marker: must sit exactly at the sequence's indent.
            if (this.indent !== seq.indent)
                break;
            if (it.value || includesToken(it.start, 'seq-item-ind'))
                seq.items.push({ start: [this.sourceToken] });
            else
                it.start.push(this.sourceToken);
            return;
    }
    if (this.indent > seq.indent) {
        // More-indented content starts this item's (nested) value.
        const bv = this.startBlockValue(seq);
        if (bv) {
            this.stack.push(bv);
            return;
        }
    }
    // Token does not belong to this sequence: close it and re-dispatch.
    yield* this.pop();
    yield* this.step();
}
|
||
|
/**
 * Feed the current lexer token into the flow-collection CST node
 * (`{...}` / `[...]`) on top of the parser stack.
 *
 * Before `fc.end` is populated the collection is still open and tokens are
 * distributed into `fc.items`; afterwards, tokens decide how the closed
 * collection relates to its parent (e.g. becoming a block-map key).
 */
*flowCollection(fc) {
    const it = fc.items[fc.items.length - 1];
    if (this.type === 'flow-error-end') {
        // Unwind every nested flow collection after an unrecoverable flow error.
        let top;
        do {
            yield* this.pop();
            top = this.peek(1);
        } while (top && top.type === 'flow-collection');
    }
    else if (fc.end.length === 0) {
        // Collection still open: route the token into the current item.
        switch (this.type) {
            case 'comma':
            case 'explicit-key-ind':
                if (!it || it.sep)
                    fc.items.push({ start: [this.sourceToken] });
                else
                    it.start.push(this.sourceToken);
                return;
            case 'map-value-ind':
                if (!it || it.value)
                    fc.items.push({ start: [], key: null, sep: [this.sourceToken] });
                else if (it.sep)
                    it.sep.push(this.sourceToken);
                else
                    Object.assign(it, { key: null, sep: [this.sourceToken] });
                return;
            case 'space':
            case 'comment':
            case 'newline':
            case 'anchor':
            case 'tag':
                if (!it || it.value)
                    fc.items.push({ start: [this.sourceToken] });
                else if (it.sep)
                    it.sep.push(this.sourceToken);
                else
                    it.start.push(this.sourceToken);
                return;
            case 'alias':
            case 'scalar':
            case 'single-quoted-scalar':
            case 'double-quoted-scalar': {
                // A flow scalar is this item's key, or (after ':') its value.
                const fs = this.flowScalar(this.type);
                if (!it || it.value)
                    fc.items.push({ start: [], key: fs, sep: [] });
                else if (it.sep)
                    this.stack.push(fs);
                else
                    Object.assign(it, { key: fs, sep: [] });
                return;
            }
            case 'flow-map-end':
            case 'flow-seq-end':
                // Closing bracket: mark the collection as ended.
                fc.end.push(this.sourceToken);
                return;
        }
        const bv = this.startBlockValue(fc);
        /* istanbul ignore else should not happen */
        if (bv)
            this.stack.push(bv);
        else {
            yield* this.pop();
            yield* this.step();
        }
    }
    else {
        // Collection already closed: decide how it relates to its parent.
        const parent = this.peek(2);
        if (parent.type === 'block-map' &&
            (this.type === 'map-value-ind' ||
                (this.type === 'newline' &&
                    !parent.items[parent.items.length - 1].sep))) {
            yield* this.pop();
            yield* this.step();
        }
        else if (this.type === 'map-value-ind' &&
            parent.type !== 'flow-collection') {
            // '[a]: v' style: the closed flow collection becomes a block-map key.
            const prev = getPrevProps(parent);
            const start = getFirstKeyStartProps(prev);
            fixFlowSeqItems(fc);
            const sep = fc.end.splice(1, fc.end.length);
            sep.push(this.sourceToken);
            const map = {
                type: 'block-map',
                offset: fc.offset,
                indent: fc.indent,
                items: [{ start, key: fc, sep }]
            };
            this.onKeyLine = true;
            // Replace the collection on the stack with the wrapping map.
            this.stack[this.stack.length - 1] = map;
        }
        else {
            yield* this.lineEnd(fc);
        }
    }
}
|
||
|
flowScalar(type) {
|
||
|
if (this.onNewLine) {
|
||
|
let nl = this.source.indexOf('\n') + 1;
|
||
|
while (nl !== 0) {
|
||
|
this.onNewLine(this.offset + nl);
|
||
|
nl = this.source.indexOf('\n', nl) + 1;
|
||
|
}
|
||
|
}
|
||
|
return {
|
||
|
type,
|
||
|
offset: this.offset,
|
||
|
indent: this.indent,
|
||
|
source: this.source
|
||
|
};
|
||
|
}
|
||
|
/**
 * If the current token can open a new block-level value, build and return
 * the corresponding fresh CST node (for the caller to push on the stack);
 * otherwise return null.
 *
 * @param parent - the enclosing CST node; only consulted for its trailing
 *   properties when starting a block-map via '?' or ':'.
 */
startBlockValue(parent) {
    switch (this.type) {
        case 'alias':
        case 'scalar':
        case 'single-quoted-scalar':
        case 'double-quoted-scalar':
            return this.flowScalar(this.type);
        case 'block-scalar-header':
            // '|' or '>' header: body source is filled in later.
            return {
                type: 'block-scalar',
                offset: this.offset,
                indent: this.indent,
                props: [this.sourceToken],
                source: ''
            };
        case 'flow-map-start':
        case 'flow-seq-start':
            return {
                type: 'flow-collection',
                offset: this.offset,
                indent: this.indent,
                start: this.sourceToken,
                items: [],
                end: []
            };
        case 'seq-item-ind':
            return {
                type: 'block-seq',
                offset: this.offset,
                indent: this.indent,
                items: [{ start: [this.sourceToken] }]
            };
        case 'explicit-key-ind': {
            // '?' starts a block map; adopt any pending props from the parent.
            this.onKeyLine = true;
            const prev = getPrevProps(parent);
            const start = getFirstKeyStartProps(prev);
            start.push(this.sourceToken);
            return {
                type: 'block-map',
                offset: this.offset,
                indent: this.indent,
                items: [{ start }]
            };
        }
        case 'map-value-ind': {
            // ':' starts a block map with an empty (null) key.
            this.onKeyLine = true;
            const prev = getPrevProps(parent);
            const start = getFirstKeyStartProps(prev);
            return {
                type: 'block-map',
                offset: this.offset,
                indent: this.indent,
                items: [{ start, key: null, sep: [this.sourceToken] }]
            };
        }
    }
    return null;
}
|
||
|
atIndentedComment(start, indent) {
|
||
|
if (this.type !== 'comment')
|
||
|
return false;
|
||
|
if (this.indent <= indent)
|
||
|
return false;
|
||
|
return start.every(st => st.type === 'newline' || st.type === 'space');
|
||
|
}
|
||
|
/**
 * Collect trailing tokens after a '...' document-end marker into
 * `docEnd.end`; a newline finishes the marker and pops it off the stack.
 */
*documentEnd(docEnd) {
    if (this.type !== 'doc-mode') {
        if (docEnd.end)
            docEnd.end.push(this.sourceToken);
        else
            docEnd.end = [this.sourceToken];
        if (this.type === 'newline')
            yield* this.pop();
    }
}
|
||
|
/**
 * Handle a token arriving after `token` is semantically complete: structural
 * tokens pop the node and re-dispatch; everything else (including errors)
 * is appended to `token.end`, with a newline also popping the node.
 */
*lineEnd(token) {
    switch (this.type) {
        case 'comma':
        case 'doc-start':
        case 'doc-end':
        case 'flow-seq-end':
        case 'flow-map-end':
        case 'map-value-ind':
            yield* this.pop();
            yield* this.step();
            break;
        case 'newline':
            this.onKeyLine = false;
        // fallthrough
        case 'space':
        case 'comment':
        default:
            // all other values are errors
            if (token.end)
                token.end.push(this.sourceToken);
            else
                token.end = [this.sourceToken];
            if (this.type === 'newline')
                yield* this.pop();
    }
}
|
||
|
}
|
||
|
|
||
|
/**
 * Normalize parser options shared by the parse entry points.
 *
 * @param {object} options - user-supplied options
 * @returns {{lineCounter, prettyErrors: boolean}} pretty errors default to on,
 *   and (when on) a LineCounter is created unless the caller provided one.
 */
function parseOptions(options) {
    const prettyErrors = options.prettyErrors !== false;
    let lineCounter = options.lineCounter;
    if (!lineCounter) {
        lineCounter = prettyErrors ? new LineCounter() : null;
    }
    return { lineCounter, prettyErrors };
}
|
||
|
/**
 * Parse an input string into a single YAML.Document.
 *
 * If the source contains more than one document, only the first is returned
 * and a MULTIPLE_DOCS error is recorded on it (unless logLevel is 'silent').
 *
 * @param {string} source - the YAML text to parse
 * @param {object} [options] - parse options (see parseOptions)
 * @returns the composed document
 */
function parseDocument(source, options = {}) {
    const { lineCounter, prettyErrors } = parseOptions(options);
    const parser = new Parser(lineCounter?.addNewLine);
    const composer = new Composer(options);
    // `doc` is always set by compose.end(true) at the very latest
    let doc = null;
    for (const next of composer.compose(parser.parse(source), true, source.length)) {
        if (doc === null) {
            doc = next;
            continue;
        }
        if (doc.options.logLevel !== 'silent') {
            doc.errors.push(new YAMLParseError(next.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()'));
            break;
        }
    }
    if (prettyErrors && lineCounter) {
        doc.errors.forEach(prettifyError(source, lineCounter));
        doc.warnings.forEach(prettifyError(source, lineCounter));
    }
    return doc;
}
|
||
|
|
||
|
/**
 * One vault note that contains occurrences of the tag being renamed,
 * plus the logic to rewrite those occurrences in the note's body and
 * YAML front matter.
 */
class File {

    /**
     * @param app - the Obsidian application instance
     * @param {string} filename - vault-relative path of the note
     * @param tagPositions - metadata-cache tag entries ({position, tag}),
     *   expected in reverse document order so earlier offsets stay valid
     * @param hasFrontMatter - truthy if front-matter tags/aliases need rewriting
     */
    constructor(app, filename, tagPositions, hasFrontMatter) {
        this.app = app;
        this.filename = filename;
        this.basename = filename.split("/").pop();
        this.tagPositions = tagPositions;
        this.hasFrontMatter = !!hasFrontMatter;
    }

    /**
     * Apply a tag replacement to this file, saving it if anything changed.
     * @param {Replacement} replace
     * @returns {Promise<boolean|undefined>} true if the file was modified
     */
    async renamed(replace) {
        const file = this.app.vault.getAbstractFileByPath(this.filename);
        const original = await this.app.vault.read(file);
        let text = original;

        for (const { position: { start, end }, tag } of this.tagPositions) {
            if (text.slice(start.offset, end.offset) !== tag) {
                // Cached position no longer matches the file: bail out
                // rather than risk corrupting it.
                const msg = `File ${this.filename} has changed; skipping`;
                new obsidian.Notice(msg);
                console.error(msg);
                console.debug(text.slice(start.offset, end.offset), tag);
                return;
            }
            text = replace.inString(text, start.offset);
        }

        if (this.hasFrontMatter)
            text = this.replaceInFrontMatter(text, replace);

        if (text !== original) {
            await this.app.vault.modify(file, text);
            return true;
        }
    }

    /**
     * Rewrite tag/alias fields inside the YAML front matter.
     * Always returns the (possibly unchanged) full document text.
     * @param {Replacement} replace
     */
    replaceInFrontMatter(text, replace) {
        const [empty, frontMatter] = text.split(/^---\r?$\n?/m, 2);

        // Check for valid, non-empty, properly terminated front matter.
        // (frontMatter is undefined when there is no closing '---' line.)
        if (!frontMatter || empty.trim() !== "" || !frontMatter.trim() || !frontMatter.endsWith("\n"))
            return text;

        const parsed = parseDocument(frontMatter, {keepSourceTokens: true});
        if (parsed.errors.length) {
            const error = `YAML issue with ${this.filename}: ${parsed.errors[0]}`;
            console.error(error); new obsidian.Notice(error + "; skipping frontmatter");
            // BUGFIX: previously returned undefined here; the caller assigns
            // our result to `text` and would then write `undefined` back to
            // the file.  Skip the front matter but keep the text intact.
            return text;
        }

        let changed = false, json = parsed.toJSON();

        // Update both the YAML source token and the in-memory node value.
        function setInNode(node, value, afterKey=false) {
            setScalarValue(node.srcToken, value, {afterKey});
            changed = true;
            node.value = value;
        }

        // Rewrite tags within one front-matter field (string or list form).
        function processField(prop, isAlias) {
            const node = parsed.get(prop, true);
            if (!node) return;
            const field = json[prop];
            if (!field || !field.length) return;
            if (typeof field === "string") {
                // Split so that the separators survive as array entries and
                // can be re-joined unchanged around the replaced tags.
                const parts = field.split(isAlias ? /(^\s+|\s*,\s*|\s+$)/ : /([\s,]+)/);
                const after = replace.inArray(parts, true, isAlias).join("");
                if (field != after) setInNode(node, after, true);
            } else if (Array.isArray(field)) {
                replace.inArray(field, false, isAlias).forEach((v, i) => {
                    if (field[i] !== v) setInNode(node.get(i, true), v);
                });
            }
        }

        for (const {key: {value:prop}} of parsed.contents.items) {
            if (/^tags?$/i.test(prop)) {
                processField(prop, false);
            } else if (/^alias(es)?$/i.test(prop)) {
                processField(prop, true);
            }
        }

        // Use a replacer function so '$'-sequences in the regenerated YAML
        // cannot be misinterpreted as String.replace() substitution patterns.
        return changed ? text.replace(frontMatter, () => stringify(parsed.contents.srcToken)) : text;
    }
}
|
||
|
|
||
|
/**
 * Interactively rename `tagName` (and all of its sub-tags) across the vault.
 *
 * Prompts for a new name, warns (and optionally aborts) when the rename
 * would merge existing tags, then rewrites every matching file with
 * progress feedback.
 *
 * @param app - the Obsidian application instance
 * @param {string} tagName - the tag to rename (without leading '#')
 */
async function renameTag(app, tagName) {
    const newName = await promptForNewName(tagName);
    if (newName === false) return; // aborted

    if (!newName || newName === tagName) {
        return new obsidian.Notice("Unchanged or empty tag: No changes made.");
    }

    const oldTag = new Tag(tagName);
    const newTag = new Tag(newName);
    const replace = new Replacement(oldTag, newTag);
    // find longest clash first
    const clashing = replace.willMergeTags(allTags(app).reverse());
    const shouldAbort = clashing && await shouldAbortDueToClash(clashing, oldTag, newTag);
    if (shouldAbort) return;

    const targets = await findTargets(app, oldTag);
    if (!targets) return;

    const progress = new Progress(`Renaming to #${newName}/*`, "Processing files...");
    let renamed = 0;
    await progress.forEach(targets, async (target) => {
        progress.message = "Processing " + target.basename;
        if (await target.renamed(replace)) renamed++;
    });

    return new obsidian.Notice(`Operation ${progress.aborted ? "cancelled" : "complete"}: ${renamed} file(s) updated`);
}
|
||
|
|
||
|
/**
 * List every tag name known to the metadata cache.
 * @param app - the Obsidian application instance
 * @returns {string[]} tag names (the keys of metadataCache.getTags())
 */
function allTags(app) {
    const tagCounts = app.metadataCache.getTags();
    return Object.keys(tagCounts);
}
|
||
|
|
||
|
/**
 * Scan every cached file for occurrences of `tag` (in the body, the
 * front-matter tags, or the front-matter aliases), with a progress dialog.
 *
 * @param app - the Obsidian application instance
 * @param {Tag} tag - the tag to match (including its sub-tags)
 * @returns {Promise<File[]|undefined>} matching files, or undefined if
 *   the user aborted the scan
 */
async function findTargets(app, tag) {
    const targets = [];
    const progress = new Progress(`Searching for ${tag}/*`, "Matching files...");
    await progress.forEach(
        app.metadataCache.getCachedFiles(),
        filename => {
            const cache = app.metadataCache.getCache(filename) || {};
            const frontmatter = cache.frontmatter;
            // last positions first, so offsets stay valid while rewriting
            const tags = (cache.tags || []).filter(t => t.tag && tag.matches(t.tag)).reverse();
            const fmtags = (obsidian.parseFrontMatterTags(frontmatter) || []).filter(tag.matches);
            const aliasTags = (obsidian.parseFrontMatterAliases(frontmatter) || [])
                .filter(Tag.isTag)
                .filter(tag.matches);
            if (tags.length || fmtags.length || aliasTags.length) {
                targets.push(new File(app, filename, tags, fmtags.length + aliasTags.length));
            }
        }
    );
    if (progress.aborted) return;
    return targets;
}
|
||
|
|
||
|
/**
 * Ask the user for a new tag name via a validated input dialog.
 *
 * @param {string} tagName - the current tag name (pre-filled as the default)
 * @returns {Promise<string|false>} the entered name, or false if cancelled
 */
async function promptForNewName(tagName) {
    // Pattern of characters allowed in an Obsidian tag name (negated set of
    // punctuation, whitespace, and special Unicode punctuation ranges).
    const TAG_NAME_PATTERN = "[^\u2000-\u206F\u2E00-\u2E7F'!\"#$%&()*+,.:;<=>?@^`{|}~\\[\\]\\\\\\s]+";
    try {
        return await validatedInput(
            `Renaming #${tagName} (and any sub-tags)`,
            "Enter new name (must be a valid Obsidian tag):\n",
            tagName,
            TAG_NAME_PATTERN,
            "Obsidian tag name"
        );
    }
    catch (e) {
        return false; // user cancelled
    }
}
|
||
|
|
||
|
/**
 * Warn the user that the rename will merge tags, and ask for confirmation.
 *
 * @param {[Tag, Tag]} - the clashing pair found by Replacement.willMergeTags
 * @param {Tag} oldTag - the tag being renamed
 * @param {Tag} newTag - the requested new name
 * @returns {Promise<true|undefined>} true if the user declined (i.e. abort);
 *   undefined when the user confirmed (confirm() resolves, rejects on cancel)
 */
async function shouldAbortDueToClash([origin, clash], oldTag, newTag) {
    try {
        await confirm(
            "WARNING: No Undo!",
            `Renaming <code>${oldTag}</code> to <code>${newTag}</code> will merge ${
(origin.canonical === oldTag.canonical) ?
`these tags` : `multiple tags
into existing tags (such as <code>${origin}</code>
merging with <code>${clash}</code>)`
}.

This <b>cannot</b> be undone. Do you wish to proceed?`
        );
    } catch(e) {
        return true;
    }
}
|
||
|
|
||
|
/**
 * Monkey-patch one or more methods of `obj`.
 *
 * @param obj - object whose methods will be wrapped
 * @param factories - map of method name -> (original) => wrapper
 * @returns {Function} an uninstaller that removes all installed wrappers
 */
function around(obj, factories) {
    const removers = Object.keys(factories).map(
        (key) => around1(obj, key, factories[key])
    );
    if (removers.length === 1) return removers[0];
    return function () {
        for (const remove of removers) remove();
    };
}
|
||
|
/**
 * Install a single method wrapper on `obj[method]`, returning an
 * uninstaller.  Handles safe removal even when other patches have been
 * layered on top after this one (the wrapper then degrades to a pass-through
 * and unhooks itself on the next call once it is back on top).
 */
function around1(obj, method, createWrapper) {
    const original = obj[method], hadOwn = obj.hasOwnProperty(method);
    let current = createWrapper(original);
    // Let our wrapper inherit static props from the wrapping method,
    // and the wrapping method, props from the original method
    if (original)
        Object.setPrototypeOf(current, original);
    Object.setPrototypeOf(wrapper, current);
    obj[method] = wrapper;
    // Return a callback to allow safe removal
    return remove;
    function wrapper(...args) {
        // If we have been deactivated and are no longer wrapped, remove ourselves
        if (current === original && obj[method] === wrapper)
            remove();
        return current.apply(this, args);
    }
    function remove() {
        // If no other patches, just do a direct removal
        if (obj[method] === wrapper) {
            if (hadOwn)
                obj[method] = original;
            else
                delete obj[method];
        }
        if (current === original)
            return;
        // Else pass future calls through, and remove wrapper from the prototype chain
        current = original;
        Object.setPrototypeOf(wrapper, original || Function);
    }
}
|
||
|
|
||
|
// Hover-link source id for tags shown in the tag pane (registered with
// workspace.registerHoverLinkSource and passed to TagPageUIHandler below).
const tagHoverMain = "tag-wrangler:tag-pane";
|
||
|
|
||
|
/**
 * Register a delegated DOM event handler and return an unsubscriber.
 *
 * @param el - element (or document) exposing Obsidian's .on/.off helpers
 * @param {string} event - event name (e.g. "click")
 * @param {string} selector - CSS selector for event delegation
 * @param {Function} callback - handler invoked as (event, matchedElement)
 * @param options - listener options (e.g. {capture: true})
 * @returns {Function} call to remove the listener again
 */
function onElement(el, event, selector, callback, options) {
    el.on(event, selector, callback, options);
    return function unregister() {
        el.off(event, selector, callback, options);
    };
}
|
||
|
|
||
|
/**
 * Main plugin class: wires up tag-pane/editor/preview hover+click handlers,
 * tracks "tag pages" (notes whose front-matter aliases are tags), and
 * provides the tag context menu (rename, search, tag-page actions, etc).
 */
class TagWrangler extends obsidian.Plugin {
    // file -> array of tag-aliases declared in that file's front matter
    pageAliases = new Map();
    // canonical tag name -> Set of files serving as its tag page
    // (the Set additionally carries a `.tag` property with the display form)
    tagPages = new Map();

    /** Return the first tag page registered for `tag`, if any. */
    tagPage(tag) {
        return Array.from(this.tagPages.get(Tag.canonical(tag)) || "")[0]
    }

    /** Open `file` in a leaf; new pages open in source mode with rename active. */
    openTagPage(file, isNew, newLeaf) {
        const openState = {
            eState: isNew ? {rename: "all"} : {focus: true}, // Rename new page, focus existing
            ...(isNew ? {state: {mode: "source"}} : {}) // and set source mode for new page
        };
        return this.app.workspace.getLeaf(newLeaf).openFile(file, openState);
    }

    /** Create a new tag-page note aliased to `tagName`, then open it. */
    async createTagPage(tagName, newLeaf) {
        const baseName = new Tag(tagName).name.split("/").join(" ");
        const folder = this.app.fileManager.getNewFileParent(this.app.workspace.getActiveFile()?.path || "");
        const path = this.app.vault.getAvailablePath(folder.getParentPrefix()+baseName, "md");
        this.openTagPage(await this.app.vault.create(path, [
            "---",
            `Aliases: [ ${JSON.stringify(Tag.toTag(tagName))} ]`,
            "---",
            ""
        ].join("\n")), true, newLeaf);
    }

    async onload(){
        // Context menu for tags in the tag pane.
        this.register(
            onElement(document, "contextmenu", ".tag-pane-tag", this.onMenu.bind(this), {capture: true})
        );

        this.app.workspace.registerHoverLinkSource(tagHoverMain, {display: 'Tag pane', defaultMod: true});

        this.addChild(
            // Tags in the tag pane
            new TagPageUIHandler(this, {
                hoverSource: tagHoverMain, selector: ".tag-pane-tag", container: ".tag-container",
                toTag(el) { return el.find(".tag-pane-tag-text")?.textContent; }
            })
        );

        this.addChild(
            // Reading mode / tag links
            new TagPageUIHandler(this, {
                hoverSource: "preview", selector: 'a.tag[href^="#"]',
                container: ".markdown-preview-view, .markdown-embed, .workspace-leaf-content",
                toTag(el) { return el.getAttribute("href"); }
            })
        );

        this.addChild(
            // Edit mode
            new TagPageUIHandler(this, {
                hoverSource: "editor", selector: "span.cm-hashtag",
                container: ".markdown-source-view",
                toTag(el) {
                    // Multiple cm-hashtag elements can be side by side: join them all together:
                    let tagName = el.textContent;
                    for (let t=el.previousElementSibling; t?.matches("span.cm-hashtag"); t = t.previousElementSibling) {
                        tagName = t.textContent + tagName;
                    }
                    for (let t=el.nextElementSibling; t?.matches("span.cm-hashtag"); t = t.nextElementSibling) {
                        tagName += t.textContent;
                    }
                    return tagName;
                }
            })
        );

        // Track Tag Pages
        const metaCache = this.app.metadataCache;
        const plugin = this;

        // Patch getTags() so tag pages show up in the tag pane even when no
        // note currently uses the tag (count 0).
        this.register(around(metaCache, {
            getTags(old) {
                return function getTags() {
                    const tags = old.call(this);
                    const names = new Set(Object.keys(tags).map(t => t.toLowerCase()));
                    for (const t of plugin.tagPages.keys()) {
                        if (!names.has(t)) tags[plugin.tagPages.get(t).tag] = 0;
                    }
                    return tags;
                }
            }
        }));

        this.app.workspace.onLayoutReady(() => {
            // Seed the tag-page index from the existing cache, then keep it
            // updated on metadata changes and file deletions.
            metaCache.getCachedFiles().forEach(filename => {
                const fm = metaCache.getCache(filename)?.frontmatter;
                if (fm && obsidian.parseFrontMatterAliases(fm)?.filter(Tag.isTag)) this.updatePage(
                    this.app.vault.getAbstractFileByPath(filename), fm
                );
            });
            this.registerEvent(metaCache.on("changed", (file, data, cache) => this.updatePage(file, cache?.frontmatter)));
            this.registerEvent(this.app.vault.on("delete", file => this.updatePage(file)));
            app.workspace.getLeavesOfType("tag").forEach(leaf => {leaf?.view?.requestUpdateTags?.();});
        });
    }

    /**
     * Re-index the tag aliases of `file`: drop stale entries from tagPages,
     * then (re)register the aliases currently present in `frontmatter`.
     * Called with no frontmatter on delete to clear the file's entries.
     */
    updatePage(file, frontmatter) {
        const tags = obsidian.parseFrontMatterAliases(frontmatter)?.filter(Tag.isTag) || [];
        if (this.pageAliases.has(file)) {
            const oldTags = new Set(tags || []);
            for (const tag of this.pageAliases.get(file)) {
                if (oldTags.has(tag)) continue; // don't bother deleting what we'll just put back
                const key = Tag.canonical(tag);
                const tp = this.tagPages.get(key);
                if (tp) {
                    tp.delete(file);
                    if (!tp.size) this.tagPages.delete(key);
                }
            }
            if (!tags.length) this.pageAliases.delete(file);
        }
        if (tags.length) {
            this.pageAliases.set(file, tags);
            for (const tag of tags) {
                const key = Tag.canonical(tag);
                if (this.tagPages.has(key)) this.tagPages.get(key).add(file);
                else {
                    const tagSet = new Set([file]);
                    tagSet.tag = Tag.toTag(tag);
                    this.tagPages.set(key, tagSet);
                }
            }
        }
    }

    /** Build and show the context menu for a tag-pane tag element. */
    onMenu(e, tagEl) {
        if (!e.obsidian_contextmenu) {
            e.obsidian_contextmenu = new obsidian.Menu(this.app);
            // NOTE: `menu` is declared below; setImmediate defers the callback
            // until after this function body has run, so it is initialized
            // by the time the menu is shown.
            setImmediate(() => menu.showAtPosition({x: e.pageX, y: e.pageY}));
        }

        const
            tagName = tagEl.find(".tag-pane-tag-text").textContent,
            tagPage = this.tagPage(tagName),
            isHierarchy = tagEl.parentElement.parentElement.find(".collapse-icon"),
            searchPlugin = this.app.internalPlugins.getPluginById("global-search"),
            search = searchPlugin && searchPlugin.instance,
            query = search && search.getGlobalSearchQuery(),
            random = this.app.plugins.plugins["smart-random-note"],
            menu = e.obsidian_contextmenu.addItem(item("pencil", "Rename #"+tagName, () => this.rename(tagName)));

        // Allow Escape to close the menu even when focus is elsewhere.
        menu.register(
            onElement(document, "keydown", "*", e => {
                if (e.key==="Escape") {
                    e.preventDefault();
                    e.stopPropagation();
                    menu.hide();
                }
            }, {capture: true})
        );

        menu.addSeparator();
        if (tagPage) {
            menu.addItem(
                item("popup-open", "Open tag page", (e) => this.openTagPage(tagPage, false, obsidian.Keymap.isModEvent(e)))
            );
        } else {
            menu.addItem(
                item("create-new", "Create tag page", (e) => this.createTagPage(tagName, obsidian.Keymap.isModEvent(e)))
            );
        }

        // Global-search integration (only when the core plugin is enabled).
        if (search) {
            menu.addSeparator().addItem(
                item("magnifying-glass", "New search for #"+tagName, () => search.openGlobalSearch("tag:" + tagName))
            );
            if (query) {
                menu.addItem(
                    item("sheets-in-box", "Require #"+tagName+" in search" , () => search.openGlobalSearch(query+" tag:" + tagName))
                );
            }
            menu.addItem(
                item("crossed-star" , "Exclude #"+tagName+" from search", () => search.openGlobalSearch(query+" -tag:" + tagName))
            );
        }

        // Optional integration with the smart-random-note community plugin.
        if (random) {
            menu.addSeparator().addItem(
                item("dice", "Open random note", async () => {
                    const targets = await findTargets(this.app, new Tag(tagName));
                    random.openRandomNote(targets.map(f=> this.app.vault.getAbstractFileByPath(f.filename)));
                })
            );
        }

        // Let other plugins extend the menu.
        this.app.workspace.trigger("tag-wrangler:contextmenu", menu, tagName, {search, query, isHierarchy, tagPage});

        if (isHierarchy) {
            const
                tagParent = tagName.split("/").slice(0, -1).join("/"),
                tagView = this.leafView(tagEl.matchParent(".workspace-leaf")),
                tagContainer = tagParent ? tagView.tagDoms["#" + tagParent.toLowerCase()]: tagView.root
            ;
            // Collapse/expand every sibling tag at this hierarchy level.
            function toggle(collapse) {
                for(const tag of tagContainer.children) tag.setCollapsed(collapse);
            }
            menu.addSeparator()
                .addItem(item("vertical-three-dots", "Collapse tags at this level", () => toggle(true )))
                .addItem(item("expand-vertically" , "Expand tags at this level" , () => toggle(false)));
        }
    }

    /** Find the workspace view whose leaf owns `containerEl`. */
    leafView(containerEl) {
        let view;
        this.app.workspace.iterateAllLeaves((leaf) => {
            if (leaf.containerEl === containerEl) { view = leaf.view; return true; }
        });
        return view;
    }


    /**
     * Run the rename workflow; a temporary key scope suppresses global
     * hotkeys while the dialogs are open.
     */
    async rename(tagName) {
        const scope = new obsidian.Scope;
        this.app.keymap.pushScope(scope);
        try { await renameTag(this.app, tagName); }
        catch (e) { console.error(e); new obsidian.Notice("error: " + e); }
        this.app.keymap.popScope(scope);
    }

}
|
||
|
|
||
|
/**
 * Build a configurator callback for Menu.addItem().
 *
 * @param {string} icon - icon name for the menu entry
 * @param {string} title - label for the menu entry
 * @param {Function} click - click handler
 * @returns {Function} callback that configures the MenuItem it receives
 */
function item(icon, title, click) {
    return function configure(menuItem) {
        return menuItem.setIcon(icon).setTitle(title).onClick(click);
    };
}
|
||
|
|
||
|
|
||
|
/**
 * UI glue for tag pages in one surface (tag pane, preview, or editor):
 * shows the hover preview for a tag's page, and opens the page on
 * alt-click (same pane) or mod/middle-click (new pane).
 *
 * opts: { selector, container, hoverSource, toTag(el) -> tag name }
 */
class TagPageUIHandler extends obsidian.Component {
    // Handle hovering and clicks-to-open for tag pages

    constructor(plugin, opts) {
        super();
        this.opts = opts;
        this.plugin = plugin;
    }

    onload() {
        const {selector, container, hoverSource, toTag} = this.opts;
        this.register(
            // Show tag page on hover
            onElement(document, "mouseover", selector, (event, targetEl) => {
                const tagName = toTag(targetEl), tp = tagName && this.plugin.tagPage(tagName);
                if (tp) this.plugin.app.workspace.trigger('hover-link', {
                    event, source: hoverSource, targetEl, linktext: tp.path,
                    hoverParent: targetEl.matchParent(container)
                });
            }, {capture: false})
        );
        this.register(
            // Open tag page w/alt click (current pane) or ctrl/cmd/middle click (new pane)
            onElement(document, "click", selector, (event, targetEl) => {
                const {altKey} = event;
                if (!obsidian.Keymap.isModEvent(event) && !altKey) return;
                const tagName = toTag(targetEl), tp = tagName && this.plugin.tagPage(tagName);
                if (tp) {
                    this.plugin.openTagPage(tp, false, !altKey);
                    // Swallow the event so the default tag search doesn't fire.
                    event.preventDefault();
                    event.stopPropagation();
                    return false;
                }
            }, {capture: true})
        );
    }
}
|
||
|
|
||
|
// CommonJS entry point: Obsidian loads the plugin class exported here.
module.exports = TagWrangler;
|
||
|
//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibWFpbi5qcyIsInNvdXJjZXMiOlsibm9kZV9tb2R1bGVzLy5wbnBtL2N1cnJpZnlANC4wLjAvbm9kZV9tb2R1bGVzL2N1cnJpZnkvbGliL2N1cnJpZnkuanMiLCJub2RlX21vZHVsZXMvLnBucG0vZnVsbHN0b3JlQDMuMC4wL25vZGVfbW9kdWxlcy9mdWxsc3RvcmUvbGliL2Z1bGxzdG9yZS5qcyIsIm5vZGVfbW9kdWxlcy8ucG5wbS9AY2xvdWRjbWQrY3JlYXRlLWVsZW1lbnRAMi4wLjIvbm9kZV9tb2R1bGVzL0BjbG91ZGNtZC9jcmVhdGUtZWxlbWVudC9saWIvY3JlYXRlLWVsZW1lbnQuanMiLCJub2RlX21vZHVsZXMvLnBucG0vc21hbGx0YWxrQDQuMC43L25vZGVfbW9kdWxlcy9zbWFsbHRhbGsvbGliL3NtYWxsdGFsay5qcyIsInNyYy9wcm9ncmVzcy5qcyIsInNyYy92YWxpZGF0aW9uLmpzIiwic3JjL1RhZy5qcyIsIm5vZGVfbW9kdWxlcy8ucG5wbS95YW1sQDIuMC4wLTEwL25vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9ub2Rlcy9Ob2RlLmpzIiwibm9kZV9tb2R1bGVzLy5wbnBtL3lhbWxAMi4wLjAtMTAvbm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L3Zpc2l0LmpzIiwibm9kZV9tb2R1bGVzLy5wbnBtL3lhbWxAMi4wLjAtMTAvbm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L2RvYy9kaXJlY3RpdmVzLmpzIiwibm9kZV9tb2R1bGVzLy5wbnBtL3lhbWxAMi4wLjAtMTAvbm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L2RvYy9hbmNob3JzLmpzIiwibm9kZV9tb2R1bGVzLy5wbnBtL3lhbWxAMi4wLjAtMTAvbm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L25vZGVzL0FsaWFzLmpzIiwibm9kZV9tb2R1bGVzLy5wbnBtL3lhbWxAMi4wLjAtMTAvbm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L25vZGVzL3RvSlMuanMiLCJub2RlX21vZHVsZXMvLnBucG0veWFtbEAyLjAuMC0xMC9ub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3Qvbm9kZXMvU2NhbGFyLmpzIiwibm9kZV9tb2R1bGVzLy5wbnBtL3lhbWxAMi4wLjAtMTAvbm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L2RvYy9jcmVhdGVOb2RlLmpzIiwibm9kZV9tb2R1bGVzLy5wbnBtL3lhbWxAMi4wLjAtMTAvbm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L25vZGVzL0NvbGxlY3Rpb24uanMiLCJub2RlX21vZHVsZXMvLnBucG0veWFtbEAyLjAuMC0xMC9ub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3Qvc3RyaW5naWZ5L3N0cmluZ2lmeUNvbW1lbnQuanMiLCJub2RlX21vZHVsZXMvLnBucG0veWFtbEAyLjAuMC0xMC9ub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3Qvc3RyaW5naWZ5L2ZvbGRGbG93TGluZXMuanMiLCJub2RlX21vZHVsZXMvLnBucG0veWFtbEAyLjAuMC0xMC9ub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3Qvc3RyaW5naWZ5L3N0cmluZ2lmeVN0cmluZy5qcyIsIm5vZGVf
bW9kdWxlcy8ucG5wbS95YW1sQDIuMC4wLTEwL25vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9zdHJpbmdpZnkvc3RyaW5naWZ5LmpzIiwibm9kZV9tb2R1bGVzLy5wbnBtL3lhbWxAMi4wLjAtMTAvbm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L3N0cmluZ2lmeS9zdHJpbmdpZnlQYWlyLmpzIiwibm9kZV9tb2R1bGVzLy5wbnBtL3lhbWxAMi4wLjAtMTAvbm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L2xvZy5qcyIsIm5vZGVfbW9kdWxlcy8ucG5wbS95YW1sQDIuMC4wLTEwL25vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9ub2Rlcy9hZGRQYWlyVG9KU01hcC5qcyIsIm5vZGVfbW9kdWxlcy8ucG5wbS95YW1sQDIuMC4wLTEwL25vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9ub2Rlcy9QYWlyLmpzIiwibm9kZV9tb2R1bGVzLy5wbnBtL3lhbWxAMi4wLjAtMTAvbm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L29wdGlvbnMuanMiLCJub2RlX21vZHVsZXMvLnBucG0veWFtbEAyLjAuMC0xMC9ub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3Qvc3RyaW5naWZ5L3N0cmluZ2lmeUNvbGxlY3Rpb24uanMiLCJub2RlX21vZHVsZXMvLnBucG0veWFtbEAyLjAuMC0xMC9ub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3Qvbm9kZXMvWUFNTE1hcC5qcyIsIm5vZGVfbW9kdWxlcy8ucG5wbS95YW1sQDIuMC4wLTEwL25vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9zY2hlbWEvY29tbW9uL21hcC5qcyIsIm5vZGVfbW9kdWxlcy8ucG5wbS95YW1sQDIuMC4wLTEwL25vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9ub2Rlcy9ZQU1MU2VxLmpzIiwibm9kZV9tb2R1bGVzLy5wbnBtL3lhbWxAMi4wLjAtMTAvbm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L3NjaGVtYS9jb21tb24vc2VxLmpzIiwibm9kZV9tb2R1bGVzLy5wbnBtL3lhbWxAMi4wLjAtMTAvbm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L3NjaGVtYS9jb21tb24vc3RyaW5nLmpzIiwibm9kZV9tb2R1bGVzLy5wbnBtL3lhbWxAMi4wLjAtMTAvbm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L3NjaGVtYS9jb21tb24vbnVsbC5qcyIsIm5vZGVfbW9kdWxlcy8ucG5wbS95YW1sQDIuMC4wLTEwL25vZGVfbW9kdWxlcy95YW1sL2Jyb3dzZXIvZGlzdC9zY2hlbWEvY29yZS9ib29sLmpzIiwibm9kZV9tb2R1bGVzLy5wbnBtL3lhbWxAMi4wLjAtMTAvbm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L3N0cmluZ2lmeS9zdHJpbmdpZnlOdW1iZXIuanMiLCJub2RlX21vZHVsZXMvLnBucG0veWFtbEAyLjAuMC0xMC9ub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3Qvc2NoZW1hL2NvcmUvZmxvYXQuanMiLCJub2RlX21vZHVsZXMvLnBucG0veWFtbEAyLjAuMC0xMC9ub2RlX21vZHVsZXMveWFtbC9icm93c2VyL2Rpc3Qvc2NoZW1hL2NvcmUvaW50LmpzIiwibm9kZV9tb2R1bGVzLy5wbnBtL3lhbWxAMi4wLjAtMTAv
bm9kZV9tb2R1bGVzL3lhbWwvYnJvd3Nlci9kaXN0L3NjaGVtYS9jb3JlL3NjaGVtYS5qcyIsIm5vZGVfbW9kdWxlcy8ucG5
|