mirror of
https://github.com/XRPLF/xrpl-dev-portal.git
synced 2025-11-20 11:45:50 +00:00
Add tool/migrate.sh as a one-stop conversion script for the whole repo. This script's duties include: - Changing all links from their old (.html) paths to new paths - Converting most Dactyl-specific syntax to Redocly equivalents - Generating Redocly sidebar and redirects YAML files This script is meant to be run from the repo top. It replaces syntax in-place. Unless this is the final migration phase, the results of running the migration script should be committed in a separate commit whose message starts with '[DROP]' so it can be re-run on the latest version of the master branch during rebasing. Many commits have been squashed into this one, including: - Add tool/migrate_links.sh as a one-stop conversion script for links. - Enable the update_links filter in dactyl config but make it inactive unless you pass the appropriate vars - Hack include_svg script to assume content/img instead of img [FOLD] Migration scripting improvements: - Roll scripting into all-in-one tool/migrate.sh - Script moving/renaming Japanese snippets into @i18n - Link replacement in snippets - Handle links with query params - Handle ref-links with anchors - Remove some macro syntax that breaks Redocly - Follow internal redirects in link replacement - Handle links to some non-md pages [FOLD] Migration script: handle more reflinks & imgs [FOLD] tweak link migration [FOLD] Fix substitution of reflinks Add sidebar script [FOLD] Fix link migration and whitespace noisiness [FOLD] Link migration: auto-generate better link replacements [FOLD] Convert badge syntax [FOLD] Migration script: handle :not_enabled: syntax [FOLD] Script generation of redirects [FOLD] Migration script: make reusable common links [FOLD] Fix common links code & conversion script comments [FOLD] Add more non-md links [FOLD] Fix filter_update_links syntax [FOLD] Fix script's common links include placement [FOLD] Migration script: update badge replacement to work w/ common-links [FOLD] Fix ordering of converting common-links vs 
partials [FOLD] Fix link substitution in common-links and fix trailing /index in redirects
16982 lines
473 KiB
JavaScript
16982 lines
473 KiB
JavaScript
// --- Bundler (esbuild-style) runtime helpers ---
// Cached intrinsics so the helpers below are immune to later monkey-patching.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;

// Wrap a CommonJS module body so it runs lazily, exactly once; subsequent
// calls return the cached module.exports.
var __commonJS = (cb, mod) => function __require() {
  if (!mod) {
    mod = { exports: {} };
    const factory = cb[__getOwnPropNames(cb)[0]];
    (0, factory)(mod.exports, mod);
  }
  return mod.exports;
};

// Define live, enumerable getter bindings on `target` for every entry in
// the supplied name -> accessor map (ESM named-export emulation).
var __export = (target, all2) => {
  for (var name in all2) {
    __defProp(target, name, { get: all2[name], enumerable: true });
  }
};

// Copy every own property of `from` onto `to` as a live getter, skipping
// keys `to` already owns and the single `except` key. Enumerability of the
// source property is preserved.
var __copyProps = (to, from, except, desc) => {
  if (from && (typeof from === "object" || typeof from === "function")) {
    for (const key of __getOwnPropNames(from)) {
      if (!__hasOwnProp.call(to, key) && key !== except) {
        desc = __getOwnPropDesc(from, key);
        __defProp(to, key, {
          get: () => from[key],
          enumerable: !desc || desc.enumerable
        });
      }
    }
  }
  return to;
};

// Present a CommonJS export as an ES-module namespace object.
var __toESM = (mod, isNodeMode, target) => {
  target = mod != null ? __create(__getProtoOf(mod)) : {};
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  const withDefault = isNodeMode || !mod || !mod.__esModule
    ? __defProp(target, "default", { value: mod, enumerable: true })
    : target;
  return __copyProps(withDefault, mod);
};
|
||
|
||
// node_modules/extend/index.js
// Vendored copy of the `extend` package (jQuery-style shallow/deep merge),
// wrapped for lazy evaluation by the bundler's __commonJS helper.
var require_extend = __commonJS({
  "node_modules/extend/index.js"(exports, module2) {
    "use strict";
    var hasOwn = Object.prototype.hasOwnProperty;
    var toStr = Object.prototype.toString;
    var defineProperty = Object.defineProperty;
    var gOPD = Object.getOwnPropertyDescriptor;
    // Array check with a toString fallback for engines lacking Array.isArray.
    var isArray = function isArray2(arr) {
      if (typeof Array.isArray === "function") {
        return Array.isArray(arr);
      }
      return toStr.call(arr) === "[object Array]";
    };
    // True only for "plain" objects (object literals / Object instances);
    // class instances, arrays, DOM nodes, etc. return false.
    var isPlainObject2 = function isPlainObject3(obj) {
      if (!obj || toStr.call(obj) !== "[object Object]") {
        return false;
      }
      var hasOwnConstructor = hasOwn.call(obj, "constructor");
      var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, "isPrototypeOf");
      // Reject objects whose constructor looks tampered with or foreign.
      if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) {
        return false;
      }
      var key;
      for (key in obj) {
      }
      // Own properties enumerate before inherited ones, so if the last key
      // seen is own (or there were no keys) there are no inherited enumerables.
      return typeof key === "undefined" || hasOwn.call(obj, key);
    };
    // Assign target[name] = newValue. For the key "__proto__" use
    // defineProperty so the prototype chain is not mutated
    // (prototype-pollution guard).
    var setProperty = function setProperty2(target, options) {
      if (defineProperty && options.name === "__proto__") {
        defineProperty(target, options.name, {
          enumerable: true,
          configurable: true,
          value: options.newValue,
          writable: true
        });
      } else {
        target[options.name] = options.newValue;
      }
    };
    // Read obj[name]; for "__proto__" only ever return an own-property
    // value, never the inherited prototype object.
    var getProperty = function getProperty2(obj, name) {
      if (name === "__proto__") {
        if (!hasOwn.call(obj, name)) {
          return void 0;
        } else if (gOPD) {
          return gOPD(obj, name).value;
        }
      }
      return obj[name];
    };
    // extend([deep], target, ...sources) — jQuery.extend-compatible merge.
    // With a leading boolean `true`, plain objects and arrays are merged
    // recursively; otherwise source values overwrite the target shallowly.
    // Returns the (mutated) target.
    module2.exports = function extend2() {
      var options, name, src, copy, copyIsArray, clone;
      var target = arguments[0];
      var i = 1;
      var length = arguments.length;
      var deep = false;
      // Optional leading boolean selects deep-merge mode.
      if (typeof target === "boolean") {
        deep = target;
        target = arguments[1] || {};
        i = 2;
      }
      // Non-object targets are replaced with a fresh object.
      if (target == null || typeof target !== "object" && typeof target !== "function") {
        target = {};
      }
      for (; i < length; ++i) {
        options = arguments[i];
        if (options != null) {
          for (name in options) {
            src = getProperty(target, name);
            copy = getProperty(options, name);
            // Never recurse into the target itself (avoids infinite loops).
            if (target !== copy) {
              // Deep mode: recurse into plain objects and arrays, reusing a
              // compatible existing value as the merge base when possible.
              // Note: copyIsArray is set by the || short-circuit and reset
              // inside the branch — intentional, do not reorder.
              if (deep && copy && (isPlainObject2(copy) || (copyIsArray = isArray(copy)))) {
                if (copyIsArray) {
                  copyIsArray = false;
                  clone = src && isArray(src) ? src : [];
                } else {
                  clone = src && isPlainObject2(src) ? src : {};
                }
                setProperty(target, { name, newValue: extend2(deep, clone, copy) });
              } else if (typeof copy !== "undefined") {
                setProperty(target, { name, newValue: copy });
              }
            }
          }
        }
      }
      return target;
    };
  }
});
|
||
|
||
// node_modules/format/format.js
// Vendored `format` package: a tiny printf-style string formatter exposing
// format / vsprintf / printf, wrapped for the bundler's lazy loader.
var require_format = __commonJS({
  "node_modules/format/format.js"(exports, module2) {
    (function() {
      var namespace;
      // Under CommonJS export on module.exports; otherwise attach to the
      // global object (the (1, eval)("this") trick resolves the global
      // even in strict mode).
      if (typeof module2 !== "undefined") {
        namespace = module2.exports = format;
      } else {
        namespace = function() {
          return this || (1, eval)("this");
        }();
      }
      namespace.format = format;
      namespace.vsprintf = vsprintf;
      // printf is only exposed where a usable console exists.
      if (typeof console !== "undefined" && typeof console.log === "function") {
        namespace.printf = printf;
      }
      // Like format(), but logs the result instead of returning it.
      function printf() {
        console.log(format.apply(null, arguments));
      }
      // Like format(), but takes the replacement values as an array.
      function vsprintf(fmt, replacements) {
        return format.apply(null, [fmt].concat(replacements));
      }
      // Core formatter. Directives: %b (binary int), %c (char or char code),
      // %d (decimal int), %f (float, default 6-digit precision), %j (JSON),
      // %o (octal, "0"-prefixed), %s (string), %x / %X (hex, "0x"-prefixed),
      // and %% emits a literal percent via the default branch.
      // "%.Nf" drops a leading zero ("0.5" -> ".5"); "%0.Nf" keeps it.
      function format(fmt) {
        var argIndex = 1, args = [].slice.call(arguments), i = 0, n = fmt.length, result = "", c, escaped = false, arg, tmp, leadingZero = false, precision, nextArg = function() {
          return args[argIndex++];
        }, slurpNumber = function() {
          // Consume the run of digits at the cursor (the precision field);
          // advances i and refreshes c as a side effect.
          var digits = "";
          while (/\d/.test(fmt[i])) {
            digits += fmt[i++];
            c = fmt[i];
          }
          return digits.length > 0 ? parseInt(digits) : null;
        };
        for (; i < n; ++i) {
          c = fmt[i];
          if (escaped) {
            escaped = false;
            // Optional flags between '%' and the directive letter.
            if (c == ".") {
              leadingZero = false;
              c = fmt[++i];
            } else if (c == "0" && fmt[i + 1] == ".") {
              leadingZero = true;
              i += 2;
              c = fmt[i];
            } else {
              leadingZero = true;
            }
            precision = slurpNumber();
            switch (c) {
              case "b":
                result += parseInt(nextArg(), 10).toString(2);
                break;
              case "c":
                arg = nextArg();
                // Strings pass through; numbers become the character code.
                if (typeof arg === "string" || arg instanceof String)
                  result += arg;
                else
                  result += String.fromCharCode(parseInt(arg, 10));
                break;
              case "d":
                result += parseInt(nextArg(), 10);
                break;
              case "f":
                tmp = String(parseFloat(nextArg()).toFixed(precision || 6));
                result += leadingZero ? tmp : tmp.replace(/^0/, "");
                break;
              case "j":
                result += JSON.stringify(nextArg());
                break;
              case "o":
                result += "0" + parseInt(nextArg(), 10).toString(8);
                break;
              case "s":
                result += nextArg();
                break;
              case "x":
                result += "0x" + parseInt(nextArg(), 10).toString(16);
                break;
              case "X":
                result += "0x" + parseInt(nextArg(), 10).toString(16).toUpperCase();
                break;
              default:
                // Unknown directive (including the second '%' of '%%')
                // is emitted verbatim.
                result += c;
                break;
            }
          } else if (c === "%") {
            escaped = true;
          } else {
            result += c;
          }
        }
        return result;
      }
    })();
  }
});
|
||
|
||
// node_modules/balanced-match/index.js
// Vendored `balanced-match`: find the first balanced pair of delimiters
// (e.g. "{" / "}") in a string and split it into pre / body / post parts.
var require_balanced_match = __commonJS({
  "node_modules/balanced-match/index.js"(exports, module2) {
    "use strict";
    module2.exports = balanced;
    // a / b may be strings or RegExps (a RegExp is resolved to its first
    // match in str). Returns a falsy value when no balanced pair exists,
    // otherwise { start, end, pre, body, post }.
    function balanced(a, b, str) {
      if (a instanceof RegExp)
        a = maybeMatch(a, str);
      if (b instanceof RegExp)
        b = maybeMatch(b, str);
      var r = range(a, b, str);
      return r && {
        start: r[0],
        end: r[1],
        pre: str.slice(0, r[0]),
        body: str.slice(r[0] + a.length, r[1]),
        post: str.slice(r[1] + b.length)
      };
    }
    // First match of reg in str, or null when it does not match.
    function maybeMatch(reg, str) {
      var m = str.match(reg);
      return m ? m[0] : null;
    }
    balanced.range = range;
    // Indices [open, close] of the first balanced occurrence of a...b in
    // str, or undefined when none is found.
    function range(a, b, str) {
      var begs, beg, left, right, result;
      var ai = str.indexOf(a);
      var bi = str.indexOf(b, ai + 1);
      var i = ai;
      if (ai >= 0 && bi > 0) {
        // Identical delimiters: the first two occurrences pair up.
        if (a === b) {
          return [ai, bi];
        }
        begs = [];
        left = str.length;
        // Scan delimiters left to right, keeping a stack of open positions
        // (begs). (left, right) remembers the innermost complete pair seen,
        // used as a fallback when unmatched openers remain at the end.
        while (i >= 0 && !result) {
          if (i == ai) {
            // Opener at the cursor: push it, look up the next opener.
            begs.push(i);
            ai = str.indexOf(a, i + 1);
          } else if (begs.length == 1) {
            // Closer matching the outermost opener: done.
            result = [begs.pop(), bi];
          } else {
            // Closer for a nested opener: record it as a fallback pair and
            // advance to the next closer.
            beg = begs.pop();
            if (beg < left) {
              left = beg;
              right = bi;
            }
            bi = str.indexOf(b, i + 1);
          }
          // Move the cursor to the nearer of the next opener / closer.
          i = ai < bi && ai >= 0 ? ai : bi;
        }
        if (begs.length) {
          result = [left, right];
        }
      }
      return result;
    }
  }
});
|
||
|
||
// node_modules/brace-expansion/index.js
// Vendored `brace-expansion`: Bash-style expansion of {a,b} option sets and
// {1..3} / {a..c} (optionally stepped) sequences into a list of strings.
var require_brace_expansion = __commonJS({
  "node_modules/brace-expansion/index.js"(exports, module2) {
    var balanced = require_balanced_match();
    module2.exports = expandTop;
    // Unique placeholder tokens used to protect escaped characters
    // (\\ \{ \} \, \.) from being treated as syntax during expansion.
    var escSlash = "\0SLASH" + Math.random() + "\0";
    var escOpen = "\0OPEN" + Math.random() + "\0";
    var escClose = "\0CLOSE" + Math.random() + "\0";
    var escComma = "\0COMMA" + Math.random() + "\0";
    var escPeriod = "\0PERIOD" + Math.random() + "\0";
    // Numeric value of a sequence endpoint: the integer itself, or the
    // character code for alphabetic endpoints.
    function numeric(str) {
      return parseInt(str, 10) == str ? parseInt(str, 10) : str.charCodeAt(0);
    }
    // Replace backslash-escaped metacharacters with placeholders.
    function escapeBraces(str) {
      return str.split("\\\\").join(escSlash).split("\\{").join(escOpen).split("\\}").join(escClose).split("\\,").join(escComma).split("\\.").join(escPeriod);
    }
    // Restore placeholders to their literal characters.
    function unescapeBraces(str) {
      return str.split(escSlash).join("\\").split(escOpen).join("{").split(escClose).join("}").split(escComma).join(",").split(escPeriod).join(".");
    }
    // Split a brace body on top-level commas only; commas inside nested
    // braces stay attached to their group.
    function parseCommaParts(str) {
      if (!str)
        return [""];
      var parts = [];
      var m = balanced("{", "}", str);
      if (!m)
        return str.split(",");
      var pre = m.pre;
      var body = m.body;
      var post = m.post;
      var p = pre.split(",");
      // Re-attach the nested group to the segment preceding it.
      p[p.length - 1] += "{" + body + "}";
      var postParts = parseCommaParts(post);
      if (post.length) {
        p[p.length - 1] += postParts.shift();
        p.push.apply(p, postParts);
      }
      parts.push.apply(parts, p);
      return parts;
    }
    // Entry point: expand the whole pattern into an array of strings.
    function expandTop(str) {
      if (!str)
        return [];
      // Bash never expands a literal leading "{}" (e.g. "{}/a").
      if (str.substr(0, 2) === "{}") {
        str = "\\{\\}" + str.substr(2);
      }
      return expand2(escapeBraces(str), true).map(unescapeBraces);
    }
    function embrace(str) {
      return "{" + str + "}";
    }
    // Endpoint written with a leading zero (e.g. "01") requests padding.
    function isPadded(el) {
      return /^-?0\d/.test(el);
    }
    function lte(i, y) {
      return i <= y;
    }
    function gte(i, y) {
      return i >= y;
    }
    // Recursive worker on escaped input. isTop marks the outermost call,
    // which drops empty results produced by pure option sets.
    function expand2(str, isTop) {
      var expansions = [];
      var m = balanced("{", "}", str);
      if (!m)
        return [str];
      var pre = m.pre;
      var post = m.post.length ? expand2(m.post, false) : [""];
      // "${...}" is shell parameter syntax, not a brace set: keep it intact
      // and only expand what follows.
      if (/\$$/.test(m.pre)) {
        for (var k = 0; k < post.length; k++) {
          var expansion = pre + "{" + m.body + "}" + post[k];
          expansions.push(expansion);
        }
      } else {
        var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
        var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
        var isSequence = isNumericSequence || isAlphaSequence;
        var isOptions = m.body.indexOf(",") >= 0;
        if (!isSequence && !isOptions) {
          // Not expandable here, but a later ",...}" may pair with a comma
          // after this group: neutralize this close brace and retry.
          if (m.post.match(/,.*\}/)) {
            str = m.pre + "{" + m.body + escClose + m.post;
            return expand2(str);
          }
          return [str];
        }
        var n;
        if (isSequence) {
          n = m.body.split(/\.\./);
        } else {
          n = parseCommaParts(m.body);
          if (n.length === 1) {
            // Single option: expand it and re-wrap in literal braces.
            n = expand2(n[0], false).map(embrace);
            if (n.length === 1) {
              return post.map(function(p) {
                return m.pre + n[0] + p;
              });
            }
          }
        }
        var N;
        if (isSequence) {
          // Numeric or alphabetic range with optional step increment.
          var x = numeric(n[0]);
          var y = numeric(n[1]);
          var width = Math.max(n[0].length, n[1].length);
          var incr = n.length == 3 ? Math.abs(numeric(n[2])) : 1;
          var test = lte;
          var reverse = y < x;
          if (reverse) {
            incr *= -1;
            test = gte;
          }
          var pad = n.some(isPadded);
          N = [];
          for (var i = x; test(i, y); i += incr) {
            var c;
            if (isAlphaSequence) {
              c = String.fromCharCode(i);
              // Backslash is skipped inside character ranges.
              if (c === "\\")
                c = "";
            } else {
              c = String(i);
              if (pad) {
                // Zero-pad to the widest endpoint, sign kept in front.
                var need = width - c.length;
                if (need > 0) {
                  var z = new Array(need + 1).join("0");
                  if (i < 0)
                    c = "-" + z + c.slice(1);
                  else
                    c = z + c;
                }
              }
            }
            N.push(c);
          }
        } else {
          // Comma options: expand each alternative recursively.
          N = [];
          for (var j = 0; j < n.length; j++) {
            N.push.apply(N, expand2(n[j], false));
          }
        }
        // Cross product of every alternative with every post expansion.
        for (var j = 0; j < N.length; j++) {
          for (var k = 0; k < post.length; k++) {
            var expansion = pre + N[j] + post[k];
            if (!isTop || isSequence || expansion)
              expansions.push(expansion);
          }
        }
      }
      return expansions;
    }
  }
});
|
||
|
||
// node_modules/min-indent/index.js
// Vendored `min-indent`: the smallest leading-whitespace width among the
// non-blank lines of a string (0 when there is no such line).
var require_min_indent = __commonJS({
  "node_modules/min-indent/index.js"(exports, module2) {
    "use strict";
    module2.exports = (string3) => {
      // One match per line: the run of spaces/tabs before the first
      // non-whitespace character. Blank lines contribute no match.
      const indents = string3.match(/^[ \t]*(?=\S)/gm);
      if (indents === null) {
        return 0;
      }
      let shortest = Infinity;
      for (const indent of indents) {
        shortest = Math.min(shortest, indent.length);
      }
      return shortest;
    };
  }
});
|
||
|
||
// node_modules/mdast-util-to-string/lib/index.js
// Inlined from mdast-util-to-string: collect the plain-text content of an
// mdast node (or array of nodes).
var emptyOptions = {};

/**
 * Serialize an mdast node to plain text.
 * Image/resource `alt` text is included unless
 * `options.includeImageAlt === false`; raw HTML node values are included
 * unless `options.includeHtml === false`. Non-node input yields "".
 */
function toString(value2, options) {
  const settings = options || emptyOptions;
  const includeImageAlt = typeof settings.includeImageAlt === "boolean" ? settings.includeImageAlt : true;
  const includeHtml = typeof settings.includeHtml === "boolean" ? settings.includeHtml : true;
  return one(value2, includeImageAlt, includeHtml);
}

// Serialize one node; arrays are delegated to all(), anything else -> "".
function one(input, includeImageAlt, includeHtml) {
  if (node(input)) {
    if ("value" in input) {
      return input.type === "html" && !includeHtml ? "" : input.value;
    }
    if (includeImageAlt && "alt" in input && input.alt) {
      return input.alt;
    }
    if ("children" in input) {
      return all(input.children, includeImageAlt, includeHtml);
    }
  }
  if (Array.isArray(input)) {
    return all(input, includeImageAlt, includeHtml);
  }
  return "";
}

// Serialize a list of nodes and concatenate the pieces in order.
function all(values, includeImageAlt, includeHtml) {
  const pieces = [];
  for (const item of values) {
    pieces.push(one(item, includeImageAlt, includeHtml));
  }
  return pieces.join("");
}

// Loose check that `input` is an object (and therefore possibly a node).
function node(input) {
  return Boolean(input && typeof input === "object");
}
|
||
|
||
// node_modules/character-entities/index.js
|
||
var characterEntities = {
|
||
AElig: "\xC6",
|
||
AMP: "&",
|
||
Aacute: "\xC1",
|
||
Abreve: "\u0102",
|
||
Acirc: "\xC2",
|
||
Acy: "\u0410",
|
||
Afr: "\u{1D504}",
|
||
Agrave: "\xC0",
|
||
Alpha: "\u0391",
|
||
Amacr: "\u0100",
|
||
And: "\u2A53",
|
||
Aogon: "\u0104",
|
||
Aopf: "\u{1D538}",
|
||
ApplyFunction: "\u2061",
|
||
Aring: "\xC5",
|
||
Ascr: "\u{1D49C}",
|
||
Assign: "\u2254",
|
||
Atilde: "\xC3",
|
||
Auml: "\xC4",
|
||
Backslash: "\u2216",
|
||
Barv: "\u2AE7",
|
||
Barwed: "\u2306",
|
||
Bcy: "\u0411",
|
||
Because: "\u2235",
|
||
Bernoullis: "\u212C",
|
||
Beta: "\u0392",
|
||
Bfr: "\u{1D505}",
|
||
Bopf: "\u{1D539}",
|
||
Breve: "\u02D8",
|
||
Bscr: "\u212C",
|
||
Bumpeq: "\u224E",
|
||
CHcy: "\u0427",
|
||
COPY: "\xA9",
|
||
Cacute: "\u0106",
|
||
Cap: "\u22D2",
|
||
CapitalDifferentialD: "\u2145",
|
||
Cayleys: "\u212D",
|
||
Ccaron: "\u010C",
|
||
Ccedil: "\xC7",
|
||
Ccirc: "\u0108",
|
||
Cconint: "\u2230",
|
||
Cdot: "\u010A",
|
||
Cedilla: "\xB8",
|
||
CenterDot: "\xB7",
|
||
Cfr: "\u212D",
|
||
Chi: "\u03A7",
|
||
CircleDot: "\u2299",
|
||
CircleMinus: "\u2296",
|
||
CirclePlus: "\u2295",
|
||
CircleTimes: "\u2297",
|
||
ClockwiseContourIntegral: "\u2232",
|
||
CloseCurlyDoubleQuote: "\u201D",
|
||
CloseCurlyQuote: "\u2019",
|
||
Colon: "\u2237",
|
||
Colone: "\u2A74",
|
||
Congruent: "\u2261",
|
||
Conint: "\u222F",
|
||
ContourIntegral: "\u222E",
|
||
Copf: "\u2102",
|
||
Coproduct: "\u2210",
|
||
CounterClockwiseContourIntegral: "\u2233",
|
||
Cross: "\u2A2F",
|
||
Cscr: "\u{1D49E}",
|
||
Cup: "\u22D3",
|
||
CupCap: "\u224D",
|
||
DD: "\u2145",
|
||
DDotrahd: "\u2911",
|
||
DJcy: "\u0402",
|
||
DScy: "\u0405",
|
||
DZcy: "\u040F",
|
||
Dagger: "\u2021",
|
||
Darr: "\u21A1",
|
||
Dashv: "\u2AE4",
|
||
Dcaron: "\u010E",
|
||
Dcy: "\u0414",
|
||
Del: "\u2207",
|
||
Delta: "\u0394",
|
||
Dfr: "\u{1D507}",
|
||
DiacriticalAcute: "\xB4",
|
||
DiacriticalDot: "\u02D9",
|
||
DiacriticalDoubleAcute: "\u02DD",
|
||
DiacriticalGrave: "`",
|
||
DiacriticalTilde: "\u02DC",
|
||
Diamond: "\u22C4",
|
||
DifferentialD: "\u2146",
|
||
Dopf: "\u{1D53B}",
|
||
Dot: "\xA8",
|
||
DotDot: "\u20DC",
|
||
DotEqual: "\u2250",
|
||
DoubleContourIntegral: "\u222F",
|
||
DoubleDot: "\xA8",
|
||
DoubleDownArrow: "\u21D3",
|
||
DoubleLeftArrow: "\u21D0",
|
||
DoubleLeftRightArrow: "\u21D4",
|
||
DoubleLeftTee: "\u2AE4",
|
||
DoubleLongLeftArrow: "\u27F8",
|
||
DoubleLongLeftRightArrow: "\u27FA",
|
||
DoubleLongRightArrow: "\u27F9",
|
||
DoubleRightArrow: "\u21D2",
|
||
DoubleRightTee: "\u22A8",
|
||
DoubleUpArrow: "\u21D1",
|
||
DoubleUpDownArrow: "\u21D5",
|
||
DoubleVerticalBar: "\u2225",
|
||
DownArrow: "\u2193",
|
||
DownArrowBar: "\u2913",
|
||
DownArrowUpArrow: "\u21F5",
|
||
DownBreve: "\u0311",
|
||
DownLeftRightVector: "\u2950",
|
||
DownLeftTeeVector: "\u295E",
|
||
DownLeftVector: "\u21BD",
|
||
DownLeftVectorBar: "\u2956",
|
||
DownRightTeeVector: "\u295F",
|
||
DownRightVector: "\u21C1",
|
||
DownRightVectorBar: "\u2957",
|
||
DownTee: "\u22A4",
|
||
DownTeeArrow: "\u21A7",
|
||
Downarrow: "\u21D3",
|
||
Dscr: "\u{1D49F}",
|
||
Dstrok: "\u0110",
|
||
ENG: "\u014A",
|
||
ETH: "\xD0",
|
||
Eacute: "\xC9",
|
||
Ecaron: "\u011A",
|
||
Ecirc: "\xCA",
|
||
Ecy: "\u042D",
|
||
Edot: "\u0116",
|
||
Efr: "\u{1D508}",
|
||
Egrave: "\xC8",
|
||
Element: "\u2208",
|
||
Emacr: "\u0112",
|
||
EmptySmallSquare: "\u25FB",
|
||
EmptyVerySmallSquare: "\u25AB",
|
||
Eogon: "\u0118",
|
||
Eopf: "\u{1D53C}",
|
||
Epsilon: "\u0395",
|
||
Equal: "\u2A75",
|
||
EqualTilde: "\u2242",
|
||
Equilibrium: "\u21CC",
|
||
Escr: "\u2130",
|
||
Esim: "\u2A73",
|
||
Eta: "\u0397",
|
||
Euml: "\xCB",
|
||
Exists: "\u2203",
|
||
ExponentialE: "\u2147",
|
||
Fcy: "\u0424",
|
||
Ffr: "\u{1D509}",
|
||
FilledSmallSquare: "\u25FC",
|
||
FilledVerySmallSquare: "\u25AA",
|
||
Fopf: "\u{1D53D}",
|
||
ForAll: "\u2200",
|
||
Fouriertrf: "\u2131",
|
||
Fscr: "\u2131",
|
||
GJcy: "\u0403",
|
||
GT: ">",
|
||
Gamma: "\u0393",
|
||
Gammad: "\u03DC",
|
||
Gbreve: "\u011E",
|
||
Gcedil: "\u0122",
|
||
Gcirc: "\u011C",
|
||
Gcy: "\u0413",
|
||
Gdot: "\u0120",
|
||
Gfr: "\u{1D50A}",
|
||
Gg: "\u22D9",
|
||
Gopf: "\u{1D53E}",
|
||
GreaterEqual: "\u2265",
|
||
GreaterEqualLess: "\u22DB",
|
||
GreaterFullEqual: "\u2267",
|
||
GreaterGreater: "\u2AA2",
|
||
GreaterLess: "\u2277",
|
||
GreaterSlantEqual: "\u2A7E",
|
||
GreaterTilde: "\u2273",
|
||
Gscr: "\u{1D4A2}",
|
||
Gt: "\u226B",
|
||
HARDcy: "\u042A",
|
||
Hacek: "\u02C7",
|
||
Hat: "^",
|
||
Hcirc: "\u0124",
|
||
Hfr: "\u210C",
|
||
HilbertSpace: "\u210B",
|
||
Hopf: "\u210D",
|
||
HorizontalLine: "\u2500",
|
||
Hscr: "\u210B",
|
||
Hstrok: "\u0126",
|
||
HumpDownHump: "\u224E",
|
||
HumpEqual: "\u224F",
|
||
IEcy: "\u0415",
|
||
IJlig: "\u0132",
|
||
IOcy: "\u0401",
|
||
Iacute: "\xCD",
|
||
Icirc: "\xCE",
|
||
Icy: "\u0418",
|
||
Idot: "\u0130",
|
||
Ifr: "\u2111",
|
||
Igrave: "\xCC",
|
||
Im: "\u2111",
|
||
Imacr: "\u012A",
|
||
ImaginaryI: "\u2148",
|
||
Implies: "\u21D2",
|
||
Int: "\u222C",
|
||
Integral: "\u222B",
|
||
Intersection: "\u22C2",
|
||
InvisibleComma: "\u2063",
|
||
InvisibleTimes: "\u2062",
|
||
Iogon: "\u012E",
|
||
Iopf: "\u{1D540}",
|
||
Iota: "\u0399",
|
||
Iscr: "\u2110",
|
||
Itilde: "\u0128",
|
||
Iukcy: "\u0406",
|
||
Iuml: "\xCF",
|
||
Jcirc: "\u0134",
|
||
Jcy: "\u0419",
|
||
Jfr: "\u{1D50D}",
|
||
Jopf: "\u{1D541}",
|
||
Jscr: "\u{1D4A5}",
|
||
Jsercy: "\u0408",
|
||
Jukcy: "\u0404",
|
||
KHcy: "\u0425",
|
||
KJcy: "\u040C",
|
||
Kappa: "\u039A",
|
||
Kcedil: "\u0136",
|
||
Kcy: "\u041A",
|
||
Kfr: "\u{1D50E}",
|
||
Kopf: "\u{1D542}",
|
||
Kscr: "\u{1D4A6}",
|
||
LJcy: "\u0409",
|
||
LT: "<",
|
||
Lacute: "\u0139",
|
||
Lambda: "\u039B",
|
||
Lang: "\u27EA",
|
||
Laplacetrf: "\u2112",
|
||
Larr: "\u219E",
|
||
Lcaron: "\u013D",
|
||
Lcedil: "\u013B",
|
||
Lcy: "\u041B",
|
||
LeftAngleBracket: "\u27E8",
|
||
LeftArrow: "\u2190",
|
||
LeftArrowBar: "\u21E4",
|
||
LeftArrowRightArrow: "\u21C6",
|
||
LeftCeiling: "\u2308",
|
||
LeftDoubleBracket: "\u27E6",
|
||
LeftDownTeeVector: "\u2961",
|
||
LeftDownVector: "\u21C3",
|
||
LeftDownVectorBar: "\u2959",
|
||
LeftFloor: "\u230A",
|
||
LeftRightArrow: "\u2194",
|
||
LeftRightVector: "\u294E",
|
||
LeftTee: "\u22A3",
|
||
LeftTeeArrow: "\u21A4",
|
||
LeftTeeVector: "\u295A",
|
||
LeftTriangle: "\u22B2",
|
||
LeftTriangleBar: "\u29CF",
|
||
LeftTriangleEqual: "\u22B4",
|
||
LeftUpDownVector: "\u2951",
|
||
LeftUpTeeVector: "\u2960",
|
||
LeftUpVector: "\u21BF",
|
||
LeftUpVectorBar: "\u2958",
|
||
LeftVector: "\u21BC",
|
||
LeftVectorBar: "\u2952",
|
||
Leftarrow: "\u21D0",
|
||
Leftrightarrow: "\u21D4",
|
||
LessEqualGreater: "\u22DA",
|
||
LessFullEqual: "\u2266",
|
||
LessGreater: "\u2276",
|
||
LessLess: "\u2AA1",
|
||
LessSlantEqual: "\u2A7D",
|
||
LessTilde: "\u2272",
|
||
Lfr: "\u{1D50F}",
|
||
Ll: "\u22D8",
|
||
Lleftarrow: "\u21DA",
|
||
Lmidot: "\u013F",
|
||
LongLeftArrow: "\u27F5",
|
||
LongLeftRightArrow: "\u27F7",
|
||
LongRightArrow: "\u27F6",
|
||
Longleftarrow: "\u27F8",
|
||
Longleftrightarrow: "\u27FA",
|
||
Longrightarrow: "\u27F9",
|
||
Lopf: "\u{1D543}",
|
||
LowerLeftArrow: "\u2199",
|
||
LowerRightArrow: "\u2198",
|
||
Lscr: "\u2112",
|
||
Lsh: "\u21B0",
|
||
Lstrok: "\u0141",
|
||
Lt: "\u226A",
|
||
Map: "\u2905",
|
||
Mcy: "\u041C",
|
||
MediumSpace: "\u205F",
|
||
Mellintrf: "\u2133",
|
||
Mfr: "\u{1D510}",
|
||
MinusPlus: "\u2213",
|
||
Mopf: "\u{1D544}",
|
||
Mscr: "\u2133",
|
||
Mu: "\u039C",
|
||
NJcy: "\u040A",
|
||
Nacute: "\u0143",
|
||
Ncaron: "\u0147",
|
||
Ncedil: "\u0145",
|
||
Ncy: "\u041D",
|
||
NegativeMediumSpace: "\u200B",
|
||
NegativeThickSpace: "\u200B",
|
||
NegativeThinSpace: "\u200B",
|
||
NegativeVeryThinSpace: "\u200B",
|
||
NestedGreaterGreater: "\u226B",
|
||
NestedLessLess: "\u226A",
|
||
NewLine: "\n",
|
||
Nfr: "\u{1D511}",
|
||
NoBreak: "\u2060",
|
||
NonBreakingSpace: "\xA0",
|
||
Nopf: "\u2115",
|
||
Not: "\u2AEC",
|
||
NotCongruent: "\u2262",
|
||
NotCupCap: "\u226D",
|
||
NotDoubleVerticalBar: "\u2226",
|
||
NotElement: "\u2209",
|
||
NotEqual: "\u2260",
|
||
NotEqualTilde: "\u2242\u0338",
|
||
NotExists: "\u2204",
|
||
NotGreater: "\u226F",
|
||
NotGreaterEqual: "\u2271",
|
||
NotGreaterFullEqual: "\u2267\u0338",
|
||
NotGreaterGreater: "\u226B\u0338",
|
||
NotGreaterLess: "\u2279",
|
||
NotGreaterSlantEqual: "\u2A7E\u0338",
|
||
NotGreaterTilde: "\u2275",
|
||
NotHumpDownHump: "\u224E\u0338",
|
||
NotHumpEqual: "\u224F\u0338",
|
||
NotLeftTriangle: "\u22EA",
|
||
NotLeftTriangleBar: "\u29CF\u0338",
|
||
NotLeftTriangleEqual: "\u22EC",
|
||
NotLess: "\u226E",
|
||
NotLessEqual: "\u2270",
|
||
NotLessGreater: "\u2278",
|
||
NotLessLess: "\u226A\u0338",
|
||
NotLessSlantEqual: "\u2A7D\u0338",
|
||
NotLessTilde: "\u2274",
|
||
NotNestedGreaterGreater: "\u2AA2\u0338",
|
||
NotNestedLessLess: "\u2AA1\u0338",
|
||
NotPrecedes: "\u2280",
|
||
NotPrecedesEqual: "\u2AAF\u0338",
|
||
NotPrecedesSlantEqual: "\u22E0",
|
||
NotReverseElement: "\u220C",
|
||
NotRightTriangle: "\u22EB",
|
||
NotRightTriangleBar: "\u29D0\u0338",
|
||
NotRightTriangleEqual: "\u22ED",
|
||
NotSquareSubset: "\u228F\u0338",
|
||
NotSquareSubsetEqual: "\u22E2",
|
||
NotSquareSuperset: "\u2290\u0338",
|
||
NotSquareSupersetEqual: "\u22E3",
|
||
NotSubset: "\u2282\u20D2",
|
||
NotSubsetEqual: "\u2288",
|
||
NotSucceeds: "\u2281",
|
||
NotSucceedsEqual: "\u2AB0\u0338",
|
||
NotSucceedsSlantEqual: "\u22E1",
|
||
NotSucceedsTilde: "\u227F\u0338",
|
||
NotSuperset: "\u2283\u20D2",
|
||
NotSupersetEqual: "\u2289",
|
||
NotTilde: "\u2241",
|
||
NotTildeEqual: "\u2244",
|
||
NotTildeFullEqual: "\u2247",
|
||
NotTildeTilde: "\u2249",
|
||
NotVerticalBar: "\u2224",
|
||
Nscr: "\u{1D4A9}",
|
||
Ntilde: "\xD1",
|
||
Nu: "\u039D",
|
||
OElig: "\u0152",
|
||
Oacute: "\xD3",
|
||
Ocirc: "\xD4",
|
||
Ocy: "\u041E",
|
||
Odblac: "\u0150",
|
||
Ofr: "\u{1D512}",
|
||
Ograve: "\xD2",
|
||
Omacr: "\u014C",
|
||
Omega: "\u03A9",
|
||
Omicron: "\u039F",
|
||
Oopf: "\u{1D546}",
|
||
OpenCurlyDoubleQuote: "\u201C",
|
||
OpenCurlyQuote: "\u2018",
|
||
Or: "\u2A54",
|
||
Oscr: "\u{1D4AA}",
|
||
Oslash: "\xD8",
|
||
Otilde: "\xD5",
|
||
Otimes: "\u2A37",
|
||
Ouml: "\xD6",
|
||
OverBar: "\u203E",
|
||
OverBrace: "\u23DE",
|
||
OverBracket: "\u23B4",
|
||
OverParenthesis: "\u23DC",
|
||
PartialD: "\u2202",
|
||
Pcy: "\u041F",
|
||
Pfr: "\u{1D513}",
|
||
Phi: "\u03A6",
|
||
Pi: "\u03A0",
|
||
PlusMinus: "\xB1",
|
||
Poincareplane: "\u210C",
|
||
Popf: "\u2119",
|
||
Pr: "\u2ABB",
|
||
Precedes: "\u227A",
|
||
PrecedesEqual: "\u2AAF",
|
||
PrecedesSlantEqual: "\u227C",
|
||
PrecedesTilde: "\u227E",
|
||
Prime: "\u2033",
|
||
Product: "\u220F",
|
||
Proportion: "\u2237",
|
||
Proportional: "\u221D",
|
||
Pscr: "\u{1D4AB}",
|
||
Psi: "\u03A8",
|
||
QUOT: '"',
|
||
Qfr: "\u{1D514}",
|
||
Qopf: "\u211A",
|
||
Qscr: "\u{1D4AC}",
|
||
RBarr: "\u2910",
|
||
REG: "\xAE",
|
||
Racute: "\u0154",
|
||
Rang: "\u27EB",
|
||
Rarr: "\u21A0",
|
||
Rarrtl: "\u2916",
|
||
Rcaron: "\u0158",
|
||
Rcedil: "\u0156",
|
||
Rcy: "\u0420",
|
||
Re: "\u211C",
|
||
ReverseElement: "\u220B",
|
||
ReverseEquilibrium: "\u21CB",
|
||
ReverseUpEquilibrium: "\u296F",
|
||
Rfr: "\u211C",
|
||
Rho: "\u03A1",
|
||
RightAngleBracket: "\u27E9",
|
||
RightArrow: "\u2192",
|
||
RightArrowBar: "\u21E5",
|
||
RightArrowLeftArrow: "\u21C4",
|
||
RightCeiling: "\u2309",
|
||
RightDoubleBracket: "\u27E7",
|
||
RightDownTeeVector: "\u295D",
|
||
RightDownVector: "\u21C2",
|
||
RightDownVectorBar: "\u2955",
|
||
RightFloor: "\u230B",
|
||
RightTee: "\u22A2",
|
||
RightTeeArrow: "\u21A6",
|
||
RightTeeVector: "\u295B",
|
||
RightTriangle: "\u22B3",
|
||
RightTriangleBar: "\u29D0",
|
||
RightTriangleEqual: "\u22B5",
|
||
RightUpDownVector: "\u294F",
|
||
RightUpTeeVector: "\u295C",
|
||
RightUpVector: "\u21BE",
|
||
RightUpVectorBar: "\u2954",
|
||
RightVector: "\u21C0",
|
||
RightVectorBar: "\u2953",
|
||
Rightarrow: "\u21D2",
|
||
Ropf: "\u211D",
|
||
RoundImplies: "\u2970",
|
||
Rrightarrow: "\u21DB",
|
||
Rscr: "\u211B",
|
||
Rsh: "\u21B1",
|
||
RuleDelayed: "\u29F4",
|
||
SHCHcy: "\u0429",
|
||
SHcy: "\u0428",
|
||
SOFTcy: "\u042C",
|
||
Sacute: "\u015A",
|
||
Sc: "\u2ABC",
|
||
Scaron: "\u0160",
|
||
Scedil: "\u015E",
|
||
Scirc: "\u015C",
|
||
Scy: "\u0421",
|
||
Sfr: "\u{1D516}",
|
||
ShortDownArrow: "\u2193",
|
||
ShortLeftArrow: "\u2190",
|
||
ShortRightArrow: "\u2192",
|
||
ShortUpArrow: "\u2191",
|
||
Sigma: "\u03A3",
|
||
SmallCircle: "\u2218",
|
||
Sopf: "\u{1D54A}",
|
||
Sqrt: "\u221A",
|
||
Square: "\u25A1",
|
||
SquareIntersection: "\u2293",
|
||
SquareSubset: "\u228F",
|
||
SquareSubsetEqual: "\u2291",
|
||
SquareSuperset: "\u2290",
|
||
SquareSupersetEqual: "\u2292",
|
||
SquareUnion: "\u2294",
|
||
Sscr: "\u{1D4AE}",
|
||
Star: "\u22C6",
|
||
Sub: "\u22D0",
|
||
Subset: "\u22D0",
|
||
SubsetEqual: "\u2286",
|
||
Succeeds: "\u227B",
|
||
SucceedsEqual: "\u2AB0",
|
||
SucceedsSlantEqual: "\u227D",
|
||
SucceedsTilde: "\u227F",
|
||
SuchThat: "\u220B",
|
||
Sum: "\u2211",
|
||
Sup: "\u22D1",
|
||
Superset: "\u2283",
|
||
SupersetEqual: "\u2287",
|
||
Supset: "\u22D1",
|
||
THORN: "\xDE",
|
||
TRADE: "\u2122",
|
||
TSHcy: "\u040B",
|
||
TScy: "\u0426",
|
||
Tab: " ",
|
||
Tau: "\u03A4",
|
||
Tcaron: "\u0164",
|
||
Tcedil: "\u0162",
|
||
Tcy: "\u0422",
|
||
Tfr: "\u{1D517}",
|
||
Therefore: "\u2234",
|
||
Theta: "\u0398",
|
||
ThickSpace: "\u205F\u200A",
|
||
ThinSpace: "\u2009",
|
||
Tilde: "\u223C",
|
||
TildeEqual: "\u2243",
|
||
TildeFullEqual: "\u2245",
|
||
TildeTilde: "\u2248",
|
||
Topf: "\u{1D54B}",
|
||
TripleDot: "\u20DB",
|
||
Tscr: "\u{1D4AF}",
|
||
Tstrok: "\u0166",
|
||
Uacute: "\xDA",
|
||
Uarr: "\u219F",
|
||
Uarrocir: "\u2949",
|
||
Ubrcy: "\u040E",
|
||
Ubreve: "\u016C",
|
||
Ucirc: "\xDB",
|
||
Ucy: "\u0423",
|
||
Udblac: "\u0170",
|
||
Ufr: "\u{1D518}",
|
||
Ugrave: "\xD9",
|
||
Umacr: "\u016A",
|
||
UnderBar: "_",
|
||
UnderBrace: "\u23DF",
|
||
UnderBracket: "\u23B5",
|
||
UnderParenthesis: "\u23DD",
|
||
Union: "\u22C3",
|
||
UnionPlus: "\u228E",
|
||
Uogon: "\u0172",
|
||
Uopf: "\u{1D54C}",
|
||
UpArrow: "\u2191",
|
||
UpArrowBar: "\u2912",
|
||
UpArrowDownArrow: "\u21C5",
|
||
UpDownArrow: "\u2195",
|
||
UpEquilibrium: "\u296E",
|
||
UpTee: "\u22A5",
|
||
UpTeeArrow: "\u21A5",
|
||
Uparrow: "\u21D1",
|
||
Updownarrow: "\u21D5",
|
||
UpperLeftArrow: "\u2196",
|
||
UpperRightArrow: "\u2197",
|
||
Upsi: "\u03D2",
|
||
Upsilon: "\u03A5",
|
||
Uring: "\u016E",
|
||
Uscr: "\u{1D4B0}",
|
||
Utilde: "\u0168",
|
||
Uuml: "\xDC",
|
||
VDash: "\u22AB",
|
||
Vbar: "\u2AEB",
|
||
Vcy: "\u0412",
|
||
Vdash: "\u22A9",
|
||
Vdashl: "\u2AE6",
|
||
Vee: "\u22C1",
|
||
Verbar: "\u2016",
|
||
Vert: "\u2016",
|
||
VerticalBar: "\u2223",
|
||
VerticalLine: "|",
|
||
VerticalSeparator: "\u2758",
|
||
VerticalTilde: "\u2240",
|
||
VeryThinSpace: "\u200A",
|
||
Vfr: "\u{1D519}",
|
||
Vopf: "\u{1D54D}",
|
||
Vscr: "\u{1D4B1}",
|
||
Vvdash: "\u22AA",
|
||
Wcirc: "\u0174",
|
||
Wedge: "\u22C0",
|
||
Wfr: "\u{1D51A}",
|
||
Wopf: "\u{1D54E}",
|
||
Wscr: "\u{1D4B2}",
|
||
Xfr: "\u{1D51B}",
|
||
Xi: "\u039E",
|
||
Xopf: "\u{1D54F}",
|
||
Xscr: "\u{1D4B3}",
|
||
YAcy: "\u042F",
|
||
YIcy: "\u0407",
|
||
YUcy: "\u042E",
|
||
Yacute: "\xDD",
|
||
Ycirc: "\u0176",
|
||
Ycy: "\u042B",
|
||
Yfr: "\u{1D51C}",
|
||
Yopf: "\u{1D550}",
|
||
Yscr: "\u{1D4B4}",
|
||
Yuml: "\u0178",
|
||
ZHcy: "\u0416",
|
||
Zacute: "\u0179",
|
||
Zcaron: "\u017D",
|
||
Zcy: "\u0417",
|
||
Zdot: "\u017B",
|
||
ZeroWidthSpace: "\u200B",
|
||
Zeta: "\u0396",
|
||
Zfr: "\u2128",
|
||
Zopf: "\u2124",
|
||
Zscr: "\u{1D4B5}",
|
||
aacute: "\xE1",
|
||
abreve: "\u0103",
|
||
ac: "\u223E",
|
||
acE: "\u223E\u0333",
|
||
acd: "\u223F",
|
||
acirc: "\xE2",
|
||
acute: "\xB4",
|
||
acy: "\u0430",
|
||
aelig: "\xE6",
|
||
af: "\u2061",
|
||
afr: "\u{1D51E}",
|
||
agrave: "\xE0",
|
||
alefsym: "\u2135",
|
||
aleph: "\u2135",
|
||
alpha: "\u03B1",
|
||
amacr: "\u0101",
|
||
amalg: "\u2A3F",
|
||
amp: "&",
|
||
and: "\u2227",
|
||
andand: "\u2A55",
|
||
andd: "\u2A5C",
|
||
andslope: "\u2A58",
|
||
andv: "\u2A5A",
|
||
ang: "\u2220",
|
||
ange: "\u29A4",
|
||
angle: "\u2220",
|
||
angmsd: "\u2221",
|
||
angmsdaa: "\u29A8",
|
||
angmsdab: "\u29A9",
|
||
angmsdac: "\u29AA",
|
||
angmsdad: "\u29AB",
|
||
angmsdae: "\u29AC",
|
||
angmsdaf: "\u29AD",
|
||
angmsdag: "\u29AE",
|
||
angmsdah: "\u29AF",
|
||
angrt: "\u221F",
|
||
angrtvb: "\u22BE",
|
||
angrtvbd: "\u299D",
|
||
angsph: "\u2222",
|
||
angst: "\xC5",
|
||
angzarr: "\u237C",
|
||
aogon: "\u0105",
|
||
aopf: "\u{1D552}",
|
||
ap: "\u2248",
|
||
apE: "\u2A70",
|
||
apacir: "\u2A6F",
|
||
ape: "\u224A",
|
||
apid: "\u224B",
|
||
apos: "'",
|
||
approx: "\u2248",
|
||
approxeq: "\u224A",
|
||
aring: "\xE5",
|
||
ascr: "\u{1D4B6}",
|
||
ast: "*",
|
||
asymp: "\u2248",
|
||
asympeq: "\u224D",
|
||
atilde: "\xE3",
|
||
auml: "\xE4",
|
||
awconint: "\u2233",
|
||
awint: "\u2A11",
|
||
bNot: "\u2AED",
|
||
backcong: "\u224C",
|
||
backepsilon: "\u03F6",
|
||
backprime: "\u2035",
|
||
backsim: "\u223D",
|
||
backsimeq: "\u22CD",
|
||
barvee: "\u22BD",
|
||
barwed: "\u2305",
|
||
barwedge: "\u2305",
|
||
bbrk: "\u23B5",
|
||
bbrktbrk: "\u23B6",
|
||
bcong: "\u224C",
|
||
bcy: "\u0431",
|
||
bdquo: "\u201E",
|
||
becaus: "\u2235",
|
||
because: "\u2235",
|
||
bemptyv: "\u29B0",
|
||
bepsi: "\u03F6",
|
||
bernou: "\u212C",
|
||
beta: "\u03B2",
|
||
beth: "\u2136",
|
||
between: "\u226C",
|
||
bfr: "\u{1D51F}",
|
||
bigcap: "\u22C2",
|
||
bigcirc: "\u25EF",
|
||
bigcup: "\u22C3",
|
||
bigodot: "\u2A00",
|
||
bigoplus: "\u2A01",
|
||
bigotimes: "\u2A02",
|
||
bigsqcup: "\u2A06",
|
||
bigstar: "\u2605",
|
||
bigtriangledown: "\u25BD",
|
||
bigtriangleup: "\u25B3",
|
||
biguplus: "\u2A04",
|
||
bigvee: "\u22C1",
|
||
bigwedge: "\u22C0",
|
||
bkarow: "\u290D",
|
||
blacklozenge: "\u29EB",
|
||
blacksquare: "\u25AA",
|
||
blacktriangle: "\u25B4",
|
||
blacktriangledown: "\u25BE",
|
||
blacktriangleleft: "\u25C2",
|
||
blacktriangleright: "\u25B8",
|
||
blank: "\u2423",
|
||
blk12: "\u2592",
|
||
blk14: "\u2591",
|
||
blk34: "\u2593",
|
||
block: "\u2588",
|
||
bne: "=\u20E5",
|
||
bnequiv: "\u2261\u20E5",
|
||
bnot: "\u2310",
|
||
bopf: "\u{1D553}",
|
||
bot: "\u22A5",
|
||
bottom: "\u22A5",
|
||
bowtie: "\u22C8",
|
||
boxDL: "\u2557",
|
||
boxDR: "\u2554",
|
||
boxDl: "\u2556",
|
||
boxDr: "\u2553",
|
||
boxH: "\u2550",
|
||
boxHD: "\u2566",
|
||
boxHU: "\u2569",
|
||
boxHd: "\u2564",
|
||
boxHu: "\u2567",
|
||
boxUL: "\u255D",
|
||
boxUR: "\u255A",
|
||
boxUl: "\u255C",
|
||
boxUr: "\u2559",
|
||
boxV: "\u2551",
|
||
boxVH: "\u256C",
|
||
boxVL: "\u2563",
|
||
boxVR: "\u2560",
|
||
boxVh: "\u256B",
|
||
boxVl: "\u2562",
|
||
boxVr: "\u255F",
|
||
boxbox: "\u29C9",
|
||
boxdL: "\u2555",
|
||
boxdR: "\u2552",
|
||
boxdl: "\u2510",
|
||
boxdr: "\u250C",
|
||
boxh: "\u2500",
|
||
boxhD: "\u2565",
|
||
boxhU: "\u2568",
|
||
boxhd: "\u252C",
|
||
boxhu: "\u2534",
|
||
boxminus: "\u229F",
|
||
boxplus: "\u229E",
|
||
boxtimes: "\u22A0",
|
||
boxuL: "\u255B",
|
||
boxuR: "\u2558",
|
||
boxul: "\u2518",
|
||
boxur: "\u2514",
|
||
boxv: "\u2502",
|
||
boxvH: "\u256A",
|
||
boxvL: "\u2561",
|
||
boxvR: "\u255E",
|
||
boxvh: "\u253C",
|
||
boxvl: "\u2524",
|
||
boxvr: "\u251C",
|
||
bprime: "\u2035",
|
||
breve: "\u02D8",
|
||
brvbar: "\xA6",
|
||
bscr: "\u{1D4B7}",
|
||
bsemi: "\u204F",
|
||
bsim: "\u223D",
|
||
bsime: "\u22CD",
|
||
bsol: "\\",
|
||
bsolb: "\u29C5",
|
||
bsolhsub: "\u27C8",
|
||
bull: "\u2022",
|
||
bullet: "\u2022",
|
||
bump: "\u224E",
|
||
bumpE: "\u2AAE",
|
||
bumpe: "\u224F",
|
||
bumpeq: "\u224F",
|
||
cacute: "\u0107",
|
||
cap: "\u2229",
|
||
capand: "\u2A44",
|
||
capbrcup: "\u2A49",
|
||
capcap: "\u2A4B",
|
||
capcup: "\u2A47",
|
||
capdot: "\u2A40",
|
||
caps: "\u2229\uFE00",
|
||
caret: "\u2041",
|
||
caron: "\u02C7",
|
||
ccaps: "\u2A4D",
|
||
ccaron: "\u010D",
|
||
ccedil: "\xE7",
|
||
ccirc: "\u0109",
|
||
ccups: "\u2A4C",
|
||
ccupssm: "\u2A50",
|
||
cdot: "\u010B",
|
||
cedil: "\xB8",
|
||
cemptyv: "\u29B2",
|
||
cent: "\xA2",
|
||
centerdot: "\xB7",
|
||
cfr: "\u{1D520}",
|
||
chcy: "\u0447",
|
||
check: "\u2713",
|
||
checkmark: "\u2713",
|
||
chi: "\u03C7",
|
||
cir: "\u25CB",
|
||
cirE: "\u29C3",
|
||
circ: "\u02C6",
|
||
circeq: "\u2257",
|
||
circlearrowleft: "\u21BA",
|
||
circlearrowright: "\u21BB",
|
||
circledR: "\xAE",
|
||
circledS: "\u24C8",
|
||
circledast: "\u229B",
|
||
circledcirc: "\u229A",
|
||
circleddash: "\u229D",
|
||
cire: "\u2257",
|
||
cirfnint: "\u2A10",
|
||
cirmid: "\u2AEF",
|
||
cirscir: "\u29C2",
|
||
clubs: "\u2663",
|
||
clubsuit: "\u2663",
|
||
colon: ":",
|
||
colone: "\u2254",
|
||
coloneq: "\u2254",
|
||
comma: ",",
|
||
commat: "@",
|
||
comp: "\u2201",
|
||
compfn: "\u2218",
|
||
complement: "\u2201",
|
||
complexes: "\u2102",
|
||
cong: "\u2245",
|
||
congdot: "\u2A6D",
|
||
conint: "\u222E",
|
||
copf: "\u{1D554}",
|
||
coprod: "\u2210",
|
||
copy: "\xA9",
|
||
copysr: "\u2117",
|
||
crarr: "\u21B5",
|
||
cross: "\u2717",
|
||
cscr: "\u{1D4B8}",
|
||
csub: "\u2ACF",
|
||
csube: "\u2AD1",
|
||
csup: "\u2AD0",
|
||
csupe: "\u2AD2",
|
||
ctdot: "\u22EF",
|
||
cudarrl: "\u2938",
|
||
cudarrr: "\u2935",
|
||
cuepr: "\u22DE",
|
||
cuesc: "\u22DF",
|
||
cularr: "\u21B6",
|
||
cularrp: "\u293D",
|
||
cup: "\u222A",
|
||
cupbrcap: "\u2A48",
|
||
cupcap: "\u2A46",
|
||
cupcup: "\u2A4A",
|
||
cupdot: "\u228D",
|
||
cupor: "\u2A45",
|
||
cups: "\u222A\uFE00",
|
||
curarr: "\u21B7",
|
||
curarrm: "\u293C",
|
||
curlyeqprec: "\u22DE",
|
||
curlyeqsucc: "\u22DF",
|
||
curlyvee: "\u22CE",
|
||
curlywedge: "\u22CF",
|
||
curren: "\xA4",
|
||
curvearrowleft: "\u21B6",
|
||
curvearrowright: "\u21B7",
|
||
cuvee: "\u22CE",
|
||
cuwed: "\u22CF",
|
||
cwconint: "\u2232",
|
||
cwint: "\u2231",
|
||
cylcty: "\u232D",
|
||
dArr: "\u21D3",
|
||
dHar: "\u2965",
|
||
dagger: "\u2020",
|
||
daleth: "\u2138",
|
||
darr: "\u2193",
|
||
dash: "\u2010",
|
||
dashv: "\u22A3",
|
||
dbkarow: "\u290F",
|
||
dblac: "\u02DD",
|
||
dcaron: "\u010F",
|
||
dcy: "\u0434",
|
||
dd: "\u2146",
|
||
ddagger: "\u2021",
|
||
ddarr: "\u21CA",
|
||
ddotseq: "\u2A77",
|
||
deg: "\xB0",
|
||
delta: "\u03B4",
|
||
demptyv: "\u29B1",
|
||
dfisht: "\u297F",
|
||
dfr: "\u{1D521}",
|
||
dharl: "\u21C3",
|
||
dharr: "\u21C2",
|
||
diam: "\u22C4",
|
||
diamond: "\u22C4",
|
||
diamondsuit: "\u2666",
|
||
diams: "\u2666",
|
||
die: "\xA8",
|
||
digamma: "\u03DD",
|
||
disin: "\u22F2",
|
||
div: "\xF7",
|
||
divide: "\xF7",
|
||
divideontimes: "\u22C7",
|
||
divonx: "\u22C7",
|
||
djcy: "\u0452",
|
||
dlcorn: "\u231E",
|
||
dlcrop: "\u230D",
|
||
dollar: "$",
|
||
dopf: "\u{1D555}",
|
||
dot: "\u02D9",
|
||
doteq: "\u2250",
|
||
doteqdot: "\u2251",
|
||
dotminus: "\u2238",
|
||
dotplus: "\u2214",
|
||
dotsquare: "\u22A1",
|
||
doublebarwedge: "\u2306",
|
||
downarrow: "\u2193",
|
||
downdownarrows: "\u21CA",
|
||
downharpoonleft: "\u21C3",
|
||
downharpoonright: "\u21C2",
|
||
drbkarow: "\u2910",
|
||
drcorn: "\u231F",
|
||
drcrop: "\u230C",
|
||
dscr: "\u{1D4B9}",
|
||
dscy: "\u0455",
|
||
dsol: "\u29F6",
|
||
dstrok: "\u0111",
|
||
dtdot: "\u22F1",
|
||
dtri: "\u25BF",
|
||
dtrif: "\u25BE",
|
||
duarr: "\u21F5",
|
||
duhar: "\u296F",
|
||
dwangle: "\u29A6",
|
||
dzcy: "\u045F",
|
||
dzigrarr: "\u27FF",
|
||
eDDot: "\u2A77",
|
||
eDot: "\u2251",
|
||
eacute: "\xE9",
|
||
easter: "\u2A6E",
|
||
ecaron: "\u011B",
|
||
ecir: "\u2256",
|
||
ecirc: "\xEA",
|
||
ecolon: "\u2255",
|
||
ecy: "\u044D",
|
||
edot: "\u0117",
|
||
ee: "\u2147",
|
||
efDot: "\u2252",
|
||
efr: "\u{1D522}",
|
||
eg: "\u2A9A",
|
||
egrave: "\xE8",
|
||
egs: "\u2A96",
|
||
egsdot: "\u2A98",
|
||
el: "\u2A99",
|
||
elinters: "\u23E7",
|
||
ell: "\u2113",
|
||
els: "\u2A95",
|
||
elsdot: "\u2A97",
|
||
emacr: "\u0113",
|
||
empty: "\u2205",
|
||
emptyset: "\u2205",
|
||
emptyv: "\u2205",
|
||
emsp13: "\u2004",
|
||
emsp14: "\u2005",
|
||
emsp: "\u2003",
|
||
eng: "\u014B",
|
||
ensp: "\u2002",
|
||
eogon: "\u0119",
|
||
eopf: "\u{1D556}",
|
||
epar: "\u22D5",
|
||
eparsl: "\u29E3",
|
||
eplus: "\u2A71",
|
||
epsi: "\u03B5",
|
||
epsilon: "\u03B5",
|
||
epsiv: "\u03F5",
|
||
eqcirc: "\u2256",
|
||
eqcolon: "\u2255",
|
||
eqsim: "\u2242",
|
||
eqslantgtr: "\u2A96",
|
||
eqslantless: "\u2A95",
|
||
equals: "=",
|
||
equest: "\u225F",
|
||
equiv: "\u2261",
|
||
equivDD: "\u2A78",
|
||
eqvparsl: "\u29E5",
|
||
erDot: "\u2253",
|
||
erarr: "\u2971",
|
||
escr: "\u212F",
|
||
esdot: "\u2250",
|
||
esim: "\u2242",
|
||
eta: "\u03B7",
|
||
eth: "\xF0",
|
||
euml: "\xEB",
|
||
euro: "\u20AC",
|
||
excl: "!",
|
||
exist: "\u2203",
|
||
expectation: "\u2130",
|
||
exponentiale: "\u2147",
|
||
fallingdotseq: "\u2252",
|
||
fcy: "\u0444",
|
||
female: "\u2640",
|
||
ffilig: "\uFB03",
|
||
fflig: "\uFB00",
|
||
ffllig: "\uFB04",
|
||
ffr: "\u{1D523}",
|
||
filig: "\uFB01",
|
||
fjlig: "fj",
|
||
flat: "\u266D",
|
||
fllig: "\uFB02",
|
||
fltns: "\u25B1",
|
||
fnof: "\u0192",
|
||
fopf: "\u{1D557}",
|
||
forall: "\u2200",
|
||
fork: "\u22D4",
|
||
forkv: "\u2AD9",
|
||
fpartint: "\u2A0D",
|
||
frac12: "\xBD",
|
||
frac13: "\u2153",
|
||
frac14: "\xBC",
|
||
frac15: "\u2155",
|
||
frac16: "\u2159",
|
||
frac18: "\u215B",
|
||
frac23: "\u2154",
|
||
frac25: "\u2156",
|
||
frac34: "\xBE",
|
||
frac35: "\u2157",
|
||
frac38: "\u215C",
|
||
frac45: "\u2158",
|
||
frac56: "\u215A",
|
||
frac58: "\u215D",
|
||
frac78: "\u215E",
|
||
frasl: "\u2044",
|
||
frown: "\u2322",
|
||
fscr: "\u{1D4BB}",
|
||
gE: "\u2267",
|
||
gEl: "\u2A8C",
|
||
gacute: "\u01F5",
|
||
gamma: "\u03B3",
|
||
gammad: "\u03DD",
|
||
gap: "\u2A86",
|
||
gbreve: "\u011F",
|
||
gcirc: "\u011D",
|
||
gcy: "\u0433",
|
||
gdot: "\u0121",
|
||
ge: "\u2265",
|
||
gel: "\u22DB",
|
||
geq: "\u2265",
|
||
geqq: "\u2267",
|
||
geqslant: "\u2A7E",
|
||
ges: "\u2A7E",
|
||
gescc: "\u2AA9",
|
||
gesdot: "\u2A80",
|
||
gesdoto: "\u2A82",
|
||
gesdotol: "\u2A84",
|
||
gesl: "\u22DB\uFE00",
|
||
gesles: "\u2A94",
|
||
gfr: "\u{1D524}",
|
||
gg: "\u226B",
|
||
ggg: "\u22D9",
|
||
gimel: "\u2137",
|
||
gjcy: "\u0453",
|
||
gl: "\u2277",
|
||
glE: "\u2A92",
|
||
gla: "\u2AA5",
|
||
glj: "\u2AA4",
|
||
gnE: "\u2269",
|
||
gnap: "\u2A8A",
|
||
gnapprox: "\u2A8A",
|
||
gne: "\u2A88",
|
||
gneq: "\u2A88",
|
||
gneqq: "\u2269",
|
||
gnsim: "\u22E7",
|
||
gopf: "\u{1D558}",
|
||
grave: "`",
|
||
gscr: "\u210A",
|
||
gsim: "\u2273",
|
||
gsime: "\u2A8E",
|
||
gsiml: "\u2A90",
|
||
gt: ">",
|
||
gtcc: "\u2AA7",
|
||
gtcir: "\u2A7A",
|
||
gtdot: "\u22D7",
|
||
gtlPar: "\u2995",
|
||
gtquest: "\u2A7C",
|
||
gtrapprox: "\u2A86",
|
||
gtrarr: "\u2978",
|
||
gtrdot: "\u22D7",
|
||
gtreqless: "\u22DB",
|
||
gtreqqless: "\u2A8C",
|
||
gtrless: "\u2277",
|
||
gtrsim: "\u2273",
|
||
gvertneqq: "\u2269\uFE00",
|
||
gvnE: "\u2269\uFE00",
|
||
hArr: "\u21D4",
|
||
hairsp: "\u200A",
|
||
half: "\xBD",
|
||
hamilt: "\u210B",
|
||
hardcy: "\u044A",
|
||
harr: "\u2194",
|
||
harrcir: "\u2948",
|
||
harrw: "\u21AD",
|
||
hbar: "\u210F",
|
||
hcirc: "\u0125",
|
||
hearts: "\u2665",
|
||
heartsuit: "\u2665",
|
||
hellip: "\u2026",
|
||
hercon: "\u22B9",
|
||
hfr: "\u{1D525}",
|
||
hksearow: "\u2925",
|
||
hkswarow: "\u2926",
|
||
hoarr: "\u21FF",
|
||
homtht: "\u223B",
|
||
hookleftarrow: "\u21A9",
|
||
hookrightarrow: "\u21AA",
|
||
hopf: "\u{1D559}",
|
||
horbar: "\u2015",
|
||
hscr: "\u{1D4BD}",
|
||
hslash: "\u210F",
|
||
hstrok: "\u0127",
|
||
hybull: "\u2043",
|
||
hyphen: "\u2010",
|
||
iacute: "\xED",
|
||
ic: "\u2063",
|
||
icirc: "\xEE",
|
||
icy: "\u0438",
|
||
iecy: "\u0435",
|
||
iexcl: "\xA1",
|
||
iff: "\u21D4",
|
||
ifr: "\u{1D526}",
|
||
igrave: "\xEC",
|
||
ii: "\u2148",
|
||
iiiint: "\u2A0C",
|
||
iiint: "\u222D",
|
||
iinfin: "\u29DC",
|
||
iiota: "\u2129",
|
||
ijlig: "\u0133",
|
||
imacr: "\u012B",
|
||
image: "\u2111",
|
||
imagline: "\u2110",
|
||
imagpart: "\u2111",
|
||
imath: "\u0131",
|
||
imof: "\u22B7",
|
||
imped: "\u01B5",
|
||
in: "\u2208",
|
||
incare: "\u2105",
|
||
infin: "\u221E",
|
||
infintie: "\u29DD",
|
||
inodot: "\u0131",
|
||
int: "\u222B",
|
||
intcal: "\u22BA",
|
||
integers: "\u2124",
|
||
intercal: "\u22BA",
|
||
intlarhk: "\u2A17",
|
||
intprod: "\u2A3C",
|
||
iocy: "\u0451",
|
||
iogon: "\u012F",
|
||
iopf: "\u{1D55A}",
|
||
iota: "\u03B9",
|
||
iprod: "\u2A3C",
|
||
iquest: "\xBF",
|
||
iscr: "\u{1D4BE}",
|
||
isin: "\u2208",
|
||
isinE: "\u22F9",
|
||
isindot: "\u22F5",
|
||
isins: "\u22F4",
|
||
isinsv: "\u22F3",
|
||
isinv: "\u2208",
|
||
it: "\u2062",
|
||
itilde: "\u0129",
|
||
iukcy: "\u0456",
|
||
iuml: "\xEF",
|
||
jcirc: "\u0135",
|
||
jcy: "\u0439",
|
||
jfr: "\u{1D527}",
|
||
jmath: "\u0237",
|
||
jopf: "\u{1D55B}",
|
||
jscr: "\u{1D4BF}",
|
||
jsercy: "\u0458",
|
||
jukcy: "\u0454",
|
||
kappa: "\u03BA",
|
||
kappav: "\u03F0",
|
||
kcedil: "\u0137",
|
||
kcy: "\u043A",
|
||
kfr: "\u{1D528}",
|
||
kgreen: "\u0138",
|
||
khcy: "\u0445",
|
||
kjcy: "\u045C",
|
||
kopf: "\u{1D55C}",
|
||
kscr: "\u{1D4C0}",
|
||
lAarr: "\u21DA",
|
||
lArr: "\u21D0",
|
||
lAtail: "\u291B",
|
||
lBarr: "\u290E",
|
||
lE: "\u2266",
|
||
lEg: "\u2A8B",
|
||
lHar: "\u2962",
|
||
lacute: "\u013A",
|
||
laemptyv: "\u29B4",
|
||
lagran: "\u2112",
|
||
lambda: "\u03BB",
|
||
lang: "\u27E8",
|
||
langd: "\u2991",
|
||
langle: "\u27E8",
|
||
lap: "\u2A85",
|
||
laquo: "\xAB",
|
||
larr: "\u2190",
|
||
larrb: "\u21E4",
|
||
larrbfs: "\u291F",
|
||
larrfs: "\u291D",
|
||
larrhk: "\u21A9",
|
||
larrlp: "\u21AB",
|
||
larrpl: "\u2939",
|
||
larrsim: "\u2973",
|
||
larrtl: "\u21A2",
|
||
lat: "\u2AAB",
|
||
latail: "\u2919",
|
||
late: "\u2AAD",
|
||
lates: "\u2AAD\uFE00",
|
||
lbarr: "\u290C",
|
||
lbbrk: "\u2772",
|
||
lbrace: "{",
|
||
lbrack: "[",
|
||
lbrke: "\u298B",
|
||
lbrksld: "\u298F",
|
||
lbrkslu: "\u298D",
|
||
lcaron: "\u013E",
|
||
lcedil: "\u013C",
|
||
lceil: "\u2308",
|
||
lcub: "{",
|
||
lcy: "\u043B",
|
||
ldca: "\u2936",
|
||
ldquo: "\u201C",
|
||
ldquor: "\u201E",
|
||
ldrdhar: "\u2967",
|
||
ldrushar: "\u294B",
|
||
ldsh: "\u21B2",
|
||
le: "\u2264",
|
||
leftarrow: "\u2190",
|
||
leftarrowtail: "\u21A2",
|
||
leftharpoondown: "\u21BD",
|
||
leftharpoonup: "\u21BC",
|
||
leftleftarrows: "\u21C7",
|
||
leftrightarrow: "\u2194",
|
||
leftrightarrows: "\u21C6",
|
||
leftrightharpoons: "\u21CB",
|
||
leftrightsquigarrow: "\u21AD",
|
||
leftthreetimes: "\u22CB",
|
||
leg: "\u22DA",
|
||
leq: "\u2264",
|
||
leqq: "\u2266",
|
||
leqslant: "\u2A7D",
|
||
les: "\u2A7D",
|
||
lescc: "\u2AA8",
|
||
lesdot: "\u2A7F",
|
||
lesdoto: "\u2A81",
|
||
lesdotor: "\u2A83",
|
||
lesg: "\u22DA\uFE00",
|
||
lesges: "\u2A93",
|
||
lessapprox: "\u2A85",
|
||
lessdot: "\u22D6",
|
||
lesseqgtr: "\u22DA",
|
||
lesseqqgtr: "\u2A8B",
|
||
lessgtr: "\u2276",
|
||
lesssim: "\u2272",
|
||
lfisht: "\u297C",
|
||
lfloor: "\u230A",
|
||
lfr: "\u{1D529}",
|
||
lg: "\u2276",
|
||
lgE: "\u2A91",
|
||
lhard: "\u21BD",
|
||
lharu: "\u21BC",
|
||
lharul: "\u296A",
|
||
lhblk: "\u2584",
|
||
ljcy: "\u0459",
|
||
ll: "\u226A",
|
||
llarr: "\u21C7",
|
||
llcorner: "\u231E",
|
||
llhard: "\u296B",
|
||
lltri: "\u25FA",
|
||
lmidot: "\u0140",
|
||
lmoust: "\u23B0",
|
||
lmoustache: "\u23B0",
|
||
lnE: "\u2268",
|
||
lnap: "\u2A89",
|
||
lnapprox: "\u2A89",
|
||
lne: "\u2A87",
|
||
lneq: "\u2A87",
|
||
lneqq: "\u2268",
|
||
lnsim: "\u22E6",
|
||
loang: "\u27EC",
|
||
loarr: "\u21FD",
|
||
lobrk: "\u27E6",
|
||
longleftarrow: "\u27F5",
|
||
longleftrightarrow: "\u27F7",
|
||
longmapsto: "\u27FC",
|
||
longrightarrow: "\u27F6",
|
||
looparrowleft: "\u21AB",
|
||
looparrowright: "\u21AC",
|
||
lopar: "\u2985",
|
||
lopf: "\u{1D55D}",
|
||
loplus: "\u2A2D",
|
||
lotimes: "\u2A34",
|
||
lowast: "\u2217",
|
||
lowbar: "_",
|
||
loz: "\u25CA",
|
||
lozenge: "\u25CA",
|
||
lozf: "\u29EB",
|
||
lpar: "(",
|
||
lparlt: "\u2993",
|
||
lrarr: "\u21C6",
|
||
lrcorner: "\u231F",
|
||
lrhar: "\u21CB",
|
||
lrhard: "\u296D",
|
||
lrm: "\u200E",
|
||
lrtri: "\u22BF",
|
||
lsaquo: "\u2039",
|
||
lscr: "\u{1D4C1}",
|
||
lsh: "\u21B0",
|
||
lsim: "\u2272",
|
||
lsime: "\u2A8D",
|
||
lsimg: "\u2A8F",
|
||
lsqb: "[",
|
||
lsquo: "\u2018",
|
||
lsquor: "\u201A",
|
||
lstrok: "\u0142",
|
||
lt: "<",
|
||
ltcc: "\u2AA6",
|
||
ltcir: "\u2A79",
|
||
ltdot: "\u22D6",
|
||
lthree: "\u22CB",
|
||
ltimes: "\u22C9",
|
||
ltlarr: "\u2976",
|
||
ltquest: "\u2A7B",
|
||
ltrPar: "\u2996",
|
||
ltri: "\u25C3",
|
||
ltrie: "\u22B4",
|
||
ltrif: "\u25C2",
|
||
lurdshar: "\u294A",
|
||
luruhar: "\u2966",
|
||
lvertneqq: "\u2268\uFE00",
|
||
lvnE: "\u2268\uFE00",
|
||
mDDot: "\u223A",
|
||
macr: "\xAF",
|
||
male: "\u2642",
|
||
malt: "\u2720",
|
||
maltese: "\u2720",
|
||
map: "\u21A6",
|
||
mapsto: "\u21A6",
|
||
mapstodown: "\u21A7",
|
||
mapstoleft: "\u21A4",
|
||
mapstoup: "\u21A5",
|
||
marker: "\u25AE",
|
||
mcomma: "\u2A29",
|
||
mcy: "\u043C",
|
||
mdash: "\u2014",
|
||
measuredangle: "\u2221",
|
||
mfr: "\u{1D52A}",
|
||
mho: "\u2127",
|
||
micro: "\xB5",
|
||
mid: "\u2223",
|
||
midast: "*",
|
||
midcir: "\u2AF0",
|
||
middot: "\xB7",
|
||
minus: "\u2212",
|
||
minusb: "\u229F",
|
||
minusd: "\u2238",
|
||
minusdu: "\u2A2A",
|
||
mlcp: "\u2ADB",
|
||
mldr: "\u2026",
|
||
mnplus: "\u2213",
|
||
models: "\u22A7",
|
||
mopf: "\u{1D55E}",
|
||
mp: "\u2213",
|
||
mscr: "\u{1D4C2}",
|
||
mstpos: "\u223E",
|
||
mu: "\u03BC",
|
||
multimap: "\u22B8",
|
||
mumap: "\u22B8",
|
||
nGg: "\u22D9\u0338",
|
||
nGt: "\u226B\u20D2",
|
||
nGtv: "\u226B\u0338",
|
||
nLeftarrow: "\u21CD",
|
||
nLeftrightarrow: "\u21CE",
|
||
nLl: "\u22D8\u0338",
|
||
nLt: "\u226A\u20D2",
|
||
nLtv: "\u226A\u0338",
|
||
nRightarrow: "\u21CF",
|
||
nVDash: "\u22AF",
|
||
nVdash: "\u22AE",
|
||
nabla: "\u2207",
|
||
nacute: "\u0144",
|
||
nang: "\u2220\u20D2",
|
||
nap: "\u2249",
|
||
napE: "\u2A70\u0338",
|
||
napid: "\u224B\u0338",
|
||
napos: "\u0149",
|
||
napprox: "\u2249",
|
||
natur: "\u266E",
|
||
natural: "\u266E",
|
||
naturals: "\u2115",
|
||
nbsp: "\xA0",
|
||
nbump: "\u224E\u0338",
|
||
nbumpe: "\u224F\u0338",
|
||
ncap: "\u2A43",
|
||
ncaron: "\u0148",
|
||
ncedil: "\u0146",
|
||
ncong: "\u2247",
|
||
ncongdot: "\u2A6D\u0338",
|
||
ncup: "\u2A42",
|
||
ncy: "\u043D",
|
||
ndash: "\u2013",
|
||
ne: "\u2260",
|
||
neArr: "\u21D7",
|
||
nearhk: "\u2924",
|
||
nearr: "\u2197",
|
||
nearrow: "\u2197",
|
||
nedot: "\u2250\u0338",
|
||
nequiv: "\u2262",
|
||
nesear: "\u2928",
|
||
nesim: "\u2242\u0338",
|
||
nexist: "\u2204",
|
||
nexists: "\u2204",
|
||
nfr: "\u{1D52B}",
|
||
ngE: "\u2267\u0338",
|
||
nge: "\u2271",
|
||
ngeq: "\u2271",
|
||
ngeqq: "\u2267\u0338",
|
||
ngeqslant: "\u2A7E\u0338",
|
||
nges: "\u2A7E\u0338",
|
||
ngsim: "\u2275",
|
||
ngt: "\u226F",
|
||
ngtr: "\u226F",
|
||
nhArr: "\u21CE",
|
||
nharr: "\u21AE",
|
||
nhpar: "\u2AF2",
|
||
ni: "\u220B",
|
||
nis: "\u22FC",
|
||
nisd: "\u22FA",
|
||
niv: "\u220B",
|
||
njcy: "\u045A",
|
||
nlArr: "\u21CD",
|
||
nlE: "\u2266\u0338",
|
||
nlarr: "\u219A",
|
||
nldr: "\u2025",
|
||
nle: "\u2270",
|
||
nleftarrow: "\u219A",
|
||
nleftrightarrow: "\u21AE",
|
||
nleq: "\u2270",
|
||
nleqq: "\u2266\u0338",
|
||
nleqslant: "\u2A7D\u0338",
|
||
nles: "\u2A7D\u0338",
|
||
nless: "\u226E",
|
||
nlsim: "\u2274",
|
||
nlt: "\u226E",
|
||
nltri: "\u22EA",
|
||
nltrie: "\u22EC",
|
||
nmid: "\u2224",
|
||
nopf: "\u{1D55F}",
|
||
not: "\xAC",
|
||
notin: "\u2209",
|
||
notinE: "\u22F9\u0338",
|
||
notindot: "\u22F5\u0338",
|
||
notinva: "\u2209",
|
||
notinvb: "\u22F7",
|
||
notinvc: "\u22F6",
|
||
notni: "\u220C",
|
||
notniva: "\u220C",
|
||
notnivb: "\u22FE",
|
||
notnivc: "\u22FD",
|
||
npar: "\u2226",
|
||
nparallel: "\u2226",
|
||
nparsl: "\u2AFD\u20E5",
|
||
npart: "\u2202\u0338",
|
||
npolint: "\u2A14",
|
||
npr: "\u2280",
|
||
nprcue: "\u22E0",
|
||
npre: "\u2AAF\u0338",
|
||
nprec: "\u2280",
|
||
npreceq: "\u2AAF\u0338",
|
||
nrArr: "\u21CF",
|
||
nrarr: "\u219B",
|
||
nrarrc: "\u2933\u0338",
|
||
nrarrw: "\u219D\u0338",
|
||
nrightarrow: "\u219B",
|
||
nrtri: "\u22EB",
|
||
nrtrie: "\u22ED",
|
||
nsc: "\u2281",
|
||
nsccue: "\u22E1",
|
||
nsce: "\u2AB0\u0338",
|
||
nscr: "\u{1D4C3}",
|
||
nshortmid: "\u2224",
|
||
nshortparallel: "\u2226",
|
||
nsim: "\u2241",
|
||
nsime: "\u2244",
|
||
nsimeq: "\u2244",
|
||
nsmid: "\u2224",
|
||
nspar: "\u2226",
|
||
nsqsube: "\u22E2",
|
||
nsqsupe: "\u22E3",
|
||
nsub: "\u2284",
|
||
nsubE: "\u2AC5\u0338",
|
||
nsube: "\u2288",
|
||
nsubset: "\u2282\u20D2",
|
||
nsubseteq: "\u2288",
|
||
nsubseteqq: "\u2AC5\u0338",
|
||
nsucc: "\u2281",
|
||
nsucceq: "\u2AB0\u0338",
|
||
nsup: "\u2285",
|
||
nsupE: "\u2AC6\u0338",
|
||
nsupe: "\u2289",
|
||
nsupset: "\u2283\u20D2",
|
||
nsupseteq: "\u2289",
|
||
nsupseteqq: "\u2AC6\u0338",
|
||
ntgl: "\u2279",
|
||
ntilde: "\xF1",
|
||
ntlg: "\u2278",
|
||
ntriangleleft: "\u22EA",
|
||
ntrianglelefteq: "\u22EC",
|
||
ntriangleright: "\u22EB",
|
||
ntrianglerighteq: "\u22ED",
|
||
nu: "\u03BD",
|
||
num: "#",
|
||
numero: "\u2116",
|
||
numsp: "\u2007",
|
||
nvDash: "\u22AD",
|
||
nvHarr: "\u2904",
|
||
nvap: "\u224D\u20D2",
|
||
nvdash: "\u22AC",
|
||
nvge: "\u2265\u20D2",
|
||
nvgt: ">\u20D2",
|
||
nvinfin: "\u29DE",
|
||
nvlArr: "\u2902",
|
||
nvle: "\u2264\u20D2",
|
||
nvlt: "<\u20D2",
|
||
nvltrie: "\u22B4\u20D2",
|
||
nvrArr: "\u2903",
|
||
nvrtrie: "\u22B5\u20D2",
|
||
nvsim: "\u223C\u20D2",
|
||
nwArr: "\u21D6",
|
||
nwarhk: "\u2923",
|
||
nwarr: "\u2196",
|
||
nwarrow: "\u2196",
|
||
nwnear: "\u2927",
|
||
oS: "\u24C8",
|
||
oacute: "\xF3",
|
||
oast: "\u229B",
|
||
ocir: "\u229A",
|
||
ocirc: "\xF4",
|
||
ocy: "\u043E",
|
||
odash: "\u229D",
|
||
odblac: "\u0151",
|
||
odiv: "\u2A38",
|
||
odot: "\u2299",
|
||
odsold: "\u29BC",
|
||
oelig: "\u0153",
|
||
ofcir: "\u29BF",
|
||
ofr: "\u{1D52C}",
|
||
ogon: "\u02DB",
|
||
ograve: "\xF2",
|
||
ogt: "\u29C1",
|
||
ohbar: "\u29B5",
|
||
ohm: "\u03A9",
|
||
oint: "\u222E",
|
||
olarr: "\u21BA",
|
||
olcir: "\u29BE",
|
||
olcross: "\u29BB",
|
||
oline: "\u203E",
|
||
olt: "\u29C0",
|
||
omacr: "\u014D",
|
||
omega: "\u03C9",
|
||
omicron: "\u03BF",
|
||
omid: "\u29B6",
|
||
ominus: "\u2296",
|
||
oopf: "\u{1D560}",
|
||
opar: "\u29B7",
|
||
operp: "\u29B9",
|
||
oplus: "\u2295",
|
||
or: "\u2228",
|
||
orarr: "\u21BB",
|
||
ord: "\u2A5D",
|
||
order: "\u2134",
|
||
orderof: "\u2134",
|
||
ordf: "\xAA",
|
||
ordm: "\xBA",
|
||
origof: "\u22B6",
|
||
oror: "\u2A56",
|
||
orslope: "\u2A57",
|
||
orv: "\u2A5B",
|
||
oscr: "\u2134",
|
||
oslash: "\xF8",
|
||
osol: "\u2298",
|
||
otilde: "\xF5",
|
||
otimes: "\u2297",
|
||
otimesas: "\u2A36",
|
||
ouml: "\xF6",
|
||
ovbar: "\u233D",
|
||
par: "\u2225",
|
||
para: "\xB6",
|
||
parallel: "\u2225",
|
||
parsim: "\u2AF3",
|
||
parsl: "\u2AFD",
|
||
part: "\u2202",
|
||
pcy: "\u043F",
|
||
percnt: "%",
|
||
period: ".",
|
||
permil: "\u2030",
|
||
perp: "\u22A5",
|
||
pertenk: "\u2031",
|
||
pfr: "\u{1D52D}",
|
||
phi: "\u03C6",
|
||
phiv: "\u03D5",
|
||
phmmat: "\u2133",
|
||
phone: "\u260E",
|
||
pi: "\u03C0",
|
||
pitchfork: "\u22D4",
|
||
piv: "\u03D6",
|
||
planck: "\u210F",
|
||
planckh: "\u210E",
|
||
plankv: "\u210F",
|
||
plus: "+",
|
||
plusacir: "\u2A23",
|
||
plusb: "\u229E",
|
||
pluscir: "\u2A22",
|
||
plusdo: "\u2214",
|
||
plusdu: "\u2A25",
|
||
pluse: "\u2A72",
|
||
plusmn: "\xB1",
|
||
plussim: "\u2A26",
|
||
plustwo: "\u2A27",
|
||
pm: "\xB1",
|
||
pointint: "\u2A15",
|
||
popf: "\u{1D561}",
|
||
pound: "\xA3",
|
||
pr: "\u227A",
|
||
prE: "\u2AB3",
|
||
prap: "\u2AB7",
|
||
prcue: "\u227C",
|
||
pre: "\u2AAF",
|
||
prec: "\u227A",
|
||
precapprox: "\u2AB7",
|
||
preccurlyeq: "\u227C",
|
||
preceq: "\u2AAF",
|
||
precnapprox: "\u2AB9",
|
||
precneqq: "\u2AB5",
|
||
precnsim: "\u22E8",
|
||
precsim: "\u227E",
|
||
prime: "\u2032",
|
||
primes: "\u2119",
|
||
prnE: "\u2AB5",
|
||
prnap: "\u2AB9",
|
||
prnsim: "\u22E8",
|
||
prod: "\u220F",
|
||
profalar: "\u232E",
|
||
profline: "\u2312",
|
||
profsurf: "\u2313",
|
||
prop: "\u221D",
|
||
propto: "\u221D",
|
||
prsim: "\u227E",
|
||
prurel: "\u22B0",
|
||
pscr: "\u{1D4C5}",
|
||
psi: "\u03C8",
|
||
puncsp: "\u2008",
|
||
qfr: "\u{1D52E}",
|
||
qint: "\u2A0C",
|
||
qopf: "\u{1D562}",
|
||
qprime: "\u2057",
|
||
qscr: "\u{1D4C6}",
|
||
quaternions: "\u210D",
|
||
quatint: "\u2A16",
|
||
quest: "?",
|
||
questeq: "\u225F",
|
||
quot: '"',
|
||
rAarr: "\u21DB",
|
||
rArr: "\u21D2",
|
||
rAtail: "\u291C",
|
||
rBarr: "\u290F",
|
||
rHar: "\u2964",
|
||
race: "\u223D\u0331",
|
||
racute: "\u0155",
|
||
radic: "\u221A",
|
||
raemptyv: "\u29B3",
|
||
rang: "\u27E9",
|
||
rangd: "\u2992",
|
||
range: "\u29A5",
|
||
rangle: "\u27E9",
|
||
raquo: "\xBB",
|
||
rarr: "\u2192",
|
||
rarrap: "\u2975",
|
||
rarrb: "\u21E5",
|
||
rarrbfs: "\u2920",
|
||
rarrc: "\u2933",
|
||
rarrfs: "\u291E",
|
||
rarrhk: "\u21AA",
|
||
rarrlp: "\u21AC",
|
||
rarrpl: "\u2945",
|
||
rarrsim: "\u2974",
|
||
rarrtl: "\u21A3",
|
||
rarrw: "\u219D",
|
||
ratail: "\u291A",
|
||
ratio: "\u2236",
|
||
rationals: "\u211A",
|
||
rbarr: "\u290D",
|
||
rbbrk: "\u2773",
|
||
rbrace: "}",
|
||
rbrack: "]",
|
||
rbrke: "\u298C",
|
||
rbrksld: "\u298E",
|
||
rbrkslu: "\u2990",
|
||
rcaron: "\u0159",
|
||
rcedil: "\u0157",
|
||
rceil: "\u2309",
|
||
rcub: "}",
|
||
rcy: "\u0440",
|
||
rdca: "\u2937",
|
||
rdldhar: "\u2969",
|
||
rdquo: "\u201D",
|
||
rdquor: "\u201D",
|
||
rdsh: "\u21B3",
|
||
real: "\u211C",
|
||
realine: "\u211B",
|
||
realpart: "\u211C",
|
||
reals: "\u211D",
|
||
rect: "\u25AD",
|
||
reg: "\xAE",
|
||
rfisht: "\u297D",
|
||
rfloor: "\u230B",
|
||
rfr: "\u{1D52F}",
|
||
rhard: "\u21C1",
|
||
rharu: "\u21C0",
|
||
rharul: "\u296C",
|
||
rho: "\u03C1",
|
||
rhov: "\u03F1",
|
||
rightarrow: "\u2192",
|
||
rightarrowtail: "\u21A3",
|
||
rightharpoondown: "\u21C1",
|
||
rightharpoonup: "\u21C0",
|
||
rightleftarrows: "\u21C4",
|
||
rightleftharpoons: "\u21CC",
|
||
rightrightarrows: "\u21C9",
|
||
rightsquigarrow: "\u219D",
|
||
rightthreetimes: "\u22CC",
|
||
ring: "\u02DA",
|
||
risingdotseq: "\u2253",
|
||
rlarr: "\u21C4",
|
||
rlhar: "\u21CC",
|
||
rlm: "\u200F",
|
||
rmoust: "\u23B1",
|
||
rmoustache: "\u23B1",
|
||
rnmid: "\u2AEE",
|
||
roang: "\u27ED",
|
||
roarr: "\u21FE",
|
||
robrk: "\u27E7",
|
||
ropar: "\u2986",
|
||
ropf: "\u{1D563}",
|
||
roplus: "\u2A2E",
|
||
rotimes: "\u2A35",
|
||
rpar: ")",
|
||
rpargt: "\u2994",
|
||
rppolint: "\u2A12",
|
||
rrarr: "\u21C9",
|
||
rsaquo: "\u203A",
|
||
rscr: "\u{1D4C7}",
|
||
rsh: "\u21B1",
|
||
rsqb: "]",
|
||
rsquo: "\u2019",
|
||
rsquor: "\u2019",
|
||
rthree: "\u22CC",
|
||
rtimes: "\u22CA",
|
||
rtri: "\u25B9",
|
||
rtrie: "\u22B5",
|
||
rtrif: "\u25B8",
|
||
rtriltri: "\u29CE",
|
||
ruluhar: "\u2968",
|
||
rx: "\u211E",
|
||
sacute: "\u015B",
|
||
sbquo: "\u201A",
|
||
sc: "\u227B",
|
||
scE: "\u2AB4",
|
||
scap: "\u2AB8",
|
||
scaron: "\u0161",
|
||
sccue: "\u227D",
|
||
sce: "\u2AB0",
|
||
scedil: "\u015F",
|
||
scirc: "\u015D",
|
||
scnE: "\u2AB6",
|
||
scnap: "\u2ABA",
|
||
scnsim: "\u22E9",
|
||
scpolint: "\u2A13",
|
||
scsim: "\u227F",
|
||
scy: "\u0441",
|
||
sdot: "\u22C5",
|
||
sdotb: "\u22A1",
|
||
sdote: "\u2A66",
|
||
seArr: "\u21D8",
|
||
searhk: "\u2925",
|
||
searr: "\u2198",
|
||
searrow: "\u2198",
|
||
sect: "\xA7",
|
||
semi: ";",
|
||
seswar: "\u2929",
|
||
setminus: "\u2216",
|
||
setmn: "\u2216",
|
||
sext: "\u2736",
|
||
sfr: "\u{1D530}",
|
||
sfrown: "\u2322",
|
||
sharp: "\u266F",
|
||
shchcy: "\u0449",
|
||
shcy: "\u0448",
|
||
shortmid: "\u2223",
|
||
shortparallel: "\u2225",
|
||
shy: "\xAD",
|
||
sigma: "\u03C3",
|
||
sigmaf: "\u03C2",
|
||
sigmav: "\u03C2",
|
||
sim: "\u223C",
|
||
simdot: "\u2A6A",
|
||
sime: "\u2243",
|
||
simeq: "\u2243",
|
||
simg: "\u2A9E",
|
||
simgE: "\u2AA0",
|
||
siml: "\u2A9D",
|
||
simlE: "\u2A9F",
|
||
simne: "\u2246",
|
||
simplus: "\u2A24",
|
||
simrarr: "\u2972",
|
||
slarr: "\u2190",
|
||
smallsetminus: "\u2216",
|
||
smashp: "\u2A33",
|
||
smeparsl: "\u29E4",
|
||
smid: "\u2223",
|
||
smile: "\u2323",
|
||
smt: "\u2AAA",
|
||
smte: "\u2AAC",
|
||
smtes: "\u2AAC\uFE00",
|
||
softcy: "\u044C",
|
||
sol: "/",
|
||
solb: "\u29C4",
|
||
solbar: "\u233F",
|
||
sopf: "\u{1D564}",
|
||
spades: "\u2660",
|
||
spadesuit: "\u2660",
|
||
spar: "\u2225",
|
||
sqcap: "\u2293",
|
||
sqcaps: "\u2293\uFE00",
|
||
sqcup: "\u2294",
|
||
sqcups: "\u2294\uFE00",
|
||
sqsub: "\u228F",
|
||
sqsube: "\u2291",
|
||
sqsubset: "\u228F",
|
||
sqsubseteq: "\u2291",
|
||
sqsup: "\u2290",
|
||
sqsupe: "\u2292",
|
||
sqsupset: "\u2290",
|
||
sqsupseteq: "\u2292",
|
||
squ: "\u25A1",
|
||
square: "\u25A1",
|
||
squarf: "\u25AA",
|
||
squf: "\u25AA",
|
||
srarr: "\u2192",
|
||
sscr: "\u{1D4C8}",
|
||
ssetmn: "\u2216",
|
||
ssmile: "\u2323",
|
||
sstarf: "\u22C6",
|
||
star: "\u2606",
|
||
starf: "\u2605",
|
||
straightepsilon: "\u03F5",
|
||
straightphi: "\u03D5",
|
||
strns: "\xAF",
|
||
sub: "\u2282",
|
||
subE: "\u2AC5",
|
||
subdot: "\u2ABD",
|
||
sube: "\u2286",
|
||
subedot: "\u2AC3",
|
||
submult: "\u2AC1",
|
||
subnE: "\u2ACB",
|
||
subne: "\u228A",
|
||
subplus: "\u2ABF",
|
||
subrarr: "\u2979",
|
||
subset: "\u2282",
|
||
subseteq: "\u2286",
|
||
subseteqq: "\u2AC5",
|
||
subsetneq: "\u228A",
|
||
subsetneqq: "\u2ACB",
|
||
subsim: "\u2AC7",
|
||
subsub: "\u2AD5",
|
||
subsup: "\u2AD3",
|
||
succ: "\u227B",
|
||
succapprox: "\u2AB8",
|
||
succcurlyeq: "\u227D",
|
||
succeq: "\u2AB0",
|
||
succnapprox: "\u2ABA",
|
||
succneqq: "\u2AB6",
|
||
succnsim: "\u22E9",
|
||
succsim: "\u227F",
|
||
sum: "\u2211",
|
||
sung: "\u266A",
|
||
sup1: "\xB9",
|
||
sup2: "\xB2",
|
||
sup3: "\xB3",
|
||
sup: "\u2283",
|
||
supE: "\u2AC6",
|
||
supdot: "\u2ABE",
|
||
supdsub: "\u2AD8",
|
||
supe: "\u2287",
|
||
supedot: "\u2AC4",
|
||
suphsol: "\u27C9",
|
||
suphsub: "\u2AD7",
|
||
suplarr: "\u297B",
|
||
supmult: "\u2AC2",
|
||
supnE: "\u2ACC",
|
||
supne: "\u228B",
|
||
supplus: "\u2AC0",
|
||
supset: "\u2283",
|
||
supseteq: "\u2287",
|
||
supseteqq: "\u2AC6",
|
||
supsetneq: "\u228B",
|
||
supsetneqq: "\u2ACC",
|
||
supsim: "\u2AC8",
|
||
supsub: "\u2AD4",
|
||
supsup: "\u2AD6",
|
||
swArr: "\u21D9",
|
||
swarhk: "\u2926",
|
||
swarr: "\u2199",
|
||
swarrow: "\u2199",
|
||
swnwar: "\u292A",
|
||
szlig: "\xDF",
|
||
target: "\u2316",
|
||
tau: "\u03C4",
|
||
tbrk: "\u23B4",
|
||
tcaron: "\u0165",
|
||
tcedil: "\u0163",
|
||
tcy: "\u0442",
|
||
tdot: "\u20DB",
|
||
telrec: "\u2315",
|
||
tfr: "\u{1D531}",
|
||
there4: "\u2234",
|
||
therefore: "\u2234",
|
||
theta: "\u03B8",
|
||
thetasym: "\u03D1",
|
||
thetav: "\u03D1",
|
||
thickapprox: "\u2248",
|
||
thicksim: "\u223C",
|
||
thinsp: "\u2009",
|
||
thkap: "\u2248",
|
||
thksim: "\u223C",
|
||
thorn: "\xFE",
|
||
tilde: "\u02DC",
|
||
times: "\xD7",
|
||
timesb: "\u22A0",
|
||
timesbar: "\u2A31",
|
||
timesd: "\u2A30",
|
||
tint: "\u222D",
|
||
toea: "\u2928",
|
||
top: "\u22A4",
|
||
topbot: "\u2336",
|
||
topcir: "\u2AF1",
|
||
topf: "\u{1D565}",
|
||
topfork: "\u2ADA",
|
||
tosa: "\u2929",
|
||
tprime: "\u2034",
|
||
trade: "\u2122",
|
||
triangle: "\u25B5",
|
||
triangledown: "\u25BF",
|
||
triangleleft: "\u25C3",
|
||
trianglelefteq: "\u22B4",
|
||
triangleq: "\u225C",
|
||
triangleright: "\u25B9",
|
||
trianglerighteq: "\u22B5",
|
||
tridot: "\u25EC",
|
||
trie: "\u225C",
|
||
triminus: "\u2A3A",
|
||
triplus: "\u2A39",
|
||
trisb: "\u29CD",
|
||
tritime: "\u2A3B",
|
||
trpezium: "\u23E2",
|
||
tscr: "\u{1D4C9}",
|
||
tscy: "\u0446",
|
||
tshcy: "\u045B",
|
||
tstrok: "\u0167",
|
||
twixt: "\u226C",
|
||
twoheadleftarrow: "\u219E",
|
||
twoheadrightarrow: "\u21A0",
|
||
uArr: "\u21D1",
|
||
uHar: "\u2963",
|
||
uacute: "\xFA",
|
||
uarr: "\u2191",
|
||
ubrcy: "\u045E",
|
||
ubreve: "\u016D",
|
||
ucirc: "\xFB",
|
||
ucy: "\u0443",
|
||
udarr: "\u21C5",
|
||
udblac: "\u0171",
|
||
udhar: "\u296E",
|
||
ufisht: "\u297E",
|
||
ufr: "\u{1D532}",
|
||
ugrave: "\xF9",
|
||
uharl: "\u21BF",
|
||
uharr: "\u21BE",
|
||
uhblk: "\u2580",
|
||
ulcorn: "\u231C",
|
||
ulcorner: "\u231C",
|
||
ulcrop: "\u230F",
|
||
ultri: "\u25F8",
|
||
umacr: "\u016B",
|
||
uml: "\xA8",
|
||
uogon: "\u0173",
|
||
uopf: "\u{1D566}",
|
||
uparrow: "\u2191",
|
||
updownarrow: "\u2195",
|
||
upharpoonleft: "\u21BF",
|
||
upharpoonright: "\u21BE",
|
||
uplus: "\u228E",
|
||
upsi: "\u03C5",
|
||
upsih: "\u03D2",
|
||
upsilon: "\u03C5",
|
||
upuparrows: "\u21C8",
|
||
urcorn: "\u231D",
|
||
urcorner: "\u231D",
|
||
urcrop: "\u230E",
|
||
uring: "\u016F",
|
||
urtri: "\u25F9",
|
||
uscr: "\u{1D4CA}",
|
||
utdot: "\u22F0",
|
||
utilde: "\u0169",
|
||
utri: "\u25B5",
|
||
utrif: "\u25B4",
|
||
uuarr: "\u21C8",
|
||
uuml: "\xFC",
|
||
uwangle: "\u29A7",
|
||
vArr: "\u21D5",
|
||
vBar: "\u2AE8",
|
||
vBarv: "\u2AE9",
|
||
vDash: "\u22A8",
|
||
vangrt: "\u299C",
|
||
varepsilon: "\u03F5",
|
||
varkappa: "\u03F0",
|
||
varnothing: "\u2205",
|
||
varphi: "\u03D5",
|
||
varpi: "\u03D6",
|
||
varpropto: "\u221D",
|
||
varr: "\u2195",
|
||
varrho: "\u03F1",
|
||
varsigma: "\u03C2",
|
||
varsubsetneq: "\u228A\uFE00",
|
||
varsubsetneqq: "\u2ACB\uFE00",
|
||
varsupsetneq: "\u228B\uFE00",
|
||
varsupsetneqq: "\u2ACC\uFE00",
|
||
vartheta: "\u03D1",
|
||
vartriangleleft: "\u22B2",
|
||
vartriangleright: "\u22B3",
|
||
vcy: "\u0432",
|
||
vdash: "\u22A2",
|
||
vee: "\u2228",
|
||
veebar: "\u22BB",
|
||
veeeq: "\u225A",
|
||
vellip: "\u22EE",
|
||
verbar: "|",
|
||
vert: "|",
|
||
vfr: "\u{1D533}",
|
||
vltri: "\u22B2",
|
||
vnsub: "\u2282\u20D2",
|
||
vnsup: "\u2283\u20D2",
|
||
vopf: "\u{1D567}",
|
||
vprop: "\u221D",
|
||
vrtri: "\u22B3",
|
||
vscr: "\u{1D4CB}",
|
||
vsubnE: "\u2ACB\uFE00",
|
||
vsubne: "\u228A\uFE00",
|
||
vsupnE: "\u2ACC\uFE00",
|
||
vsupne: "\u228B\uFE00",
|
||
vzigzag: "\u299A",
|
||
wcirc: "\u0175",
|
||
wedbar: "\u2A5F",
|
||
wedge: "\u2227",
|
||
wedgeq: "\u2259",
|
||
weierp: "\u2118",
|
||
wfr: "\u{1D534}",
|
||
wopf: "\u{1D568}",
|
||
wp: "\u2118",
|
||
wr: "\u2240",
|
||
wreath: "\u2240",
|
||
wscr: "\u{1D4CC}",
|
||
xcap: "\u22C2",
|
||
xcirc: "\u25EF",
|
||
xcup: "\u22C3",
|
||
xdtri: "\u25BD",
|
||
xfr: "\u{1D535}",
|
||
xhArr: "\u27FA",
|
||
xharr: "\u27F7",
|
||
xi: "\u03BE",
|
||
xlArr: "\u27F8",
|
||
xlarr: "\u27F5",
|
||
xmap: "\u27FC",
|
||
xnis: "\u22FB",
|
||
xodot: "\u2A00",
|
||
xopf: "\u{1D569}",
|
||
xoplus: "\u2A01",
|
||
xotime: "\u2A02",
|
||
xrArr: "\u27F9",
|
||
xrarr: "\u27F6",
|
||
xscr: "\u{1D4CD}",
|
||
xsqcup: "\u2A06",
|
||
xuplus: "\u2A04",
|
||
xutri: "\u25B3",
|
||
xvee: "\u22C1",
|
||
xwedge: "\u22C0",
|
||
yacute: "\xFD",
|
||
yacy: "\u044F",
|
||
ycirc: "\u0177",
|
||
ycy: "\u044B",
|
||
yen: "\xA5",
|
||
yfr: "\u{1D536}",
|
||
yicy: "\u0457",
|
||
yopf: "\u{1D56A}",
|
||
yscr: "\u{1D4CE}",
|
||
yucy: "\u044E",
|
||
yuml: "\xFF",
|
||
zacute: "\u017A",
|
||
zcaron: "\u017E",
|
||
zcy: "\u0437",
|
||
zdot: "\u017C",
|
||
zeetrf: "\u2128",
|
||
zeta: "\u03B6",
|
||
zfr: "\u{1D537}",
|
||
zhcy: "\u0436",
|
||
zigrarr: "\u21DD",
|
||
zopf: "\u{1D56B}",
|
||
zscr: "\u{1D4CF}",
|
||
zwj: "\u200D",
|
||
zwnj: "\u200C"
|
||
};
|
||
|
||
// node_modules/decode-named-character-reference/index.js

var own = {}.hasOwnProperty;

/**
 * Decode a single named HTML character reference (name only, without the
 * surrounding `&` and `;`).
 *
 * @param {string} value2 - Reference name, e.g. `"amp"`.
 * @returns {string|false} Decoded character(s), or `false` when unknown.
 */
function decodeNamedCharacterReference(value2) {
  // `own.call` guards against inherited keys such as `"constructor"`.
  return own.call(characterEntities, value2) ? characterEntities[value2] : false;
}
// node_modules/micromark-util-chunked/index.js

/**
 * Like `Array.prototype.splice`, but safe for very large inserts.
 * Inserting many items at once with native `splice` can exceed the
 * engine's maximum argument count, so inserts are chunked in groups
 * of 10 000.
 *
 * @param {Array} list4 - List to operate on (mutated in place).
 * @param {number} start - Index to remove/insert at (negative counts from the end).
 * @param {number} remove - Number of items to remove.
 * @param {Array} items - Items to inject into `list4`.
 * @returns {void}
 */
function splice(list4, start, remove, items) {
  const end = list4.length;
  let chunkStart = 0;
  let parameters;
  // Normalize `start` the way native `splice` does: clamp negative and
  // overflowing indices into `[0, end]`.
  if (start < 0) {
    start = -start > end ? 0 : end + start;
  } else {
    start = start > end ? end : start;
  }
  remove = remove > 0 ? remove : 0;
  if (items.length < 1e4) {
    parameters = Array.from(items);
    parameters.unshift(start, remove);
    list4.splice(...parameters);
  } else {
    // Delete first, then insert in chunks of 10 000 so each call stays
    // under the maximum number of arguments a function may receive.
    if (remove)
      list4.splice(start, remove);
    while (chunkStart < items.length) {
      parameters = items.slice(chunkStart, chunkStart + 1e4);
      parameters.unshift(start, 0);
      list4.splice(...parameters);
      chunkStart += 1e4;
      start += 1e4;
    }
  }
}

/**
 * Append `items` (an array) at the end of `list4`.
 * When `list4` is empty the `items` array itself is returned, avoiding a copy.
 *
 * @param {Array} list4 - List to operate on.
 * @param {Array} items - Items to append.
 * @returns {Array} Either `list4` (mutated) or `items`.
 */
function push(list4, items) {
  if (list4.length > 0) {
    splice(list4, list4.length, 0, items);
    return list4;
  }
  return items;
}
// node_modules/micromark-util-combine-extensions/index.js

var hasOwnProperty = {}.hasOwnProperty;

/**
 * Combine several micromark syntax extensions into one.
 *
 * @param {Array<Object>} extensions - Hook-keyed syntax extensions.
 * @returns {Object} A single combined extension.
 */
function combineExtensions(extensions) {
  const all2 = {};
  let index2 = -1;
  while (++index2 < extensions.length) {
    syntaxExtension(all2, extensions[index2]);
  }
  return all2;
}

/**
 * Merge one extension into the accumulator (mutated in place).
 * For each hook, constructs keyed by character code are merged into
 * lists via `constructs`.
 *
 * @param {Object} all2 - Accumulated extension (mutated).
 * @param {Object} extension2 - Extension to merge in.
 * @returns {void}
 */
function syntaxExtension(all2, extension2) {
  let hook;
  for (hook in extension2) {
    const maybe = hasOwnProperty.call(all2, hook) ? all2[hook] : void 0;
    const left = maybe || (all2[hook] = {});
    const right = extension2[hook];
    let code2;
    if (right) {
      for (code2 in right) {
        if (!hasOwnProperty.call(left, code2))
          left[code2] = [];
        const value2 = right[code2];
        constructs(
          // @ts-expect-error Looks like a list.
          left[code2],
          Array.isArray(value2) ? value2 : value2 ? [value2] : []
        );
      }
    }
  }
}

/**
 * Merge `list4` into `existing` (mutated). Constructs marked
 * `add: "after"` are appended; all others are prepended in one go.
 *
 * @param {Array} existing - Current constructs (mutated).
 * @param {Array} list4 - Constructs to add.
 * @returns {void}
 */
function constructs(existing, list4) {
  let index2 = -1;
  const before = [];
  while (++index2 < list4.length) {
    (list4[index2].add === "after" ? existing : before).push(list4[index2]);
  }
  splice(existing, 0, 0, before);
}
// node_modules/micromark-util-decode-numeric-character-reference/index.js

/**
 * Decode a numeric character reference (e.g. the `41` of `&#x41;`).
 * Invalid code points — most control characters, surrogates,
 * noncharacters, and anything beyond U+10FFFF — decode to U+FFFD
 * REPLACEMENT CHARACTER, per the HTML character-reference rules.
 *
 * @param {string} value2 - Digits of the reference (no `&#` / `;`).
 * @param {number} base - Radix: 10 or 16.
 * @returns {string} Decoded character, or `"\uFFFD"` when invalid.
 */
function decodeNumericCharacterReference(value2, base) {
  const code2 = Number.parseInt(value2, base);
  if (
    // C0 except for HT, LF, FF, CR, space.
    code2 < 9 || code2 === 11 || code2 > 13 && code2 < 32 || // Control character (DEL) of C0, and C1 controls.
    code2 > 126 && code2 < 160 || // Lone high surrogates and low surrogates.
    code2 > 55295 && code2 < 57344 || // Noncharacters.
    code2 > 64975 && code2 < 65008 || /* eslint-disable no-bitwise */
    (code2 & 65535) === 65535 || (code2 & 65535) === 65534 || /* eslint-enable no-bitwise */
    // Out of range
    code2 > 1114111
  ) {
    return "\uFFFD";
  }
  return String.fromCodePoint(code2);
}
// node_modules/micromark-util-normalize-identifier/index.js

/**
 * Normalize a link/definition label for case-insensitive matching:
 * collapse internal whitespace runs to one space, trim one leading and
 * one trailing space, then lower- and upper-case (the double casing
 * handles characters like `ß` whose case mapping is not round-trip safe).
 *
 * @param {string} value2 - Raw identifier.
 * @returns {string} Normalized identifier.
 */
function normalizeIdentifier(value2) {
  return value2.replace(/[\t\n\r ]+/g, " ").replace(/^ | $/g, "").toLowerCase().toUpperCase();
}
// node_modules/micromark-util-character/index.js

// Character classifiers used throughout micromark. `null` means EOF and
// negative codes are virtual characters (line endings / expanded tabs) —
// NOTE(review): the negative-code meanings are inferred from usage here;
// confirm against the micromark documentation.

// Unicode `Punctuation` category (checked in addition to ASCII punctuation).
var unicodePunctuationInternal = regexCheck(/\p{P}/u);
var asciiAlpha = regexCheck(/[A-Za-z]/);
var asciiAlphanumeric = regexCheck(/[\dA-Za-z]/);
// `atext` characters (as allowed in the local part of an email autolink).
var asciiAtext = regexCheck(/[#-'*+\--9=?A-Z^-~]/);

/** Check whether a code is a control character (C0, DEL, or virtual). */
function asciiControl(code2) {
  return (
    // Special whitespace codes (which have negative values), C0 and Control
    // character DEL
    code2 !== null && (code2 < 32 || code2 === 127)
  );
}
var asciiDigit = regexCheck(/\d/);
var asciiHexDigit = regexCheck(/[\dA-Fa-f]/);
var asciiPunctuation = regexCheck(/[!-/:-@[-`{-~]/);

/** Check for a markdown line ending (the virtual CR / LF / CRLF codes). */
function markdownLineEnding(code2) {
  return code2 !== null && code2 < -2;
}

/** Check for a markdown line ending or space (any virtual code, or space). */
function markdownLineEndingOrSpace(code2) {
  return code2 !== null && (code2 < 0 || code2 === 32);
}

/** Check for a markdown space: tab (-2), virtual space (-1), or space (32). */
function markdownSpace(code2) {
  return code2 === -2 || code2 === -1 || code2 === 32;
}

/** Check for ASCII or Unicode punctuation. */
function unicodePunctuation(code2) {
  return asciiPunctuation(code2) || unicodePunctuationInternal(code2);
}
var unicodeWhitespace = regexCheck(/\s/);

/**
 * Build a classifier from a regex.
 * EOF (`null`) and virtual (negative) codes always yield `false`; only
 * BMP code points are tested (`String.fromCharCode`).
 *
 * @param {RegExp} regex - Single-character test pattern.
 * @returns {(code2: number|null) => boolean} Classifier.
 */
function regexCheck(regex) {
  return check;
  function check(code2) {
    return code2 !== null && code2 > -1 && regex.test(String.fromCharCode(code2));
  }
}
// node_modules/micromark-factory-space/index.js

/**
 * State-machine factory that tokenizes a run of markdown spaces/tabs as a
 * single token of `type`, then continues with `ok3`.
 * At most `max - 1` characters are consumed into the token (unbounded
 * when `max` is not given); the first non-space code is handed to `ok3`.
 *
 * @param {Object} effects - Tokenizer effects (`enter`/`consume`/`exit`).
 * @param {Function} ok3 - State to continue with after the whitespace.
 * @param {string} type - Token type to emit, e.g. `"linePrefix"`.
 * @param {number} [max] - Upper bound (exclusive) on consumed size + 1.
 * @returns {Function} The starting state.
 */
function factorySpace(effects, ok3, type, max) {
  const limit = max ? max - 1 : Number.POSITIVE_INFINITY;
  let size = 0;
  return start;
  function start(code2) {
    if (markdownSpace(code2)) {
      effects.enter(type);
      return prefix(code2);
    }
    return ok3(code2);
  }
  function prefix(code2) {
    if (markdownSpace(code2) && size++ < limit) {
      effects.consume(code2);
      return prefix;
    }
    effects.exit(type);
    return ok3(code2);
  }
}
// node_modules/micromark/lib/initialize/content.js

/** Initial construct for the `content` content type. */
var content = {
  tokenize: initializeContent
};

/**
 * Tokenizer for the content layer: tries the `contentInitial` constructs
 * at the start of each chunk; anything else becomes a paragraph made of
 * one `chunkText` token per line, chained via `previous`/`next` so the
 * text tokenizer can treat them as a single stream.
 *
 * @this {Object} Tokenize context (provides `parser.constructs`).
 * @param {Object} effects - Tokenizer effects.
 * @returns {Function} Starting state.
 */
function initializeContent(effects) {
  const contentStart = effects.attempt(
    this.parser.constructs.contentInitial,
    afterContentStartConstruct,
    paragraphInitial
  );
  let previous2;
  return contentStart;
  function afterContentStartConstruct(code2) {
    if (code2 === null) {
      effects.consume(code2);
      return;
    }
    effects.enter("lineEnding");
    effects.consume(code2);
    effects.exit("lineEnding");
    // After a construct (e.g. a definition), optional indent, then retry.
    return factorySpace(effects, contentStart, "linePrefix");
  }
  function paragraphInitial(code2) {
    effects.enter("paragraph");
    return lineStart(code2);
  }
  function lineStart(code2) {
    // One `chunkText` token per line; link it to the previous chunk.
    const token = effects.enter("chunkText", {
      contentType: "text",
      previous: previous2
    });
    if (previous2) {
      previous2.next = token;
    }
    previous2 = token;
    return data(code2);
  }
  function data(code2) {
    if (code2 === null) {
      effects.exit("chunkText");
      effects.exit("paragraph");
      effects.consume(code2);
      return;
    }
    if (markdownLineEnding(code2)) {
      effects.consume(code2);
      effects.exit("chunkText");
      return lineStart;
    }
    effects.consume(code2);
    return data;
  }
}
// node_modules/micromark/lib/initialize/document.js

/** Initial construct for the document content type. */
var document = {
  tokenize: initializeDocument
};

/** Wrapper construct used to probe for containers on each line. */
var containerConstruct = {
  tokenize: tokenizeContainer
};

/**
 * Tokenizer for the document layer: tracks a stack of open containers
 * (block quotes, list items) and feeds the rest of each line into a
 * child `flow` tokenizer.
 *
 * NOTE(review): this is intricate, order-dependent state-machine code;
 * it is transcribed verbatim apart from formatting and comments.
 *
 * @this {Object} Tokenize context.
 * @param {Object} effects - Tokenizer effects.
 * @returns {Function} Starting state.
 */
function initializeDocument(effects) {
  const self = this;
  // Stack of `[construct, containerState]` pairs for open containers.
  const stack = [];
  let continued = 0;
  let childFlow;
  let childToken;
  let lineStartOffset;
  return start;
  function start(code2) {
    // First, try to continue each container already on the stack.
    if (continued < stack.length) {
      const item = stack[continued];
      self.containerState = item[1];
      return effects.attempt(
        item[0].continuation,
        documentContinue,
        checkNewContainers
      )(code2);
    }
    return checkNewContainers(code2);
  }
  function documentContinue(code2) {
    continued++;
    // A container asked to close the current flow (e.g. a blank line ended
    // a list item): flush the child tokenizer and move the new exit events
    // to just after the flow chunk they close, fixing their positions.
    if (self.containerState._closeFlow) {
      self.containerState._closeFlow = void 0;
      if (childFlow) {
        closeFlow();
      }
      const indexBeforeExits = self.events.length;
      let indexBeforeFlow = indexBeforeExits;
      let point3;
      // Find the point just before the last `chunkFlow` exit.
      while (indexBeforeFlow--) {
        if (self.events[indexBeforeFlow][0] === "exit" && self.events[indexBeforeFlow][1].type === "chunkFlow") {
          point3 = self.events[indexBeforeFlow][1].end;
          break;
        }
      }
      exitContainers(continued);
      // Fix positions of the freshly added exit events.
      let index2 = indexBeforeExits;
      while (index2 < self.events.length) {
        self.events[index2][1].end = Object.assign({}, point3);
        index2++;
      }
      // Inject the exits right after the flow chunk, then truncate.
      splice(
        self.events,
        indexBeforeFlow + 1,
        0,
        self.events.slice(indexBeforeExits)
      );
      self.events.length = index2;
      return checkNewContainers(code2);
    }
    return start(code2);
  }
  function checkNewContainers(code2) {
    if (continued === stack.length) {
      if (!childFlow) {
        return documentContinued(code2);
      }
      // Concrete constructs (e.g. fenced code) cannot be interrupted.
      if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) {
        return flowStart(code2);
      }
      self.interrupt = Boolean(
        childFlow.currentConstruct && !childFlow._gfmTableDynamicInterruptHack
      );
    }
    self.containerState = {};
    return effects.check(
      containerConstruct,
      thereIsANewContainer,
      thereIsNoNewContainer
    )(code2);
  }
  function thereIsANewContainer(code2) {
    if (childFlow)
      closeFlow();
    exitContainers(continued);
    return documentContinued(code2);
  }
  function thereIsNoNewContainer(code2) {
    self.parser.lazy[self.now().line] = continued !== stack.length;
    lineStartOffset = self.now().offset;
    return flowStart(code2);
  }
  function documentContinued(code2) {
    self.containerState = {};
    return effects.attempt(
      containerConstruct,
      containerContinue,
      flowStart
    )(code2);
  }
  function containerContinue(code2) {
    continued++;
    stack.push([self.currentConstruct, self.containerState]);
    return documentContinued(code2);
  }
  function flowStart(code2) {
    if (code2 === null) {
      if (childFlow)
        closeFlow();
      exitContainers(0);
      effects.consume(code2);
      return;
    }
    childFlow = childFlow || self.parser.flow(self.now());
    effects.enter("chunkFlow", {
      contentType: "flow",
      previous: childToken,
      _tokenizer: childFlow
    });
    return flowContinue(code2);
  }
  function flowContinue(code2) {
    if (code2 === null) {
      writeToChild(effects.exit("chunkFlow"), true);
      exitContainers(0);
      effects.consume(code2);
      return;
    }
    if (markdownLineEnding(code2)) {
      effects.consume(code2);
      writeToChild(effects.exit("chunkFlow"));
      continued = 0;
      self.interrupt = void 0;
      return start;
    }
    effects.consume(code2);
    return flowContinue;
  }
  function writeToChild(token, eof) {
    const stream2 = self.sliceStream(token);
    if (eof)
      stream2.push(null);
    token.previous = childToken;
    if (childToken)
      childToken.next = token;
    childToken = token;
    childFlow.defineSkip(token.start);
    childFlow.write(stream2);
    // On a lazy line: if the child opened nothing spanning past the line
    // start, close the containers here and re-anchor the exit events.
    if (self.parser.lazy[token.start.line]) {
      let index2 = childFlow.events.length;
      while (index2--) {
        if (
          // The token starts before the line ending…
          childFlow.events[index2][1].start.offset < lineStartOffset && // …and either is not ended yet…
          (!childFlow.events[index2][1].end || // …or ends after it.
          childFlow.events[index2][1].end.offset > lineStartOffset)
        ) {
          return;
        }
      }
      const indexBeforeExits = self.events.length;
      let indexBeforeFlow = indexBeforeExits;
      let seen;
      let point3;
      // Find the point before the second-to-last `chunkFlow` exit.
      while (indexBeforeFlow--) {
        if (self.events[indexBeforeFlow][0] === "exit" && self.events[indexBeforeFlow][1].type === "chunkFlow") {
          if (seen) {
            point3 = self.events[indexBeforeFlow][1].end;
            break;
          }
          seen = true;
        }
      }
      exitContainers(continued);
      index2 = indexBeforeExits;
      while (index2 < self.events.length) {
        self.events[index2][1].end = Object.assign({}, point3);
        index2++;
      }
      splice(
        self.events,
        indexBeforeFlow + 1,
        0,
        self.events.slice(indexBeforeExits)
      );
      self.events.length = index2;
    }
  }
  function exitContainers(size) {
    let index2 = stack.length;
    // Close every container above `size`, innermost first.
    while (index2-- > size) {
      const entry = stack[index2];
      self.containerState = entry[1];
      entry[0].exit.call(self, effects);
    }
    stack.length = size;
  }
  function closeFlow() {
    childFlow.write([null]);
    childToken = void 0;
    childFlow = void 0;
    self.containerState._closeFlow = void 0;
  }
}

/**
 * Tokenize an optional indent, then try the document-level constructs.
 * Indent is capped at 3 unless indented code is disabled.
 */
function tokenizeContainer(effects, ok3, nok) {
  return factorySpace(
    effects,
    effects.attempt(this.parser.constructs.document, ok3, nok),
    "linePrefix",
    this.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4
  );
}
// node_modules/micromark-util-classify-character/index.js

/**
 * Classify a character for attention (emphasis/strong) resolution:
 * `1` for whitespace (or EOF), `2` for punctuation, `undefined` for
 * everything else — matching CommonMark's flanking rules.
 *
 * @param {number|null} code2 - Character code, or `null` for EOF.
 * @returns {number|undefined} 1 (whitespace), 2 (punctuation), or `undefined`.
 */
function classifyCharacter(code2) {
  if (code2 === null || markdownLineEndingOrSpace(code2) || unicodeWhitespace(code2)) {
    return 1;
  }
  if (unicodePunctuation(code2)) {
    return 2;
  }
}
// node_modules/micromark-util-resolve-all/index.js

/**
 * Run every `resolveAll` handler found in `constructs2` over `events`,
 * calling each distinct handler at most once even if several constructs
 * share it.
 *
 * @param {Array<Object>} constructs2 - Constructs, possibly with `resolveAll`.
 * @param {Array} events - Events to resolve.
 * @param {Object} context - Tokenize context passed to each handler.
 * @returns {Array} The resolved events.
 */
function resolveAll(constructs2, events, context) {
  const called = [];
  let index2 = -1;
  while (++index2 < constructs2.length) {
    const resolve = constructs2[index2].resolveAll;
    if (resolve && !called.includes(resolve)) {
      events = resolve(events, context);
      called.push(resolve);
    }
  }
  return events;
}
// node_modules/micromark-core-commonmark/lib/attention.js

/** Construct for emphasis/strong attention runs (`*` / `_`). */
var attention = {
  name: "attention",
  tokenize: tokenizeAttention,
  resolveAll: resolveAllAttention
};

/**
 * Pair opening and closing attention sequences and rewrite them into
 * emphasis/strong event groups; unmatched sequences are demoted to data.
 * Implements the CommonMark "multiple of 3" rule for runs that can both
 * open and close.
 *
 * @param {Array} events - All events.
 * @param {Object} context - Tokenize context.
 * @returns {Array} Rewritten events.
 */
function resolveAllAttention(events, context) {
  let index2 = -1;
  let open;
  let group;
  let text4;
  let openingSequence;
  let closingSequence;
  let use;
  let nextEvents;
  let offset;
  // Walk through all events: on each closing sequence, search backwards
  // for a matching opening sequence with the same marker.
  while (++index2 < events.length) {
    if (events[index2][0] === "enter" && events[index2][1].type === "attentionSequence" && events[index2][1]._close) {
      open = index2;
      while (open--) {
        if (events[open][0] === "exit" && events[open][1].type === "attentionSequence" && events[open][1]._open && // If the markers are the same:
        context.sliceSerialize(events[open][1]).charCodeAt(0) === context.sliceSerialize(events[index2][1]).charCodeAt(0)) {
          // "Multiple of 3" rule: skip pairs whose combined length is a
          // multiple of 3 unless both lengths are.
          if ((events[open][1]._close || events[index2][1]._open) && (events[index2][1].end.offset - events[index2][1].start.offset) % 3 && !((events[open][1].end.offset - events[open][1].start.offset + events[index2][1].end.offset - events[index2][1].start.offset) % 3)) {
            continue;
          }
          // Take 2 markers from each side when both runs have 2+ (strong),
          // otherwise 1 (emphasis).
          use = events[open][1].end.offset - events[open][1].start.offset > 1 && events[index2][1].end.offset - events[index2][1].start.offset > 1 ? 2 : 1;
          const start = Object.assign({}, events[open][1].end);
          const end = Object.assign({}, events[index2][1].start);
          movePoint(start, -use);
          movePoint(end, use);
          openingSequence = {
            type: use > 1 ? "strongSequence" : "emphasisSequence",
            start,
            end: Object.assign({}, events[open][1].end)
          };
          closingSequence = {
            type: use > 1 ? "strongSequence" : "emphasisSequence",
            start: Object.assign({}, events[index2][1].start),
            end
          };
          text4 = {
            type: use > 1 ? "strongText" : "emphasisText",
            start: Object.assign({}, events[open][1].end),
            end: Object.assign({}, events[index2][1].start)
          };
          group = {
            type: use > 1 ? "strong" : "emphasis",
            start: Object.assign({}, openingSequence.start),
            end: Object.assign({}, closingSequence.end)
          };
          // Shrink the original sequences by the consumed markers.
          events[open][1].end = Object.assign({}, openingSequence.start);
          events[index2][1].start = Object.assign({}, closingSequence.end);
          nextEvents = [];
          // Leftover markers on the opening side stay as data.
          if (events[open][1].end.offset - events[open][1].start.offset) {
            nextEvents = push(nextEvents, [
              ["enter", events[open][1], context],
              ["exit", events[open][1], context]
            ]);
          }
          nextEvents = push(nextEvents, [
            ["enter", group, context],
            ["enter", openingSequence, context],
            ["exit", openingSequence, context],
            ["enter", text4, context]
          ]);
          // Resolve the events between the sequences (nested attention etc.).
          nextEvents = push(
            nextEvents,
            resolveAll(
              context.parser.constructs.insideSpan.null,
              events.slice(open + 1, index2),
              context
            )
          );
          nextEvents = push(nextEvents, [
            ["exit", text4, context],
            ["enter", closingSequence, context],
            ["exit", closingSequence, context],
            ["exit", group, context]
          ]);
          // Leftover markers on the closing side stay as data.
          if (events[index2][1].end.offset - events[index2][1].start.offset) {
            offset = 2;
            nextEvents = push(nextEvents, [
              ["enter", events[index2][1], context],
              ["exit", events[index2][1], context]
            ]);
          } else {
            offset = 0;
          }
          splice(events, open - 1, index2 - open + 3, nextEvents);
          index2 = open + nextEvents.length - offset - 2;
          break;
        }
      }
    }
  }
  // Any remaining attention sequences could not be paired: demote to data.
  index2 = -1;
  while (++index2 < events.length) {
    if (events[index2][1].type === "attentionSequence") {
      events[index2][1].type = "data";
    }
  }
  return events;
}

/**
 * Tokenize a run of a single attention marker and record, per the
 * CommonMark flanking rules, whether it can open and/or close.
 */
function tokenizeAttention(effects, ok3) {
  const attentionMarkers2 = this.parser.constructs.attentionMarkers.null;
  const previous2 = this.previous;
  const before = classifyCharacter(previous2);
  let marker;
  return start;
  function start(code2) {
    marker = code2;
    effects.enter("attentionSequence");
    return inside(code2);
  }
  function inside(code2) {
    if (code2 === marker) {
      effects.consume(code2);
      return inside;
    }
    const token = effects.exit("attentionSequence");
    const after = classifyCharacter(code2);
    const open = !after || after === 2 && before || attentionMarkers2.includes(code2);
    const close2 = !before || before === 2 && after || attentionMarkers2.includes(previous2);
    // Marker 42 is `*`; other markers (`_`) disallow intraword emphasis,
    // hence the extra `before`/`after` conditions.
    token._open = Boolean(marker === 42 ? open : open && (before || !close2));
    token._close = Boolean(marker === 42 ? close2 : close2 && (after || !open));
    return ok3(code2);
  }
}

/**
 * Shift a point by `offset` columns/offsets within the same chunk.
 * Mutates `point3` in place.
 */
function movePoint(point3, offset) {
  point3.column += offset;
  point3.offset += offset;
  point3._bufferIndex += offset;
}
// node_modules/micromark-core-commonmark/lib/autolink.js

/** Construct for autolinks: `<https://example.com>` / `<user@example.com>`. */
var autolink = {
  name: "autolink",
  tokenize: tokenizeAutolink
};

/**
 * Tokenize an autolink. `size` tracks the scheme length (max 32) and,
 * in the email branch, each domain-label length (max 63).
 */
function tokenizeAutolink(effects, ok3, nok) {
  let size = 0;
  return start;
  function start(code2) {
    effects.enter("autolink");
    effects.enter("autolinkMarker");
    effects.consume(code2);
    effects.exit("autolinkMarker");
    effects.enter("autolinkProtocol");
    return open;
  }
  function open(code2) {
    if (asciiAlpha(code2)) {
      effects.consume(code2);
      return schemeOrEmailAtext;
    }
    return emailAtext(code2);
  }
  function schemeOrEmailAtext(code2) {
    // `+`, `-`, `.` or alphanumeric can continue a scheme.
    if (code2 === 43 || code2 === 45 || code2 === 46 || asciiAlphanumeric(code2)) {
      size = 1;
      return schemeInsideOrEmailAtext(code2);
    }
    return emailAtext(code2);
  }
  function schemeInsideOrEmailAtext(code2) {
    if (code2 === 58) {
      effects.consume(code2);
      size = 0;
      return urlInside;
    }
    if ((code2 === 43 || code2 === 45 || code2 === 46 || asciiAlphanumeric(code2)) && size++ < 32) {
      effects.consume(code2);
      return schemeInsideOrEmailAtext;
    }
    size = 0;
    return emailAtext(code2);
  }
  function urlInside(code2) {
    if (code2 === 62) {
      effects.exit("autolinkProtocol");
      effects.enter("autolinkMarker");
      effects.consume(code2);
      effects.exit("autolinkMarker");
      effects.exit("autolink");
      return ok3;
    }
    // EOF, space, `<`, and control characters end (and invalidate) a URL.
    if (code2 === null || code2 === 32 || code2 === 60 || asciiControl(code2)) {
      return nok(code2);
    }
    effects.consume(code2);
    return urlInside;
  }
  function emailAtext(code2) {
    if (code2 === 64) {
      effects.consume(code2);
      return emailAtSignOrDot;
    }
    if (asciiAtext(code2)) {
      effects.consume(code2);
      return emailAtext;
    }
    return nok(code2);
  }
  function emailAtSignOrDot(code2) {
    // A domain label must start with an alphanumeric.
    return asciiAlphanumeric(code2) ? emailLabel(code2) : nok(code2);
  }
  function emailLabel(code2) {
    if (code2 === 46) {
      effects.consume(code2);
      size = 0;
      return emailAtSignOrDot;
    }
    if (code2 === 62) {
      // Retype the protocol token: this turned out to be an email.
      effects.exit("autolinkProtocol").type = "autolinkEmail";
      effects.enter("autolinkMarker");
      effects.consume(code2);
      effects.exit("autolinkMarker");
      effects.exit("autolink");
      return ok3;
    }
    return emailValue(code2);
  }
  function emailValue(code2) {
    if ((code2 === 45 || asciiAlphanumeric(code2)) && size++ < 63) {
      // A label may contain dashes but must not end on one.
      const next = code2 === 45 ? emailValue : emailLabel;
      effects.consume(code2);
      return next;
    }
    return nok(code2);
  }
}
// node_modules/micromark-core-commonmark/lib/blank-line.js

/** Partial construct matching a blank line (optional whitespace, then EOL/EOF). */
var blankLine = {
  tokenize: tokenizeBlankLine,
  partial: true
};

/**
 * Tokenize a blank line: optional spaces/tabs (as `linePrefix`) followed
 * by a line ending or EOF; anything else fails.
 */
function tokenizeBlankLine(effects, ok3, nok) {
  return start;
  function start(code2) {
    return markdownSpace(code2) ? factorySpace(effects, after, "linePrefix")(code2) : after(code2);
  }
  function after(code2) {
    return code2 === null || markdownLineEnding(code2) ? ok3(code2) : nok(code2);
  }
}
// node_modules/micromark-core-commonmark/lib/block-quote.js

/** Container construct for block quotes (`>` prefix). */
var blockQuote = {
  name: "blockQuote",
  tokenize: tokenizeBlockQuoteStart,
  continuation: {
    tokenize: tokenizeBlockQuoteContinuation
  },
  exit
};

/**
 * Tokenize the `>` marker (plus one optional space/tab) that opens or
 * continues a block quote line.
 */
function tokenizeBlockQuoteStart(effects, ok3, nok) {
  const self = this;
  return start;
  function start(code2) {
    if (code2 === 62) {
      const state = self.containerState;
      // Only emit the container `enter` once per block quote.
      if (!state.open) {
        effects.enter("blockQuote", {
          _container: true
        });
        state.open = true;
      }
      effects.enter("blockQuotePrefix");
      effects.enter("blockQuoteMarker");
      effects.consume(code2);
      effects.exit("blockQuoteMarker");
      return after;
    }
    return nok(code2);
  }
  function after(code2) {
    if (markdownSpace(code2)) {
      effects.enter("blockQuotePrefixWhitespace");
      effects.consume(code2);
      effects.exit("blockQuotePrefixWhitespace");
      effects.exit("blockQuotePrefix");
      return ok3;
    }
    effects.exit("blockQuotePrefix");
    return ok3(code2);
  }
}

/** Continuation: allow up to 3 columns of indent, then require another `>`. */
function tokenizeBlockQuoteContinuation(effects, ok3, nok) {
  const self = this;
  return contStart;
  function contStart(code2) {
    if (markdownSpace(code2)) {
      return factorySpace(
        effects,
        contBefore,
        "linePrefix",
        self.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4
      )(code2);
    }
    return contBefore(code2);
  }
  function contBefore(code2) {
    return effects.attempt(blockQuote, ok3, nok)(code2);
  }
}

/** Close the block quote container. */
function exit(effects) {
  effects.exit("blockQuote");
}
// node_modules/micromark-core-commonmark/lib/character-escape.js

/** Construct for backslash escapes: `\` + ASCII punctuation. */
var characterEscape = {
  name: "characterEscape",
  tokenize: tokenizeCharacterEscape
};

/**
 * Tokenize a character escape: consume the backslash as `escapeMarker`,
 * then require one ASCII punctuation character as the escaped value.
 */
function tokenizeCharacterEscape(effects, ok3, nok) {
  return start;
  function start(code2) {
    effects.enter("characterEscape");
    effects.enter("escapeMarker");
    effects.consume(code2);
    effects.exit("escapeMarker");
    return inside;
  }
  function inside(code2) {
    if (asciiPunctuation(code2)) {
      effects.enter("characterEscapeValue");
      effects.consume(code2);
      effects.exit("characterEscapeValue");
      effects.exit("characterEscape");
      return ok3;
    }
    return nok(code2);
  }
}
// node_modules/micromark-core-commonmark/lib/character-reference.js

/** Construct for character references: `&amp;`, `&#65;`, `&#x41;`. */
var characterReference = {
  name: "characterReference",
  tokenize: tokenizeCharacterReference
};

/**
 * Tokenize a character reference. `max`/`test` are set per form:
 * named (up to 31 alphanumerics), decimal (7 digits), hexadecimal
 * (6 hex digits). Named references must be known entities to be valid.
 */
function tokenizeCharacterReference(effects, ok3, nok) {
  const self = this;
  let size = 0;
  let max;
  let test;
  return start;
  function start(code2) {
    effects.enter("characterReference");
    effects.enter("characterReferenceMarker");
    effects.consume(code2);
    effects.exit("characterReferenceMarker");
    return open;
  }
  function open(code2) {
    if (code2 === 35) {
      effects.enter("characterReferenceMarkerNumeric");
      effects.consume(code2);
      effects.exit("characterReferenceMarkerNumeric");
      return numeric;
    }
    effects.enter("characterReferenceValue");
    max = 31;
    test = asciiAlphanumeric;
    return value2(code2);
  }
  function numeric(code2) {
    // `X` or `x` marks a hexadecimal reference.
    if (code2 === 88 || code2 === 120) {
      effects.enter("characterReferenceMarkerHexadecimal");
      effects.consume(code2);
      effects.exit("characterReferenceMarkerHexadecimal");
      effects.enter("characterReferenceValue");
      max = 6;
      test = asciiHexDigit;
      return value2;
    }
    effects.enter("characterReferenceValue");
    max = 7;
    test = asciiDigit;
    return value2(code2);
  }
  function value2(code2) {
    // `;` with at least one value character ends the reference.
    if (code2 === 59 && size) {
      const token = effects.exit("characterReferenceValue");
      // A named reference is only valid when it is a known entity.
      if (test === asciiAlphanumeric && !decodeNamedCharacterReference(self.sliceSerialize(token))) {
        return nok(code2);
      }
      effects.enter("characterReferenceMarker");
      effects.consume(code2);
      effects.exit("characterReferenceMarker");
      effects.exit("characterReference");
      return ok3;
    }
    if (test(code2) && size++ < max) {
      effects.consume(code2);
      return value2;
    }
    return nok(code2);
  }
}
// node_modules/micromark-core-commonmark/lib/code-fenced.js

/** Partial construct: a line ending that is not lazy continuation. */
var nonLazyContinuation = {
  tokenize: tokenizeNonLazyContinuation,
  partial: true
};

/** Construct for fenced code blocks. `concrete`: cannot be interrupted. */
var codeFenced = {
  name: "codeFenced",
  tokenize: tokenizeCodeFenced,
  concrete: true
};

/**
 * Tokenize a fenced code block: an opening fence of 3+ identical markers,
 * optional info/meta strings, content lines, and an optional closing fence
 * at least as long as the opening one.
 */
function tokenizeCodeFenced(effects, ok3, nok) {
  const self = this;
  // Partial used to probe for a closing fence after each line ending.
  const closeStart = {
    tokenize: tokenizeCloseStart,
    partial: true
  };
  let initialPrefix = 0;
  let sizeOpen = 0;
  let marker;
  return start;
  function start(code2) {
    return beforeSequenceOpen(code2);
  }
  function beforeSequenceOpen(code2) {
    const tail = self.events[self.events.length - 1];
    // Remember the opening fence's indent: content lines may strip up to
    // that much indentation.
    initialPrefix = tail && tail[1].type === "linePrefix" ? tail[2].sliceSerialize(tail[1], true).length : 0;
    marker = code2;
    effects.enter("codeFenced");
    effects.enter("codeFencedFence");
    effects.enter("codeFencedFenceSequence");
    return sequenceOpen(code2);
  }
  function sequenceOpen(code2) {
    if (code2 === marker) {
      sizeOpen++;
      effects.consume(code2);
      return sequenceOpen;
    }
    // An opening fence needs at least three markers.
    if (sizeOpen < 3) {
      return nok(code2);
    }
    effects.exit("codeFencedFenceSequence");
    return markdownSpace(code2) ? factorySpace(effects, infoBefore, "whitespace")(code2) : infoBefore(code2);
  }
  function infoBefore(code2) {
    if (code2 === null || markdownLineEnding(code2)) {
      effects.exit("codeFencedFence");
      return self.interrupt ? ok3(code2) : effects.check(nonLazyContinuation, atNonLazyBreak, after)(code2);
    }
    effects.enter("codeFencedFenceInfo");
    effects.enter("chunkString", {
      contentType: "string"
    });
    return info(code2);
  }
  function info(code2) {
    if (code2 === null || markdownLineEnding(code2)) {
      effects.exit("chunkString");
      effects.exit("codeFencedFenceInfo");
      return infoBefore(code2);
    }
    if (markdownSpace(code2)) {
      effects.exit("chunkString");
      effects.exit("codeFencedFenceInfo");
      return factorySpace(effects, metaBefore, "whitespace")(code2);
    }
    // The info string of a backtick fence may not contain backticks.
    if (code2 === 96 && code2 === marker) {
      return nok(code2);
    }
    effects.consume(code2);
    return info;
  }
  function metaBefore(code2) {
    if (code2 === null || markdownLineEnding(code2)) {
      return infoBefore(code2);
    }
    effects.enter("codeFencedFenceMeta");
    effects.enter("chunkString", {
      contentType: "string"
    });
    return meta(code2);
  }
  function meta(code2) {
    if (code2 === null || markdownLineEnding(code2)) {
      effects.exit("chunkString");
      effects.exit("codeFencedFenceMeta");
      return infoBefore(code2);
    }
    if (code2 === 96 && code2 === marker) {
      return nok(code2);
    }
    effects.consume(code2);
    return meta;
  }
  function atNonLazyBreak(code2) {
    return effects.attempt(closeStart, after, contentBefore)(code2);
  }
  function contentBefore(code2) {
    effects.enter("lineEnding");
    effects.consume(code2);
    effects.exit("lineEnding");
    return contentStart;
  }
  function contentStart(code2) {
    // Strip up to the opening fence's indent from each content line.
    return initialPrefix > 0 && markdownSpace(code2) ? factorySpace(
      effects,
      beforeContentChunk,
      "linePrefix",
      initialPrefix + 1
    )(code2) : beforeContentChunk(code2);
  }
  function beforeContentChunk(code2) {
    if (code2 === null || markdownLineEnding(code2)) {
      return effects.check(nonLazyContinuation, atNonLazyBreak, after)(code2);
    }
    effects.enter("codeFlowValue");
    return contentChunk(code2);
  }
  function contentChunk(code2) {
    if (code2 === null || markdownLineEnding(code2)) {
      effects.exit("codeFlowValue");
      return beforeContentChunk(code2);
    }
    effects.consume(code2);
    return contentChunk;
  }
  function after(code2) {
    effects.exit("codeFenced");
    return ok3(code2);
  }
  function tokenizeCloseStart(effects2, ok4, nok2) {
    let size = 0;
    return startBefore;
    function startBefore(code2) {
      effects2.enter("lineEnding");
      effects2.consume(code2);
      effects2.exit("lineEnding");
      return start2;
    }
    function start2(code2) {
      effects2.enter("codeFencedFence");
      return markdownSpace(code2) ? factorySpace(
        effects2,
        beforeSequenceClose,
        "linePrefix",
        self.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4
      )(code2) : beforeSequenceClose(code2);
    }
    function beforeSequenceClose(code2) {
      if (code2 === marker) {
        effects2.enter("codeFencedFenceSequence");
        return sequenceClose(code2);
      }
      return nok2(code2);
    }
    function sequenceClose(code2) {
      if (code2 === marker) {
        size++;
        effects2.consume(code2);
        return sequenceClose;
      }
      // The closing fence must be at least as long as the opening fence.
      if (size >= sizeOpen) {
        effects2.exit("codeFencedFenceSequence");
        return markdownSpace(code2) ? factorySpace(effects2, sequenceCloseAfter, "whitespace")(code2) : sequenceCloseAfter(code2);
      }
      return nok2(code2);
    }
    function sequenceCloseAfter(code2) {
      if (code2 === null || markdownLineEnding(code2)) {
        effects2.exit("codeFencedFence");
        return ok4(code2);
      }
      return nok2(code2);
    }
  }
}

/** Succeed on a line ending only when the next line is not lazy. */
function tokenizeNonLazyContinuation(effects, ok3, nok) {
  const self = this;
  return start;
  function start(code2) {
    if (code2 === null) {
      return nok(code2);
    }
    effects.enter("lineEnding");
    effects.consume(code2);
    effects.exit("lineEnding");
    return lineStart;
  }
  function lineStart(code2) {
    return self.parser.lazy[self.now().line] ? nok(code2) : ok3(code2);
  }
}
// node_modules/micromark-core-commonmark/lib/code-indented.js
|
||
var codeIndented = {
|
||
name: "codeIndented",
|
||
tokenize: tokenizeCodeIndented
|
||
};
|
||
var furtherStart = {
|
||
tokenize: tokenizeFurtherStart,
|
||
partial: true
|
||
};
|
||
// Tokenizes an indented code block: a prefix of at least four spaces,
// then raw code lines until a non-indented line or EOF.
function tokenizeCodeIndented(effects, ok3, nok) {
  const self = this;
  return start;

  function start(code) {
    effects.enter("codeIndented");
    // Eat up to 4 spaces (plus one for a tab stop) of prefix.
    return factorySpace(effects, afterPrefix, "linePrefix", 4 + 1)(code);
  }

  function afterPrefix(code) {
    const tail = self.events[self.events.length - 1];
    const indented = tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], true).length >= 4;
    return indented ? atBreak(code) : nok(code);
  }

  function atBreak(code) {
    if (code === null) return after(code);
    // Peek past line endings for more indented content.
    if (markdownLineEnding(code)) {
      return effects.attempt(furtherStart, atBreak, after)(code);
    }
    effects.enter("codeFlowValue");
    return inside(code);
  }

  function inside(code) {
    if (code === null || markdownLineEnding(code)) {
      effects.exit("codeFlowValue");
      return atBreak(code);
    }
    effects.consume(code);
    return inside;
  }

  function after(code) {
    effects.exit("codeIndented");
    return ok3(code);
  }
}
|
||
// Lookahead for a continuation of an indented code block: line endings
// are consumed, and the next line must be indented at least four spaces.
function tokenizeFurtherStart(effects, ok3, nok) {
  const self = this;
  return furtherStart2;

  function furtherStart2(code) {
    // Lazy lines never continue indented code.
    if (self.parser.lazy[self.now().line]) return nok(code);
    if (markdownLineEnding(code)) {
      effects.enter("lineEnding");
      effects.consume(code);
      effects.exit("lineEnding");
      return furtherStart2;
    }
    return factorySpace(effects, afterPrefix, "linePrefix", 4 + 1)(code);
  }

  function afterPrefix(code) {
    const tail = self.events[self.events.length - 1];
    if (tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], true).length >= 4) {
      return ok3(code);
    }
    // A blank line: keep looking further.
    return markdownLineEnding(code) ? furtherStart2(code) : nok(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/code-text.js
|
||
// Construct for inline code spans delimited by backtick sequences.
var codeText = {
  name: "codeText",
  tokenize: tokenizeCodeText,
  resolve: resolveCodeText,
  previous
};
|
||
// Post-processes code-text events: strips a single leading/trailing
// padding space or line ending, then merges adjacent data chunks
// (separated only by line endings) into single `codeTextData` tokens.
function resolveCodeText(events) {
  let tail = events.length - 4;
  let head = 3;
  let index;
  let enter;
  // If both ends are a space/line ending with real data between them,
  // relabel the outermost pair as padding and move inward.
  if ((events[head][1].type === "lineEnding" || events[head][1].type === "space") && (events[tail][1].type === "lineEnding" || events[tail][1].type === "space")) {
    index = head;
    while (++index < tail) {
      if (events[index][1].type === "codeTextData") {
        events[head][1].type = "codeTextPadding";
        events[tail][1].type = "codeTextPadding";
        head += 2;
        tail -= 2;
        break;
      }
    }
  }
  // Merge runs of data between line endings into one token each.
  index = head - 1;
  tail++;
  while (++index <= tail) {
    if (enter === void 0) {
      if (index !== tail && events[index][1].type !== "lineEnding") {
        enter = index;
      }
    } else if (index === tail || events[index][1].type === "lineEnding") {
      events[enter][1].type = "codeTextData";
      if (index !== enter + 2) {
        events[enter][1].end = events[index - 1][1].end;
        events.splice(enter + 2, index - enter - 2);
        tail -= index - enter - 2;
        index = enter + 2;
      }
      enter = void 0;
    }
  }
  return events;
}
|
||
// A code span may start here unless the preceding character is a
// backtick that was not part of a character escape.
function previous(code) {
  if (code !== 96) return true;
  const last = this.events[this.events.length - 1];
  return last[1].type === "characterEscape";
}
|
||
// Tokenizes an inline code span: an opening backtick sequence, content
// (data, spaces, line endings), and a closing sequence of equal length.
function tokenizeCodeText(effects, ok3, nok) {
  const self = this;
  let sizeOpen = 0;
  let size;
  let seqToken;
  return start;

  function start(code) {
    effects.enter("codeText");
    effects.enter("codeTextSequence");
    return sequenceOpen(code);
  }

  /** Count opening backticks. */
  function sequenceOpen(code) {
    if (code === 96) {
      effects.consume(code);
      sizeOpen++;
      return sequenceOpen;
    }
    effects.exit("codeTextSequence");
    return between(code);
  }

  /** Between chunks: dispatch on EOF, space, backtick, or line ending. */
  function between(code) {
    if (code === null) return nok(code);
    if (code === 32) {
      effects.enter("space");
      effects.consume(code);
      effects.exit("space");
      return between;
    }
    if (code === 96) {
      seqToken = effects.enter("codeTextSequence");
      size = 0;
      return sequenceClose(code);
    }
    if (markdownLineEnding(code)) {
      effects.enter("lineEnding");
      effects.consume(code);
      effects.exit("lineEnding");
      return between;
    }
    effects.enter("codeTextData");
    return data(code);
  }

  /** Raw span content. */
  function data(code) {
    if (code === null || code === 32 || code === 96 || markdownLineEnding(code)) {
      effects.exit("codeTextData");
      return between(code);
    }
    effects.consume(code);
    return data;
  }

  /** A run of closing backticks; must match the opening length. */
  function sequenceClose(code) {
    if (code === 96) {
      effects.consume(code);
      size++;
      return sequenceClose;
    }
    if (size === sizeOpen) {
      effects.exit("codeTextSequence");
      effects.exit("codeText");
      return ok3(code);
    }
    // Wrong length: the would-be closer is just more data.
    seqToken.type = "codeTextData";
    return data(code);
  }
}
|
||
|
||
// node_modules/micromark-util-subtokenize/index.js
|
||
// Runs sub-tokenizers over `chunk*` events, replacing each chunk with
// the events its subtokenizer produced. Returns `true` when the event
// list is fully expanded (no more work remained).
function subtokenize(events) {
  const jumps = {};
  let index = -1;
  let event;
  let lineIndex;
  let otherIndex;
  let otherEvent;
  let parameters;
  let subevents;
  let more;
  while (++index < events.length) {
    // Skip over regions already replaced during this pass.
    while (index in jumps) {
      index = jumps[index];
    }
    event = events[index];
    // Mark the first content chunk of a list item (GFM task-list hook).
    if (index && event[1].type === "chunkFlow" && events[index - 1][1].type === "listItemPrefix") {
      subevents = event[1]._tokenizer.events;
      otherIndex = 0;
      if (otherIndex < subevents.length && subevents[otherIndex][1].type === "lineEndingBlank") {
        otherIndex += 2;
      }
      if (otherIndex < subevents.length && subevents[otherIndex][1].type === "content") {
        while (++otherIndex < subevents.length) {
          if (subevents[otherIndex][1].type === "content") {
            break;
          }
          if (subevents[otherIndex][1].type === "chunkText") {
            subevents[otherIndex][1]._isInFirstContentOfListItem = true;
            otherIndex++;
          }
        }
      }
    }
    if (event[0] === "enter") {
      if (event[1].contentType) {
        // Expand this chunk chain and jump past the inserted events.
        Object.assign(jumps, subcontent(events, index));
        index = jumps[index];
        more = true;
      }
    } else if (event[1]._container) {
      // Exiting a container: relabel trailing line endings so only the
      // first stays a plain `lineEnding`, and move them inside.
      otherIndex = index;
      lineIndex = void 0;
      while (otherIndex--) {
        otherEvent = events[otherIndex];
        if (otherEvent[1].type === "lineEnding" || otherEvent[1].type === "lineEndingBlank") {
          if (otherEvent[0] === "enter") {
            if (lineIndex) {
              events[lineIndex][1].type = "lineEndingBlank";
            }
            otherEvent[1].type = "lineEnding";
            lineIndex = otherIndex;
          }
        } else {
          break;
        }
      }
      if (lineIndex) {
        // Close the container before those endings and shift them in.
        event[1].end = Object.assign({}, events[lineIndex][1].start);
        parameters = events.slice(lineIndex, index);
        parameters.unshift(event);
        splice(events, lineIndex, index - lineIndex + 1, parameters);
      }
    }
  }
  return !more;
}
|
||
// Expands one `chunk*` token chain into the events produced by its
// subtokenizer, splicing those events into `events`. Returns a map of
// jumps: start index -> end index of each replaced region.
function subcontent(events, eventIndex) {
  const token = events[eventIndex][1];
  const context = events[eventIndex][2];
  let startPosition = eventIndex - 1;
  const startPositions = [];
  // Reuse the chunk's tokenizer when it already ran, else create one.
  const tokenizer = token._tokenizer || context.parser[token.contentType](token.start);
  const childEvents = tokenizer.events;
  const jumps = [];
  const gaps = {};
  let stream;
  let previousToken;
  let index = -1;
  let current = token;
  let adjust = 0;
  let start = 0;
  const breaks = [start];
  // Feed every linked chunk into the tokenizer.
  while (current) {
    while (events[++startPosition][1] !== current) {
    }
    startPositions.push(startPosition);
    if (!current._tokenizer) {
      stream = context.sliceStream(current);
      if (!current.next) {
        stream.push(null);
      }
      if (previousToken) {
        tokenizer.defineSkip(current.start);
      }
      if (current._isInFirstContentOfListItem) {
        tokenizer._gfmTasklistFirstContentOfListItem = true;
      }
      tokenizer.write(stream);
      if (current._isInFirstContentOfListItem) {
        tokenizer._gfmTasklistFirstContentOfListItem = void 0;
      }
    }
    previousToken = current;
    current = current.next;
  }
  // Find chunk boundaries in the child events: a void token that spans
  // a line break marks where one chunk ended and the next began.
  current = token;
  while (++index < childEvents.length) {
    if (
      childEvents[index][0] === "exit" && childEvents[index - 1][0] === "enter" && childEvents[index][1].type === childEvents[index - 1][1].type && childEvents[index][1].start.line !== childEvents[index][1].end.line
    ) {
      start = index + 1;
      breaks.push(start);
      current._tokenizer = void 0;
      current.previous = void 0;
      current = current.next;
    }
  }
  tokenizer.events = [];
  if (current) {
    current._tokenizer = void 0;
    current.previous = void 0;
  } else {
    breaks.pop();
  }
  // Splice each slice of child events over its chunk, back to front so
  // earlier indices stay valid.
  index = breaks.length;
  while (index--) {
    const slice = childEvents.slice(breaks[index], breaks[index + 1]);
    const start2 = startPositions.pop();
    jumps.unshift([start2, start2 + slice.length - 1]);
    splice(events, start2, 2, slice);
  }
  // Convert jumps to a gap map, adjusting for earlier insertions.
  index = -1;
  while (++index < jumps.length) {
    gaps[adjust + jumps[index][0]] = adjust + jumps[index][1];
    adjust += jumps[index][1] - jumps[index][0] - 1;
  }
  return gaps;
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/content.js
|
||
// Construct for "content": paragraph-like chunks of flow.
var content2 = {
  tokenize: tokenizeContent,
  resolve: resolveContent
};

// Partial construct checking whether content continues past a line ending.
var continuationConstruct = {
  tokenize: tokenizeContinuation,
  partial: true
};
|
||
// Expand the chunked content events in place before returning them.
function resolveContent(events) {
  subtokenize(events);
  return events;
}
|
||
// Tokenizes a run of content: one or more lines the flow parser could
// not interpret as anything else, chunked one `chunkContent` per line.
function tokenizeContent(effects, ok3) {
  let previousChunk;
  return chunkStart;

  function chunkStart(code) {
    effects.enter("content");
    previousChunk = effects.enter("chunkContent", {
      contentType: "content"
    });
    return chunkInside(code);
  }

  function chunkInside(code) {
    if (code === null) return contentEnd(code);
    // At a line ending, check whether the content continues.
    if (markdownLineEnding(code)) {
      return effects.check(continuationConstruct, contentContinue, contentEnd)(code);
    }
    effects.consume(code);
    return chunkInside;
  }

  function contentEnd(code) {
    effects.exit("chunkContent");
    effects.exit("content");
    return ok3(code);
  }

  function contentContinue(code) {
    effects.consume(code);
    effects.exit("chunkContent");
    // Link a fresh chunk to the previous one.
    previousChunk.next = effects.enter("chunkContent", {
      contentType: "content",
      previous: previousChunk
    });
    previousChunk = previousChunk.next;
    return chunkInside;
  }
}
|
||
// Checks whether content continues after a line ending: it does unless
// the next line is blank or starts another flow construct that may
// interrupt content.
function tokenizeContinuation(effects, ok3, nok) {
  const self = this;
  return startLookahead;

  function startLookahead(code) {
    effects.exit("chunkContent");
    effects.enter("lineEnding");
    effects.consume(code);
    effects.exit("lineEnding");
    return factorySpace(effects, prefixed, "linePrefix");
  }

  function prefixed(code) {
    if (code === null || markdownLineEnding(code)) {
      return nok(code);
    }
    const tail = self.events[self.events.length - 1];
    // A 4+ space indent cannot interrupt content (it would be indented
    // code), unless indented code is disabled.
    if (!self.parser.constructs.disable.null.includes("codeIndented") && tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], true).length >= 4) {
      return ok3(code);
    }
    // Otherwise content continues only if no flow construct interrupts.
    return effects.interrupt(self.parser.constructs.flow, nok, ok3)(code);
  }
}
|
||
|
||
// node_modules/micromark-factory-destination/index.js
|
||
// Parses a link/definition destination: either `<enclosed>` or a raw
// destination with parentheses balanced up to `max` levels deep.
function factoryDestination(effects, ok3, nok, type, literalType, literalMarkerType, rawType, stringType, max) {
  const limit = max || Number.POSITIVE_INFINITY;
  let balance = 0;
  return start;

  function start(code) {
    if (code === 60) {
      effects.enter(type);
      effects.enter(literalType);
      effects.enter(literalMarkerType);
      effects.consume(code);
      effects.exit(literalMarkerType);
      return enclosedBefore;
    }
    // EOF, space, `)`, or a control character cannot start a raw destination.
    if (code === null || code === 32 || code === 41 || asciiControl(code)) {
      return nok(code);
    }
    effects.enter(type);
    effects.enter(rawType);
    effects.enter(stringType);
    effects.enter("chunkString", { contentType: "string" });
    return raw(code);
  }

  function enclosedBefore(code) {
    // `<>` is a valid (empty) destination.
    if (code === 62) {
      effects.enter(literalMarkerType);
      effects.consume(code);
      effects.exit(literalMarkerType);
      effects.exit(literalType);
      effects.exit(type);
      return ok3;
    }
    effects.enter(stringType);
    effects.enter("chunkString", { contentType: "string" });
    return enclosed(code);
  }

  function enclosed(code) {
    if (code === 62) {
      effects.exit("chunkString");
      effects.exit(stringType);
      return enclosedBefore(code);
    }
    if (code === null || code === 60 || markdownLineEnding(code)) {
      return nok(code);
    }
    effects.consume(code);
    return code === 92 ? enclosedEscape : enclosed;
  }

  function enclosedEscape(code) {
    if (code === 60 || code === 62 || code === 92) {
      effects.consume(code);
      return enclosed;
    }
    return enclosed(code);
  }

  function raw(code) {
    // A bare `)`, space, or EOF ends the destination when balanced.
    if (!balance && (code === null || code === 41 || markdownLineEndingOrSpace(code))) {
      effects.exit("chunkString");
      effects.exit(stringType);
      effects.exit(rawType);
      effects.exit(type);
      return ok3(code);
    }
    if (balance < limit && code === 40) {
      effects.consume(code);
      balance++;
      return raw;
    }
    if (code === 41) {
      effects.consume(code);
      balance--;
      return raw;
    }
    if (code === null || code === 32 || code === 40 || asciiControl(code)) {
      return nok(code);
    }
    effects.consume(code);
    return code === 92 ? rawEscape : raw;
  }

  function rawEscape(code) {
    if (code === 40 || code === 41 || code === 92) {
      effects.consume(code);
      return raw;
    }
    return raw(code);
  }
}
|
||
|
||
// node_modules/micromark-factory-label/index.js
|
||
// Parses a link label: `[...]` with at most 999 characters, requiring
// at least one non-whitespace character inside.
function factoryLabel(effects, ok3, nok, type, markerType, stringType) {
  const self = this;
  let size = 0;
  let seen;
  return start;

  function start(code) {
    effects.enter(type);
    effects.enter(markerType);
    effects.consume(code);
    effects.exit(markerType);
    effects.enter(stringType);
    return atBreak;
  }

  function atBreak(code) {
    // Fail on overlong labels, EOF, a nested `[`, an empty `]`, or the
    // hidden-footnote `^` hook (legacy `micromark-extension-footnote`;
    // `micromark-extension-gfm-footnote` doesn’t need it).
    /* c8 ignore next 2 */
    const hiddenFootnote = code === 94 && !size && "_hiddenFootnoteSupport" in self.parser.constructs;
    if (size > 999 || code === null || code === 91 || (code === 93 && !seen) || hiddenFootnote) {
      return nok(code);
    }
    if (code === 93) {
      effects.exit(stringType);
      effects.enter(markerType);
      effects.consume(code);
      effects.exit(markerType);
      effects.exit(type);
      return ok3;
    }
    if (markdownLineEnding(code)) {
      effects.enter("lineEnding");
      effects.consume(code);
      effects.exit("lineEnding");
      return atBreak;
    }
    effects.enter("chunkString", { contentType: "string" });
    return labelInside(code);
  }

  function labelInside(code) {
    if (code === null || code === 91 || code === 93 || markdownLineEnding(code) || size++ > 999) {
      effects.exit("chunkString");
      return atBreak(code);
    }
    effects.consume(code);
    // The label needs at least one non-space character.
    if (!seen) seen = !markdownSpace(code);
    return code === 92 ? labelEscape : labelInside;
  }

  function labelEscape(code) {
    if (code === 91 || code === 92 || code === 93) {
      effects.consume(code);
      size++;
      return labelInside;
    }
    return labelInside(code);
  }
}
|
||
|
||
// node_modules/micromark-factory-title/index.js
|
||
// Parses a title in `"…"`, `'…'`, or `(…)`, allowing line endings.
function factoryTitle(effects, ok3, nok, type, markerType, stringType) {
  let marker;
  return start;

  function start(code) {
    if (code === 34 || code === 39 || code === 40) {
      effects.enter(type);
      effects.enter(markerType);
      effects.consume(code);
      effects.exit(markerType);
      // `(` closes with `)`; quotes close with themselves.
      marker = code === 40 ? 41 : code;
      return begin;
    }
    return nok(code);
  }

  function begin(code) {
    if (code === marker) {
      effects.enter(markerType);
      effects.consume(code);
      effects.exit(markerType);
      effects.exit(type);
      return ok3;
    }
    effects.enter(stringType);
    return atBreak(code);
  }

  function atBreak(code) {
    if (code === marker) {
      effects.exit(stringType);
      return begin(marker);
    }
    if (code === null) {
      return nok(code);
    }
    if (markdownLineEnding(code)) {
      effects.enter("lineEnding");
      effects.consume(code);
      effects.exit("lineEnding");
      return factorySpace(effects, atBreak, "linePrefix");
    }
    effects.enter("chunkString", { contentType: "string" });
    return inside(code);
  }

  function inside(code) {
    if (code === marker || code === null || markdownLineEnding(code)) {
      effects.exit("chunkString");
      return atBreak(code);
    }
    effects.consume(code);
    return code === 92 ? titleEscape : inside;
  }

  function titleEscape(code) {
    if (code === marker || code === 92) {
      effects.consume(code);
      return inside;
    }
    return inside(code);
  }
}
|
||
|
||
// node_modules/micromark-factory-whitespace/index.js
|
||
// Consumes any mix of line endings and spaces; spaces before the first
// line ending are a suffix, spaces after it a prefix.
function factoryWhitespace(effects, ok3) {
  let seenEol;
  return start;

  function start(code) {
    if (markdownLineEnding(code)) {
      effects.enter("lineEnding");
      effects.consume(code);
      effects.exit("lineEnding");
      seenEol = true;
      return start;
    }
    if (markdownSpace(code)) {
      return factorySpace(effects, start, seenEol ? "linePrefix" : "lineSuffix")(code);
    }
    return ok3(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/definition.js
|
||
// Construct for link reference definitions: `[label]: dest "title"`.
var definition = {
  name: "definition",
  tokenize: tokenizeDefinition
};

// Partial construct for the optional title after the destination.
var titleBefore = {
  tokenize: tokenizeTitleBefore,
  partial: true
};
|
||
// Tokenizes a definition: label, `:`, optional whitespace, destination,
// then optionally whitespace and a title, ending at EOL/EOF.
function tokenizeDefinition(effects, ok3, nok) {
  const self = this;
  let identifier;
  return start;

  function start(code) {
    effects.enter("definition");
    return before(code);
  }

  function before(code) {
    return factoryLabel.call(
      self,
      effects,
      labelAfter,
      // Note: we don’t need to reset the way `markdown-rs` does.
      nok,
      "definitionLabel",
      "definitionLabelMarker",
      "definitionLabelString"
    )(code);
  }

  function labelAfter(code) {
    // Normalize the just-parsed label, dropping its brackets.
    identifier = normalizeIdentifier(
      self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)
    );
    if (code === 58) {
      effects.enter("definitionMarker");
      effects.consume(code);
      effects.exit("definitionMarker");
      return markerAfter;
    }
    return nok(code);
  }

  function markerAfter(code) {
    return markdownLineEndingOrSpace(code) ? factoryWhitespace(effects, destinationBefore)(code) : destinationBefore(code);
  }

  function destinationBefore(code) {
    return factoryDestination(
      effects,
      destinationAfter,
      // Note: we don’t need to reset the way `markdown-rs` does.
      nok,
      "definitionDestination",
      "definitionDestinationLiteral",
      "definitionDestinationLiteralMarker",
      "definitionDestinationRaw",
      "definitionDestinationString"
    )(code);
  }

  function destinationAfter(code) {
    // The title is optional: succeed either way.
    return effects.attempt(titleBefore, after, after)(code);
  }

  function after(code) {
    return markdownSpace(code) ? factorySpace(effects, afterWhitespace, "whitespace")(code) : afterWhitespace(code);
  }

  function afterWhitespace(code) {
    if (code === null || markdownLineEnding(code)) {
      effects.exit("definition");
      // Record the identifier so references can resolve to it.
      self.parser.defined.push(identifier);
      return ok3(code);
    }
    return nok(code);
  }
}
|
||
// Partial tokenizer for the whitespace + title after a destination.
function tokenizeTitleBefore(effects, ok3, nok) {
  return titleBefore2;

  function titleBefore2(code) {
    // A title must be separated from the destination by whitespace.
    return markdownLineEndingOrSpace(code) ? factoryWhitespace(effects, beforeMarker)(code) : nok(code);
  }

  function beforeMarker(code) {
    return factoryTitle(
      effects,
      titleAfter,
      nok,
      "definitionTitle",
      "definitionTitleMarker",
      "definitionTitleString"
    )(code);
  }

  function titleAfter(code) {
    return markdownSpace(code) ? factorySpace(effects, titleAfterOptionalWhitespace, "whitespace")(code) : titleAfterOptionalWhitespace(code);
  }

  function titleAfterOptionalWhitespace(code) {
    // Only trailing whitespace may follow the title on the line.
    return code === null || markdownLineEnding(code) ? ok3(code) : nok(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/hard-break-escape.js
|
||
// Construct for hard line breaks written as a trailing backslash.
var hardBreakEscape = {
  name: "hardBreakEscape",
  tokenize: tokenizeHardBreakEscape
};
|
||
// Tokenizes a hard break: a backslash immediately before a line ending.
function tokenizeHardBreakEscape(effects, ok3, nok) {
  return start;

  function start(code) {
    effects.enter("hardBreakEscape");
    effects.consume(code);
    return after;
  }

  function after(code) {
    // Only a line ending may follow the backslash.
    if (markdownLineEnding(code)) {
      effects.exit("hardBreakEscape");
      return ok3(code);
    }
    return nok(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/heading-atx.js
|
||
// Construct for ATX headings (`# …` through `###### …`).
var headingAtx = {
  name: "headingAtx",
  tokenize: tokenizeHeadingAtx,
  resolve: resolveHeadingAtx
};
|
||
// Rewrites ATX heading events: trims surrounding whitespace and the
// optional closing sequence, wrapping what remains in an
// `atxHeadingText` that contains a `chunkText`.
function resolveHeadingAtx(events, context) {
  let contentEnd = events.length - 2;
  let contentStart = 3;
  let headingText;
  let chunk;
  // Skip whitespace after the opening sequence.
  if (events[contentStart][1].type === "whitespace") {
    contentStart += 2;
  }
  // Skip whitespace before the end.
  if (contentEnd - 2 > contentStart && events[contentEnd][1].type === "whitespace") {
    contentEnd -= 2;
  }
  // Drop an optional closing `#` sequence (and the space before it).
  if (events[contentEnd][1].type === "atxHeadingSequence" && (contentStart === contentEnd - 1 || contentEnd - 4 > contentStart && events[contentEnd - 2][1].type === "whitespace")) {
    contentEnd -= contentStart + 1 === contentEnd ? 2 : 4;
  }
  if (contentEnd > contentStart) {
    headingText = {
      type: "atxHeadingText",
      start: events[contentStart][1].start,
      end: events[contentEnd][1].end
    };
    chunk = {
      type: "chunkText",
      start: events[contentStart][1].start,
      end: events[contentEnd][1].end,
      contentType: "text"
    };
    splice(events, contentStart, contentEnd - contentStart + 1, [
      ["enter", headingText, context],
      ["enter", chunk, context],
      ["exit", chunk, context],
      ["exit", headingText, context]
    ]);
  }
  return events;
}
|
||
// Tokenizes an ATX heading: 1–6 `#` markers, then whitespace-separated
// text and optional further `#` sequences, until the end of the line.
function tokenizeHeadingAtx(effects, ok3, nok) {
  let size = 0;
  return start;

  function start(code) {
    effects.enter("atxHeading");
    return before(code);
  }

  function before(code) {
    effects.enter("atxHeadingSequence");
    return sequenceOpen(code);
  }

  function sequenceOpen(code) {
    // At most six `#` markers.
    if (code === 35 && size++ < 6) {
      effects.consume(code);
      return sequenceOpen;
    }
    if (code === null || markdownLineEndingOrSpace(code)) {
      effects.exit("atxHeadingSequence");
      return atBreak(code);
    }
    return nok(code);
  }

  function atBreak(code) {
    if (code === 35) {
      effects.enter("atxHeadingSequence");
      return sequenceFurther(code);
    }
    if (code === null || markdownLineEnding(code)) {
      effects.exit("atxHeading");
      return ok3(code);
    }
    if (markdownSpace(code)) {
      return factorySpace(effects, atBreak, "whitespace")(code);
    }
    effects.enter("atxHeadingText");
    return data(code);
  }

  function sequenceFurther(code) {
    if (code === 35) {
      effects.consume(code);
      return sequenceFurther;
    }
    effects.exit("atxHeadingSequence");
    return atBreak(code);
  }

  function data(code) {
    if (code === null || code === 35 || markdownLineEndingOrSpace(code)) {
      effects.exit("atxHeadingText");
      return atBreak(code);
    }
    effects.consume(code);
    return data;
  }
}
|
||
|
||
// node_modules/micromark-util-html-tag-name/index.js
|
||
// Tag names that open an HTML "block" (kind 6) in CommonMark.
var htmlBlockNames = [
  "address", "article", "aside", "base", "basefont", "blockquote",
  "body", "caption", "center", "col", "colgroup", "dd", "details",
  "dialog", "dir", "div", "dl", "dt", "fieldset", "figcaption",
  "figure", "footer", "form", "frame", "frameset", "h1", "h2", "h3",
  "h4", "h5", "h6", "head", "header", "hr", "html", "iframe",
  "legend", "li", "link", "main", "menu", "menuitem", "nav",
  "noframes", "ol", "optgroup", "option", "p", "param", "search",
  "section", "summary", "table", "tbody", "td", "tfoot", "th",
  "thead", "title", "tr", "track", "ul"
];
// Tag names whose content is raw text (kind 1): no nested parsing.
var htmlRawNames = ["pre", "script", "style", "textarea"];
|
||
|
||
// node_modules/micromark-core-commonmark/lib/html-flow.js
|
||
// Construct for HTML (flow): raw HTML blocks.
var htmlFlow = {
  name: "htmlFlow",
  tokenize: tokenizeHtmlFlow,
  resolveTo: resolveToHtmlFlow,
  concrete: true
};

// Partial construct: is the next line blank?
var blankLineBefore = {
  tokenize: tokenizeBlankLineBefore,
  partial: true
};

// Partial construct: does a non-lazy line follow?
var nonLazyContinuationStart = {
  tokenize: tokenizeNonLazyContinuationStart,
  partial: true
};
|
||
// Extends the most recent `htmlFlow` token backwards over a preceding
// line prefix, so the prefix becomes part of the raw HTML.
function resolveToHtmlFlow(events) {
  let index = events.length;
  // Find the latest `enter` of `htmlFlow`.
  while (index--) {
    if (events[index][0] === "enter" && events[index][1].type === "htmlFlow") {
      break;
    }
  }
  // Absorb a line prefix just before it, if any.
  if (index > 1 && events[index - 2][1].type === "linePrefix") {
    events[index][1].start = events[index - 2][1].start;
    events[index + 1][1].start = events[index - 2][1].start;
    events.splice(index - 2, 2);
  }
  return events;
}
|
||
function tokenizeHtmlFlow(effects, ok3, nok) {
|
||
const self = this;
|
||
let marker;
|
||
let closingTag;
|
||
let buffer;
|
||
let index2;
|
||
let markerB;
|
||
return start;
|
||
function start(code2) {
|
||
return before(code2);
|
||
}
|
||
function before(code2) {
|
||
effects.enter("htmlFlow");
|
||
effects.enter("htmlFlowData");
|
||
effects.consume(code2);
|
||
return open;
|
||
}
|
||
function open(code2) {
|
||
if (code2 === 33) {
|
||
effects.consume(code2);
|
||
return declarationOpen;
|
||
}
|
||
if (code2 === 47) {
|
||
effects.consume(code2);
|
||
closingTag = true;
|
||
return tagCloseStart;
|
||
}
|
||
if (code2 === 63) {
|
||
effects.consume(code2);
|
||
marker = 3;
|
||
return self.interrupt ? ok3 : continuationDeclarationInside;
|
||
}
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
buffer = String.fromCharCode(code2);
|
||
return tagName;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function declarationOpen(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
marker = 2;
|
||
return commentOpenInside;
|
||
}
|
||
if (code2 === 91) {
|
||
effects.consume(code2);
|
||
marker = 5;
|
||
index2 = 0;
|
||
return cdataOpenInside;
|
||
}
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
marker = 4;
|
||
return self.interrupt ? ok3 : continuationDeclarationInside;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function commentOpenInside(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return self.interrupt ? ok3 : continuationDeclarationInside;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function cdataOpenInside(code2) {
|
||
const value2 = "CDATA[";
|
||
if (code2 === value2.charCodeAt(index2++)) {
|
||
effects.consume(code2);
|
||
if (index2 === value2.length) {
|
||
return self.interrupt ? ok3 : continuation;
|
||
}
|
||
return cdataOpenInside;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function tagCloseStart(code2) {
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
buffer = String.fromCharCode(code2);
|
||
return tagName;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function tagName(code2) {
|
||
if (code2 === null || code2 === 47 || code2 === 62 || markdownLineEndingOrSpace(code2)) {
|
||
const slash = code2 === 47;
|
||
const name = buffer.toLowerCase();
|
||
if (!slash && !closingTag && htmlRawNames.includes(name)) {
|
||
marker = 1;
|
||
return self.interrupt ? ok3(code2) : continuation(code2);
|
||
}
|
||
if (htmlBlockNames.includes(buffer.toLowerCase())) {
|
||
marker = 6;
|
||
if (slash) {
|
||
effects.consume(code2);
|
||
return basicSelfClosing;
|
||
}
|
||
return self.interrupt ? ok3(code2) : continuation(code2);
|
||
}
|
||
marker = 7;
|
||
return self.interrupt && !self.parser.lazy[self.now().line] ? nok(code2) : closingTag ? completeClosingTagAfter(code2) : completeAttributeNameBefore(code2);
|
||
}
|
||
if (code2 === 45 || asciiAlphanumeric(code2)) {
|
||
effects.consume(code2);
|
||
buffer += String.fromCharCode(code2);
|
||
return tagName;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function basicSelfClosing(code2) {
|
||
if (code2 === 62) {
|
||
effects.consume(code2);
|
||
return self.interrupt ? ok3 : continuation;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function completeClosingTagAfter(code2) {
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return completeClosingTagAfter;
|
||
}
|
||
return completeEnd(code2);
|
||
}
|
||
function completeAttributeNameBefore(code2) {
|
||
if (code2 === 47) {
|
||
effects.consume(code2);
|
||
return completeEnd;
|
||
}
|
||
if (code2 === 58 || code2 === 95 || asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
return completeAttributeName;
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return completeAttributeNameBefore;
|
||
}
|
||
return completeEnd(code2);
|
||
}
|
||
function completeAttributeName(code2) {
|
||
if (code2 === 45 || code2 === 46 || code2 === 58 || code2 === 95 || asciiAlphanumeric(code2)) {
|
||
effects.consume(code2);
|
||
return completeAttributeName;
|
||
}
|
||
return completeAttributeNameAfter(code2);
|
||
}
|
||
function completeAttributeNameAfter(code2) {
|
||
if (code2 === 61) {
|
||
effects.consume(code2);
|
||
return completeAttributeValueBefore;
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return completeAttributeNameAfter;
|
||
}
|
||
return completeAttributeNameBefore(code2);
|
||
}
|
||
function completeAttributeValueBefore(code2) {
|
||
if (code2 === null || code2 === 60 || code2 === 61 || code2 === 62 || code2 === 96) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 34 || code2 === 39) {
|
||
effects.consume(code2);
|
||
markerB = code2;
|
||
return completeAttributeValueQuoted;
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return completeAttributeValueBefore;
|
||
}
|
||
return completeAttributeValueUnquoted(code2);
|
||
}
|
||
function completeAttributeValueQuoted(code2) {
|
||
if (code2 === markerB) {
|
||
effects.consume(code2);
|
||
markerB = null;
|
||
return completeAttributeValueQuotedAfter;
|
||
}
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
return nok(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return completeAttributeValueQuoted;
|
||
}
|
||
function completeAttributeValueUnquoted(code2) {
|
||
if (code2 === null || code2 === 34 || code2 === 39 || code2 === 47 || code2 === 60 || code2 === 61 || code2 === 62 || code2 === 96 || markdownLineEndingOrSpace(code2)) {
|
||
return completeAttributeNameAfter(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return completeAttributeValueUnquoted;
|
||
}
|
||
function completeAttributeValueQuotedAfter(code2) {
|
||
if (code2 === 47 || code2 === 62 || markdownSpace(code2)) {
|
||
return completeAttributeNameBefore(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function completeEnd(code2) {
|
||
if (code2 === 62) {
|
||
effects.consume(code2);
|
||
return completeAfter;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function completeAfter(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
return continuation(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return completeAfter;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function continuation(code2) {
|
||
if (code2 === 45 && marker === 2) {
|
||
effects.consume(code2);
|
||
return continuationCommentInside;
|
||
}
|
||
if (code2 === 60 && marker === 1) {
|
||
effects.consume(code2);
|
||
return continuationRawTagOpen;
|
||
}
|
||
if (code2 === 62 && marker === 4) {
|
||
effects.consume(code2);
|
||
return continuationClose;
|
||
}
|
||
if (code2 === 63 && marker === 3) {
|
||
effects.consume(code2);
|
||
return continuationDeclarationInside;
|
||
}
|
||
if (code2 === 93 && marker === 5) {
|
||
effects.consume(code2);
|
||
return continuationCdataInside;
|
||
}
|
||
if (markdownLineEnding(code2) && (marker === 6 || marker === 7)) {
|
||
effects.exit("htmlFlowData");
|
||
return effects.check(
|
||
blankLineBefore,
|
||
continuationAfter,
|
||
continuationStart
|
||
)(code2);
|
||
}
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("htmlFlowData");
|
||
return continuationStart(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return continuation;
|
||
}
|
||
function continuationStart(code2) {
|
||
return effects.check(
|
||
nonLazyContinuationStart,
|
||
continuationStartNonLazy,
|
||
continuationAfter
|
||
)(code2);
|
||
}
|
||
function continuationStartNonLazy(code2) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return continuationBefore;
|
||
}
|
||
function continuationBefore(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
return continuationStart(code2);
|
||
}
|
||
effects.enter("htmlFlowData");
|
||
return continuation(code2);
|
||
}
|
||
function continuationCommentInside(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return continuationDeclarationInside;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
function continuationRawTagOpen(code2) {
|
||
if (code2 === 47) {
|
||
effects.consume(code2);
|
||
buffer = "";
|
||
return continuationRawEndTag;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
function continuationRawEndTag(code2) {
|
||
if (code2 === 62) {
|
||
const name = buffer.toLowerCase();
|
||
if (htmlRawNames.includes(name)) {
|
||
effects.consume(code2);
|
||
return continuationClose;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
if (asciiAlpha(code2) && buffer.length < 8) {
|
||
effects.consume(code2);
|
||
buffer += String.fromCharCode(code2);
|
||
return continuationRawEndTag;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
function continuationCdataInside(code2) {
|
||
if (code2 === 93) {
|
||
effects.consume(code2);
|
||
return continuationDeclarationInside;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
function continuationDeclarationInside(code2) {
|
||
if (code2 === 62) {
|
||
effects.consume(code2);
|
||
return continuationClose;
|
||
}
|
||
if (code2 === 45 && marker === 2) {
|
||
effects.consume(code2);
|
||
return continuationDeclarationInside;
|
||
}
|
||
return continuation(code2);
|
||
}
|
||
function continuationClose(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("htmlFlowData");
|
||
return continuationAfter(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return continuationClose;
|
||
}
|
||
function continuationAfter(code2) {
|
||
effects.exit("htmlFlow");
|
||
return ok3(code2);
|
||
}
|
||
}
|
||
function tokenizeNonLazyContinuationStart(effects, ok3, nok) {
|
||
const self = this;
|
||
return start;
|
||
function start(code2) {
|
||
if (markdownLineEnding(code2)) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return after;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function after(code2) {
|
||
return self.parser.lazy[self.now().line] ? nok(code2) : ok3(code2);
|
||
}
|
||
}
|
||
function tokenizeBlankLineBefore(effects, ok3, nok) {
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return effects.attempt(blankLine, ok3, nok);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/html-text.js
|
||
var htmlText = {
|
||
name: "htmlText",
|
||
tokenize: tokenizeHtmlText
|
||
};
|
||
function tokenizeHtmlText(effects, ok3, nok) {
|
||
const self = this;
|
||
let marker;
|
||
let index2;
|
||
let returnState;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("htmlText");
|
||
effects.enter("htmlTextData");
|
||
effects.consume(code2);
|
||
return open;
|
||
}
|
||
function open(code2) {
|
||
if (code2 === 33) {
|
||
effects.consume(code2);
|
||
return declarationOpen;
|
||
}
|
||
if (code2 === 47) {
|
||
effects.consume(code2);
|
||
return tagCloseStart;
|
||
}
|
||
if (code2 === 63) {
|
||
effects.consume(code2);
|
||
return instruction;
|
||
}
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpen;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function declarationOpen(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return commentOpenInside;
|
||
}
|
||
if (code2 === 91) {
|
||
effects.consume(code2);
|
||
index2 = 0;
|
||
return cdataOpenInside;
|
||
}
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
return declaration;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function commentOpenInside(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return commentEnd;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function comment(code2) {
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return commentClose;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = comment;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return comment;
|
||
}
|
||
function commentClose(code2) {
|
||
if (code2 === 45) {
|
||
effects.consume(code2);
|
||
return commentEnd;
|
||
}
|
||
return comment(code2);
|
||
}
|
||
function commentEnd(code2) {
|
||
return code2 === 62 ? end(code2) : code2 === 45 ? commentClose(code2) : comment(code2);
|
||
}
|
||
function cdataOpenInside(code2) {
|
||
const value2 = "CDATA[";
|
||
if (code2 === value2.charCodeAt(index2++)) {
|
||
effects.consume(code2);
|
||
return index2 === value2.length ? cdata : cdataOpenInside;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function cdata(code2) {
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 93) {
|
||
effects.consume(code2);
|
||
return cdataClose;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = cdata;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return cdata;
|
||
}
|
||
function cdataClose(code2) {
|
||
if (code2 === 93) {
|
||
effects.consume(code2);
|
||
return cdataEnd;
|
||
}
|
||
return cdata(code2);
|
||
}
|
||
function cdataEnd(code2) {
|
||
if (code2 === 62) {
|
||
return end(code2);
|
||
}
|
||
if (code2 === 93) {
|
||
effects.consume(code2);
|
||
return cdataEnd;
|
||
}
|
||
return cdata(code2);
|
||
}
|
||
function declaration(code2) {
|
||
if (code2 === null || code2 === 62) {
|
||
return end(code2);
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = declaration;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return declaration;
|
||
}
|
||
function instruction(code2) {
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 63) {
|
||
effects.consume(code2);
|
||
return instructionClose;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = instruction;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return instruction;
|
||
}
|
||
function instructionClose(code2) {
|
||
return code2 === 62 ? end(code2) : instruction(code2);
|
||
}
|
||
function tagCloseStart(code2) {
|
||
if (asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
return tagClose;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function tagClose(code2) {
|
||
if (code2 === 45 || asciiAlphanumeric(code2)) {
|
||
effects.consume(code2);
|
||
return tagClose;
|
||
}
|
||
return tagCloseBetween(code2);
|
||
}
|
||
function tagCloseBetween(code2) {
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = tagCloseBetween;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return tagCloseBetween;
|
||
}
|
||
return end(code2);
|
||
}
|
||
function tagOpen(code2) {
|
||
if (code2 === 45 || asciiAlphanumeric(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpen;
|
||
}
|
||
if (code2 === 47 || code2 === 62 || markdownLineEndingOrSpace(code2)) {
|
||
return tagOpenBetween(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function tagOpenBetween(code2) {
|
||
if (code2 === 47) {
|
||
effects.consume(code2);
|
||
return end;
|
||
}
|
||
if (code2 === 58 || code2 === 95 || asciiAlpha(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpenAttributeName;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = tagOpenBetween;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpenBetween;
|
||
}
|
||
return end(code2);
|
||
}
|
||
function tagOpenAttributeName(code2) {
|
||
if (code2 === 45 || code2 === 46 || code2 === 58 || code2 === 95 || asciiAlphanumeric(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpenAttributeName;
|
||
}
|
||
return tagOpenAttributeNameAfter(code2);
|
||
}
|
||
function tagOpenAttributeNameAfter(code2) {
|
||
if (code2 === 61) {
|
||
effects.consume(code2);
|
||
return tagOpenAttributeValueBefore;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = tagOpenAttributeNameAfter;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpenAttributeNameAfter;
|
||
}
|
||
return tagOpenBetween(code2);
|
||
}
|
||
function tagOpenAttributeValueBefore(code2) {
|
||
if (code2 === null || code2 === 60 || code2 === 61 || code2 === 62 || code2 === 96) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 34 || code2 === 39) {
|
||
effects.consume(code2);
|
||
marker = code2;
|
||
return tagOpenAttributeValueQuoted;
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = tagOpenAttributeValueBefore;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
if (markdownSpace(code2)) {
|
||
effects.consume(code2);
|
||
return tagOpenAttributeValueBefore;
|
||
}
|
||
effects.consume(code2);
|
||
return tagOpenAttributeValueUnquoted;
|
||
}
|
||
function tagOpenAttributeValueQuoted(code2) {
|
||
if (code2 === marker) {
|
||
effects.consume(code2);
|
||
marker = void 0;
|
||
return tagOpenAttributeValueQuotedAfter;
|
||
}
|
||
if (code2 === null) {
|
||
return nok(code2);
|
||
}
|
||
if (markdownLineEnding(code2)) {
|
||
returnState = tagOpenAttributeValueQuoted;
|
||
return lineEndingBefore(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return tagOpenAttributeValueQuoted;
|
||
}
|
||
function tagOpenAttributeValueUnquoted(code2) {
|
||
if (code2 === null || code2 === 34 || code2 === 39 || code2 === 60 || code2 === 61 || code2 === 96) {
|
||
return nok(code2);
|
||
}
|
||
if (code2 === 47 || code2 === 62 || markdownLineEndingOrSpace(code2)) {
|
||
return tagOpenBetween(code2);
|
||
}
|
||
effects.consume(code2);
|
||
return tagOpenAttributeValueUnquoted;
|
||
}
|
||
function tagOpenAttributeValueQuotedAfter(code2) {
|
||
if (code2 === 47 || code2 === 62 || markdownLineEndingOrSpace(code2)) {
|
||
return tagOpenBetween(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function end(code2) {
|
||
if (code2 === 62) {
|
||
effects.consume(code2);
|
||
effects.exit("htmlTextData");
|
||
effects.exit("htmlText");
|
||
return ok3;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function lineEndingBefore(code2) {
|
||
effects.exit("htmlTextData");
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return lineEndingAfter;
|
||
}
|
||
function lineEndingAfter(code2) {
|
||
return markdownSpace(code2) ? factorySpace(
|
||
effects,
|
||
lineEndingAfterPrefix,
|
||
"linePrefix",
|
||
self.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4
|
||
)(code2) : lineEndingAfterPrefix(code2);
|
||
}
|
||
function lineEndingAfterPrefix(code2) {
|
||
effects.enter("htmlTextData");
|
||
return returnState(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/label-end.js
|
||
var labelEnd = {
|
||
name: "labelEnd",
|
||
tokenize: tokenizeLabelEnd,
|
||
resolveTo: resolveToLabelEnd,
|
||
resolveAll: resolveAllLabelEnd
|
||
};
|
||
var resourceConstruct = {
|
||
tokenize: tokenizeResource
|
||
};
|
||
var referenceFullConstruct = {
|
||
tokenize: tokenizeReferenceFull
|
||
};
|
||
var referenceCollapsedConstruct = {
|
||
tokenize: tokenizeReferenceCollapsed
|
||
};
|
||
function resolveAllLabelEnd(events) {
|
||
let index2 = -1;
|
||
while (++index2 < events.length) {
|
||
const token = events[index2][1];
|
||
if (token.type === "labelImage" || token.type === "labelLink" || token.type === "labelEnd") {
|
||
events.splice(index2 + 1, token.type === "labelImage" ? 4 : 2);
|
||
token.type = "data";
|
||
index2++;
|
||
}
|
||
}
|
||
return events;
|
||
}
|
||
function resolveToLabelEnd(events, context) {
|
||
let index2 = events.length;
|
||
let offset = 0;
|
||
let token;
|
||
let open;
|
||
let close2;
|
||
let media;
|
||
while (index2--) {
|
||
token = events[index2][1];
|
||
if (open) {
|
||
if (token.type === "link" || token.type === "labelLink" && token._inactive) {
|
||
break;
|
||
}
|
||
if (events[index2][0] === "enter" && token.type === "labelLink") {
|
||
token._inactive = true;
|
||
}
|
||
} else if (close2) {
|
||
if (events[index2][0] === "enter" && (token.type === "labelImage" || token.type === "labelLink") && !token._balanced) {
|
||
open = index2;
|
||
if (token.type !== "labelLink") {
|
||
offset = 2;
|
||
break;
|
||
}
|
||
}
|
||
} else if (token.type === "labelEnd") {
|
||
close2 = index2;
|
||
}
|
||
}
|
||
const group = {
|
||
type: events[open][1].type === "labelLink" ? "link" : "image",
|
||
start: Object.assign({}, events[open][1].start),
|
||
end: Object.assign({}, events[events.length - 1][1].end)
|
||
};
|
||
const label = {
|
||
type: "label",
|
||
start: Object.assign({}, events[open][1].start),
|
||
end: Object.assign({}, events[close2][1].end)
|
||
};
|
||
const text4 = {
|
||
type: "labelText",
|
||
start: Object.assign({}, events[open + offset + 2][1].end),
|
||
end: Object.assign({}, events[close2 - 2][1].start)
|
||
};
|
||
media = [
|
||
["enter", group, context],
|
||
["enter", label, context]
|
||
];
|
||
media = push(media, events.slice(open + 1, open + offset + 3));
|
||
media = push(media, [["enter", text4, context]]);
|
||
media = push(
|
||
media,
|
||
resolveAll(
|
||
context.parser.constructs.insideSpan.null,
|
||
events.slice(open + offset + 4, close2 - 3),
|
||
context
|
||
)
|
||
);
|
||
media = push(media, [
|
||
["exit", text4, context],
|
||
events[close2 - 2],
|
||
events[close2 - 1],
|
||
["exit", label, context]
|
||
]);
|
||
media = push(media, events.slice(close2 + 1));
|
||
media = push(media, [["exit", group, context]]);
|
||
splice(events, open, events.length, media);
|
||
return events;
|
||
}
|
||
function tokenizeLabelEnd(effects, ok3, nok) {
|
||
const self = this;
|
||
let index2 = self.events.length;
|
||
let labelStart;
|
||
let defined;
|
||
while (index2--) {
|
||
if ((self.events[index2][1].type === "labelImage" || self.events[index2][1].type === "labelLink") && !self.events[index2][1]._balanced) {
|
||
labelStart = self.events[index2][1];
|
||
break;
|
||
}
|
||
}
|
||
return start;
|
||
function start(code2) {
|
||
if (!labelStart) {
|
||
return nok(code2);
|
||
}
|
||
if (labelStart._inactive) {
|
||
return labelEndNok(code2);
|
||
}
|
||
defined = self.parser.defined.includes(
|
||
normalizeIdentifier(
|
||
self.sliceSerialize({
|
||
start: labelStart.end,
|
||
end: self.now()
|
||
})
|
||
)
|
||
);
|
||
effects.enter("labelEnd");
|
||
effects.enter("labelMarker");
|
||
effects.consume(code2);
|
||
effects.exit("labelMarker");
|
||
effects.exit("labelEnd");
|
||
return after;
|
||
}
|
||
function after(code2) {
|
||
if (code2 === 40) {
|
||
return effects.attempt(
|
||
resourceConstruct,
|
||
labelEndOk,
|
||
defined ? labelEndOk : labelEndNok
|
||
)(code2);
|
||
}
|
||
if (code2 === 91) {
|
||
return effects.attempt(
|
||
referenceFullConstruct,
|
||
labelEndOk,
|
||
defined ? referenceNotFull : labelEndNok
|
||
)(code2);
|
||
}
|
||
return defined ? labelEndOk(code2) : labelEndNok(code2);
|
||
}
|
||
function referenceNotFull(code2) {
|
||
return effects.attempt(
|
||
referenceCollapsedConstruct,
|
||
labelEndOk,
|
||
labelEndNok
|
||
)(code2);
|
||
}
|
||
function labelEndOk(code2) {
|
||
return ok3(code2);
|
||
}
|
||
function labelEndNok(code2) {
|
||
labelStart._balanced = true;
|
||
return nok(code2);
|
||
}
|
||
}
|
||
function tokenizeResource(effects, ok3, nok) {
|
||
return resourceStart;
|
||
function resourceStart(code2) {
|
||
effects.enter("resource");
|
||
effects.enter("resourceMarker");
|
||
effects.consume(code2);
|
||
effects.exit("resourceMarker");
|
||
return resourceBefore;
|
||
}
|
||
function resourceBefore(code2) {
|
||
return markdownLineEndingOrSpace(code2) ? factoryWhitespace(effects, resourceOpen)(code2) : resourceOpen(code2);
|
||
}
|
||
function resourceOpen(code2) {
|
||
if (code2 === 41) {
|
||
return resourceEnd(code2);
|
||
}
|
||
return factoryDestination(
|
||
effects,
|
||
resourceDestinationAfter,
|
||
resourceDestinationMissing,
|
||
"resourceDestination",
|
||
"resourceDestinationLiteral",
|
||
"resourceDestinationLiteralMarker",
|
||
"resourceDestinationRaw",
|
||
"resourceDestinationString",
|
||
32
|
||
)(code2);
|
||
}
|
||
function resourceDestinationAfter(code2) {
|
||
return markdownLineEndingOrSpace(code2) ? factoryWhitespace(effects, resourceBetween)(code2) : resourceEnd(code2);
|
||
}
|
||
function resourceDestinationMissing(code2) {
|
||
return nok(code2);
|
||
}
|
||
function resourceBetween(code2) {
|
||
if (code2 === 34 || code2 === 39 || code2 === 40) {
|
||
return factoryTitle(
|
||
effects,
|
||
resourceTitleAfter,
|
||
nok,
|
||
"resourceTitle",
|
||
"resourceTitleMarker",
|
||
"resourceTitleString"
|
||
)(code2);
|
||
}
|
||
return resourceEnd(code2);
|
||
}
|
||
function resourceTitleAfter(code2) {
|
||
return markdownLineEndingOrSpace(code2) ? factoryWhitespace(effects, resourceEnd)(code2) : resourceEnd(code2);
|
||
}
|
||
function resourceEnd(code2) {
|
||
if (code2 === 41) {
|
||
effects.enter("resourceMarker");
|
||
effects.consume(code2);
|
||
effects.exit("resourceMarker");
|
||
effects.exit("resource");
|
||
return ok3;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
}
|
||
function tokenizeReferenceFull(effects, ok3, nok) {
|
||
const self = this;
|
||
return referenceFull;
|
||
function referenceFull(code2) {
|
||
return factoryLabel.call(
|
||
self,
|
||
effects,
|
||
referenceFullAfter,
|
||
referenceFullMissing,
|
||
"reference",
|
||
"referenceMarker",
|
||
"referenceString"
|
||
)(code2);
|
||
}
|
||
function referenceFullAfter(code2) {
|
||
return self.parser.defined.includes(
|
||
normalizeIdentifier(
|
||
self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)
|
||
)
|
||
) ? ok3(code2) : nok(code2);
|
||
}
|
||
function referenceFullMissing(code2) {
|
||
return nok(code2);
|
||
}
|
||
}
|
||
function tokenizeReferenceCollapsed(effects, ok3, nok) {
|
||
return referenceCollapsedStart;
|
||
function referenceCollapsedStart(code2) {
|
||
effects.enter("reference");
|
||
effects.enter("referenceMarker");
|
||
effects.consume(code2);
|
||
effects.exit("referenceMarker");
|
||
return referenceCollapsedOpen;
|
||
}
|
||
function referenceCollapsedOpen(code2) {
|
||
if (code2 === 93) {
|
||
effects.enter("referenceMarker");
|
||
effects.consume(code2);
|
||
effects.exit("referenceMarker");
|
||
effects.exit("reference");
|
||
return ok3;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/label-start-image.js
|
||
var labelStartImage = {
|
||
name: "labelStartImage",
|
||
tokenize: tokenizeLabelStartImage,
|
||
resolveAll: labelEnd.resolveAll
|
||
};
|
||
function tokenizeLabelStartImage(effects, ok3, nok) {
|
||
const self = this;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("labelImage");
|
||
effects.enter("labelImageMarker");
|
||
effects.consume(code2);
|
||
effects.exit("labelImageMarker");
|
||
return open;
|
||
}
|
||
function open(code2) {
|
||
if (code2 === 91) {
|
||
effects.enter("labelMarker");
|
||
effects.consume(code2);
|
||
effects.exit("labelMarker");
|
||
effects.exit("labelImage");
|
||
return after;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function after(code2) {
|
||
return code2 === 94 && "_hiddenFootnoteSupport" in self.parser.constructs ? nok(code2) : ok3(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/label-start-link.js
|
||
var labelStartLink = {
|
||
name: "labelStartLink",
|
||
tokenize: tokenizeLabelStartLink,
|
||
resolveAll: labelEnd.resolveAll
|
||
};
|
||
function tokenizeLabelStartLink(effects, ok3, nok) {
|
||
const self = this;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("labelLink");
|
||
effects.enter("labelMarker");
|
||
effects.consume(code2);
|
||
effects.exit("labelMarker");
|
||
effects.exit("labelLink");
|
||
return after;
|
||
}
|
||
function after(code2) {
|
||
return code2 === 94 && "_hiddenFootnoteSupport" in self.parser.constructs ? nok(code2) : ok3(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/line-ending.js
|
||
var lineEnding = {
|
||
name: "lineEnding",
|
||
tokenize: tokenizeLineEnding
|
||
};
|
||
function tokenizeLineEnding(effects, ok3) {
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("lineEnding");
|
||
effects.consume(code2);
|
||
effects.exit("lineEnding");
|
||
return factorySpace(effects, ok3, "linePrefix");
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/thematic-break.js
|
||
var thematicBreak = {
|
||
name: "thematicBreak",
|
||
tokenize: tokenizeThematicBreak
|
||
};
|
||
function tokenizeThematicBreak(effects, ok3, nok) {
|
||
let size = 0;
|
||
let marker;
|
||
return start;
|
||
function start(code2) {
|
||
effects.enter("thematicBreak");
|
||
return before(code2);
|
||
}
|
||
function before(code2) {
|
||
marker = code2;
|
||
return atBreak(code2);
|
||
}
|
||
function atBreak(code2) {
|
||
if (code2 === marker) {
|
||
effects.enter("thematicBreakSequence");
|
||
return sequence(code2);
|
||
}
|
||
if (size >= 3 && (code2 === null || markdownLineEnding(code2))) {
|
||
effects.exit("thematicBreak");
|
||
return ok3(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function sequence(code2) {
|
||
if (code2 === marker) {
|
||
effects.consume(code2);
|
||
size++;
|
||
return sequence;
|
||
}
|
||
effects.exit("thematicBreakSequence");
|
||
return markdownSpace(code2) ? factorySpace(effects, atBreak, "whitespace")(code2) : atBreak(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/list.js
|
||
var list = {
|
||
name: "list",
|
||
tokenize: tokenizeListStart,
|
||
continuation: {
|
||
tokenize: tokenizeListContinuation
|
||
},
|
||
exit: tokenizeListEnd
|
||
};
|
||
var listItemPrefixWhitespaceConstruct = {
|
||
tokenize: tokenizeListItemPrefixWhitespace,
|
||
partial: true
|
||
};
|
||
var indentConstruct = {
|
||
tokenize: tokenizeIndent,
|
||
partial: true
|
||
};
|
||
function tokenizeListStart(effects, ok3, nok) {
|
||
const self = this;
|
||
const tail = self.events[self.events.length - 1];
|
||
let initialSize = tail && tail[1].type === "linePrefix" ? tail[2].sliceSerialize(tail[1], true).length : 0;
|
||
let size = 0;
|
||
return start;
|
||
function start(code2) {
|
||
const kind = self.containerState.type || (code2 === 42 || code2 === 43 || code2 === 45 ? "listUnordered" : "listOrdered");
|
||
if (kind === "listUnordered" ? !self.containerState.marker || code2 === self.containerState.marker : asciiDigit(code2)) {
|
||
if (!self.containerState.type) {
|
||
self.containerState.type = kind;
|
||
effects.enter(kind, {
|
||
_container: true
|
||
});
|
||
}
|
||
if (kind === "listUnordered") {
|
||
effects.enter("listItemPrefix");
|
||
return code2 === 42 || code2 === 45 ? effects.check(thematicBreak, nok, atMarker)(code2) : atMarker(code2);
|
||
}
|
||
if (!self.interrupt || code2 === 49) {
|
||
effects.enter("listItemPrefix");
|
||
effects.enter("listItemValue");
|
||
return inside(code2);
|
||
}
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function inside(code2) {
|
||
if (asciiDigit(code2) && ++size < 10) {
|
||
effects.consume(code2);
|
||
return inside;
|
||
}
|
||
if ((!self.interrupt || size < 2) && (self.containerState.marker ? code2 === self.containerState.marker : code2 === 41 || code2 === 46)) {
|
||
effects.exit("listItemValue");
|
||
return atMarker(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function atMarker(code2) {
|
||
effects.enter("listItemMarker");
|
||
effects.consume(code2);
|
||
effects.exit("listItemMarker");
|
||
self.containerState.marker = self.containerState.marker || code2;
|
||
return effects.check(
|
||
blankLine,
|
||
// Can’t be empty when interrupting.
|
||
self.interrupt ? nok : onBlank,
|
||
effects.attempt(
|
||
listItemPrefixWhitespaceConstruct,
|
||
endOfPrefix,
|
||
otherPrefix
|
||
)
|
||
);
|
||
}
|
||
function onBlank(code2) {
|
||
self.containerState.initialBlankLine = true;
|
||
initialSize++;
|
||
return endOfPrefix(code2);
|
||
}
|
||
function otherPrefix(code2) {
|
||
if (markdownSpace(code2)) {
|
||
effects.enter("listItemPrefixWhitespace");
|
||
effects.consume(code2);
|
||
effects.exit("listItemPrefixWhitespace");
|
||
return endOfPrefix;
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function endOfPrefix(code2) {
|
||
self.containerState.size = initialSize + self.sliceSerialize(effects.exit("listItemPrefix"), true).length;
|
||
return ok3(code2);
|
||
}
|
||
}
|
||
function tokenizeListContinuation(effects, ok3, nok) {
|
||
const self = this;
|
||
self.containerState._closeFlow = void 0;
|
||
return effects.check(blankLine, onBlank, notBlank);
|
||
function onBlank(code2) {
|
||
self.containerState.furtherBlankLines = self.containerState.furtherBlankLines || self.containerState.initialBlankLine;
|
||
return factorySpace(
|
||
effects,
|
||
ok3,
|
||
"listItemIndent",
|
||
self.containerState.size + 1
|
||
)(code2);
|
||
}
|
||
function notBlank(code2) {
|
||
if (self.containerState.furtherBlankLines || !markdownSpace(code2)) {
|
||
self.containerState.furtherBlankLines = void 0;
|
||
self.containerState.initialBlankLine = void 0;
|
||
return notInCurrentItem(code2);
|
||
}
|
||
self.containerState.furtherBlankLines = void 0;
|
||
self.containerState.initialBlankLine = void 0;
|
||
return effects.attempt(indentConstruct, ok3, notInCurrentItem)(code2);
|
||
}
|
||
function notInCurrentItem(code2) {
|
||
self.containerState._closeFlow = true;
|
||
self.interrupt = void 0;
|
||
return factorySpace(
|
||
effects,
|
||
effects.attempt(list, ok3, nok),
|
||
"linePrefix",
|
||
self.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4
|
||
)(code2);
|
||
}
|
||
}
|
||
function tokenizeIndent(effects, ok3, nok) {
|
||
const self = this;
|
||
return factorySpace(
|
||
effects,
|
||
afterPrefix,
|
||
"listItemIndent",
|
||
self.containerState.size + 1
|
||
);
|
||
function afterPrefix(code2) {
|
||
const tail = self.events[self.events.length - 1];
|
||
return tail && tail[1].type === "listItemIndent" && tail[2].sliceSerialize(tail[1], true).length === self.containerState.size ? ok3(code2) : nok(code2);
|
||
}
|
||
}
|
||
function tokenizeListEnd(effects) {
|
||
effects.exit(this.containerState.type);
|
||
}
|
||
function tokenizeListItemPrefixWhitespace(effects, ok3, nok) {
|
||
const self = this;
|
||
return factorySpace(
|
||
effects,
|
||
afterPrefix,
|
||
"listItemPrefixWhitespace",
|
||
self.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4 + 1
|
||
);
|
||
function afterPrefix(code2) {
|
||
const tail = self.events[self.events.length - 1];
|
||
return !markdownSpace(code2) && tail && tail[1].type === "listItemPrefixWhitespace" ? ok3(code2) : nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-core-commonmark/lib/setext-underline.js
|
||
var setextUnderline = {
|
||
name: "setextUnderline",
|
||
tokenize: tokenizeSetextUnderline,
|
||
resolveTo: resolveToSetextUnderline
|
||
};
|
||
function resolveToSetextUnderline(events, context) {
|
||
let index2 = events.length;
|
||
let content3;
|
||
let text4;
|
||
let definition3;
|
||
while (index2--) {
|
||
if (events[index2][0] === "enter") {
|
||
if (events[index2][1].type === "content") {
|
||
content3 = index2;
|
||
break;
|
||
}
|
||
if (events[index2][1].type === "paragraph") {
|
||
text4 = index2;
|
||
}
|
||
} else {
|
||
if (events[index2][1].type === "content") {
|
||
events.splice(index2, 1);
|
||
}
|
||
if (!definition3 && events[index2][1].type === "definition") {
|
||
definition3 = index2;
|
||
}
|
||
}
|
||
}
|
||
const heading2 = {
|
||
type: "setextHeading",
|
||
start: Object.assign({}, events[text4][1].start),
|
||
end: Object.assign({}, events[events.length - 1][1].end)
|
||
};
|
||
events[text4][1].type = "setextHeadingText";
|
||
if (definition3) {
|
||
events.splice(text4, 0, ["enter", heading2, context]);
|
||
events.splice(definition3 + 1, 0, ["exit", events[content3][1], context]);
|
||
events[content3][1].end = Object.assign({}, events[definition3][1].end);
|
||
} else {
|
||
events[content3][1] = heading2;
|
||
}
|
||
events.push(["exit", heading2, context]);
|
||
return events;
|
||
}
|
||
function tokenizeSetextUnderline(effects, ok3, nok) {
|
||
const self = this;
|
||
let marker;
|
||
return start;
|
||
function start(code2) {
|
||
let index2 = self.events.length;
|
||
let paragraph2;
|
||
while (index2--) {
|
||
if (self.events[index2][1].type !== "lineEnding" && self.events[index2][1].type !== "linePrefix" && self.events[index2][1].type !== "content") {
|
||
paragraph2 = self.events[index2][1].type === "paragraph";
|
||
break;
|
||
}
|
||
}
|
||
if (!self.parser.lazy[self.now().line] && (self.interrupt || paragraph2)) {
|
||
effects.enter("setextHeadingLine");
|
||
marker = code2;
|
||
return before(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
function before(code2) {
|
||
effects.enter("setextHeadingLineSequence");
|
||
return inside(code2);
|
||
}
|
||
function inside(code2) {
|
||
if (code2 === marker) {
|
||
effects.consume(code2);
|
||
return inside;
|
||
}
|
||
effects.exit("setextHeadingLineSequence");
|
||
return markdownSpace(code2) ? factorySpace(effects, after, "lineSuffix")(code2) : after(code2);
|
||
}
|
||
function after(code2) {
|
||
if (code2 === null || markdownLineEnding(code2)) {
|
||
effects.exit("setextHeadingLine");
|
||
return ok3(code2);
|
||
}
|
||
return nok(code2);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark/lib/initialize/flow.js
// Initial construct for document "flow" content: tries a blank line first,
// then flow-initial constructs (indented code), then regular flow constructs,
// finally falling back to `content2` (paragraphs / definitions).
var flow = {
  tokenize: initializeFlow
};
function initializeFlow(effects) {
  const self = this;
  const initial = effects.attempt(
    // Try to parse a blank line.
    blankLine,
    atBlankEnding,
    // Try to parse initial flow (essentially, only code).
    effects.attempt(
      this.parser.constructs.flowInitial,
      afterConstruct,
      factorySpace(
        effects,
        effects.attempt(
          this.parser.constructs.flow,
          afterConstruct,
          effects.attempt(content2, afterConstruct)
        ),
        "linePrefix"
      )
    )
  );
  return initial;
  // After a blank line: consume the line ending (or EOF) and restart.
  function atBlankEnding(code2) {
    if (code2 === null) {
      effects.consume(code2);
      return;
    }
    effects.enter("lineEndingBlank");
    effects.consume(code2);
    effects.exit("lineEndingBlank");
    // Reset so the next line starts a fresh construct.
    self.currentConstruct = void 0;
    return initial;
  }
  // After a successful flow construct: consume the line ending and restart.
  function afterConstruct(code2) {
    if (code2 === null) {
      effects.consume(code2);
      return;
    }
    effects.enter("lineEnding");
    effects.consume(code2);
    effects.exit("lineEnding");
    self.currentConstruct = void 0;
    return initial;
  }
}
// node_modules/micromark/lib/initialize/text.js
// Shared resolver that merges adjacent `data` events (see `createResolver`).
var resolver = {
  resolveAll: createResolver()
};
// The two inline content types: `string` (e.g. titles/destinations) and
// `text` (regular inline markdown).
var string = initializeFactory("string");
var text = initializeFactory("text");
// Build the initializer for one inline content type. `field` selects the
// construct map (`parser.constructs.string` or `parser.constructs.text`).
function initializeFactory(field) {
  return {
    tokenize: initializeText,
    resolveAll: createResolver(
      // Only `text` needs trailing-whitespace / hard-break resolution.
      field === "text" ? resolveAllLineSuffixes : void 0
    )
  };
  function initializeText(effects) {
    const self = this;
    const constructs2 = this.parser.constructs[field];
    const text4 = effects.attempt(constructs2, start, notText);
    return start;
    function start(code2) {
      return atBreak(code2) ? text4(code2) : notText(code2);
    }
    // Plain character data: accumulate until a construct could start.
    function notText(code2) {
      if (code2 === null) {
        effects.consume(code2);
        return;
      }
      effects.enter("data");
      effects.consume(code2);
      return data;
    }
    function data(code2) {
      if (atBreak(code2)) {
        effects.exit("data");
        return text4(code2);
      }
      effects.consume(code2);
      return data;
    }
    // Whether `code2` could begin a construct here: checks each candidate's
    // `previous` guard against the previously consumed code.
    function atBreak(code2) {
      if (code2 === null) {
        return true;
      }
      const list4 = constructs2[code2];
      let index2 = -1;
      if (list4) {
        while (++index2 < list4.length) {
          const item = list4[index2];
          if (!item.previous || item.previous.call(self, self.previous)) {
            return true;
          }
        }
      }
      return false;
    }
  }
}
/**
 * Build a `resolveAll` handler that collapses runs of adjacent `data` events
 * into a single enter/exit pair, then optionally chains `extraResolver`.
 */
function createResolver(extraResolver) {
  return resolveAllText;
  /**
   * Merge consecutive `data` tokens in `events`. The first token of each run
   * absorbs the run's full span; the redundant events are spliced out.
   */
  function resolveAllText(events, context) {
    let position = -1;
    let runStart;
    // Deliberately iterate one past the end so a trailing run is flushed.
    while (++position <= events.length) {
      const event = events[position];
      const isData = event !== undefined && event[1].type === "data";
      if (runStart === undefined) {
        if (isData) {
          runStart = position;
          // Skip the matching exit event of this data token.
          position++;
        }
      } else if (!isData) {
        // Run ended; collapse it when it spans more than one token.
        if (position !== runStart + 2) {
          events[runStart][1].end = events[position - 1][1].end;
          events.splice(runStart + 2, position - runStart - 2);
          position = runStart + 2;
        }
        runStart = undefined;
      }
    }
    if (extraResolver) {
      return extraResolver(events, context);
    }
    return events;
  }
}
// Resolver for `text`: turn trailing spaces/tabs at the end of each line of
// `data` into `lineSuffix` tokens, or into `hardBreakTrailing` when two or
// more trailing spaces precede a real line ending.
function resolveAllLineSuffixes(events, context) {
  let eventIndex = 0;
  while (++eventIndex <= events.length) {
    // Consider each line ending (or the end of the stream) preceded by data.
    if ((eventIndex === events.length || events[eventIndex][1].type === "lineEnding") && events[eventIndex - 1][1].type === "data") {
      const data = events[eventIndex - 1][1];
      const chunks = context.sliceStream(data);
      let index2 = chunks.length;
      let bufferIndex = -1;
      let size = 0;
      let tabs;
      // Walk the chunks backwards, counting trailing spaces (code 32) and
      // tab chunks (-2); -1 is a virtual space belonging to a tab.
      while (index2--) {
        const chunk = chunks[index2];
        if (typeof chunk === "string") {
          bufferIndex = chunk.length;
          while (chunk.charCodeAt(bufferIndex - 1) === 32) {
            size++;
            bufferIndex--;
          }
          // Non-space content remains in this chunk: stop here.
          if (bufferIndex)
            break;
          bufferIndex = -1;
        } else if (chunk === -2) {
          tabs = true;
          size++;
        } else if (chunk === -1) {
          // Virtual spaces are skipped (they carry no width of their own).
        } else {
          // Any other character code: stop before this chunk.
          index2++;
          break;
        }
      }
      if (size) {
        const token = {
          // A hard break needs a real line ending, 2+ spaces, and no tabs.
          type: eventIndex === events.length || tabs || size < 2 ? "lineSuffix" : "hardBreakTrailing",
          start: {
            line: data.end.line,
            column: data.end.column - size,
            offset: data.end.offset - size,
            _index: data.start._index + index2,
            _bufferIndex: index2 ? bufferIndex : data.start._bufferIndex + bufferIndex
          },
          end: Object.assign({}, data.end)
        };
        // Shrink the data token so it ends where the suffix starts.
        data.end = Object.assign({}, token.start);
        if (data.start.offset === data.end.offset) {
          // The data token was entirely suffix: replace it in place.
          Object.assign(data, token);
        } else {
          events.splice(
            eventIndex,
            0,
            ["enter", token, context],
            ["exit", token, context]
          );
          eventIndex += 2;
        }
      }
      eventIndex++;
    }
  }
  return events;
}
// node_modules/micromark/lib/create-tokenizer.js
// Create a tokenizer ("TokenizeContext") for one content type. `parser` is
// the shared parser, `initialize` the initial construct, `from` an optional
// starting point. Feed it chunks through `context.write`.
function createTokenizer(parser, initialize, from) {
  // Current place in the chunk stream. `_index` addresses a chunk and
  // `_bufferIndex` an offset inside a string chunk (-1 = not in a string).
  let point3 = Object.assign(
    from ? Object.assign({}, from) : {
      line: 1,
      column: 1,
      offset: 0
    },
    {
      _index: 0,
      _bufferIndex: -1
    }
  );
  // Map of line number -> column used to "skip" container indentation.
  const columnStart = {};
  const resolveAllConstructs = [];
  let chunks = [];
  let stack = [];
  let consumed = true;
  // Effects handed to constructs: token events plus attempt/check/interrupt
  // hooks that can rewind on failure.
  const effects = {
    consume,
    enter,
    exit: exit2,
    attempt: constructFactory(onsuccessfulconstruct),
    check: constructFactory(onsuccessfulcheck),
    interrupt: constructFactory(onsuccessfulcheck, {
      interrupt: true
    })
  };
  const context = {
    previous: null,
    code: null,
    containerState: {},
    events: [],
    parser,
    sliceStream,
    sliceSerialize,
    now,
    defineSkip,
    write
  };
  let state = initialize.tokenize.call(context, effects);
  // NOTE(review): `expectedCode` is assigned in `go` but never read here —
  // presumably kept from dev-mode assertions in the unbundled source.
  let expectedCode;
  if (initialize.resolveAll) {
    resolveAllConstructs.push(initialize);
  }
  return context;
  // Feed a slice of chunks; returns the events once the stream has ended
  // (signalled by a trailing `null` chunk), otherwise an empty array.
  function write(slice) {
    chunks = push(chunks, slice);
    main();
    if (chunks[chunks.length - 1] !== null) {
      return [];
    }
    addResult(initialize, 0);
    context.events = resolveAll(resolveAllConstructs, context.events, context);
    return context.events;
  }
  function sliceSerialize(token, expandTabs) {
    return serializeChunks(sliceStream(token), expandTabs);
  }
  function sliceStream(token) {
    return sliceChunks(chunks, token);
  }
  // Snapshot of the current point.
  function now() {
    const { line, column, offset, _index, _bufferIndex } = point3;
    return {
      line,
      column,
      offset,
      _index,
      _bufferIndex
    };
  }
  // Declare that `value2.line` effectively starts at `value2.column`
  // (used for container prefixes).
  function defineSkip(value2) {
    columnStart[value2.line] = value2.column;
    accountForPotentialSkip();
  }
  // Run the state machine over all buffered chunks.
  function main() {
    let chunkIndex;
    while (point3._index < chunks.length) {
      const chunk = chunks[point3._index];
      if (typeof chunk === "string") {
        chunkIndex = point3._index;
        if (point3._bufferIndex < 0) {
          point3._bufferIndex = 0;
        }
        // Stay inside this string chunk until it is consumed (or a restore
        // moved the point elsewhere).
        while (point3._index === chunkIndex && point3._bufferIndex < chunk.length) {
          go(chunk.charCodeAt(point3._bufferIndex));
        }
      } else {
        go(chunk);
      }
    }
  }
  // Step the state machine with one character code.
  function go(code2) {
    consumed = void 0;
    expectedCode = code2;
    state = state(code2);
  }
  // Move past `code2`, updating line/column/offset bookkeeping.
  function consume(code2) {
    if (markdownLineEnding(code2)) {
      point3.line++;
      point3.column = 1;
      // A CRLF chunk (-3) advances the offset by two characters.
      point3.offset += code2 === -3 ? 2 : 1;
      accountForPotentialSkip();
    } else if (code2 !== -1) {
      point3.column++;
      point3.offset++;
    }
    if (point3._bufferIndex < 0) {
      point3._index++;
    } else {
      point3._bufferIndex++;
      // Finished this string chunk: move on to the next chunk.
      if (point3._bufferIndex === chunks[point3._index].length) {
        point3._bufferIndex = -1;
        point3._index++;
      }
    }
    context.previous = code2;
    consumed = true;
  }
  // Open a token of `type` and push it on the stack.
  function enter(type, fields) {
    const token = fields || {};
    token.type = type;
    token.start = now();
    context.events.push(["enter", token, context]);
    stack.push(token);
    return token;
  }
  // Close the most recently opened token.
  function exit2(type) {
    const token = stack.pop();
    token.end = now();
    context.events.push(["exit", token, context]);
    return token;
  }
  // `attempt`: keep the events of a successful construct.
  function onsuccessfulconstruct(construct, info) {
    addResult(construct, info.from);
  }
  // `check` / `interrupt`: always rewind, even on success.
  function onsuccessfulcheck(_, info) {
    info.restore();
  }
  // Build attempt/check/interrupt hooks. `onreturn` runs on success; `fields`
  // are extra properties layered over the context (e.g. `interrupt: true`).
  function constructFactory(onreturn, fields) {
    return hook;
    function hook(constructs2, returnState, bogusState) {
      let listOfConstructs;
      let constructIndex;
      let currentConstruct;
      let info;
      return Array.isArray(constructs2) ? handleListOfConstructs(constructs2) : "tokenize" in constructs2 ? (
        // @ts-expect-error Looks like a construct.
        handleListOfConstructs([constructs2])
      ) : handleMapOfConstructs(constructs2);
      // Map keyed by code: gather candidates for the current code plus the
      // `null` (any-code) bucket.
      function handleMapOfConstructs(map4) {
        return start;
        function start(code2) {
          const def = code2 !== null && map4[code2];
          const all2 = code2 !== null && map4.null;
          const list4 = [
            // To do: add more extension tests.
            /* c8 ignore next 2 */
            ...Array.isArray(def) ? def : def ? [def] : [],
            ...Array.isArray(all2) ? all2 : all2 ? [all2] : []
          ];
          return handleListOfConstructs(list4)(code2);
        }
      }
      // Try each candidate in order; fall through to `bogusState` when the
      // list is empty or exhausted.
      function handleListOfConstructs(list4) {
        listOfConstructs = list4;
        constructIndex = 0;
        if (list4.length === 0) {
          return bogusState;
        }
        return handleConstruct(list4[constructIndex]);
      }
      function handleConstruct(construct) {
        return start;
        function start(code2) {
          // Snapshot so a failed attempt can be rolled back.
          info = store();
          currentConstruct = construct;
          if (!construct.partial) {
            context.currentConstruct = construct;
          }
          // Constructs can be disabled by name via extensions.
          if (construct.name && context.parser.constructs.disable.null.includes(construct.name)) {
            return nok(code2);
          }
          return construct.tokenize.call(
            // If we do have fields, create an object w/ `context` as its
            // prototype.
            // This allows a “live binding”, which is needed for `interrupt`.
            fields ? Object.assign(Object.create(context), fields) : context,
            effects,
            ok3,
            nok
          )(code2);
        }
      }
      function ok3(code2) {
        consumed = true;
        onreturn(currentConstruct, info);
        return returnState;
      }
      function nok(code2) {
        consumed = true;
        info.restore();
        // Try the next candidate, if any.
        if (++constructIndex < listOfConstructs.length) {
          return handleConstruct(listOfConstructs[constructIndex]);
        }
        return bogusState;
      }
    }
  }
  // Record a finished construct: register its `resolveAll`, apply `resolve`
  // to its own events, and let `resolveTo` rewrite the whole event list.
  function addResult(construct, from2) {
    if (construct.resolveAll && !resolveAllConstructs.includes(construct)) {
      resolveAllConstructs.push(construct);
    }
    if (construct.resolve) {
      splice(
        context.events,
        from2,
        context.events.length - from2,
        construct.resolve(context.events.slice(from2), context)
      );
    }
    if (construct.resolveTo) {
      context.events = construct.resolveTo(context.events, context);
    }
  }
  // Snapshot the tokenizer so a failed attempt can be undone.
  function store() {
    const startPoint = now();
    const startPrevious = context.previous;
    const startCurrentConstruct = context.currentConstruct;
    const startEventsIndex = context.events.length;
    const startStack = Array.from(stack);
    return {
      restore,
      from: startEventsIndex
    };
    function restore() {
      point3 = startPoint;
      context.previous = startPrevious;
      context.currentConstruct = startCurrentConstruct;
      context.events.length = startEventsIndex;
      stack = startStack;
      accountForPotentialSkip();
    }
  }
  // Jump the column/offset forward when the current line has a defined skip.
  function accountForPotentialSkip() {
    if (point3.line in columnStart && point3.column < 2) {
      point3.column = columnStart[point3.line];
      point3.offset += columnStart[point3.line] - 1;
    }
  }
}
/**
 * Cut the chunk stream down to the chunks covered by `token`, trimming the
 * first and last string chunks to the token's buffer offsets.
 */
function sliceChunks(chunks, token) {
  const { _index: firstChunk, _bufferIndex: firstOffset } = token.start;
  const { _index: lastChunk, _bufferIndex: lastOffset } = token.end;
  // Token contained in a single string chunk: one trimmed slice.
  if (firstChunk === lastChunk) {
    return [chunks[firstChunk].slice(firstOffset, lastOffset)];
  }
  const view = chunks.slice(firstChunk, lastChunk);
  if (firstOffset > -1) {
    const head = view[0];
    if (typeof head === "string") {
      view[0] = head.slice(firstOffset);
    } else {
      // Start offset points past a non-string chunk: drop it entirely.
      view.shift();
    }
  }
  // Append the partial final chunk, when the token reaches into it.
  if (lastOffset > 0) {
    view.push(chunks[lastChunk].slice(0, lastOffset));
  }
  return view;
}
/**
 * Serialize a list of chunks back into a string.
 *
 * Negative chunks are virtual codes: -5 CR, -4 LF, -3 CRLF, -2 tab,
 * -1 virtual space (a tab's filler up to the next tab stop); other numbers
 * are plain character codes. With `expandTabs` falsy a tab chunk serializes
 * as a real tab and its virtual spaces are skipped; with `expandTabs` truthy
 * the tab becomes a space and every virtual space is kept.
 */
function serializeChunks(chunks, expandTabs) {
  let index2 = -1;
  const result = [];
  // Whether the previous chunk was a tab (-2); used to drop its fillers.
  let atTab;
  while (++index2 < chunks.length) {
    const chunk = chunks[index2];
    let value2;
    if (typeof chunk === "string") {
      value2 = chunk;
    } else
      switch (chunk) {
        case -5: {
          value2 = "\r";
          break;
        }
        case -4: {
          value2 = "\n";
          break;
        }
        case -3: {
          value2 = "\r\n";
          break;
        }
        case -2: {
          // Fix: a non-expanded tab must serialize as "\t", not " " —
          // otherwise sliceSerialize() corrupts tab-indented content
          // (and the -1 skip below would drop the tab's width entirely).
          value2 = expandTabs ? " " : "\t";
          break;
        }
        case -1: {
          // Virtual spaces belonging to a preserved tab are skipped.
          if (!expandTabs && atTab)
            continue;
          value2 = " ";
          break;
        }
        default: {
          value2 = String.fromCharCode(chunk);
        }
      }
    atTab = chunk === -2;
    result.push(value2);
  }
  return result.join("");
}
// node_modules/micromark/lib/constructs.js
// Default (CommonMark) construct maps. Keys are character codes; negative
// codes are virtual chunks (-5 CR, -4 LF, -3 CRLF, -2 tab, -1 virtual space).
var constructs_exports = {};
__export(constructs_exports, {
  attentionMarkers: () => attentionMarkers,
  contentInitial: () => contentInitial,
  disable: () => disable,
  document: () => document2,
  flow: () => flow2,
  flowInitial: () => flowInitial,
  insideSpan: () => insideSpan,
  string: () => string2,
  text: () => text2
});
// Containers: `*` `+` `-` and `0`-`9` start lists; `>` starts block quotes.
var document2 = {
  [42]: list,
  [43]: list,
  [45]: list,
  [48]: list,
  [49]: list,
  [50]: list,
  [51]: list,
  [52]: list,
  [53]: list,
  [54]: list,
  [55]: list,
  [56]: list,
  [57]: list,
  [62]: blockQuote
};
// `[` may start a definition before other content.
var contentInitial = {
  [91]: definition
};
// Leading whitespace in flow: indented code.
var flowInitial = {
  [-2]: codeIndented,
  [-1]: codeIndented,
  [32]: codeIndented
};
// Flow: `#` ATX heading, `*`/`-`/`_` thematic break, `-`/`=` setext
// underline, `<` HTML, `` ` ``/`~` fenced code.
var flow2 = {
  [35]: headingAtx,
  [42]: thematicBreak,
  [45]: [setextUnderline, thematicBreak],
  [60]: htmlFlow,
  [61]: setextUnderline,
  [95]: thematicBreak,
  [96]: codeFenced,
  [126]: codeFenced
};
// String content: `&` character references and `\` escapes.
var string2 = {
  [38]: characterReference,
  [92]: characterEscape
};
// Text content: line endings, `!` image start, `&` reference, `*`/`_`
// attention, `<` autolink/HTML, `[`/`]` label start/end, `\` hard break or
// escape, `` ` `` code span.
var text2 = {
  [-5]: lineEnding,
  [-4]: lineEnding,
  [-3]: lineEnding,
  [33]: labelStartImage,
  [38]: characterReference,
  [42]: attention,
  [60]: [autolink, htmlText],
  [91]: labelStartLink,
  [92]: [hardBreakEscape, characterEscape],
  [93]: labelEnd,
  [95]: attention,
  [96]: codeText
};
var insideSpan = {
  null: [attention, resolver]
};
// `*` (42) and `_` (95) act as attention markers.
var attentionMarkers = {
  null: [42, 95]
};
// No constructs are disabled by default.
var disable = {
  null: []
};
// node_modules/micromark/lib/parse.js
/**
 * Build a parser: combine the default constructs with any configured
 * extensions and expose one tokenizer factory per content type.
 */
function parse(options) {
  const settings = options || {};
  const extensionList = [constructs_exports, ...settings.extensions || []];
  const parser = {
    defined: [],
    lazy: {},
    constructs: combineExtensions(extensionList),
    content: create2(content),
    document: create2(document),
    flow: create2(flow),
    string: create2(string),
    text: create2(text)
  };
  return parser;
  // Tokenizer factory bound to this parser and an initial construct.
  function create2(initial) {
    return function creator(from) {
      return createTokenizer(parser, initial, from);
    };
  }
}
// node_modules/micromark/lib/postprocess.js
// Repeatedly subtokenize until every content token has been expanded.
function postprocess(events) {
  let done = false;
  while (!done) {
    done = subtokenize(events);
  }
  return events;
}
// node_modules/micromark/lib/preprocess.js
// Characters needing special handling: NUL, tab, LF, CR.
var search = /[\0\t\n\r]/g;
// Create a streaming preprocessor that turns incoming text into micromark
// chunks: plain string runs plus virtual codes (-5 CR, -4 LF, -3 CRLF,
// -2 tab, -1 virtual space), with NULs replaced by U+FFFD.
function preprocess() {
  let column = 1;
  // Carry-over between calls: text after the last special character.
  let buffer = "";
  let start = true;
  let atCarriageReturn;
  return preprocessor;
  // `value2`: string or binary data (decoded with `encoding` — TODO confirm
  // all callers pass TextDecoder-compatible encodings); `end` is true on the
  // final call, which flushes the buffer and emits a terminating `null`.
  function preprocessor(value2, encoding, end) {
    const chunks = [];
    let match2;
    let next;
    let startPosition;
    let endPosition;
    let code2;
    value2 = buffer + (typeof value2 === "string" ? value2.toString() : new TextDecoder(encoding || void 0).decode(value2));
    startPosition = 0;
    buffer = "";
    if (start) {
      // Strip a leading BOM (U+FEFF) on the very first chunk only.
      if (value2.charCodeAt(0) === 65279) {
        startPosition++;
      }
      start = void 0;
    }
    while (startPosition < value2.length) {
      search.lastIndex = startPosition;
      match2 = search.exec(value2);
      endPosition = match2 && match2.index !== void 0 ? match2.index : value2.length;
      code2 = value2.charCodeAt(endPosition);
      if (!match2) {
        // No special character left: keep the tail for the next call.
        buffer = value2.slice(startPosition);
        break;
      }
      if (code2 === 10 && startPosition === endPosition && atCarriageReturn) {
        // LF directly after a pending CR: emit one CRLF chunk.
        chunks.push(-3);
        atCarriageReturn = void 0;
      } else {
        if (atCarriageReturn) {
          chunks.push(-5);
          atCarriageReturn = void 0;
        }
        if (startPosition < endPosition) {
          chunks.push(value2.slice(startPosition, endPosition));
          column += endPosition - startPosition;
        }
        switch (code2) {
          case 0: {
            // NUL becomes the replacement character.
            chunks.push(65533);
            column++;
            break;
          }
          case 9: {
            // Tab: emit -2 plus virtual spaces up to the next 4-column stop.
            next = Math.ceil(column / 4) * 4;
            chunks.push(-2);
            while (column++ < next)
              chunks.push(-1);
            break;
          }
          case 10: {
            chunks.push(-4);
            column = 1;
            break;
          }
          default: {
            // CR: defer, in case an LF follows to form a CRLF pair.
            atCarriageReturn = true;
            column = 1;
          }
        }
      }
      startPosition = endPosition + 1;
    }
    if (end) {
      // Flush a pending CR and buffered text; `null` marks end of stream.
      if (atCarriageReturn)
        chunks.push(-5);
      if (buffer)
        chunks.push(buffer);
      chunks.push(null);
    }
    return chunks;
  }
}
// node_modules/micromark-util-decode-string/index.js
// Matches a backslash escape (capture 1) or a character reference between
// `&` and `;` (capture 2: numeric with a leading `#`, or a named entity).
var characterEscapeOrReference = /\\([!-/:-@[-`{-~])|&(#(?:\d{1,7}|x[\da-f]{1,6})|[\da-z]{1,31});/gi;
/**
 * Decode markdown character escapes and character references in `value2`.
 */
function decodeString(value2) {
  return value2.replace(characterEscapeOrReference, decode);
}
/**
 * Replacement callback: `$1` is an escaped ASCII punctuation character,
 * `$2` the body of a character reference (without `&`/`;`).
 */
function decode($0, $1, $2) {
  // A backslash escape decodes to the escaped character itself.
  if ($1) {
    return $1;
  }
  const first = $2.charCodeAt(0);
  // `#` marks a numeric reference; `x`/`X` selects hexadecimal.
  if (first === 35) {
    const second = $2.charCodeAt(1);
    const hex = second === 120 || second === 88;
    return decodeNumericCharacterReference($2.slice(hex ? 2 : 1), hex ? 16 : 10);
  }
  // A named reference; keep the raw match when the name is unknown.
  return decodeNamedCharacterReference($2) || $0;
}
// node_modules/unist-util-stringify-position/lib/index.js
/**
 * Serialize a unist node, position, or point as `line:column` (point) or
 * `start-end` (position). Returns an empty string for anything else.
 */
function stringifyPosition(value2) {
  if (value2 && typeof value2 === "object") {
    // A node (has `type`/`position`): use its position.
    if ("position" in value2 || "type" in value2) {
      return position(value2.position);
    }
    // A position object (has `start`/`end`).
    if ("start" in value2 || "end" in value2) {
      return position(value2);
    }
    // A point (has `line`/`column`).
    if ("line" in value2 || "column" in value2) {
      return point(value2);
    }
  }
  return "";
}
// Format one point; missing or invalid parts default to `1`.
function point(point3) {
  return `${index(point3 && point3.line)}:${index(point3 && point3.column)}`;
}
// Format a start/end pair.
function position(pos) {
  return `${point(pos && pos.start)}-${point(pos && pos.end)}`;
}
// A usable 1-based index: any truthy number passes through, else `1`.
function index(value2) {
  if (typeof value2 === "number" && value2) {
    return value2;
  }
  return 1;
}
// node_modules/mdast-util-from-markdown/lib/index.js
var own2 = {}.hasOwnProperty;
/**
 * Parse markdown `value2` into an mdast tree. `encoding` (optional) applies
 * when `value2` is binary data; `options` configures parser and compiler.
 */
function fromMarkdown(value2, encoding, options) {
  // Support calling `fromMarkdown(value, options)` without an encoding.
  if (typeof encoding !== "string") {
    options = encoding;
    encoding = void 0;
  }
  const chunks = preprocess()(value2, encoding, true);
  const events = postprocess(parse(options).document().write(chunks));
  return compiler(options)(events);
}
function compiler(options) {
|
||
const config = {
|
||
transforms: [],
|
||
canContainEols: ["emphasis", "fragment", "heading", "paragraph", "strong"],
|
||
enter: {
|
||
autolink: opener2(link2),
|
||
autolinkProtocol: onenterdata,
|
||
autolinkEmail: onenterdata,
|
||
atxHeading: opener2(heading2),
|
||
blockQuote: opener2(blockQuote2),
|
||
characterEscape: onenterdata,
|
||
characterReference: onenterdata,
|
||
codeFenced: opener2(codeFlow),
|
||
codeFencedFenceInfo: buffer,
|
||
codeFencedFenceMeta: buffer,
|
||
codeIndented: opener2(codeFlow, buffer),
|
||
codeText: opener2(codeText2, buffer),
|
||
codeTextData: onenterdata,
|
||
data: onenterdata,
|
||
codeFlowValue: onenterdata,
|
||
definition: opener2(definition3),
|
||
definitionDestinationString: buffer,
|
||
definitionLabelString: buffer,
|
||
definitionTitleString: buffer,
|
||
emphasis: opener2(emphasis2),
|
||
hardBreakEscape: opener2(hardBreak2),
|
||
hardBreakTrailing: opener2(hardBreak2),
|
||
htmlFlow: opener2(html2, buffer),
|
||
htmlFlowData: onenterdata,
|
||
htmlText: opener2(html2, buffer),
|
||
htmlTextData: onenterdata,
|
||
image: opener2(image2),
|
||
label: buffer,
|
||
link: opener2(link2),
|
||
listItem: opener2(listItem2),
|
||
listItemValue: onenterlistitemvalue,
|
||
listOrdered: opener2(list4, onenterlistordered),
|
||
listUnordered: opener2(list4),
|
||
paragraph: opener2(paragraph2),
|
||
reference: onenterreference,
|
||
referenceString: buffer,
|
||
resourceDestinationString: buffer,
|
||
resourceTitleString: buffer,
|
||
setextHeading: opener2(heading2),
|
||
strong: opener2(strong2),
|
||
thematicBreak: opener2(thematicBreak3)
|
||
},
|
||
exit: {
|
||
atxHeading: closer(),
|
||
atxHeadingSequence: onexitatxheadingsequence,
|
||
autolink: closer(),
|
||
autolinkEmail: onexitautolinkemail,
|
||
autolinkProtocol: onexitautolinkprotocol,
|
||
blockQuote: closer(),
|
||
characterEscapeValue: onexitdata,
|
||
characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker,
|
||
characterReferenceMarkerNumeric: onexitcharacterreferencemarker,
|
||
characterReferenceValue: onexitcharacterreferencevalue,
|
||
codeFenced: closer(onexitcodefenced),
|
||
codeFencedFence: onexitcodefencedfence,
|
||
codeFencedFenceInfo: onexitcodefencedfenceinfo,
|
||
codeFencedFenceMeta: onexitcodefencedfencemeta,
|
||
codeFlowValue: onexitdata,
|
||
codeIndented: closer(onexitcodeindented),
|
||
codeText: closer(onexitcodetext),
|
||
codeTextData: onexitdata,
|
||
data: onexitdata,
|
||
definition: closer(),
|
||
definitionDestinationString: onexitdefinitiondestinationstring,
|
||
definitionLabelString: onexitdefinitionlabelstring,
|
||
definitionTitleString: onexitdefinitiontitlestring,
|
||
emphasis: closer(),
|
||
hardBreakEscape: closer(onexithardbreak),
|
||
hardBreakTrailing: closer(onexithardbreak),
|
||
htmlFlow: closer(onexithtmlflow),
|
||
htmlFlowData: onexitdata,
|
||
htmlText: closer(onexithtmltext),
|
||
htmlTextData: onexitdata,
|
||
image: closer(onexitimage),
|
||
label: onexitlabel,
|
||
labelText: onexitlabeltext,
|
||
lineEnding: onexitlineending,
|
||
link: closer(onexitlink),
|
||
listItem: closer(),
|
||
listOrdered: closer(),
|
||
listUnordered: closer(),
|
||
paragraph: closer(),
|
||
referenceString: onexitreferencestring,
|
||
resourceDestinationString: onexitresourcedestinationstring,
|
||
resourceTitleString: onexitresourcetitlestring,
|
||
resource: onexitresource,
|
||
setextHeading: closer(onexitsetextheading),
|
||
setextHeadingLineSequence: onexitsetextheadinglinesequence,
|
||
setextHeadingText: onexitsetextheadingtext,
|
||
strong: closer(),
|
||
thematicBreak: closer()
|
||
}
|
||
};
|
||
configure(config, (options || {}).mdastExtensions || []);
|
||
const data = {};
|
||
return compile;
|
||
function compile(events) {
|
||
let tree = {
|
||
type: "root",
|
||
children: []
|
||
};
|
||
const context = {
|
||
stack: [tree],
|
||
tokenStack: [],
|
||
config,
|
||
enter,
|
||
exit: exit2,
|
||
buffer,
|
||
resume,
|
||
data
|
||
};
|
||
const listStack = [];
|
||
let index2 = -1;
|
||
while (++index2 < events.length) {
|
||
if (events[index2][1].type === "listOrdered" || events[index2][1].type === "listUnordered") {
|
||
if (events[index2][0] === "enter") {
|
||
listStack.push(index2);
|
||
} else {
|
||
const tail = listStack.pop();
|
||
index2 = prepareList(events, tail, index2);
|
||
}
|
||
}
|
||
}
|
||
index2 = -1;
|
||
while (++index2 < events.length) {
|
||
const handler2 = config[events[index2][0]];
|
||
if (own2.call(handler2, events[index2][1].type)) {
|
||
handler2[events[index2][1].type].call(
|
||
Object.assign(
|
||
{
|
||
sliceSerialize: events[index2][2].sliceSerialize
|
||
},
|
||
context
|
||
),
|
||
events[index2][1]
|
||
);
|
||
}
|
||
}
|
||
if (context.tokenStack.length > 0) {
|
||
const tail = context.tokenStack[context.tokenStack.length - 1];
|
||
const handler2 = tail[1] || defaultOnError;
|
||
handler2.call(context, void 0, tail[0]);
|
||
}
|
||
tree.position = {
|
||
start: point2(
|
||
events.length > 0 ? events[0][1].start : {
|
||
line: 1,
|
||
column: 1,
|
||
offset: 0
|
||
}
|
||
),
|
||
end: point2(
|
||
events.length > 0 ? events[events.length - 2][1].end : {
|
||
line: 1,
|
||
column: 1,
|
||
offset: 0
|
||
}
|
||
)
|
||
};
|
||
index2 = -1;
|
||
while (++index2 < config.transforms.length) {
|
||
tree = config.transforms[index2](tree) || tree;
|
||
}
|
||
return tree;
|
||
}
|
||
function prepareList(events, start, length) {
|
||
let index2 = start - 1;
|
||
let containerBalance = -1;
|
||
let listSpread = false;
|
||
let listItem3;
|
||
let lineIndex;
|
||
let firstBlankLineIndex;
|
||
let atMarker;
|
||
while (++index2 <= length) {
|
||
const event = events[index2];
|
||
switch (event[1].type) {
|
||
case "listUnordered":
|
||
case "listOrdered":
|
||
case "blockQuote": {
|
||
if (event[0] === "enter") {
|
||
containerBalance++;
|
||
} else {
|
||
containerBalance--;
|
||
}
|
||
atMarker = void 0;
|
||
break;
|
||
}
|
||
case "lineEndingBlank": {
|
||
if (event[0] === "enter") {
|
||
if (listItem3 && !atMarker && !containerBalance && !firstBlankLineIndex) {
|
||
firstBlankLineIndex = index2;
|
||
}
|
||
atMarker = void 0;
|
||
}
|
||
break;
|
||
}
|
||
case "linePrefix":
|
||
case "listItemValue":
|
||
case "listItemMarker":
|
||
case "listItemPrefix":
|
||
case "listItemPrefixWhitespace": {
|
||
break;
|
||
}
|
||
default: {
|
||
atMarker = void 0;
|
||
}
|
||
}
|
||
if (!containerBalance && event[0] === "enter" && event[1].type === "listItemPrefix" || containerBalance === -1 && event[0] === "exit" && (event[1].type === "listUnordered" || event[1].type === "listOrdered")) {
|
||
if (listItem3) {
|
||
let tailIndex = index2;
|
||
lineIndex = void 0;
|
||
while (tailIndex--) {
|
||
const tailEvent = events[tailIndex];
|
||
if (tailEvent[1].type === "lineEnding" || tailEvent[1].type === "lineEndingBlank") {
|
||
if (tailEvent[0] === "exit")
|
||
continue;
|
||
if (lineIndex) {
|
||
events[lineIndex][1].type = "lineEndingBlank";
|
||
listSpread = true;
|
||
}
|
||
tailEvent[1].type = "lineEnding";
|
||
lineIndex = tailIndex;
|
||
} else if (tailEvent[1].type === "linePrefix" || tailEvent[1].type === "blockQuotePrefix" || tailEvent[1].type === "blockQuotePrefixWhitespace" || tailEvent[1].type === "blockQuoteMarker" || tailEvent[1].type === "listItemIndent") {
|
||
} else {
|
||
break;
|
||
}
|
||
}
|
||
if (firstBlankLineIndex && (!lineIndex || firstBlankLineIndex < lineIndex)) {
|
||
listItem3._spread = true;
|
||
}
|
||
listItem3.end = Object.assign(
|
||
{},
|
||
lineIndex ? events[lineIndex][1].start : event[1].end
|
||
);
|
||
events.splice(lineIndex || index2, 0, ["exit", listItem3, event[2]]);
|
||
index2++;
|
||
length++;
|
||
}
|
||
if (event[1].type === "listItemPrefix") {
|
||
const item = {
|
||
type: "listItem",
|
||
_spread: false,
|
||
start: Object.assign({}, event[1].start),
|
||
// @ts-expect-error: we’ll add `end` in a second.
|
||
end: void 0
|
||
};
|
||
listItem3 = item;
|
||
events.splice(index2, 0, ["enter", item, event[2]]);
|
||
index2++;
|
||
length++;
|
||
firstBlankLineIndex = void 0;
|
||
atMarker = true;
|
||
}
|
||
}
|
||
}
|
||
events[start][1]._spread = listSpread;
|
||
return length;
|
||
}
|
||
function opener2(create2, and) {
|
||
return open;
|
||
function open(token) {
|
||
enter.call(this, create2(token), token);
|
||
if (and)
|
||
and.call(this, token);
|
||
}
|
||
}
|
||
function buffer() {
|
||
this.stack.push({
|
||
type: "fragment",
|
||
children: []
|
||
});
|
||
}
|
||
function enter(node2, token, errorHandler) {
|
||
const parent = this.stack[this.stack.length - 1];
|
||
const siblings = parent.children;
|
||
siblings.push(node2);
|
||
this.stack.push(node2);
|
||
this.tokenStack.push([token, errorHandler]);
|
||
node2.position = {
|
||
start: point2(token.start),
|
||
// @ts-expect-error: `end` will be patched later.
|
||
end: void 0
|
||
};
|
||
}
|
||
function closer(and) {
|
||
return close2;
|
||
function close2(token) {
|
||
if (and)
|
||
and.call(this, token);
|
||
exit2.call(this, token);
|
||
}
|
||
}
|
||
function exit2(token, onExitError) {
|
||
const node2 = this.stack.pop();
|
||
const open = this.tokenStack.pop();
|
||
if (!open) {
|
||
throw new Error(
|
||
"Cannot close `" + token.type + "` (" + stringifyPosition({
|
||
start: token.start,
|
||
end: token.end
|
||
}) + "): it\u2019s not open"
|
||
);
|
||
} else if (open[0].type !== token.type) {
|
||
if (onExitError) {
|
||
onExitError.call(this, token, open[0]);
|
||
} else {
|
||
const handler2 = open[1] || defaultOnError;
|
||
handler2.call(this, token, open[0]);
|
||
}
|
||
}
|
||
node2.position.end = point2(token.end);
|
||
}
|
||
function resume() {
|
||
return toString(this.stack.pop());
|
||
}
|
||
function onenterlistordered() {
|
||
this.data.expectingFirstListItemValue = true;
|
||
}
|
||
function onenterlistitemvalue(token) {
|
||
if (this.data.expectingFirstListItemValue) {
|
||
const ancestor = this.stack[this.stack.length - 2];
|
||
ancestor.start = Number.parseInt(this.sliceSerialize(token), 10);
|
||
this.data.expectingFirstListItemValue = void 0;
|
||
}
|
||
}
|
||
// mdast-util-from-markdown compile-phase exit handlers (bundled vendor
// code). `this` is the compile context: `this.stack` holds the open
// mdast nodes, `this.resume()` pops and returns buffered text, and
// `this.sliceSerialize(token)` yields a token's raw source text.

// Fenced code: the buffered info string becomes the node's language.
function onexitcodefencedfenceinfo() {
  const data2 = this.resume();
  const node2 = this.stack[this.stack.length - 1];
  node2.lang = data2;
}
// Fenced code: text after the info string becomes the node's meta.
function onexitcodefencedfencemeta() {
  const data2 = this.resume();
  const node2 = this.stack[this.stack.length - 1];
  node2.meta = data2;
}
// Opening fence: start buffering the code body. The closing fence is a
// no-op because `flowCodeInside` is already set by then.
function onexitcodefencedfence() {
  if (this.data.flowCodeInside)
    return;
  this.buffer();
  this.data.flowCodeInside = true;
}
// Whole fenced block: strip one leading and one trailing line ending
// from the buffered value.
function onexitcodefenced() {
  const data2 = this.resume();
  const node2 = this.stack[this.stack.length - 1];
  node2.value = data2.replace(/^(\r?\n|\r)|(\r?\n|\r)$/g, "");
  this.data.flowCodeInside = void 0;
}
// Indented code: strip only the trailing line ending.
function onexitcodeindented() {
  const data2 = this.resume();
  const node2 = this.stack[this.stack.length - 1];
  node2.value = data2.replace(/(\r?\n|\r)$/g, "");
}
// Definition label: keep the authored label plus a normalized,
// lowercased identifier used to match references against definitions.
function onexitdefinitionlabelstring(token) {
  const label = this.resume();
  const node2 = this.stack[this.stack.length - 1];
  node2.label = label;
  node2.identifier = normalizeIdentifier(
    this.sliceSerialize(token)
  ).toLowerCase();
}
function onexitdefinitiontitlestring() {
  const data2 = this.resume();
  const node2 = this.stack[this.stack.length - 1];
  node2.title = data2;
}
function onexitdefinitiondestinationstring() {
  const data2 = this.resume();
  const node2 = this.stack[this.stack.length - 1];
  node2.url = data2;
}
// ATX heading: depth is the length of the opening `#` run; the guard
// prevents a closing sequence from overwriting it.
function onexitatxheadingsequence(token) {
  const node2 = this.stack[this.stack.length - 1];
  if (!node2.depth) {
    const depth = this.sliceSerialize(token).length;
    node2.depth = depth;
  }
}
// Setext heading: swallow the line ending between text and underline.
function onexitsetextheadingtext() {
  this.data.setextHeadingSlurpLineEnding = true;
}
// Setext underline: `=` (code point 61) means depth 1, `-` means 2.
function onexitsetextheadinglinesequence(token) {
  const node2 = this.stack[this.stack.length - 1];
  node2.depth = this.sliceSerialize(token).codePointAt(0) === 61 ? 1 : 2;
}
function onexitsetextheading() {
  this.data.setextHeadingSlurpLineEnding = void 0;
}
|
||
// Text handling: on enter, reuse the parent's trailing text node or
// start a fresh one; on exit, append the serialized token and extend
// the node's end position.
function onenterdata(token) {
  const node2 = this.stack[this.stack.length - 1];
  const siblings = node2.children;
  let tail = siblings[siblings.length - 1];
  if (!tail || tail.type !== "text") {
    tail = text4();
    tail.position = {
      start: point2(token.start),
      // @ts-expect-error: we’ll add `end` later.
      end: void 0
    };
    siblings.push(tail);
  }
  this.stack.push(tail);
}
function onexitdata(token) {
  const tail = this.stack.pop();
  tail.value += this.sliceSerialize(token);
  tail.position.end = point2(token.end);
}
// Line endings: right after a hard break, only extend the break's
// position; otherwise, in containers that may hold EOLs, keep the
// newline as literal text (unless a setext heading is slurping it).
function onexitlineending(token) {
  const context = this.stack[this.stack.length - 1];
  if (this.data.atHardBreak) {
    const tail = context.children[context.children.length - 1];
    tail.position.end = point2(token.end);
    this.data.atHardBreak = void 0;
    return;
  }
  if (!this.data.setextHeadingSlurpLineEnding && config.canContainEols.includes(context.type)) {
    onenterdata.call(this, token);
    onexitdata.call(this, token);
  }
}
// Mark that the next line ending belongs to a hard break.
function onexithardbreak() {
  this.data.atHardBreak = true;
}
|
||
// Literal-value exits: the buffered text becomes the node's `value`.
function onexithtmlflow() {
  const data2 = this.resume();
  const node2 = this.stack[this.stack.length - 1];
  node2.value = data2;
}
function onexithtmltext() {
  const data2 = this.resume();
  const node2 = this.stack[this.stack.length - 1];
  node2.value = data2;
}
function onexitcodetext() {
  const data2 = this.resume();
  const node2 = this.stack[this.stack.length - 1];
  node2.value = data2;
}
// Link exit: when a reference was used, turn the node into a
// `linkReference` and drop resource fields; otherwise drop the
// reference bookkeeping collected while parsing the label.
function onexitlink() {
  const node2 = this.stack[this.stack.length - 1];
  if (this.data.inReference) {
    const referenceType = this.data.referenceType || "shortcut";
    node2.type += "Reference";
    node2.referenceType = referenceType;
    delete node2.url;
    delete node2.title;
  } else {
    delete node2.identifier;
    delete node2.label;
  }
  this.data.referenceType = void 0;
}
// Image exit: same reference-vs-resource split as `onexitlink`.
function onexitimage() {
  const node2 = this.stack[this.stack.length - 1];
  if (this.data.inReference) {
    const referenceType = this.data.referenceType || "shortcut";
    node2.type += "Reference";
    node2.referenceType = referenceType;
    delete node2.url;
    delete node2.title;
  } else {
    delete node2.identifier;
    delete node2.label;
  }
  this.data.referenceType = void 0;
}
// Label text: record both the decoded label and its normalized
// identifier on the link/image node (one level up from the fragment).
function onexitlabeltext(token) {
  const string3 = this.sliceSerialize(token);
  const ancestor = this.stack[this.stack.length - 2];
  ancestor.label = decodeString(string3);
  ancestor.identifier = normalizeIdentifier(string3).toLowerCase();
}
// Label end: links keep the label's children; images keep the label as
// `alt`. Assume a reference until a resource (`(url)`) proves otherwise.
function onexitlabel() {
  const fragment = this.stack[this.stack.length - 1];
  const value2 = this.resume();
  const node2 = this.stack[this.stack.length - 1];
  this.data.inReference = true;
  if (node2.type === "link") {
    const children = fragment.children;
    node2.children = children;
  } else {
    node2.alt = value2;
  }
}
|
||
// Resource (`(url "title")`) exits: buffered strings become url/title.
function onexitresourcedestinationstring() {
  const data2 = this.resume();
  const node2 = this.stack[this.stack.length - 1];
  node2.url = data2;
}
function onexitresourcetitlestring() {
  const data2 = this.resume();
  const node2 = this.stack[this.stack.length - 1];
  node2.title = data2;
}
// A resource followed the label, so this is not a reference after all.
function onexitresource() {
  this.data.inReference = void 0;
}
// `[label][]` starts as collapsed; a non-empty second label upgrades it
// to "full" in `onexitreferencestring`.
function onenterreference() {
  this.data.referenceType = "collapsed";
}
function onexitreferencestring(token) {
  const label = this.resume();
  const node2 = this.stack[this.stack.length - 1];
  node2.label = label;
  node2.identifier = normalizeIdentifier(
    this.sliceSerialize(token)
  ).toLowerCase();
  this.data.referenceType = "full";
}
// Character references: remember which marker (decimal `#` vs hex `#x`)
// was seen; named references record no marker.
function onexitcharacterreferencemarker(token) {
  this.data.characterReferenceType = token.type;
}
// Decode the reference value and append it to the open text node.
function onexitcharacterreferencevalue(token) {
  const data2 = this.sliceSerialize(token);
  const type = this.data.characterReferenceType;
  let value2;
  if (type) {
    value2 = decodeNumericCharacterReference(
      data2,
      type === "characterReferenceMarkerNumeric" ? 10 : 16
    );
    this.data.characterReferenceType = void 0;
  } else {
    // Named reference, e.g. `amp`.
    const result = decodeNamedCharacterReference(data2);
    value2 = result;
  }
  const tail = this.stack.pop();
  tail.value += value2;
  tail.position.end = point2(token.end);
}
// Autolinks: the serialized token is both the text (via onexitdata) and
// the link target; email autolinks get a `mailto:` prefix.
function onexitautolinkprotocol(token) {
  onexitdata.call(this, token);
  const node2 = this.stack[this.stack.length - 1];
  node2.url = this.sliceSerialize(token);
}
function onexitautolinkemail(token) {
  onexitdata.call(this, token);
  const node2 = this.stack[this.stack.length - 1];
  node2.url = "mailto:" + this.sliceSerialize(token);
}
|
||
// Factories for fresh mdast nodes, used by the compiler's `enter`
// handlers. Each returns a minimal node of one type; positions are
// attached later by the caller.

function blockQuote2() {
  const node = { type: "blockquote", children: [] };
  return node;
}
function codeFlow() {
  const node = { type: "code", lang: null, meta: null, value: "" };
  return node;
}
function codeText2() {
  const node = { type: "inlineCode", value: "" };
  return node;
}
function definition3() {
  const node = {
    type: "definition",
    identifier: "",
    label: null,
    title: null,
    url: ""
  };
  return node;
}
function emphasis2() {
  const node = { type: "emphasis", children: [] };
  return node;
}
function heading2() {
  // `depth: 0` is a placeholder; the heading exit handlers set the real
  // depth from the `#` run or setext underline.
  const node = { type: "heading", depth: 0, children: [] };
  return node;
}
function hardBreak2() {
  const node = { type: "break" };
  return node;
}
function html2() {
  const node = { type: "html", value: "" };
  return node;
}
function image2() {
  const node = { type: "image", title: null, url: "", alt: null };
  return node;
}
function link2() {
  const node = { type: "link", title: null, url: "", children: [] };
  return node;
}
function list4(token) {
  // Ordered-ness comes from the token type; `_spread` records whether
  // items were separated by blank lines.
  const node = {
    type: "list",
    ordered: token.type === "listOrdered",
    start: null,
    spread: token._spread,
    children: []
  };
  return node;
}
function listItem2(token) {
  const node = {
    type: "listItem",
    spread: token._spread,
    checked: null,
    children: []
  };
  return node;
}
function paragraph2() {
  const node = { type: "paragraph", children: [] };
  return node;
}
function strong2() {
  const node = { type: "strong", children: [] };
  return node;
}
function text4() {
  const node = { type: "text", value: "" };
  return node;
}
function thematicBreak3() {
  const node = { type: "thematicBreak" };
  return node;
}
|
||
}
|
||
// Copy a micromark point (`{line, column, offset}`) into a fresh plain
// object so later mutation of the token cannot affect stored positions.
function point2(d) {
  const { line, column, offset } = d;
  return { line, column, offset };
}
|
||
// Merge a list of micromark extensions into `combined`, flattening
// nested arrays of extensions recursively.
function configure(combined, extensions) {
  for (const candidate of extensions) {
    if (Array.isArray(candidate)) {
      configure(combined, candidate);
    } else {
      extension(combined, candidate);
    }
  }
}

// Fold one extension object into `combined`: list-valued fields are
// appended, handler maps are merged, any other keys are ignored.
function extension(combined, extension2) {
  for (const key in extension2) {
    if (!own2.call(extension2, key)) {
      continue;
    }
    switch (key) {
      case "canContainEols":
      case "transforms": {
        const right = extension2[key];
        if (right) {
          combined[key].push(...right);
        }
        break;
      }
      case "enter":
      case "exit": {
        const right = extension2[key];
        if (right) {
          Object.assign(combined[key], right);
        }
        break;
      }
    }
  }
}
|
||
// Default micromark mismatch handler: raised when an open token `left`
// is closed by a different token `right`, or (when `left` is absent)
// when the document ends with `right` still open. Always throws.
function defaultOnError(left, right) {
  if (left) {
    throw new Error(
      "Cannot close `" + left.type + "` (" + stringifyPosition({
        start: left.start,
        end: left.end
      }) + "): a different token (`" + right.type + "`, " + stringifyPosition({
        start: right.start,
        end: right.end
      }) + ") is open"
    );
  }
  throw new Error(
    "Cannot close document, a token (`" + right.type + "`, " + stringifyPosition({
      start: right.start,
      end: right.end
    }) + ") is still open"
  );
}
|
||
|
||
// node_modules/remark-parse/lib/index.js
|
||
// remark-parse plugin: install a parser on the processor that runs
// micromark via `fromMarkdown`, picking up extensions that other
// plugins registered on the processor's `data`.
function remarkParse(options) {
  const self = this;
  self.parser = parser;
  function parser(doc) {
    const settings = self.data("settings");
    return fromMarkdown(doc, {
      ...settings,
      ...options,
      // Note: these options are not in the readme. The goal is for them
      // to be set by plugins on `data` instead of being passed by users.
      extensions: self.data("micromarkExtensions") || [],
      mdastExtensions: self.data("fromMarkdownExtensions") || []
    });
  }
}
|
||
|
||
// node_modules/zwitch/index.js
|
||
var own3 = {}.hasOwnProperty;

// zwitch: build a dispatcher that routes `value` to a handler chosen by
// `String(value[key])`. Unlisted ids go to `unknown`; falsy values or
// values without the key go to `invalid`. Extra arguments and `this`
// are forwarded to the chosen handler.
function zwitch(key, options) {
  const settings = options || {};

  function one2(value2, ...parameters) {
    let fn = one2.invalid;
    if (value2 && own3.call(value2, key)) {
      const id = String(value2[key]);
      fn = own3.call(one2.handlers, id) ? one2.handlers[id] : one2.unknown;
    }
    return fn ? fn.call(this, value2, ...parameters) : void 0;
  }

  one2.handlers = settings.handlers || {};
  one2.invalid = settings.invalid;
  one2.unknown = settings.unknown;
  return one2;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/configure.js
|
||
var own4 = {}.hasOwnProperty;

// Merge a mdast-util-to-markdown extension into `base`. Nested
// `extensions` are merged first (depth-first); then `unsafe`/`join`
// lists are appended, `handlers` maps are merged, and any other key
// becomes an option. Returns `base` for chaining.
function configure2(base, extension2) {
  if (extension2.extensions) {
    for (const nested of extension2.extensions) {
      configure2(base, nested);
    }
  }
  for (const key in extension2) {
    if (!own4.call(extension2, key)) {
      continue;
    }
    if (key === "extensions") {
      // Already folded in above.
      continue;
    }
    if (key === "unsafe" || key === "join") {
      list2(base[key], extension2[key]);
    } else if (key === "handlers") {
      map(base[key], extension2[key]);
    } else {
      base.options[key] = extension2[key];
    }
  }
  return base;
}

// Append `right` onto `left` when present.
function list2(left, right) {
  if (right) {
    left.push(...right);
  }
}

// Copy `right`'s properties onto `left` when present.
function map(left, right) {
  if (right) {
    Object.assign(left, right);
  }
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/blockquote.js
|
||
// Serialize an mdast `blockquote`: render its children as flow content,
// then prefix every line with `>` (plus a space on non-blank lines).
function blockquote(node2, _, state, info) {
  const exit2 = state.enter("blockquote");
  const tracker = state.createTracker(info);
  tracker.move("> ");
  tracker.shift(2);
  const flow = state.containerFlow(node2, tracker.current());
  const value2 = state.indentLines(flow, map2);
  exit2();
  return value2;
}

// Line mapper: `>` on blank lines, `> ` otherwise.
function map2(line, _, blank) {
  return (blank ? ">" : "> ") + line;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/pattern-in-scope.js
|
||
// Whether an unsafe pattern applies in the current construct stack: it
// must be inside one of `inConstruct` (vacuously true when unset) and
// not inside any of `notInConstruct`.
function patternInScope(stack, pattern) {
  if (!listInScope(stack, pattern.inConstruct, true)) {
    return false;
  }
  return !listInScope(stack, pattern.notInConstruct, false);
}

// True when `stack` contains any construct from `list4` (a string is
// treated as a one-element list). A missing or empty list yields
// `none`, the caller's vacuous-truth default.
function listInScope(stack, list4, none) {
  const wanted = typeof list4 === "string" ? [list4] : list4;
  if (!wanted || wanted.length === 0) {
    return none;
  }
  return wanted.some((construct) => stack.includes(construct));
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/break.js
|
||
// Serialize a hard break. The default is backslash + newline; when a
// real newline is unsafe in the current construct, fall back to a
// space (omitted entirely when whitespace already precedes the break).
function hardBreak(_, _1, state, info) {
  for (const pattern of state.unsafe) {
    if (pattern.character === "\n" && patternInScope(state.stack, pattern)) {
      return /[ \t]/.test(info.before) ? "" : " ";
    }
  }
  return "\\\n";
}
|
||
|
||
// node_modules/longest-streak/index.js
|
||
// Count the longest run of adjacent `substring` repeats in `value2`,
// e.g. longestStreak("a``b```c", "`") === 3. Used to size code fences
// so they cannot collide with fence characters in the content.
// Throws TypeError when `substring` is not a string.
function longestStreak(value2, substring) {
  const source = String(value2);
  let index2 = source.indexOf(substring);
  let expected = index2;
  let count = 0;
  let max = 0;
  if (typeof substring !== "string") {
    throw new TypeError("Expected substring");
  }
  while (index2 !== -1) {
    // An occurrence adjacent to the previous one extends the streak;
    // any gap restarts it at 1.
    count = index2 === expected ? count + 1 : 1;
    if (count > max) {
      max = count;
    }
    expected = index2 + substring.length;
    index2 = source.indexOf(substring, expected);
  }
  return max;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/format-code-as-indented.js
|
||
// Whether a code node can be serialized as indented code: only when
// fences are explicitly disabled, there is a value with no info string,
// at least one non-whitespace character, and no blank first/last line.
function formatCodeAsIndented(node2, state) {
  if (state.options.fences !== false || !node2.value || node2.lang) {
    return false;
  }
  if (!/[^ \r\n]/.test(node2.value)) {
    return false;
  }
  return !/^[\t ]*(?:[\r\n]|$)|(?:^|[\r\n])[\t ]*$/.test(node2.value);
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-fence.js
|
||
// Resolve `options.fence`, defaulting to a backtick. Only "`" and "~"
// are valid fence markers; anything else throws.
function checkFence(state) {
  const marker = state.options.fence || "`";
  if (marker === "`" || marker === "~") {
    return marker;
  }
  throw new Error(
    "Cannot serialize code with `" + marker + "` for `options.fence`, expected `` ` `` or `~`"
  );
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/code.js
|
||
// Serialize an mdast `code` node. Uses indented code only when
// `formatCodeAsIndented` allows it (fences disabled, no info string);
// otherwise emits a fenced block whose fence is longer than any marker
// run in the body.
function code(node2, _, state, info) {
  const marker = checkFence(state);
  const raw = node2.value || "";
  const suffix = marker === "`" ? "GraveAccent" : "Tilde";
  if (formatCodeAsIndented(node2, state)) {
    const exit3 = state.enter("codeIndented");
    const value3 = state.indentLines(raw, map3);
    exit3();
    return value3;
  }
  const tracker = state.createTracker(info);
  // Fence must beat the longest marker run inside the value and be at
  // least 3 characters long.
  const sequence = marker.repeat(Math.max(longestStreak(raw, marker) + 1, 3));
  const exit2 = state.enter("codeFenced");
  let value2 = tracker.move(sequence);
  if (node2.lang) {
    const subexit = state.enter(`codeFencedLang${suffix}`);
    value2 += tracker.move(
      state.safe(node2.lang, {
        before: value2,
        after: " ",
        encode: ["`"],
        ...tracker.current()
      })
    );
    subexit();
  }
  // Meta is only emitted when a language precedes it.
  if (node2.lang && node2.meta) {
    const subexit = state.enter(`codeFencedMeta${suffix}`);
    value2 += tracker.move(" ");
    value2 += tracker.move(
      state.safe(node2.meta, {
        before: value2,
        after: "\n",
        encode: ["`"],
        ...tracker.current()
      })
    );
    subexit();
  }
  value2 += tracker.move("\n");
  if (raw) {
    value2 += tracker.move(raw + "\n");
  }
  value2 += tracker.move(sequence);
  exit2();
  return value2;
}
// Indent each non-blank line for indented code.
function map3(line, _, blank) {
  return (blank ? "" : "    ") + line;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-quote.js
|
||
// Resolve `options.quote`, defaulting to a double quote. Only `"` and
// `'` are valid title quotes; anything else throws.
function checkQuote(state) {
  const marker = state.options.quote || '"';
  if (marker === '"' || marker === "'") {
    return marker;
  }
  throw new Error(
    "Cannot serialize title with `" + marker + "` for `options.quote`, expected `\"`, or `'`"
  );
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/definition.js
|
||
// Serialize an mdast `definition`: `[label]: <url> "title"`. The URL is
// wrapped in angle brackets when it is empty or contains whitespace or
// control characters.
function definition2(node2, _, state, info) {
  const quote = checkQuote(state);
  const suffix = quote === '"' ? "Quote" : "Apostrophe";
  const exit2 = state.enter("definition");
  let subexit = state.enter("label");
  const tracker = state.createTracker(info);
  let value2 = tracker.move("[");
  value2 += tracker.move(
    state.safe(state.associationId(node2), {
      before: value2,
      after: "]",
      ...tracker.current()
    })
  );
  value2 += tracker.move("]: ");
  subexit();
  if (
    // If there’s no url, or…
    !node2.url || // If there are control characters or whitespace.
    /[\0- \u007F]/.test(node2.url)
  ) {
    subexit = state.enter("destinationLiteral");
    value2 += tracker.move("<");
    value2 += tracker.move(
      state.safe(node2.url, { before: value2, after: ">", ...tracker.current() })
    );
    value2 += tracker.move(">");
  } else {
    // Safe to emit the URL bare.
    subexit = state.enter("destinationRaw");
    value2 += tracker.move(
      state.safe(node2.url, {
        before: value2,
        after: node2.title ? " " : "\n",
        ...tracker.current()
      })
    );
  }
  subexit();
  if (node2.title) {
    subexit = state.enter(`title${suffix}`);
    value2 += tracker.move(" " + quote);
    value2 += tracker.move(
      state.safe(node2.title, {
        before: value2,
        after: quote,
        ...tracker.current()
      })
    );
    value2 += tracker.move(quote);
    subexit();
  }
  exit2();
  return value2;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-emphasis.js
|
||
// Resolve `options.emphasis`, defaulting to an asterisk. Only `*` and
// `_` are valid emphasis markers; anything else throws.
function checkEmphasis(state) {
  const marker = state.options.emphasis || "*";
  if (marker === "*" || marker === "_") {
    return marker;
  }
  throw new Error(
    "Cannot serialize emphasis with `" + marker + "` for `options.emphasis`, expected `*`, or `_`"
  );
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/emphasis.js
|
||
emphasis.peek = emphasisPeek;

// Serialize an `emphasis` node: marker + phrasing children + marker.
function emphasis(node2, _, state, info) {
  const marker = checkEmphasis(state);
  const exit2 = state.enter("emphasis");
  const tracker = state.createTracker(info);
  let value2 = tracker.move(marker);
  const inner = state.containerPhrasing(node2, {
    before: value2,
    after: marker,
    ...tracker.current()
  });
  value2 += tracker.move(inner);
  value2 += tracker.move(marker);
  exit2();
  return value2;
}

// First character emitted by `emphasis`, for safe-pattern peeking.
function emphasisPeek(_, _1, state) {
  return state.options.emphasis || "*";
}
|
||
|
||
// node_modules/unist-util-is/lib/index.js
|
||
// unist-util-is: turn a test (type string, props object, predicate
// function, array of tests, or nothing) into a check function of shape
// `(node, index?, parent?) => boolean`.
var convert = (
  /**
   * @param {Test} [test]
   * @returns {Check}
   */
  function(test) {
    if (test === null || test === void 0) {
      return ok;
    }
    switch (typeof test) {
      case "function":
        return castFactory(test);
      case "object":
        return Array.isArray(test) ? anyFactory(test) : propsFactory(test);
      case "string":
        return typeFactory(test);
      default:
        throw new Error("Expected function, string, or object as test");
    }
  }
);

// Check that passes when any of `tests` passes.
function anyFactory(tests) {
  const checks = tests.map((candidate) => convert(candidate));
  return castFactory(any);
  function any(...parameters) {
    return checks.some((check) => check.apply(this, parameters));
  }
}

// Check that passes when the node carries every own-or-inherited
// key/value pair of `check` (strict equality per value).
function propsFactory(check) {
  const checkAsRecord = (
    /** @type {Record<string, unknown>} */
    check
  );
  return castFactory(all2);
  function all2(node2) {
    const nodeAsRecord = (
      /** @type {Record<string, unknown>} */
      node2
    );
    for (const key in check) {
      if (nodeAsRecord[key] !== checkAsRecord[key]) {
        return false;
      }
    }
    return true;
  }
}

// Check that passes when `node.type` equals the given string.
function typeFactory(check) {
  return castFactory(type);
  function type(node2) {
    return node2 && node2.type === check;
  }
}

// Wrap a test function so non-nodes fail before the test runs and the
// result is always a boolean. Forwards `this` plus a numeric index and
// a parent (both replaced by `undefined` when absent).
function castFactory(testFunction) {
  return check;
  function check(value2, index2, parent) {
    if (!looksLikeANode(value2)) {
      return false;
    }
    const result = testFunction.call(
      this,
      value2,
      typeof index2 === "number" ? index2 : void 0,
      parent || void 0
    );
    return Boolean(result);
  }
}

// The no-test check: everything passes.
function ok() {
  return true;
}

// Minimal duck-typing for a unist node.
function looksLikeANode(value2) {
  return value2 !== null && typeof value2 === "object" && "type" in value2;
}
|
||
|
||
// node_modules/unist-util-visit-parents/lib/color.node.js
|
||
// Wrap `d` in ANSI yellow/reset escapes, used for visitor debug names.
function color(d) {
  const YELLOW = "\x1B[33m";
  const RESET = "\x1B[39m";
  return YELLOW + d + RESET;
}
|
||
|
||
// node_modules/unist-util-visit-parents/lib/index.js
|
||
// unist-util-visit-parents: depth-first tree walk that passes the full
// ancestor stack to the visitor.
var empty = [];
// Visitor actions: keep walking, stop entirely, or skip children.
var CONTINUE = true;
var EXIT = false;
var SKIP = "skip";
function visitParents(tree, test, visitor, reverse) {
  let check;
  // Overloaded call: (tree, visitor, reverse?) when no test is given.
  if (typeof test === "function" && typeof visitor !== "function") {
    reverse = visitor;
    visitor = test;
  } else {
    check = test;
  }
  const is2 = convert(check);
  const step = reverse ? -1 : 1;
  factory(tree, void 0, [])();
  function factory(node2, index2, parents) {
    const value2 = (
      /** @type {Record<string, unknown>} */
      node2 && typeof node2 === "object" ? node2 : {}
    );
    // Give the visit closure a descriptive name (including hast/xast
    // element names) so stack traces identify the node being visited.
    if (typeof value2.type === "string") {
      const name = (
        // `hast`
        typeof value2.tagName === "string" ? value2.tagName : (
          // `xast`
          typeof value2.name === "string" ? value2.name : void 0
        )
      );
      Object.defineProperty(visit2, "name", {
        value: "node (" + color(node2.type + (name ? "<" + name + ">" : "")) + ")"
      });
    }
    return visit2;
    function visit2() {
      let result = empty;
      let subresult;
      let offset;
      let grandparents;
      // Only call the visitor when the node passes the test.
      if (!test || is2(node2, index2, parents[parents.length - 1] || void 0)) {
        result = toResult(visitor(node2, parents));
        if (result[0] === EXIT) {
          return result;
        }
      }
      if ("children" in node2 && node2.children) {
        const nodeAsParent = (
          /** @type {UnistParent} */
          node2
        );
        if (nodeAsParent.children && result[0] !== SKIP) {
          offset = (reverse ? nodeAsParent.children.length : -1) + step;
          grandparents = parents.concat(nodeAsParent);
          while (offset > -1 && offset < nodeAsParent.children.length) {
            const child = nodeAsParent.children[offset];
            subresult = factory(child, offset, grandparents)();
            if (subresult[0] === EXIT) {
              return subresult;
            }
            // A numeric result means "resume at this sibling index".
            offset = typeof subresult[1] === "number" ? subresult[1] : offset + step;
          }
        }
      }
      return result;
    }
  }
}
|
||
// Normalize a visitor's return value into an action tuple: arrays pass
// through, a number means "continue at this index", null/undefined
// means "no instruction", anything else is wrapped as an action.
function toResult(value2) {
  if (Array.isArray(value2)) {
    return value2;
  }
  if (typeof value2 === "number") {
    return [CONTINUE, value2];
  }
  if (value2 === null || value2 === void 0) {
    return empty;
  }
  return [value2];
}
|
||
|
||
// node_modules/unist-util-visit/lib/index.js
|
||
// unist-util-visit: like `visitParents`, but the visitor receives
// `(node, index, parent)` instead of the full ancestor stack. The test
// argument is optional and may be omitted entirely.
function visit(tree, testOrVisitor, visitorOrReverse, maybeReverse) {
  let test;
  let visitor;
  let reverse;
  const testOmitted = typeof testOrVisitor === "function" && typeof visitorOrReverse !== "function";
  if (testOmitted) {
    test = void 0;
    visitor = testOrVisitor;
    reverse = visitorOrReverse;
  } else {
    test = testOrVisitor;
    visitor = visitorOrReverse;
    reverse = maybeReverse;
  }
  visitParents(tree, test, overload, reverse);
  // Adapt the ancestor-stack signature to (node, index, parent).
  function overload(node2, parents) {
    const parent = parents[parents.length - 1];
    const index2 = parent ? parent.children.indexOf(node2) : void 0;
    return visitor(node2, index2, parent);
  }
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/format-heading-as-setext.js
|
||
// Whether a heading should be serialized in setext (underlined) style:
// depth 1-2, non-empty text, and either the `setext` option or a line
// break somewhere in its content (ATX headings cannot contain breaks).
function formatHeadingAsSetext(node2, state) {
  let literalWithBreak = false;
  visit(node2, function(node3) {
    const hasEol = "value" in node3 && /\r?\n|\r/.test(node3.value);
    if (hasEol || node3.type === "break") {
      literalWithBreak = true;
      return EXIT;
    }
  });
  const shallow = !node2.depth || node2.depth < 3;
  return Boolean(shallow && toString(node2) && (state.options.setext || literalWithBreak));
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/heading.js
|
||
// Serialize an mdast `heading`: setext (underlined) style when
// `formatHeadingAsSetext` says so, otherwise ATX (`#`) style. Depth is
// clamped to 1-6.
function heading(node2, _, state, info) {
  const rank = Math.max(Math.min(6, node2.depth || 1), 1);
  const tracker = state.createTracker(info);
  if (formatHeadingAsSetext(node2, state)) {
    const exit3 = state.enter("headingSetext");
    const subexit2 = state.enter("phrasing");
    const value3 = state.containerPhrasing(node2, {
      ...tracker.current(),
      before: "\n",
      after: "\n"
    });
    subexit2();
    exit3();
    // The underline must span the last line of the text.
    return value3 + "\n" + (rank === 1 ? "=" : "-").repeat(
      // The whole size…
      value3.length - // Minus the position of the character after the last EOL (or
      // 0 if there is none)…
      (Math.max(value3.lastIndexOf("\r"), value3.lastIndexOf("\n")) + 1)
    );
  }
  const sequence = "#".repeat(rank);
  const exit2 = state.enter("headingAtx");
  const subexit = state.enter("phrasing");
  tracker.move(sequence + " ");
  let value2 = state.containerPhrasing(node2, {
    before: "# ",
    after: "\n",
    ...tracker.current()
  });
  // A leading tab/space after `# ` would be ambiguous, so encode the
  // first character as an uppercase hex character reference.
  if (/^[\t ]/.test(value2)) {
    value2 = "&#x" + value2.charCodeAt(0).toString(16).toUpperCase() + ";" + value2.slice(1);
  }
  value2 = value2 ? sequence + " " + value2 : sequence;
  if (state.options.closeAtx) {
    value2 += " " + sequence;
  }
  subexit();
  exit2();
  return value2;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/html.js
|
||
html.peek = htmlPeek;

// Serialize an `html` node: emit its raw value verbatim.
function html(node2) {
  const raw = node2.value;
  return raw ? raw : "";
}

// Raw HTML always starts with `<` for safe-pattern peeking.
function htmlPeek() {
  return "<";
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/image.js
|
||
image.peek = imagePeek;
// Serialize an mdast `image`: `![alt](url "title")`. The URL is wrapped
// in `<...>` when it is absent-but-titled or contains whitespace or
// control characters.
function image(node2, _, state, info) {
  const quote = checkQuote(state);
  const suffix = quote === '"' ? "Quote" : "Apostrophe";
  const exit2 = state.enter("image");
  let subexit = state.enter("label");
  const tracker = state.createTracker(info);
  let value2 = tracker.move("![");
  value2 += tracker.move(
    state.safe(node2.alt, { before: value2, after: "]", ...tracker.current() })
  );
  value2 += tracker.move("](");
  subexit();
  if (
    // If there’s no url but there is a title…
    !node2.url && node2.title || // If there are control characters or whitespace.
    /[\0- \u007F]/.test(node2.url)
  ) {
    subexit = state.enter("destinationLiteral");
    value2 += tracker.move("<");
    value2 += tracker.move(
      state.safe(node2.url, { before: value2, after: ">", ...tracker.current() })
    );
    value2 += tracker.move(">");
  } else {
    // Safe to emit the URL bare.
    subexit = state.enter("destinationRaw");
    value2 += tracker.move(
      state.safe(node2.url, {
        before: value2,
        after: node2.title ? " " : ")",
        ...tracker.current()
      })
    );
  }
  subexit();
  if (node2.title) {
    subexit = state.enter(`title${suffix}`);
    value2 += tracker.move(" " + quote);
    value2 += tracker.move(
      state.safe(node2.title, {
        before: value2,
        after: quote,
        ...tracker.current()
      })
    );
    value2 += tracker.move(quote);
    subexit();
  }
  value2 += tracker.move(")");
  exit2();
  return value2;
}
|
||
// Images always start with `!` for safe-pattern peeking.
function imagePeek() {
  return "!";
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/image-reference.js
|
||
imageReference.peek = imageReferencePeek;
// Serialize an `imageReference`: `![alt][id]` for full references,
// collapsing to `![alt][]` or the shortcut `![alt]` when the id matches
// the alt text.
function imageReference(node2, _, state, info) {
  const type = node2.referenceType;
  const exit2 = state.enter("imageReference");
  let subexit = state.enter("label");
  const tracker = state.createTracker(info);
  let value2 = tracker.move("![");
  const alt = state.safe(node2.alt, {
    before: value2,
    after: "]",
    ...tracker.current()
  });
  value2 += tracker.move(alt + "][");
  subexit();
  // Serialize the id in a clean stack so label context rules do not
  // apply inside the reference brackets.
  const stack = state.stack;
  state.stack = [];
  subexit = state.enter("reference");
  const reference = state.safe(state.associationId(node2), {
    before: value2,
    after: "]",
    ...tracker.current()
  });
  subexit();
  state.stack = stack;
  exit2();
  if (type === "full" || !alt || alt !== reference) {
    value2 += tracker.move(reference + "]");
  } else if (type === "shortcut") {
    // Drop the `[` opened above, yielding `![alt]`.
    value2 = value2.slice(0, -1);
  } else {
    value2 += tracker.move("]");
  }
  return value2;
}
|
||
// Image references always start with `!` for safe-pattern peeking.
function imageReferencePeek() {
  return "!";
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/inline-code.js
|
||
inlineCode.peek = inlineCodePeek;

// Serialize an `inlineCode` node. Grows the backtick fence until it
// cannot collide with backtick runs inside the value, pads with spaces
// when the value is space/backtick-edged, and blanks newlines that
// would trigger at-break unsafe patterns.
function inlineCode(node2, _, state) {
  let value2 = node2.value || "";
  let sequence = "`";
  // A fence of n backticks is unusable while the value contains a run
  // of exactly n backticks; keep lengthening it.
  while (new RegExp("(^|[^`])" + sequence + "([^`]|$)").test(value2)) {
    sequence += "`";
  }
  const hasContent = /[^ \r\n]/.test(value2);
  const spaceEdged = /^[ \r\n]/.test(value2) && /[ \r\n]$/.test(value2);
  const tickEdged = /^`|`$/.test(value2);
  if (hasContent && (spaceEdged || tickEdged)) {
    value2 = " " + value2 + " ";
  }
  let index2 = -1;
  while (++index2 < state.unsafe.length) {
    const pattern = state.unsafe[index2];
    // Compile unconditionally (matches the original call order, which
    // may populate the pattern's regex cache) before filtering.
    const expression = state.compilePattern(pattern);
    if (!pattern.atBreak) {
      continue;
    }
    let match2;
    while (match2 = expression.exec(value2)) {
      let position2 = match2.index;
      // Treat CRLF as one break: blank the CR, not only the LF.
      if (value2.charCodeAt(position2) === 10 && value2.charCodeAt(position2 - 1) === 13) {
        position2--;
      }
      value2 = value2.slice(0, position2) + " " + value2.slice(match2.index + 1);
    }
  }
  return sequence + value2 + sequence;
}

// Inline code always starts with a backtick for peeking.
function inlineCodePeek() {
  return "`";
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/format-link-as-autolink.js
|
||
// Whether a link can be serialized as an autolink (`<https://…>`): the
// `resourceLink` option must allow it, the single text child must equal
// the URL (directly or minus a `mailto:` prefix), and the URL must have
// a protocol and no characters invalid inside angle brackets.
function formatLinkAsAutolink(node2, state) {
  const raw = toString(node2);
  return Boolean(
    !state.options.resourceLink && // If there’s a url…
    node2.url && // And there’s a no title…
    !node2.title && // And the content of `node` is a single text node…
    node2.children && node2.children.length === 1 && node2.children[0].type === "text" && // And if the url is the same as the content…
    (raw === node2.url || "mailto:" + raw === node2.url) && // And that starts w/ a protocol…
    /^[a-z][a-z+.-]+:/i.test(node2.url) && // And that doesn’t contain ASCII control codes (character escapes and
    // references don’t work), space, or angle brackets…
    !/[\0- <>\u007F]/.test(node2.url)
  );
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/link.js
|
||
link.peek = linkPeek;
// Serialize an mdast `link`: `<url>` autolink form when possible,
// otherwise `[text](url "title")` with the same `<...>` URL wrapping
// rules as `image`.
function link(node2, _, state, info) {
  const quote = checkQuote(state);
  const suffix = quote === '"' ? "Quote" : "Apostrophe";
  const tracker = state.createTracker(info);
  let exit2;
  let subexit;
  if (formatLinkAsAutolink(node2, state)) {
    // Autolink: serialize children in a clean stack so phrasing escape
    // rules do not apply inside the angle brackets.
    const stack = state.stack;
    state.stack = [];
    exit2 = state.enter("autolink");
    let value3 = tracker.move("<");
    value3 += tracker.move(
      state.containerPhrasing(node2, {
        before: value3,
        after: ">",
        ...tracker.current()
      })
    );
    value3 += tracker.move(">");
    exit2();
    state.stack = stack;
    return value3;
  }
  exit2 = state.enter("link");
  subexit = state.enter("label");
  let value2 = tracker.move("[");
  value2 += tracker.move(
    state.containerPhrasing(node2, {
      before: value2,
      after: "](",
      ...tracker.current()
    })
  );
  value2 += tracker.move("](");
  subexit();
  if (
    // If there’s no url but there is a title…
    !node2.url && node2.title || // If there are control characters or whitespace.
    /[\0- \u007F]/.test(node2.url)
  ) {
    subexit = state.enter("destinationLiteral");
    value2 += tracker.move("<");
    value2 += tracker.move(
      state.safe(node2.url, { before: value2, after: ">", ...tracker.current() })
    );
    value2 += tracker.move(">");
  } else {
    // Safe to emit the URL bare.
    subexit = state.enter("destinationRaw");
    value2 += tracker.move(
      state.safe(node2.url, {
        before: value2,
        after: node2.title ? " " : ")",
        ...tracker.current()
      })
    );
  }
  subexit();
  if (node2.title) {
    subexit = state.enter(`title${suffix}`);
    value2 += tracker.move(" " + quote);
    value2 += tracker.move(
      state.safe(node2.title, {
        before: value2,
        after: quote,
        ...tracker.current()
      })
    );
    value2 += tracker.move(quote);
    subexit();
  }
  value2 += tracker.move(")");
  exit2();
  return value2;
}
|
||
/**
 * Peek at the first character a link will serialize to: `<` for autolinks,
 * `[` for resource links.
 */
function linkPeek(node2, _, state) {
  if (formatLinkAsAutolink(node2, state)) {
    return "<";
  }
  return "[";
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/link-reference.js
|
||
// Serialize an mdast `linkReference` node: `[text][id]` (full),
// `[id][]` (collapsed), or `[id]` (shortcut).
linkReference.peek = linkReferencePeek;
function linkReference(node2, _, state, info) {
  const type = node2.referenceType;
  const exit2 = state.enter("linkReference");
  let subexit = state.enter("label");
  const tracker = state.createTracker(info);
  let value2 = tracker.move("[");
  const text4 = state.containerPhrasing(node2, {
    before: value2,
    after: "]",
    ...tracker.current()
  });
  value2 += tracker.move(text4 + "][");
  subexit();
  // Clear the construct stack while serializing the reference identifier so
  // phrasing escapes don’t apply to it.
  const stack = state.stack;
  state.stack = [];
  subexit = state.enter("reference");
  const reference = state.safe(state.associationId(node2), {
    before: value2,
    after: "]",
    ...tracker.current()
  });
  subexit();
  state.stack = stack;
  exit2();
  if (type === "full" || !text4 || text4 !== reference) {
    // Full form is required when asked for, or when label and identifier
    // differ (collapsed/shortcut would change meaning).
    value2 += tracker.move(reference + "]");
  } else if (type === "shortcut") {
    // Shortcut: drop the trailing `[` that was emitted after the label.
    value2 = value2.slice(0, -1);
  } else {
    // Collapsed: empty second bracket pair.
    value2 += tracker.move("]");
  }
  return value2;
}
|
||
/**
 * Peek at the first character of a serialized link reference.
 *
 * @returns {string} A left square bracket.
 */
function linkReferencePeek() {
  const leftBracket = "[";
  return leftBracket;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-bullet.js
|
||
/**
 * Validate and return the configured unordered-list bullet marker.
 * Defaults to `*`; throws on anything other than `*`, `+`, or `-`.
 */
function checkBullet(state) {
  const marker = state.options.bullet || "*";
  switch (marker) {
    case "*":
    case "+":
    case "-":
      return marker;
    default:
      throw new Error(
        "Cannot serialize items with `" + marker + "` for `options.bullet`, expected `*`, `+`, or `-`"
      );
  }
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-bullet-other.js
|
||
/**
 * Validate and return the alternate bullet marker (used when two adjacent
 * lists would otherwise merge). Defaults to the opposite of the primary
 * bullet; must differ from it and be one of `*`, `+`, `-`.
 */
function checkBulletOther(state) {
  const bullet = checkBullet(state);
  const bulletOther = state.options.bulletOther;
  if (!bulletOther) {
    // Default: flip between `*` and `-`.
    if (bullet === "*") {
      return "-";
    }
    return "*";
  }
  const allowed = bulletOther === "*" || bulletOther === "+" || bulletOther === "-";
  if (!allowed) {
    throw new Error(
      "Cannot serialize items with `" + bulletOther + "` for `options.bulletOther`, expected `*`, `+`, or `-`"
    );
  }
  if (bulletOther === bullet) {
    throw new Error(
      "Expected `bullet` (`" + bullet + "`) and `bulletOther` (`" + bulletOther + "`) to be different"
    );
  }
  return bulletOther;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-bullet-ordered.js
|
||
/**
 * Validate and return the ordered-list marker. Defaults to `.`; throws on
 * anything other than `.` or `)`.
 */
function checkBulletOrdered(state) {
  const marker = state.options.bulletOrdered || ".";
  if (marker === "." || marker === ")") {
    return marker;
  }
  throw new Error(
    "Cannot serialize items with `" + marker + "` for `options.bulletOrdered`, expected `.` or `)`"
  );
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-rule.js
|
||
/**
 * Validate and return the thematic-break marker. Defaults to `*`; throws on
 * anything other than `*`, `-`, or `_`.
 */
function checkRule(state) {
  const marker = state.options.rule || "*";
  const valid = marker === "*" || marker === "-" || marker === "_";
  if (!valid) {
    throw new Error(
      "Cannot serialize rules with `" + marker + "` for `options.rule`, expected `*`, `-`, or `_`"
    );
  }
  return marker;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/list.js
|
||
// Serialize an mdast `list`, switching to the alternate bullet marker when
// the primary one could be misparsed (as a thematic break) or would merge
// with an adjacent sibling list.
function list3(node2, parent, state, info) {
  const exit2 = state.enter("list");
  const bulletCurrent = state.bulletCurrent;
  let bullet = node2.ordered ? checkBulletOrdered(state) : checkBullet(state);
  // The alternate marker: flip `.`/`)` for ordered lists.
  const bulletOther = node2.ordered ? bullet === "." ? ")" : "." : checkBulletOther(state);
  // Two sibling lists with the same marker would merge into one.
  let useDifferentMarker = parent && state.bulletLastUsed ? bullet === state.bulletLastUsed : false;
  if (!node2.ordered) {
    const firstListItem = node2.children ? node2.children[0] : void 0;
    if (
      // Bullet could be used as a thematic break marker:
      (bullet === "*" || bullet === "-") && // Empty first list item:
      firstListItem && (!firstListItem.children || !firstListItem.children[0]) && // Directly in two other list items:
      state.stack[state.stack.length - 1] === "list" && state.stack[state.stack.length - 2] === "listItem" && state.stack[state.stack.length - 3] === "list" && state.stack[state.stack.length - 4] === "listItem" && // That are each the first child.
      state.indexStack[state.indexStack.length - 1] === 0 && state.indexStack[state.indexStack.length - 2] === 0 && state.indexStack[state.indexStack.length - 3] === 0
    ) {
      useDifferentMarker = true;
    }
    // If any item starts with a thematic break using the same marker, the
    // bullet would run into it; switch markers.
    if (checkRule(state) === bullet && firstListItem) {
      let index2 = -1;
      while (++index2 < node2.children.length) {
        const item = node2.children[index2];
        if (item && item.type === "listItem" && item.children && item.children[0] && item.children[0].type === "thematicBreak") {
          useDifferentMarker = true;
          break;
        }
      }
    }
  }
  if (useDifferentMarker) {
    bullet = bulletOther;
  }
  // `bulletCurrent` is read by `listItem`; restore the previous value after
  // serializing children so nested lists behave.
  state.bulletCurrent = bullet;
  const value2 = state.containerFlow(node2, info);
  state.bulletLastUsed = bullet;
  state.bulletCurrent = bulletCurrent;
  exit2();
  return value2;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-list-item-indent.js
|
||
/**
 * Validate and return the list-item indent style. Defaults to `one`; throws
 * on anything other than `tab`, `one`, or `mixed`.
 */
function checkListItemIndent(state) {
  const style = state.options.listItemIndent || "one";
  switch (style) {
    case "tab":
    case "one":
    case "mixed":
      return style;
    default:
      throw new Error(
        "Cannot serialize items with `" + style + "` for `options.listItemIndent`, expected `tab`, `one`, or `mixed`"
      );
  }
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/list-item.js
|
||
// Serialize an mdast `listItem`: emit the bullet (with number for ordered
// lists), then indent every continuation line of the item body.
function listItem(node2, parent, state, info) {
  const listItemIndent = checkListItemIndent(state);
  let bullet = state.bulletCurrent || checkBullet(state);
  // Ordered lists: prefix the marker with the computed item number.
  if (parent && parent.type === "list" && parent.ordered) {
    bullet = (typeof parent.start === "number" && parent.start > -1 ? parent.start : 1) + (state.options.incrementListMarker === false ? 0 : parent.children.indexOf(node2)) + bullet;
  }
  let size = bullet.length + 1;
  // `tab` (or `mixed` on a spread list/item) aligns content to the next
  // multiple of 4 columns.
  if (listItemIndent === "tab" || listItemIndent === "mixed" && (parent && parent.type === "list" && parent.spread || node2.spread)) {
    size = Math.ceil(size / 4) * 4;
  }
  const tracker = state.createTracker(info);
  tracker.move(bullet + " ".repeat(size - bullet.length));
  tracker.shift(size);
  const exit2 = state.enter("listItem");
  const value2 = state.indentLines(
    state.containerFlow(node2, tracker.current()),
    map4
  );
  exit2();
  return value2;
  // First line carries the bullet; later lines are padded (blank lines get
  // no trailing padding).
  function map4(line, index2, blank) {
    if (index2) {
      return (blank ? "" : " ".repeat(size)) + line;
    }
    return (blank ? bullet : bullet + " ".repeat(size - bullet.length)) + line;
  }
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/paragraph.js
|
||
/**
 * Serialize an mdast `paragraph` node: its phrasing children, wrapped in the
 * `paragraph` and `phrasing` constructs.
 */
function paragraph(node2, _, state, info) {
  const exitParagraph = state.enter("paragraph");
  const exitPhrasing = state.enter("phrasing");
  const serialized = state.containerPhrasing(node2, info);
  exitPhrasing();
  exitParagraph();
  return serialized;
}
|
||
|
||
// node_modules/mdast-util-phrasing/lib/index.js
|
||
// Predicate: is a node mdast phrasing (inline) content? Built via `convert`
// from the list of phrasing node type names.
var phrasing = (
  /** @type {(node?: unknown) => node is PhrasingContent} */
  convert([
    "break",
    "delete",
    "emphasis",
    "footnote",
    "footnoteReference",
    "image",
    "imageReference",
    "inlineCode",
    "link",
    "linkReference",
    "strong",
    "text"
  ])
);
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/root.js
|
||
/**
 * Serialize an mdast `root`: as phrasing when any child is inline content,
 * otherwise as flow.
 */
function root(node2, _, state, info) {
  let containsPhrasing = false;
  for (const child of node2.children) {
    if (phrasing(child)) {
      containsPhrasing = true;
      break;
    }
  }
  const serialize = containsPhrasing ? state.containerPhrasing : state.containerFlow;
  return serialize.call(state, node2, info);
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-strong.js
|
||
/**
 * Validate and return the strong marker. Defaults to `*`; throws on anything
 * other than `*` or `_`.
 */
function checkStrong(state) {
  const marker = state.options.strong || "*";
  if (marker === "*" || marker === "_") {
    return marker;
  }
  throw new Error(
    "Cannot serialize strong with `" + marker + "` for `options.strong`, expected `*`, or `_`"
  );
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/strong.js
|
||
// Serialize an mdast `strong` node: phrasing children fenced by a doubled
// emphasis marker (`**…**` or `__…__`).
strong.peek = strongPeek;
function strong(node2, _, state, info) {
  const marker = checkStrong(state);
  const exit2 = state.enter("strong");
  const tracker = state.createTracker(info);
  const fence = marker + marker;
  let result = tracker.move(fence);
  result += tracker.move(
    state.containerPhrasing(node2, {
      before: result,
      after: marker,
      ...tracker.current()
    })
  );
  result += tracker.move(fence);
  exit2();
  return result;
}
|
||
/**
 * Peek at the first character of serialized strong: the configured marker,
 * falling back to `*`.
 */
function strongPeek(_, _1, state) {
  const configured = state.options.strong;
  return configured || "*";
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/text.js
|
||
/**
 * Serialize an mdast `text` node by escaping its value via `state.safe`.
 */
function text3(node2, _, state, info) {
  const raw = node2.value;
  return state.safe(raw, info);
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/check-rule-repetition.js
|
||
/**
 * Validate and return how many marker repetitions a thematic break uses.
 * Defaults to 3; throws when fewer than 3 (not valid markdown).
 */
function checkRuleRepetition(state) {
  const configured = state.options.ruleRepetition;
  const repetition = configured || 3;
  if (repetition < 3) {
    throw new Error(
      "Cannot serialize rules with repetition `" + repetition + "` for `options.ruleRepetition`, expected `3` or more"
    );
  }
  return repetition;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/thematic-break.js
|
||
/**
 * Serialize a thematic break: the rule marker repeated, optionally separated
 * by spaces (trailing space trimmed).
 */
function thematicBreak2(_, _1, state) {
  const useSpaces = Boolean(state.options.ruleSpaces);
  const unit = checkRule(state) + (useSpaces ? " " : "");
  const rule = unit.repeat(checkRuleRepetition(state));
  return useSpaces ? rule.slice(0, -1) : rule;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/handle/index.js
|
||
// Default serialization handlers, keyed by mdast node `type`. Aliased keys
// (`break`, `definition`, `list`, `text`, `thematicBreak`) map mdast type
// names onto the bundled handler functions.
var handle = {
  blockquote,
  break: hardBreak,
  code,
  definition: definition2,
  emphasis,
  hardBreak,
  heading,
  html,
  image,
  imageReference,
  inlineCode,
  link,
  linkReference,
  list: list3,
  listItem,
  paragraph,
  root,
  strong,
  text: text3,
  thematicBreak: thematicBreak2
};
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/join.js
|
||
// Default `join` handlers: decide the gap between two adjacent flow siblings
// (`0`/`1` = blank-line count, `false` = needs an HTML-comment separator,
// `undefined` = no opinion).
var join = [joinDefaults];
function joinDefaults(left, right, parent, state) {
  // An indented code block after a list (or another indented code block)
  // would parse as a continuation; force a comment separator.
  if (right.type === "code" && formatCodeAsIndented(right, state) && (left.type === "list" || left.type === right.type && formatCodeAsIndented(left, state))) {
    return false;
  }
  // Inside a list item (parents with a boolean `spread`):
  if ("spread" in parent && typeof parent.spread === "boolean") {
    if (left.type === "paragraph" && // Two paragraphs.
    (left.type === right.type || right.type === "definition" || // Paragraph followed by a setext heading.
    right.type === "heading" && formatHeadingAsSetext(right, state))) {
      return;
    }
    // Loose items get a blank line between children, tight items none.
    return parent.spread ? 1 : 0;
  }
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/unsafe.js
|
||
// Phrasing constructs inside which *no* unsafe patterns apply (their content
// is already delimited).
var fullPhrasingSpans = [
  "autolink",
  "destinationLiteral",
  "destinationRaw",
  "reference",
  "titleQuote",
  "titleApostrophe"
];
// Character patterns that must be escaped (backslash or character reference)
// depending on the construct they would appear in; consumed by `safe`.
var unsafe = [
  // NOTE(review): these first three entries duplicated the space entries
  // below — the literal tab characters were mangled into spaces at some
  // point. Restored to "\t" to match upstream mdast-util-to-markdown.
  { character: "\t", after: "[\\r\\n]", inConstruct: "phrasing" },
  { character: "\t", before: "[\\r\\n]", inConstruct: "phrasing" },
  {
    character: "\t",
    inConstruct: ["codeFencedLangGraveAccent", "codeFencedLangTilde"]
  },
  {
    character: "\r",
    inConstruct: [
      "codeFencedLangGraveAccent",
      "codeFencedLangTilde",
      "codeFencedMetaGraveAccent",
      "codeFencedMetaTilde",
      "destinationLiteral",
      "headingAtx"
    ]
  },
  {
    character: "\n",
    inConstruct: [
      "codeFencedLangGraveAccent",
      "codeFencedLangTilde",
      "codeFencedMetaGraveAccent",
      "codeFencedMetaTilde",
      "destinationLiteral",
      "headingAtx"
    ]
  },
  { character: " ", after: "[\\r\\n]", inConstruct: "phrasing" },
  { character: " ", before: "[\\r\\n]", inConstruct: "phrasing" },
  {
    character: " ",
    inConstruct: ["codeFencedLangGraveAccent", "codeFencedLangTilde"]
  },
  // An exclamation mark can start an image, if it is followed by a link or
  // a link reference.
  {
    character: "!",
    after: "\\[",
    inConstruct: "phrasing",
    notInConstruct: fullPhrasingSpans
  },
  // A quote can break out of a title.
  { character: '"', inConstruct: "titleQuote" },
  // A number sign could start an ATX heading if it starts a line.
  { atBreak: true, character: "#" },
  { character: "#", inConstruct: "headingAtx", after: "(?:[\r\n]|$)" },
  // Dollar sign and percentage are not used in markdown.
  // An ampersand could start a character reference.
  { character: "&", after: "[#A-Za-z]", inConstruct: "phrasing" },
  // An apostrophe can break out of a title.
  { character: "'", inConstruct: "titleApostrophe" },
  // A left paren could break out of a destination raw.
  { character: "(", inConstruct: "destinationRaw" },
  // A left paren followed by `]` could make something into a link or image.
  {
    before: "\\]",
    character: "(",
    inConstruct: "phrasing",
    notInConstruct: fullPhrasingSpans
  },
  // A right paren could start a list item or break out of a destination
  // raw.
  { atBreak: true, before: "\\d+", character: ")" },
  { character: ")", inConstruct: "destinationRaw" },
  // An asterisk can start thematic breaks, list items, emphasis, strong.
  // NOTE(review): "\t" restored in the following character classes too.
  { atBreak: true, character: "*", after: "(?:[ \t\r\n*])" },
  { character: "*", inConstruct: "phrasing", notInConstruct: fullPhrasingSpans },
  // A plus sign could start a list item.
  { atBreak: true, character: "+", after: "(?:[ \t\r\n])" },
  // A dash can start thematic breaks, list items, and setext heading
  // underlines.
  { atBreak: true, character: "-", after: "(?:[ \t\r\n-])" },
  // A dot could start a list item.
  { atBreak: true, before: "\\d+", character: ".", after: "(?:[ \t\r\n]|$)" },
  // Slash, colon, and semicolon are not used in markdown for constructs.
  // A less than can start html (flow or text) or an autolink.
  // HTML could start with an exclamation mark (declaration, cdata, comment),
  // slash (closing tag), question mark (instruction), or a letter (tag).
  // An autolink also starts with a letter.
  // Finally, it could break out of a destination literal.
  { atBreak: true, character: "<", after: "[!/?A-Za-z]" },
  {
    character: "<",
    after: "[!/?A-Za-z]",
    inConstruct: "phrasing",
    notInConstruct: fullPhrasingSpans
  },
  { character: "<", inConstruct: "destinationLiteral" },
  // An equals to can start setext heading underlines.
  { atBreak: true, character: "=" },
  // A greater than can start block quotes and it can break out of a
  // destination literal.
  { atBreak: true, character: ">" },
  { character: ">", inConstruct: "destinationLiteral" },
  // Question mark and at sign are not used in markdown for constructs.
  // A left bracket can start definitions, references, labels,
  { atBreak: true, character: "[" },
  { character: "[", inConstruct: "phrasing", notInConstruct: fullPhrasingSpans },
  { character: "[", inConstruct: ["label", "reference"] },
  // A backslash can start an escape (when followed by punctuation) or a
  // hard break (when followed by an eol).
  // Note: typical escapes are handled in `safe`!
  { character: "\\", after: "[\\r\\n]", inConstruct: "phrasing" },
  // A right bracket can exit labels.
  { character: "]", inConstruct: ["label", "reference"] },
  // Caret is not used in markdown for constructs.
  // An underscore can start emphasis, strong, or a thematic break.
  { atBreak: true, character: "_" },
  { character: "_", inConstruct: "phrasing", notInConstruct: fullPhrasingSpans },
  // A grave accent can start code (fenced or text), or it can break out of
  // a grave accent code fence.
  { atBreak: true, character: "`" },
  {
    character: "`",
    inConstruct: ["codeFencedLangGraveAccent", "codeFencedMetaGraveAccent"]
  },
  { character: "`", inConstruct: "phrasing", notInConstruct: fullPhrasingSpans },
  // Left brace, vertical bar, right brace are not used in markdown for
  // constructs.
  // A tilde can start code (fenced).
  { atBreak: true, character: "~" }
];
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/association.js
|
||
/**
 * Get the identifier text for an association (definition/reference node):
 * prefer the original `label`; otherwise decode the normalized `identifier`.
 */
function association(node2) {
  const { label, identifier } = node2;
  if (label || !identifier) {
    return label || "";
  }
  return decodeString(identifier);
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/compile-pattern.js
|
||
/**
 * Compile an unsafe pattern into a global regex, caching the result on the
 * pattern object (`_compiled`).
 */
function compilePattern(pattern) {
  if (pattern._compiled) {
    return pattern._compiled;
  }
  // `atBreak` anchors the match to the start of a line (EOL plus indent).
  const atBreak = pattern.atBreak ? "[\\r\\n][\\t ]*" : "";
  const before = atBreak + (pattern.before ? "(?:" + pattern.before + ")" : "");
  // Escape the character itself when it is a regex metacharacter.
  const needsEscape = /[|\\{}()[\]^$+*?.-]/.test(pattern.character);
  const source = (before ? "(" + before + ")" : "") + (needsEscape ? "\\" : "") + pattern.character + (pattern.after ? "(?:" + pattern.after + ")" : "");
  pattern._compiled = new RegExp(source, "g");
  return pattern._compiled;
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/container-phrasing.js
|
||
// Serialize the phrasing children of `parent` into one string.
// `info.before`/`info.after` describe the characters surrounding the result
// so child handlers can choose correct escapes.
function containerPhrasing(parent, state, info) {
  const indexStack = state.indexStack;
  const children = parent.children || [];
  const results = [];
  let index2 = -1;
  let before = info.before;
  indexStack.push(-1);
  let tracker = state.createTracker(info);
  while (++index2 < children.length) {
    const child = children[index2];
    let after;
    indexStack[indexStack.length - 1] = index2;
    if (index2 + 1 < children.length) {
      // Peek at the first character the next sibling will produce, so the
      // current child knows what follows it.
      let handle2 = state.handle.handlers[children[index2 + 1].type];
      if (handle2 && handle2.peek)
        handle2 = handle2.peek;
      after = handle2 ? handle2(children[index2 + 1], parent, state, {
        before: "",
        after: "",
        ...tracker.current()
      }).charAt(0) : "";
    } else {
      after = info.after;
    }
    // An EOL directly before raw HTML would break the surrounding block, so
    // replace it with a space and re-seed the position tracker.
    if (results.length > 0 && (before === "\r" || before === "\n") && child.type === "html") {
      results[results.length - 1] = results[results.length - 1].replace(
        /(\r?\n|\r)$/,
        " "
      );
      before = " ";
      tracker = state.createTracker(info);
      tracker.move(results.join(""));
    }
    results.push(
      tracker.move(
        state.handle(child, parent, state, {
          ...tracker.current(),
          before,
          after
        })
      )
    );
    // Next child's `before` is the last emitted character.
    before = results[results.length - 1].slice(-1);
  }
  indexStack.pop();
  return results.join("");
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/container-flow.js
|
||
// Serialize the flow (block) children of `parent`, separated by the gaps
// chosen by `between`.
function containerFlow(parent, state, info) {
  const indexStack = state.indexStack;
  const children = parent.children || [];
  const tracker = state.createTracker(info);
  const results = [];
  let index2 = -1;
  indexStack.push(-1);
  while (++index2 < children.length) {
    const child = children[index2];
    indexStack[indexStack.length - 1] = index2;
    results.push(
      tracker.move(
        state.handle(child, parent, state, {
          before: "\n",
          after: "\n",
          ...tracker.current()
        })
      )
    );
    // Any non-list sibling resets the "alternate bullet" bookkeeping.
    if (child.type !== "list") {
      state.bulletLastUsed = void 0;
    }
    if (index2 < children.length - 1) {
      results.push(
        tracker.move(between(child, children[index2 + 1], parent, state))
      );
    }
  }
  indexStack.pop();
  return results.join("");
}
|
||
/**
 * Compute the separator between two flow siblings by consulting the `join`
 * handlers from last-registered to first. A number means that many blank
 * lines; `false` means an invisible HTML comment is needed; `true`/`1`
 * accepts the default of one blank line.
 */
function between(left, right, parent, state) {
  for (let position = state.join.length - 1; position >= 0; position--) {
    const outcome = state.join[position](left, right, parent, state);
    if (outcome === true || outcome === 1) {
      break;
    }
    if (typeof outcome === "number") {
      return "\n".repeat(1 + outcome);
    }
    if (outcome === false) {
      return "\n\n<!---->\n\n";
    }
  }
  return "\n\n";
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/indent-lines.js
|
||
// Matches any line ending (CRLF, LF, or lone CR).
var eol = /\r?\n|\r/g;
/**
 * Apply `map4(line, index, blank)` to every line of `value2`, preserving the
 * original line endings between the mapped lines.
 */
function indentLines(value2, map4) {
  const pieces = [];
  let consumed = 0;
  let lineNumber = 0;
  let found = eol.exec(value2);
  while (found) {
    emit(value2.slice(consumed, found.index));
    // Keep the exact line ending that was matched.
    pieces.push(found[0]);
    consumed = found.index + found[0].length;
    lineNumber++;
    found = eol.exec(value2);
  }
  emit(value2.slice(consumed));
  return pieces.join("");
  function emit(segment) {
    pieces.push(map4(segment, lineNumber, !segment));
  }
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/safe.js
|
||
// Escape `input` so it serializes safely in the current construct stack.
// Scans every in-scope unsafe pattern for match positions (including into the
// virtual `config.before`/`config.after` context), then emits a backslash
// escape or a `&#x…;` character reference at each position.
function safe(state, input, config) {
  // Prepend/append context so patterns spanning the boundary are detected.
  const value2 = (config.before || "") + (input || "") + (config.after || "");
  const positions = [];
  const result = [];
  const infos = {};
  let index2 = -1;
  while (++index2 < state.unsafe.length) {
    const pattern = state.unsafe[index2];
    if (!patternInScope(state.stack, pattern)) {
      continue;
    }
    const expression = state.compilePattern(pattern);
    let match2;
    while (match2 = expression.exec(value2)) {
      const before = "before" in pattern || Boolean(pattern.atBreak);
      const after = "after" in pattern;
      // Skip past the captured `before` group to the character itself.
      const position2 = match2.index + (before ? match2[1].length : 0);
      if (positions.includes(position2)) {
        // Merge: a position only keeps before/after context flags if every
        // pattern matching there requires them.
        if (infos[position2].before && !before) {
          infos[position2].before = false;
        }
        if (infos[position2].after && !after) {
          infos[position2].after = false;
        }
      } else {
        positions.push(position2);
        infos[position2] = { before, after };
      }
    }
  }
  positions.sort(numerical);
  // Only escape inside the actual input, not the before/after context.
  let start = config.before ? config.before.length : 0;
  const end = value2.length - (config.after ? config.after.length : 0);
  index2 = -1;
  while (++index2 < positions.length) {
    const position2 = positions[index2];
    if (position2 < start || position2 >= end) {
      continue;
    }
    // Skip positions that only matched because of an adjacent position that
    // will itself be escaped (escaping one breaks the other's context).
    if (position2 + 1 < end && positions[index2 + 1] === position2 + 1 && infos[position2].after && !infos[position2 + 1].before && !infos[position2 + 1].after || positions[index2 - 1] === position2 - 1 && infos[position2].before && !infos[position2 - 1].before && !infos[position2 - 1].after) {
      continue;
    }
    if (start !== position2) {
      result.push(escapeBackslashes(value2.slice(start, position2), "\\"));
    }
    start = position2;
    // ASCII punctuation can be backslash-escaped (unless excluded by
    // `config.encode`); everything else becomes a hex character reference.
    if (/[!-/:-@[-`{-~]/.test(value2.charAt(position2)) && (!config.encode || !config.encode.includes(value2.charAt(position2)))) {
      result.push("\\");
    } else {
      result.push(
        "&#x" + value2.charCodeAt(position2).toString(16).toUpperCase() + ";"
      );
      start++;
    }
  }
  result.push(escapeBackslashes(value2.slice(start, end), config.after));
  return result.join("");
}
|
||
/**
 * Ascending numeric comparator for `Array.prototype.sort`.
 */
function numerical(a, b) {
  const difference = a - b;
  return difference;
}
|
||
/**
 * Double every backslash in `value2` that precedes ASCII punctuation (such a
 * backslash would otherwise act as an escape). `after` is appended only for
 * lookahead, so a trailing backslash before punctuation in `after` is also
 * caught.
 */
function escapeBackslashes(value2, after) {
  const expression = /\\(?=[!-/:-@[-`{-~])/g;
  const whole = value2 + after;
  const hits = [];
  let found = expression.exec(whole);
  while (found) {
    hits.push(found.index);
    found = expression.exec(whole);
  }
  const pieces = [];
  let consumed = 0;
  for (const hit of hits) {
    if (consumed !== hit) {
      pieces.push(value2.slice(consumed, hit));
    }
    // Insert the doubling backslash before the original one.
    pieces.push("\\");
    consumed = hit;
  }
  pieces.push(value2.slice(consumed));
  return pieces.join("");
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/util/track.js
|
||
/**
 * Create a position tracker. `move(text)` advances line/column over `text`
 * and returns it unchanged; `current()` snapshots the position; `shift(n)`
 * adds `n` to the column offset applied after each line break.
 */
function track(config) {
  const settings = config || {};
  const startPoint = settings.now || {};
  let lineShift = settings.lineShift || 0;
  let line = startPoint.line || 1;
  let column = startPoint.column || 1;
  function current() {
    return { now: { line, column }, lineShift };
  }
  function shift(value2) {
    lineShift += value2;
  }
  function move(input) {
    const value2 = input || "";
    const chunks = value2.split(/\r?\n|\r/g);
    const tail = chunks[chunks.length - 1];
    line += chunks.length - 1;
    // After a line break the column restarts at 1 plus the accumulated shift.
    column = chunks.length === 1 ? column + tail.length : 1 + tail.length + lineShift;
    return value2;
  }
  return { move, current, shift };
}
|
||
|
||
// node_modules/mdast-util-to-markdown/lib/index.js
|
||
/**
 * Serialize an mdast tree to a markdown string.
 *
 * Builds the mutable serialization `state` (handlers, unsafe patterns, join
 * rules, construct stack, position helpers), applies `options`/extensions via
 * `configure2`, then dispatches on node `type` through `zwitch`.
 *
 * @param tree mdast node to serialize.
 * @param options Serialization options and extensions.
 * @returns Markdown text, guaranteed to end with an EOL.
 */
function toMarkdown(tree, options = {}) {
  const state = {
    enter,
    indentLines,
    associationId: association,
    containerPhrasing: containerPhrasingBound,
    containerFlow: containerFlowBound,
    createTracker: track,
    compilePattern,
    safe: safeBound,
    stack: [],
    unsafe: [...unsafe],
    join: [...join],
    // @ts-expect-error: GFM / frontmatter are typed in `mdast` but not defined
    // here.
    handlers: { ...handle },
    options: {},
    indexStack: [],
    // @ts-expect-error: add `handle` in a second.
    handle: void 0
  };
  configure2(state, options);
  if (state.options.tightDefinitions) {
    state.join.push(joinDefinition);
  }
  state.handle = zwitch("type", {
    invalid,
    unknown,
    handlers: state.handlers
  });
  let result = state.handle(tree, void 0, state, {
    before: "\n",
    after: "\n",
    now: { line: 1, column: 1 },
    lineShift: 0
  });
  // Ensure exactly one trailing EOL unless one is already present.
  if (result && result.charCodeAt(result.length - 1) !== 10 && result.charCodeAt(result.length - 1) !== 13) {
    result += "\n";
  }
  return result;
  // Push a construct name onto the stack; the returned function pops it.
  function enter(name) {
    state.stack.push(name);
    return exit2;
    function exit2() {
      state.stack.pop();
    }
  }
}
|
||
/**
 * Dispatch fallback for values that are not nodes at all.
 *
 * @throws {Error} Always.
 */
function invalid(value2) {
  const message = "Cannot handle value `" + value2 + "`, expected node";
  throw new Error(message);
}
|
||
/**
 * Dispatch fallback for nodes whose `type` has no registered handler.
 *
 * @throws {Error} Always.
 */
function unknown(value2) {
  const node2 = (
    /** @type {Nodes} */
    value2
  );
  const message = "Cannot handle unknown node `" + node2.type + "`";
  throw new Error(message);
}
|
||
/**
 * Join rule added by `options.tightDefinitions`: two adjacent definitions
 * get no blank line between them.
 */
function joinDefinition(left, right) {
  const bothDefinitions = left.type === "definition" && right.type === "definition";
  if (bothDefinitions) {
    return 0;
  }
}
|
||
/**
 * `state.containerPhrasing`: forwards to `containerPhrasing` with `this`
 * (the state) as the second argument.
 */
function containerPhrasingBound(parent, info) {
  const state = this;
  return containerPhrasing(parent, state, info);
}
|
||
/**
 * `state.containerFlow`: forwards to `containerFlow` with `this` (the state)
 * as the second argument.
 */
function containerFlowBound(parent, info) {
  const state = this;
  return containerFlow(parent, state, info);
}
|
||
/**
 * `state.safe`: forwards to `safe` with `this` (the state) as the first
 * argument.
 */
function safeBound(value2, config) {
  const state = this;
  return safe(state, value2, config);
}
|
||
|
||
// node_modules/remark-stringify/lib/index.js
|
||
/**
 * unified plugin that installs a compiler turning an mdast tree into
 * markdown. Must be invoked with the unified processor as `this`.
 *
 * @param {Options | null | undefined} [options]
 *   Serialization options forwarded to `toMarkdown`.
 */
function remarkStringify(options) {
  const self = this;
  self.compiler = compiler2;
  function compiler2(tree) {
    return toMarkdown(tree, {
      ...self.data("settings"),
      ...options,
      // Note: this option is not in the readme.
      // The goal is for it to be set by plugins on `data` instead of being
      // passed by users.
      extensions: self.data("toMarkdownExtensions") || []
    });
  }
}
|
||
|
||
// node_modules/bail/index.js
|
||
/**
 * Throw the given error if there is one; otherwise do nothing.
 *
 * @param {Error | null | undefined} [error] Error to rethrow.
 */
function bail(error) {
  if (!error) {
    return;
  }
  throw error;
}
|
||
|
||
// node_modules/unified/lib/index.js
|
||
// Interop shim: load the bundled CommonJS `extend` module as an ESM namespace.
var import_extend = __toESM(require_extend(), 1);
|
||
|
||
// node_modules/devlop/lib/default.js
|
||
// Intentionally empty: production build of `devlop`'s assertion helper, where
// development-only checks are no-ops.
function ok2() {
}
|
||
|
||
// node_modules/is-plain-obj/index.js
|
||
/**
 * Whether `value2` is a plain object: created by `{}`/`new Object()` or with
 * a `null` prototype, and without `Symbol.toStringTag`/`Symbol.iterator`
 * (which mark exotic objects).
 */
function isPlainObject(value2) {
  if (typeof value2 !== "object" || value2 === null) {
    return false;
  }
  const prototype = Object.getPrototypeOf(value2);
  const prototypeIsPlain = prototype === null || prototype === Object.prototype || Object.getPrototypeOf(prototype) === null;
  if (!prototypeIsPlain) {
    return false;
  }
  if (Symbol.toStringTag in value2) {
    return false;
  }
  return !(Symbol.iterator in value2);
}
|
||
|
||
// node_modules/trough/index.js
|
||
/**
 * Create a middleware pipeline. `use(fn)` appends middleware; `run(...values,
 * done)` threads the values through each middleware in order, then calls
 * `done` with the final values, or with the first error encountered.
 */
function trough() {
  const fns = [];
  const pipeline = { run, use };
  return pipeline;
  function run(...values) {
    let middlewareIndex = -1;
    // The last argument is the completion callback.
    const callback = values.pop();
    if (typeof callback !== "function") {
      throw new TypeError("Expected function as last argument, not " + callback);
    }
    next(null, ...values);
    function next(error, ...output) {
      const fn = fns[++middlewareIndex];
      let index2 = -1;
      if (error) {
        callback(error);
        return;
      }
      // A middleware may return fewer/nullish values; keep the previous
      // value for any hole.
      while (++index2 < values.length) {
        if (output[index2] === null || output[index2] === void 0) {
          output[index2] = values[index2];
        }
      }
      values = output;
      if (fn) {
        // `wrap` normalizes sync returns, promises, and callbacks onto `next`.
        wrap(fn, next)(...output);
      } else {
        callback(null, ...output);
      }
    }
  }
  function use(middelware) {
    if (typeof middelware !== "function") {
      throw new TypeError(
        "Expected `middelware` to be a function, not " + middelware
      );
    }
    fns.push(middelware);
    return pipeline;
  }
}
|
||
/**
 * Wrap `middleware` so that callback-style, promise-returning, and
 * synchronous results are all funneled into `callback(error, ...values)`,
 * which fires at most once.
 */
function wrap(middleware, callback) {
  let settled;
  function finish(error, ...output) {
    // Guarantee the callback runs at most once.
    if (settled) {
      return;
    }
    settled = true;
    callback(error, ...output);
  }
  function fulfill(value2) {
    finish(null, value2);
  }
  function wrapped(...parameters) {
    // Middleware declaring more parameters than were supplied receives a
    // `done` callback as the extra argument.
    const expectsCallback = middleware.length > parameters.length;
    if (expectsCallback) {
      parameters.push(finish);
    }
    let result;
    try {
      result = middleware.apply(this, parameters);
    } catch (error) {
      const exception = (
        /** @type {Error} */
        error
      );
      // If the middleware already invoked its callback and then threw, the
      // throw is a programming error: rethrow it.
      if (expectsCallback && settled) {
        throw exception;
      }
      return finish(exception);
    }
    if (expectsCallback) {
      return;
    }
    // No callback expected: interpret the return value.
    if (result instanceof Promise) {
      result.then(fulfill, finish);
    } else if (result instanceof Error) {
      finish(result);
    } else {
      fulfill(result);
    }
  }
  return wrapped;
}
|
||
|
||
// node_modules/vfile-message/lib/index.js
|
||
// node_modules/vfile-message/lib/index.js
/**
 * Message about a file: an Error subclass carrying positional info
 * (line/column/place), a rule id/source, and severity (`fatal`).
 * Bundled from `vfile-message`; supports several legacy call signatures:
 *   (reason|cause, options?) · (reason|cause, parentNode, origin?) ·
 *   (reason|cause, point|position, origin?) · (reason|cause, origin?)
 */
var VFileMessage = class extends Error {
  // eslint-disable-next-line complexity
  constructor(causeOrReason, optionsOrParentOrPlace, origin) {
    super();
    // Legacy two-argument form: (reason, origin).
    if (typeof optionsOrParentOrPlace === "string") {
      origin = optionsOrParentOrPlace;
      optionsOrParentOrPlace = void 0;
    }
    let reason = "";
    let options = {};
    let legacyCause = false;
    // Normalize the second argument into an options object by duck-typing:
    // a point has line/column, a position has start/end, a node has `type`.
    if (optionsOrParentOrPlace) {
      if ("line" in optionsOrParentOrPlace && "column" in optionsOrParentOrPlace) {
        options = { place: optionsOrParentOrPlace };
      } else if ("start" in optionsOrParentOrPlace && "end" in optionsOrParentOrPlace) {
        options = { place: optionsOrParentOrPlace };
      } else if ("type" in optionsOrParentOrPlace) {
        options = {
          ancestors: [optionsOrParentOrPlace],
          place: optionsOrParentOrPlace.position
        };
      } else {
        options = { ...optionsOrParentOrPlace };
      }
    }
    // First argument is either a reason string or a causal Error.
    if (typeof causeOrReason === "string") {
      reason = causeOrReason;
    } else if (!options.cause && causeOrReason) {
      legacyCause = true;
      reason = causeOrReason.message;
      options.cause = causeOrReason;
    }
    // `origin` is `source:ruleId` or just `ruleId`.
    if (!options.ruleId && !options.source && typeof origin === "string") {
      const index2 = origin.indexOf(":");
      if (index2 === -1) {
        options.ruleId = origin;
      } else {
        options.source = origin.slice(0, index2);
        options.ruleId = origin.slice(index2 + 1);
      }
    }
    // Fall back to the closest ancestor's position when no place was given.
    // NOTE(review): `options.ancestors && options.ancestors` is a redundant
    // double check present in the upstream bundle; behavior is unaffected.
    if (!options.place && options.ancestors && options.ancestors) {
      const parent = options.ancestors[options.ancestors.length - 1];
      if (parent) {
        options.place = parent.position;
      }
    }
    // A position has `start`; a bare point is used directly.
    const start = options.place && "start" in options.place ? options.place.start : options.place;
    this.ancestors = options.ancestors || void 0;
    this.cause = options.cause || void 0;
    this.column = start ? start.column : void 0;
    this.fatal = void 0;
    this.file;
    this.message = reason;
    this.line = start ? start.line : void 0;
    // `name` is the serialized position (e.g. `3:1-3:5`), not the class name.
    this.name = stringifyPosition(options.place) || "1:1";
    this.place = options.place || void 0;
    this.reason = this.message;
    this.ruleId = options.ruleId || void 0;
    this.source = options.source || void 0;
    // Preserve the original stack only for the legacy `(error, ...)` form.
    this.stack = legacyCause && options.cause && typeof options.cause.stack === "string" ? options.cause.stack : "";
    // Declared (but unset) optional fields; defaults live on the prototype.
    this.actual;
    this.expected;
    this.note;
    this.url;
  }
};
|
||
// Shared default values for `VFileMessage` fields. Instances shadow these
// with own properties in the constructor; anything left untouched falls
// back to these prototype-level defaults.
Object.assign(VFileMessage.prototype, {
  file: "",
  name: "",
  reason: "",
  message: "",
  stack: "",
  column: void 0,
  line: void 0,
  ancestors: void 0,
  cause: void 0,
  fatal: void 0,
  place: void 0,
  ruleId: void 0,
  source: void 0
});
|
||
|
||
// node_modules/vfile/lib/minpath.js
|
||
var import_node_path = __toESM(require("node:path"), 1);
|
||
|
||
// node_modules/vfile/lib/minproc.js
|
||
var import_node_process = __toESM(require("node:process"), 1);
|
||
|
||
// node_modules/vfile/lib/minurl.js
|
||
var import_node_url = require("node:url");
|
||
|
||
// node_modules/vfile/lib/minurl.shared.js
|
||
// node_modules/vfile/lib/minurl.shared.js
/**
 * Check whether `fileUrlOrPath` looks like a WHATWG `URL` object.
 *
 * @param {unknown} fileUrlOrPath
 *   Value to inspect (path string, `URL`, or anything else).
 * @returns {boolean}
 *   `true` only for URL-like objects.
 */
function isUrl(fileUrlOrPath) {
  // Anything that is not a non-null object cannot be a URL instance.
  if (fileUrlOrPath === null || typeof fileUrlOrPath !== "object") {
    return false;
  }
  // WHATWG URLs expose truthy `href` and `protocol`; legacy `url.parse()`
  // results additionally carry an `auth` field, which real `URL`s never do.
  // @ts-expect-error: indexing is fine.
  const urlLike = "href" in fileUrlOrPath && fileUrlOrPath.href && "protocol" in fileUrlOrPath && fileUrlOrPath.protocol && fileUrlOrPath.auth === void 0;
  return Boolean(urlLike);
}
|
||
|
||
// node_modules/vfile/lib/index.js
|
||
// node_modules/vfile/lib/index.js
// Path-related `VFile` fields, ordered from least to most specific. The
// constructor applies incoming options in this order so a more specific
// field (e.g. `extname`) can refine a less specific one (e.g. `path`).
var order = /** @type {const} */ (
  ["history", "path", "basename", "stem", "extname", "dirname"]
);
|
||
/**
 * Virtual file: an in-memory representation of a file with a value,
 * path history, metadata (`data`), and attached lint/diagnostic messages.
 * Bundled from `vfile`.
 */
var VFile = class {
  /**
   * Create a new virtual file.
   *
   * `options` is treated as: `string`/`Uint8Array` → `{value}`, `URL` →
   * `{path}`, `VFile`/object → shallow-copied fields. Path fields are set
   * least-specific first (see `order`); `dirname`/`extname` require a path.
   *
   * @param {Compatible | null | undefined} [value]
   *   File value (optional).
   * @returns
   *   New instance.
   */
  constructor(value2) {
    let options;
    if (!value2) {
      options = {};
    } else if (isUrl(value2)) {
      options = { path: value2 };
    } else if (typeof value2 === "string" || isUint8Array(value2)) {
      options = { value: value2 };
    } else {
      options = value2;
    }
    this.cwd = import_node_process.default.cwd();
    this.data = {};
    this.history = [];
    this.messages = [];
    // Declared (but unset) fields; populated later by processors.
    this.value;
    this.map;
    this.result;
    this.stored;
    // Apply path-ish options least-specific-first so later ones win.
    let index2 = -1;
    while (++index2 < order.length) {
      const prop2 = order[index2];
      if (prop2 in options && options[prop2] !== void 0 && options[prop2] !== null) {
        // Copy `history` so the caller's array is not shared.
        this[prop2] = prop2 === "history" ? [...options[prop2]] : options[prop2];
      }
    }
    // Shallow-copy any remaining (non-path) fields verbatim.
    let prop;
    for (prop in options) {
      if (!order.includes(prop)) {
        this[prop] = options[prop];
      }
    }
  }
  /**
   * Get the basename (including extname) (example: `'index.min.js'`),
   * or `undefined` when there is no path.
   */
  get basename() {
    return typeof this.path === "string" ? import_node_path.default.basename(this.path) : void 0;
  }
  /**
   * Set the basename. Cannot contain path separators and cannot be
   * nullified (use `file.path = file.dirname` instead).
   */
  set basename(basename) {
    assertNonEmpty(basename, "basename");
    assertPart(basename, "basename");
    this.path = import_node_path.default.join(this.dirname || "", basename);
  }
  /**
   * Get the parent path (example: `'~'`), or `undefined` without a path.
   */
  get dirname() {
    return typeof this.path === "string" ? import_node_path.default.dirname(this.path) : void 0;
  }
  /**
   * Set the parent path. Requires a `path` to already be set.
   */
  set dirname(dirname) {
    assertPath(this.basename, "dirname");
    this.path = import_node_path.default.join(dirname || "", this.basename);
  }
  /**
   * Get the extname (including dot, example: `'.js'`), or `undefined`
   * without a path.
   */
  get extname() {
    return typeof this.path === "string" ? import_node_path.default.extname(this.path) : void 0;
  }
  /**
   * Set the extname. Must start with `.`, contain a single dot, no path
   * separators; requires a `path` to already be set.
   */
  set extname(extname) {
    assertPart(extname, "extname");
    assertPath(this.dirname, "extname");
    if (extname) {
      // 46 is the code point of `.`.
      if (extname.codePointAt(0) !== 46) {
        throw new Error("`extname` must start with `.`");
      }
      if (extname.includes(".", 1)) {
        throw new Error("`extname` cannot contain multiple dots");
      }
    }
    this.path = import_node_path.default.join(this.dirname, this.stem + (extname || ""));
  }
  /**
   * Get the full path (example: `'~/index.min.js'`); the most recent
   * entry in `history`.
   */
  get path() {
    return this.history[this.history.length - 1];
  }
  /**
   * Set the full path. Cannot be nullified; a `file:` URL is converted
   * with `url.fileURLToPath`. Each distinct path is appended to `history`.
   */
  set path(path2) {
    if (isUrl(path2)) {
      path2 = (0, import_node_url.fileURLToPath)(path2);
    }
    assertNonEmpty(path2, "path");
    if (this.path !== path2) {
      this.history.push(path2);
    }
  }
  /**
   * Get the stem (basename without extname, example: `'index.min'`),
   * or `undefined` without a path.
   */
  get stem() {
    return typeof this.path === "string" ? import_node_path.default.basename(this.path, this.extname) : void 0;
  }
  /**
   * Set the stem. Cannot contain path separators and cannot be nullified.
   */
  set stem(stem) {
    assertNonEmpty(stem, "stem");
    assertPart(stem, "stem");
    this.path = import_node_path.default.join(this.dirname || "", stem + (this.extname || ""));
  }
  // Normal prototypal methods.
  /**
   * Create a fatal message (`fatal: true`) associated with the file and
   * throw it. Accepts the same signatures as `VFileMessage`.
   *
   * @returns {never}
   * @throws {VFileMessage}
   */
  fail(causeOrReason, optionsOrParentOrPlace, origin) {
    const message = this.message(causeOrReason, optionsOrParentOrPlace, origin);
    message.fatal = true;
    throw message;
  }
  /**
   * Create an info message (`fatal: undefined`) associated with the file.
   * Accepts the same signatures as `VFileMessage`.
   *
   * @returns {VFileMessage}
   */
  info(causeOrReason, optionsOrParentOrPlace, origin) {
    const message = this.message(causeOrReason, optionsOrParentOrPlace, origin);
    message.fatal = void 0;
    return message;
  }
  /**
   * Create a warning message (`fatal: false`) associated with the file,
   * store it in `this.messages`, and return it. Accepts the same
   * signatures as `VFileMessage`.
   *
   * @returns {VFileMessage}
   */
  message(causeOrReason, optionsOrParentOrPlace, origin) {
    const message = new VFileMessage(
      // @ts-expect-error: the overloads are fine.
      causeOrReason,
      optionsOrParentOrPlace,
      origin
    );
    if (this.path) {
      // Prefix the positional `name` with the file path for display.
      message.name = this.path + ":" + message.name;
      message.file = this.path;
    }
    message.fatal = false;
    this.messages.push(message);
    return message;
  }
  /**
   * Serialize the file value to a string.
   *
   * @param {string | null | undefined} [encoding='utf8']
   *   Encoding used when `value` is a `Uint8Array` (default: `'utf-8'`);
   *   which encodings are supported depends on the engine.
   * @returns {string}
   *   Serialized file (`''` when there is no value).
   */
  toString(encoding) {
    if (this.value === void 0) {
      return "";
    }
    if (typeof this.value === "string") {
      return this.value;
    }
    const decoder = new TextDecoder(encoding || void 0);
    return decoder.decode(this.value);
  }
};
|
||
/**
 * Assert that a path *part* (basename, stem, or extname) contains no
 * platform path separator.
 *
 * @param {string | undefined} part
 *   Part to check (falsy values pass).
 * @param {string} name
 *   Field name used in the error message.
 * @returns {undefined}
 * @throws {Error}
 *   When `part` contains `path.sep`.
 */
function assertPart(part, name) {
  const separator = import_node_path.default.sep;
  if (!part || !part.includes(separator)) {
    return;
  }
  throw new Error(
    "`" + name + "` cannot be a path: did not expect `" + separator + "`"
  );
}
|
||
/**
 * Assert that a path field value is truthy (non-empty).
 *
 * @param {string | undefined} part
 *   Value to check.
 * @param {string} name
 *   Field name used in the error message.
 * @returns {undefined}
 * @throws {Error}
 *   When `part` is falsy.
 */
function assertNonEmpty(part, name) {
  if (part) {
    return;
  }
  throw new Error("`" + name + "` cannot be empty");
}
|
||
/**
 * Assert that a `path` already exists before setting a dependent field
 * (such as `dirname` or `extname`).
 *
 * @param {string | undefined} path2
 *   Current path-derived value.
 * @param {string} name
 *   Field being set, used in the error message.
 * @returns {undefined}
 * @throws {Error}
 *   When `path2` is falsy.
 */
function assertPath(path2, name) {
  if (path2) {
    return;
  }
  throw new Error("Setting `" + name + "` requires `path` to be set too");
}
|
||
/**
 * Check whether `value2` looks like a `Uint8Array`.
 *
 * Duck-typed: any object exposing `byteLength` and `byteOffset`
 * (including other typed arrays and `DataView`) passes.
 *
 * @param {unknown} value2
 *   Value to inspect.
 * @returns {boolean}
 *   `true` for typed-array-like objects.
 */
function isUint8Array(value2) {
  if (!value2 || typeof value2 !== "object") {
    return false;
  }
  return Boolean("byteLength" in value2 && "byteOffset" in value2);
}
|
||
|
||
// node_modules/unified/lib/callable-instance.js
|
||
// node_modules/unified/lib/callable-instance.js
// Base "class" whose instances are themselves callable functions. The
// constructor returns a function (`apply`) instead of a plain object:
// calling the instance invokes the named prototype method. `unified`
// extends this with `super("copy")` so calling a processor copies it.
var CallableInstance = (
  /**
   * @type {new <Parameters extends Array<unknown>, Result>(property: string | symbol) => (...parameters: Parameters) => Result}
   */
  /** @type {unknown} */
  /**
   * @this {Function}
   * @param {string | symbol} property
   *   Name of the prototype method to invoke when the instance is called.
   * @returns {(...parameters: Array<unknown>) => unknown}
   *   Callable instance.
   */
  function(property) {
    const self = this;
    const constr = self.constructor;
    const proto = (
      /** @type {Record<string | symbol, Function>} */
      // Prototypes do exist.
      // type-coverage:ignore-next-line
      constr.prototype
    );
    // The method that a plain call of the instance should dispatch to.
    const func = proto[property];
    // `apply` is the value actually returned from the constructor; note it
    // binds `this` to itself so the method sees the callable instance.
    const apply = function() {
      return func.apply(apply, arguments);
    };
    // Make the function behave like an instance of the subclass.
    Object.setPrototypeOf(apply, proto);
    // Copy own properties (e.g. `length`, `name`) from the target method
    // onto the callable so it mirrors the method's function metadata.
    const names = Object.getOwnPropertyNames(func);
    for (const p of names) {
      const descriptor = Object.getOwnPropertyDescriptor(func, p);
      if (descriptor)
        Object.defineProperty(apply, p, descriptor);
    }
    return apply;
  }
);
|
||
|
||
// node_modules/unified/lib/index.js
|
||
var own5 = {}.hasOwnProperty;
|
||
var Processor = class _Processor extends CallableInstance {
|
||
/**
 * Create a processor.
 *
 * `super("copy")` makes the processor itself callable: invoking the
 * processor as a function dispatches to `copy()` (see CallableInstance).
 */
constructor() {
  super("copy");
  // Legacy class-style compiler/parser hooks (capitalized) and their
  // modern function-style counterparts (lowercase), set by plugins.
  this.Compiler = void 0;
  this.Parser = void 0;
  // Registered plugins with their options; replayed by `copy()`.
  this.attachers = [];
  this.compiler = void 0;
  // How far through `attachers` `freeze()` has progressed.
  this.freezeIndex = -1;
  this.frozen = void 0;
  // Shared plugin data store (see `data()`).
  this.namespace = {};
  this.parser = void 0;
  // Middleware pipeline of transform functions.
  this.transformers = trough();
}
|
||
/**
 * Copy a processor.
 *
 * Replays every attacher onto a fresh, *unfrozen* processor and
 * deep-copies the shared data namespace, so configuring the copy never
 * affects the ancestor.
 *
 * @deprecated
 *   This is a private internal method and should not be used.
 * @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
 *   New unfrozen processor configured like its ancestor.
 */
copy() {
  const destination = (
    /** @type {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>} */
    new _Processor()
  );
  let index2 = -1;
  while (++index2 < this.attachers.length) {
    const attacher = this.attachers[index2];
    destination.use(...attacher);
  }
  // Deep copy (via `extend(true, ...)`) so nested data is not shared.
  destination.data((0, import_extend.default)(true, {}, this.namespace));
  return destination;
}
|
||
/**
 * Get or set data shared between all plugins.
 *
 * Call shapes: `data()` → whole dataset; `data(key)` → one value;
 * `data(key, value)` → set one value (chainable); `data(dataset)` →
 * replace the dataset (chainable). Setting is not allowed on *frozen*
 * processors.
 *
 * @param {Data | Key} [key]
 *   Key to get or set, entire dataset to set, or nothing to get the
 *   entire dataset (optional).
 * @param {Data[Key]} [value]
 *   Value to set (optional).
 * @returns {unknown}
 *   The processor when setting, the value at `key` when getting, or the
 *   entire dataset when getting without key.
 */
data(key, value2) {
  if (typeof key === "string") {
    // Two string-keyed forms: set when a value argument was passed.
    if (arguments.length === 2) {
      assertUnfrozen("data", this.frozen);
      this.namespace[key] = value2;
      return this;
    }
    // Own-property check avoids prototype pollution; falsy stored
    // values intentionally come back as `undefined` here.
    return own5.call(this.namespace, key) && this.namespace[key] || void 0;
  }
  // Object form: replace the whole dataset.
  if (key) {
    assertUnfrozen("data", this.frozen);
    this.namespace = key;
    return this;
  }
  return this.namespace;
}
|
||
/**
 * Freeze a processor.
 *
 * Runs every not-yet-run attacher (plugin) and locks the processor
 * against further configuration; freezing is irreversible. Called
 * automatically by `parse`, `run`, `runSync`, `stringify`, `process`,
 * and `processSync`.
 *
 * @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
 *   The current processor.
 */
freeze() {
  if (this.frozen) {
    return this;
  }
  const self = (
    /** @type {Processor} */
    /** @type {unknown} */
    this
  );
  // Resume from `freezeIndex` so attachers added before a partial freeze
  // are not run twice.
  while (++this.freezeIndex < this.attachers.length) {
    const [attacher, ...options] = this.attachers[this.freezeIndex];
    // `use(plugin, false)` disables a plugin entirely.
    if (options[0] === false) {
      continue;
    }
    // `use(plugin, true)` means "enable with default options".
    if (options[0] === true) {
      options[0] = void 0;
    }
    const transformer = attacher.call(self, ...options);
    // An attacher may return a transform to add to the run pipeline.
    if (typeof transformer === "function") {
      this.transformers.use(transformer);
    }
  }
  this.frozen = true;
  // Ensure any later (illegal) attacher additions are never executed.
  this.freezeIndex = Number.POSITIVE_INFINITY;
  return this;
}
|
||
/**
 * Parse text to a syntax tree.
 *
 * Freezes the processor if not already frozen; performs only the parse
 * phase. Throws (via `assertParser`) when no parser is configured.
 *
 * @param {Compatible | undefined} [file]
 *   File to parse (optional); typically `string` or `VFile`; any value
 *   accepted as `x` in `new VFile(x)`.
 * @returns {ParseTree extends undefined ? Node : ParseTree}
 *   Syntax tree representing `file`.
 */
parse(file) {
  this.freeze();
  const realFile = vfile(file);
  // Modern function-style `parser` wins over legacy class-style `Parser`.
  const parser = this.parser || this.Parser;
  assertParser("parse", parser);
  return parser(String(realFile), realFile);
}
|
||
/**
 * Process the given file as configured on the processor.
 *
 * Freezes the processor, then performs the parse, run, and stringify
 * phases. With a `done` callback, returns `undefined` and calls back;
 * without one, returns a promise for the processed file. The compiled
 * result lands on `file.value` when it is string/Uint8Array-like,
 * otherwise on `file.result` (e.g. a React tree from `rehype-react`).
 *
 * @param {Compatible | undefined} [file]
 *   File (optional); any value accepted as `x` in `new VFile(x)`.
 * @param {ProcessCallback<VFileWithOutput<CompileResult>> | undefined} [done]
 *   Callback (optional).
 * @returns {Promise<VFile> | undefined}
 *   Nothing if `done` is given; otherwise a promise resolved with the
 *   processed file or rejected with a fatal error.
 */
process(file, done) {
  const self = this;
  this.freeze();
  assertParser("process", this.parser || this.Parser);
  assertCompiler("process", this.compiler || this.Compiler);
  // Same executor drives both the callback and the promise form.
  return done ? executor(void 0, done) : new Promise(executor);
  function executor(resolve, reject) {
    const realFile = vfile(file);
    const parseTree = (
      /** @type {HeadTree extends undefined ? Node : HeadTree} */
      /** @type {unknown} */
      self.parse(realFile)
    );
    self.run(parseTree, realFile, function(error, tree, file2) {
      if (error || !tree || !file2) {
        return realDone(error);
      }
      const compileTree = (
        /** @type {CompileTree extends undefined ? Node : CompileTree} */
        /** @type {unknown} */
        tree
      );
      const compileResult = self.stringify(compileTree, file2);
      // Serialized output goes on `value`; non-serialized compiler
      // results (custom trees, components, ...) go on `result`.
      if (looksLikeAValue(compileResult)) {
        file2.value = compileResult;
      } else {
        file2.result = compileResult;
      }
      realDone(
        error,
        /** @type {VFileWithOutput<CompileResult>} */
        file2
      );
    });
    // Route the outcome to whichever completion mechanism is in play.
    function realDone(error, file2) {
      if (error || !file2) {
        reject(error);
      } else if (resolve) {
        resolve(file2);
      } else {
        ok2(done, "`done` is defined if `resolve` is not");
        done(void 0, file2);
      }
    }
  }
}
|
||
/**
 * Process the given file synchronously.
 *
 * Freezes the processor and performs the parse, run, and stringify
 * phases. Throws (via `assertDone`) if any configured transform is
 * asynchronous, and rethrows (via `bail`) any fatal error.
 *
 * @param {Compatible | undefined} [file]
 *   File (optional); any value accepted as `x` in `new VFile(x)`.
 * @returns {VFileWithOutput<CompileResult>}
 *   The processed file; compiled output is on `file.value` (or
 *   `file.result` for non-serializing compilers).
 */
processSync(file) {
  let complete = false;
  let result;
  this.freeze();
  assertParser("processSync", this.parser || this.Parser);
  assertCompiler("processSync", this.compiler || this.Compiler);
  // `realDone` runs synchronously only if every transform is sync;
  // `assertDone` below catches the async case.
  this.process(file, realDone);
  assertDone("processSync", "process", complete);
  ok2(result, "we either bailed on an error or have a tree");
  return result;
  function realDone(error, file2) {
    complete = true;
    // Rethrow fatal errors instead of returning them.
    bail(error);
    result = file2;
  }
}
|
||
/**
 * Run *transformers* on a syntax tree.
 *
 * Freezes the processor; performs only the run phase. With a `done`
 * callback, returns `undefined`; without one, returns a promise for the
 * transformed tree. `file` is optional and may be omitted with `done`
 * passed in its place.
 *
 * @param {HeadTree extends undefined ? Node : HeadTree} tree
 *   Tree to transform and inspect.
 * @param {(RunCallback<TailTree extends undefined ? Node : TailTree> | Compatible)} [file]
 *   File associated with `node` (optional), or the `done` callback.
 * @param {RunCallback<TailTree extends undefined ? Node : TailTree>} [done]
 *   Callback (optional).
 * @returns {Promise<TailTree extends undefined ? Node : TailTree> | undefined}
 *   Nothing if `done` is given; otherwise a promise for the transformed
 *   tree.
 */
run(tree, file, done) {
  assertNode(tree);
  this.freeze();
  const transformers = this.transformers;
  // Shift arguments for the `run(tree, done)` call shape.
  if (!done && typeof file === "function") {
    done = file;
    file = void 0;
  }
  return done ? executor(void 0, done) : new Promise(executor);
  function executor(resolve, reject) {
    ok2(
      typeof file !== "function",
      "`file` can\u2019t be a `done` anymore, we checked"
    );
    const realFile = vfile(file);
    transformers.run(tree, realFile, realDone);
    function realDone(error, outputTree, file2) {
      // Transforms may return nothing; fall back to the input tree.
      const resultingTree = (
        /** @type {TailTree extends undefined ? Node : TailTree} */
        outputTree || tree
      );
      if (error) {
        reject(error);
      } else if (resolve) {
        resolve(resultingTree);
      } else {
        ok2(done, "`done` is defined if `resolve` is not");
        done(void 0, resultingTree, file2);
      }
    }
  }
}
|
||
/**
|
||
* Run *transformers* on a syntax tree.
|
||
*
|
||
* An error is thrown if asynchronous transforms are configured.
|
||
*
|
||
* > 👉 **Note**: `runSync` freezes the processor if not already *frozen*.
|
||
*
|
||
* > 👉 **Note**: `runSync` performs the run phase, not other phases.
|
||
*
|
||
* @param {HeadTree extends undefined ? Node : HeadTree} tree
|
||
* Tree to transform and inspect.
|
||
* @param {Compatible | undefined} [file]
|
||
* File associated with `node` (optional); any value accepted as `x` in
|
||
* `new VFile(x)`.
|
||
* @returns {TailTree extends undefined ? Node : TailTree}
|
||
* Transformed tree.
|
||
*/
|
||
// Run transformers on `tree` synchronously and return the transformed tree.
// Throws if any transformer is asynchronous (i.e. `run` has not completed
// by the time it returns) or if a transformer reported an error (`bail`).
runSync(tree, file) {
  let complete = false;
  let result;
  this.run(tree, file, realDone);
  // If `run` went async, `complete` is still false here and this throws.
  assertDone("runSync", "run", complete);
  ok2(result, "we either bailed on an error or have a tree");
  return result;
  function realDone(error, tree2) {
    // `bail` rethrows a truthy error, aborting before `result` is set.
    bail(error);
    result = tree2;
    complete = true;
  }
}
|
||
/**
|
||
* Compile a syntax tree.
|
||
*
|
||
* > 👉 **Note**: `stringify` freezes the processor if not already *frozen*.
|
||
*
|
||
* > 👉 **Note**: `stringify` performs the stringify phase, not the run phase
|
||
* > or other phases.
|
||
*
|
||
* @param {CompileTree extends undefined ? Node : CompileTree} tree
|
||
* Tree to compile.
|
||
* @param {Compatible | undefined} [file]
|
||
* File associated with `node` (optional); any value accepted as `x` in
|
||
* `new VFile(x)`.
|
||
* @returns {CompileResult extends undefined ? Value : CompileResult}
|
||
* Textual representation of the tree (see note).
|
||
*
|
||
* > 👉 **Note**: unified typically compiles by serializing: most compilers
|
||
* > return `string` (or `Uint8Array`).
|
||
* > Some compilers, such as the one configured with
|
||
* > [`rehype-react`][rehype-react], return other values (in this case, a
|
||
* > React tree).
|
||
* > If you’re using a compiler that doesn’t serialize, expect different
|
||
* > result values.
|
||
* >
|
||
* > To register custom results in TypeScript, add them to
|
||
* > {@link CompileResultMap `CompileResultMap`}.
|
||
*
|
||
* [rehype-react]: https://github.com/rehypejs/rehype-react
|
||
*/
|
||
// Compile `tree` with the configured compiler and return its output
// (typically a string; compiler-specific otherwise). Freezes the processor
// first; performs only the stringify phase.
stringify(tree, file) {
  this.freeze();
  const realFile = vfile(file);
  // `compiler` (modern) takes precedence over legacy `Compiler`.
  const compiler2 = this.compiler || this.Compiler;
  assertCompiler("stringify", compiler2);
  assertNode(tree);
  return compiler2(tree, realFile);
}
|
||
/**
|
||
* Configure the processor to use a plugin, a list of usable values, or a
|
||
* preset.
|
||
*
|
||
* If the processor is already using a plugin, the previous plugin
|
||
* configuration is changed based on the options that are passed in.
|
||
* In other words, the plugin is not added a second time.
|
||
*
|
||
* > 👉 **Note**: `use` cannot be called on *frozen* processors.
|
||
* > Call the processor first to create a new unfrozen processor.
|
||
*
|
||
* @example
|
||
* There are many ways to pass plugins to `.use()`.
|
||
* This example gives an overview:
|
||
*
|
||
* ```js
|
||
* import {unified} from 'unified'
|
||
*
|
||
* unified()
|
||
* // Plugin with options:
|
||
* .use(pluginA, {x: true, y: true})
|
||
* // Passing the same plugin again merges configuration (to `{x: true, y: false, z: true}`):
|
||
* .use(pluginA, {y: false, z: true})
|
||
* // Plugins:
|
||
* .use([pluginB, pluginC])
|
||
* // Two plugins, the second with options:
|
||
* .use([pluginD, [pluginE, {}]])
|
||
* // Preset with plugins and settings:
|
||
* .use({plugins: [pluginF, [pluginG, {}]], settings: {position: false}})
|
||
* // Settings only:
|
||
* .use({settings: {position: false}})
|
||
* ```
|
||
*
|
||
* @template {Array<unknown>} [Parameters=[]]
|
||
* @template {Node | string | undefined} [Input=undefined]
|
||
* @template [Output=Input]
|
||
*
|
||
* @overload
|
||
* @param {Preset | null | undefined} [preset]
|
||
* @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
|
||
*
|
||
* @overload
|
||
* @param {PluggableList} list
|
||
* @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
|
||
*
|
||
* @overload
|
||
* @param {Plugin<Parameters, Input, Output>} plugin
|
||
* @param {...(Parameters | [boolean])} parameters
|
||
* @returns {UsePlugin<ParseTree, HeadTree, TailTree, CompileTree, CompileResult, Input, Output>}
|
||
*
|
||
* @param {PluggableList | Plugin | Preset | null | undefined} value
|
||
* Usable value.
|
||
* @param {...unknown} parameters
|
||
* Parameters, when a plugin is given as a usable value.
|
||
* @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
|
||
* Current processor.
|
||
*/
|
||
// Configure the processor with a plugin, a list of usable values, or a
// preset. Re-using a plugin merges its primary options (deep-extend) into
// the existing attachment instead of attaching it twice. Throws if the
// processor is frozen. Returns `this` for chaining.
use(value2, ...parameters) {
  const attachers = this.attachers;
  const namespace = this.namespace;
  assertUnfrozen("use", this.frozen);
  if (value2 === null || value2 === void 0) {
    // Nothing to do; `.use(undefined)` is an explicit no-op.
  } else if (typeof value2 === "function") {
    addPlugin(value2, parameters);
  } else if (typeof value2 === "object") {
    if (Array.isArray(value2)) {
      addList(value2);
    } else {
      addPreset(value2);
    }
  } else {
    throw new TypeError("Expected usable value, not `" + value2 + "`");
  }
  return this;
  // Dispatch one entry of a plugin list: plain plugin, [plugin, ...options]
  // tuple, or nested preset.
  function add(value3) {
    if (typeof value3 === "function") {
      addPlugin(value3, []);
    } else if (typeof value3 === "object") {
      if (Array.isArray(value3)) {
        const [plugin, ...parameters2] = (
          /** @type {PluginTuple<Array<unknown>>} */
          value3
        );
        addPlugin(plugin, parameters2);
      } else {
        addPreset(value3);
      }
    } else {
      throw new TypeError("Expected usable value, not `" + value3 + "`");
    }
  }
  // Apply a preset: attach its plugins and deep-merge its settings into
  // the processor's `settings` namespace.
  function addPreset(result) {
    if (!("plugins" in result) && !("settings" in result)) {
      throw new Error(
        "Expected usable value but received an empty preset, which is probably a mistake: presets typically come with `plugins` and sometimes with `settings`, but this has neither"
      );
    }
    addList(result.plugins);
    if (result.settings) {
      namespace.settings = (0, import_extend.default)(true, namespace.settings, result.settings);
    }
  }
  // Attach every entry of a (possibly nullish) plugin list in order.
  function addList(plugins) {
    let index2 = -1;
    if (plugins === null || plugins === void 0) {
      // Empty: presets may omit `plugins`.
    } else if (Array.isArray(plugins)) {
      while (++index2 < plugins.length) {
        const thing = plugins[index2];
        add(thing);
      }
    } else {
      throw new TypeError("Expected a list of plugins, not `" + plugins + "`");
    }
  }
  // Attach a single plugin, merging options if it was attached before.
  function addPlugin(plugin, parameters2) {
    let index2 = -1;
    let entryIndex = -1;
    // Linear scan for a previous attachment of the same plugin function.
    while (++index2 < attachers.length) {
      if (attachers[index2][0] === plugin) {
        entryIndex = index2;
        break;
      }
    }
    if (entryIndex === -1) {
      attachers.push([plugin, ...parameters2]);
    } else if (parameters2.length > 0) {
      let [primary, ...rest] = parameters2;
      const currentPrimary = attachers[entryIndex][1];
      // Deep-merge only when both old and new primary options are plain
      // objects; otherwise the new value replaces the old outright.
      if (isPlainObject(currentPrimary) && isPlainObject(primary)) {
        primary = (0, import_extend.default)(true, currentPrimary, primary);
      }
      attachers[entryIndex] = [plugin, primary, ...rest];
    }
  }
}
|
||
};
|
||
// The base processor, frozen: consumers must call `unified()` to obtain a
// mutable copy before configuring plugins (see `assertUnfrozen`).
var unified = new Processor().freeze();
|
||
// Guard: a parser function must be configured before `name` can run.
function assertParser(name, value2) {
  if (typeof value2 === "function") {
    return;
  }
  throw new TypeError("Cannot `" + name + "` without `parser`");
}
|
||
// Guard: a compiler function must be configured before `name` can run.
function assertCompiler(name, value2) {
  if (typeof value2 === "function") {
    return;
  }
  throw new TypeError("Cannot `" + name + "` without `compiler`");
}
|
||
// Guard: mutating operations (`data`, `use`) are forbidden once frozen.
function assertUnfrozen(name, frozen) {
  if (!frozen) {
    return;
  }
  throw new Error(
    "Cannot call `" + name + "` on a frozen processor.\nCreate a new processor first, by calling it: use `processor()` instead of `processor`."
  );
}
|
||
// Guard: a unist node is a plain object carrying a string `type`.
function assertNode(node2) {
  const isNode = isPlainObject(node2) && typeof node2.type === "string";
  if (!isNode) {
    throw new TypeError("Expected node, got `" + node2 + "`");
  }
}
|
||
// Guard used by the *Sync methods: the async counterpart must have
// completed synchronously, otherwise point the caller at the async API.
function assertDone(name, asyncName, complete) {
  if (complete) {
    return;
  }
  throw new Error(
    "`" + name + "` finished async. Use `" + asyncName + "` instead"
  );
}
|
||
// Coerce `value2` into a VFile; values that already quack like one are
// passed through untouched.
function vfile(value2) {
  if (looksLikeAVFile(value2)) {
    return value2;
  }
  return new VFile(value2);
}
|
||
// Duck-type check: an object with both `message` and `messages` members
// (own or inherited) is treated as a VFile.
function looksLikeAVFile(value2) {
  if (!value2 || typeof value2 !== "object") {
    return false;
  }
  return "message" in value2 && "messages" in value2;
}
|
||
// A compile "value" is serialized output: a string or a Uint8Array-like.
function looksLikeAValue(value2) {
  if (typeof value2 === "string") {
    return true;
  }
  return isUint8Array2(value2);
}
|
||
// Duck-type check for Uint8Array-likes: any object exposing both
// `byteLength` and `byteOffset` (own or inherited).
function isUint8Array2(value2) {
  if (!value2 || typeof value2 !== "object") {
    return false;
  }
  return "byteLength" in value2 && "byteOffset" in value2;
}
|
||
|
||
// node_modules/remark/index.js
|
||
// A frozen markdown processor: parses with remark-parse and serializes
// with remark-stringify.
var remark = unified().use(remarkParse).use(remarkStringify).freeze();
|
||
|
||
// index.js
|
||
var import_fs2 = require("fs");
|
||
|
||
// node_modules/fault/index.js
|
||
var import_format = __toESM(require_format(), 1);
|
||
// `fault(format, ...values)` creates a plain Error with a printf-style
// message; the attached properties create the corresponding built-in
// error subtypes (e.g. `fault.type(...)` -> TypeError).
var fault = Object.assign(create(Error), {
  eval: create(EvalError),
  range: create(RangeError),
  reference: create(ReferenceError),
  syntax: create(SyntaxError),
  type: create(TypeError),
  uri: create(URIError)
});
|
||
// Build a factory that constructs `Constructor` errors from a printf-style
// format string (interpolation via the bundled `format` package).
function create(Constructor) {
  FormattedError.displayName = Constructor.displayName || Constructor.name;
  return FormattedError;
  function FormattedError(format, ...values) {
    // Falsy formats (e.g. undefined) skip formatting and pass through.
    const reason = format ? (0, import_format.default)(format, ...values) : format;
    return new Constructor(reason);
  }
}
|
||
|
||
// node_modules/micromark-extension-frontmatter/lib/to-matters.js
|
||
// Cached own-property check used when validating matter options.
var own6 = {}.hasOwnProperty;
// Fence marker characters for the built-in frontmatter presets; `fence()`
// repeats the marker three times to form the fence line (e.g. "---").
var markers = {
  yaml: "-",
  toml: "+"
};
|
||
// Normalize frontmatter `options` (preset name, matter object, or an array
// of either; default "yaml") into an array of validated matter objects.
function toMatters(options) {
  let presetsOrMatters;
  if (Array.isArray(options)) {
    presetsOrMatters = options;
  } else if (options) {
    presetsOrMatters = [options];
  } else {
    presetsOrMatters = ["yaml"];
  }
  const result = [];
  for (let index2 = 0; index2 < presetsOrMatters.length; index2++) {
    result[index2] = matter(presetsOrMatters[index2]);
  }
  return result;
}
|
||
// Validate a single matter option. Strings must name a known preset
// ("yaml"/"toml") and expand to `{type, marker}`; objects must carry a
// `type` and either a `marker` or a `fence`.
function matter(option) {
  if (typeof option === "string") {
    if (!own6.call(markers, option)) {
      throw fault("Missing matter definition for `%s`", option);
    }
    // A preset expansion always satisfies the checks below.
    return {
      type: option,
      marker: markers[option]
    };
  }
  if (typeof option !== "object") {
    throw fault("Expected matter to be an object, not `%j`", option);
  }
  if (!own6.call(option, "type")) {
    throw fault("Missing `type` in matter `%j`", option);
  }
  if (!own6.call(option, "fence") && !own6.call(option, "marker")) {
    throw fault("Missing `marker` or `fence` in matter `%j`", option);
  }
  return option;
}
|
||
|
||
// node_modules/micromark-extension-frontmatter/lib/syntax.js
|
||
// micromark syntax extension: register one flow construct per configured
// matter, keyed by the first character code of its opening fence.
function frontmatter(options) {
  const flow3 = {};
  for (const matter2 of toMatters(options)) {
    const code2 = fence(matter2, "open").charCodeAt(0);
    const construct = createConstruct(matter2);
    const existing = flow3[code2];
    if (Array.isArray(existing)) {
      // Several matters may share a first character; collect them.
      existing.push(construct);
    } else {
      flow3[code2] = [construct];
    }
  }
  return { flow: flow3 };
}
|
||
// Build the micromark construct (tokenizer state machine) for one matter
// type. Token names are derived from the matter's `type`
// (e.g. "yaml" -> "yamlFence", "yamlFenceSequence", "yamlValue").
// NOTE(review): `buffer`/`bufferIndex` are shared between the opening and
// closing tokenizers of this construct instance.
function createConstruct(matter2) {
  const anywhere = matter2.anywhere;
  const frontmatterType = (
    /** @type {TokenType} */
    matter2.type
  );
  const fenceType = (
    /** @type {TokenType} */
    frontmatterType + "Fence"
  );
  const sequenceType = (
    /** @type {TokenType} */
    fenceType + "Sequence"
  );
  const valueType = (
    /** @type {TokenType} */
    frontmatterType + "Value"
  );
  // Partial construct used to probe each line for the closing fence.
  const closingFenceConstruct = {
    tokenize: tokenizeClosingFence,
    partial: true
  };
  // Fence text currently being matched and the match position within it.
  let buffer;
  let bufferIndex = 0;
  return {
    tokenize: tokenizeFrontmatter,
    concrete: true
  };
  // Main tokenizer: opening fence, content lines, closing fence.
  function tokenizeFrontmatter(effects, ok3, nok) {
    const self = this;
    return start;
    function start(code2) {
      const position2 = self.now();
      if (
        // Indent not allowed.
        position2.column === 1 && // Normally, only allowed in first line.
        (position2.line === 1 || anywhere)
      ) {
        buffer = fence(matter2, "open");
        bufferIndex = 0;
        if (code2 === buffer.charCodeAt(bufferIndex)) {
          effects.enter(frontmatterType);
          effects.enter(fenceType);
          effects.enter(sequenceType);
          return openSequence(code2);
        }
      }
      return nok(code2);
    }
    // Match the opening fence character by character.
    function openSequence(code2) {
      if (bufferIndex === buffer.length) {
        effects.exit(sequenceType);
        if (markdownSpace(code2)) {
          effects.enter("whitespace");
          return openSequenceWhitespace(code2);
        }
        return openAfter(code2);
      }
      if (code2 === buffer.charCodeAt(bufferIndex++)) {
        effects.consume(code2);
        return openSequence;
      }
      return nok(code2);
    }
    // Trailing spaces/tabs after the opening fence.
    function openSequenceWhitespace(code2) {
      if (markdownSpace(code2)) {
        effects.consume(code2);
        return openSequenceWhitespace;
      }
      effects.exit("whitespace");
      return openAfter(code2);
    }
    // The opening fence must be followed by a line ending; switch the
    // shared buffer to the closing fence before scanning content.
    function openAfter(code2) {
      if (markdownLineEnding(code2)) {
        effects.exit(fenceType);
        effects.enter("lineEnding");
        effects.consume(code2);
        effects.exit("lineEnding");
        buffer = fence(matter2, "close");
        bufferIndex = 0;
        return effects.attempt(closingFenceConstruct, after, contentStart);
      }
      return nok(code2);
    }
    // Start of a content line (reached when it is not the closing fence).
    function contentStart(code2) {
      if (code2 === null || markdownLineEnding(code2)) {
        return contentEnd(code2);
      }
      effects.enter(valueType);
      return contentInside(code2);
    }
    function contentInside(code2) {
      if (code2 === null || markdownLineEnding(code2)) {
        effects.exit(valueType);
        return contentEnd(code2);
      }
      effects.consume(code2);
      return contentInside;
    }
    // EOF before the closing fence is a non-match (frontmatter must close).
    function contentEnd(code2) {
      if (code2 === null) {
        return nok(code2);
      }
      effects.enter("lineEnding");
      effects.consume(code2);
      effects.exit("lineEnding");
      return effects.attempt(closingFenceConstruct, after, contentStart);
    }
    function after(code2) {
      effects.exit(frontmatterType);
      return ok3(code2);
    }
  }
  // Partial tokenizer: does the current line consist of the closing fence
  // (plus optional trailing whitespace)?
  function tokenizeClosingFence(effects, ok3, nok) {
    let bufferIndex2 = 0;
    return closeStart;
    function closeStart(code2) {
      if (code2 === buffer.charCodeAt(bufferIndex2)) {
        effects.enter(fenceType);
        effects.enter(sequenceType);
        return closeSequence(code2);
      }
      return nok(code2);
    }
    function closeSequence(code2) {
      if (bufferIndex2 === buffer.length) {
        effects.exit(sequenceType);
        if (markdownSpace(code2)) {
          effects.enter("whitespace");
          return closeSequenceWhitespace(code2);
        }
        return closeAfter(code2);
      }
      if (code2 === buffer.charCodeAt(bufferIndex2++)) {
        effects.consume(code2);
        return closeSequence;
      }
      return nok(code2);
    }
    function closeSequenceWhitespace(code2) {
      if (markdownSpace(code2)) {
        effects.consume(code2);
        return closeSequenceWhitespace;
      }
      effects.exit("whitespace");
      return closeAfter(code2);
    }
    // The closing fence must end the line (or the file).
    function closeAfter(code2) {
      if (code2 === null || markdownLineEnding(code2)) {
        effects.exit(fenceType);
        return ok3(code2);
      }
      return nok(code2);
    }
  }
}
|
||
// Resolve the opening/closing fence text for a matter: a `marker`
// character is repeated three times; otherwise the explicit `fence` is used.
function fence(matter2, prop) {
  if (matter2.marker) {
    return pick(matter2.marker, prop).repeat(3);
  }
  // @ts-expect-error: They’re mutually exclusive.
  return pick(matter2.fence, prop);
}
|
||
// A marker/fence schema is either a single string (used for both sides)
// or an `{open, close}` pair; select the side named by `prop`.
function pick(schema, prop) {
  if (typeof schema === "string") {
    return schema;
  }
  return schema[prop];
}
|
||
|
||
// node_modules/escape-string-regexp/index.js
|
||
// Escape regex metacharacters in `string3` so it matches literally.
// `-` becomes `\x2d` rather than `\-` to stay valid in Unicode-mode
// character classes.
function escapeStringRegexp(string3) {
  if (typeof string3 !== "string") {
    throw new TypeError("Expected a string");
  }
  const escaped = string3.replace(/[|\\{}()[\]^$+*?.]/g, "\\$&");
  return escaped.replace(/-/g, "\\x2d");
}
|
||
|
||
// node_modules/mdast-util-frontmatter/lib/index.js
|
||
// mdast-util-from-markdown extension: register enter/exit handlers for
// every configured matter type and its fenced value token.
function frontmatterFromMarkdown(options) {
  const enter = {};
  const exit2 = {};
  for (const matter2 of toMatters(options)) {
    enter[matter2.type] = opener(matter2);
    exit2[matter2.type] = close;
    exit2[matter2.type + "Value"] = value;
  }
  return { enter, exit: exit2 };
}
|
||
// Build the `enter` handler for one matter type: open a node with an
// empty value and start buffering its raw fenced content.
function opener(matter2) {
  return open;
  function open(token) {
    this.enter({ type: matter2.type, value: "" }, token);
    this.buffer();
  }
}
|
||
// `exit` handler: resume the buffered raw text, strip a single leading
// and trailing line ending, and store it as the node's value.
function close(token) {
  const data = this.resume();
  const node2 = this.stack[this.stack.length - 1];
  ok2("value" in node2);
  this.exit(token);
  node2.value = data.replace(/^(\r?\n|\r)|(\r?\n|\r)$/g, "");
}
|
||
// Handler for the fenced content token: delegate to the generic `data`
// enter/exit handlers so the text is captured verbatim.
function value(token) {
  this.config.enter.data.call(this, token);
  this.config.exit.data.call(this, token);
}
|
||
// mdast-util-to-markdown extension: a serializer per matter type, plus
// `unsafe` patterns so regular text can't accidentally start a fence at a
// line break.
function frontmatterToMarkdown(options) {
  const handlers = {};
  const unsafe2 = [];
  for (const matter2 of toMatters(options)) {
    handlers[matter2.type] = handler(matter2);
    const open = fence2(matter2, "open");
    unsafe2.push({
      atBreak: true,
      character: open.charAt(0),
      after: escapeStringRegexp(open.charAt(1))
    });
  }
  return { unsafe: unsafe2, handlers };
}
|
||
// Build the to-markdown serializer for one matter type:
// open fence, optional raw value, close fence.
function handler(matter2) {
  const open = fence2(matter2, "open");
  const close2 = fence2(matter2, "close");
  return handle2;
  function handle2(node2) {
    const body = node2.value ? "\n" + node2.value : "";
    return open + body + "\n" + close2;
  }
}
|
||
// Resolve the opening/closing fence text for a matter (serializer copy of
// `fence`): a `marker` character is tripled, otherwise use `fence`.
function fence2(matter2, prop) {
  if (matter2.marker) {
    return pick2(matter2.marker, prop).repeat(3);
  }
  // @ts-expect-error: They’re mutually exclusive.
  return pick2(matter2.fence, prop);
}
|
||
// Select one side of a marker/fence schema: strings serve both sides,
// objects carry explicit `open`/`close` entries.
function pick2(schema, prop) {
  if (typeof schema === "string") {
    return schema;
  }
  return schema[prop];
}
|
||
|
||
// node_modules/remark-frontmatter/lib/index.js
|
||
// Default when no options are given: YAML frontmatter only.
var emptyOptions2 = "yaml";
// remark plugin: wire frontmatter support into the processor's micromark
// (syntax), from-markdown, and to-markdown extension lists.
function remarkFrontmatter(options) {
  const self = (
    /** @type {Processor} */
    this
  );
  const settings = options || emptyOptions2;
  const data = self.data();
  // Create the extension arrays on first use.
  const micromarkExtensions = data.micromarkExtensions || (data.micromarkExtensions = []);
  const fromMarkdownExtensions = data.fromMarkdownExtensions || (data.fromMarkdownExtensions = []);
  const toMarkdownExtensions = data.toMarkdownExtensions || (data.toMarkdownExtensions = []);
  micromarkExtensions.push(frontmatter(settings));
  fromMarkdownExtensions.push(frontmatterFromMarkdown(settings));
  toMarkdownExtensions.push(frontmatterToMarkdown(settings));
}
|
||
|
||
// node_modules/minimatch/dist/mjs/index.js
|
||
var import_brace_expansion = __toESM(require_brace_expansion(), 1);
|
||
|
||
// node_modules/minimatch/dist/mjs/assert-valid-pattern.js
|
||
// Glob patterns longer than 64 KiB are rejected outright.
var MAX_PATTERN_LENGTH = 1024 * 64;
// Guard: a minimatch pattern must be a string of bounded length.
var assertValidPattern = (pattern) => {
  const isString = typeof pattern === "string";
  if (!isString) {
    throw new TypeError("invalid pattern");
  }
  if (pattern.length > MAX_PATTERN_LENGTH) {
    throw new TypeError("pattern is too long");
  }
};
|
||
|
||
// node_modules/minimatch/dist/mjs/brace-expressions.js
|
||
// POSIX character classes mapped to [regexSource, needsUnicodeFlag,
// negated?]. A truthy third element means the class is expressed as a
// negation and its source is collected into `negs` rather than `ranges`
// by `parseClass` (e.g. [:graph:] = NOT separators/control).
var posixClasses = {
  "[:alnum:]": ["\\p{L}\\p{Nl}\\p{Nd}", true],
  "[:alpha:]": ["\\p{L}\\p{Nl}", true],
  "[:ascii:]": ["\\x00-\\x7f", false],
  "[:blank:]": ["\\p{Zs}\\t", true],
  "[:cntrl:]": ["\\p{Cc}", true],
  "[:digit:]": ["\\p{Nd}", true],
  "[:graph:]": ["\\p{Z}\\p{C}", true, true],
  "[:lower:]": ["\\p{Ll}", true],
  "[:print:]": ["\\p{C}", true],
  "[:punct:]": ["\\p{P}", true],
  "[:space:]": ["\\p{Z}\\t\\r\\n\\v\\f", true],
  "[:upper:]": ["\\p{Lu}", true],
  "[:word:]": ["\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}", true],
  "[:xdigit:]": ["A-Fa-f0-9", false]
};
|
||
// Escape characters that are special inside a regex character class.
var braceEscape = (s) => {
  return s.replace(/[[\]\\-]/g, "\\$&");
};
// Escape all regex metacharacters for use outside a character class.
var regexpEscape = (s) => {
  return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
};
// Concatenate collected range fragments into character-class body text.
var rangesToString = (ranges) => {
  return ranges.join("");
};
|
||
// Parse a glob bracket expression (POSIX-class aware) starting at
// `position2` in `glob2`. Returns a tuple:
//   [regexSource, needsUnicodeFlag, charactersConsumed, hasMagic].
// An unterminated class yields ["", false, 0, false] so the caller can
// treat the "[" literally.
var parseClass = (glob2, position2) => {
  const pos = position2;
  if (glob2.charAt(pos) !== "[") {
    throw new Error("not in a brace expression");
  }
  const ranges = [];
  // Sources contributed by negated POSIX classes (e.g. [:graph:]).
  const negs = [];
  let i = pos + 1;
  let sawStart = false;
  let uflag = false;
  let escaping = false;
  let negate = false;
  let endPos = pos;
  // Pending left endpoint of an a-z style range.
  let rangeStart = "";
  WHILE:
    while (i < glob2.length) {
      const c = glob2.charAt(i);
      // "!" or "^" immediately after "[" negates the class.
      if ((c === "!" || c === "^") && i === pos + 1) {
        negate = true;
        i++;
        continue;
      }
      // Unescaped "]" after at least one member closes the class.
      if (c === "]" && sawStart && !escaping) {
        endPos = i + 1;
        break;
      }
      sawStart = true;
      if (c === "\\") {
        if (!escaping) {
          escaping = true;
          i++;
          continue;
        }
      }
      // Try to match a POSIX class like [:alpha:] at this "[".
      if (c === "[" && !escaping) {
        for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
          if (glob2.startsWith(cls, i)) {
            // A class cannot be a range endpoint; bail out as
            // "matches nothing" ("$." never matches) with magic set.
            if (rangeStart) {
              return ["$.", false, glob2.length - pos, true];
            }
            i += cls.length;
            if (neg)
              negs.push(unip);
            else
              ranges.push(unip);
            uflag = uflag || u;
            continue WHILE;
          }
        }
      }
      escaping = false;
      if (rangeStart) {
        // Complete a pending range; inverted ranges (z-a) are dropped,
        // degenerate ones (a-a) collapse to a single character.
        if (c > rangeStart) {
          ranges.push(braceEscape(rangeStart) + "-" + braceEscape(c));
        } else if (c === rangeStart) {
          ranges.push(braceEscape(c));
        }
        rangeStart = "";
        i++;
        continue;
      }
      // "c-]" means literal "c" and "-", not a range.
      if (glob2.startsWith("-]", i + 1)) {
        ranges.push(braceEscape(c + "-"));
        i += 2;
        continue;
      }
      // "c-" starts a range; remember the left endpoint.
      if (glob2.startsWith("-", i + 1)) {
        rangeStart = c;
        i += 2;
        continue;
      }
      ranges.push(braceEscape(c));
      i++;
    }
  // Never saw the closing "]": not a valid class, consume nothing.
  if (endPos < i) {
    return ["", false, 0, false];
  }
  // "[]" with no members can never match.
  if (!ranges.length && !negs.length) {
    return ["$.", false, glob2.length - pos, true];
  }
  // A single literal character needs no class (and no magic).
  if (negs.length === 0 && ranges.length === 1 && /^\\?.$/.test(ranges[0]) && !negate) {
    const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
    return [regexpEscape(r), false, endPos - pos, false];
  }
  // Positive members keep the requested polarity; negated POSIX classes
  // get the opposite polarity, and both are OR-ed together when present.
  const sranges = "[" + (negate ? "^" : "") + rangesToString(ranges) + "]";
  const snegs = "[" + (negate ? "" : "^") + rangesToString(negs) + "]";
  const comb = ranges.length && negs.length ? "(" + sranges + "|" + snegs + ")" : ranges.length ? sranges : snegs;
  return [comb, uflag, endPos - pos, true];
};
|
||
|
||
// node_modules/minimatch/dist/mjs/unescape.js
|
||
// Remove glob escaping from `s`. In Windows mode only `[c]` wrapping is
// unwrapped (backslash is a path separator there); otherwise both `[c]`
// wrapping and `\`-escapes are removed.
var unescape = (s, { windowsPathsNoEscape = false } = {}) => {
  if (windowsPathsNoEscape) {
    return s.replace(/\[([^\/\\])\]/g, "$1");
  }
  return s.replace(/((?!\\).|^)\[([^\/\\])\]/g, "$1$2").replace(/\\([^\/])/g, "$1");
};
|
||
|
||
// node_modules/minimatch/dist/mjs/ast.js
|
||
// Extglob type characters: !(..), ?(..), +(..), *(..), @(..).
var types = /* @__PURE__ */ new Set(["!", "?", "+", "*", "@"]);
var isExtglobType = (c) => types.has(c);
// Regex fragment: forbid "." and ".." path segments at the match start.
var startNoTraversal = "(?!(?:^|/)\\.\\.?(?:$|/))";
// Regex fragment: forbid a leading dot.
var startNoDot = "(?!\\.)";
// Pattern-start characters that need the dot-handling prefix above.
var addPatternStart = /* @__PURE__ */ new Set(["[", "."]);
var justDots = /* @__PURE__ */ new Set(["..", "."]);
// Characters that mark a pattern as "magic" (not a plain string).
var reSpecials = new Set("().*{}+?[]^$\\!");
// Escape regex metacharacters (duplicate of the brace-expressions helper).
var regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
// Building blocks for glob-to-regex translation:
var qmark = "[^/]";              // any single non-slash character ("?")
var star = qmark + "*?";         // lazy zero-or-more non-slash ("*")
var starNoEmpty = qmark + "+?";  // lazy one-or-more non-slash
||
var AST = class _AST {
|
||
type;
|
||
#root;
|
||
#hasMagic;
|
||
#uflag = false;
|
||
#parts = [];
|
||
#parent;
|
||
#parentIndex;
|
||
#negs;
|
||
#filledNegs = false;
|
||
#options;
|
||
#toString;
|
||
// set to true if it's an extglob with no children
|
||
// (which really means one child of '')
|
||
#emptyExt = false;
|
||
constructor(type, parent, options = {}) {
|
||
this.type = type;
|
||
if (type)
|
||
this.#hasMagic = true;
|
||
this.#parent = parent;
|
||
this.#root = this.#parent ? this.#parent.#root : this;
|
||
this.#options = this.#root === this ? options : this.#root.#options;
|
||
this.#negs = this.#root === this ? [] : this.#root.#negs;
|
||
if (type === "!" && !this.#root.#filledNegs)
|
||
this.#negs.push(this);
|
||
this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
|
||
}
|
||
get hasMagic() {
|
||
if (this.#hasMagic !== void 0)
|
||
return this.#hasMagic;
|
||
for (const p of this.#parts) {
|
||
if (typeof p === "string")
|
||
continue;
|
||
if (p.type || p.hasMagic)
|
||
return this.#hasMagic = true;
|
||
}
|
||
return this.#hasMagic;
|
||
}
|
||
// reconstructs the pattern
|
||
toString() {
|
||
if (this.#toString !== void 0)
|
||
return this.#toString;
|
||
if (!this.type) {
|
||
return this.#toString = this.#parts.map((p) => String(p)).join("");
|
||
} else {
|
||
return this.#toString = this.type + "(" + this.#parts.map((p) => String(p)).join("|") + ")";
|
||
}
|
||
}
|
||
#fillNegs() {
|
||
if (this !== this.#root)
|
||
throw new Error("should only call on root");
|
||
if (this.#filledNegs)
|
||
return this;
|
||
this.toString();
|
||
this.#filledNegs = true;
|
||
let n;
|
||
while (n = this.#negs.pop()) {
|
||
if (n.type !== "!")
|
||
continue;
|
||
let p = n;
|
||
let pp = p.#parent;
|
||
while (pp) {
|
||
for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
|
||
for (const part of n.#parts) {
|
||
if (typeof part === "string") {
|
||
throw new Error("string part in extglob AST??");
|
||
}
|
||
part.copyIn(pp.#parts[i]);
|
||
}
|
||
}
|
||
p = pp;
|
||
pp = p.#parent;
|
||
}
|
||
}
|
||
return this;
|
||
}
|
||
push(...parts) {
|
||
for (const p of parts) {
|
||
if (p === "")
|
||
continue;
|
||
if (typeof p !== "string" && !(p instanceof _AST && p.#parent === this)) {
|
||
throw new Error("invalid part: " + p);
|
||
}
|
||
this.#parts.push(p);
|
||
}
|
||
}
|
||
toJSON() {
|
||
const ret = this.type === null ? this.#parts.slice().map((p) => typeof p === "string" ? p : p.toJSON()) : [this.type, ...this.#parts.map((p) => p.toJSON())];
|
||
if (this.isStart() && !this.type)
|
||
ret.unshift([]);
|
||
if (this.isEnd() && (this === this.#root || this.#root.#filledNegs && this.#parent?.type === "!")) {
|
||
ret.push({});
|
||
}
|
||
return ret;
|
||
}
|
||
isStart() {
|
||
if (this.#root === this)
|
||
return true;
|
||
if (!this.#parent?.isStart())
|
||
return false;
|
||
if (this.#parentIndex === 0)
|
||
return true;
|
||
const p = this.#parent;
|
||
for (let i = 0; i < this.#parentIndex; i++) {
|
||
const pp = p.#parts[i];
|
||
if (!(pp instanceof _AST && pp.type === "!")) {
|
||
return false;
|
||
}
|
||
}
|
||
return true;
|
||
}
|
||
isEnd() {
|
||
if (this.#root === this)
|
||
return true;
|
||
if (this.#parent?.type === "!")
|
||
return true;
|
||
if (!this.#parent?.isEnd())
|
||
return false;
|
||
if (!this.type)
|
||
return this.#parent?.isEnd();
|
||
const pl = this.#parent ? this.#parent.#parts.length : 0;
|
||
return this.#parentIndex === pl - 1;
|
||
}
|
||
copyIn(part) {
|
||
if (typeof part === "string")
|
||
this.push(part);
|
||
else
|
||
this.push(part.clone(this));
|
||
}
|
||
clone(parent) {
|
||
const c = new _AST(this.type, parent);
|
||
for (const p of this.#parts) {
|
||
c.copyIn(p);
|
||
}
|
||
return c;
|
||
}
|
||
static #parseAST(str, ast, pos, opt) {
|
||
let escaping = false;
|
||
let inBrace = false;
|
||
let braceStart = -1;
|
||
let braceNeg = false;
|
||
if (ast.type === null) {
|
||
let i2 = pos;
|
||
let acc2 = "";
|
||
while (i2 < str.length) {
|
||
const c = str.charAt(i2++);
|
||
if (escaping || c === "\\") {
|
||
escaping = !escaping;
|
||
acc2 += c;
|
||
continue;
|
||
}
|
||
if (inBrace) {
|
||
if (i2 === braceStart + 1) {
|
||
if (c === "^" || c === "!") {
|
||
braceNeg = true;
|
||
}
|
||
} else if (c === "]" && !(i2 === braceStart + 2 && braceNeg)) {
|
||
inBrace = false;
|
||
}
|
||
acc2 += c;
|
||
continue;
|
||
} else if (c === "[") {
|
||
inBrace = true;
|
||
braceStart = i2;
|
||
braceNeg = false;
|
||
acc2 += c;
|
||
continue;
|
||
}
|
||
if (!opt.noext && isExtglobType(c) && str.charAt(i2) === "(") {
|
||
ast.push(acc2);
|
||
acc2 = "";
|
||
const ext2 = new _AST(c, ast);
|
||
i2 = _AST.#parseAST(str, ext2, i2, opt);
|
||
ast.push(ext2);
|
||
continue;
|
||
}
|
||
acc2 += c;
|
||
}
|
||
ast.push(acc2);
|
||
return i2;
|
||
}
|
||
let i = pos + 1;
|
||
let part = new _AST(null, ast);
|
||
const parts = [];
|
||
let acc = "";
|
||
while (i < str.length) {
|
||
const c = str.charAt(i++);
|
||
if (escaping || c === "\\") {
|
||
escaping = !escaping;
|
||
acc += c;
|
||
continue;
|
||
}
|
||
if (inBrace) {
|
||
if (i === braceStart + 1) {
|
||
if (c === "^" || c === "!") {
|
||
braceNeg = true;
|
||
}
|
||
} else if (c === "]" && !(i === braceStart + 2 && braceNeg)) {
|
||
inBrace = false;
|
||
}
|
||
acc += c;
|
||
continue;
|
||
} else if (c === "[") {
|
||
inBrace = true;
|
||
braceStart = i;
|
||
braceNeg = false;
|
||
acc += c;
|
||
continue;
|
||
}
|
||
if (isExtglobType(c) && str.charAt(i) === "(") {
|
||
part.push(acc);
|
||
acc = "";
|
||
const ext2 = new _AST(c, part);
|
||
part.push(ext2);
|
||
i = _AST.#parseAST(str, ext2, i, opt);
|
||
continue;
|
||
}
|
||
if (c === "|") {
|
||
part.push(acc);
|
||
acc = "";
|
||
parts.push(part);
|
||
part = new _AST(null, ast);
|
||
continue;
|
||
}
|
||
if (c === ")") {
|
||
if (acc === "" && ast.#parts.length === 0) {
|
||
ast.#emptyExt = true;
|
||
}
|
||
part.push(acc);
|
||
acc = "";
|
||
ast.push(...parts, part);
|
||
return i;
|
||
}
|
||
acc += c;
|
||
}
|
||
ast.type = null;
|
||
ast.#hasMagic = void 0;
|
||
ast.#parts = [str.substring(pos - 1)];
|
||
return i;
|
||
}
|
||
static fromGlob(pattern, options = {}) {
|
||
const ast = new _AST(null, void 0, options);
|
||
_AST.#parseAST(pattern, ast, 0, options);
|
||
return ast;
|
||
}
|
||
// returns the regular expression if there's magic, or the unescaped
|
||
// string if not.
|
||
toMMPattern() {
|
||
if (this !== this.#root)
|
||
return this.#root.toMMPattern();
|
||
const glob2 = this.toString();
|
||
const [re, body, hasMagic2, uflag] = this.toRegExpSource();
|
||
const anyMagic = hasMagic2 || this.#hasMagic || this.#options.nocase && !this.#options.nocaseMagicOnly && glob2.toUpperCase() !== glob2.toLowerCase();
|
||
if (!anyMagic) {
|
||
return body;
|
||
}
|
||
const flags = (this.#options.nocase ? "i" : "") + (uflag ? "u" : "");
|
||
return Object.assign(new RegExp(`^${re}$`, flags), {
|
||
_src: re,
|
||
_glob: glob2
|
||
});
|
||
}
|
||
// returns the string match, the regexp source, whether there's magic
|
||
// in the regexp (so a regular expression is required) and whether or
|
||
// not the uflag is needed for the regular expression (for posix classes)
|
||
// TODO: instead of injecting the start/end at this point, just return
|
||
// the BODY of the regexp, along with the start/end portions suitable
|
||
// for binding the start/end in either a joined full-path makeRe context
|
||
// (where we bind to (^|/), or a standalone matchPart context (where
|
||
// we bind to ^, and not /). Otherwise slashes get duped!
|
||
//
|
||
// In part-matching mode, the start is:
|
||
// - if not isStart: nothing
|
||
// - if traversal possible, but not allowed: ^(?!\.\.?$)
|
||
// - if dots allowed or not possible: ^
|
||
// - if dots possible and not allowed: ^(?!\.)
|
||
// end is:
|
||
// - if not isEnd(): nothing
|
||
// - else: $
|
||
//
|
||
// In full-path matching mode, we put the slash at the START of the
|
||
// pattern, so start is:
|
||
// - if first pattern: same as part-matching mode
|
||
// - if not isStart(): nothing
|
||
// - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
|
||
// - if dots allowed or not possible: /
|
||
// - if dots possible and not allowed: /(?!\.)
|
||
// end is:
|
||
// - if last pattern, same as part-matching mode
|
||
// - else nothing
|
||
//
|
||
// Always put the (?:$|/) on negated tails, though, because that has to be
|
||
// there to bind the end of the negated pattern portion, and it's easier to
|
||
// just stick it in now rather than try to inject it later in the middle of
|
||
// the pattern.
|
||
//
|
||
// We can just always return the same end, and leave it up to the caller
|
||
// to know whether it's going to be used joined or in parts.
|
||
// And, if the start is adjusted slightly, can do the same there:
|
||
// - if not isStart: nothing
|
||
// - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
|
||
// - if dots allowed or not possible: (?:/|^)
|
||
// - if dots possible and not allowed: (?:/|^)(?!\.)
|
||
//
|
||
// But it's better to have a simpler binding without a conditional, for
|
||
// performance, so probably better to return both start options.
|
||
//
|
||
// Then the caller just ignores the end if it's not the first pattern,
|
||
// and the start always gets applied.
|
||
//
|
||
// But that's always going to be $ if it's the ending pattern, or nothing,
|
||
// so the caller can just attach $ at the end of the pattern when building.
|
||
//
|
||
// So the todo is:
|
||
// - better detect what kind of start is needed
|
||
// - return both flavors of starting pattern
|
||
// - attach $ at the end of the pattern when creating the actual RegExp
|
||
//
|
||
// Ah, but wait, no, that all only applies to the root when the first pattern
|
||
// is not an extglob. If the first pattern IS an extglob, then we need all
|
||
// that dot prevention biz to live in the extglob portions, because eg
|
||
// +(*|.x*) can match .xy but not .yx.
|
||
//
|
||
// So, return the two flavors if it's #root and the first child is not an
|
||
// AST, otherwise leave it to the child AST to handle it, and there,
|
||
// use the (?:^|/) style of start binding.
|
||
//
|
||
// Even simplified further:
|
||
// - Since the start for a join is eg /(?!\.) and the start for a part
|
||
// is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
|
||
// or start or whatever) and prepend ^ or / at the Regexp construction.
|
||
toRegExpSource(allowDot) {
|
||
const dot = allowDot ?? !!this.#options.dot;
|
||
if (this.#root === this)
|
||
this.#fillNegs();
|
||
if (!this.type) {
|
||
const noEmpty = this.isStart() && this.isEnd();
|
||
const src = this.#parts.map((p) => {
|
||
const [re, _, hasMagic2, uflag] = typeof p === "string" ? _AST.#parseGlob(p, this.#hasMagic, noEmpty) : p.toRegExpSource(allowDot);
|
||
this.#hasMagic = this.#hasMagic || hasMagic2;
|
||
this.#uflag = this.#uflag || uflag;
|
||
return re;
|
||
}).join("");
|
||
let start2 = "";
|
||
if (this.isStart()) {
|
||
if (typeof this.#parts[0] === "string") {
|
||
const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
|
||
if (!dotTravAllowed) {
|
||
const aps = addPatternStart;
|
||
const needNoTrav = (
|
||
// dots are allowed, and the pattern starts with [ or .
|
||
dot && aps.has(src.charAt(0)) || // the pattern starts with \., and then [ or .
|
||
src.startsWith("\\.") && aps.has(src.charAt(2)) || // the pattern starts with \.\., and then [ or .
|
||
src.startsWith("\\.\\.") && aps.has(src.charAt(4))
|
||
);
|
||
const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
|
||
start2 = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : "";
|
||
}
|
||
}
|
||
}
|
||
let end = "";
|
||
if (this.isEnd() && this.#root.#filledNegs && this.#parent?.type === "!") {
|
||
end = "(?:$|\\/)";
|
||
}
|
||
const final2 = start2 + src + end;
|
||
return [
|
||
final2,
|
||
unescape(src),
|
||
this.#hasMagic = !!this.#hasMagic,
|
||
this.#uflag
|
||
];
|
||
}
|
||
const repeated = this.type === "*" || this.type === "+";
|
||
const start = this.type === "!" ? "(?:(?!(?:" : "(?:";
|
||
let body = this.#partsToRegExp(dot);
|
||
if (this.isStart() && this.isEnd() && !body && this.type !== "!") {
|
||
const s = this.toString();
|
||
this.#parts = [s];
|
||
this.type = null;
|
||
this.#hasMagic = void 0;
|
||
return [s, unescape(this.toString()), false, false];
|
||
}
|
||
let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot ? "" : this.#partsToRegExp(true);
|
||
if (bodyDotAllowed === body) {
|
||
bodyDotAllowed = "";
|
||
}
|
||
if (bodyDotAllowed) {
|
||
body = `(?:${body})(?:${bodyDotAllowed})*?`;
|
||
}
|
||
let final = "";
|
||
if (this.type === "!" && this.#emptyExt) {
|
||
final = (this.isStart() && !dot ? startNoDot : "") + starNoEmpty;
|
||
} else {
|
||
const close2 = this.type === "!" ? (
|
||
// !() must match something,but !(x) can match ''
|
||
"))" + (this.isStart() && !dot && !allowDot ? startNoDot : "") + star + ")"
|
||
) : this.type === "@" ? ")" : this.type === "?" ? ")?" : this.type === "+" && bodyDotAllowed ? ")" : this.type === "*" && bodyDotAllowed ? `)?` : `)${this.type}`;
|
||
final = start + body + close2;
|
||
}
|
||
return [
|
||
final,
|
||
unescape(body),
|
||
this.#hasMagic = !!this.#hasMagic,
|
||
this.#uflag
|
||
];
|
||
}
|
||
#partsToRegExp(dot) {
|
||
return this.#parts.map((p) => {
|
||
if (typeof p === "string") {
|
||
throw new Error("string type in extglob ast??");
|
||
}
|
||
const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
|
||
this.#uflag = this.#uflag || uflag;
|
||
return re;
|
||
}).filter((p) => !(this.isStart() && this.isEnd()) || !!p).join("|");
|
||
}
|
||
static #parseGlob(glob2, hasMagic2, noEmpty = false) {
|
||
let escaping = false;
|
||
let re = "";
|
||
let uflag = false;
|
||
for (let i = 0; i < glob2.length; i++) {
|
||
const c = glob2.charAt(i);
|
||
if (escaping) {
|
||
escaping = false;
|
||
re += (reSpecials.has(c) ? "\\" : "") + c;
|
||
continue;
|
||
}
|
||
if (c === "\\") {
|
||
if (i === glob2.length - 1) {
|
||
re += "\\\\";
|
||
} else {
|
||
escaping = true;
|
||
}
|
||
continue;
|
||
}
|
||
if (c === "[") {
|
||
const [src, needUflag, consumed, magic] = parseClass(glob2, i);
|
||
if (consumed) {
|
||
re += src;
|
||
uflag = uflag || needUflag;
|
||
i += consumed - 1;
|
||
hasMagic2 = hasMagic2 || magic;
|
||
continue;
|
||
}
|
||
}
|
||
if (c === "*") {
|
||
if (noEmpty && glob2 === "*")
|
||
re += starNoEmpty;
|
||
else
|
||
re += star;
|
||
hasMagic2 = true;
|
||
continue;
|
||
}
|
||
if (c === "?") {
|
||
re += qmark;
|
||
hasMagic2 = true;
|
||
continue;
|
||
}
|
||
re += regExpEscape(c);
|
||
}
|
||
return [re, unescape(glob2), !!hasMagic2, uflag];
|
||
}
|
||
};
|
||
|
||
// node_modules/minimatch/dist/mjs/escape.js
|
||
var escape = (s, { windowsPathsNoEscape = false } = {}) => {
|
||
return windowsPathsNoEscape ? s.replace(/[?*()[\]]/g, "[$&]") : s.replace(/[?*()[\]\\]/g, "\\$&");
|
||
};
|
||
|
||
// node_modules/minimatch/dist/mjs/index.js
|
||
var minimatch = (p, pattern, options = {}) => {
|
||
assertValidPattern(pattern);
|
||
if (!options.nocomment && pattern.charAt(0) === "#") {
|
||
return false;
|
||
}
|
||
return new Minimatch(pattern, options).match(p);
|
||
};
|
||
var starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
|
||
var starDotExtTest = (ext2) => (f) => !f.startsWith(".") && f.endsWith(ext2);
|
||
var starDotExtTestDot = (ext2) => (f) => f.endsWith(ext2);
|
||
var starDotExtTestNocase = (ext2) => {
|
||
ext2 = ext2.toLowerCase();
|
||
return (f) => !f.startsWith(".") && f.toLowerCase().endsWith(ext2);
|
||
};
|
||
var starDotExtTestNocaseDot = (ext2) => {
|
||
ext2 = ext2.toLowerCase();
|
||
return (f) => f.toLowerCase().endsWith(ext2);
|
||
};
|
||
var starDotStarRE = /^\*+\.\*+$/;
|
||
var starDotStarTest = (f) => !f.startsWith(".") && f.includes(".");
|
||
var starDotStarTestDot = (f) => f !== "." && f !== ".." && f.includes(".");
|
||
var dotStarRE = /^\.\*+$/;
|
||
var dotStarTest = (f) => f !== "." && f !== ".." && f.startsWith(".");
|
||
var starRE = /^\*+$/;
|
||
var starTest = (f) => f.length !== 0 && !f.startsWith(".");
|
||
var starTestDot = (f) => f.length !== 0 && f !== "." && f !== "..";
|
||
var qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
|
||
var qmarksTestNocase = ([$0, ext2 = ""]) => {
|
||
const noext = qmarksTestNoExt([$0]);
|
||
if (!ext2)
|
||
return noext;
|
||
ext2 = ext2.toLowerCase();
|
||
return (f) => noext(f) && f.toLowerCase().endsWith(ext2);
|
||
};
|
||
var qmarksTestNocaseDot = ([$0, ext2 = ""]) => {
|
||
const noext = qmarksTestNoExtDot([$0]);
|
||
if (!ext2)
|
||
return noext;
|
||
ext2 = ext2.toLowerCase();
|
||
return (f) => noext(f) && f.toLowerCase().endsWith(ext2);
|
||
};
|
||
var qmarksTestDot = ([$0, ext2 = ""]) => {
|
||
const noext = qmarksTestNoExtDot([$0]);
|
||
return !ext2 ? noext : (f) => noext(f) && f.endsWith(ext2);
|
||
};
|
||
var qmarksTest = ([$0, ext2 = ""]) => {
|
||
const noext = qmarksTestNoExt([$0]);
|
||
return !ext2 ? noext : (f) => noext(f) && f.endsWith(ext2);
|
||
};
|
||
var qmarksTestNoExt = ([$0]) => {
|
||
const len = $0.length;
|
||
return (f) => f.length === len && !f.startsWith(".");
|
||
};
|
||
var qmarksTestNoExtDot = ([$0]) => {
|
||
const len = $0.length;
|
||
return (f) => f.length === len && f !== "." && f !== "..";
|
||
};
|
||
var defaultPlatform = typeof process === "object" && process ? typeof process.env === "object" && process.env && process.env.__MINIMATCH_TESTING_PLATFORM__ || process.platform : "posix";
|
||
var path = {
|
||
win32: { sep: "\\" },
|
||
posix: { sep: "/" }
|
||
};
|
||
var sep = defaultPlatform === "win32" ? path.win32.sep : path.posix.sep;
|
||
minimatch.sep = sep;
|
||
var GLOBSTAR = Symbol("globstar **");
|
||
minimatch.GLOBSTAR = GLOBSTAR;
|
||
var qmark2 = "[^/]";
|
||
var star2 = qmark2 + "*?";
|
||
var twoStarDot = "(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?";
|
||
var twoStarNoDot = "(?:(?!(?:\\/|^)\\.).)*?";
|
||
var filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
|
||
minimatch.filter = filter;
|
||
var ext = (a, b = {}) => Object.assign({}, a, b);
|
||
var defaults = (def) => {
|
||
if (!def || typeof def !== "object" || !Object.keys(def).length) {
|
||
return minimatch;
|
||
}
|
||
const orig = minimatch;
|
||
const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
|
||
return Object.assign(m, {
|
||
Minimatch: class Minimatch extends orig.Minimatch {
|
||
constructor(pattern, options = {}) {
|
||
super(pattern, ext(def, options));
|
||
}
|
||
static defaults(options) {
|
||
return orig.defaults(ext(def, options)).Minimatch;
|
||
}
|
||
},
|
||
AST: class AST extends orig.AST {
|
||
/* c8 ignore start */
|
||
constructor(type, parent, options = {}) {
|
||
super(type, parent, ext(def, options));
|
||
}
|
||
/* c8 ignore stop */
|
||
static fromGlob(pattern, options = {}) {
|
||
return orig.AST.fromGlob(pattern, ext(def, options));
|
||
}
|
||
},
|
||
unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
|
||
escape: (s, options = {}) => orig.escape(s, ext(def, options)),
|
||
filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
|
||
defaults: (options) => orig.defaults(ext(def, options)),
|
||
makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
|
||
braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
|
||
match: (list4, pattern, options = {}) => orig.match(list4, pattern, ext(def, options)),
|
||
sep: orig.sep,
|
||
GLOBSTAR
|
||
});
|
||
};
|
||
minimatch.defaults = defaults;
|
||
var braceExpand = (pattern, options = {}) => {
|
||
assertValidPattern(pattern);
|
||
if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
|
||
return [pattern];
|
||
}
|
||
return (0, import_brace_expansion.default)(pattern);
|
||
};
|
||
minimatch.braceExpand = braceExpand;
|
||
var makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
|
||
minimatch.makeRe = makeRe;
|
||
var match = (list4, pattern, options = {}) => {
|
||
const mm = new Minimatch(pattern, options);
|
||
list4 = list4.filter((f) => mm.match(f));
|
||
if (mm.options.nonull && !list4.length) {
|
||
list4.push(pattern);
|
||
}
|
||
return list4;
|
||
};
|
||
minimatch.match = match;
|
||
var globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
|
||
var regExpEscape2 = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
|
||
var Minimatch = class {
|
||
options;
|
||
set;
|
||
pattern;
|
||
windowsPathsNoEscape;
|
||
nonegate;
|
||
negate;
|
||
comment;
|
||
empty;
|
||
preserveMultipleSlashes;
|
||
partial;
|
||
globSet;
|
||
globParts;
|
||
nocase;
|
||
isWindows;
|
||
platform;
|
||
windowsNoMagicRoot;
|
||
regexp;
|
||
constructor(pattern, options = {}) {
|
||
assertValidPattern(pattern);
|
||
options = options || {};
|
||
this.options = options;
|
||
this.pattern = pattern;
|
||
this.platform = options.platform || defaultPlatform;
|
||
this.isWindows = this.platform === "win32";
|
||
this.windowsPathsNoEscape = !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
|
||
if (this.windowsPathsNoEscape) {
|
||
this.pattern = this.pattern.replace(/\\/g, "/");
|
||
}
|
||
this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
|
||
this.regexp = null;
|
||
this.negate = false;
|
||
this.nonegate = !!options.nonegate;
|
||
this.comment = false;
|
||
this.empty = false;
|
||
this.partial = !!options.partial;
|
||
this.nocase = !!this.options.nocase;
|
||
this.windowsNoMagicRoot = options.windowsNoMagicRoot !== void 0 ? options.windowsNoMagicRoot : !!(this.isWindows && this.nocase);
|
||
this.globSet = [];
|
||
this.globParts = [];
|
||
this.set = [];
|
||
this.make();
|
||
}
|
||
hasMagic() {
|
||
if (this.options.magicalBraces && this.set.length > 1) {
|
||
return true;
|
||
}
|
||
for (const pattern of this.set) {
|
||
for (const part of pattern) {
|
||
if (typeof part !== "string")
|
||
return true;
|
||
}
|
||
}
|
||
return false;
|
||
}
|
||
debug(..._) {
|
||
}
|
||
make() {
|
||
const pattern = this.pattern;
|
||
const options = this.options;
|
||
if (!options.nocomment && pattern.charAt(0) === "#") {
|
||
this.comment = true;
|
||
return;
|
||
}
|
||
if (!pattern) {
|
||
this.empty = true;
|
||
return;
|
||
}
|
||
this.parseNegate();
|
||
this.globSet = [...new Set(this.braceExpand())];
|
||
if (options.debug) {
|
||
this.debug = (...args) => console.error(...args);
|
||
}
|
||
this.debug(this.pattern, this.globSet);
|
||
const rawGlobParts = this.globSet.map((s) => this.slashSplit(s));
|
||
this.globParts = this.preprocess(rawGlobParts);
|
||
this.debug(this.pattern, this.globParts);
|
||
let set = this.globParts.map((s, _, __) => {
|
||
if (this.isWindows && this.windowsNoMagicRoot) {
|
||
const isUNC = s[0] === "" && s[1] === "" && (s[2] === "?" || !globMagic.test(s[2])) && !globMagic.test(s[3]);
|
||
const isDrive = /^[a-z]:/i.test(s[0]);
|
||
if (isUNC) {
|
||
return [...s.slice(0, 4), ...s.slice(4).map((ss) => this.parse(ss))];
|
||
} else if (isDrive) {
|
||
return [s[0], ...s.slice(1).map((ss) => this.parse(ss))];
|
||
}
|
||
}
|
||
return s.map((ss) => this.parse(ss));
|
||
});
|
||
this.debug(this.pattern, set);
|
||
this.set = set.filter((s) => s.indexOf(false) === -1);
|
||
if (this.isWindows) {
|
||
for (let i = 0; i < this.set.length; i++) {
|
||
const p = this.set[i];
|
||
if (p[0] === "" && p[1] === "" && this.globParts[i][2] === "?" && typeof p[3] === "string" && /^[a-z]:$/i.test(p[3])) {
|
||
p[2] = "?";
|
||
}
|
||
}
|
||
}
|
||
this.debug(this.pattern, this.set);
|
||
}
|
||
// various transforms to equivalent pattern sets that are
|
||
// faster to process in a filesystem walk. The goal is to
|
||
// eliminate what we can, and push all ** patterns as far
|
||
// to the right as possible, even if it increases the number
|
||
// of patterns that we have to process.
|
||
preprocess(globParts) {
|
||
if (this.options.noglobstar) {
|
||
for (let i = 0; i < globParts.length; i++) {
|
||
for (let j = 0; j < globParts[i].length; j++) {
|
||
if (globParts[i][j] === "**") {
|
||
globParts[i][j] = "*";
|
||
}
|
||
}
|
||
}
|
||
}
|
||
const { optimizationLevel = 1 } = this.options;
|
||
if (optimizationLevel >= 2) {
|
||
globParts = this.firstPhasePreProcess(globParts);
|
||
globParts = this.secondPhasePreProcess(globParts);
|
||
} else if (optimizationLevel >= 1) {
|
||
globParts = this.levelOneOptimize(globParts);
|
||
} else {
|
||
globParts = this.adjascentGlobstarOptimize(globParts);
|
||
}
|
||
return globParts;
|
||
}
|
||
// just get rid of adjascent ** portions
|
||
adjascentGlobstarOptimize(globParts) {
|
||
return globParts.map((parts) => {
|
||
let gs = -1;
|
||
while (-1 !== (gs = parts.indexOf("**", gs + 1))) {
|
||
let i = gs;
|
||
while (parts[i + 1] === "**") {
|
||
i++;
|
||
}
|
||
if (i !== gs) {
|
||
parts.splice(gs, i - gs);
|
||
}
|
||
}
|
||
return parts;
|
||
});
|
||
}
|
||
// get rid of adjascent ** and resolve .. portions
|
||
levelOneOptimize(globParts) {
|
||
return globParts.map((parts) => {
|
||
parts = parts.reduce((set, part) => {
|
||
const prev = set[set.length - 1];
|
||
if (part === "**" && prev === "**") {
|
||
return set;
|
||
}
|
||
if (part === "..") {
|
||
if (prev && prev !== ".." && prev !== "." && prev !== "**") {
|
||
set.pop();
|
||
return set;
|
||
}
|
||
}
|
||
set.push(part);
|
||
return set;
|
||
}, []);
|
||
return parts.length === 0 ? [""] : parts;
|
||
});
|
||
}
|
||
levelTwoFileOptimize(parts) {
|
||
if (!Array.isArray(parts)) {
|
||
parts = this.slashSplit(parts);
|
||
}
|
||
let didSomething = false;
|
||
do {
|
||
didSomething = false;
|
||
if (!this.preserveMultipleSlashes) {
|
||
for (let i = 1; i < parts.length - 1; i++) {
|
||
const p = parts[i];
|
||
if (i === 1 && p === "" && parts[0] === "")
|
||
continue;
|
||
if (p === "." || p === "") {
|
||
didSomething = true;
|
||
parts.splice(i, 1);
|
||
i--;
|
||
}
|
||
}
|
||
if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) {
|
||
didSomething = true;
|
||
parts.pop();
|
||
}
|
||
}
|
||
let dd = 0;
|
||
while (-1 !== (dd = parts.indexOf("..", dd + 1))) {
|
||
const p = parts[dd - 1];
|
||
if (p && p !== "." && p !== ".." && p !== "**") {
|
||
didSomething = true;
|
||
parts.splice(dd - 1, 2);
|
||
dd -= 2;
|
||
}
|
||
}
|
||
} while (didSomething);
|
||
return parts.length === 0 ? [""] : parts;
|
||
}
|
||
// First phase: single-pattern processing
|
||
// <pre> is 1 or more portions
|
||
// <rest> is 1 or more portions
|
||
// <p> is any portion other than ., .., '', or **
|
||
// <e> is . or ''
|
||
//
|
||
// **/.. is *brutal* for filesystem walking performance, because
|
||
// it effectively resets the recursive walk each time it occurs,
|
||
// and ** cannot be reduced out by a .. pattern part like a regexp
|
||
// or most strings (other than .., ., and '') can be.
|
||
//
|
||
// <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
|
||
// <pre>/<e>/<rest> -> <pre>/<rest>
|
||
// <pre>/<p>/../<rest> -> <pre>/<rest>
|
||
// **/**/<rest> -> **/<rest>
|
||
//
|
||
// **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
|
||
// this WOULD be allowed if ** did follow symlinks, or * didn't
|
||
firstPhasePreProcess(globParts) {
|
||
let didSomething = false;
|
||
do {
|
||
didSomething = false;
|
||
for (let parts of globParts) {
|
||
let gs = -1;
|
||
while (-1 !== (gs = parts.indexOf("**", gs + 1))) {
|
||
let gss = gs;
|
||
while (parts[gss + 1] === "**") {
|
||
gss++;
|
||
}
|
||
if (gss > gs) {
|
||
parts.splice(gs + 1, gss - gs);
|
||
}
|
||
let next = parts[gs + 1];
|
||
const p = parts[gs + 2];
|
||
const p2 = parts[gs + 3];
|
||
if (next !== "..")
|
||
continue;
|
||
if (!p || p === "." || p === ".." || !p2 || p2 === "." || p2 === "..") {
|
||
continue;
|
||
}
|
||
didSomething = true;
|
||
parts.splice(gs, 1);
|
||
const other = parts.slice(0);
|
||
other[gs] = "**";
|
||
globParts.push(other);
|
||
gs--;
|
||
}
|
||
if (!this.preserveMultipleSlashes) {
|
||
for (let i = 1; i < parts.length - 1; i++) {
|
||
const p = parts[i];
|
||
if (i === 1 && p === "" && parts[0] === "")
|
||
continue;
|
||
if (p === "." || p === "") {
|
||
didSomething = true;
|
||
parts.splice(i, 1);
|
||
i--;
|
||
}
|
||
}
|
||
if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) {
|
||
didSomething = true;
|
||
parts.pop();
|
||
}
|
||
}
|
||
let dd = 0;
|
||
while (-1 !== (dd = parts.indexOf("..", dd + 1))) {
|
||
const p = parts[dd - 1];
|
||
if (p && p !== "." && p !== ".." && p !== "**") {
|
||
didSomething = true;
|
||
const needDot = dd === 1 && parts[dd + 1] === "**";
|
||
const splin = needDot ? ["."] : [];
|
||
parts.splice(dd - 1, 2, ...splin);
|
||
if (parts.length === 0)
|
||
parts.push("");
|
||
dd -= 2;
|
||
}
|
||
}
|
||
}
|
||
} while (didSomething);
|
||
return globParts;
|
||
}
|
||
// second phase: multi-pattern dedupes
|
||
// {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
|
||
// {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
|
||
// {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
|
||
//
|
||
// {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
|
||
// ^-- not valid because ** doens't follow symlinks
|
||
secondPhasePreProcess(globParts) {
|
||
for (let i = 0; i < globParts.length - 1; i++) {
|
||
for (let j = i + 1; j < globParts.length; j++) {
|
||
const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
|
||
if (!matched)
|
||
continue;
|
||
globParts[i] = matched;
|
||
globParts[j] = [];
|
||
}
|
||
}
|
||
return globParts.filter((gs) => gs.length);
|
||
}
|
||
partsMatch(a, b, emptyGSMatch = false) {
|
||
let ai = 0;
|
||
let bi = 0;
|
||
let result = [];
|
||
let which = "";
|
||
while (ai < a.length && bi < b.length) {
|
||
if (a[ai] === b[bi]) {
|
||
result.push(which === "b" ? b[bi] : a[ai]);
|
||
ai++;
|
||
bi++;
|
||
} else if (emptyGSMatch && a[ai] === "**" && b[bi] === a[ai + 1]) {
|
||
result.push(a[ai]);
|
||
ai++;
|
||
} else if (emptyGSMatch && b[bi] === "**" && a[ai] === b[bi + 1]) {
|
||
result.push(b[bi]);
|
||
bi++;
|
||
} else if (a[ai] === "*" && b[bi] && (this.options.dot || !b[bi].startsWith(".")) && b[bi] !== "**") {
|
||
if (which === "b")
|
||
return false;
|
||
which = "a";
|
||
result.push(a[ai]);
|
||
ai++;
|
||
bi++;
|
||
} else if (b[bi] === "*" && a[ai] && (this.options.dot || !a[ai].startsWith(".")) && a[ai] !== "**") {
|
||
if (which === "a")
|
||
return false;
|
||
which = "b";
|
||
result.push(b[bi]);
|
||
ai++;
|
||
bi++;
|
||
} else {
|
||
return false;
|
||
}
|
||
}
|
||
return a.length === b.length && result;
|
||
}
|
||
parseNegate() {
|
||
if (this.nonegate)
|
||
return;
|
||
const pattern = this.pattern;
|
||
let negate = false;
|
||
let negateOffset = 0;
|
||
for (let i = 0; i < pattern.length && pattern.charAt(i) === "!"; i++) {
|
||
negate = !negate;
|
||
negateOffset++;
|
||
}
|
||
if (negateOffset)
|
||
this.pattern = pattern.slice(negateOffset);
|
||
this.negate = negate;
|
||
}
|
||
// set partial to true to test if, for example,
|
||
// "/a/b" matches the start of "/*/b/*/d"
|
||
// Partial means, if you run out of file before you run
|
||
// out of pattern, then that's fine, as long as all
|
||
// the parts match.
|
||
matchOne(file, pattern, partial = false) {
|
||
const options = this.options;
|
||
if (this.isWindows) {
|
||
const fileDrive = typeof file[0] === "string" && /^[a-z]:$/i.test(file[0]);
|
||
const fileUNC = !fileDrive && file[0] === "" && file[1] === "" && file[2] === "?" && /^[a-z]:$/i.test(file[3]);
|
||
const patternDrive = typeof pattern[0] === "string" && /^[a-z]:$/i.test(pattern[0]);
|
||
const patternUNC = !patternDrive && pattern[0] === "" && pattern[1] === "" && pattern[2] === "?" && typeof pattern[3] === "string" && /^[a-z]:$/i.test(pattern[3]);
|
||
const fdi = fileUNC ? 3 : fileDrive ? 0 : void 0;
|
||
const pdi = patternUNC ? 3 : patternDrive ? 0 : void 0;
|
||
if (typeof fdi === "number" && typeof pdi === "number") {
|
||
const [fd, pd] = [file[fdi], pattern[pdi]];
|
||
if (fd.toLowerCase() === pd.toLowerCase()) {
|
||
pattern[pdi] = fd;
|
||
if (pdi > fdi) {
|
||
pattern = pattern.slice(pdi);
|
||
} else if (fdi > pdi) {
|
||
file = file.slice(fdi);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
const { optimizationLevel = 1 } = this.options;
|
||
if (optimizationLevel >= 2) {
|
||
file = this.levelTwoFileOptimize(file);
|
||
}
|
||
this.debug("matchOne", this, { file, pattern });
|
||
this.debug("matchOne", file.length, pattern.length);
|
||
for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
|
||
this.debug("matchOne loop");
|
||
var p = pattern[pi];
|
||
var f = file[fi];
|
||
this.debug(pattern, p, f);
|
||
if (p === false) {
|
||
return false;
|
||
}
|
||
if (p === GLOBSTAR) {
|
||
this.debug("GLOBSTAR", [pattern, p, f]);
|
||
var fr = fi;
|
||
var pr = pi + 1;
|
||
if (pr === pl) {
|
||
this.debug("** at the end");
|
||
for (; fi < fl; fi++) {
|
||
if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".")
|
||
return false;
|
||
}
|
||
return true;
|
||
}
|
||
while (fr < fl) {
|
||
var swallowee = file[fr];
|
||
this.debug("\nglobstar while", file, fr, pattern, pr, swallowee);
|
||
if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
|
||
this.debug("globstar found match!", fr, fl, swallowee);
|
||
return true;
|
||
} else {
|
||
if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") {
|
||
this.debug("dot detected!", file, fr, pattern, pr);
|
||
break;
|
||
}
|
||
this.debug("globstar swallow a segment, and continue");
|
||
fr++;
|
||
}
|
||
}
|
||
if (partial) {
|
||
this.debug("\n>>> no match, partial?", file, fr, pattern, pr);
|
||
if (fr === fl) {
|
||
return true;
|
||
}
|
||
}
|
||
return false;
|
||
}
|
||
let hit;
|
||
if (typeof p === "string") {
|
||
hit = f === p;
|
||
this.debug("string match", p, f, hit);
|
||
} else {
|
||
hit = p.test(f);
|
||
this.debug("pattern match", p, f, hit);
|
||
}
|
||
if (!hit)
|
||
return false;
|
||
}
|
||
if (fi === fl && pi === pl) {
|
||
return true;
|
||
} else if (fi === fl) {
|
||
return partial;
|
||
} else if (pi === pl) {
|
||
return fi === fl - 1 && file[fi] === "";
|
||
} else {
|
||
throw new Error("wtf?");
|
||
}
|
||
}
|
||
braceExpand() {
|
||
return braceExpand(this.pattern, this.options);
|
||
}
|
||
parse(pattern) {
|
||
assertValidPattern(pattern);
|
||
const options = this.options;
|
||
if (pattern === "**")
|
||
return GLOBSTAR;
|
||
if (pattern === "")
|
||
return "";
|
||
let m;
|
||
let fastTest = null;
|
||
if (m = pattern.match(starRE)) {
|
||
fastTest = options.dot ? starTestDot : starTest;
|
||
} else if (m = pattern.match(starDotExtRE)) {
|
||
fastTest = (options.nocase ? options.dot ? starDotExtTestNocaseDot : starDotExtTestNocase : options.dot ? starDotExtTestDot : starDotExtTest)(m[1]);
|
||
} else if (m = pattern.match(qmarksRE)) {
|
||
fastTest = (options.nocase ? options.dot ? qmarksTestNocaseDot : qmarksTestNocase : options.dot ? qmarksTestDot : qmarksTest)(m);
|
||
} else if (m = pattern.match(starDotStarRE)) {
|
||
fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
|
||
} else if (m = pattern.match(dotStarRE)) {
|
||
fastTest = dotStarTest;
|
||
}
|
||
const re = AST.fromGlob(pattern, this.options).toMMPattern();
|
||
return fastTest ? Object.assign(re, { test: fastTest }) : re;
|
||
}
|
||
makeRe() {
|
||
if (this.regexp || this.regexp === false)
|
||
return this.regexp;
|
||
const set = this.set;
|
||
if (!set.length) {
|
||
this.regexp = false;
|
||
return this.regexp;
|
||
}
|
||
const options = this.options;
|
||
const twoStar = options.noglobstar ? star2 : options.dot ? twoStarDot : twoStarNoDot;
|
||
const flags = new Set(options.nocase ? ["i"] : []);
|
||
let re = set.map((pattern) => {
|
||
const pp = pattern.map((p) => {
|
||
if (p instanceof RegExp) {
|
||
for (const f of p.flags.split(""))
|
||
flags.add(f);
|
||
}
|
||
return typeof p === "string" ? regExpEscape2(p) : p === GLOBSTAR ? GLOBSTAR : p._src;
|
||
});
|
||
pp.forEach((p, i) => {
|
||
const next = pp[i + 1];
|
||
const prev = pp[i - 1];
|
||
if (p !== GLOBSTAR || prev === GLOBSTAR) {
|
||
return;
|
||
}
|
||
if (prev === void 0) {
|
||
if (next !== void 0 && next !== GLOBSTAR) {
|
||
pp[i + 1] = "(?:\\/|" + twoStar + "\\/)?" + next;
|
||
} else {
|
||
pp[i] = twoStar;
|
||
}
|
||
} else if (next === void 0) {
|
||
pp[i - 1] = prev + "(?:\\/|" + twoStar + ")?";
|
||
} else if (next !== GLOBSTAR) {
|
||
pp[i - 1] = prev + "(?:\\/|\\/" + twoStar + "\\/)" + next;
|
||
pp[i + 1] = GLOBSTAR;
|
||
}
|
||
});
|
||
return pp.filter((p) => p !== GLOBSTAR).join("/");
|
||
}).join("|");
|
||
const [open, close2] = set.length > 1 ? ["(?:", ")"] : ["", ""];
|
||
re = "^" + open + re + close2 + "$";
|
||
if (this.negate)
|
||
re = "^(?!" + re + ").+$";
|
||
try {
|
||
this.regexp = new RegExp(re, [...flags].join(""));
|
||
} catch (ex) {
|
||
this.regexp = false;
|
||
}
|
||
return this.regexp;
|
||
}
|
||
slashSplit(p) {
|
||
if (this.preserveMultipleSlashes) {
|
||
return p.split("/");
|
||
} else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
|
||
return ["", ...p.split(/\/+/)];
|
||
} else {
|
||
return p.split(/\/+/);
|
||
}
|
||
}
|
||
match(f, partial = this.partial) {
|
||
this.debug("match", f, this.pattern);
|
||
if (this.comment) {
|
||
return false;
|
||
}
|
||
if (this.empty) {
|
||
return f === "";
|
||
}
|
||
if (f === "/" && partial) {
|
||
return true;
|
||
}
|
||
const options = this.options;
|
||
if (this.isWindows) {
|
||
f = f.split("\\").join("/");
|
||
}
|
||
const ff = this.slashSplit(f);
|
||
this.debug(this.pattern, "split", ff);
|
||
const set = this.set;
|
||
this.debug(this.pattern, "set", set);
|
||
let filename = ff[ff.length - 1];
|
||
if (!filename) {
|
||
for (let i = ff.length - 2; !filename && i >= 0; i--) {
|
||
filename = ff[i];
|
||
}
|
||
}
|
||
for (let i = 0; i < set.length; i++) {
|
||
const pattern = set[i];
|
||
let file = ff;
|
||
if (options.matchBase && pattern.length === 1) {
|
||
file = [filename];
|
||
}
|
||
const hit = this.matchOne(file, pattern, partial);
|
||
if (hit) {
|
||
if (options.flipNegate) {
|
||
return true;
|
||
}
|
||
return !this.negate;
|
||
}
|
||
}
|
||
if (options.flipNegate) {
|
||
return false;
|
||
}
|
||
return this.negate;
|
||
}
|
||
static defaults(def) {
|
||
return minimatch.defaults(def).Minimatch;
|
||
}
|
||
};
|
||
minimatch.AST = AST;
|
||
minimatch.Minimatch = Minimatch;
|
||
minimatch.escape = escape;
|
||
minimatch.unescape = unescape;
|
||
|
// node_modules/lru-cache/dist/esm/index.js
// High-resolution clock: performance.now() when available, else Date.now().
var perf = typeof performance === "object" && performance && typeof performance.now === "function" ? performance : Date;
// Warning codes that have already been emitted (warn once per code).
var warned = /* @__PURE__ */ new Set();
// Stand-in for `process` so the module also works outside of Node.
var PROCESS = typeof process === "object" && !!process ? process : {};
var emitWarning = (msg, type, code2, fn) => {
  typeof PROCESS.emitWarning === "function" ? PROCESS.emitWarning(msg, type, code2, fn) : console.error(`[${code2}] ${type}: ${msg}`);
};
var AC = globalThis.AbortController;
var AS = globalThis.AbortSignal;
if (typeof AC === "undefined") {
  // Minimal AbortController/AbortSignal polyfill (e.g. for Node 14).
  // Only sufficient for LRUCache.fetch(); warns once on first use.
  AS = class AbortSignal {
    onabort;
    _onabort = [];
    reason;
    aborted = false;
    addEventListener(_, fn) {
      this._onabort.push(fn);
    }
  };
  AC = class AbortController {
    constructor() {
      warnACPolyfill();
    }
    signal = new AS();
    abort(reason) {
      if (this.signal.aborted)
        return;
      this.signal.reason = reason;
      this.signal.aborted = true;
      for (const fn of this.signal._onabort) {
        fn(reason);
      }
      this.signal.onabort?.(reason);
    }
  };
  let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== "1";
  const warnACPolyfill = () => {
    if (!printACPolyfillWarning)
      return;
    printACPolyfillWarning = false;
    emitWarning("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.", "NO_ABORT_CONTROLLER", "ENOTSUP", warnACPolyfill);
  };
}
var shouldWarn = (code2) => !warned.has(code2);
|
||
var TYPE = Symbol("type");
|
||
var isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
|
||
var getUintArray = (max) => !isPosInt(max) ? null : max <= Math.pow(2, 8) ? Uint8Array : max <= Math.pow(2, 16) ? Uint16Array : max <= Math.pow(2, 32) ? Uint32Array : max <= Number.MAX_SAFE_INTEGER ? ZeroArray : null;
|
||
var ZeroArray = class extends Array {
|
||
constructor(size) {
|
||
super(size);
|
||
this.fill(0);
|
||
}
|
||
};
|
||
var Stack = class _Stack {
|
||
heap;
|
||
length;
|
||
// private constructor
|
||
static #constructing = false;
|
||
static create(max) {
|
||
const HeapCls = getUintArray(max);
|
||
if (!HeapCls)
|
||
return [];
|
||
_Stack.#constructing = true;
|
||
const s = new _Stack(max, HeapCls);
|
||
_Stack.#constructing = false;
|
||
return s;
|
||
}
|
||
constructor(max, HeapCls) {
|
||
if (!_Stack.#constructing) {
|
||
throw new TypeError("instantiate Stack using Stack.create(n)");
|
||
}
|
||
this.heap = new HeapCls(max);
|
||
this.length = 0;
|
||
}
|
||
push(n) {
|
||
this.heap[this.length++] = n;
|
||
}
|
||
pop() {
|
||
return this.heap[--this.length];
|
||
}
|
||
};
|
||
var LRUCache = class _LRUCache {
|
||
// properties coming in from the options of these, only max and maxSize
|
||
// really *need* to be protected. The rest can be modified, as they just
|
||
// set defaults for various methods.
|
||
#max;
|
||
#maxSize;
|
||
#dispose;
|
||
#disposeAfter;
|
||
#fetchMethod;
|
||
/**
|
||
* {@link LRUCache.OptionsBase.ttl}
|
||
*/
|
||
ttl;
|
||
/**
|
||
* {@link LRUCache.OptionsBase.ttlResolution}
|
||
*/
|
||
ttlResolution;
|
||
/**
|
||
* {@link LRUCache.OptionsBase.ttlAutopurge}
|
||
*/
|
||
ttlAutopurge;
|
||
/**
|
||
* {@link LRUCache.OptionsBase.updateAgeOnGet}
|
||
*/
|
||
updateAgeOnGet;
|
||
/**
|
||
* {@link LRUCache.OptionsBase.updateAgeOnHas}
|
||
*/
|
||
updateAgeOnHas;
|
||
/**
|
||
* {@link LRUCache.OptionsBase.allowStale}
|
||
*/
|
||
allowStale;
|
||
/**
|
||
* {@link LRUCache.OptionsBase.noDisposeOnSet}
|
||
*/
|
||
noDisposeOnSet;
|
||
/**
|
||
* {@link LRUCache.OptionsBase.noUpdateTTL}
|
||
*/
|
||
noUpdateTTL;
|
||
/**
|
||
* {@link LRUCache.OptionsBase.maxEntrySize}
|
||
*/
|
||
maxEntrySize;
|
||
/**
|
||
* {@link LRUCache.OptionsBase.sizeCalculation}
|
||
*/
|
||
sizeCalculation;
|
||
/**
|
||
* {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
|
||
*/
|
||
noDeleteOnFetchRejection;
|
||
/**
|
||
* {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
|
||
*/
|
||
noDeleteOnStaleGet;
|
||
/**
|
||
* {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
|
||
*/
|
||
allowStaleOnFetchAbort;
|
||
/**
|
||
* {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
|
||
*/
|
||
allowStaleOnFetchRejection;
|
||
/**
|
||
* {@link LRUCache.OptionsBase.ignoreFetchAbort}
|
||
*/
|
||
ignoreFetchAbort;
|
||
// computed properties
|
||
#size;
|
||
#calculatedSize;
|
||
#keyMap;
|
||
#keyList;
|
||
#valList;
|
||
#next;
|
||
#prev;
|
||
#head;
|
||
#tail;
|
||
#free;
|
||
#disposed;
|
||
#sizes;
|
||
#starts;
|
||
#ttls;
|
||
#hasDispose;
|
||
#hasFetchMethod;
|
||
#hasDisposeAfter;
|
||
/**
|
||
* Do not call this method unless you need to inspect the
|
||
* inner workings of the cache. If anything returned by this
|
||
* object is modified in any way, strange breakage may occur.
|
||
*
|
||
* These fields are private for a reason!
|
||
*
|
||
* @internal
|
||
*/
|
||
static unsafeExposeInternals(c) {
|
||
return {
|
||
// properties
|
||
starts: c.#starts,
|
||
ttls: c.#ttls,
|
||
sizes: c.#sizes,
|
||
keyMap: c.#keyMap,
|
||
keyList: c.#keyList,
|
||
valList: c.#valList,
|
||
next: c.#next,
|
||
prev: c.#prev,
|
||
get head() {
|
||
return c.#head;
|
||
},
|
||
get tail() {
|
||
return c.#tail;
|
||
},
|
||
free: c.#free,
|
||
// methods
|
||
isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
|
||
backgroundFetch: (k, index2, options, context) => c.#backgroundFetch(k, index2, options, context),
|
||
moveToTail: (index2) => c.#moveToTail(index2),
|
||
indexes: (options) => c.#indexes(options),
|
||
rindexes: (options) => c.#rindexes(options),
|
||
isStale: (index2) => c.#isStale(index2)
|
||
};
|
||
}
|
||
// Protected read-only members
|
||
/**
|
||
* {@link LRUCache.OptionsBase.max} (read-only)
|
||
*/
|
||
get max() {
|
||
return this.#max;
|
||
}
|
||
/**
|
||
* {@link LRUCache.OptionsBase.maxSize} (read-only)
|
||
*/
|
||
get maxSize() {
|
||
return this.#maxSize;
|
||
}
|
||
/**
|
||
* The total computed size of items in the cache (read-only)
|
||
*/
|
||
get calculatedSize() {
|
||
return this.#calculatedSize;
|
||
}
|
||
/**
|
||
* The number of items stored in the cache (read-only)
|
||
*/
|
||
get size() {
|
||
return this.#size;
|
||
}
|
||
/**
|
||
* {@link LRUCache.OptionsBase.fetchMethod} (read-only)
|
||
*/
|
||
get fetchMethod() {
|
||
return this.#fetchMethod;
|
||
}
|
||
/**
|
||
* {@link LRUCache.OptionsBase.dispose} (read-only)
|
||
*/
|
||
get dispose() {
|
||
return this.#dispose;
|
||
}
|
||
/**
|
||
* {@link LRUCache.OptionsBase.disposeAfter} (read-only)
|
||
*/
|
||
get disposeAfter() {
|
||
return this.#disposeAfter;
|
||
}
|
||
constructor(options) {
|
||
const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort } = options;
|
||
if (max !== 0 && !isPosInt(max)) {
|
||
throw new TypeError("max option must be a nonnegative integer");
|
||
}
|
||
const UintArray = max ? getUintArray(max) : Array;
|
||
if (!UintArray) {
|
||
throw new Error("invalid max value: " + max);
|
||
}
|
||
this.#max = max;
|
||
this.#maxSize = maxSize;
|
||
this.maxEntrySize = maxEntrySize || this.#maxSize;
|
||
this.sizeCalculation = sizeCalculation;
|
||
if (this.sizeCalculation) {
|
||
if (!this.#maxSize && !this.maxEntrySize) {
|
||
throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");
|
||
}
|
||
if (typeof this.sizeCalculation !== "function") {
|
||
throw new TypeError("sizeCalculation set to non-function");
|
||
}
|
||
}
|
||
if (fetchMethod !== void 0 && typeof fetchMethod !== "function") {
|
||
throw new TypeError("fetchMethod must be a function if specified");
|
||
}
|
||
this.#fetchMethod = fetchMethod;
|
||
this.#hasFetchMethod = !!fetchMethod;
|
||
this.#keyMap = /* @__PURE__ */ new Map();
|
||
this.#keyList = new Array(max).fill(void 0);
|
||
this.#valList = new Array(max).fill(void 0);
|
||
this.#next = new UintArray(max);
|
||
this.#prev = new UintArray(max);
|
||
this.#head = 0;
|
||
this.#tail = 0;
|
||
this.#free = Stack.create(max);
|
||
this.#size = 0;
|
||
this.#calculatedSize = 0;
|
||
if (typeof dispose === "function") {
|
||
this.#dispose = dispose;
|
||
}
|
||
if (typeof disposeAfter === "function") {
|
||
this.#disposeAfter = disposeAfter;
|
||
this.#disposed = [];
|
||
} else {
|
||
this.#disposeAfter = void 0;
|
||
this.#disposed = void 0;
|
||
}
|
||
this.#hasDispose = !!this.#dispose;
|
||
this.#hasDisposeAfter = !!this.#disposeAfter;
|
||
this.noDisposeOnSet = !!noDisposeOnSet;
|
||
this.noUpdateTTL = !!noUpdateTTL;
|
||
this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
|
||
this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
|
||
this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
|
||
this.ignoreFetchAbort = !!ignoreFetchAbort;
|
||
if (this.maxEntrySize !== 0) {
|
||
if (this.#maxSize !== 0) {
|
||
if (!isPosInt(this.#maxSize)) {
|
||
throw new TypeError("maxSize must be a positive integer if specified");
|
||
}
|
||
}
|
||
if (!isPosInt(this.maxEntrySize)) {
|
||
throw new TypeError("maxEntrySize must be a positive integer if specified");
|
||
}
|
||
this.#initializeSizeTracking();
|
||
}
|
||
this.allowStale = !!allowStale;
|
||
this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
|
||
this.updateAgeOnGet = !!updateAgeOnGet;
|
||
this.updateAgeOnHas = !!updateAgeOnHas;
|
||
this.ttlResolution = isPosInt(ttlResolution) || ttlResolution === 0 ? ttlResolution : 1;
|
||
this.ttlAutopurge = !!ttlAutopurge;
|
||
this.ttl = ttl || 0;
|
||
if (this.ttl) {
|
||
if (!isPosInt(this.ttl)) {
|
||
throw new TypeError("ttl must be a positive integer if specified");
|
||
}
|
||
this.#initializeTTLTracking();
|
||
}
|
||
if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
|
||
throw new TypeError("At least one of max, maxSize, or ttl is required");
|
||
}
|
||
if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
|
||
const code2 = "LRU_CACHE_UNBOUNDED";
|
||
if (shouldWarn(code2)) {
|
||
warned.add(code2);
|
||
const msg = "TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.";
|
||
emitWarning(msg, "UnboundedCacheWarning", code2, _LRUCache);
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Return the remaining TTL time for a given entry key
|
||
*/
|
||
getRemainingTTL(key) {
|
||
return this.#keyMap.has(key) ? Infinity : 0;
|
||
}
|
||
#initializeTTLTracking() {
|
||
const ttls = new ZeroArray(this.#max);
|
||
const starts = new ZeroArray(this.#max);
|
||
this.#ttls = ttls;
|
||
this.#starts = starts;
|
||
this.#setItemTTL = (index2, ttl, start = perf.now()) => {
|
||
starts[index2] = ttl !== 0 ? start : 0;
|
||
ttls[index2] = ttl;
|
||
if (ttl !== 0 && this.ttlAutopurge) {
|
||
const t = setTimeout(() => {
|
||
if (this.#isStale(index2)) {
|
||
this.delete(this.#keyList[index2]);
|
||
}
|
||
}, ttl + 1);
|
||
if (t.unref) {
|
||
t.unref();
|
||
}
|
||
}
|
||
};
|
||
this.#updateItemAge = (index2) => {
|
||
starts[index2] = ttls[index2] !== 0 ? perf.now() : 0;
|
||
};
|
||
this.#statusTTL = (status, index2) => {
|
||
if (ttls[index2]) {
|
||
const ttl = ttls[index2];
|
||
const start = starts[index2];
|
||
if (!ttl || !start)
|
||
return;
|
||
status.ttl = ttl;
|
||
status.start = start;
|
||
status.now = cachedNow || getNow();
|
||
const age = status.now - start;
|
||
status.remainingTTL = ttl - age;
|
||
}
|
||
};
|
||
let cachedNow = 0;
|
||
const getNow = () => {
|
||
const n = perf.now();
|
||
if (this.ttlResolution > 0) {
|
||
cachedNow = n;
|
||
const t = setTimeout(() => cachedNow = 0, this.ttlResolution);
|
||
if (t.unref) {
|
||
t.unref();
|
||
}
|
||
}
|
||
return n;
|
||
};
|
||
this.getRemainingTTL = (key) => {
|
||
const index2 = this.#keyMap.get(key);
|
||
if (index2 === void 0) {
|
||
return 0;
|
||
}
|
||
const ttl = ttls[index2];
|
||
const start = starts[index2];
|
||
if (!ttl || !start) {
|
||
return Infinity;
|
||
}
|
||
const age = (cachedNow || getNow()) - start;
|
||
return ttl - age;
|
||
};
|
||
this.#isStale = (index2) => {
|
||
const s = starts[index2];
|
||
const t = ttls[index2];
|
||
return !!t && !!s && (cachedNow || getNow()) - s > t;
|
||
};
|
||
}
|
||
// conditionally set private methods related to TTL
|
||
#updateItemAge = () => {
|
||
};
|
||
#statusTTL = () => {
|
||
};
|
||
#setItemTTL = () => {
|
||
};
|
||
/* c8 ignore stop */
|
||
#isStale = () => false;
|
||
#initializeSizeTracking() {
|
||
const sizes = new ZeroArray(this.#max);
|
||
this.#calculatedSize = 0;
|
||
this.#sizes = sizes;
|
||
this.#removeItemSize = (index2) => {
|
||
this.#calculatedSize -= sizes[index2];
|
||
sizes[index2] = 0;
|
||
};
|
||
this.#requireSize = (k, v, size, sizeCalculation) => {
|
||
if (this.#isBackgroundFetch(v)) {
|
||
return 0;
|
||
}
|
||
if (!isPosInt(size)) {
|
||
if (sizeCalculation) {
|
||
if (typeof sizeCalculation !== "function") {
|
||
throw new TypeError("sizeCalculation must be a function");
|
||
}
|
||
size = sizeCalculation(v, k);
|
||
if (!isPosInt(size)) {
|
||
throw new TypeError("sizeCalculation return invalid (expect positive integer)");
|
||
}
|
||
} else {
|
||
throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");
|
||
}
|
||
}
|
||
return size;
|
||
};
|
||
this.#addItemSize = (index2, size, status) => {
|
||
sizes[index2] = size;
|
||
if (this.#maxSize) {
|
||
const maxSize = this.#maxSize - sizes[index2];
|
||
while (this.#calculatedSize > maxSize) {
|
||
this.#evict(true);
|
||
}
|
||
}
|
||
this.#calculatedSize += sizes[index2];
|
||
if (status) {
|
||
status.entrySize = size;
|
||
status.totalCalculatedSize = this.#calculatedSize;
|
||
}
|
||
};
|
||
}
|
||
#removeItemSize = (_i) => {
|
||
};
|
||
#addItemSize = (_i, _s, _st) => {
|
||
};
|
||
#requireSize = (_k, _v, size, sizeCalculation) => {
|
||
if (size || sizeCalculation) {
|
||
throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");
|
||
}
|
||
return 0;
|
||
};
|
||
*#indexes({ allowStale = this.allowStale } = {}) {
|
||
if (this.#size) {
|
||
for (let i = this.#tail; true; ) {
|
||
if (!this.#isValidIndex(i)) {
|
||
break;
|
||
}
|
||
if (allowStale || !this.#isStale(i)) {
|
||
yield i;
|
||
}
|
||
if (i === this.#head) {
|
||
break;
|
||
} else {
|
||
i = this.#prev[i];
|
||
}
|
||
}
|
||
}
|
||
}
|
||
*#rindexes({ allowStale = this.allowStale } = {}) {
|
||
if (this.#size) {
|
||
for (let i = this.#head; true; ) {
|
||
if (!this.#isValidIndex(i)) {
|
||
break;
|
||
}
|
||
if (allowStale || !this.#isStale(i)) {
|
||
yield i;
|
||
}
|
||
if (i === this.#tail) {
|
||
break;
|
||
} else {
|
||
i = this.#next[i];
|
||
}
|
||
}
|
||
}
|
||
}
|
||
#isValidIndex(index2) {
|
||
return index2 !== void 0 && this.#keyMap.get(this.#keyList[index2]) === index2;
|
||
}
|
||
/**
|
||
* Return a generator yielding `[key, value]` pairs,
|
||
* in order from most recently used to least recently used.
|
||
*/
|
||
*entries() {
|
||
for (const i of this.#indexes()) {
|
||
if (this.#valList[i] !== void 0 && this.#keyList[i] !== void 0 && !this.#isBackgroundFetch(this.#valList[i])) {
|
||
yield [this.#keyList[i], this.#valList[i]];
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Inverse order version of {@link LRUCache.entries}
|
||
*
|
||
* Return a generator yielding `[key, value]` pairs,
|
||
* in order from least recently used to most recently used.
|
||
*/
|
||
*rentries() {
|
||
for (const i of this.#rindexes()) {
|
||
if (this.#valList[i] !== void 0 && this.#keyList[i] !== void 0 && !this.#isBackgroundFetch(this.#valList[i])) {
|
||
yield [this.#keyList[i], this.#valList[i]];
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Return a generator yielding the keys in the cache,
|
||
* in order from most recently used to least recently used.
|
||
*/
|
||
*keys() {
|
||
for (const i of this.#indexes()) {
|
||
const k = this.#keyList[i];
|
||
if (k !== void 0 && !this.#isBackgroundFetch(this.#valList[i])) {
|
||
yield k;
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Inverse order version of {@link LRUCache.keys}
|
||
*
|
||
* Return a generator yielding the keys in the cache,
|
||
* in order from least recently used to most recently used.
|
||
*/
|
||
*rkeys() {
|
||
for (const i of this.#rindexes()) {
|
||
const k = this.#keyList[i];
|
||
if (k !== void 0 && !this.#isBackgroundFetch(this.#valList[i])) {
|
||
yield k;
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Return a generator yielding the values in the cache,
|
||
* in order from most recently used to least recently used.
|
||
*/
|
||
*values() {
|
||
for (const i of this.#indexes()) {
|
||
const v = this.#valList[i];
|
||
if (v !== void 0 && !this.#isBackgroundFetch(this.#valList[i])) {
|
||
yield this.#valList[i];
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Inverse order version of {@link LRUCache.values}
|
||
*
|
||
* Return a generator yielding the values in the cache,
|
||
* in order from least recently used to most recently used.
|
||
*/
|
||
*rvalues() {
|
||
for (const i of this.#rindexes()) {
|
||
const v = this.#valList[i];
|
||
if (v !== void 0 && !this.#isBackgroundFetch(this.#valList[i])) {
|
||
yield this.#valList[i];
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Iterating over the cache itself yields the same results as
|
||
* {@link LRUCache.entries}
|
||
*/
|
||
[Symbol.iterator]() {
|
||
return this.entries();
|
||
}
|
||
/**
|
||
* Find a value for which the supplied fn method returns a truthy value,
|
||
* similar to Array.find(). fn is called as fn(value, key, cache).
|
||
*/
|
||
find(fn, getOptions = {}) {
|
||
for (const i of this.#indexes()) {
|
||
const v = this.#valList[i];
|
||
const value2 = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
|
||
if (value2 === void 0)
|
||
continue;
|
||
if (fn(value2, this.#keyList[i], this)) {
|
||
return this.get(this.#keyList[i], getOptions);
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* Call the supplied function on each item in the cache, in order from
|
||
* most recently used to least recently used. fn is called as
|
||
* fn(value, key, cache). Does not update age or recenty of use.
|
||
* Does not iterate over stale values.
|
||
*/
|
||
forEach(fn, thisp = this) {
|
||
for (const i of this.#indexes()) {
|
||
const v = this.#valList[i];
|
||
const value2 = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
|
||
if (value2 === void 0)
|
||
continue;
|
||
fn.call(thisp, value2, this.#keyList[i], this);
|
||
}
|
||
}
|
||
/**
|
||
* The same as {@link LRUCache.forEach} but items are iterated over in
|
||
* reverse order. (ie, less recently used items are iterated over first.)
|
||
*/
|
||
rforEach(fn, thisp = this) {
|
||
for (const i of this.#rindexes()) {
|
||
const v = this.#valList[i];
|
||
const value2 = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
|
||
if (value2 === void 0)
|
||
continue;
|
||
fn.call(thisp, value2, this.#keyList[i], this);
|
||
}
|
||
}
|
||
/**
|
||
* Delete any stale entries. Returns true if anything was removed,
|
||
* false otherwise.
|
||
*/
|
||
purgeStale() {
|
||
let deleted = false;
|
||
for (const i of this.#rindexes({ allowStale: true })) {
|
||
if (this.#isStale(i)) {
|
||
this.delete(this.#keyList[i]);
|
||
deleted = true;
|
||
}
|
||
}
|
||
return deleted;
|
||
}
|
||
/**
|
||
* Get the extended info about a given entry, to get its value, size, and
|
||
* TTL info simultaneously. Like {@link LRUCache#dump}, but just for a
|
||
* single key. Always returns stale values, if their info is found in the
|
||
* cache, so be sure to check for expired TTLs if relevant.
|
||
*/
|
||
info(key) {
|
||
const i = this.#keyMap.get(key);
|
||
if (i === void 0)
|
||
return void 0;
|
||
const v = this.#valList[i];
|
||
const value2 = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
|
||
if (value2 === void 0)
|
||
return void 0;
|
||
const entry = { value: value2 };
|
||
if (this.#ttls && this.#starts) {
|
||
const ttl = this.#ttls[i];
|
||
const start = this.#starts[i];
|
||
if (ttl && start) {
|
||
const remain = ttl - (perf.now() - start);
|
||
entry.ttl = remain;
|
||
entry.start = Date.now();
|
||
}
|
||
}
|
||
if (this.#sizes) {
|
||
entry.size = this.#sizes[i];
|
||
}
|
||
return entry;
|
||
}
|
||
/**
|
||
* Return an array of [key, {@link LRUCache.Entry}] tuples which can be
|
||
* passed to cache.load()
|
||
*/
|
||
dump() {
|
||
const arr = [];
|
||
for (const i of this.#indexes({ allowStale: true })) {
|
||
const key = this.#keyList[i];
|
||
const v = this.#valList[i];
|
||
const value2 = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
|
||
if (value2 === void 0 || key === void 0)
|
||
continue;
|
||
const entry = { value: value2 };
|
||
if (this.#ttls && this.#starts) {
|
||
entry.ttl = this.#ttls[i];
|
||
const age = perf.now() - this.#starts[i];
|
||
entry.start = Math.floor(Date.now() - age);
|
||
}
|
||
if (this.#sizes) {
|
||
entry.size = this.#sizes[i];
|
||
}
|
||
arr.unshift([key, entry]);
|
||
}
|
||
return arr;
|
||
}
|
||
/**
|
||
* Reset the cache and load in the items in entries in the order listed.
|
||
* Note that the shape of the resulting cache may be different if the
|
||
* same options are not used in both caches.
|
||
*/
|
||
load(arr) {
|
||
this.clear();
|
||
for (const [key, entry] of arr) {
|
||
if (entry.start) {
|
||
const age = Date.now() - entry.start;
|
||
entry.start = perf.now() - age;
|
||
}
|
||
this.set(key, entry.value, entry);
|
||
}
|
||
}
|
||
/**
|
||
* Add a value to the cache.
|
||
*
|
||
* Note: if `undefined` is specified as a value, this is an alias for
|
||
* {@link LRUCache#delete}
|
||
*/
|
||
set(k, v, setOptions = {}) {
|
||
if (v === void 0) {
|
||
this.delete(k);
|
||
return this;
|
||
}
|
||
const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status } = setOptions;
|
||
let { noUpdateTTL = this.noUpdateTTL } = setOptions;
|
||
const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
|
||
if (this.maxEntrySize && size > this.maxEntrySize) {
|
||
if (status) {
|
||
status.set = "miss";
|
||
status.maxEntrySizeExceeded = true;
|
||
}
|
||
this.delete(k);
|
||
return this;
|
||
}
|
||
let index2 = this.#size === 0 ? void 0 : this.#keyMap.get(k);
|
||
if (index2 === void 0) {
|
||
index2 = this.#size === 0 ? this.#tail : this.#free.length !== 0 ? this.#free.pop() : this.#size === this.#max ? this.#evict(false) : this.#size;
|
||
this.#keyList[index2] = k;
|
||
this.#valList[index2] = v;
|
||
this.#keyMap.set(k, index2);
|
||
this.#next[this.#tail] = index2;
|
||
this.#prev[index2] = this.#tail;
|
||
this.#tail = index2;
|
||
this.#size++;
|
||
this.#addItemSize(index2, size, status);
|
||
if (status)
|
||
status.set = "add";
|
||
noUpdateTTL = false;
|
||
} else {
|
||
this.#moveToTail(index2);
|
||
const oldVal = this.#valList[index2];
|
||
if (v !== oldVal) {
|
||
if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
|
||
oldVal.__abortController.abort(new Error("replaced"));
|
||
const { __staleWhileFetching: s } = oldVal;
|
||
if (s !== void 0 && !noDisposeOnSet) {
|
||
if (this.#hasDispose) {
|
||
this.#dispose?.(s, k, "set");
|
||
}
|
||
if (this.#hasDisposeAfter) {
|
||
this.#disposed?.push([s, k, "set"]);
|
||
}
|
||
}
|
||
} else if (!noDisposeOnSet) {
|
||
if (this.#hasDispose) {
|
||
this.#dispose?.(oldVal, k, "set");
|
||
}
|
||
if (this.#hasDisposeAfter) {
|
||
this.#disposed?.push([oldVal, k, "set"]);
|
||
}
|
||
}
|
||
this.#removeItemSize(index2);
|
||
this.#addItemSize(index2, size, status);
|
||
this.#valList[index2] = v;
|
||
if (status) {
|
||
status.set = "replace";
|
||
const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ? oldVal.__staleWhileFetching : oldVal;
|
||
if (oldValue !== void 0)
|
||
status.oldValue = oldValue;
|
||
}
|
||
} else if (status) {
|
||
status.set = "update";
|
||
}
|
||
}
|
||
if (ttl !== 0 && !this.#ttls) {
|
||
this.#initializeTTLTracking();
|
||
}
|
||
if (this.#ttls) {
|
||
if (!noUpdateTTL) {
|
||
this.#setItemTTL(index2, ttl, start);
|
||
}
|
||
if (status)
|
||
this.#statusTTL(status, index2);
|
||
}
|
||
if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
|
||
const dt = this.#disposed;
|
||
let task;
|
||
while (task = dt?.shift()) {
|
||
this.#disposeAfter?.(...task);
|
||
}
|
||
}
|
||
return this;
|
||
}
|
||
/**
|
||
* Evict the least recently used item, returning its value or
|
||
* `undefined` if cache is empty.
|
||
*/
|
||
pop() {
|
||
try {
|
||
while (this.#size) {
|
||
const val = this.#valList[this.#head];
|
||
this.#evict(true);
|
||
if (this.#isBackgroundFetch(val)) {
|
||
if (val.__staleWhileFetching) {
|
||
return val.__staleWhileFetching;
|
||
}
|
||
} else if (val !== void 0) {
|
||
return val;
|
||
}
|
||
}
|
||
} finally {
|
||
if (this.#hasDisposeAfter && this.#disposed) {
|
||
const dt = this.#disposed;
|
||
let task;
|
||
while (task = dt?.shift()) {
|
||
this.#disposeAfter?.(...task);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
#evict(free) {
|
||
const head = this.#head;
|
||
const k = this.#keyList[head];
|
||
const v = this.#valList[head];
|
||
if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
|
||
v.__abortController.abort(new Error("evicted"));
|
||
} else if (this.#hasDispose || this.#hasDisposeAfter) {
|
||
if (this.#hasDispose) {
|
||
this.#dispose?.(v, k, "evict");
|
||
}
|
||
if (this.#hasDisposeAfter) {
|
||
this.#disposed?.push([v, k, "evict"]);
|
||
}
|
||
}
|
||
this.#removeItemSize(head);
|
||
if (free) {
|
||
this.#keyList[head] = void 0;
|
||
this.#valList[head] = void 0;
|
||
this.#free.push(head);
|
||
}
|
||
if (this.#size === 1) {
|
||
this.#head = this.#tail = 0;
|
||
this.#free.length = 0;
|
||
} else {
|
||
this.#head = this.#next[head];
|
||
}
|
||
this.#keyMap.delete(k);
|
||
this.#size--;
|
||
return head;
|
||
}
|
||
/**
|
||
* Check if a key is in the cache, without updating the recency of use.
|
||
* Will return false if the item is stale, even though it is technically
|
||
* in the cache.
|
||
*
|
||
* Will not update item age unless
|
||
* {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
|
||
*/
|
||
has(k, hasOptions = {}) {
|
||
const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
|
||
const index2 = this.#keyMap.get(k);
|
||
if (index2 !== void 0) {
|
||
const v = this.#valList[index2];
|
||
if (this.#isBackgroundFetch(v) && v.__staleWhileFetching === void 0) {
|
||
return false;
|
||
}
|
||
if (!this.#isStale(index2)) {
|
||
if (updateAgeOnHas) {
|
||
this.#updateItemAge(index2);
|
||
}
|
||
if (status) {
|
||
status.has = "hit";
|
||
this.#statusTTL(status, index2);
|
||
}
|
||
return true;
|
||
} else if (status) {
|
||
status.has = "stale";
|
||
this.#statusTTL(status, index2);
|
||
}
|
||
} else if (status) {
|
||
status.has = "miss";
|
||
}
|
||
return false;
|
||
}
|
||
/**
|
||
* Like {@link LRUCache#get} but doesn't update recency or delete stale
|
||
* items.
|
||
*
|
||
* Returns `undefined` if the item is stale, unless
|
||
* {@link LRUCache.OptionsBase.allowStale} is set.
|
||
*/
|
||
peek(k, peekOptions = {}) {
|
||
const { allowStale = this.allowStale } = peekOptions;
|
||
const index2 = this.#keyMap.get(k);
|
||
if (index2 === void 0 || !allowStale && this.#isStale(index2)) {
|
||
return;
|
||
}
|
||
const v = this.#valList[index2];
|
||
return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
|
||
}
|
||
#backgroundFetch(k, index2, options, context) {
|
||
const v = index2 === void 0 ? void 0 : this.#valList[index2];
|
||
if (this.#isBackgroundFetch(v)) {
|
||
return v;
|
||
}
|
||
const ac = new AC();
|
||
const { signal } = options;
|
||
signal?.addEventListener("abort", () => ac.abort(signal.reason), {
|
||
signal: ac.signal
|
||
});
|
||
const fetchOpts = {
|
||
signal: ac.signal,
|
||
options,
|
||
context
|
||
};
|
||
const cb = (v2, updateCache = false) => {
|
||
const { aborted } = ac.signal;
|
||
const ignoreAbort = options.ignoreFetchAbort && v2 !== void 0;
|
||
if (options.status) {
|
||
if (aborted && !updateCache) {
|
||
options.status.fetchAborted = true;
|
||
options.status.fetchError = ac.signal.reason;
|
||
if (ignoreAbort)
|
||
options.status.fetchAbortIgnored = true;
|
||
} else {
|
||
options.status.fetchResolved = true;
|
||
}
|
||
}
|
||
if (aborted && !ignoreAbort && !updateCache) {
|
||
return fetchFail(ac.signal.reason);
|
||
}
|
||
const bf2 = p;
|
||
if (this.#valList[index2] === p) {
|
||
if (v2 === void 0) {
|
||
if (bf2.__staleWhileFetching) {
|
||
this.#valList[index2] = bf2.__staleWhileFetching;
|
||
} else {
|
||
this.delete(k);
|
||
}
|
||
} else {
|
||
if (options.status)
|
||
options.status.fetchUpdated = true;
|
||
this.set(k, v2, fetchOpts.options);
|
||
}
|
||
}
|
||
return v2;
|
||
};
|
||
const eb = (er) => {
|
||
if (options.status) {
|
||
options.status.fetchRejected = true;
|
||
options.status.fetchError = er;
|
||
}
|
||
return fetchFail(er);
|
||
};
|
||
const fetchFail = (er) => {
|
||
const { aborted } = ac.signal;
|
||
const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
|
||
const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
|
||
const noDelete = allowStale || options.noDeleteOnFetchRejection;
|
||
const bf2 = p;
|
||
if (this.#valList[index2] === p) {
|
||
const del = !noDelete || bf2.__staleWhileFetching === void 0;
|
||
if (del) {
|
||
this.delete(k);
|
||
} else if (!allowStaleAborted) {
|
||
this.#valList[index2] = bf2.__staleWhileFetching;
|
||
}
|
||
}
|
||
if (allowStale) {
|
||
if (options.status && bf2.__staleWhileFetching !== void 0) {
|
||
options.status.returnedStale = true;
|
||
}
|
||
return bf2.__staleWhileFetching;
|
||
} else if (bf2.__returned === bf2) {
|
||
throw er;
|
||
}
|
||
};
|
||
const pcall = (res, rej) => {
|
||
const fmp = this.#fetchMethod?.(k, v, fetchOpts);
|
||
if (fmp && fmp instanceof Promise) {
|
||
fmp.then((v2) => res(v2 === void 0 ? void 0 : v2), rej);
|
||
}
|
||
ac.signal.addEventListener("abort", () => {
|
||
if (!options.ignoreFetchAbort || options.allowStaleOnFetchAbort) {
|
||
res(void 0);
|
||
if (options.allowStaleOnFetchAbort) {
|
||
res = (v2) => cb(v2, true);
|
||
}
|
||
}
|
||
});
|
||
};
|
||
if (options.status)
|
||
options.status.fetchDispatched = true;
|
||
const p = new Promise(pcall).then(cb, eb);
|
||
const bf = Object.assign(p, {
|
||
__abortController: ac,
|
||
__staleWhileFetching: v,
|
||
__returned: void 0
|
||
});
|
||
if (index2 === void 0) {
|
||
this.set(k, bf, { ...fetchOpts.options, status: void 0 });
|
||
index2 = this.#keyMap.get(k);
|
||
} else {
|
||
this.#valList[index2] = bf;
|
||
}
|
||
return bf;
|
||
}
|
||
#isBackgroundFetch(p) {
|
||
if (!this.#hasFetchMethod)
|
||
return false;
|
||
const b = p;
|
||
return !!b && b instanceof Promise && b.hasOwnProperty("__staleWhileFetching") && b.__abortController instanceof AC;
|
||
}
|
||
async fetch(k, fetchOptions = {}) {
|
||
const {
|
||
// get options
|
||
allowStale = this.allowStale,
|
||
updateAgeOnGet = this.updateAgeOnGet,
|
||
noDeleteOnStaleGet = this.noDeleteOnStaleGet,
|
||
// set options
|
||
ttl = this.ttl,
|
||
noDisposeOnSet = this.noDisposeOnSet,
|
||
size = 0,
|
||
sizeCalculation = this.sizeCalculation,
|
||
noUpdateTTL = this.noUpdateTTL,
|
||
// fetch exclusive options
|
||
noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
|
||
allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,
|
||
ignoreFetchAbort = this.ignoreFetchAbort,
|
||
allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,
|
||
context,
|
||
forceRefresh = false,
|
||
status,
|
||
signal
|
||
} = fetchOptions;
|
||
if (!this.#hasFetchMethod) {
|
||
if (status)
|
||
status.fetch = "get";
|
||
return this.get(k, {
|
||
allowStale,
|
||
updateAgeOnGet,
|
||
noDeleteOnStaleGet,
|
||
status
|
||
});
|
||
}
|
||
const options = {
|
||
allowStale,
|
||
updateAgeOnGet,
|
||
noDeleteOnStaleGet,
|
||
ttl,
|
||
noDisposeOnSet,
|
||
size,
|
||
sizeCalculation,
|
||
noUpdateTTL,
|
||
noDeleteOnFetchRejection,
|
||
allowStaleOnFetchRejection,
|
||
allowStaleOnFetchAbort,
|
||
ignoreFetchAbort,
|
||
status,
|
||
signal
|
||
};
|
||
let index2 = this.#keyMap.get(k);
|
||
if (index2 === void 0) {
|
||
if (status)
|
||
status.fetch = "miss";
|
||
const p = this.#backgroundFetch(k, index2, options, context);
|
||
return p.__returned = p;
|
||
} else {
|
||
const v = this.#valList[index2];
|
||
if (this.#isBackgroundFetch(v)) {
|
||
const stale = allowStale && v.__staleWhileFetching !== void 0;
|
||
if (status) {
|
||
status.fetch = "inflight";
|
||
if (stale)
|
||
status.returnedStale = true;
|
||
}
|
||
return stale ? v.__staleWhileFetching : v.__returned = v;
|
||
}
|
||
const isStale = this.#isStale(index2);
|
||
if (!forceRefresh && !isStale) {
|
||
if (status)
|
||
status.fetch = "hit";
|
||
this.#moveToTail(index2);
|
||
if (updateAgeOnGet) {
|
||
this.#updateItemAge(index2);
|
||
}
|
||
if (status)
|
||
this.#statusTTL(status, index2);
|
||
return v;
|
||
}
|
||
const p = this.#backgroundFetch(k, index2, options, context);
|
||
const hasStale = p.__staleWhileFetching !== void 0;
|
||
const staleVal = hasStale && allowStale;
|
||
if (status) {
|
||
status.fetch = isStale ? "stale" : "refresh";
|
||
if (staleVal && isStale)
|
||
status.returnedStale = true;
|
||
}
|
||
return staleVal ? p.__staleWhileFetching : p.__returned = p;
|
||
}
|
||
}
|
||
/**
|
||
* Return a value from the cache. Will update the recency of the cache
|
||
* entry found.
|
||
*
|
||
* If the key is not found, get() will return `undefined`.
|
||
*/
|
||
get(k, getOptions = {}) {
|
||
const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status } = getOptions;
|
||
const index2 = this.#keyMap.get(k);
|
||
if (index2 !== void 0) {
|
||
const value2 = this.#valList[index2];
|
||
const fetching = this.#isBackgroundFetch(value2);
|
||
if (status)
|
||
this.#statusTTL(status, index2);
|
||
if (this.#isStale(index2)) {
|
||
if (status)
|
||
status.get = "stale";
|
||
if (!fetching) {
|
||
if (!noDeleteOnStaleGet) {
|
||
this.delete(k);
|
||
}
|
||
if (status && allowStale)
|
||
status.returnedStale = true;
|
||
return allowStale ? value2 : void 0;
|
||
} else {
|
||
if (status && allowStale && value2.__staleWhileFetching !== void 0) {
|
||
status.returnedStale = true;
|
||
}
|
||
return allowStale ? value2.__staleWhileFetching : void 0;
|
||
}
|
||
} else {
|
||
if (status)
|
||
status.get = "hit";
|
||
if (fetching) {
|
||
return value2.__staleWhileFetching;
|
||
}
|
||
this.#moveToTail(index2);
|
||
if (updateAgeOnGet) {
|
||
this.#updateItemAge(index2);
|
||
}
|
||
return value2;
|
||
}
|
||
} else if (status) {
|
||
status.get = "miss";
|
||
}
|
||
}
|
||
#connect(p, n) {
|
||
this.#prev[n] = p;
|
||
this.#next[p] = n;
|
||
}
|
||
#moveToTail(index2) {
|
||
if (index2 !== this.#tail) {
|
||
if (index2 === this.#head) {
|
||
this.#head = this.#next[index2];
|
||
} else {
|
||
this.#connect(this.#prev[index2], this.#next[index2]);
|
||
}
|
||
this.#connect(this.#tail, index2);
|
||
this.#tail = index2;
|
||
}
|
||
}
|
||
/**
|
||
* Deletes a key out of the cache.
|
||
* Returns true if the key was deleted, false otherwise.
|
||
*/
|
||
delete(k) {
|
||
let deleted = false;
|
||
if (this.#size !== 0) {
|
||
const index2 = this.#keyMap.get(k);
|
||
if (index2 !== void 0) {
|
||
deleted = true;
|
||
if (this.#size === 1) {
|
||
this.clear();
|
||
} else {
|
||
this.#removeItemSize(index2);
|
||
const v = this.#valList[index2];
|
||
if (this.#isBackgroundFetch(v)) {
|
||
v.__abortController.abort(new Error("deleted"));
|
||
} else if (this.#hasDispose || this.#hasDisposeAfter) {
|
||
if (this.#hasDispose) {
|
||
this.#dispose?.(v, k, "delete");
|
||
}
|
||
if (this.#hasDisposeAfter) {
|
||
this.#disposed?.push([v, k, "delete"]);
|
||
}
|
||
}
|
||
this.#keyMap.delete(k);
|
||
this.#keyList[index2] = void 0;
|
||
this.#valList[index2] = void 0;
|
||
if (index2 === this.#tail) {
|
||
this.#tail = this.#prev[index2];
|
||
} else if (index2 === this.#head) {
|
||
this.#head = this.#next[index2];
|
||
} else {
|
||
const pi = this.#prev[index2];
|
||
this.#next[pi] = this.#next[index2];
|
||
const ni = this.#next[index2];
|
||
this.#prev[ni] = this.#prev[index2];
|
||
}
|
||
this.#size--;
|
||
this.#free.push(index2);
|
||
}
|
||
}
|
||
}
|
||
if (this.#hasDisposeAfter && this.#disposed?.length) {
|
||
const dt = this.#disposed;
|
||
let task;
|
||
while (task = dt?.shift()) {
|
||
this.#disposeAfter?.(...task);
|
||
}
|
||
}
|
||
return deleted;
|
||
}
|
||
/**
|
||
* Clear the cache entirely, throwing away all values.
|
||
*/
|
||
clear() {
|
||
for (const index2 of this.#rindexes({ allowStale: true })) {
|
||
const v = this.#valList[index2];
|
||
if (this.#isBackgroundFetch(v)) {
|
||
v.__abortController.abort(new Error("deleted"));
|
||
} else {
|
||
const k = this.#keyList[index2];
|
||
if (this.#hasDispose) {
|
||
this.#dispose?.(v, k, "delete");
|
||
}
|
||
if (this.#hasDisposeAfter) {
|
||
this.#disposed?.push([v, k, "delete"]);
|
||
}
|
||
}
|
||
}
|
||
this.#keyMap.clear();
|
||
this.#valList.fill(void 0);
|
||
this.#keyList.fill(void 0);
|
||
if (this.#ttls && this.#starts) {
|
||
this.#ttls.fill(0);
|
||
this.#starts.fill(0);
|
||
}
|
||
if (this.#sizes) {
|
||
this.#sizes.fill(0);
|
||
}
|
||
this.#head = 0;
|
||
this.#tail = 0;
|
||
this.#free.length = 0;
|
||
this.#calculatedSize = 0;
|
||
this.#size = 0;
|
||
if (this.#hasDisposeAfter && this.#disposed) {
|
||
const dt = this.#disposed;
|
||
let task;
|
||
while (task = dt?.shift()) {
|
||
this.#disposeAfter?.(...task);
|
||
}
|
||
}
|
||
}
|
||
};
|
||
|
||
// node_modules/path-scurry/dist/mjs/index.js
|
||
var import_path = require("path");
|
||
var import_url = require("url");
|
||
var actualFS = __toESM(require("fs"), 1);
|
||
var import_fs = require("fs");
|
||
var import_promises = require("fs/promises");
|
||
|
||
// node_modules/minipass/dist/esm/index.js
|
||
var import_events = require("events");
|
||
var import_stream = __toESM(require("stream"), 1);
|
||
var import_string_decoder = require("string_decoder");
|
||
var proc = typeof process === "object" && process ? process : {
|
||
stdout: null,
|
||
stderr: null
|
||
};
|
||
var isStream = (s) => !!s && typeof s === "object" && (s instanceof Minipass || s instanceof import_stream.default || isReadable(s) || isWritable(s));
|
||
var isReadable = (s) => !!s && typeof s === "object" && s instanceof import_events.EventEmitter && typeof s.pipe === "function" && // node core Writable streams have a pipe() method, but it throws
|
||
s.pipe !== import_stream.default.Writable.prototype.pipe;
|
||
var isWritable = (s) => !!s && typeof s === "object" && s instanceof import_events.EventEmitter && typeof s.write === "function" && typeof s.end === "function";
|
||
var EOF = Symbol("EOF");
|
||
var MAYBE_EMIT_END = Symbol("maybeEmitEnd");
|
||
var EMITTED_END = Symbol("emittedEnd");
|
||
var EMITTING_END = Symbol("emittingEnd");
|
||
var EMITTED_ERROR = Symbol("emittedError");
|
||
var CLOSED = Symbol("closed");
|
||
var READ = Symbol("read");
|
||
var FLUSH = Symbol("flush");
|
||
var FLUSHCHUNK = Symbol("flushChunk");
|
||
var ENCODING = Symbol("encoding");
|
||
var DECODER = Symbol("decoder");
|
||
var FLOWING = Symbol("flowing");
|
||
var PAUSED = Symbol("paused");
|
||
var RESUME = Symbol("resume");
|
||
var BUFFER = Symbol("buffer");
|
||
var PIPES = Symbol("pipes");
|
||
var BUFFERLENGTH = Symbol("bufferLength");
|
||
var BUFFERPUSH = Symbol("bufferPush");
|
||
var BUFFERSHIFT = Symbol("bufferShift");
|
||
var OBJECTMODE = Symbol("objectMode");
|
||
var DESTROYED = Symbol("destroyed");
|
||
var ERROR = Symbol("error");
|
||
var EMITDATA = Symbol("emitData");
|
||
var EMITEND = Symbol("emitEnd");
|
||
var EMITEND2 = Symbol("emitEnd2");
|
||
var ASYNC = Symbol("async");
|
||
var ABORT = Symbol("abort");
|
||
var ABORTED = Symbol("aborted");
|
||
var SIGNAL = Symbol("signal");
|
||
var DATALISTENERS = Symbol("dataListeners");
|
||
var DISCARDED = Symbol("discarded");
|
||
var defer = (fn) => Promise.resolve().then(fn);
|
||
var nodefer = (fn) => fn();
|
||
var isEndish = (ev) => ev === "end" || ev === "finish" || ev === "prefinish";
|
||
var isArrayBufferLike = (b) => b instanceof ArrayBuffer || !!b && typeof b === "object" && b.constructor && b.constructor.name === "ArrayBuffer" && b.byteLength >= 0;
|
||
var isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
|
||
var Pipe = class {
|
||
src;
|
||
dest;
|
||
opts;
|
||
ondrain;
|
||
constructor(src, dest, opts) {
|
||
this.src = src;
|
||
this.dest = dest;
|
||
this.opts = opts;
|
||
this.ondrain = () => src[RESUME]();
|
||
this.dest.on("drain", this.ondrain);
|
||
}
|
||
unpipe() {
|
||
this.dest.removeListener("drain", this.ondrain);
|
||
}
|
||
// only here for the prototype
|
||
/* c8 ignore start */
|
||
proxyErrors(_er) {
|
||
}
|
||
/* c8 ignore stop */
|
||
end() {
|
||
this.unpipe();
|
||
if (this.opts.end)
|
||
this.dest.end();
|
||
}
|
||
};
|
||
var PipeProxyErrors = class extends Pipe {
|
||
unpipe() {
|
||
this.src.removeListener("error", this.proxyErrors);
|
||
super.unpipe();
|
||
}
|
||
constructor(src, dest, opts) {
|
||
super(src, dest, opts);
|
||
this.proxyErrors = (er) => dest.emit("error", er);
|
||
src.on("error", this.proxyErrors);
|
||
}
|
||
};
|
||
var isObjectModeOptions = (o) => !!o.objectMode;
|
||
var isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== "buffer";
|
||
var Minipass = class extends import_events.EventEmitter {
|
||
[FLOWING] = false;
|
||
[PAUSED] = false;
|
||
[PIPES] = [];
|
||
[BUFFER] = [];
|
||
[OBJECTMODE];
|
||
[ENCODING];
|
||
[ASYNC];
|
||
[DECODER];
|
||
[EOF] = false;
|
||
[EMITTED_END] = false;
|
||
[EMITTING_END] = false;
|
||
[CLOSED] = false;
|
||
[EMITTED_ERROR] = null;
|
||
[BUFFERLENGTH] = 0;
|
||
[DESTROYED] = false;
|
||
[SIGNAL];
|
||
[ABORTED] = false;
|
||
[DATALISTENERS] = 0;
|
||
[DISCARDED] = false;
|
||
/**
|
||
* true if the stream can be written
|
||
*/
|
||
writable = true;
|
||
/**
|
||
* true if the stream can be read
|
||
*/
|
||
readable = true;
|
||
/**
|
||
* If `RType` is Buffer, then options do not need to be provided.
|
||
* Otherwise, an options object must be provided to specify either
|
||
* {@link Minipass.SharedOptions.objectMode} or
|
||
* {@link Minipass.SharedOptions.encoding}, as appropriate.
|
||
*/
|
||
constructor(...args) {
|
||
const options = args[0] || {};
|
||
super();
|
||
if (options.objectMode && typeof options.encoding === "string") {
|
||
throw new TypeError("Encoding and objectMode may not be used together");
|
||
}
|
||
if (isObjectModeOptions(options)) {
|
||
this[OBJECTMODE] = true;
|
||
this[ENCODING] = null;
|
||
} else if (isEncodingOptions(options)) {
|
||
this[ENCODING] = options.encoding;
|
||
this[OBJECTMODE] = false;
|
||
} else {
|
||
this[OBJECTMODE] = false;
|
||
this[ENCODING] = null;
|
||
}
|
||
this[ASYNC] = !!options.async;
|
||
this[DECODER] = this[ENCODING] ? new import_string_decoder.StringDecoder(this[ENCODING]) : null;
|
||
if (options && options.debugExposeBuffer === true) {
|
||
Object.defineProperty(this, "buffer", { get: () => this[BUFFER] });
|
||
}
|
||
if (options && options.debugExposePipes === true) {
|
||
Object.defineProperty(this, "pipes", { get: () => this[PIPES] });
|
||
}
|
||
const { signal } = options;
|
||
if (signal) {
|
||
this[SIGNAL] = signal;
|
||
if (signal.aborted) {
|
||
this[ABORT]();
|
||
} else {
|
||
signal.addEventListener("abort", () => this[ABORT]());
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* The amount of data stored in the buffer waiting to be read.
|
||
*
|
||
* For Buffer strings, this will be the total byte length.
|
||
* For string encoding streams, this will be the string character length,
|
||
* according to JavaScript's `string.length` logic.
|
||
* For objectMode streams, this is a count of the items waiting to be
|
||
* emitted.
|
||
*/
|
||
get bufferLength() {
|
||
return this[BUFFERLENGTH];
|
||
}
|
||
/**
|
||
* The `BufferEncoding` currently in use, or `null`
|
||
*/
|
||
get encoding() {
|
||
return this[ENCODING];
|
||
}
|
||
/**
|
||
* @deprecated - This is a read only property
|
||
*/
|
||
set encoding(_enc) {
|
||
throw new Error("Encoding must be set at instantiation time");
|
||
}
|
||
/**
|
||
* @deprecated - Encoding may only be set at instantiation time
|
||
*/
|
||
setEncoding(_enc) {
|
||
throw new Error("Encoding must be set at instantiation time");
|
||
}
|
||
/**
|
||
* True if this is an objectMode stream
|
||
*/
|
||
get objectMode() {
|
||
return this[OBJECTMODE];
|
||
}
|
||
/**
|
||
* @deprecated - This is a read-only property
|
||
*/
|
||
set objectMode(_om) {
|
||
throw new Error("objectMode must be set at instantiation time");
|
||
}
|
||
/**
|
||
* true if this is an async stream
|
||
*/
|
||
get ["async"]() {
|
||
return this[ASYNC];
|
||
}
|
||
/**
|
||
* Set to true to make this stream async.
|
||
*
|
||
* Once set, it cannot be unset, as this would potentially cause incorrect
|
||
* behavior. Ie, a sync stream can be made async, but an async stream
|
||
* cannot be safely made sync.
|
||
*/
|
||
set ["async"](a) {
|
||
this[ASYNC] = this[ASYNC] || !!a;
|
||
}
|
||
// drop everything and get out of the flow completely
|
||
[ABORT]() {
|
||
this[ABORTED] = true;
|
||
this.emit("abort", this[SIGNAL]?.reason);
|
||
this.destroy(this[SIGNAL]?.reason);
|
||
}
|
||
/**
|
||
* True if the stream has been aborted.
|
||
*/
|
||
get aborted() {
|
||
return this[ABORTED];
|
||
}
|
||
/**
|
||
* No-op setter. Stream aborted status is set via the AbortSignal provided
|
||
* in the constructor options.
|
||
*/
|
||
set aborted(_) {
|
||
}
|
||
write(chunk, encoding, cb) {
|
||
if (this[ABORTED])
|
||
return false;
|
||
if (this[EOF])
|
||
throw new Error("write after end");
|
||
if (this[DESTROYED]) {
|
||
this.emit("error", Object.assign(new Error("Cannot call write after a stream was destroyed"), { code: "ERR_STREAM_DESTROYED" }));
|
||
return true;
|
||
}
|
||
if (typeof encoding === "function") {
|
||
cb = encoding;
|
||
encoding = "utf8";
|
||
}
|
||
if (!encoding)
|
||
encoding = "utf8";
|
||
const fn = this[ASYNC] ? defer : nodefer;
|
||
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
|
||
if (isArrayBufferView(chunk)) {
|
||
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
|
||
} else if (isArrayBufferLike(chunk)) {
|
||
chunk = Buffer.from(chunk);
|
||
} else if (typeof chunk !== "string") {
|
||
throw new Error("Non-contiguous data written to non-objectMode stream");
|
||
}
|
||
}
|
||
if (this[OBJECTMODE]) {
|
||
if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
|
||
this[FLUSH](true);
|
||
if (this[FLOWING])
|
||
this.emit("data", chunk);
|
||
else
|
||
this[BUFFERPUSH](chunk);
|
||
if (this[BUFFERLENGTH] !== 0)
|
||
this.emit("readable");
|
||
if (cb)
|
||
fn(cb);
|
||
return this[FLOWING];
|
||
}
|
||
if (!chunk.length) {
|
||
if (this[BUFFERLENGTH] !== 0)
|
||
this.emit("readable");
|
||
if (cb)
|
||
fn(cb);
|
||
return this[FLOWING];
|
||
}
|
||
if (typeof chunk === "string" && // unless it is a string already ready for us to use
|
||
!(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
|
||
chunk = Buffer.from(chunk, encoding);
|
||
}
|
||
if (Buffer.isBuffer(chunk) && this[ENCODING]) {
|
||
chunk = this[DECODER].write(chunk);
|
||
}
|
||
if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
|
||
this[FLUSH](true);
|
||
if (this[FLOWING])
|
||
this.emit("data", chunk);
|
||
else
|
||
this[BUFFERPUSH](chunk);
|
||
if (this[BUFFERLENGTH] !== 0)
|
||
this.emit("readable");
|
||
if (cb)
|
||
fn(cb);
|
||
return this[FLOWING];
|
||
}
|
||
/**
|
||
* Low-level explicit read method.
|
||
*
|
||
* In objectMode, the argument is ignored, and one item is returned if
|
||
* available.
|
||
*
|
||
* `n` is the number of bytes (or in the case of encoding streams,
|
||
* characters) to consume. If `n` is not provided, then the entire buffer
|
||
* is returned, or `null` is returned if no data is available.
|
||
*
|
||
* If `n` is greater that the amount of data in the internal buffer,
|
||
* then `null` is returned.
|
||
*/
|
||
read(n) {
|
||
if (this[DESTROYED])
|
||
return null;
|
||
this[DISCARDED] = false;
|
||
if (this[BUFFERLENGTH] === 0 || n === 0 || n && n > this[BUFFERLENGTH]) {
|
||
this[MAYBE_EMIT_END]();
|
||
return null;
|
||
}
|
||
if (this[OBJECTMODE])
|
||
n = null;
|
||
if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
|
||
this[BUFFER] = [
|
||
this[ENCODING] ? this[BUFFER].join("") : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])
|
||
];
|
||
}
|
||
const ret = this[READ](n || null, this[BUFFER][0]);
|
||
this[MAYBE_EMIT_END]();
|
||
return ret;
|
||
}
|
||
[READ](n, chunk) {
|
||
if (this[OBJECTMODE])
|
||
this[BUFFERSHIFT]();
|
||
else {
|
||
const c = chunk;
|
||
if (n === c.length || n === null)
|
||
this[BUFFERSHIFT]();
|
||
else if (typeof c === "string") {
|
||
this[BUFFER][0] = c.slice(n);
|
||
chunk = c.slice(0, n);
|
||
this[BUFFERLENGTH] -= n;
|
||
} else {
|
||
this[BUFFER][0] = c.subarray(n);
|
||
chunk = c.subarray(0, n);
|
||
this[BUFFERLENGTH] -= n;
|
||
}
|
||
}
|
||
this.emit("data", chunk);
|
||
if (!this[BUFFER].length && !this[EOF])
|
||
this.emit("drain");
|
||
return chunk;
|
||
}
|
||
end(chunk, encoding, cb) {
|
||
if (typeof chunk === "function") {
|
||
cb = chunk;
|
||
chunk = void 0;
|
||
}
|
||
if (typeof encoding === "function") {
|
||
cb = encoding;
|
||
encoding = "utf8";
|
||
}
|
||
if (chunk !== void 0)
|
||
this.write(chunk, encoding);
|
||
if (cb)
|
||
this.once("end", cb);
|
||
this[EOF] = true;
|
||
this.writable = false;
|
||
if (this[FLOWING] || !this[PAUSED])
|
||
this[MAYBE_EMIT_END]();
|
||
return this;
|
||
}
|
||
// don't let the internal resume be overwritten
|
||
[RESUME]() {
|
||
if (this[DESTROYED])
|
||
return;
|
||
if (!this[DATALISTENERS] && !this[PIPES].length) {
|
||
this[DISCARDED] = true;
|
||
}
|
||
this[PAUSED] = false;
|
||
this[FLOWING] = true;
|
||
this.emit("resume");
|
||
if (this[BUFFER].length)
|
||
this[FLUSH]();
|
||
else if (this[EOF])
|
||
this[MAYBE_EMIT_END]();
|
||
else
|
||
this.emit("drain");
|
||
}
|
||
/**
|
||
* Resume the stream if it is currently in a paused state
|
||
*
|
||
* If called when there are no pipe destinations or `data` event listeners,
|
||
* this will place the stream in a "discarded" state, where all data will
|
||
* be thrown away. The discarded state is removed if a pipe destination or
|
||
* data handler is added, if pause() is called, or if any synchronous or
|
||
* asynchronous iteration is started.
|
||
*/
|
||
resume() {
|
||
return this[RESUME]();
|
||
}
|
||
/**
|
||
* Pause the stream
|
||
*/
|
||
pause() {
|
||
this[FLOWING] = false;
|
||
this[PAUSED] = true;
|
||
this[DISCARDED] = false;
|
||
}
|
||
/**
|
||
* true if the stream has been forcibly destroyed
|
||
*/
|
||
get destroyed() {
|
||
return this[DESTROYED];
|
||
}
|
||
/**
|
||
* true if the stream is currently in a flowing state, meaning that
|
||
* any writes will be immediately emitted.
|
||
*/
|
||
get flowing() {
|
||
return this[FLOWING];
|
||
}
|
||
/**
|
||
* true if the stream is currently in a paused state
|
||
*/
|
||
get paused() {
|
||
return this[PAUSED];
|
||
}
|
||
[BUFFERPUSH](chunk) {
|
||
if (this[OBJECTMODE])
|
||
this[BUFFERLENGTH] += 1;
|
||
else
|
||
this[BUFFERLENGTH] += chunk.length;
|
||
this[BUFFER].push(chunk);
|
||
}
|
||
[BUFFERSHIFT]() {
|
||
if (this[OBJECTMODE])
|
||
this[BUFFERLENGTH] -= 1;
|
||
else
|
||
this[BUFFERLENGTH] -= this[BUFFER][0].length;
|
||
return this[BUFFER].shift();
|
||
}
|
||
[FLUSH](noDrain = false) {
|
||
do {
|
||
} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length);
|
||
if (!noDrain && !this[BUFFER].length && !this[EOF])
|
||
this.emit("drain");
|
||
}
|
||
[FLUSHCHUNK](chunk) {
|
||
this.emit("data", chunk);
|
||
return this[FLOWING];
|
||
}
|
||
/**
|
||
* Pipe all data emitted by this stream into the destination provided.
|
||
*
|
||
* Triggers the flow of data.
|
||
*/
|
||
pipe(dest, opts) {
|
||
if (this[DESTROYED])
|
||
return dest;
|
||
this[DISCARDED] = false;
|
||
const ended = this[EMITTED_END];
|
||
opts = opts || {};
|
||
if (dest === proc.stdout || dest === proc.stderr)
|
||
opts.end = false;
|
||
else
|
||
opts.end = opts.end !== false;
|
||
opts.proxyErrors = !!opts.proxyErrors;
|
||
if (ended) {
|
||
if (opts.end)
|
||
dest.end();
|
||
} else {
|
||
this[PIPES].push(!opts.proxyErrors ? new Pipe(this, dest, opts) : new PipeProxyErrors(this, dest, opts));
|
||
if (this[ASYNC])
|
||
defer(() => this[RESUME]());
|
||
else
|
||
this[RESUME]();
|
||
}
|
||
return dest;
|
||
}
|
||
/**
|
||
* Fully unhook a piped destination stream.
|
||
*
|
||
* If the destination stream was the only consumer of this stream (ie,
|
||
* there are no other piped destinations or `'data'` event listeners)
|
||
* then the flow of data will stop until there is another consumer or
|
||
* {@link Minipass#resume} is explicitly called.
|
||
*/
|
||
unpipe(dest) {
|
||
const p = this[PIPES].find((p2) => p2.dest === dest);
|
||
if (p) {
|
||
if (this[PIPES].length === 1) {
|
||
if (this[FLOWING] && this[DATALISTENERS] === 0) {
|
||
this[FLOWING] = false;
|
||
}
|
||
this[PIPES] = [];
|
||
} else
|
||
this[PIPES].splice(this[PIPES].indexOf(p), 1);
|
||
p.unpipe();
|
||
}
|
||
}
|
||
/**
|
||
* Alias for {@link Minipass#on}
|
||
*/
|
||
addListener(ev, handler2) {
|
||
return this.on(ev, handler2);
|
||
}
|
||
/**
|
||
* Mostly identical to `EventEmitter.on`, with the following
|
||
* behavior differences to prevent data loss and unnecessary hangs:
|
||
*
|
||
* - Adding a 'data' event handler will trigger the flow of data
|
||
*
|
||
* - Adding a 'readable' event handler when there is data waiting to be read
|
||
* will cause 'readable' to be emitted immediately.
|
||
*
|
||
* - Adding an 'endish' event handler ('end', 'finish', etc.) which has
|
||
* already passed will cause the event to be emitted immediately and all
|
||
* handlers removed.
|
||
*
|
||
* - Adding an 'error' event handler after an error has been emitted will
|
||
* cause the event to be re-emitted immediately with the error previously
|
||
* raised.
|
||
*/
|
||
on(ev, handler2) {
|
||
const ret = super.on(ev, handler2);
|
||
if (ev === "data") {
|
||
this[DISCARDED] = false;
|
||
this[DATALISTENERS]++;
|
||
if (!this[PIPES].length && !this[FLOWING]) {
|
||
this[RESUME]();
|
||
}
|
||
} else if (ev === "readable" && this[BUFFERLENGTH] !== 0) {
|
||
super.emit("readable");
|
||
} else if (isEndish(ev) && this[EMITTED_END]) {
|
||
super.emit(ev);
|
||
this.removeAllListeners(ev);
|
||
} else if (ev === "error" && this[EMITTED_ERROR]) {
|
||
const h = handler2;
|
||
if (this[ASYNC])
|
||
defer(() => h.call(this, this[EMITTED_ERROR]));
|
||
else
|
||
h.call(this, this[EMITTED_ERROR]);
|
||
}
|
||
return ret;
|
||
}
|
||
/**
|
||
* Alias for {@link Minipass#off}
|
||
*/
|
||
removeListener(ev, handler2) {
|
||
return this.off(ev, handler2);
|
||
}
|
||
/**
|
||
* Mostly identical to `EventEmitter.off`
|
||
*
|
||
* If a 'data' event handler is removed, and it was the last consumer
|
||
* (ie, there are no pipe destinations or other 'data' event listeners),
|
||
* then the flow of data will stop until there is another consumer or
|
||
* {@link Minipass#resume} is explicitly called.
|
||
*/
|
||
off(ev, handler2) {
|
||
const ret = super.off(ev, handler2);
|
||
if (ev === "data") {
|
||
this[DATALISTENERS] = this.listeners("data").length;
|
||
if (this[DATALISTENERS] === 0 && !this[DISCARDED] && !this[PIPES].length) {
|
||
this[FLOWING] = false;
|
||
}
|
||
}
|
||
return ret;
|
||
}
|
||
/**
|
||
* Mostly identical to `EventEmitter.removeAllListeners`
|
||
*
|
||
* If all 'data' event handlers are removed, and they were the last consumer
|
||
* (ie, there are no pipe destinations), then the flow of data will stop
|
||
* until there is another consumer or {@link Minipass#resume} is explicitly
|
||
* called.
|
||
*/
|
||
removeAllListeners(ev) {
|
||
const ret = super.removeAllListeners(ev);
|
||
if (ev === "data" || ev === void 0) {
|
||
this[DATALISTENERS] = 0;
|
||
if (!this[DISCARDED] && !this[PIPES].length) {
|
||
this[FLOWING] = false;
|
||
}
|
||
}
|
||
return ret;
|
||
}
|
||
/**
|
||
* true if the 'end' event has been emitted
|
||
*/
|
||
get emittedEnd() {
|
||
return this[EMITTED_END];
|
||
}
|
||
[MAYBE_EMIT_END]() {
|
||
if (!this[EMITTING_END] && !this[EMITTED_END] && !this[DESTROYED] && this[BUFFER].length === 0 && this[EOF]) {
|
||
this[EMITTING_END] = true;
|
||
this.emit("end");
|
||
this.emit("prefinish");
|
||
this.emit("finish");
|
||
if (this[CLOSED])
|
||
this.emit("close");
|
||
this[EMITTING_END] = false;
|
||
}
|
||
}
|
||
/**
|
||
* Mostly identical to `EventEmitter.emit`, with the following
|
||
* behavior differences to prevent data loss and unnecessary hangs:
|
||
*
|
||
* If the stream has been destroyed, and the event is something other
|
||
* than 'close' or 'error', then `false` is returned and no handlers
|
||
* are called.
|
||
*
|
||
* If the event is 'end', and has already been emitted, then the event
|
||
* is ignored. If the stream is in a paused or non-flowing state, then
|
||
* the event will be deferred until data flow resumes. If the stream is
|
||
* async, then handlers will be called on the next tick rather than
|
||
* immediately.
|
||
*
|
||
* If the event is 'close', and 'end' has not yet been emitted, then
|
||
* the event will be deferred until after 'end' is emitted.
|
||
*
|
||
* If the event is 'error', and an AbortSignal was provided for the stream,
|
||
* and there are no listeners, then the event is ignored, matching the
|
||
* behavior of node core streams in the presense of an AbortSignal.
|
||
*
|
||
* If the event is 'finish' or 'prefinish', then all listeners will be
|
||
* removed after emitting the event, to prevent double-firing.
|
||
*/
|
||
emit(ev, ...args) {
|
||
const data = args[0];
|
||
if (ev !== "error" && ev !== "close" && ev !== DESTROYED && this[DESTROYED]) {
|
||
return false;
|
||
} else if (ev === "data") {
|
||
return !this[OBJECTMODE] && !data ? false : this[ASYNC] ? (defer(() => this[EMITDATA](data)), true) : this[EMITDATA](data);
|
||
} else if (ev === "end") {
|
||
return this[EMITEND]();
|
||
} else if (ev === "close") {
|
||
this[CLOSED] = true;
|
||
if (!this[EMITTED_END] && !this[DESTROYED])
|
||
return false;
|
||
const ret2 = super.emit("close");
|
||
this.removeAllListeners("close");
|
||
return ret2;
|
||
} else if (ev === "error") {
|
||
this[EMITTED_ERROR] = data;
|
||
super.emit(ERROR, data);
|
||
const ret2 = !this[SIGNAL] || this.listeners("error").length ? super.emit("error", data) : false;
|
||
this[MAYBE_EMIT_END]();
|
||
return ret2;
|
||
} else if (ev === "resume") {
|
||
const ret2 = super.emit("resume");
|
||
this[MAYBE_EMIT_END]();
|
||
return ret2;
|
||
} else if (ev === "finish" || ev === "prefinish") {
|
||
const ret2 = super.emit(ev);
|
||
this.removeAllListeners(ev);
|
||
return ret2;
|
||
}
|
||
const ret = super.emit(ev, ...args);
|
||
this[MAYBE_EMIT_END]();
|
||
return ret;
|
||
}
|
||
[EMITDATA](data) {
|
||
for (const p of this[PIPES]) {
|
||
if (p.dest.write(data) === false)
|
||
this.pause();
|
||
}
|
||
const ret = this[DISCARDED] ? false : super.emit("data", data);
|
||
this[MAYBE_EMIT_END]();
|
||
return ret;
|
||
}
|
||
[EMITEND]() {
|
||
if (this[EMITTED_END])
|
||
return false;
|
||
this[EMITTED_END] = true;
|
||
this.readable = false;
|
||
return this[ASYNC] ? (defer(() => this[EMITEND2]()), true) : this[EMITEND2]();
|
||
}
|
||
[EMITEND2]() {
|
||
if (this[DECODER]) {
|
||
const data = this[DECODER].end();
|
||
if (data) {
|
||
for (const p of this[PIPES]) {
|
||
p.dest.write(data);
|
||
}
|
||
if (!this[DISCARDED])
|
||
super.emit("data", data);
|
||
}
|
||
}
|
||
for (const p of this[PIPES]) {
|
||
p.end();
|
||
}
|
||
const ret = super.emit("end");
|
||
this.removeAllListeners("end");
|
||
return ret;
|
||
}
|
||
/**
|
||
* Return a Promise that resolves to an array of all emitted data once
|
||
* the stream ends.
|
||
*/
|
||
async collect() {
|
||
const buf = Object.assign([], {
|
||
dataLength: 0
|
||
});
|
||
if (!this[OBJECTMODE])
|
||
buf.dataLength = 0;
|
||
const p = this.promise();
|
||
this.on("data", (c) => {
|
||
buf.push(c);
|
||
if (!this[OBJECTMODE])
|
||
buf.dataLength += c.length;
|
||
});
|
||
await p;
|
||
return buf;
|
||
}
|
||
/**
|
||
* Return a Promise that resolves to the concatenation of all emitted data
|
||
* once the stream ends.
|
||
*
|
||
* Not allowed on objectMode streams.
|
||
*/
|
||
async concat() {
|
||
if (this[OBJECTMODE]) {
|
||
throw new Error("cannot concat in objectMode");
|
||
}
|
||
const buf = await this.collect();
|
||
return this[ENCODING] ? buf.join("") : Buffer.concat(buf, buf.dataLength);
|
||
}
|
||
/**
|
||
* Return a void Promise that resolves once the stream ends.
|
||
*/
|
||
async promise() {
|
||
return new Promise((resolve, reject) => {
|
||
this.on(DESTROYED, () => reject(new Error("stream destroyed")));
|
||
this.on("error", (er) => reject(er));
|
||
this.on("end", () => resolve());
|
||
});
|
||
}
|
||
/**
|
||
* Asynchronous `for await of` iteration.
|
||
*
|
||
* This will continue emitting all chunks until the stream terminates.
|
||
*/
|
||
[Symbol.asyncIterator]() {
|
||
this[DISCARDED] = false;
|
||
let stopped = false;
|
||
const stop = async () => {
|
||
this.pause();
|
||
stopped = true;
|
||
return { value: void 0, done: true };
|
||
};
|
||
const next = () => {
|
||
if (stopped)
|
||
return stop();
|
||
const res = this.read();
|
||
if (res !== null)
|
||
return Promise.resolve({ done: false, value: res });
|
||
if (this[EOF])
|
||
return stop();
|
||
let resolve;
|
||
let reject;
|
||
const onerr = (er) => {
|
||
this.off("data", ondata);
|
||
this.off("end", onend);
|
||
this.off(DESTROYED, ondestroy);
|
||
stop();
|
||
reject(er);
|
||
};
|
||
const ondata = (value2) => {
|
||
this.off("error", onerr);
|
||
this.off("end", onend);
|
||
this.off(DESTROYED, ondestroy);
|
||
this.pause();
|
||
resolve({ value: value2, done: !!this[EOF] });
|
||
};
|
||
const onend = () => {
|
||
this.off("error", onerr);
|
||
this.off("data", ondata);
|
||
this.off(DESTROYED, ondestroy);
|
||
stop();
|
||
resolve({ done: true, value: void 0 });
|
||
};
|
||
const ondestroy = () => onerr(new Error("stream destroyed"));
|
||
return new Promise((res2, rej) => {
|
||
reject = rej;
|
||
resolve = res2;
|
||
this.once(DESTROYED, ondestroy);
|
||
this.once("error", onerr);
|
||
this.once("end", onend);
|
||
this.once("data", ondata);
|
||
});
|
||
};
|
||
return {
|
||
next,
|
||
throw: stop,
|
||
return: stop,
|
||
[Symbol.asyncIterator]() {
|
||
return this;
|
||
}
|
||
};
|
||
}
|
||
/**
|
||
* Synchronous `for of` iteration.
|
||
*
|
||
* The iteration will terminate when the internal buffer runs out, even
|
||
* if the stream has not yet terminated.
|
||
*/
|
||
[Symbol.iterator]() {
|
||
this[DISCARDED] = false;
|
||
let stopped = false;
|
||
const stop = () => {
|
||
this.pause();
|
||
this.off(ERROR, stop);
|
||
this.off(DESTROYED, stop);
|
||
this.off("end", stop);
|
||
stopped = true;
|
||
return { done: true, value: void 0 };
|
||
};
|
||
const next = () => {
|
||
if (stopped)
|
||
return stop();
|
||
const value2 = this.read();
|
||
return value2 === null ? stop() : { done: false, value: value2 };
|
||
};
|
||
this.once("end", stop);
|
||
this.once(ERROR, stop);
|
||
this.once(DESTROYED, stop);
|
||
return {
|
||
next,
|
||
throw: stop,
|
||
return: stop,
|
||
[Symbol.iterator]() {
|
||
return this;
|
||
}
|
||
};
|
||
}
|
||
/**
|
||
* Destroy a stream, preventing it from being used for any further purpose.
|
||
*
|
||
* If the stream has a `close()` method, then it will be called on
|
||
* destruction.
|
||
*
|
||
* After destruction, any attempt to write data, read data, or emit most
|
||
* events will be ignored.
|
||
*
|
||
* If an error argument is provided, then it will be emitted in an
|
||
* 'error' event.
|
||
*/
|
||
destroy(er) {
|
||
if (this[DESTROYED]) {
|
||
if (er)
|
||
this.emit("error", er);
|
||
else
|
||
this.emit(DESTROYED);
|
||
return this;
|
||
}
|
||
this[DESTROYED] = true;
|
||
this[DISCARDED] = true;
|
||
this[BUFFER].length = 0;
|
||
this[BUFFERLENGTH] = 0;
|
||
const wc = this;
|
||
if (typeof wc.close === "function" && !this[CLOSED])
|
||
wc.close();
|
||
if (er)
|
||
this.emit("error", er);
|
||
else
|
||
this.emit(DESTROYED);
|
||
return this;
|
||
}
|
||
/**
|
||
* Alias for {@link isStream}
|
||
*
|
||
* Former export location, maintained for backwards compatibility.
|
||
*
|
||
* @deprecated
|
||
*/
|
||
static get isStream() {
|
||
return isStream;
|
||
}
|
||
};
|
||
|
||
// node_modules/path-scurry/dist/mjs/index.js
|
||
var realpathSync = import_fs.realpathSync.native;
|
||
var defaultFS = {
|
||
lstatSync: import_fs.lstatSync,
|
||
readdir: import_fs.readdir,
|
||
readdirSync: import_fs.readdirSync,
|
||
readlinkSync: import_fs.readlinkSync,
|
||
realpathSync,
|
||
promises: {
|
||
lstat: import_promises.lstat,
|
||
readdir: import_promises.readdir,
|
||
readlink: import_promises.readlink,
|
||
realpath: import_promises.realpath
|
||
}
|
||
};
|
||
var fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === actualFS ? defaultFS : {
|
||
...defaultFS,
|
||
...fsOption,
|
||
promises: {
|
||
...defaultFS.promises,
|
||
...fsOption.promises || {}
|
||
}
|
||
};
|
||
var uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
|
||
var uncToDrive = (rootPath) => rootPath.replace(/\//g, "\\").replace(uncDriveRegexp, "$1\\");
|
||
var eitherSep = /[\\\/]/;
|
||
var UNKNOWN = 0;
|
||
var IFIFO = 1;
|
||
var IFCHR = 2;
|
||
var IFDIR = 4;
|
||
var IFBLK = 6;
|
||
var IFREG = 8;
|
||
var IFLNK = 10;
|
||
var IFSOCK = 12;
|
||
var IFMT = 15;
|
||
var IFMT_UNKNOWN = ~IFMT;
|
||
var READDIR_CALLED = 16;
|
||
var LSTAT_CALLED = 32;
|
||
var ENOTDIR = 64;
|
||
var ENOENT = 128;
|
||
var ENOREADLINK = 256;
|
||
var ENOREALPATH = 512;
|
||
var ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
|
||
var TYPEMASK = 1023;
|
||
var entToType = (s) => s.isFile() ? IFREG : s.isDirectory() ? IFDIR : s.isSymbolicLink() ? IFLNK : s.isCharacterDevice() ? IFCHR : s.isBlockDevice() ? IFBLK : s.isSocket() ? IFSOCK : s.isFIFO() ? IFIFO : UNKNOWN;
|
||
var normalizeCache = /* @__PURE__ */ new Map();
|
||
var normalize = (s) => {
|
||
const c = normalizeCache.get(s);
|
||
if (c)
|
||
return c;
|
||
const n = s.normalize("NFKD");
|
||
normalizeCache.set(s, n);
|
||
return n;
|
||
};
|
||
var normalizeNocaseCache = /* @__PURE__ */ new Map();
|
||
var normalizeNocase = (s) => {
|
||
const c = normalizeNocaseCache.get(s);
|
||
if (c)
|
||
return c;
|
||
const n = normalize(s.toLowerCase());
|
||
normalizeNocaseCache.set(s, n);
|
||
return n;
|
||
};
|
||
var ResolveCache = class extends LRUCache {
|
||
constructor() {
|
||
super({ max: 256 });
|
||
}
|
||
};
|
||
var ChildrenCache = class extends LRUCache {
|
||
constructor(maxSize = 16 * 1024) {
|
||
super({
|
||
maxSize,
|
||
// parent + children
|
||
sizeCalculation: (a) => a.length + 1
|
||
});
|
||
}
|
||
};
|
||
var setAsCwd = Symbol("PathScurry setAsCwd");
|
||
var PathBase = class {
|
||
/**
|
||
* the basename of this path
|
||
*
|
||
* **Important**: *always* test the path name against any test string
|
||
* usingthe {@link isNamed} method, and not by directly comparing this
|
||
* string. Otherwise, unicode path strings that the system sees as identical
|
||
* will not be properly treated as the same path, leading to incorrect
|
||
* behavior and possible security issues.
|
||
*/
|
||
name;
|
||
/**
|
||
* the Path entry corresponding to the path root.
|
||
*
|
||
* @internal
|
||
*/
|
||
root;
|
||
/**
|
||
* All roots found within the current PathScurry family
|
||
*
|
||
* @internal
|
||
*/
|
||
roots;
|
||
/**
|
||
* a reference to the parent path, or undefined in the case of root entries
|
||
*
|
||
* @internal
|
||
*/
|
||
parent;
|
||
/**
|
||
* boolean indicating whether paths are compared case-insensitively
|
||
* @internal
|
||
*/
|
||
nocase;
|
||
// potential default fs override
|
||
#fs;
|
||
// Stats fields
|
||
#dev;
|
||
get dev() {
|
||
return this.#dev;
|
||
}
|
||
#mode;
|
||
get mode() {
|
||
return this.#mode;
|
||
}
|
||
#nlink;
|
||
get nlink() {
|
||
return this.#nlink;
|
||
}
|
||
#uid;
|
||
get uid() {
|
||
return this.#uid;
|
||
}
|
||
#gid;
|
||
get gid() {
|
||
return this.#gid;
|
||
}
|
||
#rdev;
|
||
get rdev() {
|
||
return this.#rdev;
|
||
}
|
||
#blksize;
|
||
get blksize() {
|
||
return this.#blksize;
|
||
}
|
||
#ino;
|
||
get ino() {
|
||
return this.#ino;
|
||
}
|
||
#size;
|
||
get size() {
|
||
return this.#size;
|
||
}
|
||
#blocks;
|
||
get blocks() {
|
||
return this.#blocks;
|
||
}
|
||
#atimeMs;
|
||
get atimeMs() {
|
||
return this.#atimeMs;
|
||
}
|
||
#mtimeMs;
|
||
get mtimeMs() {
|
||
return this.#mtimeMs;
|
||
}
|
||
#ctimeMs;
|
||
get ctimeMs() {
|
||
return this.#ctimeMs;
|
||
}
|
||
#birthtimeMs;
|
||
get birthtimeMs() {
|
||
return this.#birthtimeMs;
|
||
}
|
||
#atime;
|
||
get atime() {
|
||
return this.#atime;
|
||
}
|
||
#mtime;
|
||
get mtime() {
|
||
return this.#mtime;
|
||
}
|
||
#ctime;
|
||
get ctime() {
|
||
return this.#ctime;
|
||
}
|
||
#birthtime;
|
||
get birthtime() {
|
||
return this.#birthtime;
|
||
}
|
||
#matchName;
|
||
#depth;
|
||
#fullpath;
|
||
#fullpathPosix;
|
||
#relative;
|
||
#relativePosix;
|
||
#type;
|
||
#children;
|
||
#linkTarget;
|
||
#realpath;
|
||
/**
|
||
* This property is for compatibility with the Dirent class as of
|
||
* Node v20, where Dirent['path'] refers to the path of the directory
|
||
* that was passed to readdir. So, somewhat counterintuitively, this
|
||
* property refers to the *parent* path, not the path object itself.
|
||
* For root entries, it's the path to the entry itself.
|
||
*/
|
||
get path() {
|
||
return (this.parent || this).fullpath();
|
||
}
|
||
/**
|
||
* Do not create new Path objects directly. They should always be accessed
|
||
* via the PathScurry class or other methods on the Path class.
|
||
*
|
||
* @internal
|
||
*/
|
||
constructor(name, type = UNKNOWN, root2, roots, nocase, children, opts) {
|
||
this.name = name;
|
||
this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
|
||
this.#type = type & TYPEMASK;
|
||
this.nocase = nocase;
|
||
this.roots = roots;
|
||
this.root = root2 || this;
|
||
this.#children = children;
|
||
this.#fullpath = opts.fullpath;
|
||
this.#relative = opts.relative;
|
||
this.#relativePosix = opts.relativePosix;
|
||
this.parent = opts.parent;
|
||
if (this.parent) {
|
||
this.#fs = this.parent.#fs;
|
||
} else {
|
||
this.#fs = fsFromOption(opts.fs);
|
||
}
|
||
}
|
||
/**
|
||
* Returns the depth of the Path object from its root.
|
||
*
|
||
* For example, a path at `/foo/bar` would have a depth of 2.
|
||
*/
|
||
depth() {
|
||
if (this.#depth !== void 0)
|
||
return this.#depth;
|
||
if (!this.parent)
|
||
return this.#depth = 0;
|
||
return this.#depth = this.parent.depth() + 1;
|
||
}
|
||
/**
|
||
* @internal
|
||
*/
|
||
childrenCache() {
|
||
return this.#children;
|
||
}
|
||
/**
|
||
* Get the Path object referenced by the string path, resolved from this Path
|
||
*/
|
||
resolve(path2) {
|
||
if (!path2) {
|
||
return this;
|
||
}
|
||
const rootPath = this.getRootString(path2);
|
||
const dir2 = path2.substring(rootPath.length);
|
||
const dirParts = dir2.split(this.splitSep);
|
||
const result = rootPath ? this.getRoot(rootPath).#resolveParts(dirParts) : this.#resolveParts(dirParts);
|
||
return result;
|
||
}
|
||
#resolveParts(dirParts) {
|
||
let p = this;
|
||
for (const part of dirParts) {
|
||
p = p.child(part);
|
||
}
|
||
return p;
|
||
}
|
||
/**
|
||
* Returns the cached children Path objects, if still available. If they
|
||
* have fallen out of the cache, then returns an empty array, and resets the
|
||
* READDIR_CALLED bit, so that future calls to readdir() will require an fs
|
||
* lookup.
|
||
*
|
||
* @internal
|
||
*/
|
||
children() {
|
||
const cached = this.#children.get(this);
|
||
if (cached) {
|
||
return cached;
|
||
}
|
||
const children = Object.assign([], { provisional: 0 });
|
||
this.#children.set(this, children);
|
||
this.#type &= ~READDIR_CALLED;
|
||
return children;
|
||
}
|
||
/**
|
||
* Resolves a path portion and returns or creates the child Path.
|
||
*
|
||
* Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
|
||
* `'..'`.
|
||
*
|
||
* This should not be called directly. If `pathPart` contains any path
|
||
* separators, it will lead to unsafe undefined behavior.
|
||
*
|
||
* Use `Path.resolve()` instead.
|
||
*
|
||
* @internal
|
||
*/
|
||
child(pathPart, opts) {
|
||
if (pathPart === "" || pathPart === ".") {
|
||
return this;
|
||
}
|
||
if (pathPart === "..") {
|
||
return this.parent || this;
|
||
}
|
||
const children = this.children();
|
||
const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
|
||
for (const p of children) {
|
||
if (p.#matchName === name) {
|
||
return p;
|
||
}
|
||
}
|
||
const s = this.parent ? this.sep : "";
|
||
const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : void 0;
|
||
const pchild = this.newChild(pathPart, UNKNOWN, {
|
||
...opts,
|
||
parent: this,
|
||
fullpath
|
||
});
|
||
if (!this.canReaddir()) {
|
||
pchild.#type |= ENOENT;
|
||
}
|
||
children.push(pchild);
|
||
return pchild;
|
||
}
|
||
/**
|
||
* The relative path from the cwd. If it does not share an ancestor with
|
||
* the cwd, then this ends up being equivalent to the fullpath()
|
||
*/
|
||
relative() {
|
||
if (this.#relative !== void 0) {
|
||
return this.#relative;
|
||
}
|
||
const name = this.name;
|
||
const p = this.parent;
|
||
if (!p) {
|
||
return this.#relative = this.name;
|
||
}
|
||
const pv = p.relative();
|
||
return pv + (!pv || !p.parent ? "" : this.sep) + name;
|
||
}
|
||
/**
|
||
* The relative path from the cwd, using / as the path separator.
|
||
* If it does not share an ancestor with
|
||
* the cwd, then this ends up being equivalent to the fullpathPosix()
|
||
* On posix systems, this is identical to relative().
|
||
*/
|
||
relativePosix() {
|
||
if (this.sep === "/")
|
||
return this.relative();
|
||
if (this.#relativePosix !== void 0)
|
||
return this.#relativePosix;
|
||
const name = this.name;
|
||
const p = this.parent;
|
||
if (!p) {
|
||
return this.#relativePosix = this.fullpathPosix();
|
||
}
|
||
const pv = p.relativePosix();
|
||
return pv + (!pv || !p.parent ? "" : "/") + name;
|
||
}
|
||
/**
|
||
* The fully resolved path string for this Path entry
|
||
*/
|
||
fullpath() {
|
||
if (this.#fullpath !== void 0) {
|
||
return this.#fullpath;
|
||
}
|
||
const name = this.name;
|
||
const p = this.parent;
|
||
if (!p) {
|
||
return this.#fullpath = this.name;
|
||
}
|
||
const pv = p.fullpath();
|
||
const fp = pv + (!p.parent ? "" : this.sep) + name;
|
||
return this.#fullpath = fp;
|
||
}
|
||
/**
|
||
* On platforms other than windows, this is identical to fullpath.
|
||
*
|
||
* On windows, this is overridden to return the forward-slash form of the
|
||
* full UNC path.
|
||
*/
|
||
fullpathPosix() {
|
||
if (this.#fullpathPosix !== void 0)
|
||
return this.#fullpathPosix;
|
||
if (this.sep === "/")
|
||
return this.#fullpathPosix = this.fullpath();
|
||
if (!this.parent) {
|
||
const p2 = this.fullpath().replace(/\\/g, "/");
|
||
if (/^[a-z]:\//i.test(p2)) {
|
||
return this.#fullpathPosix = `//?/${p2}`;
|
||
} else {
|
||
return this.#fullpathPosix = p2;
|
||
}
|
||
}
|
||
const p = this.parent;
|
||
const pfpp = p.fullpathPosix();
|
||
const fpp = pfpp + (!pfpp || !p.parent ? "" : "/") + this.name;
|
||
return this.#fullpathPosix = fpp;
|
||
}
|
||
/**
|
||
* Is the Path of an unknown type?
|
||
*
|
||
* Note that we might know *something* about it if there has been a previous
|
||
* filesystem operation, for example that it does not exist, or is not a
|
||
* link, or whether it has child entries.
|
||
*/
|
||
isUnknown() {
|
||
return (this.#type & IFMT) === UNKNOWN;
|
||
}
|
||
isType(type) {
|
||
return this[`is${type}`]();
|
||
}
|
||
getType() {
|
||
return this.isUnknown() ? "Unknown" : this.isDirectory() ? "Directory" : this.isFile() ? "File" : this.isSymbolicLink() ? "SymbolicLink" : this.isFIFO() ? "FIFO" : this.isCharacterDevice() ? "CharacterDevice" : this.isBlockDevice() ? "BlockDevice" : (
|
||
/* c8 ignore start */
|
||
this.isSocket() ? "Socket" : "Unknown"
|
||
);
|
||
}
|
||
/**
|
||
* Is the Path a regular file?
|
||
*/
|
||
isFile() {
|
||
return (this.#type & IFMT) === IFREG;
|
||
}
|
||
/**
|
||
* Is the Path a directory?
|
||
*/
|
||
isDirectory() {
|
||
return (this.#type & IFMT) === IFDIR;
|
||
}
|
||
/**
|
||
* Is the path a character device?
|
||
*/
|
||
isCharacterDevice() {
|
||
return (this.#type & IFMT) === IFCHR;
|
||
}
|
||
/**
|
||
* Is the path a block device?
|
||
*/
|
||
isBlockDevice() {
|
||
return (this.#type & IFMT) === IFBLK;
|
||
}
|
||
/**
|
||
* Is the path a FIFO pipe?
|
||
*/
|
||
isFIFO() {
|
||
return (this.#type & IFMT) === IFIFO;
|
||
}
|
||
/**
|
||
* Is the path a socket?
|
||
*/
|
||
isSocket() {
|
||
return (this.#type & IFMT) === IFSOCK;
|
||
}
|
||
/**
|
||
* Is the path a symbolic link?
|
||
*/
|
||
isSymbolicLink() {
|
||
return (this.#type & IFLNK) === IFLNK;
|
||
}
|
||
/**
|
||
* Return the entry if it has been subject of a successful lstat, or
|
||
* undefined otherwise.
|
||
*
|
||
* Does not read the filesystem, so an undefined result *could* simply
|
||
* mean that we haven't called lstat on it.
|
||
*/
|
||
lstatCached() {
|
||
return this.#type & LSTAT_CALLED ? this : void 0;
|
||
}
|
||
/**
|
||
* Return the cached link target if the entry has been the subject of a
|
||
* successful readlink, or undefined otherwise.
|
||
*
|
||
* Does not read the filesystem, so an undefined result *could* just mean we
|
||
* don't have any cached data. Only use it if you are very sure that a
|
||
* readlink() has been called at some point.
|
||
*/
|
||
readlinkCached() {
|
||
return this.#linkTarget;
|
||
}
|
||
/**
|
||
* Returns the cached realpath target if the entry has been the subject
|
||
* of a successful realpath, or undefined otherwise.
|
||
*
|
||
* Does not read the filesystem, so an undefined result *could* just mean we
|
||
* don't have any cached data. Only use it if you are very sure that a
|
||
* realpath() has been called at some point.
|
||
*/
|
||
realpathCached() {
|
||
return this.#realpath;
|
||
}
|
||
/**
|
||
* Returns the cached child Path entries array if the entry has been the
|
||
* subject of a successful readdir(), or [] otherwise.
|
||
*
|
||
* Does not read the filesystem, so an empty array *could* just mean we
|
||
* don't have any cached data. Only use it if you are very sure that a
|
||
* readdir() has been called recently enough to still be valid.
|
||
*/
|
||
readdirCached() {
|
||
const children = this.children();
|
||
return children.slice(0, children.provisional);
|
||
}
|
||
/**
|
||
* Return true if it's worth trying to readlink. Ie, we don't (yet) have
|
||
* any indication that readlink will definitely fail.
|
||
*
|
||
* Returns false if the path is known to not be a symlink, if a previous
|
||
* readlink failed, or if the entry does not exist.
|
||
*/
|
||
canReadlink() {
|
||
if (this.#linkTarget)
|
||
return true;
|
||
if (!this.parent)
|
||
return false;
|
||
const ifmt = this.#type & IFMT;
|
||
return !(ifmt !== UNKNOWN && ifmt !== IFLNK || this.#type & ENOREADLINK || this.#type & ENOENT);
|
||
}
|
||
/**
|
||
* Return true if readdir has previously been successfully called on this
|
||
* path, indicating that cachedReaddir() is likely valid.
|
||
*/
|
||
calledReaddir() {
|
||
return !!(this.#type & READDIR_CALLED);
|
||
}
|
||
/**
|
||
* Returns true if the path is known to not exist. That is, a previous lstat
|
||
* or readdir failed to verify its existence when that would have been
|
||
* expected, or a parent entry was marked either enoent or enotdir.
|
||
*/
|
||
isENOENT() {
|
||
return !!(this.#type & ENOENT);
|
||
}
|
||
/**
|
||
* Return true if the path is a match for the given path name. This handles
|
||
* case sensitivity and unicode normalization.
|
||
*
|
||
* Note: even on case-sensitive systems, it is **not** safe to test the
|
||
* equality of the `.name` property to determine whether a given pathname
|
||
* matches, due to unicode normalization mismatches.
|
||
*
|
||
* Always use this method instead of testing the `path.name` property
|
||
* directly.
|
||
*/
|
||
isNamed(n) {
|
||
return !this.nocase ? this.#matchName === normalize(n) : this.#matchName === normalizeNocase(n);
|
||
}
|
||
/**
|
||
* Return the Path object corresponding to the target of a symbolic link.
|
||
*
|
||
* If the Path is not a symbolic link, or if the readlink call fails for any
|
||
* reason, `undefined` is returned.
|
||
*
|
||
* Result is cached, and thus may be outdated if the filesystem is mutated.
|
||
*/
|
||
async readlink() {
|
||
const target = this.#linkTarget;
|
||
if (target) {
|
||
return target;
|
||
}
|
||
if (!this.canReadlink()) {
|
||
return void 0;
|
||
}
|
||
if (!this.parent) {
|
||
return void 0;
|
||
}
|
||
try {
|
||
const read = await this.#fs.promises.readlink(this.fullpath());
|
||
const linkTarget = this.parent.resolve(read);
|
||
if (linkTarget) {
|
||
return this.#linkTarget = linkTarget;
|
||
}
|
||
} catch (er) {
|
||
this.#readlinkFail(er.code);
|
||
return void 0;
|
||
}
|
||
}
|
||
/**
|
||
* Synchronous {@link PathBase.readlink}
|
||
*/
|
||
readlinkSync() {
|
||
const target = this.#linkTarget;
|
||
if (target) {
|
||
return target;
|
||
}
|
||
if (!this.canReadlink()) {
|
||
return void 0;
|
||
}
|
||
if (!this.parent) {
|
||
return void 0;
|
||
}
|
||
try {
|
||
const read = this.#fs.readlinkSync(this.fullpath());
|
||
const linkTarget = this.parent.resolve(read);
|
||
if (linkTarget) {
|
||
return this.#linkTarget = linkTarget;
|
||
}
|
||
} catch (er) {
|
||
this.#readlinkFail(er.code);
|
||
return void 0;
|
||
}
|
||
}
|
||
#readdirSuccess(children) {
|
||
this.#type |= READDIR_CALLED;
|
||
for (let p = children.provisional; p < children.length; p++) {
|
||
children[p].#markENOENT();
|
||
}
|
||
}
|
||
#markENOENT() {
|
||
if (this.#type & ENOENT)
|
||
return;
|
||
this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
|
||
this.#markChildrenENOENT();
|
||
}
|
||
#markChildrenENOENT() {
|
||
const children = this.children();
|
||
children.provisional = 0;
|
||
for (const p of children) {
|
||
p.#markENOENT();
|
||
}
|
||
}
|
||
#markENOREALPATH() {
|
||
this.#type |= ENOREALPATH;
|
||
this.#markENOTDIR();
|
||
}
|
||
// save the information when we know the entry is not a dir
|
||
#markENOTDIR() {
|
||
if (this.#type & ENOTDIR)
|
||
return;
|
||
let t = this.#type;
|
||
if ((t & IFMT) === IFDIR)
|
||
t &= IFMT_UNKNOWN;
|
||
this.#type = t | ENOTDIR;
|
||
this.#markChildrenENOENT();
|
||
}
|
||
#readdirFail(code2 = "") {
|
||
if (code2 === "ENOTDIR" || code2 === "EPERM") {
|
||
this.#markENOTDIR();
|
||
} else if (code2 === "ENOENT") {
|
||
this.#markENOENT();
|
||
} else {
|
||
this.children().provisional = 0;
|
||
}
|
||
}
|
||
#lstatFail(code2 = "") {
|
||
if (code2 === "ENOTDIR") {
|
||
const p = this.parent;
|
||
p.#markENOTDIR();
|
||
} else if (code2 === "ENOENT") {
|
||
this.#markENOENT();
|
||
}
|
||
}
|
||
#readlinkFail(code2 = "") {
|
||
let ter = this.#type;
|
||
ter |= ENOREADLINK;
|
||
if (code2 === "ENOENT")
|
||
ter |= ENOENT;
|
||
if (code2 === "EINVAL" || code2 === "UNKNOWN") {
|
||
ter &= IFMT_UNKNOWN;
|
||
}
|
||
this.#type = ter;
|
||
if (code2 === "ENOTDIR" && this.parent) {
|
||
this.parent.#markENOTDIR();
|
||
}
|
||
}
|
||
#readdirAddChild(e, c) {
|
||
return this.#readdirMaybePromoteChild(e, c) || this.#readdirAddNewChild(e, c);
|
||
}
|
||
#readdirAddNewChild(e, c) {
|
||
const type = entToType(e);
|
||
const child = this.newChild(e.name, type, { parent: this });
|
||
const ifmt = child.#type & IFMT;
|
||
if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
|
||
child.#type |= ENOTDIR;
|
||
}
|
||
c.unshift(child);
|
||
c.provisional++;
|
||
return child;
|
||
}
|
||
#readdirMaybePromoteChild(e, c) {
|
||
for (let p = c.provisional; p < c.length; p++) {
|
||
const pchild = c[p];
|
||
const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
|
||
if (name !== pchild.#matchName) {
|
||
continue;
|
||
}
|
||
return this.#readdirPromoteChild(e, pchild, p, c);
|
||
}
|
||
}
|
||
#readdirPromoteChild(e, p, index2, c) {
|
||
const v = p.name;
|
||
p.#type = p.#type & IFMT_UNKNOWN | entToType(e);
|
||
if (v !== e.name)
|
||
p.name = e.name;
|
||
if (index2 !== c.provisional) {
|
||
if (index2 === c.length - 1)
|
||
c.pop();
|
||
else
|
||
c.splice(index2, 1);
|
||
c.unshift(p);
|
||
}
|
||
c.provisional++;
|
||
return p;
|
||
}
|
||
/**
|
||
* Call lstat() on this Path, and update all known information that can be
|
||
* determined.
|
||
*
|
||
* Note that unlike `fs.lstat()`, the returned value does not contain some
|
||
* information, such as `mode`, `dev`, `nlink`, and `ino`. If that
|
||
* information is required, you will need to call `fs.lstat` yourself.
|
||
*
|
||
* If the Path refers to a nonexistent file, or if the lstat call fails for
|
||
* any reason, `undefined` is returned. Otherwise the updated Path object is
|
||
* returned.
|
||
*
|
||
* Results are cached, and thus may be out of date if the filesystem is
|
||
* mutated.
|
||
*/
|
||
async lstat() {
|
||
if ((this.#type & ENOENT) === 0) {
|
||
try {
|
||
this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
|
||
return this;
|
||
} catch (er) {
|
||
this.#lstatFail(er.code);
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* synchronous {@link PathBase.lstat}
|
||
*/
|
||
lstatSync() {
|
||
if ((this.#type & ENOENT) === 0) {
|
||
try {
|
||
this.#applyStat(this.#fs.lstatSync(this.fullpath()));
|
||
return this;
|
||
} catch (er) {
|
||
this.#lstatFail(er.code);
|
||
}
|
||
}
|
||
}
|
||
#applyStat(st) {
|
||
const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid } = st;
|
||
this.#atime = atime;
|
||
this.#atimeMs = atimeMs;
|
||
this.#birthtime = birthtime;
|
||
this.#birthtimeMs = birthtimeMs;
|
||
this.#blksize = blksize;
|
||
this.#blocks = blocks;
|
||
this.#ctime = ctime;
|
||
this.#ctimeMs = ctimeMs;
|
||
this.#dev = dev;
|
||
this.#gid = gid;
|
||
this.#ino = ino;
|
||
this.#mode = mode;
|
||
this.#mtime = mtime;
|
||
this.#mtimeMs = mtimeMs;
|
||
this.#nlink = nlink;
|
||
this.#rdev = rdev;
|
||
this.#size = size;
|
||
this.#uid = uid;
|
||
const ifmt = entToType(st);
|
||
this.#type = this.#type & IFMT_UNKNOWN | ifmt | LSTAT_CALLED;
|
||
if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
|
||
this.#type |= ENOTDIR;
|
||
}
|
||
}
|
||
#onReaddirCB = [];
|
||
#readdirCBInFlight = false;
|
||
#callOnReaddirCB(children) {
|
||
this.#readdirCBInFlight = false;
|
||
const cbs = this.#onReaddirCB.slice();
|
||
this.#onReaddirCB.length = 0;
|
||
cbs.forEach((cb) => cb(null, children));
|
||
}
|
||
/**
|
||
* Standard node-style callback interface to get list of directory entries.
|
||
*
|
||
* If the Path cannot or does not contain any children, then an empty array
|
||
* is returned.
|
||
*
|
||
* Results are cached, and thus may be out of date if the filesystem is
|
||
* mutated.
|
||
*
|
||
* @param cb The callback called with (er, entries). Note that the `er`
|
||
* param is somewhat extraneous, as all readdir() errors are handled and
|
||
* simply result in an empty set of entries being returned.
|
||
* @param allowZalgo Boolean indicating that immediately known results should
|
||
* *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
|
||
* zalgo at your peril, the dark pony lord is devious and unforgiving.
|
||
*/
|
||
readdirCB(cb, allowZalgo = false) {
|
||
if (!this.canReaddir()) {
|
||
if (allowZalgo)
|
||
cb(null, []);
|
||
else
|
||
queueMicrotask(() => cb(null, []));
|
||
return;
|
||
}
|
||
const children = this.children();
|
||
if (this.calledReaddir()) {
|
||
const c = children.slice(0, children.provisional);
|
||
if (allowZalgo)
|
||
cb(null, c);
|
||
else
|
||
queueMicrotask(() => cb(null, c));
|
||
return;
|
||
}
|
||
this.#onReaddirCB.push(cb);
|
||
if (this.#readdirCBInFlight) {
|
||
return;
|
||
}
|
||
this.#readdirCBInFlight = true;
|
||
const fullpath = this.fullpath();
|
||
this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
|
||
if (er) {
|
||
this.#readdirFail(er.code);
|
||
children.provisional = 0;
|
||
} else {
|
||
for (const e of entries) {
|
||
this.#readdirAddChild(e, children);
|
||
}
|
||
this.#readdirSuccess(children);
|
||
}
|
||
this.#callOnReaddirCB(children.slice(0, children.provisional));
|
||
return;
|
||
});
|
||
}
|
||
#asyncReaddirInFlight;
|
||
/**
|
||
* Return an array of known child entries.
|
||
*
|
||
* If the Path cannot or does not contain any children, then an empty array
|
||
* is returned.
|
||
*
|
||
* Results are cached, and thus may be out of date if the filesystem is
|
||
* mutated.
|
||
*/
|
||
async readdir() {
|
||
if (!this.canReaddir()) {
|
||
return [];
|
||
}
|
||
const children = this.children();
|
||
if (this.calledReaddir()) {
|
||
return children.slice(0, children.provisional);
|
||
}
|
||
const fullpath = this.fullpath();
|
||
if (this.#asyncReaddirInFlight) {
|
||
await this.#asyncReaddirInFlight;
|
||
} else {
|
||
let resolve = () => {
|
||
};
|
||
this.#asyncReaddirInFlight = new Promise((res) => resolve = res);
|
||
try {
|
||
for (const e of await this.#fs.promises.readdir(fullpath, {
|
||
withFileTypes: true
|
||
})) {
|
||
this.#readdirAddChild(e, children);
|
||
}
|
||
this.#readdirSuccess(children);
|
||
} catch (er) {
|
||
this.#readdirFail(er.code);
|
||
children.provisional = 0;
|
||
}
|
||
this.#asyncReaddirInFlight = void 0;
|
||
resolve();
|
||
}
|
||
return children.slice(0, children.provisional);
|
||
}
|
||
/**
|
||
* synchronous {@link PathBase.readdir}
|
||
*/
|
||
readdirSync() {
|
||
if (!this.canReaddir()) {
|
||
return [];
|
||
}
|
||
const children = this.children();
|
||
if (this.calledReaddir()) {
|
||
return children.slice(0, children.provisional);
|
||
}
|
||
const fullpath = this.fullpath();
|
||
try {
|
||
for (const e of this.#fs.readdirSync(fullpath, {
|
||
withFileTypes: true
|
||
})) {
|
||
this.#readdirAddChild(e, children);
|
||
}
|
||
this.#readdirSuccess(children);
|
||
} catch (er) {
|
||
this.#readdirFail(er.code);
|
||
children.provisional = 0;
|
||
}
|
||
return children.slice(0, children.provisional);
|
||
}
|
||
canReaddir() {
|
||
if (this.#type & ENOCHILD)
|
||
return false;
|
||
const ifmt = IFMT & this.#type;
|
||
if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
|
||
return false;
|
||
}
|
||
return true;
|
||
}
|
||
shouldWalk(dirs, walkFilter) {
|
||
return (this.#type & IFDIR) === IFDIR && !(this.#type & ENOCHILD) && !dirs.has(this) && (!walkFilter || walkFilter(this));
|
||
}
|
||
/**
|
||
* Return the Path object corresponding to path as resolved
|
||
* by realpath(3).
|
||
*
|
||
* If the realpath call fails for any reason, `undefined` is returned.
|
||
*
|
||
* Result is cached, and thus may be outdated if the filesystem is mutated.
|
||
* On success, returns a Path object.
|
||
*/
|
||
async realpath() {
|
||
if (this.#realpath)
|
||
return this.#realpath;
|
||
if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
|
||
return void 0;
|
||
try {
|
||
const rp = await this.#fs.promises.realpath(this.fullpath());
|
||
return this.#realpath = this.resolve(rp);
|
||
} catch (_) {
|
||
this.#markENOREALPATH();
|
||
}
|
||
}
|
||
/**
|
||
* Synchronous {@link realpath}
|
||
*/
|
||
realpathSync() {
|
||
if (this.#realpath)
|
||
return this.#realpath;
|
||
if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
|
||
return void 0;
|
||
try {
|
||
const rp = this.#fs.realpathSync(this.fullpath());
|
||
return this.#realpath = this.resolve(rp);
|
||
} catch (_) {
|
||
this.#markENOREALPATH();
|
||
}
|
||
}
|
||
/**
|
||
* Internal method to mark this Path object as the scurry cwd,
|
||
* called by {@link PathScurry#chdir}
|
||
*
|
||
* @internal
|
||
*/
|
||
[setAsCwd](oldCwd) {
|
||
if (oldCwd === this)
|
||
return;
|
||
const changed = /* @__PURE__ */ new Set([]);
|
||
let rp = [];
|
||
let p = this;
|
||
while (p && p.parent) {
|
||
changed.add(p);
|
||
p.#relative = rp.join(this.sep);
|
||
p.#relativePosix = rp.join("/");
|
||
p = p.parent;
|
||
rp.push("..");
|
||
}
|
||
p = oldCwd;
|
||
while (p && p.parent && !changed.has(p)) {
|
||
p.#relative = void 0;
|
||
p.#relativePosix = void 0;
|
||
p = p.parent;
|
||
}
|
||
}
|
||
};
|
||
var PathWin32 = class _PathWin32 extends PathBase {
|
||
/**
|
||
* Separator for generating path strings.
|
||
*/
|
||
sep = "\\";
|
||
/**
|
||
* Separator for parsing path strings.
|
||
*/
|
||
splitSep = eitherSep;
|
||
/**
|
||
* Do not create new Path objects directly. They should always be accessed
|
||
* via the PathScurry class or other methods on the Path class.
|
||
*
|
||
* @internal
|
||
*/
|
||
constructor(name, type = UNKNOWN, root2, roots, nocase, children, opts) {
|
||
super(name, type, root2, roots, nocase, children, opts);
|
||
}
|
||
/**
|
||
* @internal
|
||
*/
|
||
newChild(name, type = UNKNOWN, opts = {}) {
|
||
return new _PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
|
||
}
|
||
/**
|
||
* @internal
|
||
*/
|
||
getRootString(path2) {
|
||
return import_path.win32.parse(path2).root;
|
||
}
|
||
/**
|
||
* @internal
|
||
*/
|
||
getRoot(rootPath) {
|
||
rootPath = uncToDrive(rootPath.toUpperCase());
|
||
if (rootPath === this.root.name) {
|
||
return this.root;
|
||
}
|
||
for (const [compare, root2] of Object.entries(this.roots)) {
|
||
if (this.sameRoot(rootPath, compare)) {
|
||
return this.roots[rootPath] = root2;
|
||
}
|
||
}
|
||
return this.roots[rootPath] = new PathScurryWin32(rootPath, this).root;
|
||
}
|
||
/**
|
||
* @internal
|
||
*/
|
||
sameRoot(rootPath, compare = this.root.name) {
|
||
rootPath = rootPath.toUpperCase().replace(/\//g, "\\").replace(uncDriveRegexp, "$1\\");
|
||
return rootPath === compare;
|
||
}
|
||
};
|
||
var PathPosix = class _PathPosix extends PathBase {
|
||
/**
|
||
* separator for parsing path strings
|
||
*/
|
||
splitSep = "/";
|
||
/**
|
||
* separator for generating path strings
|
||
*/
|
||
sep = "/";
|
||
/**
|
||
* Do not create new Path objects directly. They should always be accessed
|
||
* via the PathScurry class or other methods on the Path class.
|
||
*
|
||
* @internal
|
||
*/
|
||
constructor(name, type = UNKNOWN, root2, roots, nocase, children, opts) {
|
||
super(name, type, root2, roots, nocase, children, opts);
|
||
}
|
||
/**
|
||
* @internal
|
||
*/
|
||
getRootString(path2) {
|
||
return path2.startsWith("/") ? "/" : "";
|
||
}
|
||
/**
|
||
* @internal
|
||
*/
|
||
getRoot(_rootPath) {
|
||
return this.root;
|
||
}
|
||
/**
|
||
* @internal
|
||
*/
|
||
newChild(name, type = UNKNOWN, opts = {}) {
|
||
return new _PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
|
||
}
|
||
};
|
||
var PathScurryBase = class {
|
||
/**
|
||
* The root Path entry for the current working directory of this Scurry
|
||
*/
|
||
root;
|
||
/**
|
||
* The string path for the root of this Scurry's current working directory
|
||
*/
|
||
rootPath;
|
||
/**
|
||
* A collection of all roots encountered, referenced by rootPath
|
||
*/
|
||
roots;
|
||
/**
|
||
* The Path entry corresponding to this PathScurry's current working directory.
|
||
*/
|
||
cwd;
|
||
#resolveCache;
|
||
#resolvePosixCache;
|
||
#children;
|
||
/**
|
||
* Perform path comparisons case-insensitively.
|
||
*
|
||
* Defaults true on Darwin and Windows systems, false elsewhere.
|
||
*/
|
||
nocase;
|
||
#fs;
|
||
/**
|
||
* This class should not be instantiated directly.
|
||
*
|
||
* Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
|
||
*
|
||
* @internal
|
||
*/
|
||
constructor(cwd = process.cwd(), pathImpl, sep2, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS } = {}) {
|
||
this.#fs = fsFromOption(fs);
|
||
if (cwd instanceof URL || cwd.startsWith("file://")) {
|
||
cwd = (0, import_url.fileURLToPath)(cwd);
|
||
}
|
||
const cwdPath = pathImpl.resolve(cwd);
|
||
this.roots = /* @__PURE__ */ Object.create(null);
|
||
this.rootPath = this.parseRootPath(cwdPath);
|
||
this.#resolveCache = new ResolveCache();
|
||
this.#resolvePosixCache = new ResolveCache();
|
||
this.#children = new ChildrenCache(childrenCacheSize);
|
||
const split = cwdPath.substring(this.rootPath.length).split(sep2);
|
||
if (split.length === 1 && !split[0]) {
|
||
split.pop();
|
||
}
|
||
if (nocase === void 0) {
|
||
throw new TypeError("must provide nocase setting to PathScurryBase ctor");
|
||
}
|
||
this.nocase = nocase;
|
||
this.root = this.newRoot(this.#fs);
|
||
this.roots[this.rootPath] = this.root;
|
||
let prev = this.root;
|
||
let len = split.length - 1;
|
||
const joinSep = pathImpl.sep;
|
||
let abs = this.rootPath;
|
||
let sawFirst = false;
|
||
for (const part of split) {
|
||
const l = len--;
|
||
prev = prev.child(part, {
|
||
relative: new Array(l).fill("..").join(joinSep),
|
||
relativePosix: new Array(l).fill("..").join("/"),
|
||
fullpath: abs += (sawFirst ? "" : joinSep) + part
|
||
});
|
||
sawFirst = true;
|
||
}
|
||
this.cwd = prev;
|
||
}
|
||
/**
|
||
* Get the depth of a provided path, string, or the cwd
|
||
*/
|
||
depth(path2 = this.cwd) {
|
||
if (typeof path2 === "string") {
|
||
path2 = this.cwd.resolve(path2);
|
||
}
|
||
return path2.depth();
|
||
}
|
||
/**
|
||
* Return the cache of child entries. Exposed so subclasses can create
|
||
* child Path objects in a platform-specific way.
|
||
*
|
||
* @internal
|
||
*/
|
||
childrenCache() {
|
||
return this.#children;
|
||
}
|
||
/**
|
||
* Resolve one or more path strings to a resolved string
|
||
*
|
||
* Same interface as require('path').resolve.
|
||
*
|
||
* Much faster than path.resolve() when called multiple times for the same
|
||
* path, because the resolved Path objects are cached. Much slower
|
||
* otherwise.
|
||
*/
|
||
resolve(...paths) {
|
||
let r = "";
|
||
for (let i = paths.length - 1; i >= 0; i--) {
|
||
const p = paths[i];
|
||
if (!p || p === ".")
|
||
continue;
|
||
r = r ? `${p}/${r}` : p;
|
||
if (this.isAbsolute(p)) {
|
||
break;
|
||
}
|
||
}
|
||
const cached = this.#resolveCache.get(r);
|
||
if (cached !== void 0) {
|
||
return cached;
|
||
}
|
||
const result = this.cwd.resolve(r).fullpath();
|
||
this.#resolveCache.set(r, result);
|
||
return result;
|
||
}
|
||
/**
|
||
* Resolve one or more path strings to a resolved string, returning
|
||
* the posix path. Identical to .resolve() on posix systems, but on
|
||
* windows will return a forward-slash separated UNC path.
|
||
*
|
||
* Same interface as require('path').resolve.
|
||
*
|
||
* Much faster than path.resolve() when called multiple times for the same
|
||
* path, because the resolved Path objects are cached. Much slower
|
||
* otherwise.
|
||
*/
|
||
resolvePosix(...paths) {
|
||
let r = "";
|
||
for (let i = paths.length - 1; i >= 0; i--) {
|
||
const p = paths[i];
|
||
if (!p || p === ".")
|
||
continue;
|
||
r = r ? `${p}/${r}` : p;
|
||
if (this.isAbsolute(p)) {
|
||
break;
|
||
}
|
||
}
|
||
const cached = this.#resolvePosixCache.get(r);
|
||
if (cached !== void 0) {
|
||
return cached;
|
||
}
|
||
const result = this.cwd.resolve(r).fullpathPosix();
|
||
this.#resolvePosixCache.set(r, result);
|
||
return result;
|
||
}
|
||
/**
|
||
* find the relative path from the cwd to the supplied path string or entry
|
||
*/
|
||
relative(entry = this.cwd) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
}
|
||
return entry.relative();
|
||
}
|
||
/**
|
||
* find the relative path from the cwd to the supplied path string or
|
||
* entry, using / as the path delimiter, even on Windows.
|
||
*/
|
||
relativePosix(entry = this.cwd) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
}
|
||
return entry.relativePosix();
|
||
}
|
||
/**
|
||
* Return the basename for the provided string or Path object
|
||
*/
|
||
basename(entry = this.cwd) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
}
|
||
return entry.name;
|
||
}
|
||
/**
|
||
* Return the dirname for the provided string or Path object
|
||
*/
|
||
dirname(entry = this.cwd) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
}
|
||
return (entry.parent || entry).fullpath();
|
||
}
|
||
async readdir(entry = this.cwd, opts = {
|
||
withFileTypes: true
|
||
}) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
} else if (!(entry instanceof PathBase)) {
|
||
opts = entry;
|
||
entry = this.cwd;
|
||
}
|
||
const { withFileTypes } = opts;
|
||
if (!entry.canReaddir()) {
|
||
return [];
|
||
} else {
|
||
const p = await entry.readdir();
|
||
return withFileTypes ? p : p.map((e) => e.name);
|
||
}
|
||
}
|
||
readdirSync(entry = this.cwd, opts = {
|
||
withFileTypes: true
|
||
}) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
} else if (!(entry instanceof PathBase)) {
|
||
opts = entry;
|
||
entry = this.cwd;
|
||
}
|
||
const { withFileTypes = true } = opts;
|
||
if (!entry.canReaddir()) {
|
||
return [];
|
||
} else if (withFileTypes) {
|
||
return entry.readdirSync();
|
||
} else {
|
||
return entry.readdirSync().map((e) => e.name);
|
||
}
|
||
}
|
||
/**
|
||
* Call lstat() on the string or Path object, and update all known
|
||
* information that can be determined.
|
||
*
|
||
* Note that unlike `fs.lstat()`, the returned value does not contain some
|
||
* information, such as `mode`, `dev`, `nlink`, and `ino`. If that
|
||
* information is required, you will need to call `fs.lstat` yourself.
|
||
*
|
||
* If the Path refers to a nonexistent file, or if the lstat call fails for
|
||
* any reason, `undefined` is returned. Otherwise the updated Path object is
|
||
* returned.
|
||
*
|
||
* Results are cached, and thus may be out of date if the filesystem is
|
||
* mutated.
|
||
*/
|
||
async lstat(entry = this.cwd) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
}
|
||
return entry.lstat();
|
||
}
|
||
/**
|
||
* synchronous {@link PathScurryBase.lstat}
|
||
*/
|
||
lstatSync(entry = this.cwd) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
}
|
||
return entry.lstatSync();
|
||
}
|
||
async readlink(entry = this.cwd, { withFileTypes } = {
|
||
withFileTypes: false
|
||
}) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
} else if (!(entry instanceof PathBase)) {
|
||
withFileTypes = entry.withFileTypes;
|
||
entry = this.cwd;
|
||
}
|
||
const e = await entry.readlink();
|
||
return withFileTypes ? e : e?.fullpath();
|
||
}
|
||
readlinkSync(entry = this.cwd, { withFileTypes } = {
|
||
withFileTypes: false
|
||
}) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
} else if (!(entry instanceof PathBase)) {
|
||
withFileTypes = entry.withFileTypes;
|
||
entry = this.cwd;
|
||
}
|
||
const e = entry.readlinkSync();
|
||
return withFileTypes ? e : e?.fullpath();
|
||
}
|
||
async realpath(entry = this.cwd, { withFileTypes } = {
|
||
withFileTypes: false
|
||
}) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
} else if (!(entry instanceof PathBase)) {
|
||
withFileTypes = entry.withFileTypes;
|
||
entry = this.cwd;
|
||
}
|
||
const e = await entry.realpath();
|
||
return withFileTypes ? e : e?.fullpath();
|
||
}
|
||
realpathSync(entry = this.cwd, { withFileTypes } = {
|
||
withFileTypes: false
|
||
}) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
} else if (!(entry instanceof PathBase)) {
|
||
withFileTypes = entry.withFileTypes;
|
||
entry = this.cwd;
|
||
}
|
||
const e = entry.realpathSync();
|
||
return withFileTypes ? e : e?.fullpath();
|
||
}
|
||
async walk(entry = this.cwd, opts = {}) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
} else if (!(entry instanceof PathBase)) {
|
||
opts = entry;
|
||
entry = this.cwd;
|
||
}
|
||
const { withFileTypes = true, follow = false, filter: filter2, walkFilter } = opts;
|
||
const results = [];
|
||
if (!filter2 || filter2(entry)) {
|
||
results.push(withFileTypes ? entry : entry.fullpath());
|
||
}
|
||
const dirs = /* @__PURE__ */ new Set();
|
||
const walk = (dir2, cb) => {
|
||
dirs.add(dir2);
|
||
dir2.readdirCB((er, entries) => {
|
||
if (er) {
|
||
return cb(er);
|
||
}
|
||
let len = entries.length;
|
||
if (!len)
|
||
return cb();
|
||
const next = () => {
|
||
if (--len === 0) {
|
||
cb();
|
||
}
|
||
};
|
||
for (const e of entries) {
|
||
if (!filter2 || filter2(e)) {
|
||
results.push(withFileTypes ? e : e.fullpath());
|
||
}
|
||
if (follow && e.isSymbolicLink()) {
|
||
e.realpath().then((r) => r?.isUnknown() ? r.lstat() : r).then((r) => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
|
||
} else {
|
||
if (e.shouldWalk(dirs, walkFilter)) {
|
||
walk(e, next);
|
||
} else {
|
||
next();
|
||
}
|
||
}
|
||
}
|
||
}, true);
|
||
};
|
||
const start = entry;
|
||
return new Promise((res, rej) => {
|
||
walk(start, (er) => {
|
||
if (er)
|
||
return rej(er);
|
||
res(results);
|
||
});
|
||
});
|
||
}
|
||
walkSync(entry = this.cwd, opts = {}) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
} else if (!(entry instanceof PathBase)) {
|
||
opts = entry;
|
||
entry = this.cwd;
|
||
}
|
||
const { withFileTypes = true, follow = false, filter: filter2, walkFilter } = opts;
|
||
const results = [];
|
||
if (!filter2 || filter2(entry)) {
|
||
results.push(withFileTypes ? entry : entry.fullpath());
|
||
}
|
||
const dirs = /* @__PURE__ */ new Set([entry]);
|
||
for (const dir2 of dirs) {
|
||
const entries = dir2.readdirSync();
|
||
for (const e of entries) {
|
||
if (!filter2 || filter2(e)) {
|
||
results.push(withFileTypes ? e : e.fullpath());
|
||
}
|
||
let r = e;
|
||
if (e.isSymbolicLink()) {
|
||
if (!(follow && (r = e.realpathSync())))
|
||
continue;
|
||
if (r.isUnknown())
|
||
r.lstatSync();
|
||
}
|
||
if (r.shouldWalk(dirs, walkFilter)) {
|
||
dirs.add(r);
|
||
}
|
||
}
|
||
}
|
||
return results;
|
||
}
|
||
/**
|
||
* Support for `for await`
|
||
*
|
||
* Alias for {@link PathScurryBase.iterate}
|
||
*
|
||
* Note: As of Node 19, this is very slow, compared to other methods of
|
||
* walking. Consider using {@link PathScurryBase.stream} if memory overhead
|
||
* and backpressure are concerns, or {@link PathScurryBase.walk} if not.
|
||
*/
|
||
[Symbol.asyncIterator]() {
|
||
return this.iterate();
|
||
}
|
||
iterate(entry = this.cwd, options = {}) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
} else if (!(entry instanceof PathBase)) {
|
||
options = entry;
|
||
entry = this.cwd;
|
||
}
|
||
return this.stream(entry, options)[Symbol.asyncIterator]();
|
||
}
|
||
/**
|
||
* Iterating over a PathScurry performs a synchronous walk.
|
||
*
|
||
* Alias for {@link PathScurryBase.iterateSync}
|
||
*/
|
||
[Symbol.iterator]() {
|
||
return this.iterateSync();
|
||
}
|
||
*iterateSync(entry = this.cwd, opts = {}) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
} else if (!(entry instanceof PathBase)) {
|
||
opts = entry;
|
||
entry = this.cwd;
|
||
}
|
||
const { withFileTypes = true, follow = false, filter: filter2, walkFilter } = opts;
|
||
if (!filter2 || filter2(entry)) {
|
||
yield withFileTypes ? entry : entry.fullpath();
|
||
}
|
||
const dirs = /* @__PURE__ */ new Set([entry]);
|
||
for (const dir2 of dirs) {
|
||
const entries = dir2.readdirSync();
|
||
for (const e of entries) {
|
||
if (!filter2 || filter2(e)) {
|
||
yield withFileTypes ? e : e.fullpath();
|
||
}
|
||
let r = e;
|
||
if (e.isSymbolicLink()) {
|
||
if (!(follow && (r = e.realpathSync())))
|
||
continue;
|
||
if (r.isUnknown())
|
||
r.lstatSync();
|
||
}
|
||
if (r.shouldWalk(dirs, walkFilter)) {
|
||
dirs.add(r);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
stream(entry = this.cwd, opts = {}) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
} else if (!(entry instanceof PathBase)) {
|
||
opts = entry;
|
||
entry = this.cwd;
|
||
}
|
||
const { withFileTypes = true, follow = false, filter: filter2, walkFilter } = opts;
|
||
const results = new Minipass({ objectMode: true });
|
||
if (!filter2 || filter2(entry)) {
|
||
results.write(withFileTypes ? entry : entry.fullpath());
|
||
}
|
||
const dirs = /* @__PURE__ */ new Set();
|
||
const queue = [entry];
|
||
let processing = 0;
|
||
const process2 = () => {
|
||
let paused = false;
|
||
while (!paused) {
|
||
const dir2 = queue.shift();
|
||
if (!dir2) {
|
||
if (processing === 0)
|
||
results.end();
|
||
return;
|
||
}
|
||
processing++;
|
||
dirs.add(dir2);
|
||
const onReaddir = (er, entries, didRealpaths = false) => {
|
||
if (er)
|
||
return results.emit("error", er);
|
||
if (follow && !didRealpaths) {
|
||
const promises = [];
|
||
for (const e of entries) {
|
||
if (e.isSymbolicLink()) {
|
||
promises.push(e.realpath().then((r) => r?.isUnknown() ? r.lstat() : r));
|
||
}
|
||
}
|
||
if (promises.length) {
|
||
Promise.all(promises).then(() => onReaddir(null, entries, true));
|
||
return;
|
||
}
|
||
}
|
||
for (const e of entries) {
|
||
if (e && (!filter2 || filter2(e))) {
|
||
if (!results.write(withFileTypes ? e : e.fullpath())) {
|
||
paused = true;
|
||
}
|
||
}
|
||
}
|
||
processing--;
|
||
for (const e of entries) {
|
||
const r = e.realpathCached() || e;
|
||
if (r.shouldWalk(dirs, walkFilter)) {
|
||
queue.push(r);
|
||
}
|
||
}
|
||
if (paused && !results.flowing) {
|
||
results.once("drain", process2);
|
||
} else if (!sync2) {
|
||
process2();
|
||
}
|
||
};
|
||
let sync2 = true;
|
||
dir2.readdirCB(onReaddir, true);
|
||
sync2 = false;
|
||
}
|
||
};
|
||
process2();
|
||
return results;
|
||
}
|
||
streamSync(entry = this.cwd, opts = {}) {
|
||
if (typeof entry === "string") {
|
||
entry = this.cwd.resolve(entry);
|
||
} else if (!(entry instanceof PathBase)) {
|
||
opts = entry;
|
||
entry = this.cwd;
|
||
}
|
||
const { withFileTypes = true, follow = false, filter: filter2, walkFilter } = opts;
|
||
const results = new Minipass({ objectMode: true });
|
||
const dirs = /* @__PURE__ */ new Set();
|
||
if (!filter2 || filter2(entry)) {
|
||
results.write(withFileTypes ? entry : entry.fullpath());
|
||
}
|
||
const queue = [entry];
|
||
let processing = 0;
|
||
const process2 = () => {
|
||
let paused = false;
|
||
while (!paused) {
|
||
const dir2 = queue.shift();
|
||
if (!dir2) {
|
||
if (processing === 0)
|
||
results.end();
|
||
return;
|
||
}
|
||
processing++;
|
||
dirs.add(dir2);
|
||
const entries = dir2.readdirSync();
|
||
for (const e of entries) {
|
||
if (!filter2 || filter2(e)) {
|
||
if (!results.write(withFileTypes ? e : e.fullpath())) {
|
||
paused = true;
|
||
}
|
||
}
|
||
}
|
||
processing--;
|
||
for (const e of entries) {
|
||
let r = e;
|
||
if (e.isSymbolicLink()) {
|
||
if (!(follow && (r = e.realpathSync())))
|
||
continue;
|
||
if (r.isUnknown())
|
||
r.lstatSync();
|
||
}
|
||
if (r.shouldWalk(dirs, walkFilter)) {
|
||
queue.push(r);
|
||
}
|
||
}
|
||
}
|
||
if (paused && !results.flowing)
|
||
results.once("drain", process2);
|
||
};
|
||
process2();
|
||
return results;
|
||
}
|
||
chdir(path2 = this.cwd) {
|
||
const oldCwd = this.cwd;
|
||
this.cwd = typeof path2 === "string" ? this.cwd.resolve(path2) : path2;
|
||
this.cwd[setAsCwd](oldCwd);
|
||
}
|
||
};
|
||
var PathScurryWin32 = class extends PathScurryBase {
|
||
/**
|
||
* separator for generating path strings
|
||
*/
|
||
sep = "\\";
|
||
constructor(cwd = process.cwd(), opts = {}) {
|
||
const { nocase = true } = opts;
|
||
super(cwd, import_path.win32, "\\", { ...opts, nocase });
|
||
this.nocase = nocase;
|
||
for (let p = this.cwd; p; p = p.parent) {
|
||
p.nocase = this.nocase;
|
||
}
|
||
}
|
||
/**
|
||
* @internal
|
||
*/
|
||
parseRootPath(dir2) {
|
||
return import_path.win32.parse(dir2).root.toUpperCase();
|
||
}
|
||
/**
|
||
* @internal
|
||
*/
|
||
newRoot(fs) {
|
||
return new PathWin32(this.rootPath, IFDIR, void 0, this.roots, this.nocase, this.childrenCache(), { fs });
|
||
}
|
||
/**
|
||
* Return true if the provided path string is an absolute path
|
||
*/
|
||
isAbsolute(p) {
|
||
return p.startsWith("/") || p.startsWith("\\") || /^[a-z]:(\/|\\)/i.test(p);
|
||
}
|
||
};
|
||
var PathScurryPosix = class extends PathScurryBase {
|
||
/**
|
||
* separator for generating path strings
|
||
*/
|
||
sep = "/";
|
||
constructor(cwd = process.cwd(), opts = {}) {
|
||
const { nocase = false } = opts;
|
||
super(cwd, import_path.posix, "/", { ...opts, nocase });
|
||
this.nocase = nocase;
|
||
}
|
||
/**
|
||
* @internal
|
||
*/
|
||
parseRootPath(_dir) {
|
||
return "/";
|
||
}
|
||
/**
|
||
* @internal
|
||
*/
|
||
newRoot(fs) {
|
||
return new PathPosix(this.rootPath, IFDIR, void 0, this.roots, this.nocase, this.childrenCache(), { fs });
|
||
}
|
||
/**
|
||
* Return true if the provided path string is an absolute path
|
||
*/
|
||
isAbsolute(p) {
|
||
return p.startsWith("/");
|
||
}
|
||
};
|
||
var PathScurryDarwin = class extends PathScurryPosix {
|
||
constructor(cwd = process.cwd(), opts = {}) {
|
||
const { nocase = true } = opts;
|
||
super(cwd, { ...opts, nocase });
|
||
}
|
||
};
|
||
var Path = process.platform === "win32" ? PathWin32 : PathPosix;
|
||
var PathScurry = process.platform === "win32" ? PathScurryWin32 : process.platform === "darwin" ? PathScurryDarwin : PathScurryPosix;
|
||
|
||
// node_modules/glob/dist/esm/glob.js
|
||
var import_url2 = require("url");
|
||
|
||
// node_modules/glob/dist/esm/pattern.js
|
||
var isPatternList = (pl) => pl.length >= 1;
|
||
var isGlobList = (gl) => gl.length >= 1;
|
||
var Pattern = class _Pattern {
|
||
#patternList;
|
||
#globList;
|
||
#index;
|
||
length;
|
||
#platform;
|
||
#rest;
|
||
#globString;
|
||
#isDrive;
|
||
#isUNC;
|
||
#isAbsolute;
|
||
#followGlobstar = true;
|
||
constructor(patternList, globList, index2, platform) {
|
||
if (!isPatternList(patternList)) {
|
||
throw new TypeError("empty pattern list");
|
||
}
|
||
if (!isGlobList(globList)) {
|
||
throw new TypeError("empty glob list");
|
||
}
|
||
if (globList.length !== patternList.length) {
|
||
throw new TypeError("mismatched pattern list and glob list lengths");
|
||
}
|
||
this.length = patternList.length;
|
||
if (index2 < 0 || index2 >= this.length) {
|
||
throw new TypeError("index out of range");
|
||
}
|
||
this.#patternList = patternList;
|
||
this.#globList = globList;
|
||
this.#index = index2;
|
||
this.#platform = platform;
|
||
if (this.#index === 0) {
|
||
if (this.isUNC()) {
|
||
const [p0, p1, p2, p3, ...prest] = this.#patternList;
|
||
const [g0, g1, g2, g3, ...grest] = this.#globList;
|
||
if (prest[0] === "") {
|
||
prest.shift();
|
||
grest.shift();
|
||
}
|
||
const p = [p0, p1, p2, p3, ""].join("/");
|
||
const g = [g0, g1, g2, g3, ""].join("/");
|
||
this.#patternList = [p, ...prest];
|
||
this.#globList = [g, ...grest];
|
||
this.length = this.#patternList.length;
|
||
} else if (this.isDrive() || this.isAbsolute()) {
|
||
const [p1, ...prest] = this.#patternList;
|
||
const [g1, ...grest] = this.#globList;
|
||
if (prest[0] === "") {
|
||
prest.shift();
|
||
grest.shift();
|
||
}
|
||
const p = p1 + "/";
|
||
const g = g1 + "/";
|
||
this.#patternList = [p, ...prest];
|
||
this.#globList = [g, ...grest];
|
||
this.length = this.#patternList.length;
|
||
}
|
||
}
|
||
}
|
||
/**
|
||
* The first entry in the parsed list of patterns
|
||
*/
|
||
pattern() {
|
||
return this.#patternList[this.#index];
|
||
}
|
||
/**
|
||
* true of if pattern() returns a string
|
||
*/
|
||
isString() {
|
||
return typeof this.#patternList[this.#index] === "string";
|
||
}
|
||
/**
|
||
* true of if pattern() returns GLOBSTAR
|
||
*/
|
||
isGlobstar() {
|
||
return this.#patternList[this.#index] === GLOBSTAR;
|
||
}
|
||
/**
|
||
* true if pattern() returns a regexp
|
||
*/
|
||
isRegExp() {
|
||
return this.#patternList[this.#index] instanceof RegExp;
|
||
}
|
||
/**
|
||
* The /-joined set of glob parts that make up this pattern
|
||
*/
|
||
globString() {
|
||
return this.#globString = this.#globString || (this.#index === 0 ? this.isAbsolute() ? this.#globList[0] + this.#globList.slice(1).join("/") : this.#globList.join("/") : this.#globList.slice(this.#index).join("/"));
|
||
}
|
||
/**
|
||
* true if there are more pattern parts after this one
|
||
*/
|
||
hasMore() {
|
||
return this.length > this.#index + 1;
|
||
}
|
||
/**
|
||
* The rest of the pattern after this part, or null if this is the end
|
||
*/
|
||
rest() {
|
||
if (this.#rest !== void 0)
|
||
return this.#rest;
|
||
if (!this.hasMore())
|
||
return this.#rest = null;
|
||
this.#rest = new _Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform);
|
||
this.#rest.#isAbsolute = this.#isAbsolute;
|
||
this.#rest.#isUNC = this.#isUNC;
|
||
this.#rest.#isDrive = this.#isDrive;
|
||
return this.#rest;
|
||
}
|
||
/**
|
||
* true if the pattern represents a //unc/path/ on windows
|
||
*/
|
||
isUNC() {
|
||
const pl = this.#patternList;
|
||
return this.#isUNC !== void 0 ? this.#isUNC : this.#isUNC = this.#platform === "win32" && this.#index === 0 && pl[0] === "" && pl[1] === "" && typeof pl[2] === "string" && !!pl[2] && typeof pl[3] === "string" && !!pl[3];
|
||
}
|
||
// pattern like C:/...
|
||
// split = ['C:', ...]
|
||
// XXX: would be nice to handle patterns like `c:*` to test the cwd
|
||
// in c: for *, but I don't know of a way to even figure out what that
|
||
// cwd is without actually chdir'ing into it?
|
||
/**
|
||
* True if the pattern starts with a drive letter on Windows
|
||
*/
|
||
isDrive() {
|
||
const pl = this.#patternList;
|
||
return this.#isDrive !== void 0 ? this.#isDrive : this.#isDrive = this.#platform === "win32" && this.#index === 0 && this.length > 1 && typeof pl[0] === "string" && /^[a-z]:$/i.test(pl[0]);
|
||
}
|
||
// pattern = '/' or '/...' or '/x/...'
|
||
// split = ['', ''] or ['', ...] or ['', 'x', ...]
|
||
// Drive and UNC both considered absolute on windows
|
||
/**
|
||
* True if the pattern is rooted on an absolute path
|
||
*/
|
||
isAbsolute() {
|
||
const pl = this.#patternList;
|
||
return this.#isAbsolute !== void 0 ? this.#isAbsolute : this.#isAbsolute = pl[0] === "" && pl.length > 1 || this.isDrive() || this.isUNC();
|
||
}
|
||
/**
|
||
* consume the root of the pattern, and return it
|
||
*/
|
||
root() {
|
||
const p = this.#patternList[0];
|
||
return typeof p === "string" && this.isAbsolute() && this.#index === 0 ? p : "";
|
||
}
|
||
/**
|
||
* Check to see if the current globstar pattern is allowed to follow
|
||
* a symbolic link.
|
||
*/
|
||
checkFollowGlobstar() {
|
||
return !(this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar);
|
||
}
|
||
/**
|
||
* Mark that the current globstar pattern is following a symbolic link
|
||
*/
|
||
markFollowGlobstar() {
|
||
if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)
|
||
return false;
|
||
this.#followGlobstar = false;
|
||
return true;
|
||
}
|
||
};
|
||
|
||
// node_modules/glob/dist/esm/ignore.js
|
||
var defaultPlatform2 = typeof process === "object" && process && typeof process.platform === "string" ? process.platform : "linux";
|
||
var Ignore = class {
|
||
relative;
|
||
relativeChildren;
|
||
absolute;
|
||
absoluteChildren;
|
||
constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform2 }) {
|
||
this.relative = [];
|
||
this.absolute = [];
|
||
this.relativeChildren = [];
|
||
this.absoluteChildren = [];
|
||
const mmopts = {
|
||
dot: true,
|
||
nobrace,
|
||
nocase,
|
||
noext,
|
||
noglobstar,
|
||
optimizationLevel: 2,
|
||
platform,
|
||
nocomment: true,
|
||
nonegate: true
|
||
};
|
||
for (const ign of ignored) {
|
||
const mm = new Minimatch(ign, mmopts);
|
||
for (let i = 0; i < mm.set.length; i++) {
|
||
const parsed = mm.set[i];
|
||
const globParts = mm.globParts[i];
|
||
if (!parsed || !globParts) {
|
||
throw new Error("invalid pattern object");
|
||
}
|
||
const p = new Pattern(parsed, globParts, 0, platform);
|
||
const m = new Minimatch(p.globString(), mmopts);
|
||
const children = globParts[globParts.length - 1] === "**";
|
||
const absolute = p.isAbsolute();
|
||
if (absolute)
|
||
this.absolute.push(m);
|
||
else
|
||
this.relative.push(m);
|
||
if (children) {
|
||
if (absolute)
|
||
this.absoluteChildren.push(m);
|
||
else
|
||
this.relativeChildren.push(m);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
ignored(p) {
|
||
const fullpath = p.fullpath();
|
||
const fullpaths = `${fullpath}/`;
|
||
const relative = p.relative() || ".";
|
||
const relatives = `${relative}/`;
|
||
for (const m of this.relative) {
|
||
if (m.match(relative) || m.match(relatives))
|
||
return true;
|
||
}
|
||
for (const m of this.absolute) {
|
||
if (m.match(fullpath) || m.match(fullpaths))
|
||
return true;
|
||
}
|
||
return false;
|
||
}
|
||
childrenIgnored(p) {
|
||
const fullpath = p.fullpath() + "/";
|
||
const relative = (p.relative() || ".") + "/";
|
||
for (const m of this.relativeChildren) {
|
||
if (m.match(relative))
|
||
return true;
|
||
}
|
||
for (const m of this.absoluteChildren) {
|
||
if (m.match(fullpath))
|
||
return true;
|
||
}
|
||
return false;
|
||
}
|
||
};
|
||
|
||
// node_modules/glob/dist/esm/processor.js
|
||
var HasWalkedCache = class _HasWalkedCache {
|
||
store;
|
||
constructor(store = /* @__PURE__ */ new Map()) {
|
||
this.store = store;
|
||
}
|
||
copy() {
|
||
return new _HasWalkedCache(new Map(this.store));
|
||
}
|
||
hasWalked(target, pattern) {
|
||
return this.store.get(target.fullpath())?.has(pattern.globString());
|
||
}
|
||
storeWalked(target, pattern) {
|
||
const fullpath = target.fullpath();
|
||
const cached = this.store.get(fullpath);
|
||
if (cached)
|
||
cached.add(pattern.globString());
|
||
else
|
||
this.store.set(fullpath, /* @__PURE__ */ new Set([pattern.globString()]));
|
||
}
|
||
};
|
||
var MatchRecord = class {
|
||
store = /* @__PURE__ */ new Map();
|
||
add(target, absolute, ifDir) {
|
||
const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0);
|
||
const current = this.store.get(target);
|
||
this.store.set(target, current === void 0 ? n : n & current);
|
||
}
|
||
// match, absolute, ifdir
|
||
entries() {
|
||
return [...this.store.entries()].map(([path2, n]) => [
|
||
path2,
|
||
!!(n & 2),
|
||
!!(n & 1)
|
||
]);
|
||
}
|
||
};
|
||
var SubWalks = class {
|
||
store = /* @__PURE__ */ new Map();
|
||
add(target, pattern) {
|
||
if (!target.canReaddir()) {
|
||
return;
|
||
}
|
||
const subs = this.store.get(target);
|
||
if (subs) {
|
||
if (!subs.find((p) => p.globString() === pattern.globString())) {
|
||
subs.push(pattern);
|
||
}
|
||
} else
|
||
this.store.set(target, [pattern]);
|
||
}
|
||
get(target) {
|
||
const subs = this.store.get(target);
|
||
if (!subs) {
|
||
throw new Error("attempting to walk unknown path");
|
||
}
|
||
return subs;
|
||
}
|
||
entries() {
|
||
return this.keys().map((k) => [k, this.store.get(k)]);
|
||
}
|
||
keys() {
|
||
return [...this.store.keys()].filter((t) => t.canReaddir());
|
||
}
|
||
};
|
||
var Processor2 = class _Processor {
|
||
hasWalkedCache;
|
||
matches = new MatchRecord();
|
||
subwalks = new SubWalks();
|
||
patterns;
|
||
follow;
|
||
dot;
|
||
opts;
|
||
constructor(opts, hasWalkedCache) {
|
||
this.opts = opts;
|
||
this.follow = !!opts.follow;
|
||
this.dot = !!opts.dot;
|
||
this.hasWalkedCache = hasWalkedCache ? hasWalkedCache.copy() : new HasWalkedCache();
|
||
}
|
||
processPatterns(target, patterns) {
|
||
this.patterns = patterns;
|
||
const processingSet = patterns.map((p) => [target, p]);
|
||
for (let [t, pattern] of processingSet) {
|
||
this.hasWalkedCache.storeWalked(t, pattern);
|
||
const root2 = pattern.root();
|
||
const absolute = pattern.isAbsolute() && this.opts.absolute !== false;
|
||
if (root2) {
|
||
t = t.resolve(root2 === "/" && this.opts.root !== void 0 ? this.opts.root : root2);
|
||
const rest2 = pattern.rest();
|
||
if (!rest2) {
|
||
this.matches.add(t, true, false);
|
||
continue;
|
||
} else {
|
||
pattern = rest2;
|
||
}
|
||
}
|
||
if (t.isENOENT())
|
||
continue;
|
||
let p;
|
||
let rest;
|
||
let changed = false;
|
||
while (typeof (p = pattern.pattern()) === "string" && (rest = pattern.rest())) {
|
||
const c = t.resolve(p);
|
||
t = c;
|
||
pattern = rest;
|
||
changed = true;
|
||
}
|
||
p = pattern.pattern();
|
||
rest = pattern.rest();
|
||
if (changed) {
|
||
if (this.hasWalkedCache.hasWalked(t, pattern))
|
||
continue;
|
||
this.hasWalkedCache.storeWalked(t, pattern);
|
||
}
|
||
if (typeof p === "string") {
|
||
const ifDir = p === ".." || p === "" || p === ".";
|
||
this.matches.add(t.resolve(p), absolute, ifDir);
|
||
continue;
|
||
} else if (p === GLOBSTAR) {
|
||
if (!t.isSymbolicLink() || this.follow || pattern.checkFollowGlobstar()) {
|
||
this.subwalks.add(t, pattern);
|
||
}
|
||
const rp = rest?.pattern();
|
||
const rrest = rest?.rest();
|
||
if (!rest || (rp === "" || rp === ".") && !rrest) {
|
||
this.matches.add(t, absolute, rp === "" || rp === ".");
|
||
} else {
|
||
if (rp === "..") {
|
||
const tp = t.parent || t;
|
||
if (!rrest)
|
||
this.matches.add(tp, absolute, true);
|
||
else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {
|
||
this.subwalks.add(tp, rrest);
|
||
}
|
||
}
|
||
}
|
||
} else if (p instanceof RegExp) {
|
||
this.subwalks.add(t, pattern);
|
||
}
|
||
}
|
||
return this;
|
||
}
|
||
subwalkTargets() {
|
||
return this.subwalks.keys();
|
||
}
|
||
child() {
|
||
return new _Processor(this.opts, this.hasWalkedCache);
|
||
}
|
||
// return a new Processor containing the subwalks for each
|
||
// child entry, and a set of matches, and
|
||
// a hasWalkedCache that's a copy of this one
|
||
// then we're going to call
|
||
filterEntries(parent, entries) {
|
||
const patterns = this.subwalks.get(parent);
|
||
const results = this.child();
|
||
for (const e of entries) {
|
||
for (const pattern of patterns) {
|
||
const absolute = pattern.isAbsolute();
|
||
const p = pattern.pattern();
|
||
const rest = pattern.rest();
|
||
if (p === GLOBSTAR) {
|
||
results.testGlobstar(e, pattern, rest, absolute);
|
||
} else if (p instanceof RegExp) {
|
||
results.testRegExp(e, p, rest, absolute);
|
||
} else {
|
||
results.testString(e, p, rest, absolute);
|
||
}
|
||
}
|
||
}
|
||
return results;
|
||
}
|
||
testGlobstar(e, pattern, rest, absolute) {
|
||
if (this.dot || !e.name.startsWith(".")) {
|
||
if (!pattern.hasMore()) {
|
||
this.matches.add(e, absolute, false);
|
||
}
|
||
if (e.canReaddir()) {
|
||
if (this.follow || !e.isSymbolicLink()) {
|
||
this.subwalks.add(e, pattern);
|
||
} else if (e.isSymbolicLink()) {
|
||
if (rest && pattern.checkFollowGlobstar()) {
|
||
this.subwalks.add(e, rest);
|
||
} else if (pattern.markFollowGlobstar()) {
|
||
this.subwalks.add(e, pattern);
|
||
}
|
||
}
|
||
}
|
||
}
|
||
if (rest) {
|
||
const rp = rest.pattern();
|
||
if (typeof rp === "string" && // dots and empty were handled already
|
||
rp !== ".." && rp !== "" && rp !== ".") {
|
||
this.testString(e, rp, rest.rest(), absolute);
|
||
} else if (rp === "..") {
|
||
const ep = e.parent || e;
|
||
this.subwalks.add(ep, rest);
|
||
} else if (rp instanceof RegExp) {
|
||
this.testRegExp(e, rp, rest.rest(), absolute);
|
||
}
|
||
}
|
||
}
|
||
testRegExp(e, p, rest, absolute) {
|
||
if (!p.test(e.name))
|
||
return;
|
||
if (!rest) {
|
||
this.matches.add(e, absolute, false);
|
||
} else {
|
||
this.subwalks.add(e, rest);
|
||
}
|
||
}
|
||
testString(e, p, rest, absolute) {
|
||
if (!e.isNamed(p))
|
||
return;
|
||
if (!rest) {
|
||
this.matches.add(e, absolute, false);
|
||
} else {
|
||
this.subwalks.add(e, rest);
|
||
}
|
||
}
|
||
};
|
||
|
||
// node_modules/glob/dist/esm/walker.js
|
||
var makeIgnore = (ignore, opts) => typeof ignore === "string" ? new Ignore([ignore], opts) : Array.isArray(ignore) ? new Ignore(ignore, opts) : ignore;
|
||
// Shared base for GlobWalker and GlobStream. Owns the pattern set, the
// root Path, ignore filtering, pause/resume backpressure, and the
// recursive walk callbacks. Subclasses supply matchEmit() to deliver
// each finished result.
var GlobUtil = class {
  path;      // root Path entry the walk starts from
  patterns;  // array of Pattern objects to match
  opts;
  seen = /* @__PURE__ */ new Set();  // entries already emitted (dedupe)
  paused = false;
  aborted = false;
  #onResume = [];  // callbacks deferred while paused
  #ignore;         // optional Ignore built from opts.ignore
  #sep;            // separator used for `mark` / `dotRelative` output
  signal;
  maxDepth;
  constructor(patterns, path2, opts) {
    this.patterns = patterns;
    this.path = path2;
    this.opts = opts;
    this.#sep = !opts.posix && opts.platform === "win32" ? "\\" : "/";
    if (opts.ignore) {
      this.#ignore = makeIgnore(opts.ignore, opts);
    }
    this.maxDepth = opts.maxDepth || Infinity;
    if (opts.signal) {
      this.signal = opts.signal;
      // On abort, drop any queued resume callbacks so the walk stops.
      this.signal.addEventListener("abort", () => {
        this.#onResume.length = 0;
      });
    }
  }
  // True when the path was already emitted or the ignore rules reject it.
  #ignored(path2) {
    return this.seen.has(path2) || !!this.#ignore?.ignored?.(path2);
  }
  #childrenIgnored(path2) {
    return !!this.#ignore?.childrenIgnored?.(path2);
  }
  // backpressure mechanism
  pause() {
    this.paused = true;
  }
  // Un-pause and drain deferred callbacks until someone pauses again.
  resume() {
    if (this.signal?.aborted)
      return;
    this.paused = false;
    let fn = void 0;
    while (!this.paused && (fn = this.#onResume.shift())) {
      fn();
    }
  }
  // Run `fn` now, or queue it until resume() if currently paused.
  onResume(fn) {
    if (this.signal?.aborted)
      return;
    if (!this.paused) {
      fn();
    } else {
      this.#onResume.push(fn);
    }
  }
  // do the requisite realpath/stat checking, and return the path
  // to add or undefined to filter it out.
  async matchCheck(e, ifDir) {
    if (ifDir && this.opts.nodir)
      return void 0;
    let rpc;
    if (this.opts.realpath) {
      // Resolve symlinks; a broken link (no realpath) is filtered out.
      rpc = e.realpathCached() || await e.realpath();
      if (!rpc)
        return void 0;
      e = rpc;
    }
    // lstat only when type info is missing or stat was requested.
    const needStat = e.isUnknown() || this.opts.stat;
    return this.matchCheckTest(needStat ? await e.lstat() : e, ifDir);
  }
  // Pure filter shared by the sync/async checks: depth, readability for
  // directory-only matches, nodir, and ignore rules.
  matchCheckTest(e, ifDir) {
    return e && (this.maxDepth === Infinity || e.depth() <= this.maxDepth) && (!ifDir || e.canReaddir()) && (!this.opts.nodir || !e.isDirectory()) && !this.#ignored(e) ? e : void 0;
  }
  // Synchronous variant of matchCheck().
  matchCheckSync(e, ifDir) {
    if (ifDir && this.opts.nodir)
      return void 0;
    let rpc;
    if (this.opts.realpath) {
      rpc = e.realpathCached() || e.realpathSync();
      if (!rpc)
        return void 0;
      e = rpc;
    }
    const needStat = e.isUnknown() || this.opts.stat;
    return this.matchCheckTest(needStat ? e.lstatSync() : e, ifDir);
  }
  // Format a confirmed match (entry object, absolute, or relative path
  // per options) and hand it to the subclass's matchEmit().
  matchFinish(e, absolute) {
    if (this.#ignored(e))
      return;
    // opts.absolute, when set, overrides the per-pattern absolute flag.
    const abs = this.opts.absolute === void 0 ? absolute : this.opts.absolute;
    this.seen.add(e);
    // `mark` appends a trailing separator to directory results.
    const mark = this.opts.mark && e.isDirectory() ? this.#sep : "";
    if (this.opts.withFileTypes) {
      this.matchEmit(e);
    } else if (abs) {
      const abs2 = this.opts.posix ? e.fullpathPosix() : e.fullpath();
      this.matchEmit(abs2 + mark);
    } else {
      const rel = this.opts.posix ? e.relativePosix() : e.relative();
      // dotRelative prefixes "./" unless the path already walks up.
      const pre = this.opts.dotRelative && !rel.startsWith(".." + this.#sep) ? "." + this.#sep : "";
      this.matchEmit(!rel ? "." + mark : pre + rel + mark);
    }
  }
  async match(e, absolute, ifDir) {
    const p = await this.matchCheck(e, ifDir);
    if (p)
      this.matchFinish(p, absolute);
  }
  matchSync(e, absolute, ifDir) {
    const p = this.matchCheckSync(e, ifDir);
    if (p)
      this.matchFinish(p, absolute);
  }
  // Entry point for the async walk; `cb` fires when the walk completes.
  walkCB(target, patterns, cb) {
    if (this.signal?.aborted)
      cb();
    this.walkCB2(target, patterns, new Processor2(this.opts), cb);
  }
  // Process patterns at `target`, then fan out into matches and
  // sub-walks. `tasks` counts outstanding work; `cb` fires at zero.
  walkCB2(target, patterns, processor, cb) {
    if (this.#childrenIgnored(target))
      return cb();
    if (this.signal?.aborted)
      cb();
    if (this.paused) {
      // Re-enter this call once resumed (backpressure).
      this.onResume(() => this.walkCB2(target, patterns, processor, cb));
      return;
    }
    processor.processPatterns(target, patterns);
    // Start at 1 so the trailing next() below closes the count.
    let tasks = 1;
    const next = () => {
      if (--tasks === 0)
        cb();
    };
    for (const [m, absolute, ifDir] of processor.matches.entries()) {
      if (this.#ignored(m))
        continue;
      tasks++;
      this.match(m, absolute, ifDir).then(() => next());
    }
    for (const t of processor.subwalkTargets()) {
      if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
        continue;
      }
      tasks++;
      const childrenCached = t.readdirCached();
      if (t.calledReaddir())
        this.walkCB3(t, childrenCached, processor, next);
      else {
        t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true);
      }
    }
    next();
  }
  // Second phase: filter the freshly-read directory entries, emit the
  // resulting matches, and recurse into child sub-walks.
  walkCB3(target, entries, processor, cb) {
    processor = processor.filterEntries(target, entries);
    let tasks = 1;
    const next = () => {
      if (--tasks === 0)
        cb();
    };
    for (const [m, absolute, ifDir] of processor.matches.entries()) {
      if (this.#ignored(m))
        continue;
      tasks++;
      this.match(m, absolute, ifDir).then(() => next());
    }
    for (const [target2, patterns] of processor.subwalks.entries()) {
      tasks++;
      this.walkCB2(target2, patterns, processor.child(), next);
    }
    next();
  }
  // Synchronous mirror of walkCB().
  walkCBSync(target, patterns, cb) {
    if (this.signal?.aborted)
      cb();
    this.walkCB2Sync(target, patterns, new Processor2(this.opts), cb);
  }
  // Synchronous mirror of walkCB2(); matches are emitted inline, so
  // only sub-walks contribute to the task count.
  walkCB2Sync(target, patterns, processor, cb) {
    if (this.#childrenIgnored(target))
      return cb();
    if (this.signal?.aborted)
      cb();
    if (this.paused) {
      this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb));
      return;
    }
    processor.processPatterns(target, patterns);
    let tasks = 1;
    const next = () => {
      if (--tasks === 0)
        cb();
    };
    for (const [m, absolute, ifDir] of processor.matches.entries()) {
      if (this.#ignored(m))
        continue;
      this.matchSync(m, absolute, ifDir);
    }
    for (const t of processor.subwalkTargets()) {
      if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
        continue;
      }
      tasks++;
      const children = t.readdirSync();
      this.walkCB3Sync(t, children, processor, next);
    }
    next();
  }
  // Synchronous mirror of walkCB3().
  walkCB3Sync(target, entries, processor, cb) {
    processor = processor.filterEntries(target, entries);
    let tasks = 1;
    const next = () => {
      if (--tasks === 0)
        cb();
    };
    for (const [m, absolute, ifDir] of processor.matches.entries()) {
      if (this.#ignored(m))
        continue;
      this.matchSync(m, absolute, ifDir);
    }
    for (const [target2, patterns] of processor.subwalks.entries()) {
      tasks++;
      this.walkCB2Sync(target2, patterns, processor.child(), next);
    }
    next();
  }
};
|
||
// Walker that collects all results into a Set and returns it whole.
var GlobWalker = class extends GlobUtil {
  matches;
  constructor(patterns, path2, opts) {
    super(patterns, path2, opts);
    this.matches = /* @__PURE__ */ new Set();
  }
  // Collect each finished result (shape decided by matchFinish()).
  matchEmit(e) {
    this.matches.add(e);
  }
  // Async walk: resolves with the Set of matches, or rejects with the
  // abort reason if the signal fires mid-walk.
  async walk() {
    if (this.signal?.aborted)
      throw this.signal.reason;
    // Prime the root entry's stat info if nothing is known about it.
    if (this.path.isUnknown()) {
      await this.path.lstat();
    }
    await new Promise((res, rej) => {
      this.walkCB(this.path, this.patterns, () => {
        if (this.signal?.aborted) {
          rej(this.signal.reason);
        } else {
          res(this.matches);
        }
      });
    });
    return this.matches;
  }
  // Synchronous variant of walk(); throws the abort reason on abort.
  walkSync() {
    if (this.signal?.aborted)
      throw this.signal.reason;
    if (this.path.isUnknown()) {
      this.path.lstatSync();
    }
    this.walkCBSync(this.path, this.patterns, () => {
      if (this.signal?.aborted)
        throw this.signal.reason;
    });
    return this.matches;
  }
};
|
||
// Walker that emits results through a Minipass object-mode stream,
// pausing the walk when the stream's buffer stops flowing.
var GlobStream = class extends GlobUtil {
  results;
  constructor(patterns, path2, opts) {
    super(patterns, path2, opts);
    this.results = new Minipass({
      signal: this.signal,
      objectMode: true
    });
    // Resume the walk whenever the consumer drains or resumes the stream.
    this.results.on("drain", () => this.resume());
    this.results.on("resume", () => this.resume());
  }
  // Write each result; pause the walk if the stream back-pressures.
  matchEmit(e) {
    this.results.write(e);
    if (!this.results.flowing)
      this.pause();
  }
  // Start the async walk and return the stream immediately; the stream
  // ends when the walk completes.
  stream() {
    const target = this.path;
    if (target.isUnknown()) {
      // Stat the root first, then kick off the walk.
      target.lstat().then(() => {
        this.walkCB(target, this.patterns, () => this.results.end());
      });
    } else {
      this.walkCB(target, this.patterns, () => this.results.end());
    }
    return this.results;
  }
  // Synchronous variant: the walk runs to completion before returning,
  // so the returned stream is already fully buffered and ended.
  streamSync() {
    if (this.path.isUnknown()) {
      this.path.lstatSync();
    }
    this.walkCBSync(this.path, this.patterns, () => this.results.end());
    return this.results;
  }
};
|
||
|
||
// node_modules/glob/dist/esm/glob.js
// Default to the running platform; fall back to "linux" when no usable
// `process.platform` exists (e.g. non-Node environments).
var defaultPlatform3 = (() => {
  if (typeof process === "object" && process && typeof process.platform === "string") {
    return process.platform;
  }
  return "linux";
})();
|
||
// Main Glob class: normalizes options, compiles patterns via Minimatch,
// and exposes walk/stream/iterate traversals (sync and async).
var Glob = class {
  absolute;
  cwd;
  root;
  dot;
  dotRelative;
  follow;
  ignore;
  magicalBraces;
  mark;
  matchBase;
  maxDepth;
  nobrace;
  nocase;
  nodir;
  noext;
  noglobstar;
  pattern;
  platform;
  realpath;
  scurry;
  stat;
  signal;
  windowsPathsNoEscape;
  withFileTypes;
  /**
   * The options provided to the constructor.
   */
  opts;
  /**
   * An array of parsed immutable {@link Pattern} objects.
   */
  patterns;
  /**
   * All options are stored as properties on the `Glob` object.
   *
   * See {@link GlobOptions} for full options descriptions.
   *
   * Note that a previous `Glob` object can be passed as the
   * `GlobOptions` to another `Glob` instantiation to re-use settings
   * and caches with a new pattern.
   *
   * Traversal functions can be called multiple times to run the walk
   * again.
   */
  constructor(pattern, opts) {
    if (!opts)
      throw new TypeError("glob options required");
    // Coerce all boolean-ish options up front.
    this.withFileTypes = !!opts.withFileTypes;
    this.signal = opts.signal;
    this.follow = !!opts.follow;
    this.dot = !!opts.dot;
    this.dotRelative = !!opts.dotRelative;
    this.nodir = !!opts.nodir;
    this.mark = !!opts.mark;
    if (!opts.cwd) {
      this.cwd = "";
    } else if (opts.cwd instanceof URL || opts.cwd.startsWith("file://")) {
      // Accept file:// URLs (and URL objects) as the cwd.
      opts.cwd = (0, import_url2.fileURLToPath)(opts.cwd);
    }
    this.cwd = opts.cwd || "";
    this.root = opts.root;
    this.magicalBraces = !!opts.magicalBraces;
    this.nobrace = !!opts.nobrace;
    this.noext = !!opts.noext;
    this.realpath = !!opts.realpath;
    this.absolute = opts.absolute;
    this.noglobstar = !!opts.noglobstar;
    this.matchBase = !!opts.matchBase;
    this.maxDepth = typeof opts.maxDepth === "number" ? opts.maxDepth : Infinity;
    this.stat = !!opts.stat;
    this.ignore = opts.ignore;
    // withFileTypes results are entry objects, so an absolute/relative
    // string form cannot also be requested.
    if (this.withFileTypes && this.absolute !== void 0) {
      throw new Error("cannot set absolute and withFileTypes:true");
    }
    if (typeof pattern === "string") {
      pattern = [pattern];
    }
    this.windowsPathsNoEscape = !!opts.windowsPathsNoEscape || opts.allowWindowsEscape === false;
    if (this.windowsPathsNoEscape) {
      // Treat backslashes as separators, not escapes.
      pattern = pattern.map((p) => p.replace(/\\/g, "/"));
    }
    if (this.matchBase) {
      if (opts.noglobstar) {
        throw new TypeError("base matching requires globstar");
      }
      // Basename-only patterns match at any depth.
      pattern = pattern.map((p) => p.includes("/") ? p : `./**/${p}`);
    }
    this.pattern = pattern;
    this.platform = opts.platform || defaultPlatform3;
    this.opts = { ...opts, platform: this.platform };
    if (opts.scurry) {
      // Re-use a caller-provided PathScurry (and its caches).
      this.scurry = opts.scurry;
      if (opts.nocase !== void 0 && opts.nocase !== opts.scurry.nocase) {
        throw new Error("nocase option contradicts provided scurry option");
      }
    } else {
      // Pick the PathScurry flavor matching the target platform.
      const Scurry = opts.platform === "win32" ? PathScurryWin32 : opts.platform === "darwin" ? PathScurryDarwin : opts.platform ? PathScurryPosix : PathScurry;
      this.scurry = new Scurry(this.cwd, {
        nocase: opts.nocase,
        fs: opts.fs
      });
    }
    this.nocase = this.scurry.nocase;
    const nocaseMagicOnly = this.platform === "darwin" || this.platform === "win32";
    const mmo = {
      // default nocase based on platform
      ...opts,
      dot: this.dot,
      matchBase: this.matchBase,
      nobrace: this.nobrace,
      nocase: this.nocase,
      nocaseMagicOnly,
      nocomment: true,
      noext: this.noext,
      nonegate: true,
      optimizationLevel: 2,
      platform: this.platform,
      windowsPathsNoEscape: this.windowsPathsNoEscape,
      debug: !!this.opts.debug
    };
    // Compile each pattern, then flatten all expanded match sets (brace
    // expansion can yield several per pattern) into parallel arrays.
    const mms = this.pattern.map((p) => new Minimatch(p, mmo));
    const [matchSet, globParts] = mms.reduce((set, m) => {
      set[0].push(...m.set);
      set[1].push(...m.globParts);
      return set;
    }, [[], []]);
    this.patterns = matchSet.map((set, i) => {
      const g = globParts[i];
      if (!g)
        throw new Error("invalid pattern object");
      return new Pattern(set, g, 0, this.platform);
    });
  }
  // Async walk returning an array of results. maxDepth is rebased onto
  // the cwd's own depth since the walker measures absolute depth.
  async walk() {
    return [
      ...await new GlobWalker(this.patterns, this.scurry.cwd, {
        ...this.opts,
        maxDepth: this.maxDepth !== Infinity ? this.maxDepth + this.scurry.cwd.depth() : Infinity,
        platform: this.platform,
        nocase: this.nocase
      }).walk()
    ];
  }
  // Synchronous walk returning an array of results.
  walkSync() {
    return [
      ...new GlobWalker(this.patterns, this.scurry.cwd, {
        ...this.opts,
        maxDepth: this.maxDepth !== Infinity ? this.maxDepth + this.scurry.cwd.depth() : Infinity,
        platform: this.platform,
        nocase: this.nocase
      }).walkSync()
    ];
  }
  // Streaming traversal (async readdir under the hood).
  stream() {
    return new GlobStream(this.patterns, this.scurry.cwd, {
      ...this.opts,
      maxDepth: this.maxDepth !== Infinity ? this.maxDepth + this.scurry.cwd.depth() : Infinity,
      platform: this.platform,
      nocase: this.nocase
    }).stream();
  }
  // Streaming traversal, fully synchronous.
  streamSync() {
    return new GlobStream(this.patterns, this.scurry.cwd, {
      ...this.opts,
      maxDepth: this.maxDepth !== Infinity ? this.maxDepth + this.scurry.cwd.depth() : Infinity,
      platform: this.platform,
      nocase: this.nocase
    }).streamSync();
  }
  /**
   * Default sync iteration function. Returns a Generator that
   * iterates over the results.
   */
  iterateSync() {
    return this.streamSync()[Symbol.iterator]();
  }
  [Symbol.iterator]() {
    return this.iterateSync();
  }
  /**
   * Default async iteration function. Returns an AsyncGenerator that
   * iterates over the results.
   */
  iterate() {
    return this.stream()[Symbol.asyncIterator]();
  }
  [Symbol.asyncIterator]() {
    return this.iterate();
  }
};
|
||
|
||
// node_modules/glob/dist/esm/has-magic.js
// True if any of the given pattern(s) contains glob magic characters,
// as judged by Minimatch under the provided options.
var hasMagic = (pattern, options = {}) => {
  const patterns = Array.isArray(pattern) ? pattern : [pattern];
  return patterns.some((p) => new Minimatch(p, options).hasMagic());
};
|
||
|
||
// node_modules/glob/dist/esm/index.js
// Thin convenience wrappers: each constructs a Glob and invokes one of
// its traversal methods.
// Synchronous streaming interface.
function globStreamSync(pattern, options = {}) {
  return new Glob(pattern, options).streamSync();
}
// Async streaming interface.
function globStream(pattern, options = {}) {
  return new Glob(pattern, options).stream();
}
// Synchronous walk returning an array of results.
function globSync(pattern, options = {}) {
  return new Glob(pattern, options).walkSync();
}
// Async walk returning a Promise of an array of results.
async function glob_(pattern, options = {}) {
  return new Glob(pattern, options).walk();
}
// Synchronous iterator of results.
function globIterateSync(pattern, options = {}) {
  return new Glob(pattern, options).iterateSync();
}
// Async iterator of results.
function globIterate(pattern, options = {}) {
  return new Glob(pattern, options).iterate();
}
|
||
// Aliases and attached sub-functions mirroring the glob package's
// public export surface (glob.sync, glob.stream.sync, etc.).
var streamSync = globStreamSync;
var stream = Object.assign(globStream, { sync: globStreamSync });
var iterateSync = globIterateSync;
var iterate = Object.assign(globIterate, {
  sync: globIterateSync
});
var sync = Object.assign(globSync, {
  stream: globStreamSync,
  iterate: globIterateSync
});
// The main export: callable, with every variant hung off it.
var glob = Object.assign(glob_, {
  glob: glob_,
  globSync,
  sync,
  globStream,
  stream,
  globStreamSync,
  streamSync,
  globIterate,
  iterate,
  globIterateSync,
  iterateSync,
  Glob,
  hasMagic,
  escape,
  unescape
});
// Make glob.glob self-referential (overrides the glob_ set above).
glob.glob = glob;
|
||
|
||
// node_modules/strip-indent/index.js
var import_min_indent = __toESM(require_min_indent(), 1);
// Remove the common leading indentation shared by every line of the
// input, leaving relative indentation intact.
function stripIndent(string3) {
  // Width of the smallest leading indent across all lines.
  const width = (0, import_min_indent.default)(string3);
  if (width === 0) {
    return string3;
  }
  // Strip exactly that many leading spaces/tabs from each line.
  return string3.replace(new RegExp(`^[ \\t]{${width}}`, "gm"), "");
}
|
||
|
||
// node_modules/indent-string/index.js
/**
 * Prefix each line of `string3` with `count` repetitions of
 * `options.indent` (default: a single space). Whitespace-only lines
 * are left untouched unless `options.includeEmptyLines` is set.
 *
 * @param {string} string3 - Text to indent.
 * @param {number} [count=1] - Repetitions of the indent unit per line.
 * @param {{indent?: string, includeEmptyLines?: boolean}} [options]
 * @returns {string} The indented text.
 * @throws {TypeError} When input/indent is not a string or count is
 *   not a number.
 * @throws {RangeError} When count is negative.
 */
function indentString(string3, count = 1, options = {}) {
  const { indent = " ", includeEmptyLines = false } = options;
  if (typeof string3 !== "string") {
    throw new TypeError(
      `Expected \`input\` to be a \`string\`, got \`${typeof string3}\``
    );
  }
  if (typeof count !== "number") {
    throw new TypeError(
      `Expected \`count\` to be a \`number\`, got \`${typeof count}\``
    );
  }
  if (count < 0) {
    throw new RangeError(
      `Expected \`count\` to be at least 0, got \`${count}\``
    );
  }
  if (typeof indent !== "string") {
    throw new TypeError(
      `Expected \`options.indent\` to be a \`string\`, got \`${typeof indent}\``
    );
  }
  if (count === 0) {
    return string3;
  }
  // Anchor at every line start, or only at non-blank line starts.
  const lineStart = includeEmptyLines ? /^/gm : /^(?!\s*$)/gm;
  return string3.replace(lineStart, indent.repeat(count));
}
|
||
|
||
// index.js
/**
 * Rewrite indentation-style Markdown code blocks in `filePath` as
 * fenced (```) code blocks, editing the file in place.
 *
 * Parses the document with remark (frontmatter-aware) and, for every
 * code node without a language tag, splices a fenced replacement into
 * a character-array copy of the document, tracking the offset shift
 * caused by earlier replacements.
 *
 * NOTE(review): indented blocks parse with `lang` null, but so do
 * fenced blocks that have no info string — those would be re-fenced
 * too; confirm that is acceptable.
 *
 * Fixes vs. previous revision: removed a stray no-op `console.log;`
 * statement and an unused `file` local; behavior is unchanged.
 *
 * @param {string} filePath - Path of the Markdown file to convert.
 */
function convertIndentedToFenced(filePath) {
  const doc = (0, import_fs2.readFileSync)(filePath, "utf-8");
  // Edit a char array so replacements can be spliced by absolute
  // offset into the original document.
  const outputDoc = doc.split("");
  let changeOffset = 0;
  let hasCode = false;
  remark().use(remarkFrontmatter).use(function detector() {
    return (tree) => {
      visit(tree, "code", (node2) => {
        // Skip blocks that already carry a language (fenced w/ info).
        if (node2.lang)
          return;
        hasCode = true;
        const value2 = stripIndent(node2.value);
        const from = node2.position.start.offset;
        const to = node2.position.end.offset;
        // Snap the fence indent to the nearest multiple of 4 columns.
        const indentLevel = Math.round(node2.position.start.column / 4) * 4;
        const newBlock = " ".repeat(
          Math.max(0, indentLevel - node2.position.start.column + 1)
        ) + "```\n" + indentString(value2 + "\n```", indentLevel);
        outputDoc.splice(
          from + changeOffset,
          to - from,
          ...newBlock.split("")
        );
        // Later nodes' offsets must account for this edit's size delta.
        changeOffset = changeOffset + newBlock.length - (to - from);
      });
    };
  }).processSync(doc);
  if (hasCode) {
    (0, import_fs2.writeFileSync)(filePath, outputDoc.join(""), "utf-8");
  }
  // Files without convertible code blocks are left untouched.
}
|
||
// CLI entry point: expects a directory as argv[2] and converts every
// .md file beneath it.
var dir = process.argv[2];
if (!dir) {
  // Fix: usage errors belong on stderr, not stdout (exit code kept).
  console.error("Error: provide directory as argument");
  process.exit(10);
}
var mdFiles = sync(dir + "/**/*.md", { nodir: true });
for (const f of mdFiles) {
  convertIndentedToFenced(f);
}
|