// Mirror of https://github.com/Funkoala14/knowledgebase_law.git
// Synced 2025-06-08 22:08:16 +08:00
// 4507 lines, 131 KiB, JavaScript
import {
  __publicField
} from "./chunk-2TUXWMP5.js";
// node_modules/devlop/lib/development.js
|
||
var AssertionError = class extends Error {
|
||
/**
|
||
* Create an assertion error.
|
||
*
|
||
* @param {string} message
|
||
* Message explaining error.
|
||
* @param {unknown} actual
|
||
* Value.
|
||
* @param {unknown} expected
|
||
* Baseline.
|
||
* @param {string} operator
|
||
* Name of equality operation.
|
||
* @param {boolean} generated
|
||
* Whether `message` is a custom message or not
|
||
* @returns
|
||
* Instance.
|
||
*/
|
||
// eslint-disable-next-line max-params
|
||
constructor(message, actual, expected, operator, generated) {
|
||
super(message);
|
||
__publicField(
|
||
this,
|
||
"name",
|
||
/** @type {const} */
|
||
"Assertion"
|
||
);
|
||
__publicField(
|
||
this,
|
||
"code",
|
||
/** @type {const} */
|
||
"ERR_ASSERTION"
|
||
);
|
||
if (Error.captureStackTrace) {
|
||
Error.captureStackTrace(this, this.constructor);
|
||
}
|
||
this.actual = actual;
|
||
this.expected = expected;
|
||
this.generated = generated;
|
||
this.operator = operator;
|
||
}
|
||
};
|
||
function ok(value, message) {
|
||
assert(
|
||
Boolean(value),
|
||
false,
|
||
true,
|
||
"ok",
|
||
"Expected value to be truthy",
|
||
message
|
||
);
|
||
}
|
||
function unreachable(message) {
|
||
assert(false, false, true, "ok", "Unreachable", message);
|
||
}
|
||
function assert(bool, actual, expected, operator, defaultMessage, userMessage) {
|
||
if (!bool) {
|
||
throw userMessage instanceof Error ? userMessage : new AssertionError(
|
||
userMessage || defaultMessage,
|
||
actual,
|
||
expected,
|
||
operator,
|
||
!userMessage
|
||
);
|
||
}
|
||
}
|
||
|
||
// node_modules/unist-util-is/lib/index.js
|
||
var convert = (
|
||
// Note: overloads in JSDoc can’t yet use different `@template`s.
|
||
/**
|
||
* @type {(
|
||
* (<Condition extends string>(test: Condition) => (node: unknown, index?: number | null | undefined, parent?: Parent | null | undefined, context?: unknown) => node is Node & {type: Condition}) &
|
||
* (<Condition extends Props>(test: Condition) => (node: unknown, index?: number | null | undefined, parent?: Parent | null | undefined, context?: unknown) => node is Node & Condition) &
|
||
* (<Condition extends TestFunction>(test: Condition) => (node: unknown, index?: number | null | undefined, parent?: Parent | null | undefined, context?: unknown) => node is Node & Predicate<Condition, Node>) &
|
||
* ((test?: null | undefined) => (node?: unknown, index?: number | null | undefined, parent?: Parent | null | undefined, context?: unknown) => node is Node) &
|
||
* ((test?: Test) => Check)
|
||
* )}
|
||
*/
|
||
/**
|
||
* @param {Test} [test]
|
||
* @returns {Check}
|
||
*/
|
||
function(test) {
|
||
if (test === null || test === void 0) {
|
||
return ok2;
|
||
}
|
||
if (typeof test === "function") {
|
||
return castFactory(test);
|
||
}
|
||
if (typeof test === "object") {
|
||
return Array.isArray(test) ? anyFactory(test) : propsFactory(test);
|
||
}
|
||
if (typeof test === "string") {
|
||
return typeFactory(test);
|
||
}
|
||
throw new Error("Expected function, string, or object as test");
|
||
}
|
||
);
|
||
function anyFactory(tests) {
|
||
const checks = [];
|
||
let index = -1;
|
||
while (++index < tests.length) {
|
||
checks[index] = convert(tests[index]);
|
||
}
|
||
return castFactory(any);
|
||
function any(...parameters) {
|
||
let index2 = -1;
|
||
while (++index2 < checks.length) {
|
||
if (checks[index2].apply(this, parameters)) return true;
|
||
}
|
||
return false;
|
||
}
|
||
}
|
||
function propsFactory(check) {
|
||
const checkAsRecord = (
|
||
/** @type {Record<string, unknown>} */
|
||
check
|
||
);
|
||
return castFactory(all2);
|
||
function all2(node2) {
|
||
const nodeAsRecord = (
|
||
/** @type {Record<string, unknown>} */
|
||
/** @type {unknown} */
|
||
node2
|
||
);
|
||
let key;
|
||
for (key in check) {
|
||
if (nodeAsRecord[key] !== checkAsRecord[key]) return false;
|
||
}
|
||
return true;
|
||
}
|
||
}
|
||
function typeFactory(check) {
|
||
return castFactory(type);
|
||
function type(node2) {
|
||
return node2 && node2.type === check;
|
||
}
|
||
}
|
||
function castFactory(testFunction) {
|
||
return check;
|
||
function check(value, index, parent) {
|
||
return Boolean(
|
||
looksLikeANode(value) && testFunction.call(
|
||
this,
|
||
value,
|
||
typeof index === "number" ? index : void 0,
|
||
parent || void 0
|
||
)
|
||
);
|
||
}
|
||
}
|
||
function ok2() {
|
||
return true;
|
||
}
|
||
function looksLikeANode(value) {
|
||
return value !== null && typeof value === "object" && "type" in value;
|
||
}
|
||
|
||
// node_modules/unist-util-visit-parents/lib/color.js
|
||
function color(d) {
|
||
return d;
|
||
}
|
||
|
||
// node_modules/unist-util-visit-parents/lib/index.js
|
||
var empty = [];
|
||
var CONTINUE = true;
|
||
var EXIT = false;
|
||
var SKIP = "skip";
|
||
function visitParents(tree, test, visitor, reverse) {
|
||
let check;
|
||
if (typeof test === "function" && typeof visitor !== "function") {
|
||
reverse = visitor;
|
||
visitor = test;
|
||
} else {
|
||
check = test;
|
||
}
|
||
const is2 = convert(check);
|
||
const step = reverse ? -1 : 1;
|
||
factory(tree, void 0, [])();
|
||
function factory(node2, index, parents) {
|
||
const value = (
|
||
/** @type {Record<string, unknown>} */
|
||
node2 && typeof node2 === "object" ? node2 : {}
|
||
);
|
||
if (typeof value.type === "string") {
|
||
const name = (
|
||
// `hast`
|
||
typeof value.tagName === "string" ? value.tagName : (
|
||
// `xast`
|
||
typeof value.name === "string" ? value.name : void 0
|
||
)
|
||
);
|
||
Object.defineProperty(visit2, "name", {
|
||
value: "node (" + color(node2.type + (name ? "<" + name + ">" : "")) + ")"
|
||
});
|
||
}
|
||
return visit2;
|
||
function visit2() {
|
||
let result = empty;
|
||
let subresult;
|
||
let offset;
|
||
let grandparents;
|
||
if (!test || is2(node2, index, parents[parents.length - 1] || void 0)) {
|
||
result = toResult(visitor(node2, parents));
|
||
if (result[0] === EXIT) {
|
||
return result;
|
||
}
|
||
}
|
||
if ("children" in node2 && node2.children) {
|
||
const nodeAsParent = (
|
||
/** @type {UnistParent} */
|
||
node2
|
||
);
|
||
if (nodeAsParent.children && result[0] !== SKIP) {
|
||
offset = (reverse ? nodeAsParent.children.length : -1) + step;
|
||
grandparents = parents.concat(nodeAsParent);
|
||
while (offset > -1 && offset < nodeAsParent.children.length) {
|
||
const child = nodeAsParent.children[offset];
|
||
subresult = factory(child, offset, grandparents)();
|
||
if (subresult[0] === EXIT) {
|
||
return subresult;
|
||
}
|
||
offset = typeof subresult[1] === "number" ? subresult[1] : offset + step;
|
||
}
|
||
}
|
||
}
|
||
return result;
|
||
}
|
||
}
|
||
}
|
||
function toResult(value) {
|
||
if (Array.isArray(value)) {
|
||
return value;
|
||
}
|
||
if (typeof value === "number") {
|
||
return [CONTINUE, value];
|
||
}
|
||
return value === null || value === void 0 ? empty : [value];
|
||
}
|
||
|
||
// node_modules/unist-util-visit/lib/index.js
|
||
function visit(tree, testOrVisitor, visitorOrReverse, maybeReverse) {
|
||
let reverse;
|
||
let test;
|
||
let visitor;
|
||
if (typeof testOrVisitor === "function" && typeof visitorOrReverse !== "function") {
|
||
test = void 0;
|
||
visitor = testOrVisitor;
|
||
reverse = visitorOrReverse;
|
||
} else {
|
||
test = testOrVisitor;
|
||
visitor = visitorOrReverse;
|
||
reverse = maybeReverse;
|
||
}
|
||
visitParents(tree, test, overload, reverse);
|
||
function overload(node2, parents) {
|
||
const parent = parents[parents.length - 1];
|
||
const index = parent ? parent.children.indexOf(node2) : void 0;
|
||
return visitor(node2, index, parent);
|
||
}
|
||
}
|
||
|
||
// node_modules/micromark-util-symbol/lib/codes.js
|
||
var codes = (
|
||
/** @type {const} */
|
||
{
|
||
carriageReturn: -5,
|
||
lineFeed: -4,
|
||
carriageReturnLineFeed: -3,
|
||
horizontalTab: -2,
|
||
virtualSpace: -1,
|
||
eof: null,
|
||
nul: 0,
|
||
soh: 1,
|
||
stx: 2,
|
||
etx: 3,
|
||
eot: 4,
|
||
enq: 5,
|
||
ack: 6,
|
||
bel: 7,
|
||
bs: 8,
|
||
ht: 9,
|
||
// `\t`
|
||
lf: 10,
|
||
// `\n`
|
||
vt: 11,
|
||
// `\v`
|
||
ff: 12,
|
||
// `\f`
|
||
cr: 13,
|
||
// `\r`
|
||
so: 14,
|
||
si: 15,
|
||
dle: 16,
|
||
dc1: 17,
|
||
dc2: 18,
|
||
dc3: 19,
|
||
dc4: 20,
|
||
nak: 21,
|
||
syn: 22,
|
||
etb: 23,
|
||
can: 24,
|
||
em: 25,
|
||
sub: 26,
|
||
esc: 27,
|
||
fs: 28,
|
||
gs: 29,
|
||
rs: 30,
|
||
us: 31,
|
||
space: 32,
|
||
exclamationMark: 33,
|
||
// `!`
|
||
quotationMark: 34,
|
||
// `"`
|
||
numberSign: 35,
|
||
// `#`
|
||
dollarSign: 36,
|
||
// `$`
|
||
percentSign: 37,
|
||
// `%`
|
||
ampersand: 38,
|
||
// `&`
|
||
apostrophe: 39,
|
||
// `'`
|
||
leftParenthesis: 40,
|
||
// `(`
|
||
rightParenthesis: 41,
|
||
// `)`
|
||
asterisk: 42,
|
||
// `*`
|
||
plusSign: 43,
|
||
// `+`
|
||
comma: 44,
|
||
// `,`
|
||
dash: 45,
|
||
// `-`
|
||
dot: 46,
|
||
// `.`
|
||
slash: 47,
|
||
// `/`
|
||
digit0: 48,
|
||
// `0`
|
||
digit1: 49,
|
||
// `1`
|
||
digit2: 50,
|
||
// `2`
|
||
digit3: 51,
|
||
// `3`
|
||
digit4: 52,
|
||
// `4`
|
||
digit5: 53,
|
||
// `5`
|
||
digit6: 54,
|
||
// `6`
|
||
digit7: 55,
|
||
// `7`
|
||
digit8: 56,
|
||
// `8`
|
||
digit9: 57,
|
||
// `9`
|
||
colon: 58,
|
||
// `:`
|
||
semicolon: 59,
|
||
// `;`
|
||
lessThan: 60,
|
||
// `<`
|
||
equalsTo: 61,
|
||
// `=`
|
||
greaterThan: 62,
|
||
// `>`
|
||
questionMark: 63,
|
||
// `?`
|
||
atSign: 64,
|
||
// `@`
|
||
uppercaseA: 65,
|
||
// `A`
|
||
uppercaseB: 66,
|
||
// `B`
|
||
uppercaseC: 67,
|
||
// `C`
|
||
uppercaseD: 68,
|
||
// `D`
|
||
uppercaseE: 69,
|
||
// `E`
|
||
uppercaseF: 70,
|
||
// `F`
|
||
uppercaseG: 71,
|
||
// `G`
|
||
uppercaseH: 72,
|
||
// `H`
|
||
uppercaseI: 73,
|
||
// `I`
|
||
uppercaseJ: 74,
|
||
// `J`
|
||
uppercaseK: 75,
|
||
// `K`
|
||
uppercaseL: 76,
|
||
// `L`
|
||
uppercaseM: 77,
|
||
// `M`
|
||
uppercaseN: 78,
|
||
// `N`
|
||
uppercaseO: 79,
|
||
// `O`
|
||
uppercaseP: 80,
|
||
// `P`
|
||
uppercaseQ: 81,
|
||
// `Q`
|
||
uppercaseR: 82,
|
||
// `R`
|
||
uppercaseS: 83,
|
||
// `S`
|
||
uppercaseT: 84,
|
||
// `T`
|
||
uppercaseU: 85,
|
||
// `U`
|
||
uppercaseV: 86,
|
||
// `V`
|
||
uppercaseW: 87,
|
||
// `W`
|
||
uppercaseX: 88,
|
||
// `X`
|
||
uppercaseY: 89,
|
||
// `Y`
|
||
uppercaseZ: 90,
|
||
// `Z`
|
||
leftSquareBracket: 91,
|
||
// `[`
|
||
backslash: 92,
|
||
// `\`
|
||
rightSquareBracket: 93,
|
||
// `]`
|
||
caret: 94,
|
||
// `^`
|
||
underscore: 95,
|
||
// `_`
|
||
graveAccent: 96,
|
||
// `` ` ``
|
||
lowercaseA: 97,
|
||
// `a`
|
||
lowercaseB: 98,
|
||
// `b`
|
||
lowercaseC: 99,
|
||
// `c`
|
||
lowercaseD: 100,
|
||
// `d`
|
||
lowercaseE: 101,
|
||
// `e`
|
||
lowercaseF: 102,
|
||
// `f`
|
||
lowercaseG: 103,
|
||
// `g`
|
||
lowercaseH: 104,
|
||
// `h`
|
||
lowercaseI: 105,
|
||
// `i`
|
||
lowercaseJ: 106,
|
||
// `j`
|
||
lowercaseK: 107,
|
||
// `k`
|
||
lowercaseL: 108,
|
||
// `l`
|
||
lowercaseM: 109,
|
||
// `m`
|
||
lowercaseN: 110,
|
||
// `n`
|
||
lowercaseO: 111,
|
||
// `o`
|
||
lowercaseP: 112,
|
||
// `p`
|
||
lowercaseQ: 113,
|
||
// `q`
|
||
lowercaseR: 114,
|
||
// `r`
|
||
lowercaseS: 115,
|
||
// `s`
|
||
lowercaseT: 116,
|
||
// `t`
|
||
lowercaseU: 117,
|
||
// `u`
|
||
lowercaseV: 118,
|
||
// `v`
|
||
lowercaseW: 119,
|
||
// `w`
|
||
lowercaseX: 120,
|
||
// `x`
|
||
lowercaseY: 121,
|
||
// `y`
|
||
lowercaseZ: 122,
|
||
// `z`
|
||
leftCurlyBrace: 123,
|
||
// `{`
|
||
verticalBar: 124,
|
||
// `|`
|
||
rightCurlyBrace: 125,
|
||
// `}`
|
||
tilde: 126,
|
||
// `~`
|
||
del: 127,
|
||
// Unicode Specials block.
|
||
byteOrderMarker: 65279,
|
||
// Unicode Specials block.
|
||
replacementCharacter: 65533
|
||
// `<60>`
|
||
}
|
||
);
|
||
|
||
// node_modules/micromark-util-symbol/lib/constants.js
|
||
var constants = (
|
||
/** @type {const} */
|
||
{
|
||
attentionSideAfter: 2,
|
||
// Symbol to mark an attention sequence as after content: `a*`
|
||
attentionSideBefore: 1,
|
||
// Symbol to mark an attention sequence as before content: `*a`
|
||
atxHeadingOpeningFenceSizeMax: 6,
|
||
// 6 number signs is fine, 7 isn’t.
|
||
autolinkDomainSizeMax: 63,
|
||
// 63 characters is fine, 64 is too many.
|
||
autolinkSchemeSizeMax: 32,
|
||
// 32 characters is fine, 33 is too many.
|
||
cdataOpeningString: "CDATA[",
|
||
// And preceded by `<![`.
|
||
characterGroupPunctuation: 2,
|
||
// Symbol used to indicate a character is punctuation
|
||
characterGroupWhitespace: 1,
|
||
// Symbol used to indicate a character is whitespace
|
||
characterReferenceDecimalSizeMax: 7,
|
||
// `�`.
|
||
characterReferenceHexadecimalSizeMax: 6,
|
||
// `�`.
|
||
characterReferenceNamedSizeMax: 31,
|
||
// `∳`.
|
||
codeFencedSequenceSizeMin: 3,
|
||
// At least 3 ticks or tildes are needed.
|
||
contentTypeContent: "content",
|
||
contentTypeDocument: "document",
|
||
contentTypeFlow: "flow",
|
||
contentTypeString: "string",
|
||
contentTypeText: "text",
|
||
hardBreakPrefixSizeMin: 2,
|
||
// At least 2 trailing spaces are needed.
|
||
htmlBasic: 6,
|
||
// Symbol for `<div`
|
||
htmlCdata: 5,
|
||
// Symbol for `<![CDATA[]]>`
|
||
htmlComment: 2,
|
||
// Symbol for `<!---->`
|
||
htmlComplete: 7,
|
||
// Symbol for `<x>`
|
||
htmlDeclaration: 4,
|
||
// Symbol for `<!doctype>`
|
||
htmlInstruction: 3,
|
||
// Symbol for `<?php?>`
|
||
htmlRawSizeMax: 8,
|
||
// Length of `textarea`.
|
||
htmlRaw: 1,
|
||
// Symbol for `<script>`
|
||
linkResourceDestinationBalanceMax: 32,
|
||
// See: <https://spec.commonmark.org/0.30/#link-destination>, <https://github.com/remarkjs/react-markdown/issues/658#issuecomment-984345577>
|
||
linkReferenceSizeMax: 999,
|
||
// See: <https://spec.commonmark.org/0.30/#link-label>
|
||
listItemValueSizeMax: 10,
|
||
// See: <https://spec.commonmark.org/0.30/#ordered-list-marker>
|
||
numericBaseDecimal: 10,
|
||
numericBaseHexadecimal: 16,
|
||
tabSize: 4,
|
||
// Tabs have a hard-coded size of 4, per CommonMark.
|
||
thematicBreakMarkerCountMin: 3,
|
||
// At least 3 asterisks, dashes, or underscores are needed.
|
||
v8MaxSafeChunkSize: 1e4
|
||
// V8 (and potentially others) have problems injecting giant arrays into other arrays, hence we operate in chunks.
|
||
}
|
||
);
|
||
|
||
// node_modules/micromark-util-symbol/lib/types.js
|
||
var types = (
|
||
/** @type {const} */
|
||
{
|
||
// Generic type for data, such as in a title, a destination, etc.
|
||
data: "data",
|
||
// Generic type for syntactic whitespace (tabs, virtual spaces, spaces).
|
||
// Such as, between a fenced code fence and an info string.
|
||
whitespace: "whitespace",
|
||
// Generic type for line endings (line feed, carriage return, carriage return +
|
||
// line feed).
|
||
lineEnding: "lineEnding",
|
||
// A line ending, but ending a blank line.
|
||
lineEndingBlank: "lineEndingBlank",
|
||
// Generic type for whitespace (tabs, virtual spaces, spaces) at the start of a
|
||
// line.
|
||
linePrefix: "linePrefix",
|
||
// Generic type for whitespace (tabs, virtual spaces, spaces) at the end of a
|
||
// line.
|
||
lineSuffix: "lineSuffix",
|
||
// Whole ATX heading:
|
||
//
|
||
// ```markdown
|
||
// #
|
||
// ## Alpha
|
||
// ### Bravo ###
|
||
// ```
|
||
//
|
||
// Includes `atxHeadingSequence`, `whitespace`, `atxHeadingText`.
|
||
atxHeading: "atxHeading",
|
||
// Sequence of number signs in an ATX heading (`###`).
|
||
atxHeadingSequence: "atxHeadingSequence",
|
||
// Content in an ATX heading (`alpha`).
|
||
// Includes text.
|
||
atxHeadingText: "atxHeadingText",
|
||
// Whole autolink (`<https://example.com>` or `<admin@example.com>`)
|
||
// Includes `autolinkMarker` and `autolinkProtocol` or `autolinkEmail`.
|
||
autolink: "autolink",
|
||
// Email autolink w/o markers (`admin@example.com`)
|
||
autolinkEmail: "autolinkEmail",
|
||
// Marker around an `autolinkProtocol` or `autolinkEmail` (`<` or `>`).
|
||
autolinkMarker: "autolinkMarker",
|
||
// Protocol autolink w/o markers (`https://example.com`)
|
||
autolinkProtocol: "autolinkProtocol",
|
||
// A whole character escape (`\-`).
|
||
// Includes `escapeMarker` and `characterEscapeValue`.
|
||
characterEscape: "characterEscape",
|
||
// The escaped character (`-`).
|
||
characterEscapeValue: "characterEscapeValue",
|
||
// A whole character reference (`&`, `≠`, or `𝌆`).
|
||
// Includes `characterReferenceMarker`, an optional
|
||
// `characterReferenceMarkerNumeric`, in which case an optional
|
||
// `characterReferenceMarkerHexadecimal`, and a `characterReferenceValue`.
|
||
characterReference: "characterReference",
|
||
// The start or end marker (`&` or `;`).
|
||
characterReferenceMarker: "characterReferenceMarker",
|
||
// Mark reference as numeric (`#`).
|
||
characterReferenceMarkerNumeric: "characterReferenceMarkerNumeric",
|
||
// Mark reference as numeric (`x` or `X`).
|
||
characterReferenceMarkerHexadecimal: "characterReferenceMarkerHexadecimal",
|
||
// Value of character reference w/o markers (`amp`, `8800`, or `1D306`).
|
||
characterReferenceValue: "characterReferenceValue",
|
||
// Whole fenced code:
|
||
//
|
||
// ````markdown
|
||
// ```js
|
||
// alert(1)
|
||
// ```
|
||
// ````
|
||
codeFenced: "codeFenced",
|
||
// A fenced code fence, including whitespace, sequence, info, and meta
|
||
// (` ```js `).
|
||
codeFencedFence: "codeFencedFence",
|
||
// Sequence of grave accent or tilde characters (` ``` `) in a fence.
|
||
codeFencedFenceSequence: "codeFencedFenceSequence",
|
||
// Info word (`js`) in a fence.
|
||
// Includes string.
|
||
codeFencedFenceInfo: "codeFencedFenceInfo",
|
||
// Meta words (`highlight="1"`) in a fence.
|
||
// Includes string.
|
||
codeFencedFenceMeta: "codeFencedFenceMeta",
|
||
// A line of code.
|
||
codeFlowValue: "codeFlowValue",
|
||
// Whole indented code:
|
||
//
|
||
// ```markdown
|
||
// alert(1)
|
||
// ```
|
||
//
|
||
// Includes `lineEnding`, `linePrefix`, and `codeFlowValue`.
|
||
codeIndented: "codeIndented",
|
||
// A text code (``` `alpha` ```).
|
||
// Includes `codeTextSequence`, `codeTextData`, `lineEnding`, and can include
|
||
// `codeTextPadding`.
|
||
codeText: "codeText",
|
||
codeTextData: "codeTextData",
|
||
// A space or line ending right after or before a tick.
|
||
codeTextPadding: "codeTextPadding",
|
||
// A text code fence (` `` `).
|
||
codeTextSequence: "codeTextSequence",
|
||
// Whole content:
|
||
//
|
||
// ```markdown
|
||
// [a]: b
|
||
// c
|
||
// =
|
||
// d
|
||
// ```
|
||
//
|
||
// Includes `paragraph` and `definition`.
|
||
content: "content",
|
||
// Whole definition:
|
||
//
|
||
// ```markdown
|
||
// [micromark]: https://github.com/micromark/micromark
|
||
// ```
|
||
//
|
||
// Includes `definitionLabel`, `definitionMarker`, `whitespace`,
|
||
// `definitionDestination`, and optionally `lineEnding` and `definitionTitle`.
|
||
definition: "definition",
|
||
// Destination of a definition (`https://github.com/micromark/micromark` or
|
||
// `<https://github.com/micromark/micromark>`).
|
||
// Includes `definitionDestinationLiteral` or `definitionDestinationRaw`.
|
||
definitionDestination: "definitionDestination",
|
||
// Enclosed destination of a definition
|
||
// (`<https://github.com/micromark/micromark>`).
|
||
// Includes `definitionDestinationLiteralMarker` and optionally
|
||
// `definitionDestinationString`.
|
||
definitionDestinationLiteral: "definitionDestinationLiteral",
|
||
// Markers of an enclosed definition destination (`<` or `>`).
|
||
definitionDestinationLiteralMarker: "definitionDestinationLiteralMarker",
|
||
// Unenclosed destination of a definition
|
||
// (`https://github.com/micromark/micromark`).
|
||
// Includes `definitionDestinationString`.
|
||
definitionDestinationRaw: "definitionDestinationRaw",
|
||
// Text in an destination (`https://github.com/micromark/micromark`).
|
||
// Includes string.
|
||
definitionDestinationString: "definitionDestinationString",
|
||
// Label of a definition (`[micromark]`).
|
||
// Includes `definitionLabelMarker` and `definitionLabelString`.
|
||
definitionLabel: "definitionLabel",
|
||
// Markers of a definition label (`[` or `]`).
|
||
definitionLabelMarker: "definitionLabelMarker",
|
||
// Value of a definition label (`micromark`).
|
||
// Includes string.
|
||
definitionLabelString: "definitionLabelString",
|
||
// Marker between a label and a destination (`:`).
|
||
definitionMarker: "definitionMarker",
|
||
// Title of a definition (`"x"`, `'y'`, or `(z)`).
|
||
// Includes `definitionTitleMarker` and optionally `definitionTitleString`.
|
||
definitionTitle: "definitionTitle",
|
||
// Marker around a title of a definition (`"`, `'`, `(`, or `)`).
|
||
definitionTitleMarker: "definitionTitleMarker",
|
||
// Data without markers in a title (`z`).
|
||
// Includes string.
|
||
definitionTitleString: "definitionTitleString",
|
||
// Emphasis (`*alpha*`).
|
||
// Includes `emphasisSequence` and `emphasisText`.
|
||
emphasis: "emphasis",
|
||
// Sequence of emphasis markers (`*` or `_`).
|
||
emphasisSequence: "emphasisSequence",
|
||
// Emphasis text (`alpha`).
|
||
// Includes text.
|
||
emphasisText: "emphasisText",
|
||
// The character escape marker (`\`).
|
||
escapeMarker: "escapeMarker",
|
||
// A hard break created with a backslash (`\\n`).
|
||
// Note: does not include the line ending.
|
||
hardBreakEscape: "hardBreakEscape",
|
||
// A hard break created with trailing spaces (` \n`).
|
||
// Does not include the line ending.
|
||
hardBreakTrailing: "hardBreakTrailing",
|
||
// Flow HTML:
|
||
//
|
||
// ```markdown
|
||
// <div
|
||
// ```
|
||
//
|
||
// Inlcudes `lineEnding`, `htmlFlowData`.
|
||
htmlFlow: "htmlFlow",
|
||
htmlFlowData: "htmlFlowData",
|
||
// HTML in text (the tag in `a <i> b`).
|
||
// Includes `lineEnding`, `htmlTextData`.
|
||
htmlText: "htmlText",
|
||
htmlTextData: "htmlTextData",
|
||
// Whole image (``, `![alpha][bravo]`, `![alpha][]`, or
|
||
// `![alpha]`).
|
||
// Includes `label` and an optional `resource` or `reference`.
|
||
image: "image",
|
||
// Whole link label (`[*alpha*]`).
|
||
// Includes `labelLink` or `labelImage`, `labelText`, and `labelEnd`.
|
||
label: "label",
|
||
// Text in an label (`*alpha*`).
|
||
// Includes text.
|
||
labelText: "labelText",
|
||
// Start a link label (`[`).
|
||
// Includes a `labelMarker`.
|
||
labelLink: "labelLink",
|
||
// Start an image label (`![`).
|
||
// Includes `labelImageMarker` and `labelMarker`.
|
||
labelImage: "labelImage",
|
||
// Marker of a label (`[` or `]`).
|
||
labelMarker: "labelMarker",
|
||
// Marker to start an image (`!`).
|
||
labelImageMarker: "labelImageMarker",
|
||
// End a label (`]`).
|
||
// Includes `labelMarker`.
|
||
labelEnd: "labelEnd",
|
||
// Whole link (`[alpha](bravo)`, `[alpha][bravo]`, `[alpha][]`, or `[alpha]`).
|
||
// Includes `label` and an optional `resource` or `reference`.
|
||
link: "link",
|
||
// Whole paragraph:
|
||
//
|
||
// ```markdown
|
||
// alpha
|
||
// bravo.
|
||
// ```
|
||
//
|
||
// Includes text.
|
||
paragraph: "paragraph",
|
||
// A reference (`[alpha]` or `[]`).
|
||
// Includes `referenceMarker` and an optional `referenceString`.
|
||
reference: "reference",
|
||
// A reference marker (`[` or `]`).
|
||
referenceMarker: "referenceMarker",
|
||
// Reference text (`alpha`).
|
||
// Includes string.
|
||
referenceString: "referenceString",
|
||
// A resource (`(https://example.com "alpha")`).
|
||
// Includes `resourceMarker`, an optional `resourceDestination` with an optional
|
||
// `whitespace` and `resourceTitle`.
|
||
resource: "resource",
|
||
// A resource destination (`https://example.com`).
|
||
// Includes `resourceDestinationLiteral` or `resourceDestinationRaw`.
|
||
resourceDestination: "resourceDestination",
|
||
// A literal resource destination (`<https://example.com>`).
|
||
// Includes `resourceDestinationLiteralMarker` and optionally
|
||
// `resourceDestinationString`.
|
||
resourceDestinationLiteral: "resourceDestinationLiteral",
|
||
// A resource destination marker (`<` or `>`).
|
||
resourceDestinationLiteralMarker: "resourceDestinationLiteralMarker",
|
||
// A raw resource destination (`https://example.com`).
|
||
// Includes `resourceDestinationString`.
|
||
resourceDestinationRaw: "resourceDestinationRaw",
|
||
// Resource destination text (`https://example.com`).
|
||
// Includes string.
|
||
resourceDestinationString: "resourceDestinationString",
|
||
// A resource marker (`(` or `)`).
|
||
resourceMarker: "resourceMarker",
|
||
// A resource title (`"alpha"`, `'alpha'`, or `(alpha)`).
|
||
// Includes `resourceTitleMarker` and optionally `resourceTitleString`.
|
||
resourceTitle: "resourceTitle",
|
||
// A resource title marker (`"`, `'`, `(`, or `)`).
|
||
resourceTitleMarker: "resourceTitleMarker",
|
||
// Resource destination title (`alpha`).
|
||
// Includes string.
|
||
resourceTitleString: "resourceTitleString",
|
||
// Whole setext heading:
|
||
//
|
||
// ```markdown
|
||
// alpha
|
||
// bravo
|
||
// =====
|
||
// ```
|
||
//
|
||
// Includes `setextHeadingText`, `lineEnding`, `linePrefix`, and
|
||
// `setextHeadingLine`.
|
||
setextHeading: "setextHeading",
|
||
// Content in a setext heading (`alpha\nbravo`).
|
||
// Includes text.
|
||
setextHeadingText: "setextHeadingText",
|
||
// Underline in a setext heading, including whitespace suffix (`==`).
|
||
// Includes `setextHeadingLineSequence`.
|
||
setextHeadingLine: "setextHeadingLine",
|
||
// Sequence of equals or dash characters in underline in a setext heading (`-`).
|
||
setextHeadingLineSequence: "setextHeadingLineSequence",
|
||
// Strong (`**alpha**`).
|
||
// Includes `strongSequence` and `strongText`.
|
||
strong: "strong",
|
||
// Sequence of strong markers (`**` or `__`).
|
||
strongSequence: "strongSequence",
|
||
// Strong text (`alpha`).
|
||
// Includes text.
|
||
strongText: "strongText",
|
||
// Whole thematic break:
|
||
//
|
||
// ```markdown
|
||
// * * *
|
||
// ```
|
||
//
|
||
// Includes `thematicBreakSequence` and `whitespace`.
|
||
thematicBreak: "thematicBreak",
|
||
// A sequence of one or more thematic break markers (`***`).
|
||
thematicBreakSequence: "thematicBreakSequence",
|
||
// Whole block quote:
|
||
//
|
||
// ```markdown
|
||
// > a
|
||
// >
|
||
// > b
|
||
// ```
|
||
//
|
||
// Includes `blockQuotePrefix` and flow.
|
||
blockQuote: "blockQuote",
|
||
// The `>` or `> ` of a block quote.
|
||
blockQuotePrefix: "blockQuotePrefix",
|
||
// The `>` of a block quote prefix.
|
||
blockQuoteMarker: "blockQuoteMarker",
|
||
// The optional ` ` of a block quote prefix.
|
||
blockQuotePrefixWhitespace: "blockQuotePrefixWhitespace",
|
||
// Whole ordered list:
|
||
//
|
||
// ```markdown
|
||
// 1. a
|
||
// b
|
||
// ```
|
||
//
|
||
// Includes `listItemPrefix`, flow, and optionally `listItemIndent` on further
|
||
// lines.
|
||
listOrdered: "listOrdered",
|
||
// Whole unordered list:
|
||
//
|
||
// ```markdown
|
||
// - a
|
||
// b
|
||
// ```
|
||
//
|
||
// Includes `listItemPrefix`, flow, and optionally `listItemIndent` on further
|
||
// lines.
|
||
listUnordered: "listUnordered",
|
||
// The indent of further list item lines.
|
||
listItemIndent: "listItemIndent",
|
||
// A marker, as in, `*`, `+`, `-`, `.`, or `)`.
|
||
listItemMarker: "listItemMarker",
|
||
// The thing that starts a list item, such as `1. `.
|
||
// Includes `listItemValue` if ordered, `listItemMarker`, and
|
||
// `listItemPrefixWhitespace` (unless followed by a line ending).
|
||
listItemPrefix: "listItemPrefix",
|
||
// The whitespace after a marker.
|
||
listItemPrefixWhitespace: "listItemPrefixWhitespace",
|
||
// The numerical value of an ordered item.
|
||
listItemValue: "listItemValue",
|
||
// Internal types used for subtokenizers, compiled away
|
||
chunkDocument: "chunkDocument",
|
||
chunkContent: "chunkContent",
|
||
chunkFlow: "chunkFlow",
|
||
chunkText: "chunkText",
|
||
chunkString: "chunkString"
|
||
}
|
||
);
|
||
|
||
// node_modules/micromark-util-symbol/lib/values.js
|
||
var values = (
|
||
/** @type {const} */
|
||
{
|
||
ht: " ",
|
||
lf: "\n",
|
||
cr: "\r",
|
||
space: " ",
|
||
exclamationMark: "!",
|
||
quotationMark: '"',
|
||
numberSign: "#",
|
||
dollarSign: "$",
|
||
percentSign: "%",
|
||
ampersand: "&",
|
||
apostrophe: "'",
|
||
leftParenthesis: "(",
|
||
rightParenthesis: ")",
|
||
asterisk: "*",
|
||
plusSign: "+",
|
||
comma: ",",
|
||
dash: "-",
|
||
dot: ".",
|
||
slash: "/",
|
||
digit0: "0",
|
||
digit1: "1",
|
||
digit2: "2",
|
||
digit3: "3",
|
||
digit4: "4",
|
||
digit5: "5",
|
||
digit6: "6",
|
||
digit7: "7",
|
||
digit8: "8",
|
||
digit9: "9",
|
||
colon: ":",
|
||
semicolon: ";",
|
||
lessThan: "<",
|
||
equalsTo: "=",
|
||
greaterThan: ">",
|
||
questionMark: "?",
|
||
atSign: "@",
|
||
uppercaseA: "A",
|
||
uppercaseB: "B",
|
||
uppercaseC: "C",
|
||
uppercaseD: "D",
|
||
uppercaseE: "E",
|
||
uppercaseF: "F",
|
||
uppercaseG: "G",
|
||
uppercaseH: "H",
|
||
uppercaseI: "I",
|
||
uppercaseJ: "J",
|
||
uppercaseK: "K",
|
||
uppercaseL: "L",
|
||
uppercaseM: "M",
|
||
uppercaseN: "N",
|
||
uppercaseO: "O",
|
||
uppercaseP: "P",
|
||
uppercaseQ: "Q",
|
||
uppercaseR: "R",
|
||
uppercaseS: "S",
|
||
uppercaseT: "T",
|
||
uppercaseU: "U",
|
||
uppercaseV: "V",
|
||
uppercaseW: "W",
|
||
uppercaseX: "X",
|
||
uppercaseY: "Y",
|
||
uppercaseZ: "Z",
|
||
leftSquareBracket: "[",
|
||
backslash: "\\",
|
||
rightSquareBracket: "]",
|
||
caret: "^",
|
||
underscore: "_",
|
||
graveAccent: "`",
|
||
lowercaseA: "a",
|
||
lowercaseB: "b",
|
||
lowercaseC: "c",
|
||
lowercaseD: "d",
|
||
lowercaseE: "e",
|
||
lowercaseF: "f",
|
||
lowercaseG: "g",
|
||
lowercaseH: "h",
|
||
lowercaseI: "i",
|
||
lowercaseJ: "j",
|
||
lowercaseK: "k",
|
||
lowercaseL: "l",
|
||
lowercaseM: "m",
|
||
lowercaseN: "n",
|
||
lowercaseO: "o",
|
||
lowercaseP: "p",
|
||
lowercaseQ: "q",
|
||
lowercaseR: "r",
|
||
lowercaseS: "s",
|
||
lowercaseT: "t",
|
||
lowercaseU: "u",
|
||
lowercaseV: "v",
|
||
lowercaseW: "w",
|
||
lowercaseX: "x",
|
||
lowercaseY: "y",
|
||
lowercaseZ: "z",
|
||
leftCurlyBrace: "{",
|
||
verticalBar: "|",
|
||
rightCurlyBrace: "}",
|
||
tilde: "~",
|
||
replacementCharacter: "<22>"
|
||
}
|
||
);
|
||
|
||
// node_modules/micromark-util-chunked/dev/index.js
|
||
function splice(list2, start, remove, items) {
|
||
const end = list2.length;
|
||
let chunkStart = 0;
|
||
let parameters;
|
||
if (start < 0) {
|
||
start = -start > end ? 0 : end + start;
|
||
} else {
|
||
start = start > end ? end : start;
|
||
}
|
||
remove = remove > 0 ? remove : 0;
|
||
if (items.length < constants.v8MaxSafeChunkSize) {
|
||
parameters = Array.from(items);
|
||
parameters.unshift(start, remove);
|
||
list2.splice(...parameters);
|
||
} else {
|
||
if (remove) list2.splice(start, remove);
|
||
while (chunkStart < items.length) {
|
||
parameters = items.slice(
|
||
chunkStart,
|
||
chunkStart + constants.v8MaxSafeChunkSize
|
||
);
|
||
parameters.unshift(start, 0);
|
||
list2.splice(...parameters);
|
||
chunkStart += constants.v8MaxSafeChunkSize;
|
||
start += constants.v8MaxSafeChunkSize;
|
||
}
|
||
}
|
||
}
|
||
function push(list2, items) {
|
||
if (list2.length > 0) {
|
||
splice(list2, list2.length, 0, items);
|
||
return list2;
|
||
}
|
||
return items;
|
||
}
|
||
|
||
// node_modules/micromark-util-combine-extensions/index.js
|
||
var hasOwnProperty = {}.hasOwnProperty;
|
||
function combineExtensions(extensions) {
|
||
const all2 = {};
|
||
let index = -1;
|
||
while (++index < extensions.length) {
|
||
syntaxExtension(all2, extensions[index]);
|
||
}
|
||
return all2;
|
||
}
|
||
/**
 * Merge one syntax `extension` into the accumulated extension `all2`
 * (mutated in place).
 *
 * For each hook (e.g. a tokenizer phase) and each character code under it,
 * the extension's constructs are normalized to a list and merged via
 * `constructs`.
 */
function syntaxExtension(all2, extension) {
  let hook;
  for (hook in extension) {
    const maybe = hasOwnProperty.call(all2, hook) ? all2[hook] : void 0;
    const left = maybe || (all2[hook] = {});
    const right = extension[hook];
    let code;
    if (right) {
      for (code in right) {
        if (!hasOwnProperty.call(left, code)) left[code] = [];
        const value = right[code];
        constructs(
          // @ts-expect-error Looks like a list.
          left[code],
          // Normalize to a list: already a list, a single construct, or none.
          Array.isArray(value) ? value : value ? [value] : []
        );
      }
    }
  }
}
|
||
/**
 * Merge a list of constructs into `existing` (mutated in place).
 *
 * Constructs marked `add: "after"` are appended; all others are collected
 * and inserted before the existing ones.
 */
function constructs(existing, list2) {
  let index = -1;
  const before = [];
  while (++index < list2.length) {
    ;(list2[index].add === "after" ? existing : before).push(list2[index]);
  }
  splice(existing, 0, 0, before);
}
|
||
|
||
// node_modules/mdast-util-to-string/lib/index.js
var emptyOptions = {};
/**
 * Get the plain-text content of an mdast node (or list of nodes).
 *
 * @param {unknown} value Node, list of nodes, or anything else (yields "").
 * @param {Object} [options] Optional settings: `includeImageAlt` and
 *   `includeHtml`, both defaulting to `true` unless explicitly boolean.
 * @returns {string} Concatenated text content.
 */
function toString(value, options) {
  const settings = options || emptyOptions;
  const includeImageAlt = typeof settings.includeImageAlt === "boolean" ? settings.includeImageAlt : true;
  const includeHtml = typeof settings.includeHtml === "boolean" ? settings.includeHtml : true;
  return one(value, includeImageAlt, includeHtml);
}
|
||
/**
 * Serialize one node (or array of nodes) to its text content.
 *
 * Order matters: a literal `value` wins over `alt`, which wins over
 * descending into `children`.
 */
function one(value, includeImageAlt, includeHtml) {
  if (node(value)) {
    if ("value" in value) {
      // Literal nodes: html is dropped when `includeHtml` is off.
      return value.type === "html" && !includeHtml ? "" : value.value;
    }
    if (includeImageAlt && "alt" in value && value.alt) {
      return value.alt;
    }
    if ("children" in value) {
      return all(value.children, includeImageAlt, includeHtml);
    }
  }
  if (Array.isArray(value)) {
    return all(value, includeImageAlt, includeHtml);
  }
  return "";
}
|
||
/**
 * Serialize a list of nodes by concatenating each node's text content.
 */
function all(values2, includeImageAlt, includeHtml) {
  const result = [];
  let index = -1;
  while (++index < values2.length) {
    result[index] = one(values2[index], includeImageAlt, includeHtml);
  }
  return result.join("");
}
|
||
/**
 * Check that something looks like a node: a non-null object.
 * Note: arrays also pass this check; `one` handles them via `Array.isArray`.
 */
function node(value) {
  return Boolean(value && typeof value === "object");
}
|
||
|
||
// node_modules/decode-named-character-reference/index.dom.js
// DOM-based decoder: a detached element whose innerHTML does the decoding.
var element = document.createElement("i");
/**
 * Decode a named character reference (e.g. `amp`) via the DOM.
 *
 * @param {string} value Reference name, without `&` and `;`.
 * @returns {string | false} Decoded character(s), or `false` when `value`
 *   is not a valid named reference.
 */
function decodeNamedCharacterReference(value) {
  const characterReference2 = "&" + value + ";";
  element.innerHTML = characterReference2;
  const character = element.textContent;
  if (
    // @ts-expect-error: TypeScript is wrong that `textContent` on elements can
    // yield `null`.
    character.charCodeAt(character.length - 1) === 59 && value !== "semi"
  ) {
    // Still ends in `;` (code 59): the browser did not decode it.
    return false;
  }
  // Unchanged output also means "not a reference".
  return character === characterReference2 ? false : character;
}
|
||
|
||
// node_modules/micromark-util-decode-numeric-character-reference/dev/index.js
/**
 * Decode a numeric character reference.
 *
 * @param {string} value Digits of the reference (no `&#`, `x`, or `;`).
 * @param {number} base Radix (10 or 16).
 * @returns {string} The decoded character, or the replacement character for
 *   disallowed/invalid code points.
 */
function decodeNumericCharacterReference(value, base) {
  const code = Number.parseInt(value, base);
  if (
    // C0 except for HT, LF, FF, CR, space.
    code < codes.ht || code === codes.vt || code > codes.cr && code < codes.space || // Control character (DEL) of C0, and C1 controls.
    code > codes.tilde && code < 160 || // Lone high surrogates and low surrogates.
    code > 55295 && code < 57344 || // Noncharacters.
    code > 64975 && code < 65008 || /* eslint-disable no-bitwise */
    (code & 65535) === 65535 || (code & 65535) === 65534 || /* eslint-enable no-bitwise */
    // Out of range
    code > 1114111
  ) {
    return values.replacementCharacter;
  }
  return String.fromCodePoint(code);
}
|
||
|
||
// node_modules/micromark-util-normalize-identifier/dev/index.js
/**
 * Normalize a definition/link identifier: collapse runs of markdown
 * whitespace to a single space, strip one leading/trailing space, then
 * lowercase followed by uppercase.
 *
 * NOTE: the lower-then-upper double pass is intentional — it normalizes
 * characters whose case mappings do not round-trip; do not "simplify" it.
 */
function normalizeIdentifier(value) {
  return value.replace(/[\t\n\r ]+/g, values.space).replace(/^ | $/g, "").toLowerCase().toUpperCase();
}
|
||
|
||
// node_modules/micromark-util-character/dev/index.js
/** Check whether a code point is an ASCII letter. */
var asciiAlpha = regexCheck(/[A-Za-z]/);
/** Check whether a code point is an ASCII letter or digit. */
var asciiAlphanumeric = regexCheck(/[\dA-Za-z]/);
/** Check whether a code point is ASCII "atext" (per the character class). */
var asciiAtext = regexCheck(/[#-'*+\--9=?A-Z^-~]/);
/**
 * Check whether a code point is an ASCII control character.
 */
function asciiControl(code) {
  return (
    // Special whitespace codes (which have negative values), C0 and Control
    // character DEL
    code !== null && (code < codes.space || code === codes.del)
  );
}
/** Check whether a code point is an ASCII digit. */
var asciiDigit = regexCheck(/\d/);
/** Check whether a code point is an ASCII hex digit. */
var asciiHexDigit = regexCheck(/[\dA-Fa-f]/);
/** Check whether a code point is ASCII punctuation. */
var asciiPunctuation = regexCheck(/[!-/:-@[-`{-~]/);
/** Check whether a code is a markdown line ending (the negative EOL codes). */
function markdownLineEnding(code) {
  return code !== null && code < codes.horizontalTab;
}
/** Check whether a code is a markdown line ending or markdown space. */
function markdownLineEndingOrSpace(code) {
  return code !== null && (code < codes.nul || code === codes.space);
}
/** Check whether a code is a markdown space (tab, virtual space, space). */
function markdownSpace(code) {
  return code === codes.horizontalTab || code === codes.virtualSpace || code === codes.space;
}
/** Check whether a code point is Unicode punctuation or a symbol. */
var unicodePunctuation = regexCheck(new RegExp("\\p{P}|\\p{S}", "u"));
/** Check whether a code point is Unicode whitespace. */
var unicodeWhitespace = regexCheck(/\s/);
/**
 * Build a checker that tests whether a code point's character matches
 * `regex`. `null` and negative codes (micromark's special codes) never match.
 *
 * @param {RegExp} regex Expression to test single characters against.
 * @returns {(code: number | null) => boolean} Checker.
 */
function regexCheck(regex) {
  return check;
  function check(code) {
    return code !== null && code > -1 && regex.test(String.fromCharCode(code));
  }
}
|
||
|
||
// node_modules/micromark-util-sanitize-uri/dev/index.js
/**
 * Normalize a URL: percent-encode unsafe ASCII and non-ASCII characters,
 * keeping already-valid percent escapes (`%XX`) untouched and replacing
 * lone surrogates with the replacement character.
 *
 * @param {string} value URL to normalize.
 * @returns {string} Normalized URL.
 */
function normalizeUri(value) {
  const result = [];
  let index = -1;
  let start = 0;
  let skip = 0;
  while (++index < value.length) {
    const code = value.charCodeAt(index);
    let replace = "";
    if (code === codes.percentSign && asciiAlphanumeric(value.charCodeAt(index + 1)) && asciiAlphanumeric(value.charCodeAt(index + 2))) {
      // A `%XX` escape already: keep it verbatim, skip the two hex digits.
      skip = 2;
    } else if (code < 128) {
      // ASCII outside the allowed set gets percent-encoded.
      if (!/[!#$&-;=?-Z_a-z~]/.test(String.fromCharCode(code))) {
        replace = String.fromCharCode(code);
      }
    } else if (code > 55295 && code < 57344) {
      // Surrogate range: a valid pair is encoded together; a lone surrogate
      // becomes the replacement character.
      const next = value.charCodeAt(index + 1);
      if (code < 56320 && next > 56319 && next < 57344) {
        replace = String.fromCharCode(code, next);
        skip = 1;
      } else {
        replace = values.replacementCharacter;
      }
    } else {
      // Other non-ASCII: percent-encode.
      replace = String.fromCharCode(code);
    }
    if (replace) {
      result.push(value.slice(start, index), encodeURIComponent(replace));
      start = index + skip + 1;
      replace = "";
    }
    if (skip) {
      index += skip;
      skip = 0;
    }
  }
  return result.join("") + value.slice(start);
}
|
||
|
||
// node_modules/micromark-factory-space/dev/index.js
/**
 * State-machine factory that consumes a run of markdown spaces as a token
 * of `type`, then continues at `ok3`.
 *
 * @param {Object} effects Tokenizer effects (enter/consume/exit).
 * @param {Function} ok3 State to move to afterwards.
 * @param {string} type Token type for the whitespace run.
 * @param {number} [max] Maximum run length (unbounded when absent).
 * @returns {Function} Start state.
 */
function factorySpace(effects, ok3, type, max) {
  // `limit` is max-1 because the first space is consumed by `prefix` before
  // `size` is compared.
  const limit = max ? max - 1 : Number.POSITIVE_INFINITY;
  let size = 0;
  return start;
  function start(code) {
    if (markdownSpace(code)) {
      effects.enter(type);
      return prefix(code);
    }
    return ok3(code);
  }
  function prefix(code) {
    if (markdownSpace(code) && size++ < limit) {
      effects.consume(code);
      return prefix;
    }
    effects.exit(type);
    return ok3(code);
  }
}
|
||
|
||
// node_modules/micromark-util-classify-character/dev/index.js
/**
 * Classify a character as whitespace or punctuation for attention
 * (emphasis/strong) resolution.
 *
 * Returns `undefined` for any other character — callers rely on that
 * falsiness to mean "other".
 */
function classifyCharacter(code) {
  if (code === codes.eof || markdownLineEndingOrSpace(code) || unicodeWhitespace(code)) {
    return constants.characterGroupWhitespace;
  }
  if (unicodePunctuation(code)) {
    return constants.characterGroupPunctuation;
  }
}
|
||
|
||
// node_modules/micromark-util-resolve-all/index.js
/**
 * Run every distinct `resolveAll` handler found on `constructs2` over the
 * event list, once each (duplicates are skipped via identity comparison).
 *
 * @param {Array} constructs2 Constructs, possibly carrying `resolveAll`.
 * @param {Array} events Events to resolve.
 * @param {Object} context Tokenizer context passed to each resolver.
 * @returns {Array} The (possibly replaced) event list.
 */
function resolveAll(constructs2, events, context) {
  const called = [];
  let index = -1;
  while (++index < constructs2.length) {
    const resolve = constructs2[index].resolveAll;
    if (resolve && !called.includes(resolve)) {
      events = resolve(events, context);
      called.push(resolve);
    }
  }
  return events;
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/blank-line.js
/** Partial construct matching a line containing only whitespace. */
var blankLine = { partial: true, tokenize: tokenizeBlankLine };
/**
 * Tokenize a blank line: optional markdown spaces, then EOF or a line
 * ending; anything else fails.
 */
function tokenizeBlankLine(effects, ok3, nok) {
  return start;
  function start(code) {
    return markdownSpace(code) ? factorySpace(effects, after, types.linePrefix)(code) : after(code);
  }
  function after(code) {
    return code === codes.eof || markdownLineEnding(code) ? ok3(code) : nok(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/attention.js
/** Construct for emphasis/strong (`*`/`_` attention sequences). */
var attention = {
  name: "attention",
  resolveAll: resolveAllAttention,
  tokenize: tokenizeAttention
};
|
||
/**
 * Resolve attention sequences into emphasis/strong events.
 *
 * Walks events looking for a closing attention sequence, then scans
 * backwards for a matching opener (same marker character, CommonMark
 * "rule of three" respected), splices in the emphasis/strong group, and
 * re-resolves the inner span. Leftover attention sequences become data.
 */
function resolveAllAttention(events, context) {
  let index = -1;
  let open;
  let group;
  let text;
  let openingSequence;
  let closingSequence;
  let use;
  let nextEvents;
  let offset;
  while (++index < events.length) {
    if (events[index][0] === "enter" && events[index][1].type === "attentionSequence" && events[index][1]._close) {
      open = index;
      // Walk back to find a matching opener.
      while (open--) {
        if (events[open][0] === "exit" && events[open][1].type === "attentionSequence" && events[open][1]._open && // If the markers are the same:
        context.sliceSerialize(events[open][1]).charCodeAt(0) === context.sliceSerialize(events[index][1]).charCodeAt(0)) {
          // CommonMark "multiple of 3" rule: skip this pairing.
          if ((events[open][1]._close || events[index][1]._open) && (events[index][1].end.offset - events[index][1].start.offset) % 3 && !((events[open][1].end.offset - events[open][1].start.offset + events[index][1].end.offset - events[index][1].start.offset) % 3)) {
            continue;
          }
          // Use 2 markers (strong) when both runs have more than one.
          use = events[open][1].end.offset - events[open][1].start.offset > 1 && events[index][1].end.offset - events[index][1].start.offset > 1 ? 2 : 1;
          const start = { ...events[open][1].end };
          const end = { ...events[index][1].start };
          movePoint(start, -use);
          movePoint(end, use);
          openingSequence = {
            type: use > 1 ? types.strongSequence : types.emphasisSequence,
            start,
            end: { ...events[open][1].end }
          };
          closingSequence = {
            type: use > 1 ? types.strongSequence : types.emphasisSequence,
            start: { ...events[index][1].start },
            end
          };
          text = {
            type: use > 1 ? types.strongText : types.emphasisText,
            start: { ...events[open][1].end },
            end: { ...events[index][1].start }
          };
          group = {
            type: use > 1 ? types.strong : types.emphasis,
            start: { ...openingSequence.start },
            end: { ...closingSequence.end }
          };
          // Shrink the original sequences to whatever markers remain.
          events[open][1].end = { ...openingSequence.start };
          events[index][1].start = { ...closingSequence.end };
          nextEvents = [];
          // Leftover opening markers, if any.
          if (events[open][1].end.offset - events[open][1].start.offset) {
            nextEvents = push(nextEvents, [
              ["enter", events[open][1], context],
              ["exit", events[open][1], context]
            ]);
          }
          nextEvents = push(nextEvents, [
            ["enter", group, context],
            ["enter", openingSequence, context],
            ["exit", openingSequence, context],
            ["enter", text, context]
          ]);
          ok(
            context.parser.constructs.insideSpan.null,
            "expected `insideSpan` to be populated"
          );
          // Re-resolve everything between the sequences.
          nextEvents = push(
            nextEvents,
            resolveAll(
              context.parser.constructs.insideSpan.null,
              events.slice(open + 1, index),
              context
            )
          );
          nextEvents = push(nextEvents, [
            ["exit", text, context],
            ["enter", closingSequence, context],
            ["exit", closingSequence, context],
            ["exit", group, context]
          ]);
          // Leftover closing markers, if any.
          if (events[index][1].end.offset - events[index][1].start.offset) {
            offset = 2;
            nextEvents = push(nextEvents, [
              ["enter", events[index][1], context],
              ["exit", events[index][1], context]
            ]);
          } else {
            offset = 0;
          }
          splice(events, open - 1, index - open + 3, nextEvents);
          index = open + nextEvents.length - offset - 2;
          break;
        }
      }
    }
  }
  // Anything still marked as an attention sequence is plain data.
  index = -1;
  while (++index < events.length) {
    if (events[index][1].type === "attentionSequence") {
      events[index][1].type = "data";
    }
  }
  return events;
}
|
||
/**
 * Tokenize a run of `*` or `_` markers as an attention sequence, then
 * classify whether the run can open and/or close emphasis based on the
 * characters before and after it.
 */
function tokenizeAttention(effects, ok3) {
  const attentionMarkers = this.parser.constructs.attentionMarkers.null;
  const previous2 = this.previous;
  const before = classifyCharacter(previous2);
  let marker;
  return start;
  function start(code) {
    ok(
      code === codes.asterisk || code === codes.underscore,
      "expected asterisk or underscore"
    );
    marker = code;
    effects.enter("attentionSequence");
    return inside(code);
  }
  function inside(code) {
    if (code === marker) {
      effects.consume(code);
      return inside;
    }
    const token = effects.exit("attentionSequence");
    const after = classifyCharacter(code);
    ok(attentionMarkers, "expected `attentionMarkers` to be populated");
    // Left-/right-flanking rules (`_` is stricter than `*`).
    const open = !after || after === constants.characterGroupPunctuation && before || attentionMarkers.includes(code);
    const close = !before || before === constants.characterGroupPunctuation && after || attentionMarkers.includes(previous2);
    token._open = Boolean(
      marker === codes.asterisk ? open : open && (before || !close)
    );
    token._close = Boolean(
      marker === codes.asterisk ? close : close && (after || !open)
    );
    return ok3(code);
  }
}
|
||
/**
 * Shift a point by `offset` columns/offsets (mutates `point` in place).
 * Adjusts `column`, `offset`, and `_bufferIndex` by the same amount.
 */
function movePoint(point, offset) {
  point.column += offset;
  point.offset += offset;
  point._bufferIndex += offset;
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/autolink.js
/** Construct for `<scheme:url>` and `<email@host>` autolinks. */
var autolink = { name: "autolink", tokenize: tokenizeAutolink };
/**
 * Tokenize an autolink. After `<`, input is parsed as either a URI with a
 * scheme or an email address; `size` bounds the scheme and domain-label
 * lengths per the corresponding constants.
 */
function tokenizeAutolink(effects, ok3, nok) {
  let size = 0;
  return start;
  function start(code) {
    ok(code === codes.lessThan, "expected `<`");
    effects.enter(types.autolink);
    effects.enter(types.autolinkMarker);
    effects.consume(code);
    effects.exit(types.autolinkMarker);
    effects.enter(types.autolinkProtocol);
    return open;
  }
  function open(code) {
    if (asciiAlpha(code)) {
      effects.consume(code);
      return schemeOrEmailAtext;
    }
    if (code === codes.atSign) {
      return nok(code);
    }
    return emailAtext(code);
  }
  function schemeOrEmailAtext(code) {
    if (code === codes.plusSign || code === codes.dash || code === codes.dot || asciiAlphanumeric(code)) {
      size = 1;
      return schemeInsideOrEmailAtext(code);
    }
    return emailAtext(code);
  }
  function schemeInsideOrEmailAtext(code) {
    if (code === codes.colon) {
      // A colon ends the scheme: this is a URI autolink.
      effects.consume(code);
      size = 0;
      return urlInside;
    }
    if ((code === codes.plusSign || code === codes.dash || code === codes.dot || asciiAlphanumeric(code)) && size++ < constants.autolinkSchemeSizeMax) {
      effects.consume(code);
      return schemeInsideOrEmailAtext;
    }
    size = 0;
    return emailAtext(code);
  }
  function urlInside(code) {
    if (code === codes.greaterThan) {
      effects.exit(types.autolinkProtocol);
      effects.enter(types.autolinkMarker);
      effects.consume(code);
      effects.exit(types.autolinkMarker);
      effects.exit(types.autolink);
      return ok3;
    }
    // EOF, spaces, `<`, and controls are not allowed inside the URL.
    if (code === codes.eof || code === codes.space || code === codes.lessThan || asciiControl(code)) {
      return nok(code);
    }
    effects.consume(code);
    return urlInside;
  }
  function emailAtext(code) {
    if (code === codes.atSign) {
      effects.consume(code);
      return emailAtSignOrDot;
    }
    if (asciiAtext(code)) {
      effects.consume(code);
      return emailAtext;
    }
    return nok(code);
  }
  function emailAtSignOrDot(code) {
    // Each domain label must start alphanumeric.
    return asciiAlphanumeric(code) ? emailLabel(code) : nok(code);
  }
  function emailLabel(code) {
    if (code === codes.dot) {
      effects.consume(code);
      size = 0;
      return emailAtSignOrDot;
    }
    if (code === codes.greaterThan) {
      // Retype the protocol token: this turned out to be an email autolink.
      effects.exit(types.autolinkProtocol).type = types.autolinkEmail;
      effects.enter(types.autolinkMarker);
      effects.consume(code);
      effects.exit(types.autolinkMarker);
      effects.exit(types.autolink);
      return ok3;
    }
    return emailValue(code);
  }
  function emailValue(code) {
    if ((code === codes.dash || asciiAlphanumeric(code)) && size++ < constants.autolinkDomainSizeMax) {
      // A label may not end in a dash, so a dash forces another value char.
      const next = code === codes.dash ? emailValue : emailLabel;
      effects.consume(code);
      return next;
    }
    return nok(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/block-quote.js
/** Container construct for `>` block quotes. */
var blockQuote = {
  continuation: { tokenize: tokenizeBlockQuoteContinuation },
  exit,
  name: "blockQuote",
  tokenize: tokenizeBlockQuoteStart
};
|
||
/**
 * Tokenize the start of a block quote: a `>` marker, optionally followed
 * by one space. Opens the container token only once per container state.
 */
function tokenizeBlockQuoteStart(effects, ok3, nok) {
  const self = this;
  return start;
  function start(code) {
    if (code === codes.greaterThan) {
      const state = self.containerState;
      ok(state, "expected `containerState` to be defined in container");
      if (!state.open) {
        effects.enter(types.blockQuote, { _container: true });
        state.open = true;
      }
      effects.enter(types.blockQuotePrefix);
      effects.enter(types.blockQuoteMarker);
      effects.consume(code);
      effects.exit(types.blockQuoteMarker);
      return after;
    }
    return nok(code);
  }
  function after(code) {
    if (markdownSpace(code)) {
      // Consume at most one space as part of the prefix.
      effects.enter(types.blockQuotePrefixWhitespace);
      effects.consume(code);
      effects.exit(types.blockQuotePrefixWhitespace);
      effects.exit(types.blockQuotePrefix);
      return ok3;
    }
    effects.exit(types.blockQuotePrefix);
    return ok3(code);
  }
}
|
||
/**
 * Tokenize continuation lines of a block quote: optional indentation
 * (bounded unless indented code is disabled), then another `>` prefix.
 */
function tokenizeBlockQuoteContinuation(effects, ok3, nok) {
  const self = this;
  return contStart;
  function contStart(code) {
    if (markdownSpace(code)) {
      ok(
        self.parser.constructs.disable.null,
        "expected `disable.null` to be populated"
      );
      return factorySpace(
        effects,
        contBefore,
        types.linePrefix,
        self.parser.constructs.disable.null.includes("codeIndented") ? void 0 : constants.tabSize
      )(code);
    }
    return contBefore(code);
  }
  function contBefore(code) {
    return effects.attempt(blockQuote, ok3, nok)(code);
  }
}
|
||
/** Close the block quote container token. */
function exit(effects) {
  effects.exit(types.blockQuote);
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/character-escape.js
/** Construct for backslash escapes (`\!`). */
var characterEscape = {
  name: "characterEscape",
  tokenize: tokenizeCharacterEscape
};
/**
 * Tokenize a character escape: a backslash followed by exactly one ASCII
 * punctuation character; anything else fails.
 */
function tokenizeCharacterEscape(effects, ok3, nok) {
  return start;
  function start(code) {
    ok(code === codes.backslash, "expected `\\`");
    effects.enter(types.characterEscape);
    effects.enter(types.escapeMarker);
    effects.consume(code);
    effects.exit(types.escapeMarker);
    return inside;
  }
  function inside(code) {
    if (asciiPunctuation(code)) {
      effects.enter(types.characterEscapeValue);
      effects.consume(code);
      effects.exit(types.characterEscapeValue);
      effects.exit(types.characterEscape);
      return ok3;
    }
    return nok(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/character-reference.js
/** Construct for `&name;`, `&#10;`, and `&#xA;` character references. */
var characterReference = {
  name: "characterReference",
  tokenize: tokenizeCharacterReference
};
/**
 * Tokenize a character reference. `max` and `test` are set per form
 * (named / decimal / hexadecimal); named references are validated against
 * the DOM-backed decoder before accepting.
 */
function tokenizeCharacterReference(effects, ok3, nok) {
  const self = this;
  let size = 0;
  let max;
  let test;
  return start;
  function start(code) {
    ok(code === codes.ampersand, "expected `&`");
    effects.enter(types.characterReference);
    effects.enter(types.characterReferenceMarker);
    effects.consume(code);
    effects.exit(types.characterReferenceMarker);
    return open;
  }
  function open(code) {
    if (code === codes.numberSign) {
      effects.enter(types.characterReferenceMarkerNumeric);
      effects.consume(code);
      effects.exit(types.characterReferenceMarkerNumeric);
      return numeric;
    }
    // Named reference: alphanumeric value, bounded size.
    effects.enter(types.characterReferenceValue);
    max = constants.characterReferenceNamedSizeMax;
    test = asciiAlphanumeric;
    return value(code);
  }
  function numeric(code) {
    if (code === codes.uppercaseX || code === codes.lowercaseX) {
      // Hexadecimal form.
      effects.enter(types.characterReferenceMarkerHexadecimal);
      effects.consume(code);
      effects.exit(types.characterReferenceMarkerHexadecimal);
      effects.enter(types.characterReferenceValue);
      max = constants.characterReferenceHexadecimalSizeMax;
      test = asciiHexDigit;
      return value;
    }
    // Decimal form.
    effects.enter(types.characterReferenceValue);
    max = constants.characterReferenceDecimalSizeMax;
    test = asciiDigit;
    return value(code);
  }
  function value(code) {
    if (code === codes.semicolon && size) {
      const token = effects.exit(types.characterReferenceValue);
      // Named references must actually exist.
      if (test === asciiAlphanumeric && !decodeNamedCharacterReference(self.sliceSerialize(token))) {
        return nok(code);
      }
      effects.enter(types.characterReferenceMarker);
      effects.consume(code);
      effects.exit(types.characterReferenceMarker);
      effects.exit(types.characterReference);
      return ok3;
    }
    if (test(code) && size++ < max) {
      effects.consume(code);
      return value;
    }
    return nok(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/code-fenced.js
/** Partial construct: a line ending that is not a lazy continuation. */
var nonLazyContinuation = {
  partial: true,
  tokenize: tokenizeNonLazyContinuation
};
/** Construct for fenced code blocks (``` or ~~~). */
var codeFenced = {
  concrete: true,
  name: "codeFenced",
  tokenize: tokenizeCodeFenced
};
|
||
/**
 * Tokenize a fenced code block: opening fence sequence (>= the minimum
 * size), optional info/meta strings, content lines (stripped of up to the
 * opening fence's indentation), and an optional closing fence of at least
 * the opening sequence's length.
 */
function tokenizeCodeFenced(effects, ok3, nok) {
  const self = this;
  const closeStart = { partial: true, tokenize: tokenizeCloseStart };
  let initialPrefix = 0;
  let sizeOpen = 0;
  let marker;
  return start;
  function start(code) {
    return beforeSequenceOpen(code);
  }
  function beforeSequenceOpen(code) {
    ok(
      code === codes.graveAccent || code === codes.tilde,
      "expected `` ` `` or `~`"
    );
    // Remember how far the opening fence was indented; content lines may be
    // stripped of that much indentation.
    const tail = self.events[self.events.length - 1];
    initialPrefix = tail && tail[1].type === types.linePrefix ? tail[2].sliceSerialize(tail[1], true).length : 0;
    marker = code;
    effects.enter(types.codeFenced);
    effects.enter(types.codeFencedFence);
    effects.enter(types.codeFencedFenceSequence);
    return sequenceOpen(code);
  }
  function sequenceOpen(code) {
    if (code === marker) {
      sizeOpen++;
      effects.consume(code);
      return sequenceOpen;
    }
    if (sizeOpen < constants.codeFencedSequenceSizeMin) {
      return nok(code);
    }
    effects.exit(types.codeFencedFenceSequence);
    return markdownSpace(code) ? factorySpace(effects, infoBefore, types.whitespace)(code) : infoBefore(code);
  }
  function infoBefore(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.codeFencedFence);
      return self.interrupt ? ok3(code) : effects.check(nonLazyContinuation, atNonLazyBreak, after)(code);
    }
    effects.enter(types.codeFencedFenceInfo);
    effects.enter(types.chunkString, { contentType: constants.contentTypeString });
    return info(code);
  }
  function info(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.chunkString);
      effects.exit(types.codeFencedFenceInfo);
      return infoBefore(code);
    }
    if (markdownSpace(code)) {
      effects.exit(types.chunkString);
      effects.exit(types.codeFencedFenceInfo);
      return factorySpace(effects, metaBefore, types.whitespace)(code);
    }
    // A backtick in the info string of a backtick fence is forbidden.
    if (code === codes.graveAccent && code === marker) {
      return nok(code);
    }
    effects.consume(code);
    return info;
  }
  function metaBefore(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      return infoBefore(code);
    }
    effects.enter(types.codeFencedFenceMeta);
    effects.enter(types.chunkString, { contentType: constants.contentTypeString });
    return meta(code);
  }
  function meta(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.chunkString);
      effects.exit(types.codeFencedFenceMeta);
      return infoBefore(code);
    }
    if (code === codes.graveAccent && code === marker) {
      return nok(code);
    }
    effects.consume(code);
    return meta;
  }
  function atNonLazyBreak(code) {
    ok(markdownLineEnding(code), "expected eol");
    return effects.attempt(closeStart, after, contentBefore)(code);
  }
  function contentBefore(code) {
    ok(markdownLineEnding(code), "expected eol");
    effects.enter(types.lineEnding);
    effects.consume(code);
    effects.exit(types.lineEnding);
    return contentStart;
  }
  function contentStart(code) {
    // Strip up to the opening fence's indentation from content lines.
    return initialPrefix > 0 && markdownSpace(code) ? factorySpace(
      effects,
      beforeContentChunk,
      types.linePrefix,
      initialPrefix + 1
    )(code) : beforeContentChunk(code);
  }
  function beforeContentChunk(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      return effects.check(nonLazyContinuation, atNonLazyBreak, after)(code);
    }
    effects.enter(types.codeFlowValue);
    return contentChunk(code);
  }
  function contentChunk(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.codeFlowValue);
      return beforeContentChunk(code);
    }
    effects.consume(code);
    return contentChunk;
  }
  function after(code) {
    effects.exit(types.codeFenced);
    return ok3(code);
  }
  /** Partial tokenizer for a closing fence on the next line. */
  function tokenizeCloseStart(effects2, ok4, nok2) {
    let size = 0;
    return startBefore;
    function startBefore(code) {
      ok(markdownLineEnding(code), "expected eol");
      effects2.enter(types.lineEnding);
      effects2.consume(code);
      effects2.exit(types.lineEnding);
      return start2;
    }
    function start2(code) {
      ok(
        self.parser.constructs.disable.null,
        "expected `disable.null` to be populated"
      );
      effects2.enter(types.codeFencedFence);
      return markdownSpace(code) ? factorySpace(
        effects2,
        beforeSequenceClose,
        types.linePrefix,
        self.parser.constructs.disable.null.includes("codeIndented") ? void 0 : constants.tabSize
      )(code) : beforeSequenceClose(code);
    }
    function beforeSequenceClose(code) {
      if (code === marker) {
        effects2.enter(types.codeFencedFenceSequence);
        return sequenceClose(code);
      }
      return nok2(code);
    }
    function sequenceClose(code) {
      if (code === marker) {
        size++;
        effects2.consume(code);
        return sequenceClose;
      }
      // Closing fence must be at least as long as the opening one.
      if (size >= sizeOpen) {
        effects2.exit(types.codeFencedFenceSequence);
        return markdownSpace(code) ? factorySpace(effects2, sequenceCloseAfter, types.whitespace)(code) : sequenceCloseAfter(code);
      }
      return nok2(code);
    }
    function sequenceCloseAfter(code) {
      if (code === codes.eof || markdownLineEnding(code)) {
        effects2.exit(types.codeFencedFence);
        return ok4(code);
      }
      return nok2(code);
    }
  }
}
|
||
/**
 * Partial tokenizer: succeed on a line ending whose next line is NOT a
 * lazy continuation (per `parser.lazy`); fail at EOF or on lazy lines.
 */
function tokenizeNonLazyContinuation(effects, ok3, nok) {
  const self = this;
  return start;
  function start(code) {
    if (code === codes.eof) {
      return nok(code);
    }
    ok(markdownLineEnding(code), "expected eol");
    effects.enter(types.lineEnding);
    effects.consume(code);
    effects.exit(types.lineEnding);
    return lineStart;
  }
  function lineStart(code) {
    return self.parser.lazy[self.now().line] ? nok(code) : ok3(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/code-indented.js
/** Construct for indented code blocks (one tab-stop of indentation). */
var codeIndented = {
  name: "codeIndented",
  tokenize: tokenizeCodeIndented
};
/** Partial construct: blank/indented lines continuing an indented block. */
var furtherStart = { partial: true, tokenize: tokenizeFurtherStart };
|
||
/**
 * Tokenize an indented code block: a full tab-stop of indentation, then
 * content lines; blank/indented continuation lines are attempted via
 * `furtherStart`.
 */
function tokenizeCodeIndented(effects, ok3, nok) {
  const self = this;
  return start;
  function start(code) {
    ok(markdownSpace(code));
    effects.enter(types.codeIndented);
    return factorySpace(
      effects,
      afterPrefix,
      types.linePrefix,
      constants.tabSize + 1
    )(code);
  }
  function afterPrefix(code) {
    // Only a full tab-stop of indentation counts as indented code.
    const tail = self.events[self.events.length - 1];
    return tail && tail[1].type === types.linePrefix && tail[2].sliceSerialize(tail[1], true).length >= constants.tabSize ? atBreak(code) : nok(code);
  }
  function atBreak(code) {
    if (code === codes.eof) {
      return after(code);
    }
    if (markdownLineEnding(code)) {
      return effects.attempt(furtherStart, atBreak, after)(code);
    }
    effects.enter(types.codeFlowValue);
    return inside(code);
  }
  function inside(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.codeFlowValue);
      return atBreak(code);
    }
    effects.consume(code);
    return inside;
  }
  function after(code) {
    effects.exit(types.codeIndented);
    return ok3(code);
  }
}
|
||
/**
 * Partial tokenizer for the continuation of an indented code block:
 * any number of line endings, then another fully-indented line. Lazy
 * continuation lines fail.
 */
function tokenizeFurtherStart(effects, ok3, nok) {
  const self = this;
  return furtherStart2;
  function furtherStart2(code) {
    if (self.parser.lazy[self.now().line]) {
      return nok(code);
    }
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding);
      effects.consume(code);
      effects.exit(types.lineEnding);
      return furtherStart2;
    }
    return factorySpace(
      effects,
      afterPrefix,
      types.linePrefix,
      constants.tabSize + 1
    )(code);
  }
  function afterPrefix(code) {
    const tail = self.events[self.events.length - 1];
    return tail && tail[1].type === types.linePrefix && tail[2].sliceSerialize(tail[1], true).length >= constants.tabSize ? ok3(code) : markdownLineEnding(code) ? furtherStart2(code) : nok(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/code-text.js
/** Construct for inline code spans (`code`). */
var codeText = {
  name: "codeText",
  previous,
  resolve: resolveCodeText,
  tokenize: tokenizeCodeText
};
|
||
/**
 * Resolve code-span events: turn one leading and one trailing space/EOL
 * into padding (when the span contains data), then merge runs of inner
 * events into single `codeTextData` tokens split only on line endings.
 */
function resolveCodeText(events) {
  let tailExitIndex = events.length - 4;
  let headEnterIndex = 3;
  let index;
  let enter;
  // Pad: first and last inner events are a space/EOL and there is data.
  if ((events[headEnterIndex][1].type === types.lineEnding || events[headEnterIndex][1].type === "space") && (events[tailExitIndex][1].type === types.lineEnding || events[tailExitIndex][1].type === "space")) {
    index = headEnterIndex;
    while (++index < tailExitIndex) {
      if (events[index][1].type === types.codeTextData) {
        events[headEnterIndex][1].type = types.codeTextPadding;
        events[tailExitIndex][1].type = types.codeTextPadding;
        headEnterIndex += 2;
        tailExitIndex -= 2;
        break;
      }
    }
  }
  // Merge adjacent non-EOL events into single data tokens.
  index = headEnterIndex - 1;
  tailExitIndex++;
  while (++index <= tailExitIndex) {
    if (enter === void 0) {
      if (index !== tailExitIndex && events[index][1].type !== types.lineEnding) {
        enter = index;
      }
    } else if (index === tailExitIndex || events[index][1].type === types.lineEnding) {
      events[enter][1].type = types.codeTextData;
      if (index !== enter + 2) {
        events[enter][1].end = events[index - 1][1].end;
        events.splice(enter + 2, index - enter - 2);
        tailExitIndex -= index - enter - 2;
        index = enter + 2;
      }
      enter = void 0;
    }
  }
  return events;
}
|
||
/**
 * Guard for code spans: the previous character may not be a backtick
 * unless that backtick was itself a character escape.
 */
function previous(code) {
  return code !== codes.graveAccent || this.events[this.events.length - 1][1].type === types.characterEscape;
}
|
||
/**
 * Tokenize an inline code span: an opening backtick run, data/spaces/EOLs,
 * and a closing backtick run of exactly the same length. A shorter closing
 * run is downgraded to data and scanning continues.
 */
function tokenizeCodeText(effects, ok3, nok) {
  const self = this;
  let sizeOpen = 0;
  let size;
  let token;
  return start;
  function start(code) {
    ok(code === codes.graveAccent, "expected `` ` ``");
    ok(previous.call(self, self.previous), "expected correct previous");
    effects.enter(types.codeText);
    effects.enter(types.codeTextSequence);
    return sequenceOpen(code);
  }
  function sequenceOpen(code) {
    if (code === codes.graveAccent) {
      effects.consume(code);
      sizeOpen++;
      return sequenceOpen;
    }
    effects.exit(types.codeTextSequence);
    return between(code);
  }
  function between(code) {
    if (code === codes.eof) {
      // Unclosed span: fail.
      return nok(code);
    }
    if (code === codes.space) {
      effects.enter("space");
      effects.consume(code);
      effects.exit("space");
      return between;
    }
    if (code === codes.graveAccent) {
      token = effects.enter(types.codeTextSequence);
      size = 0;
      return sequenceClose(code);
    }
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding);
      effects.consume(code);
      effects.exit(types.lineEnding);
      return between;
    }
    effects.enter(types.codeTextData);
    return data(code);
  }
  function data(code) {
    if (code === codes.eof || code === codes.space || code === codes.graveAccent || markdownLineEnding(code)) {
      effects.exit(types.codeTextData);
      return between(code);
    }
    effects.consume(code);
    return data;
  }
  function sequenceClose(code) {
    if (code === codes.graveAccent) {
      effects.consume(code);
      size++;
      return sequenceClose;
    }
    if (size === sizeOpen) {
      effects.exit(types.codeTextSequence);
      effects.exit(types.codeText);
      return ok3(code);
    }
    // Wrong length: this run is just data inside the span.
    token.type = types.codeTextData;
    return data(code);
  }
}
|
||
|
||
// node_modules/micromark-util-subtokenize/dev/lib/splice-buffer.js
|
||
/**
 * A gap buffer over two stacks: `left` holds the items before the cursor in
 * order; `right` holds the items after the cursor in *reverse* order.
 * Moving the cursor costs time proportional to the distance moved, but
 * repeated edits near the cursor are cheap — the access pattern of
 * `subtokenize`, which constructs one of these over its event list.
 *
 * @template T
 */
var SpliceBuffer = class {
  /**
   * @param {ReadonlyArray<T> | null | undefined} [initial]
   *   Initial items (optional).
   * @returns
   *   Splice buffer.
   */
  constructor(initial) {
    this.left = initial ? [...initial] : [];
    this.right = [];
  }
  /**
   * Array access;
   * does not move the cursor.
   *
   * @param {number} index
   *   Index.
   * @return {T}
   *   Item.
   */
  get(index) {
    if (index < 0 || index >= this.left.length + this.right.length) {
      throw new RangeError(
        "Cannot access index `" + index + "` in a splice buffer of size `" + (this.left.length + this.right.length) + "`"
      );
    }
    if (index < this.left.length) return this.left[index];
    // `right` is stored reversed; map the logical index onto it.
    return this.right[this.right.length - index + this.left.length - 1];
  }
  /**
   * The length of the splice buffer, one greater than the largest index in the
   * array.
   */
  get length() {
    return this.left.length + this.right.length;
  }
  /**
   * Remove and return `list[0]`;
   * moves the cursor to `0`.
   *
   * @returns {T | undefined}
   *   Item, optional.
   */
  shift() {
    this.setCursor(0);
    // After `setCursor(0)` everything is in `right` (reversed), so the
    // first logical item is its top.
    return this.right.pop();
  }
  /**
   * Slice the buffer to get an array;
   * does not move the cursor.
   *
   * @param {number} start
   *   Start.
   * @param {number | null | undefined} [end]
   *   End (optional).
   * @returns {Array<T>}
   *   Array of items.
   */
  slice(start, end) {
    /** @type {number} */
    const stop = end === null || end === void 0 ? Number.POSITIVE_INFINITY : end;
    if (stop < this.left.length) {
      // Entirely within `left`.
      return this.left.slice(start, stop);
    }
    if (start > this.left.length) {
      // Entirely within `right` (reversed, hence the mirrored indices).
      return this.right.slice(
        this.right.length - stop + this.left.length,
        this.right.length - start + this.left.length
      ).reverse();
    }
    // Straddles the cursor: combine both halves.
    return this.left.slice(start).concat(
      this.right.slice(this.right.length - stop + this.left.length).reverse()
    );
  }
  /**
   * Mimics the behavior of Array.prototype.splice() except for the change of
   * interface necessary to avoid segfaults when patching in very large arrays.
   *
   * This operation moves the cursor to `start` and results in the cursor
   * placed after any inserted items.
   *
   * @param {number} start
   *   Start;
   *   zero-based index at which to start changing the array;
   *   negative numbers count backwards from the end of the array and values
   *   that are out-of bounds are clamped to the appropriate end of the array.
   * @param {number | null | undefined} [deleteCount=0]
   *   Delete count (default: `0`);
   *   maximum number of elements to delete, starting from start.
   * @param {Array<T> | null | undefined} [items=[]]
   *   Items to include in place of the deleted items (default: `[]`).
   * @return {Array<T>}
   *   Any removed items.
   */
  splice(start, deleteCount, items) {
    const count = deleteCount || 0;
    this.setCursor(Math.trunc(start));
    // The `count` items after the cursor are the top of `right`.
    const removed = this.right.splice(
      this.right.length - count,
      Number.POSITIVE_INFINITY
    );
    if (items) chunkedPush(this.left, items);
    return removed.reverse();
  }
  /**
   * Remove and return the highest-numbered item in the array, so
   * `list[list.length - 1]`;
   * Moves the cursor to `length`.
   *
   * @returns {T | undefined}
   *   Item, optional.
   */
  pop() {
    this.setCursor(Number.POSITIVE_INFINITY);
    return this.left.pop();
  }
  /**
   * Inserts a single item to the high-numbered side of the array;
   * moves the cursor to `length`.
   *
   * @param {T} item
   *   Item.
   * @returns {undefined}
   *   Nothing.
   */
  push(item) {
    this.setCursor(Number.POSITIVE_INFINITY);
    this.left.push(item);
  }
  /**
   * Inserts many items to the high-numbered side of the array.
   * Moves the cursor to `length`.
   *
   * @param {Array<T>} items
   *   Items.
   * @returns {undefined}
   *   Nothing.
   */
  pushMany(items) {
    this.setCursor(Number.POSITIVE_INFINITY);
    chunkedPush(this.left, items);
  }
  /**
   * Inserts a single item to the low-numbered side of the array;
   * Moves the cursor to `0`.
   *
   * @param {T} item
   *   Item.
   * @returns {undefined}
   *   Nothing.
   */
  unshift(item) {
    this.setCursor(0);
    this.right.push(item);
  }
  /**
   * Inserts many items to the low-numbered side of the array;
   * moves the cursor to `0`.
   *
   * @param {Array<T>} items
   *   Items.
   * @returns {undefined}
   *   Nothing.
   */
  unshiftMany(items) {
    this.setCursor(0);
    // `right` is reversed, so reverse the incoming items too.
    chunkedPush(this.right, items.reverse());
  }
  /**
   * Move the cursor to a specific position in the array. Requires
   * time proportional to the distance moved.
   *
   * If `n < 0`, the cursor will end up at the beginning.
   * If `n > length`, the cursor will end up at the end.
   *
   * @param {number} n
   *   Position.
   * @return {undefined}
   *   Nothing.
   */
  setCursor(n) {
    // Already there (including the clamped-at-either-end cases): no work.
    if (n === this.left.length || n > this.left.length && this.right.length === 0 || n < 0 && this.left.length === 0)
      return;
    if (n < this.left.length) {
      // Move items from `left` to `right` (reversing them on the way).
      const removed = this.left.splice(n, Number.POSITIVE_INFINITY);
      chunkedPush(this.right, removed.reverse());
    } else {
      // Move items from `right` back to `left`.
      const removed = this.right.splice(
        this.left.length + this.right.length - n,
        Number.POSITIVE_INFINITY
      );
      chunkedPush(this.left, removed.reverse());
    }
  }
};
|
||
/**
 * Append all of `right` onto `list2` in place.
 *
 * Spreading a very large array into `push` can exceed the engine's argument
 * limit, so big arrays are appended in bounded chunks.
 *
 * @param {Array} list2
 *   Destination list (mutated).
 * @param {Array} right
 *   Items to append.
 * @returns {undefined}
 *   Nothing.
 */
function chunkedPush(list2, right) {
  // Fast path: small enough to spread in one call.
  if (right.length < constants.v8MaxSafeChunkSize) {
    list2.push(...right);
    return;
  }
  for (
    let offset = 0;
    offset < right.length;
    offset += constants.v8MaxSafeChunkSize
  ) {
    list2.push(...right.slice(offset, offset + constants.v8MaxSafeChunkSize));
  }
}
|
||
|
||
// node_modules/micromark-util-subtokenize/dev/index.js
|
||
/**
 * Expand subtokenized chunks inside an event list, in place.
 *
 * Walks the events, replacing each chunk that carries a `contentType` with
 * the events produced by its sub-tokenizer (via `subcontent`), and fixes up
 * line endings around exiting containers.
 *
 * @param {Array} eventsArray
 *   Events (mutated in place at the end).
 * @returns {boolean}
 *   `true` when no chunks were expanded (done), `false` otherwise.
 */
function subtokenize(eventsArray) {
  // Map of indices to skip over (regions already replaced).
  const jumps = {};
  let index = -1;
  let event;
  let lineIndex;
  let otherIndex;
  let otherEvent;
  let parameters;
  let subevents;
  let more;
  // Work on a splice buffer: edits cluster near the cursor.
  const events = new SpliceBuffer(eventsArray);
  while (++index < events.length) {
    while (index in jumps) {
      index = jumps[index];
    }
    event = events.get(index);
    // Mark the first content chunk of a list item (hook used by the GFM
    // task-list extension).
    if (index && event[1].type === types.chunkFlow && events.get(index - 1)[1].type === types.listItemPrefix) {
      ok(event[1]._tokenizer, "expected `_tokenizer` on subtokens");
      subevents = event[1]._tokenizer.events;
      otherIndex = 0;
      if (otherIndex < subevents.length && subevents[otherIndex][1].type === types.lineEndingBlank) {
        otherIndex += 2;
      }
      if (otherIndex < subevents.length && subevents[otherIndex][1].type === types.content) {
        while (++otherIndex < subevents.length) {
          if (subevents[otherIndex][1].type === types.content) {
            break;
          }
          if (subevents[otherIndex][1].type === types.chunkText) {
            subevents[otherIndex][1]._isInFirstContentOfListItem = true;
            otherIndex++;
          }
        }
      }
    }
    if (event[0] === "enter") {
      if (event[1].contentType) {
        // Replace this chunk with its subtokenized events.
        Object.assign(jumps, subcontent(events, index));
        index = jumps[index];
        more = true;
      }
    } else if (event[1]._container) {
      // Exiting a container: pull trailing line endings / prefixes out of
      // the container so it ends before them.
      otherIndex = index;
      lineIndex = void 0;
      while (otherIndex--) {
        otherEvent = events.get(otherIndex);
        if (otherEvent[1].type === types.lineEnding || otherEvent[1].type === types.lineEndingBlank) {
          if (otherEvent[0] === "enter") {
            if (lineIndex) {
              // Every line ending but the last becomes a blank line ending.
              events.get(lineIndex)[1].type = types.lineEndingBlank;
            }
            otherEvent[1].type = types.lineEnding;
            lineIndex = otherIndex;
          }
        } else if (otherEvent[1].type === types.linePrefix || otherEvent[1].type === types.listItemIndent) {
          // Whitespace between line endings: keep scanning backwards.
        } else {
          break;
        }
      }
      if (lineIndex) {
        // End the container token at the first of those line endings and
        // move its exit event before them.
        event[1].end = { ...events.get(lineIndex)[1].start };
        parameters = events.slice(lineIndex, index);
        parameters.unshift(event);
        events.splice(lineIndex, index - lineIndex + 1, parameters);
      }
    }
  }
  // Write the (possibly rewritten) events back into the original array.
  splice(eventsArray, 0, Number.POSITIVE_INFINITY, events.slice(0));
  return !more;
}
|
||
/**
 * Tokenize the linked chunks starting at `events[eventIndex]` with their
 * content-type tokenizer and splice the resulting events into `events`.
 *
 * @param {SpliceBuffer} events
 *   Events buffer (mutated).
 * @param {number} eventIndex
 *   Index of the enter event of the first chunk.
 * @returns {Object}
 *   Map of start indices to end indices of the replaced regions (jumps).
 */
function subcontent(events, eventIndex) {
  const token = events.get(eventIndex)[1];
  const context = events.get(eventIndex)[2];
  let startPosition = eventIndex - 1;
  // Enter-event index of each chunk, in order.
  const startPositions = [];
  ok(token.contentType, "expected `contentType` on subtokens");
  // Reuse an existing sub-tokenizer if the chain was partially tokenized.
  let tokenizer = token._tokenizer;
  if (!tokenizer) {
    tokenizer = context.parser[token.contentType](token.start);
    if (token._contentTypeTextTrailing) {
      tokenizer._contentTypeTextTrailing = true;
    }
  }
  const childEvents = tokenizer.events;
  const jumps = [];
  const gaps = {};
  let stream;
  let previous2;
  let index = -1;
  let current = token;
  let adjust = 0;
  let start = 0;
  const breaks = [start];
  // Feed every chunk in the linked chain to the sub-tokenizer.
  while (current) {
    // Find the enter event of `current` (chunks appear in order).
    while (events.get(++startPosition)[1] !== current) {
    }
    ok(
      !previous2 || current.previous === previous2,
      "expected previous to match"
    );
    ok(!previous2 || previous2.next === current, "expected next to match");
    startPositions.push(startPosition);
    if (!current._tokenizer) {
      stream = context.sliceStream(current);
      if (!current.next) {
        stream.push(codes.eof);
      }
      if (previous2) {
        tokenizer.defineSkip(current.start);
      }
      if (current._isInFirstContentOfListItem) {
        tokenizer._gfmTasklistFirstContentOfListItem = true;
      }
      tokenizer.write(stream);
      if (current._isInFirstContentOfListItem) {
        tokenizer._gfmTasklistFirstContentOfListItem = void 0;
      }
    }
    previous2 = current;
    current = current.next;
  }
  // Find the boundaries in the child events that correspond to chunk breaks.
  current = token;
  while (++index < childEvents.length) {
    if (
      // Find a void token that includes a break.
      childEvents[index][0] === "exit" && childEvents[index - 1][0] === "enter" && childEvents[index][1].type === childEvents[index - 1][1].type && childEvents[index][1].start.line !== childEvents[index][1].end.line
    ) {
      ok(current, "expected a current token");
      start = index + 1;
      breaks.push(start);
      // Chunk is consumed: unlink it.
      current._tokenizer = void 0;
      current.previous = void 0;
      current = current.next;
    }
  }
  // The sub-tokenizer's events are consumed below; detach them.
  tokenizer.events = [];
  if (current) {
    // Unlink the final chunk.
    current._tokenizer = void 0;
    current.previous = void 0;
    ok(!current.next, "expected no next token");
  } else {
    breaks.pop();
  }
  // Splice the child events in, from the back so indices stay valid.
  index = breaks.length;
  while (index--) {
    const slice = childEvents.slice(breaks[index], breaks[index + 1]);
    const start2 = startPositions.pop();
    ok(start2 !== void 0, "expected a start position when splicing");
    jumps.push([start2, start2 + slice.length - 1]);
    // Replace the chunk's enter+exit pair with the slice.
    events.splice(start2, 2, slice);
  }
  jumps.reverse();
  // Convert the jump pairs into a start→end map, adjusting for the size
  // changes made by earlier splices.
  index = -1;
  while (++index < jumps.length) {
    gaps[adjust + jumps[index][0]] = adjust + jumps[index][1];
    adjust += jumps[index][1] - jumps[index][0] - 1;
  }
  return gaps;
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/content.js
|
||
// The `content` construct: paragraph-like flow content; its chunks are
// expanded afterwards by `resolveContent`.
var content = { resolve: resolveContent, tokenize: tokenizeContent };
// Partial construct used to look past a line ending and decide whether the
// content continues on the next line.
var continuationConstruct = { partial: true, tokenize: tokenizeContinuation };
|
||
/**
 * Resolve content: expand the subtokenized chunks in place.
 *
 * @param {Array} events
 *   Events.
 * @returns {Array}
 *   The same (mutated) event list.
 */
function resolveContent(events) {
  subtokenize(events);
  return events;
}
|
||
/**
 * Tokenize content: one or more lines, stored as linked `chunkContent`
 * tokens (one per line) to be subtokenized later.
 *
 * @param {Object} effects
 *   Tokenizer effects.
 * @param {Function} ok3
 *   Success continuation.
 * @returns {Function}
 *   Initial state.
 */
function tokenizeContent(effects, ok3) {
  // Most recently entered chunk; chunks are linked via `previous`/`next`.
  let previous2;
  return chunkStart;
  // At the start of a content chunk.
  function chunkStart(code) {
    ok(
      code !== codes.eof && !markdownLineEnding(code),
      "expected no eof or eol"
    );
    effects.enter(types.content);
    previous2 = effects.enter(types.chunkContent, {
      contentType: constants.contentTypeContent
    });
    return chunkInside(code);
  }
  // Inside a chunk.
  function chunkInside(code) {
    if (code === codes.eof) {
      return contentEnd(code);
    }
    // At an EOL: look ahead to see whether the next line continues this
    // content or ends it.
    if (markdownLineEnding(code)) {
      return effects.check(
        continuationConstruct,
        contentContinue,
        contentEnd
      )(code);
    }
    effects.consume(code);
    return chunkInside;
  }
  // Content is done.
  function contentEnd(code) {
    effects.exit(types.chunkContent);
    effects.exit(types.content);
    return ok3(code);
  }
  // Content continues on the next line: start a new linked chunk.
  function contentContinue(code) {
    ok(markdownLineEnding(code), "expected eol");
    effects.consume(code);
    effects.exit(types.chunkContent);
    ok(previous2, "expected previous token");
    previous2.next = effects.enter(types.chunkContent, {
      contentType: constants.contentTypeContent,
      previous: previous2
    });
    previous2 = previous2.next;
    return chunkInside;
  }
}
|
||
/**
 * Look ahead (as a partial check) whether the line after an EOL continues
 * the current content block.
 *
 * @param {Object} effects
 *   Tokenizer effects.
 * @param {Function} ok3
 *   Continuation when content continues.
 * @param {Function} nok
 *   Continuation when content ends.
 * @returns {Function}
 *   Initial state.
 */
function tokenizeContinuation(effects, ok3, nok) {
  const self = this;
  return startLookahead;
  // At the EOL: consume it plus any leading whitespace of the next line.
  function startLookahead(code) {
    ok(markdownLineEnding(code), "expected a line ending");
    effects.exit(types.chunkContent);
    effects.enter(types.lineEnding);
    effects.consume(code);
    effects.exit(types.lineEnding);
    return factorySpace(effects, prefixed, types.linePrefix);
  }
  // After the line prefix of the next line.
  function prefixed(code) {
    // A blank line or EOF ends the content.
    if (code === codes.eof || markdownLineEnding(code)) {
      return nok(code);
    }
    ok(
      self.parser.constructs.disable.null,
      "expected `disable.null` to be populated"
    );
    const tail = self.events[self.events.length - 1];
    // An indented (4+ spaces) line continues content — unless indented code
    // is disabled, in which case it could start an indented code block.
    if (!self.parser.constructs.disable.null.includes("codeIndented") && tail && tail[1].type === types.linePrefix && tail[2].sliceSerialize(tail[1], true).length >= constants.tabSize) {
      return ok3(code);
    }
    // Otherwise, content continues only if no flow construct interrupts it.
    return effects.interrupt(self.parser.constructs.flow, nok, ok3)(code);
  }
}
|
||
|
||
// node_modules/micromark-factory-destination/dev/index.js
|
||
/**
 * Parse a link/definition destination: either `<enclosed>` or a raw run
 * with balanced parentheses.
 *
 * Token type names are injected so this factory serves several constructs.
 *
 * @param {Object} effects
 *   Tokenizer effects.
 * @param {Function} ok3
 *   Success continuation.
 * @param {Function} nok
 *   Failure continuation.
 * @param {string} type
 *   Whole-destination token type.
 * @param {string} literalType
 *   Token type for the `<...>` form.
 * @param {string} literalMarkerType
 *   Token type for the `<` / `>` markers.
 * @param {string} rawType
 *   Token type for the raw form.
 * @param {string} stringType
 *   Token type for the destination text.
 * @param {number} [max]
 *   Max parenthesis nesting in the raw form (default: unbounded).
 * @returns {Function}
 *   Initial state.
 */
function factoryDestination(effects, ok3, nok, type, literalType, literalMarkerType, rawType, stringType, max) {
  const limit = max || Number.POSITIVE_INFINITY;
  // Current depth of unclosed `(` in a raw destination.
  let balance = 0;
  return start;
  // At the start: `<` picks the enclosed form, otherwise raw.
  function start(code) {
    if (code === codes.lessThan) {
      effects.enter(type);
      effects.enter(literalType);
      effects.enter(literalMarkerType);
      effects.consume(code);
      effects.exit(literalMarkerType);
      return enclosedBefore;
    }
    if (code === codes.eof || code === codes.space || code === codes.rightParenthesis || asciiControl(code)) {
      return nok(code);
    }
    effects.enter(type);
    effects.enter(rawType);
    effects.enter(stringType);
    effects.enter(types.chunkString, { contentType: constants.contentTypeString });
    return raw(code);
  }
  // Just after `<`: an immediate `>` is an empty enclosed destination.
  function enclosedBefore(code) {
    if (code === codes.greaterThan) {
      effects.enter(literalMarkerType);
      effects.consume(code);
      effects.exit(literalMarkerType);
      effects.exit(literalType);
      effects.exit(type);
      return ok3;
    }
    effects.enter(stringType);
    effects.enter(types.chunkString, { contentType: constants.contentTypeString });
    return enclosed(code);
  }
  // Inside `<...>`.
  function enclosed(code) {
    if (code === codes.greaterThan) {
      effects.exit(types.chunkString);
      effects.exit(stringType);
      return enclosedBefore(code);
    }
    // `<`, EOF, and line endings are not allowed inside the brackets.
    if (code === codes.eof || code === codes.lessThan || markdownLineEnding(code)) {
      return nok(code);
    }
    effects.consume(code);
    return code === codes.backslash ? enclosedEscape : enclosed;
  }
  // After `\` inside `<...>`: the escapable characters are consumed.
  function enclosedEscape(code) {
    if (code === codes.lessThan || code === codes.greaterThan || code === codes.backslash) {
      effects.consume(code);
      return enclosed;
    }
    return enclosed(code);
  }
  // In a raw destination.
  function raw(code) {
    // Only end at `)` / whitespace / EOF when not inside parentheses.
    if (!balance && (code === codes.eof || code === codes.rightParenthesis || markdownLineEndingOrSpace(code))) {
      effects.exit(types.chunkString);
      effects.exit(stringType);
      effects.exit(rawType);
      effects.exit(type);
      return ok3(code);
    }
    if (balance < limit && code === codes.leftParenthesis) {
      effects.consume(code);
      balance++;
      return raw;
    }
    if (code === codes.rightParenthesis) {
      effects.consume(code);
      balance--;
      return raw;
    }
    // At this point `(` means the nesting limit was exceeded.
    if (code === codes.eof || code === codes.space || code === codes.leftParenthesis || asciiControl(code)) {
      return nok(code);
    }
    effects.consume(code);
    return code === codes.backslash ? rawEscape : raw;
  }
  // After `\` in a raw destination.
  function rawEscape(code) {
    if (code === codes.leftParenthesis || code === codes.rightParenthesis || code === codes.backslash) {
      effects.consume(code);
      return raw;
    }
    return raw(code);
  }
}
|
||
|
||
// node_modules/micromark-factory-label/dev/index.js
|
||
/**
 * Parse a link label: `[text]`, with a size cap and escape handling.
 *
 * Token type names are injected so this factory serves several constructs.
 * Called with a tokenize context as `this`.
 *
 * @param {Object} effects
 *   Tokenizer effects.
 * @param {Function} ok3
 *   Success continuation.
 * @param {Function} nok
 *   Failure continuation.
 * @param {string} type
 *   Whole-label token type.
 * @param {string} markerType
 *   Token type for `[` / `]`.
 * @param {string} stringType
 *   Token type for the label text.
 * @returns {Function}
 *   Initial state.
 */
function factoryLabel(effects, ok3, nok, type, markerType, stringType) {
  const self = this;
  // Number of characters consumed inside the brackets (capped).
  let size = 0;
  // Whether a non-whitespace character was seen (empty labels are invalid).
  let seen;
  return start;
  // At `[`.
  function start(code) {
    ok(code === codes.leftSquareBracket, "expected `[`");
    effects.enter(type);
    effects.enter(markerType);
    effects.consume(code);
    effects.exit(markerType);
    effects.enter(stringType);
    return atBreak;
  }
  // At a break inside the label (start, after an EOL, or after a chunk).
  function atBreak(code) {
    if (size > constants.linkReferenceSizeMax || code === codes.eof || code === codes.leftSquareBracket || code === codes.rightSquareBracket && !seen || // To do: remove in the future once we’ve switched from
    // `micromark-extension-footnote` to `micromark-extension-gfm-footnote`,
    // which doesn’t need this.
    // Hidden footnotes hook.
    /* c8 ignore next 3 */
    code === codes.caret && !size && "_hiddenFootnoteSupport" in self.parser.constructs) {
      return nok(code);
    }
    if (code === codes.rightSquareBracket) {
      effects.exit(stringType);
      effects.enter(markerType);
      effects.consume(code);
      effects.exit(markerType);
      effects.exit(type);
      return ok3;
    }
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding);
      effects.consume(code);
      effects.exit(types.lineEnding);
      return atBreak;
    }
    effects.enter(types.chunkString, { contentType: constants.contentTypeString });
    return labelInside(code);
  }
  // In label text.
  function labelInside(code) {
    if (code === codes.eof || code === codes.leftSquareBracket || code === codes.rightSquareBracket || markdownLineEnding(code) || size++ > constants.linkReferenceSizeMax) {
      effects.exit(types.chunkString);
      return atBreak(code);
    }
    effects.consume(code);
    if (!seen) seen = !markdownSpace(code);
    return code === codes.backslash ? labelEscape : labelInside;
  }
  // After `\` in label text.
  function labelEscape(code) {
    if (code === codes.leftSquareBracket || code === codes.backslash || code === codes.rightSquareBracket) {
      effects.consume(code);
      size++;
      return labelInside;
    }
    return labelInside(code);
  }
}
|
||
|
||
// node_modules/micromark-factory-title/dev/index.js
|
||
/**
 * Parse a title: `"text"`, `'text'`, or `(text)`, possibly spanning lines.
 *
 * Token type names are injected so this factory serves several constructs.
 *
 * @param {Object} effects
 *   Tokenizer effects.
 * @param {Function} ok3
 *   Success continuation.
 * @param {Function} nok
 *   Failure continuation.
 * @param {string} type
 *   Whole-title token type.
 * @param {string} markerType
 *   Token type for the opening/closing markers.
 * @param {string} stringType
 *   Token type for the title text.
 * @returns {Function}
 *   Initial state.
 */
function factoryTitle(effects, ok3, nok, type, markerType, stringType) {
  // The closing marker we expect (`)` when opened with `(`).
  let marker;
  return start;
  // At the opening marker.
  function start(code) {
    if (code === codes.quotationMark || code === codes.apostrophe || code === codes.leftParenthesis) {
      effects.enter(type);
      effects.enter(markerType);
      effects.consume(code);
      effects.exit(markerType);
      marker = code === codes.leftParenthesis ? codes.rightParenthesis : code;
      return begin;
    }
    return nok(code);
  }
  // Right after the opening marker: an immediate close makes an empty title.
  function begin(code) {
    if (code === marker) {
      effects.enter(markerType);
      effects.consume(code);
      effects.exit(markerType);
      effects.exit(type);
      return ok3;
    }
    effects.enter(stringType);
    return atBreak(code);
  }
  // At a break inside the title.
  function atBreak(code) {
    if (code === marker) {
      effects.exit(stringType);
      return begin(marker);
    }
    if (code === codes.eof) {
      return nok(code);
    }
    // Titles may span lines; eat the EOL and the next line's prefix.
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding);
      effects.consume(code);
      effects.exit(types.lineEnding);
      return factorySpace(effects, atBreak, types.linePrefix);
    }
    effects.enter(types.chunkString, { contentType: constants.contentTypeString });
    return inside(code);
  }
  // In title text.
  function inside(code) {
    if (code === marker || code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.chunkString);
      return atBreak(code);
    }
    effects.consume(code);
    return code === codes.backslash ? escape : inside;
  }
  // After `\` in title text.
  function escape(code) {
    if (code === marker || code === codes.backslash) {
      effects.consume(code);
      return inside;
    }
    return inside(code);
  }
}
|
||
|
||
// node_modules/micromark-factory-whitespace/dev/index.js
|
||
/**
 * Eat any mix of markdown whitespace: spaces, tabs, and line endings.
 *
 * Spaces before the first EOL are tokenized as a line suffix; spaces after
 * an EOL as a line prefix.
 *
 * @param {Object} effects
 *   Tokenizer effects.
 * @param {Function} ok3
 *   Continuation at the first non-whitespace character.
 * @returns {Function}
 *   Initial state.
 */
function factoryWhitespace(effects, ok3) {
  // Whether at least one line ending has been consumed.
  let sawEol;
  return consumeWhitespace;

  function consumeWhitespace(code) {
    if (markdownLineEnding(code)) {
      effects.enter(types.lineEnding);
      effects.consume(code);
      effects.exit(types.lineEnding);
      sawEol = true;
      return consumeWhitespace;
    }
    if (!markdownSpace(code)) {
      // Done: hand the non-whitespace character on.
      return ok3(code);
    }
    const spaceType = sawEol ? types.linePrefix : types.lineSuffix;
    return factorySpace(effects, consumeWhitespace, spaceType)(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/definition.js
|
||
// The `definition` construct: `[label]: destination "title"`.
var definition = { name: "definition", tokenize: tokenizeDefinition };
// Partial construct for the optional whitespace-then-title after a destination.
var titleBefore = { partial: true, tokenize: tokenizeTitleBefore };
|
||
/**
 * Tokenize a link reference definition: `[label]: destination "title"`.
 *
 * @param {Object} effects
 *   Tokenizer effects.
 * @param {Function} ok3
 *   Success continuation.
 * @param {Function} nok
 *   Failure continuation.
 * @returns {Function}
 *   Initial state.
 */
function tokenizeDefinition(effects, ok3, nok) {
  const self = this;
  // Normalized identifier of the label, recorded on success.
  let identifier;
  return start;
  function start(code) {
    effects.enter(types.definition);
    return before(code);
  }
  // At `[`: delegate the label to the label factory.
  function before(code) {
    ok(code === codes.leftSquareBracket, "expected `[`");
    return factoryLabel.call(
      self,
      effects,
      labelAfter,
      // Note: we don’t need to reset the way `markdown-rs` does.
      nok,
      types.definitionLabel,
      types.definitionLabelMarker,
      types.definitionLabelString
    )(code);
  }
  // After `[label]`: expect `:`.
  function labelAfter(code) {
    // Serialize the label without its brackets and normalize it.
    identifier = normalizeIdentifier(
      self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)
    );
    if (code === codes.colon) {
      effects.enter(types.definitionMarker);
      effects.consume(code);
      effects.exit(types.definitionMarker);
      return markerAfter;
    }
    return nok(code);
  }
  // After `:`: optional whitespace, then the destination.
  function markerAfter(code) {
    return markdownLineEndingOrSpace(code) ? factoryWhitespace(effects, destinationBefore)(code) : destinationBefore(code);
  }
  function destinationBefore(code) {
    return factoryDestination(
      effects,
      destinationAfter,
      // Note: we don’t need to reset the way `markdown-rs` does.
      nok,
      types.definitionDestination,
      types.definitionDestinationLiteral,
      types.definitionDestinationLiteralMarker,
      types.definitionDestinationRaw,
      types.definitionDestinationString
    )(code);
  }
  // After the destination: an optional title (attempted; both outcomes
  // continue at `after`).
  function destinationAfter(code) {
    return effects.attempt(titleBefore, after, after)(code);
  }
  function after(code) {
    return markdownSpace(code) ? factorySpace(effects, afterWhitespace, types.whitespace)(code) : afterWhitespace(code);
  }
  // The definition must be followed by an EOL or EOF.
  function afterWhitespace(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.definition);
      // Record the identifier so references can resolve to it.
      self.parser.defined.push(identifier);
      return ok3(code);
    }
    return nok(code);
  }
}
|
||
/**
 * Tokenize the optional title part of a definition: required whitespace,
 * then the title, then optional trailing whitespace up to EOL/EOF.
 *
 * @param {Object} effects
 *   Tokenizer effects.
 * @param {Function} ok3
 *   Success continuation.
 * @param {Function} nok
 *   Failure continuation.
 * @returns {Function}
 *   Initial state.
 */
function tokenizeTitleBefore(effects, ok3, nok) {
  return titleBefore2;
  // Whitespace between destination and title is mandatory.
  function titleBefore2(code) {
    return markdownLineEndingOrSpace(code) ? factoryWhitespace(effects, beforeMarker)(code) : nok(code);
  }
  function beforeMarker(code) {
    return factoryTitle(
      effects,
      titleAfter,
      nok,
      types.definitionTitle,
      types.definitionTitleMarker,
      types.definitionTitleString
    )(code);
  }
  // Optional trailing spaces after the title.
  function titleAfter(code) {
    return markdownSpace(code) ? factorySpace(
      effects,
      titleAfterOptionalWhitespace,
      types.whitespace
    )(code) : titleAfterOptionalWhitespace(code);
  }
  // Only an EOL or EOF may follow the title.
  function titleAfterOptionalWhitespace(code) {
    return code === codes.eof || markdownLineEnding(code) ? ok3(code) : nok(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/hard-break-escape.js
|
||
// The `hardBreakEscape` construct: a backslash at the end of a line.
var hardBreakEscape = {
  name: "hardBreakEscape",
  tokenize: tokenizeHardBreakEscape
};
|
||
/**
 * Tokenize a hard break created by a backslash immediately before a line
 * ending.
 *
 * @param {Object} effects
 *   Tokenizer effects.
 * @param {Function} ok3
 *   Success continuation.
 * @param {Function} nok
 *   Failure continuation.
 * @returns {Function}
 *   Initial state.
 */
function tokenizeHardBreakEscape(effects, ok3, nok) {
  return begin;

  // Consume the `\` that may form a hard break.
  function begin(code) {
    ok(code === codes.backslash, "expected `\\`");
    effects.enter(types.hardBreakEscape);
    effects.consume(code);
    return atEol;
  }

  // Succeed only when the escape is followed directly by a line ending.
  function atEol(code) {
    if (!markdownLineEnding(code)) {
      return nok(code);
    }
    effects.exit(types.hardBreakEscape);
    return ok3(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/heading-atx.js
|
||
// The `headingAtx` construct: `# Heading`.
var headingAtx = {
  name: "headingAtx",
  resolve: resolveHeadingAtx,
  tokenize: tokenizeHeadingAtx
};
|
||
/**
 * Resolve an ATX heading: trim surrounding whitespace and any closing `#`
 * sequence, then wrap the remaining events in `atxHeadingText` plus a
 * `chunkText` to be subtokenized as text.
 *
 * @param {Array} events
 *   Heading events (mutated).
 * @param {Object} context
 *   Tokenize context.
 * @returns {Array}
 *   The same (mutated) event list.
 */
function resolveHeadingAtx(events, context) {
  // Last event before the heading's exit pair.
  let contentEnd = events.length - 2;
  // First event after the opening sequence (enter heading, enter/exit seq).
  let contentStart = 3;
  let content2;
  let text;
  // Skip whitespace after the opening sequence.
  if (events[contentStart][1].type === types.whitespace) {
    contentStart += 2;
  }
  // Skip trailing whitespace.
  if (contentEnd - 2 > contentStart && events[contentEnd][1].type === types.whitespace) {
    contentEnd -= 2;
  }
  // Drop a closing `#` sequence (and the whitespace before it, if any).
  if (events[contentEnd][1].type === types.atxHeadingSequence && (contentStart === contentEnd - 1 || contentEnd - 4 > contentStart && events[contentEnd - 2][1].type === types.whitespace)) {
    contentEnd -= contentStart + 1 === contentEnd ? 2 : 4;
  }
  if (contentEnd > contentStart) {
    content2 = {
      type: types.atxHeadingText,
      start: events[contentStart][1].start,
      end: events[contentEnd][1].end
    };
    text = {
      type: types.chunkText,
      start: events[contentStart][1].start,
      end: events[contentEnd][1].end,
      contentType: constants.contentTypeText
    };
    splice(events, contentStart, contentEnd - contentStart + 1, [
      ["enter", content2, context],
      ["enter", text, context],
      ["exit", text, context],
      ["exit", content2, context]
    ]);
  }
  return events;
}
|
||
/**
 * Tokenize an ATX heading: up to six `#`, then optional text, then an
 * optional closing `#` sequence.
 *
 * @param {Object} effects
 *   Tokenizer effects.
 * @param {Function} ok3
 *   Success continuation.
 * @param {Function} nok
 *   Failure continuation.
 * @returns {Function}
 *   Initial state.
 */
function tokenizeHeadingAtx(effects, ok3, nok) {
  // Number of `#` in the opening sequence (capped by the constant below).
  let size = 0;
  return start;
  function start(code) {
    effects.enter(types.atxHeading);
    return before(code);
  }
  // At the first `#`.
  function before(code) {
    ok(code === codes.numberSign, "expected `#`");
    effects.enter(types.atxHeadingSequence);
    return sequenceOpen(code);
  }
  // In the opening `#` sequence.
  function sequenceOpen(code) {
    if (code === codes.numberSign && size++ < constants.atxHeadingOpeningFenceSizeMax) {
      effects.consume(code);
      return sequenceOpen;
    }
    // The sequence must be followed by whitespace or the end of the line.
    if (code === codes.eof || markdownLineEndingOrSpace(code)) {
      effects.exit(types.atxHeadingSequence);
      return atBreak(code);
    }
    return nok(code);
  }
  // Between pieces of the heading (sequences, whitespace, text).
  function atBreak(code) {
    if (code === codes.numberSign) {
      effects.enter(types.atxHeadingSequence);
      return sequenceFurther(code);
    }
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.atxHeading);
      return ok3(code);
    }
    if (markdownSpace(code)) {
      return factorySpace(effects, atBreak, types.whitespace)(code);
    }
    effects.enter(types.atxHeadingText);
    return data(code);
  }
  // In a further (potentially closing) `#` sequence.
  function sequenceFurther(code) {
    if (code === codes.numberSign) {
      effects.consume(code);
      return sequenceFurther;
    }
    effects.exit(types.atxHeadingSequence);
    return atBreak(code);
  }
  // In heading text.
  function data(code) {
    if (code === codes.eof || code === codes.numberSign || markdownLineEndingOrSpace(code)) {
      effects.exit(types.atxHeadingText);
      return atBreak(code);
    }
    effects.consume(code);
    return data;
  }
}
|
||
|
||
// node_modules/micromark-util-html-tag-name/index.js
|
||
// Tag names whose presence after `<` or `</` starts "basic" HTML (flow)
// blocks (kind 6 in CommonMark terms).
var htmlBlockNames = (
  "address article aside base basefont blockquote body caption center col " +
  "colgroup dd details dialog dir div dl dt fieldset figcaption figure " +
  "footer form frame frameset h1 h2 h3 h4 h5 h6 head header hr html iframe " +
  "legend li link main menu menuitem nav noframes ol optgroup option p " +
  "param search section summary table tbody td tfoot th thead title tr " +
  "track ul"
).split(" ");
|
||
// Tag names that start "raw" HTML (flow) blocks, whose content is not
// parsed as markdown (kind 1 in CommonMark terms).
var htmlRawNames = "pre script style textarea".split(" ");
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/html-flow.js
|
||
// The `htmlFlow` construct: HTML blocks at the flow level.
var htmlFlow = {
  concrete: true,
  name: "htmlFlow",
  resolveTo: resolveToHtmlFlow,
  tokenize: tokenizeHtmlFlow
};
// Partial construct: checks whether the next line is blank.
var blankLineBefore = { partial: true, tokenize: tokenizeBlankLineBefore };
// Partial construct: checks that the next line is a non-lazy continuation.
var nonLazyContinuationStart = {
  partial: true,
  tokenize: tokenizeNonLazyContinuationStart
};
|
||
/**
 * Resolve up to an HTML (flow) block: fold a preceding line prefix into the
 * block so the block starts at the beginning of the line.
 *
 * @param {Array} events
 *   Events (mutated).
 * @returns {Array}
 *   The same (mutated) event list.
 */
function resolveToHtmlFlow(events) {
  let index = events.length;
  // Find the most recent `enter` of an htmlFlow token.
  while (index--) {
    if (events[index][0] === "enter" && events[index][1].type === types.htmlFlow) {
      break;
    }
  }
  // If a line prefix directly precedes it, absorb the prefix into the block.
  if (index > 1 && events[index - 2][1].type === types.linePrefix) {
    events[index][1].start = events[index - 2][1].start;
    events[index + 1][1].start = events[index - 2][1].start;
    events.splice(index - 2, 2);
  }
  return events;
}
|
||
/**
 * Tokenizer for HTML (flow).
 *
 * Classifies the block into one of the HTML block kinds (comment,
 * instruction, declaration, CDATA, raw, basic, complete — see the
 * `constants.html*` values stored in `marker`), then consumes
 * continuation lines until the kind-specific closing condition, or until
 * a blank line for the basic/complete kinds.
 *
 * @param {Object} effects - Tokenizer effects (enter/exit/consume/check).
 * @param {Function} ok3 - State to move to on success.
 * @param {Function} nok - State to move to on failure.
 * @returns {Function} Initial state.
 */
function tokenizeHtmlFlow(effects, ok3, nok) {
  const self = this;
  // Kind of HTML block (one of the `constants.html*` values).
  let marker;
  // Whether the opening tag is a closing tag (`</...`).
  let closingTag;
  // Collected tag name, compared against raw/basic tag-name lists.
  let buffer;
  // Index into `constants.cdataOpeningString` while matching `<![CDATA[`.
  let index;
  // Quote character of the attribute value currently being read.
  let markerB;
  return start;
  // At `<`.
  function start(code) {
    return before(code);
  }
  function before(code) {
    ok(code === codes.lessThan, "expected `<`");
    effects.enter(types.htmlFlow);
    effects.enter(types.htmlFlowData);
    effects.consume(code);
    return open;
  }
  // After `<`: decide between declaration, closing tag, instruction, or tag.
  function open(code) {
    if (code === codes.exclamationMark) {
      effects.consume(code);
      return declarationOpen;
    }
    if (code === codes.slash) {
      effects.consume(code);
      closingTag = true;
      return tagCloseStart;
    }
    if (code === codes.questionMark) {
      effects.consume(code);
      marker = constants.htmlInstruction;
      // When interrupting, the opener alone is enough to accept the block.
      return self.interrupt ? ok3 : continuationDeclarationInside;
    }
    if (asciiAlpha(code)) {
      ok(code !== null);
      effects.consume(code);
      buffer = String.fromCharCode(code);
      return tagName;
    }
    return nok(code);
  }
  // After `<!`: comment (`--`), CDATA (`[`), or declaration (letter).
  function declarationOpen(code) {
    if (code === codes.dash) {
      effects.consume(code);
      marker = constants.htmlComment;
      return commentOpenInside;
    }
    if (code === codes.leftSquareBracket) {
      effects.consume(code);
      marker = constants.htmlCdata;
      index = 0;
      return cdataOpenInside;
    }
    if (asciiAlpha(code)) {
      effects.consume(code);
      marker = constants.htmlDeclaration;
      return self.interrupt ? ok3 : continuationDeclarationInside;
    }
    return nok(code);
  }
  // After `<!-`: require the second dash.
  function commentOpenInside(code) {
    if (code === codes.dash) {
      effects.consume(code);
      return self.interrupt ? ok3 : continuationDeclarationInside;
    }
    return nok(code);
  }
  // Match the rest of the CDATA opening string character by character.
  function cdataOpenInside(code) {
    const value = constants.cdataOpeningString;
    if (code === value.charCodeAt(index++)) {
      effects.consume(code);
      if (index === value.length) {
        return self.interrupt ? ok3 : continuation;
      }
      return cdataOpenInside;
    }
    return nok(code);
  }
  // After `</`: a tag name must start with a letter.
  function tagCloseStart(code) {
    if (asciiAlpha(code)) {
      ok(code !== null);
      effects.consume(code);
      buffer = String.fromCharCode(code);
      return tagName;
    }
    return nok(code);
  }
  // Inside the tag name; on a boundary, classify raw/basic/complete.
  function tagName(code) {
    if (code === codes.eof || code === codes.slash || code === codes.greaterThan || markdownLineEndingOrSpace(code)) {
      const slash = code === codes.slash;
      const name = buffer.toLowerCase();
      // Raw tags (pre/script/style/textarea) only as opening, non-self-closing tags.
      if (!slash && !closingTag && htmlRawNames.includes(name)) {
        marker = constants.htmlRaw;
        return self.interrupt ? ok3(code) : continuation(code);
      }
      if (htmlBlockNames.includes(buffer.toLowerCase())) {
        marker = constants.htmlBasic;
        if (slash) {
          effects.consume(code);
          return basicSelfClosing;
        }
        return self.interrupt ? ok3(code) : continuation(code);
      }
      marker = constants.htmlComplete;
      // Complete tags cannot interrupt a paragraph (except on lazy lines).
      return self.interrupt && !self.parser.lazy[self.now().line] ? nok(code) : closingTag ? completeClosingTagAfter(code) : completeAttributeNameBefore(code);
    }
    if (code === codes.dash || asciiAlphanumeric(code)) {
      effects.consume(code);
      buffer += String.fromCharCode(code);
      return tagName;
    }
    return nok(code);
  }
  // After `/` in a basic tag: require `>`.
  function basicSelfClosing(code) {
    if (code === codes.greaterThan) {
      effects.consume(code);
      return self.interrupt ? ok3 : continuation;
    }
    return nok(code);
  }
  // After a complete closing tag name: optional spaces, then `>`.
  function completeClosingTagAfter(code) {
    if (markdownSpace(code)) {
      effects.consume(code);
      return completeClosingTagAfter;
    }
    return completeEnd(code);
  }
  // Before an attribute name (or `/`/`>`) in a complete opening tag.
  function completeAttributeNameBefore(code) {
    if (code === codes.slash) {
      effects.consume(code);
      return completeEnd;
    }
    if (code === codes.colon || code === codes.underscore || asciiAlpha(code)) {
      effects.consume(code);
      return completeAttributeName;
    }
    if (markdownSpace(code)) {
      effects.consume(code);
      return completeAttributeNameBefore;
    }
    return completeEnd(code);
  }
  // Inside an attribute name.
  function completeAttributeName(code) {
    if (code === codes.dash || code === codes.dot || code === codes.colon || code === codes.underscore || asciiAlphanumeric(code)) {
      effects.consume(code);
      return completeAttributeName;
    }
    return completeAttributeNameAfter(code);
  }
  // After an attribute name: optional `=` introduces a value.
  function completeAttributeNameAfter(code) {
    if (code === codes.equalsTo) {
      effects.consume(code);
      return completeAttributeValueBefore;
    }
    if (markdownSpace(code)) {
      effects.consume(code);
      return completeAttributeNameAfter;
    }
    return completeAttributeNameBefore(code);
  }
  // After `=`: quoted or unquoted value; some characters are forbidden.
  function completeAttributeValueBefore(code) {
    if (code === codes.eof || code === codes.lessThan || code === codes.equalsTo || code === codes.greaterThan || code === codes.graveAccent) {
      return nok(code);
    }
    if (code === codes.quotationMark || code === codes.apostrophe) {
      effects.consume(code);
      markerB = code;
      return completeAttributeValueQuoted;
    }
    if (markdownSpace(code)) {
      effects.consume(code);
      return completeAttributeValueBefore;
    }
    return completeAttributeValueUnquoted(code);
  }
  // Inside a quoted value; EOF/EOL is invalid here.
  function completeAttributeValueQuoted(code) {
    if (code === markerB) {
      effects.consume(code);
      markerB = null;
      return completeAttributeValueQuotedAfter;
    }
    if (code === codes.eof || markdownLineEnding(code)) {
      return nok(code);
    }
    effects.consume(code);
    return completeAttributeValueQuoted;
  }
  // Inside an unquoted value; ends on whitespace or delimiter characters.
  function completeAttributeValueUnquoted(code) {
    if (code === codes.eof || code === codes.quotationMark || code === codes.apostrophe || code === codes.slash || code === codes.lessThan || code === codes.equalsTo || code === codes.greaterThan || code === codes.graveAccent || markdownLineEndingOrSpace(code)) {
      return completeAttributeNameAfter(code);
    }
    effects.consume(code);
    return completeAttributeValueUnquoted;
  }
  // After a quoted value: must be followed by space, `/`, or `>`.
  function completeAttributeValueQuotedAfter(code) {
    if (code === codes.slash || code === codes.greaterThan || markdownSpace(code)) {
      return completeAttributeNameBefore(code);
    }
    return nok(code);
  }
  // Expect `>` to end the complete tag.
  function completeEnd(code) {
    if (code === codes.greaterThan) {
      effects.consume(code);
      return completeAfter;
    }
    return nok(code);
  }
  // After `>` in a complete tag: only whitespace may follow on this line.
  function completeAfter(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      return continuation(code);
    }
    if (markdownSpace(code)) {
      effects.consume(code);
      return completeAfter;
    }
    return nok(code);
  }
  // Main content loop: look for the kind-specific closing sequence.
  function continuation(code) {
    if (code === codes.dash && marker === constants.htmlComment) {
      effects.consume(code);
      return continuationCommentInside;
    }
    if (code === codes.lessThan && marker === constants.htmlRaw) {
      effects.consume(code);
      return continuationRawTagOpen;
    }
    if (code === codes.greaterThan && marker === constants.htmlDeclaration) {
      effects.consume(code);
      return continuationClose;
    }
    if (code === codes.questionMark && marker === constants.htmlInstruction) {
      effects.consume(code);
      return continuationDeclarationInside;
    }
    if (code === codes.rightSquareBracket && marker === constants.htmlCdata) {
      effects.consume(code);
      return continuationCdataInside;
    }
    // Basic/complete blocks end before a blank line.
    if (markdownLineEnding(code) && (marker === constants.htmlBasic || marker === constants.htmlComplete)) {
      effects.exit(types.htmlFlowData);
      return effects.check(
        blankLineBefore,
        continuationAfter,
        continuationStart
      )(code);
    }
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.htmlFlowData);
      return continuationStart(code);
    }
    effects.consume(code);
    return continuation;
  }
  // At an EOL: continue only if the next line is not lazy.
  function continuationStart(code) {
    return effects.check(
      nonLazyContinuationStart,
      continuationStartNonLazy,
      continuationAfter
    )(code);
  }
  function continuationStartNonLazy(code) {
    ok(markdownLineEnding(code));
    effects.enter(types.lineEnding);
    effects.consume(code);
    effects.exit(types.lineEnding);
    return continuationBefore;
  }
  // At the start of a continuation line.
  function continuationBefore(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      return continuationStart(code);
    }
    effects.enter(types.htmlFlowData);
    return continuation(code);
  }
  // In a comment, after `-`: `--` leads toward `-->`.
  function continuationCommentInside(code) {
    if (code === codes.dash) {
      effects.consume(code);
      return continuationDeclarationInside;
    }
    return continuation(code);
  }
  // In raw content, after `<`: `</` may start the closing raw tag.
  function continuationRawTagOpen(code) {
    if (code === codes.slash) {
      effects.consume(code);
      buffer = "";
      return continuationRawEndTag;
    }
    return continuation(code);
  }
  // Collect the potential closing raw tag name and match it on `>`.
  function continuationRawEndTag(code) {
    if (code === codes.greaterThan) {
      const name = buffer.toLowerCase();
      if (htmlRawNames.includes(name)) {
        effects.consume(code);
        return continuationClose;
      }
      return continuation(code);
    }
    if (asciiAlpha(code) && buffer.length < constants.htmlRawSizeMax) {
      ok(code !== null);
      effects.consume(code);
      buffer += String.fromCharCode(code);
      return continuationRawEndTag;
    }
    return continuation(code);
  }
  // In CDATA, after `]`: `]]` leads toward `]]>`.
  function continuationCdataInside(code) {
    if (code === codes.rightSquareBracket) {
      effects.consume(code);
      return continuationDeclarationInside;
    }
    return continuation(code);
  }
  // Shared closer for comment/instruction/CDATA: `>` ends the block.
  function continuationDeclarationInside(code) {
    if (code === codes.greaterThan) {
      effects.consume(code);
      return continuationClose;
    }
    // More dashes stay inside a comment closing sequence.
    if (code === codes.dash && marker === constants.htmlComment) {
      effects.consume(code);
      return continuationDeclarationInside;
    }
    return continuation(code);
  }
  // After the closing sequence: consume the rest of the line.
  function continuationClose(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.htmlFlowData);
      return continuationAfter(code);
    }
    effects.consume(code);
    return continuationClose;
  }
  function continuationAfter(code) {
    effects.exit(types.htmlFlow);
    return ok3(code);
  }
}
|
||
/**
 * Partial tokenizer: succeed at a line ending whose following line is not
 * a lazy continuation line; fail otherwise.
 */
function tokenizeNonLazyContinuationStart(effects, ok3, nok) {
  const context = this;
  return lineStart;
  /** Expect an EOL; anything else fails immediately. */
  function lineStart(code) {
    if (!markdownLineEnding(code)) {
      return nok(code);
    }
    effects.enter(types.lineEnding);
    effects.consume(code);
    effects.exit(types.lineEnding);
    return lineAfter;
  }
  /** After the EOL: reject lazy lines. */
  function lineAfter(code) {
    if (context.parser.lazy[context.now().line]) {
      return nok(code);
    }
    return ok3(code);
  }
}
|
||
/**
 * Partial tokenizer: consume one line ending, then succeed only if the
 * next line is blank (via the `blankLine` construct).
 */
function tokenizeBlankLineBefore(effects, ok3, nok) {
  return atLineEnding;
  /** At the EOL preceding the possibly-blank line. */
  function atLineEnding(code) {
    ok(markdownLineEnding(code), "expected a line ending");
    effects.enter(types.lineEnding);
    effects.consume(code);
    effects.exit(types.lineEnding);
    return effects.attempt(blankLine, ok3, nok);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/html-text.js
// Construct for raw HTML in text (inline) position.
var htmlText = { name: "htmlText", tokenize: tokenizeHtmlText };
|
||
/**
 * Tokenizer for HTML (text): a single inline tag, comment, instruction,
 * declaration, or CDATA section. Unlike HTML (flow), content may span
 * lines — `returnState` remembers where to resume after an EOL.
 *
 * @param {Object} effects - Tokenizer effects.
 * @param {Function} ok3 - Success state.
 * @param {Function} nok - Failure state.
 * @returns {Function} Initial state.
 */
function tokenizeHtmlText(effects, ok3, nok) {
  const self = this;
  // Quote character of the attribute value being read.
  let marker;
  // Index into the CDATA opening string.
  let index;
  // State to resume after handling a line ending.
  let returnState;
  return start;
  // At `<`.
  function start(code) {
    ok(code === codes.lessThan, "expected `<`");
    effects.enter(types.htmlText);
    effects.enter(types.htmlTextData);
    effects.consume(code);
    return open;
  }
  // After `<`: declaration-ish (`!`), closing tag (`/`), instruction (`?`), or tag.
  function open(code) {
    if (code === codes.exclamationMark) {
      effects.consume(code);
      return declarationOpen;
    }
    if (code === codes.slash) {
      effects.consume(code);
      return tagCloseStart;
    }
    if (code === codes.questionMark) {
      effects.consume(code);
      return instruction;
    }
    if (asciiAlpha(code)) {
      effects.consume(code);
      return tagOpen;
    }
    return nok(code);
  }
  // After `<!`: comment, CDATA, or declaration.
  function declarationOpen(code) {
    if (code === codes.dash) {
      effects.consume(code);
      return commentOpenInside;
    }
    if (code === codes.leftSquareBracket) {
      effects.consume(code);
      index = 0;
      return cdataOpenInside;
    }
    if (asciiAlpha(code)) {
      effects.consume(code);
      return declaration;
    }
    return nok(code);
  }
  // After `<!-`: require the second dash.
  function commentOpenInside(code) {
    if (code === codes.dash) {
      effects.consume(code);
      return commentEnd;
    }
    return nok(code);
  }
  // Inside a comment; EOF fails, EOL suspends via `returnState`.
  function comment(code) {
    if (code === codes.eof) {
      return nok(code);
    }
    if (code === codes.dash) {
      effects.consume(code);
      return commentClose;
    }
    if (markdownLineEnding(code)) {
      returnState = comment;
      return lineEndingBefore(code);
    }
    effects.consume(code);
    return comment;
  }
  // After `-` inside a comment.
  function commentClose(code) {
    if (code === codes.dash) {
      effects.consume(code);
      return commentEnd;
    }
    return comment(code);
  }
  // After `--`: `>` closes, another `-` stays in the closing run.
  function commentEnd(code) {
    return code === codes.greaterThan ? end(code) : code === codes.dash ? commentClose(code) : comment(code);
  }
  // Match the CDATA opening string character by character.
  function cdataOpenInside(code) {
    const value = constants.cdataOpeningString;
    if (code === value.charCodeAt(index++)) {
      effects.consume(code);
      return index === value.length ? cdata : cdataOpenInside;
    }
    return nok(code);
  }
  // Inside CDATA content.
  function cdata(code) {
    if (code === codes.eof) {
      return nok(code);
    }
    if (code === codes.rightSquareBracket) {
      effects.consume(code);
      return cdataClose;
    }
    if (markdownLineEnding(code)) {
      returnState = cdata;
      return lineEndingBefore(code);
    }
    effects.consume(code);
    return cdata;
  }
  // After `]` inside CDATA.
  function cdataClose(code) {
    if (code === codes.rightSquareBracket) {
      effects.consume(code);
      return cdataEnd;
    }
    return cdata(code);
  }
  // After `]]`: `>` closes, another `]` keeps the closing run alive.
  function cdataEnd(code) {
    if (code === codes.greaterThan) {
      return end(code);
    }
    if (code === codes.rightSquareBracket) {
      effects.consume(code);
      return cdataEnd;
    }
    return cdata(code);
  }
  // Inside a declaration; `>` or EOF ends it.
  function declaration(code) {
    if (code === codes.eof || code === codes.greaterThan) {
      return end(code);
    }
    if (markdownLineEnding(code)) {
      returnState = declaration;
      return lineEndingBefore(code);
    }
    effects.consume(code);
    return declaration;
  }
  // Inside an instruction (`<? ... ?>`).
  function instruction(code) {
    if (code === codes.eof) {
      return nok(code);
    }
    if (code === codes.questionMark) {
      effects.consume(code);
      return instructionClose;
    }
    if (markdownLineEnding(code)) {
      returnState = instruction;
      return lineEndingBefore(code);
    }
    effects.consume(code);
    return instruction;
  }
  // After `?`: `>` closes the instruction.
  function instructionClose(code) {
    return code === codes.greaterThan ? end(code) : instruction(code);
  }
  // After `</`: a letter starts the closing tag name.
  function tagCloseStart(code) {
    if (asciiAlpha(code)) {
      effects.consume(code);
      return tagClose;
    }
    return nok(code);
  }
  // Inside the closing tag name.
  function tagClose(code) {
    if (code === codes.dash || asciiAlphanumeric(code)) {
      effects.consume(code);
      return tagClose;
    }
    return tagCloseBetween(code);
  }
  // After the closing tag name: optional whitespace, then `>`.
  function tagCloseBetween(code) {
    if (markdownLineEnding(code)) {
      returnState = tagCloseBetween;
      return lineEndingBefore(code);
    }
    if (markdownSpace(code)) {
      effects.consume(code);
      return tagCloseBetween;
    }
    return end(code);
  }
  // Inside the opening tag name.
  function tagOpen(code) {
    if (code === codes.dash || asciiAlphanumeric(code)) {
      effects.consume(code);
      return tagOpen;
    }
    if (code === codes.slash || code === codes.greaterThan || markdownLineEndingOrSpace(code)) {
      return tagOpenBetween(code);
    }
    return nok(code);
  }
  // Between attributes in an opening tag.
  function tagOpenBetween(code) {
    if (code === codes.slash) {
      effects.consume(code);
      return end;
    }
    if (code === codes.colon || code === codes.underscore || asciiAlpha(code)) {
      effects.consume(code);
      return tagOpenAttributeName;
    }
    if (markdownLineEnding(code)) {
      returnState = tagOpenBetween;
      return lineEndingBefore(code);
    }
    if (markdownSpace(code)) {
      effects.consume(code);
      return tagOpenBetween;
    }
    return end(code);
  }
  // Inside an attribute name.
  function tagOpenAttributeName(code) {
    if (code === codes.dash || code === codes.dot || code === codes.colon || code === codes.underscore || asciiAlphanumeric(code)) {
      effects.consume(code);
      return tagOpenAttributeName;
    }
    return tagOpenAttributeNameAfter(code);
  }
  // After an attribute name: optional `=` introduces a value.
  function tagOpenAttributeNameAfter(code) {
    if (code === codes.equalsTo) {
      effects.consume(code);
      return tagOpenAttributeValueBefore;
    }
    if (markdownLineEnding(code)) {
      returnState = tagOpenAttributeNameAfter;
      return lineEndingBefore(code);
    }
    if (markdownSpace(code)) {
      effects.consume(code);
      return tagOpenAttributeNameAfter;
    }
    return tagOpenBetween(code);
  }
  // After `=`: quoted or unquoted value; some characters are forbidden.
  function tagOpenAttributeValueBefore(code) {
    if (code === codes.eof || code === codes.lessThan || code === codes.equalsTo || code === codes.greaterThan || code === codes.graveAccent) {
      return nok(code);
    }
    if (code === codes.quotationMark || code === codes.apostrophe) {
      effects.consume(code);
      marker = code;
      return tagOpenAttributeValueQuoted;
    }
    if (markdownLineEnding(code)) {
      returnState = tagOpenAttributeValueBefore;
      return lineEndingBefore(code);
    }
    if (markdownSpace(code)) {
      effects.consume(code);
      return tagOpenAttributeValueBefore;
    }
    effects.consume(code);
    return tagOpenAttributeValueUnquoted;
  }
  // Inside a quoted attribute value.
  function tagOpenAttributeValueQuoted(code) {
    if (code === marker) {
      effects.consume(code);
      marker = void 0;
      return tagOpenAttributeValueQuotedAfter;
    }
    if (code === codes.eof) {
      return nok(code);
    }
    if (markdownLineEnding(code)) {
      returnState = tagOpenAttributeValueQuoted;
      return lineEndingBefore(code);
    }
    effects.consume(code);
    return tagOpenAttributeValueQuoted;
  }
  // Inside an unquoted attribute value.
  function tagOpenAttributeValueUnquoted(code) {
    if (code === codes.eof || code === codes.quotationMark || code === codes.apostrophe || code === codes.lessThan || code === codes.equalsTo || code === codes.graveAccent) {
      return nok(code);
    }
    if (code === codes.slash || code === codes.greaterThan || markdownLineEndingOrSpace(code)) {
      return tagOpenBetween(code);
    }
    effects.consume(code);
    return tagOpenAttributeValueUnquoted;
  }
  // After a quoted value: must be followed by space, `/`, or `>`.
  function tagOpenAttributeValueQuotedAfter(code) {
    if (code === codes.slash || code === codes.greaterThan || markdownLineEndingOrSpace(code)) {
      return tagOpenBetween(code);
    }
    return nok(code);
  }
  // Expect `>` to finish the whole HTML (text) span.
  function end(code) {
    if (code === codes.greaterThan) {
      effects.consume(code);
      effects.exit(types.htmlTextData);
      effects.exit(types.htmlText);
      return ok3;
    }
    return nok(code);
  }
  // Suspend data, tokenize the EOL, then resume at `returnState`.
  function lineEndingBefore(code) {
    ok(returnState, "expected return state");
    ok(markdownLineEnding(code), "expected eol");
    effects.exit(types.htmlTextData);
    effects.enter(types.lineEnding);
    effects.consume(code);
    effects.exit(types.lineEnding);
    return lineEndingAfter;
  }
  // After the EOL: optionally eat a line prefix (unless codeIndented is disabled).
  function lineEndingAfter(code) {
    ok(
      self.parser.constructs.disable.null,
      "expected `disable.null` to be populated"
    );
    return markdownSpace(code) ? factorySpace(
      effects,
      lineEndingAfterPrefix,
      types.linePrefix,
      self.parser.constructs.disable.null.includes("codeIndented") ? void 0 : constants.tabSize
    )(code) : lineEndingAfterPrefix(code);
  }
  // Re-enter data and resume where we left off.
  function lineEndingAfterPrefix(code) {
    effects.enter(types.htmlTextData);
    return returnState(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/label-end.js
// Construct for the `]` (and optional resource/reference) that closes a
// link or image label.
var labelEnd = {
  name: "labelEnd",
  resolveAll: resolveAllLabelEnd,
  resolveTo: resolveToLabelEnd,
  tokenize: tokenizeLabelEnd
};
// `(destination "title")` resource directly after a label end.
var resourceConstruct = { tokenize: tokenizeResource };
// `[label]` full reference directly after a label end.
var referenceFullConstruct = { tokenize: tokenizeReferenceFull };
// `[]` collapsed reference directly after a label end.
var referenceCollapsedConstruct = { tokenize: tokenizeReferenceCollapsed };
|
||
/**
 * Resolver run at the end: turn leftover (unmatched) label starts/ends
 * into plain data tokens, dropping their inner marker events.
 *
 * @param {Array} events - Event list to adjust in place.
 * @returns {Array} The same (possibly spliced) event list.
 */
function resolveAllLabelEnd(events) {
  const kept = [];
  for (let position = 0; position < events.length; position++) {
    const token = events[position][1];
    kept.push(events[position]);
    if (
      token.type === types.labelImage ||
      token.type === types.labelLink ||
      token.type === types.labelEnd
    ) {
      // `![` has four inner events to skip, `[` and `]` have two —
      // decide before the type is overwritten below.
      const skip = token.type === types.labelImage ? 4 : 2;
      token.type = types.data;
      position += skip;
    }
  }
  // Only splice when something was actually dropped.
  if (events.length !== kept.length) {
    splice(events, 0, events.length, kept);
  }
  return events;
}
|
||
/**
 * Resolver for a matched label: rebuild the event list around the opening
 * (`[`/`![`) and closing (`]` …) pair into `link`/`image`, `label`, and
 * `labelText` tokens, resolving inline constructs inside the label text.
 *
 * @param {Array} events - Event list.
 * @param {Object} context - Tokenize context (used for `insideSpan` resolvers).
 * @returns {Array} The adjusted event list.
 */
function resolveToLabelEnd(events, context) {
  let index = events.length;
  // Extra events in an image opening (`![` has 2 more than `[`).
  let offset = 0;
  let token;
  // Index of the opening label-start enter event.
  let open;
  // Index of the closing `labelEnd` event.
  let close;
  let media;
  // Walk backwards: find the label end, then its nearest usable start.
  while (index--) {
    token = events[index][1];
    if (open) {
      // Stop at an enclosing link or a deactivated link start
      // (links cannot contain links).
      if (token.type === types.link || token.type === types.labelLink && token._inactive) {
        break;
      }
      // Deactivate any further link starts inside this construct.
      if (events[index][0] === "enter" && token.type === types.labelLink) {
        token._inactive = true;
      }
    } else if (close) {
      if (events[index][0] === "enter" && (token.type === types.labelImage || token.type === types.labelLink) && !token._balanced) {
        open = index;
        if (token.type !== types.labelLink) {
          offset = 2;
          break;
        }
      }
    } else if (token.type === types.labelEnd) {
      close = index;
    }
  }
  ok(open !== void 0, "`open` is supposed to be found");
  ok(close !== void 0, "`close` is supposed to be found");
  // Whole media token spanning from the opening marker to the very end.
  const group = {
    type: events[open][1].type === types.labelLink ? types.link : types.image,
    start: { ...events[open][1].start },
    end: { ...events[events.length - 1][1].end }
  };
  // `[` … `]` including markers.
  const label = {
    type: types.label,
    start: { ...events[open][1].start },
    end: { ...events[close][1].end }
  };
  // Text between the markers.
  const text = {
    type: types.labelText,
    start: { ...events[open + offset + 2][1].end },
    end: { ...events[close - 2][1].start }
  };
  media = [
    ["enter", group, context],
    ["enter", label, context]
  ];
  // Keep the original opening marker events.
  media = push(media, events.slice(open + 1, open + offset + 3));
  media = push(media, [["enter", text, context]]);
  ok(
    context.parser.constructs.insideSpan.null,
    "expected `insideSpan.null` to be populated"
  );
  // Re-resolve inline constructs inside the label text.
  media = push(
    media,
    resolveAll(
      context.parser.constructs.insideSpan.null,
      events.slice(open + offset + 4, close - 3),
      context
    )
  );
  media = push(media, [
    ["exit", text, context],
    events[close - 2],
    events[close - 1],
    ["exit", label, context]
  ]);
  // Resource/reference events after the label end, then close the group.
  media = push(media, events.slice(close + 1));
  media = push(media, [["exit", group, context]]);
  splice(events, open, events.length, media);
  return events;
}
|
||
/**
 * Tokenizer for a label end: `]`, optionally followed by a resource
 * `(...)`, a full reference `[...]`, or nothing (shortcut reference).
 * Fails unless an unbalanced, active label start exists earlier.
 */
function tokenizeLabelEnd(effects, ok3, nok) {
  const self = this;
  let index = self.events.length;
  // Nearest unbalanced `labelImage`/`labelLink` token, if any.
  let labelStart;
  // Whether the label identifier is a defined reference.
  let defined;
  while (index--) {
    if ((self.events[index][1].type === types.labelImage || self.events[index][1].type === types.labelLink) && !self.events[index][1]._balanced) {
      labelStart = self.events[index][1];
      break;
    }
  }
  return start;
  function start(code) {
    ok(code === codes.rightSquareBracket, "expected `]`");
    if (!labelStart) {
      return nok(code);
    }
    // An inactive start (link-in-link) must still be marked balanced so it
    // is not considered again.
    if (labelStart._inactive) {
      return labelEndNok(code);
    }
    defined = self.parser.defined.includes(
      normalizeIdentifier(
        self.sliceSerialize({ start: labelStart.end, end: self.now() })
      )
    );
    effects.enter(types.labelEnd);
    effects.enter(types.labelMarker);
    effects.consume(code);
    effects.exit(types.labelMarker);
    effects.exit(types.labelEnd);
    return after;
  }
  // After `]`: try resource, full reference, or fall back to shortcut.
  function after(code) {
    if (code === codes.leftParenthesis) {
      return effects.attempt(
        resourceConstruct,
        labelEndOk,
        defined ? labelEndOk : labelEndNok
      )(code);
    }
    if (code === codes.leftSquareBracket) {
      return effects.attempt(
        referenceFullConstruct,
        labelEndOk,
        defined ? referenceNotFull : labelEndNok
      )(code);
    }
    return defined ? labelEndOk(code) : labelEndNok(code);
  }
  // `[` seen but not a valid full reference: try collapsed `[]`.
  function referenceNotFull(code) {
    return effects.attempt(
      referenceCollapsedConstruct,
      labelEndOk,
      labelEndNok
    )(code);
  }
  function labelEndOk(code) {
    return ok3(code);
  }
  // On failure, mark the start balanced so it is skipped next time.
  function labelEndNok(code) {
    labelStart._balanced = true;
    return nok(code);
  }
}
|
||
/**
 * Tokenizer for a resource: `(destination)` with optional whitespace and
 * an optional quoted/parenthesized title, as in `[text](url "title")`.
 */
function tokenizeResource(effects, ok3, nok) {
  return resourceStart;
  function resourceStart(code) {
    ok(code === codes.leftParenthesis, "expected left paren");
    effects.enter(types.resource);
    effects.enter(types.resourceMarker);
    effects.consume(code);
    effects.exit(types.resourceMarker);
    return resourceBefore;
  }
  // Optional whitespace before the destination.
  function resourceBefore(code) {
    return markdownLineEndingOrSpace(code) ? factoryWhitespace(effects, resourceOpen)(code) : resourceOpen(code);
  }
  // Either an empty resource `()` or a destination.
  function resourceOpen(code) {
    if (code === codes.rightParenthesis) {
      return resourceEnd(code);
    }
    return factoryDestination(
      effects,
      resourceDestinationAfter,
      resourceDestinationMissing,
      types.resourceDestination,
      types.resourceDestinationLiteral,
      types.resourceDestinationLiteralMarker,
      types.resourceDestinationRaw,
      types.resourceDestinationString,
      constants.linkResourceDestinationBalanceMax
    )(code);
  }
  // Whitespace after the destination may introduce a title.
  function resourceDestinationAfter(code) {
    return markdownLineEndingOrSpace(code) ? factoryWhitespace(effects, resourceBetween)(code) : resourceEnd(code);
  }
  function resourceDestinationMissing(code) {
    return nok(code);
  }
  // Between destination and `)`: an optional title.
  function resourceBetween(code) {
    if (code === codes.quotationMark || code === codes.apostrophe || code === codes.leftParenthesis) {
      return factoryTitle(
        effects,
        resourceTitleAfter,
        nok,
        types.resourceTitle,
        types.resourceTitleMarker,
        types.resourceTitleString
      )(code);
    }
    return resourceEnd(code);
  }
  // Optional whitespace after the title.
  function resourceTitleAfter(code) {
    return markdownLineEndingOrSpace(code) ? factoryWhitespace(effects, resourceEnd)(code) : resourceEnd(code);
  }
  // Expect the closing `)`.
  function resourceEnd(code) {
    if (code === codes.rightParenthesis) {
      effects.enter(types.resourceMarker);
      effects.consume(code);
      effects.exit(types.resourceMarker);
      effects.exit(types.resource);
      return ok3;
    }
    return nok(code);
  }
}
|
||
/**
 * Tokenizer for a full reference (`[label]` after a label end). Succeeds
 * only when the serialized label is a defined identifier.
 */
function tokenizeReferenceFull(effects, ok3, nok) {
  const self = this;
  return referenceFull;
  function referenceFull(code) {
    ok(code === codes.leftSquareBracket, "expected left bracket");
    return factoryLabel.call(
      self,
      effects,
      referenceFullAfter,
      referenceFullMissing,
      types.reference,
      types.referenceMarker,
      types.referenceString
    )(code);
  }
  // Compare the just-parsed label (brackets stripped) against the
  // parser’s defined identifiers.
  function referenceFullAfter(code) {
    return self.parser.defined.includes(
      normalizeIdentifier(
        self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)
      )
    ) ? ok3(code) : nok(code);
  }
  function referenceFullMissing(code) {
    return nok(code);
  }
}
|
||
/**
 * Tokenizer for a collapsed reference: a literal `[]` immediately after a
 * label end.
 */
function tokenizeReferenceCollapsed(effects, ok3, nok) {
  return referenceCollapsedStart;
  function referenceCollapsedStart(code) {
    ok(code === codes.leftSquareBracket, "expected left bracket");
    effects.enter(types.reference);
    effects.enter(types.referenceMarker);
    effects.consume(code);
    effects.exit(types.referenceMarker);
    return referenceCollapsedOpen;
  }
  // Expect the closing `]` right away.
  function referenceCollapsedOpen(code) {
    if (code === codes.rightSquareBracket) {
      effects.enter(types.referenceMarker);
      effects.consume(code);
      effects.exit(types.referenceMarker);
      effects.exit(types.reference);
      return ok3;
    }
    return nok(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/label-start-image.js
// Construct for `![`, the start of an image label; shares the label-end
// resolver so unmatched starts become plain data.
var labelStartImage = {
  name: "labelStartImage",
  resolveAll: labelEnd.resolveAll,
  tokenize: tokenizeLabelStartImage
};
|
||
/**
 * Tokenizer for `![`, the start of an image label.
 */
function tokenizeLabelStartImage(effects, ok3, nok) {
  const context = this;
  return bang;
  /** At `!`. */
  function bang(code) {
    ok(code === codes.exclamationMark, "expected `!`");
    effects.enter(types.labelImage);
    effects.enter(types.labelImageMarker);
    effects.consume(code);
    effects.exit(types.labelImageMarker);
    return bracket;
  }
  /** After `!`: require `[`. */
  function bracket(code) {
    if (code !== codes.leftSquareBracket) {
      return nok(code);
    }
    effects.enter(types.labelMarker);
    effects.consume(code);
    effects.exit(types.labelMarker);
    effects.exit(types.labelImage);
    return done;
  }
  /**
   * After `![`: yield to the (hidden) footnote construct when a caret
   * follows and footnote support is configured.
   */
  function done(code) {
    if (code === codes.caret && "_hiddenFootnoteSupport" in context.parser.constructs) {
      return nok(code);
    }
    return ok3(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/label-start-link.js
// Construct for `[`, the start of a link label; shares the label-end
// resolver so unmatched starts become plain data.
var labelStartLink = {
  name: "labelStartLink",
  resolveAll: labelEnd.resolveAll,
  tokenize: tokenizeLabelStartLink
};
|
||
/**
 * Tokenizer for `[`, the start of a link label.
 */
function tokenizeLabelStartLink(effects, ok3, nok) {
  const context = this;
  return bracket;
  /** At `[`: emit the whole label-start token in one go. */
  function bracket(code) {
    ok(code === codes.leftSquareBracket, "expected `[`");
    effects.enter(types.labelLink);
    effects.enter(types.labelMarker);
    effects.consume(code);
    effects.exit(types.labelMarker);
    effects.exit(types.labelLink);
    return done;
  }
  /**
   * After `[`: yield to the (hidden) footnote construct when a caret
   * follows and footnote support is configured.
   */
  function done(code) {
    if (code === codes.caret && "_hiddenFootnoteSupport" in context.parser.constructs) {
      return nok(code);
    }
    return ok3(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/line-ending.js
// Construct for a line ending plus any following line prefix.
var lineEnding = { name: "lineEnding", tokenize: tokenizeLineEnding };
|
||
/**
 * Tokenizer for a line ending: consume the EOL, then any line prefix.
 */
function tokenizeLineEnding(effects, ok3) {
  return atEol;
  /** At the EOL itself. */
  function atEol(code) {
    ok(markdownLineEnding(code), "expected eol");
    effects.enter(types.lineEnding);
    effects.consume(code);
    effects.exit(types.lineEnding);
    return factorySpace(effects, ok3, types.linePrefix);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/thematic-break.js
// Construct for thematic breaks (`***`, `---`, `___`).
var thematicBreak = {
  name: "thematicBreak",
  tokenize: tokenizeThematicBreak
};
|
||
/**
 * Tokenizer for a thematic break: runs of a single marker (`*`, `-`, or
 * `_`), optionally separated by spaces/tabs, ending at EOF/EOL once at
 * least `constants.thematicBreakMarkerCountMin` markers were seen.
 */
function tokenizeThematicBreak(effects, ok3, nok) {
  // Number of markers seen so far.
  let size = 0;
  // The marker character for this break (fixed by the first one).
  let marker;
  return start;
  function start(code) {
    effects.enter(types.thematicBreak);
    return before(code);
  }
  function before(code) {
    ok(
      code === codes.asterisk || code === codes.dash || code === codes.underscore,
      "expected `*`, `-`, or `_`"
    );
    marker = code;
    return atBreak(code);
  }
  // Between sequences: another marker run, or the end of the break.
  function atBreak(code) {
    if (code === marker) {
      effects.enter(types.thematicBreakSequence);
      return sequence(code);
    }
    if (size >= constants.thematicBreakMarkerCountMin && (code === codes.eof || markdownLineEnding(code))) {
      effects.exit(types.thematicBreak);
      return ok3(code);
    }
    return nok(code);
  }
  // Inside a run of identical markers; trailing whitespace is allowed.
  function sequence(code) {
    if (code === marker) {
      effects.consume(code);
      size++;
      return sequence;
    }
    effects.exit(types.thematicBreakSequence);
    return markdownSpace(code) ? factorySpace(effects, atBreak, types.whitespace)(code) : atBreak(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/list.js
|
||
// List construct: `tokenize` starts a list container (or a new item in an
// existing one), `continuation` decides whether a following line is still
// part of the current item, and `exit` closes the container.
var list = {
  continuation: { tokenize: tokenizeListContinuation },
  exit: tokenizeListEnd,
  name: "list",
  tokenize: tokenizeListStart
};
// Partial construct for the whitespace directly after a list item marker.
var listItemPrefixWhitespaceConstruct = {
  partial: true,
  tokenize: tokenizeListItemPrefixWhitespace
};
// Partial construct for indentation matching the current item's prefix width.
var indentConstruct = { partial: true, tokenize: tokenizeIndent };
/**
 * Tokenize the start of a list item, opening the list container first when
 * this is the item that begins the list.
 *
 * Called with the tokenize context as `this`; reads and updates
 * `this.containerState` (`type`, `marker`, `size`, `initialBlankLine`).
 *
 * @param {Object} effects - Tokenizer effects.
 * @param {Function} ok3 - Success state.
 * @param {Function} nok - Failure state.
 * @returns {Function} Initial state.
 */
function tokenizeListStart(effects, ok3, nok) {
  const self = this;
  const tail = self.events[self.events.length - 1];
  // Width of any line prefix already consumed before the marker; counts
  // towards the item's total prefix size.
  let initialSize = tail && tail[1].type === types.linePrefix ? tail[2].sliceSerialize(tail[1], true).length : 0;
  // Number of digits consumed for an ordered list item value.
  let size = 0;
  return start;
  function start(code) {
    ok(self.containerState, "expected state");
    // Reuse the container's kind when continuing a list; otherwise infer it
    // from the current marker character.
    const kind = self.containerState.type || (code === codes.asterisk || code === codes.plusSign || code === codes.dash ? types.listUnordered : types.listOrdered);
    if (kind === types.listUnordered ? !self.containerState.marker || code === self.containerState.marker : asciiDigit(code)) {
      if (!self.containerState.type) {
        self.containerState.type = kind;
        effects.enter(kind, { _container: true });
      }
      if (kind === types.listUnordered) {
        effects.enter(types.listItemPrefix);
        // `*` and `-` could also start a thematic break, which wins; check
        // that before committing to a list item marker.
        return code === codes.asterisk || code === codes.dash ? effects.check(thematicBreak, nok, atMarker)(code) : atMarker(code);
      }
      // Ordered lists interrupting a paragraph must start with `1`.
      if (!self.interrupt || code === codes.digit1) {
        effects.enter(types.listItemPrefix);
        effects.enter(types.listItemValue);
        return inside(code);
      }
    }
    return nok(code);
  }
  // Consume the digits of an ordered list item value, then expect the marker.
  function inside(code) {
    ok(self.containerState, "expected state");
    if (asciiDigit(code) && ++size < constants.listItemValueSizeMax) {
      effects.consume(code);
      return inside;
    }
    // When interrupting, only a single-digit value is allowed; the marker
    // must match the container's established marker (or be `.`/`)` first).
    if ((!self.interrupt || size < 2) && (self.containerState.marker ? code === self.containerState.marker : code === codes.rightParenthesis || code === codes.dot)) {
      effects.exit(types.listItemValue);
      return atMarker(code);
    }
    return nok(code);
  }
  // At the item marker itself; consume it, then figure out the prefix.
  function atMarker(code) {
    ok(self.containerState, "expected state");
    ok(code !== codes.eof, "eof (`null`) is not a marker");
    effects.enter(types.listItemMarker);
    effects.consume(code);
    effects.exit(types.listItemMarker);
    // Remember the first marker: further items must use the same one.
    self.containerState.marker = self.containerState.marker || code;
    return effects.check(
      blankLine,
      // Can’t be empty when interrupting.
      self.interrupt ? nok : onBlank,
      effects.attempt(
        listItemPrefixWhitespaceConstruct,
        endOfPrefix,
        otherPrefix
      )
    );
  }
  // The marker is followed by a blank line: an initially empty item.
  function onBlank(code) {
    ok(self.containerState, "expected state");
    self.containerState.initialBlankLine = true;
    // The (virtual) space after the marker still counts towards the prefix.
    initialSize++;
    return endOfPrefix(code);
  }
  // Whitespace construct didn't match: accept exactly one space/tab.
  function otherPrefix(code) {
    if (markdownSpace(code)) {
      effects.enter(types.listItemPrefixWhitespace);
      effects.consume(code);
      effects.exit(types.listItemPrefixWhitespace);
      return endOfPrefix;
    }
    return nok(code);
  }
  // Prefix complete: record its total width for continuation-line checks.
  function endOfPrefix(code) {
    ok(self.containerState, "expected state");
    self.containerState.size = initialSize + self.sliceSerialize(effects.exit(types.listItemPrefix), true).length;
    return ok3(code);
  }
}
/**
 * Decide whether the current line continues the current list item.
 *
 * Called with the tokenize context as `this`; reads and updates
 * `this.containerState`.
 *
 * @param {Object} effects - Tokenizer effects.
 * @param {Function} ok3 - State when the line continues the item.
 * @param {Function} nok - State when it does not.
 * @returns {Function} Initial state.
 */
function tokenizeListContinuation(effects, ok3, nok) {
  const self = this;
  ok(self.containerState, "expected state");
  self.containerState._closeFlow = void 0;
  return effects.check(blankLine, onBlank, notBlank);
  // Blank line: the item continues, but remember blanks seen after the first
  // line so a later blank-then-unindented line can end the item.
  function onBlank(code) {
    ok(self.containerState, "expected state");
    ok(typeof self.containerState.size === "number", "expected size");
    self.containerState.furtherBlankLines = self.containerState.furtherBlankLines || self.containerState.initialBlankLine;
    return factorySpace(
      effects,
      ok3,
      types.listItemIndent,
      self.containerState.size + 1
    )(code);
  }
  // Non-blank line: it continues the item only when indented to the item's
  // prefix width (tried via `indentConstruct`).
  function notBlank(code) {
    ok(self.containerState, "expected state");
    if (self.containerState.furtherBlankLines || !markdownSpace(code)) {
      self.containerState.furtherBlankLines = void 0;
      self.containerState.initialBlankLine = void 0;
      return notInCurrentItem(code);
    }
    self.containerState.furtherBlankLines = void 0;
    self.containerState.initialBlankLine = void 0;
    return effects.attempt(indentConstruct, ok3, notInCurrentItem)(code);
  }
  // Not part of the current item: mark its flow to be closed and see whether
  // a sibling item starts here instead.
  function notInCurrentItem(code) {
    ok(self.containerState, "expected state");
    self.containerState._closeFlow = true;
    self.interrupt = void 0;
    ok(
      self.parser.constructs.disable.null,
      "expected `disable.null` to be populated"
    );
    return factorySpace(
      effects,
      effects.attempt(list, ok3, nok),
      types.linePrefix,
      // Allow up to a tab of indent unless indented code is disabled.
      self.parser.constructs.disable.null.includes("codeIndented") ? void 0 : constants.tabSize
    )(code);
  }
}
/**
 * Partial: match indentation whose width equals the current item's prefix
 * size exactly.
 *
 * @param {Object} effects - Tokenizer effects.
 * @param {Function} ok3 - Success state.
 * @param {Function} nok - Failure state.
 * @returns {Function} Initial state.
 */
function tokenizeIndent(effects, ok3, nok) {
  const self = this;
  ok(self.containerState, "expected state");
  ok(typeof self.containerState.size === "number", "expected size");
  return factorySpace(
    effects,
    afterPrefix,
    types.listItemIndent,
    self.containerState.size + 1
  );
  // Accept only if the consumed indent serializes to exactly `size` columns.
  function afterPrefix(code) {
    ok(self.containerState, "expected state");
    const tail = self.events[self.events.length - 1];
    return tail && tail[1].type === types.listItemIndent && tail[2].sliceSerialize(tail[1], true).length === self.containerState.size ? ok3(code) : nok(code);
  }
}
/**
 * Close the list container (ordered or unordered, per `containerState.type`).
 *
 * @param {Object} effects - Tokenizer effects.
 */
function tokenizeListEnd(effects) {
  ok(this.containerState, "expected state");
  ok(typeof this.containerState.type === "string", "expected type");
  effects.exit(this.containerState.type);
}
/**
 * Partial: whitespace after a list item marker, bounded so that deeper
 * indentation is left for other constructs (limit depends on whether
 * indented code is enabled).
 *
 * @param {Object} effects - Tokenizer effects.
 * @param {Function} ok3 - Success state.
 * @param {Function} nok - Failure state.
 * @returns {Function} Initial state.
 */
function tokenizeListItemPrefixWhitespace(effects, ok3, nok) {
  const self = this;
  ok(
    self.parser.constructs.disable.null,
    "expected `disable.null` to be populated"
  );
  return factorySpace(
    effects,
    afterPrefix,
    types.listItemPrefixWhitespace,
    self.parser.constructs.disable.null.includes("codeIndented") ? void 0 : constants.tabSize + 1
  );
  // Accept only when some whitespace was consumed and no more follows.
  function afterPrefix(code) {
    const tail = self.events[self.events.length - 1];
    return !markdownSpace(code) && tail && tail[1].type === types.listItemPrefixWhitespace ? ok3(code) : nok(code);
  }
}
|
||
|
||
// node_modules/micromark-core-commonmark/dev/lib/setext-underline.js
|
||
// Setext heading underline construct: `tokenize` matches the `=`/`-`
// underline line, and `resolveTo` rewrites the preceding paragraph events
// into a setext heading.
var setextUnderline = {
  name: "setextUnderline",
  resolveTo: resolveToSetextUnderline,
  tokenize: tokenizeSetextUnderline
};
|
||
/**
 * Resolver for setext headings: rewrite the events of the preceding
 * paragraph-in-content into setext-heading events ending at the underline.
 *
 * Walks `events` backwards to find the `content` enter (and the `paragraph`
 * enter and any `definition` exit inside it), drops the `content` exit,
 * retypes the paragraph as `setextHeadingText`, and wraps the span in a
 * `setextHeading` token.  When a definition precedes the text, the heading
 * is spliced in after the definition instead of replacing the whole
 * `content` token.
 *
 * Fix: the second assertion checked `content2` but reused the message of
 * the `text` assertion above it; the message now names `content`.
 *
 * @param {Array} events - All events so far (mutated in place).
 * @param {Object} context - Tokenize context, used as event context.
 * @returns {Array} The mutated `events`.
 */
function resolveToSetextUnderline(events, context) {
  let index = events.length;
  let content2;
  let text;
  let definition2;
  // Scan backwards until the enclosing `content` enter is found.
  while (index--) {
    if (events[index][0] === "enter") {
      if (events[index][1].type === types.content) {
        content2 = index;
        break;
      }
      if (events[index][1].type === types.paragraph) {
        text = index;
      }
    } else {
      if (events[index][1].type === types.content) {
        // Drop the `content` exit; the heading exit replaces it below.
        events.splice(index, 1);
      }
      if (!definition2 && events[index][1].type === types.definition) {
        definition2 = index;
      }
    }
  }
  ok(text !== void 0, "expected a `text` index to be found");
  ok(content2 !== void 0, "expected a `content` index to be found");
  ok(events[content2][2] === context, "enter context should be same");
  ok(
    events[events.length - 1][2] === context,
    "enter context should be same"
  );
  // The heading spans from the content's start to the underline's end.
  const heading = {
    type: types.setextHeading,
    start: { ...events[content2][1].start },
    end: { ...events[events.length - 1][1].end }
  };
  events[text][1].type = types.setextHeadingText;
  if (definition2) {
    // A definition precedes the text: keep the `content` token for it and
    // open the heading right before the paragraph text.
    events.splice(text, 0, ["enter", heading, context]);
    events.splice(definition2 + 1, 0, ["exit", events[content2][1], context]);
    events[content2][1].end = { ...events[definition2][1].end };
  } else {
    // No definition: the `content` token itself becomes the heading.
    events[content2][1] = heading;
  }
  events.push(["exit", heading, context]);
  return events;
}
|
||
/**
 * Tokenize a setext heading underline: a run of `=` or `-`, optionally
 * followed by trailing whitespace, accepted only on a non-lazy line that
 * directly follows paragraph-like content (or when interrupting).
 *
 * Called with the tokenize context as `this`.
 *
 * @param {Object} effects - Tokenizer effects.
 * @param {Function} ok3 - Success state.
 * @param {Function} nok - Failure state.
 * @returns {Function} Initial state.
 */
function tokenizeSetextUnderline(effects, ok3, nok) {
  const self = this;
  // The underline character (`=` or `-`); the whole run must match it.
  let markerCode;
  return begin;

  // First marker: verify that what precedes is a paragraph and that this
  // line is not lazy before committing.
  function begin(code) {
    ok(
      code === codes.dash || code === codes.equalsTo,
      "expected `=` or `-`"
    );
    // Walk the events backwards, skipping line endings, line prefixes, and
    // `content`, to see whether the nearest remaining event is a paragraph.
    let sawParagraph;
    let index = self.events.length;
    while (index--) {
      const kind = self.events[index][1].type;
      if (kind === types.lineEnding || kind === types.linePrefix || kind === types.content) {
        continue;
      }
      sawParagraph = kind === types.paragraph;
      break;
    }
    if (self.parser.lazy[self.now().line] || (!self.interrupt && !sawParagraph)) {
      return nok(code);
    }
    effects.enter(types.setextHeadingLine);
    markerCode = code;
    return openSequence(code);
  }

  function openSequence(code) {
    effects.enter(types.setextHeadingLineSequence);
    return inSequence(code);
  }

  // Consume the run of identical markers, then optional trailing whitespace.
  function inSequence(code) {
    if (code === markerCode) {
      effects.consume(code);
      return inSequence;
    }
    effects.exit(types.setextHeadingLineSequence);
    if (markdownSpace(code)) {
      return factorySpace(effects, atEnd, types.lineSuffix)(code);
    }
    return atEnd(code);
  }

  // Only EOF or a line ending may follow the underline.
  function atEnd(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.setextHeadingLine);
      return ok3(code);
    }
    return nok(code);
  }
}
|
||
|
||
// node_modules/micromark-util-decode-string/dev/index.js
|
||
// Matches either a character escape (`\x`, capture 1) or a character
// reference (`&...;`, body in capture 2).
var characterEscapeOrReference = /\\([!-/:-@[-`{-~])|&(#(?:\d{1,7}|x[\da-f]{1,6})|[\da-z]{1,31});/gi;

/**
 * Decode markdown character escapes and character references in a string.
 *
 * @param {string} value - Value to decode.
 * @returns {string} Decoded value.
 */
function decodeString(value) {
  return value.replace(characterEscapeOrReference, decode);
}

/**
 * `String#replace` callback: decode one escape or reference match.
 *
 * @param {string} $0 - Whole match.
 * @param {string} $1 - Escaped character (set when the match was an escape).
 * @param {string} $2 - Reference body (set when the match was a reference).
 * @returns {string} Decoded result, or the match itself when the reference
 *   is unknown.
 */
function decode($0, $1, $2) {
  // An escape decodes to the escaped character itself.
  if ($1) return $1;
  if ($2.charCodeAt(0) !== codes.numberSign) {
    // Named reference (e.g. `amp`); keep the original text when unknown.
    return decodeNamedCharacterReference($2) || $0;
  }
  // Numeric reference: `#x…`/`#X…` is hexadecimal, plain `#…` is decimal.
  const second = $2.charCodeAt(1);
  const isHex = second === codes.lowercaseX || second === codes.uppercaseX;
  return decodeNumericCharacterReference(
    $2.slice(isHex ? 2 : 1),
    isHex ? constants.numericBaseHexadecimal : constants.numericBaseDecimal
  );
}
|
||
|
||
export {
|
||
ok,
|
||
unreachable,
|
||
toString,
|
||
decodeNamedCharacterReference,
|
||
codes,
|
||
constants,
|
||
types,
|
||
values,
|
||
splice,
|
||
push,
|
||
combineExtensions,
|
||
decodeNumericCharacterReference,
|
||
normalizeIdentifier,
|
||
asciiAlpha,
|
||
asciiAlphanumeric,
|
||
asciiControl,
|
||
markdownLineEnding,
|
||
markdownLineEndingOrSpace,
|
||
markdownSpace,
|
||
unicodePunctuation,
|
||
unicodeWhitespace,
|
||
normalizeUri,
|
||
factorySpace,
|
||
classifyCharacter,
|
||
resolveAll,
|
||
attention,
|
||
autolink,
|
||
blankLine,
|
||
blockQuote,
|
||
characterEscape,
|
||
characterReference,
|
||
codeFenced,
|
||
codeIndented,
|
||
codeText,
|
||
subtokenize,
|
||
content,
|
||
definition,
|
||
hardBreakEscape,
|
||
headingAtx,
|
||
htmlFlow,
|
||
htmlText,
|
||
labelEnd,
|
||
labelStartImage,
|
||
labelStartLink,
|
||
lineEnding,
|
||
thematicBreak,
|
||
list,
|
||
setextUnderline,
|
||
decodeString,
|
||
convert,
|
||
EXIT,
|
||
visitParents,
|
||
visit
|
||
};
|
||
//# sourceMappingURL=chunk-ZJALRI2F.js.map
|