/**
 * @import {
 *   Construct,
 *   Event,
 *   Resolver,
 *   State,
 *   TokenizeContext,
 *   Tokenizer,
 *   Token
 * } from 'micromark-util-types'
 */

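// Label end (`]`) and what can follow it: a resource (`(destination "title")`),
// a full (`[label]`) or collapsed (`[]`) reference, or nothing (a shortcut
// reference). Together with the label start constructs this turns labels into
// link and image events.
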
import { factoryDestination } from 'micromark-factory-destination';
import { factoryLabel } from 'micromark-factory-label';
import { factoryTitle } from 'micromark-factory-title';
import { factoryWhitespace } from 'micromark-factory-whitespace';
import { markdownLineEndingOrSpace } from 'micromark-util-character';
import { push, splice } from 'micromark-util-chunked';
import { normalizeIdentifier } from 'micromark-util-normalize-identifier';
import { resolveAll } from 'micromark-util-resolve-all';

/** @type {Construct} */
export const labelEnd = {
  name: 'labelEnd',
  resolveAll: resolveAllLabelEnd,
  resolveTo: resolveToLabelEnd,
  tokenize: tokenizeLabelEnd
};

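// Note (context, not something this file sets up itself): micromark registers
// `labelEnd` among its text constructs at `]` (code 93), alongside the label
// start constructs for `[` and `![`.
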
/** @type {Construct} */
const resourceConstruct = {
  tokenize: tokenizeResource
};
/** @type {Construct} */
const referenceFullConstruct = {
  tokenize: tokenizeReferenceFull
};
/** @type {Construct} */
const referenceCollapsedConstruct = {
  tokenize: tokenizeReferenceCollapsed
};

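// Resolver that runs once all events exist: label starts and ends that never
// became part of a link or image are turned into plain data.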
/** @type {Resolver} */
function resolveAllLabelEnd(events) {
  let index = -1;
  /** @type {Array<Event>} */
  const newEvents = [];
  while (++index < events.length) {
    const token = events[index][1];
    newEvents.push(events[index]);
    if (token.type === "labelImage" || token.type === "labelLink" || token.type === "labelEnd") {
      // Remove the marker events: an image opening (`![`) carries two marker
      // pairs, a link opening (`[`) or label end (`]`) carries one.
      const offset = token.type === "labelImage" ? 4 : 2;
      token.type = "data";
      index += offset;
    }
  }

  // If the lengths match, nothing was dropped and `events` can be left as-is.
  if (events.length !== newEvents.length) {
    splice(events, 0, events.length, newEvents);
  }
  return events;
}

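// Resolver that runs when a label end matches an opening: it wraps everything
// from that opening through the label end (plus any resource or reference
// after it) in link or image events, with a label and label text inside.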
/** @type {Resolver} */
function resolveToLabelEnd(events, context) {
  let index = events.length;
  let offset = 0;
  /** @type {Token} */
  let token;
  /** @type {number | undefined} */
  let open;
  /** @type {number | undefined} */
  let close;
  /** @type {Array<Event>} */
  let media;

  // Find an opening.
  while (index--) {
    token = events[index][1];
    if (open) {
      // If we see another link, or inactive link label, we’ve been here before.
      if (token.type === "link" || (token.type === "labelLink" && token._inactive)) {
        break;
      }

      // Mark other link openings as inactive, as we can’t have links in
      // links.
      if (events[index][0] === 'enter' && token.type === "labelLink") {
        token._inactive = true;
      }
    } else if (close) {
      if (events[index][0] === 'enter' && (token.type === "labelImage" || token.type === "labelLink") && !token._balanced) {
        open = index;
        if (token.type !== "labelLink") {
          // An image opening (`![`) has an extra marker pair of events to
          // account for.
          offset = 2;
          break;
        }
      }
    } else if (token.type === "labelEnd") {
      close = index;
    }
  }
  const group = {
    type: events[open][1].type === "labelLink" ? "link" : "image",
    start: {
      ...events[open][1].start
    },
    end: {
      ...events[events.length - 1][1].end
    }
  };
  const label = {
    type: "label",
    start: {
      ...events[open][1].start
    },
    end: {
      ...events[close][1].end
    }
  };
  const text = {
    type: "labelText",
    start: {
      ...events[open + offset + 2][1].end
    },
    end: {
      ...events[close - 2][1].start
    }
  };
  media = [['enter', group, context], ['enter', label, context]];

  // Opening marker.
  media = push(media, events.slice(open + 1, open + offset + 3));

  // Text open.
  media = push(media, [['enter', text, context]]);

  // Always populated by defaults.

  // Between.
  media = push(media, resolveAll(context.parser.constructs.insideSpan.null, events.slice(open + offset + 4, close - 3), context));

  // Text close, marker close, label close.
  media = push(media, [['exit', text, context], events[close - 2], events[close - 1], ['exit', label, context]]);

  // Reference, resource, or so.
  media = push(media, events.slice(close + 1));

  // Media close.
  media = push(media, [['exit', group, context]]);
  splice(events, open, events.length, media);
  return events;
}

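// Tokenizer for the label end itself: the `]`, then optionally a resource,
// a full or collapsed reference, or nothing (shortcut reference).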
/**
 * @this {TokenizeContext}
 *   Context.
 * @type {Tokenizer}
 */
function tokenizeLabelEnd(effects, ok, nok) {
  const self = this;
  let index = self.events.length;
  /** @type {Token} */
  let labelStart;
  /** @type {boolean} */
  let defined;

  // Find an opening.
  while (index--) {
    if ((self.events[index][1].type === "labelImage" || self.events[index][1].type === "labelLink") && !self.events[index][1]._balanced) {
      labelStart = self.events[index][1];
      break;
    }
  }
  return start;

  /**
   * Start of label end.
   *
   * ```markdown
   * > | [a](b) c
   *       ^
   * > | [a][b] c
   *       ^
   * > | [a][] b
   *       ^
   * > | [a] b
   *       ^
   * ```
   *
   * @type {State}
   */
  function start(code) {
    // If there is not an okay opening.
    if (!labelStart) {
      return nok(code);
    }

    // If the corresponding label (link) start is marked as inactive,
    // it means we’d be wrapping a link, like this:
    //
    // ```markdown
    // > | a [b [c](d) e](f) g.
    //                  ^
    // ```
    //
    // We can’t have that, so it’s just balanced brackets.
    if (labelStart._inactive) {
      return labelEndNok(code);
    }
    // Is the text between the opening and this `]` a defined identifier? That
    // decides whether collapsed and shortcut references are allowed.
    defined = self.parser.defined.includes(normalizeIdentifier(self.sliceSerialize({
      start: labelStart.end,
      end: self.now()
    })));
    effects.enter("labelEnd");
    effects.enter("labelMarker");
    effects.consume(code);
    effects.exit("labelMarker");
    effects.exit("labelEnd");
    return after;
  }

  /**
   * After `]`.
   *
   * ```markdown
   * > | [a](b) c
   *        ^
   * > | [a][b] c
   *        ^
   * > | [a][] b
   *        ^
   * > | [a] b
   *        ^
   * ```
   *
   * @type {State}
   */
  function after(code) {
    // Note: `markdown-rs` also parses GFM footnotes here, which for us is in
    // an extension.

    // Resource (`[asd](fgh)`)?
    if (code === 40) {
      // `(`: try a resource first; if that fails and the label is defined,
      // it can still be a shortcut reference.
      return effects.attempt(resourceConstruct, labelEndOk, defined ? labelEndOk : labelEndNok)(code);
    }

    // Full (`[asd][fgh]`) or collapsed (`[asd][]`) reference?
    if (code === 91) {
      // `[`: try a full reference first; if that fails and the label is
      // defined, try a collapsed reference.
      return effects.attempt(referenceFullConstruct, labelEndOk, defined ? referenceNotFull : labelEndNok)(code);
    }

    // Shortcut (`[asd]`) reference?
    return defined ? labelEndOk(code) : labelEndNok(code);
  }

  /**
   * After `]`, at `[`, but not at a full reference.
   *
   * > 👉 **Note**: we only get here if the label is defined.
   *
   * ```markdown
   * > | [a][] b
   *        ^
   * > | [a] b
   *        ^
   * ```
   *
   * @type {State}
   */
  function referenceNotFull(code) {
    return effects.attempt(referenceCollapsedConstruct, labelEndOk, labelEndNok)(code);
  }

  /**
   * Done, we found something.
   *
   * ```markdown
   * > | [a](b) c
   *           ^
   * > | [a][b] c
   *           ^
   * > | [a][] b
   *          ^
   * > | [a] b
   *        ^
   * ```
   *
   * @type {State}
   */
  function labelEndOk(code) {
    // Note: `markdown-rs` does a bunch of stuff here.
    return ok(code);
  }

  /**
   * Done, it’s nothing.
   *
   * There was an okay opening, but we didn’t match anything.
   *
   * ```markdown
   * > | [a](b c
   *        ^
   * > | [a][b c
   *        ^
   * > | [a] b
   *        ^
   * ```
   *
   * @type {State}
   */
  function labelEndNok(code) {
    // Mark the opening as balanced so it isn’t tried again; `resolveAll` will
    // later turn it into plain data.
    labelStart._balanced = true;
    return nok(code);
  }
}

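// Tokenizer for a resource: `(`, optional whitespace, an optional destination,
// optionally followed by whitespace and a title, optional whitespace, `)`.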
/**
 * @this {TokenizeContext}
 *   Context.
 * @type {Tokenizer}
 */
function tokenizeResource(effects, ok, nok) {
  return resourceStart;

  /**
   * At a resource.
   *
   * ```markdown
   * > | [a](b) c
   *        ^
   * ```
   *
   * @type {State}
   */
  function resourceStart(code) {
    effects.enter("resource");
    effects.enter("resourceMarker");
    effects.consume(code);
    effects.exit("resourceMarker");
    return resourceBefore;
  }

  /**
   * In resource, after `(`, at optional whitespace.
   *
   * ```markdown
   * > | [a](b) c
   *         ^
   * ```
   *
   * @type {State}
   */
  function resourceBefore(code) {
    return markdownLineEndingOrSpace(code) ? factoryWhitespace(effects, resourceOpen)(code) : resourceOpen(code);
  }

  /**
   * In resource, after optional whitespace, at `)` or a destination.
   *
   * ```markdown
   * > | [a](b) c
   *         ^
   * ```
   *
   * @type {State}
   */
  function resourceOpen(code) {
    if (code === 41) {
      // `)`: an empty resource, as in `[a]()`.
      return resourceEnd(code);
    }
    // The trailing `32` is the maximum number of balanced parentheses allowed
    // in a raw destination.
    return factoryDestination(effects, resourceDestinationAfter, resourceDestinationMissing, "resourceDestination", "resourceDestinationLiteral", "resourceDestinationLiteralMarker", "resourceDestinationRaw", "resourceDestinationString", 32)(code);
  }

  /**
   * In resource, after destination, at optional whitespace.
   *
   * ```markdown
   * > | [a](b) c
   *          ^
   * ```
   *
   * @type {State}
   */
  function resourceDestinationAfter(code) {
    return markdownLineEndingOrSpace(code) ? factoryWhitespace(effects, resourceBetween)(code) : resourceEnd(code);
  }

  /**
   * At invalid destination.
   *
   * ```markdown
   * > | [a](<<) b
   *          ^
   * ```
   *
   * @type {State}
   */
  function resourceDestinationMissing(code) {
    return nok(code);
  }

  /**
   * In resource, after destination and whitespace, at `(` or title.
   *
   * ```markdown
   * > | [a](b ) c
   *           ^
   * ```
   *
   * @type {State}
   */
  function resourceBetween(code) {
    if (code === 34 || code === 39 || code === 40) {
      // `"`, `'`, or `(`: the start of a title.
      return factoryTitle(effects, resourceTitleAfter, nok, "resourceTitle", "resourceTitleMarker", "resourceTitleString")(code);
    }
    return resourceEnd(code);
  }

  /**
   * In resource, after title, at optional whitespace.
   *
   * ```markdown
   * > | [a](b "c") d
   *              ^
   * ```
   *
   * @type {State}
   */
  function resourceTitleAfter(code) {
    return markdownLineEndingOrSpace(code) ? factoryWhitespace(effects, resourceEnd)(code) : resourceEnd(code);
  }

  /**
   * In resource, at `)`.
   *
   * ```markdown
   * > | [a](b) d
   *          ^
   * ```
   *
   * @type {State}
   */
  function resourceEnd(code) {
    if (code === 41) {
      // `)`
      effects.enter("resourceMarker");
      effects.consume(code);
      effects.exit("resourceMarker");
      effects.exit("resource");
      return ok;
    }
    return nok(code);
  }
}

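// Tokenizer for a full reference: a bracketed label (the `[b]` in `[a][b]`)
// whose normalized identifier must be defined.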
/**
 * @this {TokenizeContext}
 *   Context.
 * @type {Tokenizer}
 */
function tokenizeReferenceFull(effects, ok, nok) {
  const self = this;
  return referenceFull;

  /**
   * In a reference (full), at the `[`.
   *
   * ```markdown
   * > | [a][b] d
   *        ^
   * ```
   *
   * @type {State}
   */
  function referenceFull(code) {
    return factoryLabel.call(self, effects, referenceFullAfter, referenceFullMissing, "reference", "referenceMarker", "referenceString")(code);
  }

  /**
   * In a reference (full), after `]`.
   *
   * ```markdown
   * > | [a][b] d
   *           ^
   * ```
   *
   * @type {State}
   */
  function referenceFullAfter(code) {
    // The last event holds the `reference` token; `slice(1, -1)` strips its
    // brackets before normalizing and looking the identifier up.
    return self.parser.defined.includes(normalizeIdentifier(self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1))) ? ok(code) : nok(code);
  }

  /**
   * In reference (full) that was missing.
   *
   * ```markdown
   * > | [a][b d
   *        ^
   * ```
   *
   * @type {State}
   */
  function referenceFullMissing(code) {
    return nok(code);
  }
}

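// Tokenizer for a collapsed reference: the `[]` after a defined label, as in
// `[a][]`.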
/**
 * @this {TokenizeContext}
 *   Context.
 * @type {Tokenizer}
 */
function tokenizeReferenceCollapsed(effects, ok, nok) {
  return referenceCollapsedStart;

  /**
   * In reference (collapsed), at `[`.
   *
   * > 👉 **Note**: we only get here if the label is defined.
   *
   * ```markdown
   * > | [a][] d
   *        ^
   * ```
   *
   * @type {State}
   */
  function referenceCollapsedStart(code) {
    // We only attempt a collapsed label if there’s a `[`.

    effects.enter("reference");
    effects.enter("referenceMarker");
    effects.consume(code);
    effects.exit("referenceMarker");
    return referenceCollapsedOpen;
  }

  /**
   * In reference (collapsed), at `]`.
   *
   * > 👉 **Note**: we only get here if the label is defined.
   *
   * ```markdown
   * > | [a][] d
   *         ^
   * ```
   *
   * @type {State}
   */
  function referenceCollapsedOpen(code) {
    if (code === 93) {
      // `]`
      effects.enter("referenceMarker");
      effects.consume(code);
      effects.exit("referenceMarker");
      effects.exit("reference");
      return ok;
    }
    return nok(code);
  }
}