/**
 * @import {
 *   Code,
 *   Construct,
 *   State,
 *   TokenizeContext,
 *   Tokenizer
 * } from 'micromark-util-types'
 */

import { factorySpace } from 'micromark-factory-space';
import { markdownLineEnding, markdownSpace } from 'micromark-util-character';

/** @type {Construct} */
const nonLazyContinuation = {
  partial: true,
  tokenize: tokenizeNonLazyContinuation
};
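
// Editorial sketch, not part of the original source: this partial construct
// succeeds only when the next line is not a lazy continuation line. For
// example, inside a block quote:
//
//   > ```
//   > a
//   b
//
// the line `b` is lazy (no `>` prefix), so it cannot continue the fenced
// code block; per CommonMark, lazy continuation applies only to paragraphs.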

/** @type {Construct} */
export const codeFenced = {
  concrete: true,
  name: 'codeFenced',
  tokenize: tokenizeCodeFenced
};
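
// Illustrative usage sketch, not part of the original module: `codeFenced`
// is one of the default CommonMark constructs, so compiling markdown with
// the `micromark` package (assumed installed) exercises this tokenizer:
//
//   import { micromark } from 'micromark'
//
//   micromark('```js\nalert(1)\n```')
//   // => '<pre><code class="language-js">alert(1)\n</code></pre>'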

/**
 * @this {TokenizeContext}
 *   Context.
 * @type {Tokenizer}
 */
function tokenizeCodeFenced(effects, ok, nok) {
  const self = this;
  /** @type {Construct} */
  const closeStart = {
    partial: true,
    tokenize: tokenizeCloseStart
  };
  let initialPrefix = 0;
  let sizeOpen = 0;
  /** @type {NonNullable<Code>} */
  let marker;
  return start;

  /**
   * Start of code.
   *
   * ```markdown
   * > | ~~~js
   *     ^
   *   | alert(1)
   *   | ~~~
   * ```
   *
   * @type {State}
   */
  function start(code) {
    // To do: parse whitespace like `markdown-rs`.
    return beforeSequenceOpen(code);
  }

  /**
   * In opening fence, after prefix, at sequence.
   *
   * ```markdown
   * > | ~~~js
   *     ^
   *   | alert(1)
   *   | ~~~
   * ```
   *
   * @type {State}
   */
  function beforeSequenceOpen(code) {
    const tail = self.events[self.events.length - 1];
    initialPrefix = tail && tail[1].type === "linePrefix" ? tail[2].sliceSerialize(tail[1], true).length : 0;
    marker = code;
    effects.enter("codeFenced");
    effects.enter("codeFencedFence");
    effects.enter("codeFencedFenceSequence");
    return sequenceOpen(code);
  }

  /**
   * In opening fence sequence.
   *
   * ```markdown
   * > | ~~~js
   *      ^
   *   | alert(1)
   *   | ~~~
   * ```
   *
   * @type {State}
   */
  function sequenceOpen(code) {
    if (code === marker) {
      sizeOpen++;
      effects.consume(code);
      return sequenceOpen;
    }
    if (sizeOpen < 3) {
      return nok(code);
    }
    effects.exit("codeFencedFenceSequence");
    return markdownSpace(code) ? factorySpace(effects, infoBefore, "whitespace")(code) : infoBefore(code);
  }
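
  // Editorial note, not part of the original source: the `sizeOpen < 3` check
  // above enforces the CommonMark rule that an opening fence needs at least
  // three markers; two tildes or two backticks open no code block, three do.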

  /**
   * In opening fence, after the sequence (and optional whitespace), before info.
   *
   * ```markdown
   * > | ~~~js
   *        ^
   *   | alert(1)
   *   | ~~~
   * ```
   *
   * @type {State}
   */
  function infoBefore(code) {
    if (code === null || markdownLineEnding(code)) {
      effects.exit("codeFencedFence");
      return self.interrupt ? ok(code) : effects.check(nonLazyContinuation, atNonLazyBreak, after)(code);
    }
    effects.enter("codeFencedFenceInfo");
    effects.enter("chunkString", {
      contentType: "string"
    });
    return info(code);
  }

  /**
   * In info.
   *
   * ```markdown
   * > | ~~~js
   *        ^
   *   | alert(1)
   *   | ~~~
   * ```
   *
   * @type {State}
   */
  function info(code) {
    if (code === null || markdownLineEnding(code)) {
      effects.exit("chunkString");
      effects.exit("codeFencedFenceInfo");
      return infoBefore(code);
    }
    if (markdownSpace(code)) {
      effects.exit("chunkString");
      effects.exit("codeFencedFenceInfo");
      return factorySpace(effects, metaBefore, "whitespace")(code);
    }
    if (code === 96 && code === marker) {
      return nok(code);
    }
    effects.consume(code);
    return info;
  }

  /**
   * In opening fence, after info and whitespace, before meta.
   *
   * ```markdown
   * > | ~~~js eval
   *           ^
   *   | alert(1)
   *   | ~~~
   * ```
   *
   * @type {State}
   */
  function metaBefore(code) {
    if (code === null || markdownLineEnding(code)) {
      return infoBefore(code);
    }
    effects.enter("codeFencedFenceMeta");
    effects.enter("chunkString", {
      contentType: "string"
    });
    return meta(code);
  }

  /**
   * In meta.
   *
   * ```markdown
   * > | ~~~js eval
   *           ^
   *   | alert(1)
   *   | ~~~
   * ```
   *
   * @type {State}
   */
  function meta(code) {
    if (code === null || markdownLineEnding(code)) {
      effects.exit("chunkString");
      effects.exit("codeFencedFenceMeta");
      return infoBefore(code);
    }
    if (code === 96 && code === marker) {
      return nok(code);
    }
    effects.consume(code);
    return meta;
  }
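
  // Editorial sketch, not part of the original source: for an opening fence
  // such as `~~~js eval`, the info token (`codeFencedFenceInfo`) covers `js`
  // and the meta token (`codeFencedFenceMeta`) covers `eval`. The
  // `code === 96 && code === marker` checks above implement the CommonMark
  // rule that the info string of a backtick fence (96 is the backtick) may
  // not contain backticks; tilde fences have no such restriction.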

  /**
   * At eol/eof in code, before a non-lazy closing fence or content.
   *
   * ```markdown
   * > | ~~~js
   *          ^
   * > | alert(1)
   *             ^
   *   | ~~~
   * ```
   *
   * @type {State}
   */
  function atNonLazyBreak(code) {
    return effects.attempt(closeStart, after, contentBefore)(code);
  }

  /**
   * Before code content, not a closing fence, at eol.
   *
   * ```markdown
   *   | ~~~js
   * > | alert(1)
   *     ^
   *   | ~~~
   * ```
   *
   * @type {State}
   */
  function contentBefore(code) {
    effects.enter("lineEnding");
    effects.consume(code);
    effects.exit("lineEnding");
    return contentStart;
  }

  /**
   * Before code content, not a closing fence.
   *
   * ```markdown
   *   | ~~~js
   * > | alert(1)
   *     ^
   *   | ~~~
   * ```
   *
   * @type {State}
   */
  function contentStart(code) {
    return initialPrefix > 0 && markdownSpace(code) ? factorySpace(effects, beforeContentChunk, "linePrefix", initialPrefix + 1)(code) : beforeContentChunk(code);
  }
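
  // Editorial note, not part of the original source: `initialPrefix` is the
  // indentation of the opening fence. Per CommonMark, up to that many spaces
  // are stripped from each content line, which is what the
  // `factorySpace(..., "linePrefix", initialPrefix + 1)` call above does.
  // For example, with the opening fence indented by two spaces, a content
  // line indented by four spaces keeps only two of those spaces.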

  /**
   * Before code content, after optional prefix.
   *
   * ```markdown
   *   | ~~~js
   * > | alert(1)
   *     ^
   *   | ~~~
   * ```
   *
   * @type {State}
   */
  function beforeContentChunk(code) {
    if (code === null || markdownLineEnding(code)) {
      return effects.check(nonLazyContinuation, atNonLazyBreak, after)(code);
    }
    effects.enter("codeFlowValue");
    return contentChunk(code);
  }

  /**
   * In code content.
   *
   * ```markdown
   *   | ~~~js
   * > | alert(1)
   *     ^^^^^^^^
   *   | ~~~
   * ```
   *
   * @type {State}
   */
  function contentChunk(code) {
    if (code === null || markdownLineEnding(code)) {
      effects.exit("codeFlowValue");
      return beforeContentChunk(code);
    }
    effects.consume(code);
    return contentChunk;
  }

  /**
   * After code.
   *
   * ```markdown
   *   | ~~~js
   *   | alert(1)
   * > | ~~~
   *        ^
   * ```
   *
   * @type {State}
   */
  function after(code) {
    effects.exit("codeFenced");
    return ok(code);
  }

  /**
   * @this {TokenizeContext}
   *   Context.
   * @type {Tokenizer}
   */
  function tokenizeCloseStart(effects, ok, nok) {
    let size = 0;
    return startBefore;

    /**
     * At eol, before a possible closing fence.
     *
     * @type {State}
     */
    function startBefore(code) {
      effects.enter("lineEnding");
      effects.consume(code);
      effects.exit("lineEnding");
      return start;
    }

    /**
     * Before closing fence, at optional whitespace.
     *
     * ```markdown
     *   | ~~~js
     *   | alert(1)
     * > | ~~~
     *     ^
     * ```
     *
     * @type {State}
     */
    function start(code) {
      // Always populated by defaults.

      // To do: `enter` here or in next state?
      effects.enter("codeFencedFence");
      return markdownSpace(code) ? factorySpace(effects, beforeSequenceClose, "linePrefix", self.parser.constructs.disable.null.includes('codeIndented') ? undefined : 4)(code) : beforeSequenceClose(code);
    }

    /**
     * In closing fence, after optional whitespace, at sequence.
     *
     * ```markdown
     *   | ~~~js
     *   | alert(1)
     * > | ~~~
     *     ^
     * ```
     *
     * @type {State}
     */
    function beforeSequenceClose(code) {
      if (code === marker) {
        effects.enter("codeFencedFenceSequence");
        return sequenceClose(code);
      }
      return nok(code);
    }

    /**
     * In closing fence sequence.
     *
     * ```markdown
     *   | ~~~js
     *   | alert(1)
     * > | ~~~
     *      ^
     * ```
     *
     * @type {State}
     */
    function sequenceClose(code) {
      if (code === marker) {
        size++;
        effects.consume(code);
        return sequenceClose;
      }
      if (size >= sizeOpen) {
        effects.exit("codeFencedFenceSequence");
        return markdownSpace(code) ? factorySpace(effects, sequenceCloseAfter, "whitespace")(code) : sequenceCloseAfter(code);
      }
      return nok(code);
    }
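
    // Editorial note, not part of the original source: the `size >= sizeOpen`
    // check above is the CommonMark rule that a closing fence must use the
    // same marker as, and be at least as long as, the opening fence. A block
    // opened with four backticks is not closed by three, but is by four or more.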

    /**
     * After closing fence sequence, after optional whitespace.
     *
     * ```markdown
     *   | ~~~js
     *   | alert(1)
     * > | ~~~
     *        ^
     * ```
     *
     * @type {State}
     */
    function sequenceCloseAfter(code) {
      if (code === null || markdownLineEnding(code)) {
        effects.exit("codeFencedFence");
        return ok(code);
      }
      return nok(code);
    }
  }
}

/**
 * @this {TokenizeContext}
 *   Context.
 * @type {Tokenizer}
 */
function tokenizeNonLazyContinuation(effects, ok, nok) {
  const self = this;
  return start;

  /**
   * At eol/eof: fail at eof, otherwise consume the line ending.
   *
   * @type {State}
   */
  function start(code) {
    if (code === null) {
      return nok(code);
    }
    effects.enter("lineEnding");
    effects.consume(code);
    effects.exit("lineEnding");
    return lineStart;
  }

  /**
   * At the start of the next line: fail if that line is lazy.
   *
   * @type {State}
   */
  function lineStart(code) {
    return self.parser.lazy[self.now().line] ? nok(code) : ok(code);
  }
}