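// Tokenizer for link reference definitions, e.g.:
//
//   [alpha]: https://example.com "Alpha"
//
// The label, the destination, and the optional title are each tokenized with
// the shared micromark factories imported below.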

/**
 * @typedef {import('micromark-util-types').Construct} Construct
 * @typedef {import('micromark-util-types').Tokenizer} Tokenizer
 * @typedef {import('micromark-util-types').State} State
 */
import {ok as assert} from 'uvu/assert'
import {factoryDestination} from 'micromark-factory-destination'
import {factoryLabel} from 'micromark-factory-label'
import {factorySpace} from 'micromark-factory-space'
import {factoryTitle} from 'micromark-factory-title'
import {factoryWhitespace} from 'micromark-factory-whitespace'
import {normalizeIdentifier} from 'micromark-util-normalize-identifier'
import {
  markdownLineEnding,
  markdownLineEndingOrSpace
} from 'micromark-util-character'
import {codes} from 'micromark-util-symbol/codes.js'
import {types} from 'micromark-util-symbol/types.js'

/** @type {Construct} */
export const definition = {name: 'definition', tokenize: tokenizeDefinition}

/** @type {Construct} */
const titleConstruct = {tokenize: tokenizeTitle, partial: true}
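
// The title is attempted as a partial construct: if no valid title follows
// the destination, the attempt is discarded and the definition can still end
// after the destination alone.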
/** @type {Tokenizer} */
function tokenizeDefinition(effects, ok, nok) {
  const self = this
  /** @type {string} */
  let identifier
  return start

  /** @type {State} */
  function start(code) {
    assert(code === codes.leftSquareBracket, 'expected `[`')
    effects.enter(types.definition)
    return factoryLabel.call(
      self,
      effects,
      labelAfter,
      nok,
      types.definitionLabel,
      types.definitionLabelMarker,
      types.definitionLabelString
    )(code)
  }

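  // The label (`[…]`) has been tokenized at this point; its text, with the
  // brackets stripped and normalized with `normalizeIdentifier`, becomes the
  // identifier used to match link references against this definition.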
  /** @type {State} */
  function labelAfter(code) {
    identifier = normalizeIdentifier(
      self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)
    )

    if (code === codes.colon) {
      effects.enter(types.definitionMarker)
      effects.consume(code)
      effects.exit(types.definitionMarker)

      // Note: blank lines can't exist in content.
      return factoryWhitespace(
        effects,
        factoryDestination(
          effects,
          effects.attempt(
            titleConstruct,
            factorySpace(effects, after, types.whitespace),
            factorySpace(effects, after, types.whitespace)
          ),
          nok,
          types.definitionDestination,
          types.definitionDestinationLiteral,
          types.definitionDestinationLiteralMarker,
          types.definitionDestinationRaw,
          types.definitionDestinationString
        )
      )
    }

    return nok(code)
  }

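  // After the destination (and optional title), only an eol or eof is valid;
  // the normalized identifier is then recorded on the parser so later link
  // references can resolve to this definition.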
  /** @type {State} */
  function after(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.definition)

      if (!self.parser.defined.includes(identifier)) {
        self.parser.defined.push(identifier)
      }

      return ok(code)
    }

    return nok(code)
  }
}

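// Partial tokenizer for the optional title: whitespace (which may include a
// line ending), then a title delimited by `"`, `'`, or parentheses, then only
// whitespace until the end of the line.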
/** @type {Tokenizer} */
function tokenizeTitle(effects, ok, nok) {
  return start

  /** @type {State} */
  function start(code) {
    return markdownLineEndingOrSpace(code)
      ? factoryWhitespace(effects, before)(code)
      : nok(code)
  }

  /** @type {State} */
  function before(code) {
    if (
      code === codes.quotationMark ||
      code === codes.apostrophe ||
      code === codes.leftParenthesis
    ) {
      return factoryTitle(
        effects,
        factorySpace(effects, after, types.whitespace),
        nok,
        types.definitionTitle,
        types.definitionTitleMarker,
        types.definitionTitleString
      )(code)
    }

    return nok(code)
  }

  /** @type {State} */
  function after(code) {
    return code === codes.eof || markdownLineEnding(code) ? ok(code) : nok(code)
  }
}
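
// A minimal usage sketch (an assumption for illustration; not part of this
// module): the `definition` construct exported above is one of micromark's
// default CommonMark constructs, so parsing input that contains a link
// reference definition exercises this tokenizer.
//
//   import {micromark} from 'micromark'
//
//   micromark('[alpha]: https://example.com "Alpha"\n\n[alpha]')
//   // The paragraph `[alpha]` resolves against the definition tokenized
//   // here, yielding roughly:
//   // '<p><a href="https://example.com" title="Alpha">alpha</a></p>'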