mdast-util-from-markdown: npm package version comparison

Comparing version 1.2.0 to 1.2.1


dev/index.d.ts
export {fromMarkdown} from './lib/index.js'
export type Value = import('./lib/index.js').Value
export type CompileContext = import('./lib/index.js').CompileContext
export type Encoding = import('./lib/index.js').Encoding
export type Options = import('./lib/index.js').Options
export type Extension = import('./lib/index.js').Extension
export type Handle = import('./lib/index.js').Handle
export type Transform = import('./lib/index.js').Transform
export type Token = import('./lib/index.js').Token
export type CompileContext = import('./lib/index.js').CompileContext
export type OnEnterError = import('./lib/index.js').OnEnterError
export type OnExitError = import('./lib/index.js').OnExitError
export type Options = import('./lib/index.js').Options
export type Token = import('./lib/index.js').Token
export type Transform = import('./lib/index.js').Transform
export type Value = import('./lib/index.js').Value
/**
* To do: deprecate next major.
* To do: next major: remove.
*/
export type OnError = import('./lib/index.js').OnEnterError
/**
* @typedef {import('./lib/index.js').Value} Value
* @typedef {import('./lib/index.js').CompileContext} CompileContext
* @typedef {import('./lib/index.js').Encoding} Encoding
* @typedef {import('./lib/index.js').Options} Options
* @typedef {import('./lib/index.js').Extension} Extension
* @typedef {import('./lib/index.js').Handle} Handle
* @typedef {import('./lib/index.js').Transform} Transform
* @typedef {import('./lib/index.js').Token} Token
* @typedef {import('./lib/index.js').CompileContext} CompileContext
* @typedef {import('./lib/index.js').OnEnterError} OnEnterError
* @typedef {import('./lib/index.js').OnExitError} OnExitError
*
* @typedef {import('./lib/index.js').Options} Options
* @typedef {import('./lib/index.js').Token} Token
* @typedef {import('./lib/index.js').Transform} Transform
* @typedef {import('./lib/index.js').Value} Value
*/
/**
* @typedef {import('./lib/index.js').OnEnterError} OnError
* To do: deprecate next major.
* To do: next major: remove.
*/
export {fromMarkdown} from './lib/index.js'
/**
* @param value Markdown to parse (`string` or `Buffer`).
* @param [encoding] Character encoding to understand `value` as when it’s a `Buffer` (`string`, default: `'utf8'`).
* @param [options] Configuration
* @param value
* Markdown to parse.
* @param encoding
* Character encoding for when `value` is `Buffer`.
* @param options
* Configuration.
* @returns
* mdast tree.
*/

@@ -9,5 +14,5 @@ export const fromMarkdown: ((

encoding: Encoding,
options?: Options | undefined
options?: Options | null | undefined
) => Root) &
((value: Value, options?: Options | undefined) => Root)
((value: Value, options?: Options | null | undefined) => Root)
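For orientation, a minimal usage sketch of the two overloads declared above; as of 1.2.1, `null` is accepted wherever `options` was previously `Options | undefined`:

import {fromMarkdown} from 'mdast-util-from-markdown'

const treeFromString = fromMarkdown('# hi')                      // (value, options?)
const treeFromBuffer = fromMarkdown(Buffer.from('# hi'), 'utf8') // (value, encoding, options?), Node `Buffer` global
const treeWithNull = fromMarkdown('# hi', null)                  // `null` options, allowed since 1.2.1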
export type Encoding = import('micromark-util-types').Encoding

@@ -22,5 +27,4 @@ export type Event = import('micromark-util-types').Event

export type PhrasingContent = import('mdast').PhrasingContent
export type StaticPhrasingContent = import('mdast').StaticPhrasingContent
export type Content = import('mdast').Content
export type Node = Root | Content
export type Parent = Extract<Node, UnistParent>
export type Break = import('mdast').Break

@@ -45,19 +49,52 @@ export type Blockquote = import('mdast').Blockquote

export type ThematicBreak = import('mdast').ThematicBreak
export type Fragment = UnistParent & {
export type ReferenceType = import('mdast').ReferenceType
export type Node = Root | Content
export type Parent = Extract<Node, UnistParent>
export type Fragment = Omit<UnistParent, 'type' | 'children'> & {
type: 'fragment'
children: Array<PhrasingContent>
}
export type _CompileDataFields = {
expectingFirstListItemValue: boolean | undefined
flowCodeInside: boolean | undefined
setextHeadingSlurpLineEnding: boolean | undefined
atHardBreak: boolean | undefined
referenceType: 'collapsed' | 'full'
inReference: boolean | undefined
characterReferenceType:
/**
* State.
*/
export type CompileData = {
/**
* Whether we’re inside a hard break.
*/
atHardBreak?: boolean | undefined
/**
* Current character reference type.
*/
characterReferenceType?:
| 'characterReferenceMarkerHexadecimal'
| 'characterReferenceMarkerNumeric'
| undefined
/**
* Whether a first list item value (`1` in `1. a`) is expected.
*/
expectingFirstListItemValue?: boolean | undefined
/**
* Whether we’re in flow code.
*/
flowCodeInside?: boolean | undefined
/**
* Whether we’re in a reference.
*/
inReference?: boolean | undefined
/**
* Whether we’re expecting a line ending from a setext heading, which can be slurped.
*/
setextHeadingSlurpLineEnding?: boolean | undefined
/**
* Current reference.
*/
referenceType?: 'collapsed' | 'full' | undefined
}
export type CompileData = Record<string, unknown> & Partial<_CompileDataFields>
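Compared with the old `Record<string, unknown>`-based type above, `CompileData` is now a closed set of fields, so the per-run state store is type-checked. A rough sketch of how a handle touches it (the handle name is illustrative, not from this diff):

/** @type {import('mdast-util-from-markdown').Handle} */
function onexitexample() {
  // Keys are now checked against `keyof CompileData` (1.2.1) instead of plain `string`.
  this.setData('atHardBreak', true)
  if (this.getData('inReference')) {
    // `referenceType` is typed as 'collapsed' | 'full' | undefined here.
    this.setData('referenceType', 'full')
  }
}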
export type Transform = (tree: Root) => Root | void
/**
* Extra transform, to change the AST afterwards.
*/
export type Transform = (tree: Root) => Root | undefined | null | void
/**
* Handle a token.
*/
export type Handle = (this: CompileContext, token: Token) => void

@@ -68,15 +105,6 @@ /**

export type Handles = Record<string, Handle>
export type NormalizedExtension = Record<
string,
Record<string, unknown> | Array<unknown>
> & {
canContainEols: Array<string>
transforms: Array<Transform>
enter: Handles
exit: Handles
}
/**
* An mdast extension changes how markdown tokens are turned into mdast.
* Handle the case where the `right` token is open, but it is closed (by the
* `left` token) or because we reached the end of the document.
*/
export type Extension = Partial<NormalizedExtension>
export type OnEnterError = (

@@ -87,2 +115,6 @@ this: Omit<CompileContext, 'sliceSerialize'>,

) => void
/**
* Handle the case where the `right` token is open but it is closed by
* exiting the `left` token.
*/
export type OnExitError = (

@@ -94,52 +126,98 @@ this: Omit<CompileContext, 'sliceSerialize'>,

/**
* mdast compiler context
* Open token on the stack, with an optional error handler for when
* that token isn’t closed properly.
*/
export type TokenTuple = [Token, OnEnterError | undefined]
/**
* Configuration.
*
* We have our defaults, but extensions will add more.
*/
export type Config = {
/**
* Token types where line endings are used.
*/
canContainEols: Array<string>
/**
* Opening handles.
*/
enter: Handles
/**
* Closing handles.
*/
exit: Handles
/**
* Tree transforms.
*/
transforms: Array<Transform>
}
/**
* Change how markdown tokens from micromark are turned into mdast.
*/
export type Extension = Partial<Config>
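As an illustration of the `Extension = Partial<Config>` shape, a sketch of a hand-written extension (the `customSpan` token and the handles below are made up for the example, not taken from this diff):

/** @type {import('mdast-util-from-markdown').Extension} */
const myExtension = {
  canContainEols: ['customSpan'],
  enter: {
    customSpan(token) {
      this.enter({type: 'text', value: ''}, token)
    }
  },
  exit: {
    customSpan(token) {
      this.exit(token)
    }
  },
  transforms: [(tree) => tree]
}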
/**
* mdast compiler context.
*/
export type CompileContext = {
/**
* Stack of nodes.
*/
stack: Array<Node | Fragment>
tokenStack: Array<[Token, OnEnterError | undefined]>
/**
* Set data into the key-value store.
* Stack of tokens.
*/
setData: (key: string, value?: unknown) => void
tokenStack: Array<TokenTuple>
/**
* Get data from the key-value store.
* Get data from the key/value store.
*/
getData: <K extends string>(key: K) => CompileData[K]
getData: <Key extends keyof CompileData>(key: Key) => CompileData[Key]
/**
* Capture some of the output data.
* Set data into the key/value store.
*/
setData: <Key_1 extends keyof CompileData>(
key: Key_1,
value?: CompileData[Key_1] | undefined
) => void
/**
* Capture some of the output data.
*/
buffer: (this: CompileContext) => void
/**
* Stop capturing and access the output data.
* Stop capturing and access the output data.
*/
resume: (this: CompileContext) => string
/**
* Enter a token.
* Enter a token.
*/
enter: <N extends Node>(
enter: <Kind extends Node>(
this: CompileContext,
node: N,
node: Kind,
token: Token,
onError?: OnEnterError | undefined
) => N
onError?: OnEnterError
) => Kind
/**
* Exit a token.
* Exit a token.
*/
exit: (
this: CompileContext,
token: Token,
onError?: OnExitError | undefined
) => Node
exit: (this: CompileContext, token: Token, onError?: OnExitError) => Node
/**
* Get the string value of a token.
* Get the string value of a token.
*/
sliceSerialize: TokenizeContext['sliceSerialize']
/**
* Configuration.
* Configuration.
*/
config: NormalizedExtension
config: Config
}
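Tying the context members together, the usual handle pattern looks roughly like this (mirrored by the concrete handles later in this diff; the handle here is illustrative):

/** @type {import('mdast-util-from-markdown').Handle} */
function onexitexampletitle(token) {
  const value = this.resume()                    // text captured since a matching `this.buffer()`
  const source = this.sliceSerialize(token)      // raw source of the token
  const node = this.stack[this.stack.length - 1] // node opened earlier with `this.enter(...)`
  // …assign `value` or `source` onto `node` here.
}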
/**
* Configuration for how to build mdast.
*/
export type FromMarkdownOptions = {
mdastExtensions?: Array<Extension | Array<Extension>>
/**
* Extensions for this utility to change how tokens are turned into a tree.
*/
mdastExtensions?: Array<Extension | Array<Extension>> | null | undefined
}
/**
* Configuration.
*/
export type Options = ParseOptions & FromMarkdownOptions
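`Options` therefore accepts both micromark `ParseOptions` fields (such as `extensions`) and the `mdastExtensions` field from `FromMarkdownOptions`. A typical pairing, assuming the GFM packages as an example (they are not part of this diff):

import {fromMarkdown} from 'mdast-util-from-markdown'
import {gfm} from 'micromark-extension-gfm'
import {gfmFromMarkdown} from 'mdast-util-gfm'

const tree = fromMarkdown('| a |\n| - |\n| b |', {
  extensions: [gfm()],                 // syntax (micromark)
  mdastExtensions: [gfmFromMarkdown()] // tree building (this utility)
})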

@@ -8,8 +8,9 @@ /**

* @typedef {import('micromark-util-types').Value} Value
*
* @typedef {import('unist').Parent} UnistParent
* @typedef {import('unist').Point} Point
*
* @typedef {import('mdast').PhrasingContent} PhrasingContent
* @typedef {import('mdast').StaticPhrasingContent} StaticPhrasingContent
* @typedef {import('mdast').Content} Content
* @typedef {Root|Content} Node
* @typedef {Extract<Node, UnistParent>} Parent
* @typedef {import('mdast').Break} Break

@@ -34,37 +35,105 @@ * @typedef {import('mdast').Blockquote} Blockquote

* @typedef {import('mdast').ThematicBreak} ThematicBreak
* @typedef {import('mdast').ReferenceType} ReferenceType
*/
/**
* @typedef {Root | Content} Node
* @typedef {Extract<Node, UnistParent>} Parent
*
* @typedef {UnistParent & {type: 'fragment', children: Array<PhrasingContent>}} Fragment
* @typedef {Omit<UnistParent, 'type' | 'children'> & {type: 'fragment', children: Array<PhrasingContent>}} Fragment
*/
/**
* @typedef _CompileDataFields
* @property {boolean|undefined} expectingFirstListItemValue
* @property {boolean|undefined} flowCodeInside
* @property {boolean|undefined} setextHeadingSlurpLineEnding
* @property {boolean|undefined} atHardBreak
* @property {'collapsed'|'full'} referenceType
* @property {boolean|undefined} inReference
* @property {'characterReferenceMarkerHexadecimal'|'characterReferenceMarkerNumeric'} characterReferenceType
* @typedef CompileData
* State.
* @property {boolean | undefined} [atHardBreak]
* Whether we’re inside a hard break.
* @property {'characterReferenceMarkerHexadecimal' | 'characterReferenceMarkerNumeric' | undefined} [characterReferenceType]
* Current character reference type.
* @property {boolean | undefined} [expectingFirstListItemValue]
* Whether a first list item value (`1` in `1. a`) is expected.
* @property {boolean | undefined} [flowCodeInside]
* Whether we’re in flow code.
* @property {boolean | undefined} [inReference]
* Whether we’re in a reference.
* @property {boolean | undefined} [setextHeadingSlurpLineEnding]
* Whether we’re expecting a line ending from a setext heading, which can be slurped.
* @property {'collapsed' | 'full' | undefined} [referenceType]
* Current reference.
*
* @typedef {Record<string, unknown> & Partial<_CompileDataFields>} CompileData
* @callback Transform
* Extra transform, to change the AST afterwards.
* @param {Root} tree
* Tree to transform.
* @returns {Root | undefined | null | void}
* New tree or nothing (in which case the current tree is used).
*
* @typedef {(tree: Root) => Root|void} Transform
* @typedef {(this: CompileContext, token: Token) => void} Handle
* @callback Handle
* Handle a token.
* @param {CompileContext} this
* Context.
* @param {Token} token
* Current token.
* @returns {void}
* Nothing.
*
* @typedef {Record<string, Handle>} Handles
* Token types mapping to handles
* @typedef {Record<string, Record<string, unknown>|Array<unknown>> & {canContainEols: Array<string>, transforms: Array<Transform>, enter: Handles, exit: Handles}} NormalizedExtension
* @typedef {Partial<NormalizedExtension>} Extension
* An mdast extension changes how markdown tokens are turned into mdast.
*
* @typedef {(this: Omit<CompileContext, 'sliceSerialize'>, left: Token|undefined, right: Token) => void} OnEnterError
* @typedef {(this: Omit<CompileContext, 'sliceSerialize'>, left: Token, right: Token) => void} OnExitError
* @callback OnEnterError
* Handle the case where the `right` token is open, but it is closed (by the
* `left` token) or because we reached the end of the document.
* @param {Omit<CompileContext, 'sliceSerialize'>} this
* Context.
* @param {Token | undefined} left
* Left token.
* @param {Token} right
* Right token.
* @returns {void}
* Nothing.
*
* @callback OnExitError
* Handle the case where the `right` token is open but it is closed by
* exiting the `left` token.
* @param {Omit<CompileContext, 'sliceSerialize'>} this
* Context.
* @param {Token} left
* Left token.
* @param {Token} right
* Right token.
* @returns {void}
* Nothing.
*
* @typedef {[Token, OnEnterError | undefined]} TokenTuple
* Open token on the stack, with an optional error handler for when
* that token isn’t closed properly.
*/
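For the `Transform` callback documented above, a small illustrative example (this particular transform is not from this package):

/** @type {import('mdast-util-from-markdown').Transform} */
function removeEmptyParagraphs(tree) {
  tree.children = tree.children.filter(
    (node) => !(node.type === 'paragraph' && node.children.length === 0)
  )
  // Returning nothing keeps the mutated input tree, per the widened return type.
}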
/**
* @typedef Config
* Configuration.
*
* We have our defaults, but extensions will add more.
* @property {Array<string>} canContainEols
* Token types where line endings are used.
* @property {Handles} enter
* Opening handles.
* @property {Handles} exit
* Closing handles.
* @property {Array<Transform>} transforms
* Tree transforms.
*
* @typedef {Partial<Config>} Extension
* Change how markdown tokens from micromark are turned into mdast.
*
* @typedef CompileContext
* mdast compiler context
* mdast compiler context.
* @property {Array<Node | Fragment>} stack
* @property {Array<[Token, OnEnterError|undefined]>} tokenStack
* @property {(key: string, value?: unknown) => void} setData
* Set data into the key-value store.
* @property {<K extends string>(key: K) => CompileData[K]} getData
* Get data from the key-value store.
* Stack of nodes.
* @property {Array<TokenTuple>} tokenStack
* Stack of tokens.
* @property {<Key extends keyof CompileData>(key: Key) => CompileData[Key]} getData
* Get data from the key/value store.
* @property {<Key extends keyof CompileData>(key: Key, value?: CompileData[Key]) => void} setData
* Set data into the key/value store.
* @property {(this: CompileContext) => void} buffer

@@ -74,3 +143,3 @@ * Capture some of the output data.

* Stop capturing and access the output data.
* @property {<N extends Node>(this: CompileContext, node: N, token: Token, onError?: OnEnterError) => N} enter
* @property {<Kind extends Node>(this: CompileContext, node: Kind, token: Token, onError?: OnEnterError) => Kind} enter
* Enter a token.

@@ -81,9 +150,18 @@ * @property {(this: CompileContext, token: Token, onError?: OnExitError) => Node} exit

* Get the string value of a token.
* @property {NormalizedExtension} config
* @property {Config} config
* Configuration.
*
* @typedef {{mdastExtensions?: Array<Extension|Array<Extension>>}} FromMarkdownOptions
* @typedef FromMarkdownOptions
* Configuration for how to build mdast.
* @property {Array<Extension | Array<Extension>> | null | undefined} [mdastExtensions]
* Extensions for this utility to change how tokens are turned into a tree.
*
* @typedef {ParseOptions & FromMarkdownOptions} Options
* Configuration.
*/
// To do: micromark: create a registry of tokens?
// To do: next major: don’t return given `Node` from `enter`.
// To do: next major: remove setter/getter.
import {ok as assert} from 'uvu/assert'

@@ -106,5 +184,10 @@ import {toString} from 'mdast-util-to-string'

/**
* @param value Markdown to parse (`string` or `Buffer`).
* @param [encoding] Character encoding to understand `value` as when it’s a `Buffer` (`string`, default: `'utf8'`).
* @param [options] Configuration
* @param value
* Markdown to parse.
* @param encoding
* Character encoding for when `value` is `Buffer`.
* @param options
* Configuration.
* @returns
* mdast tree.
*/

@@ -114,4 +197,4 @@ export const fromMarkdown =

* @type {(
* ((value: Value, encoding: Encoding, options?: Options) => Root) &
* ((value: Value, options?: Options) => Root)
* ((value: Value, encoding: Encoding, options?: Options | null | undefined) => Root) &
* ((value: Value, options?: Options | null | undefined) => Root)
* )}

@@ -122,4 +205,4 @@ */

* @param {Value} value
* @param {Encoding} [encoding]
* @param {Options} [options]
* @param {Encoding | Options | null | undefined} [encoding]
* @param {Options | null | undefined} [options]
* @returns {Root}

@@ -135,2 +218,3 @@ */

postprocess(
// @ts-expect-error: micromark types need to accept `null`.
parse(options).document().write(preprocess()(value, encoding, true))

@@ -145,114 +229,106 @@ )

*
* @param {Options} [options]
* @param {Options | null | undefined} [options]
*/
function compiler(options = {}) {
/** @type {NormalizedExtension} */
// @ts-expect-error: our base has all required fields, so the result will too.
const config = configure(
{
transforms: [],
canContainEols: [
'emphasis',
'fragment',
'heading',
'paragraph',
'strong'
],
enter: {
autolink: opener(link),
autolinkProtocol: onenterdata,
autolinkEmail: onenterdata,
atxHeading: opener(heading),
blockQuote: opener(blockQuote),
characterEscape: onenterdata,
characterReference: onenterdata,
codeFenced: opener(codeFlow),
codeFencedFenceInfo: buffer,
codeFencedFenceMeta: buffer,
codeIndented: opener(codeFlow, buffer),
codeText: opener(codeText, buffer),
codeTextData: onenterdata,
data: onenterdata,
codeFlowValue: onenterdata,
definition: opener(definition),
definitionDestinationString: buffer,
definitionLabelString: buffer,
definitionTitleString: buffer,
emphasis: opener(emphasis),
hardBreakEscape: opener(hardBreak),
hardBreakTrailing: opener(hardBreak),
htmlFlow: opener(html, buffer),
htmlFlowData: onenterdata,
htmlText: opener(html, buffer),
htmlTextData: onenterdata,
image: opener(image),
label: buffer,
link: opener(link),
listItem: opener(listItem),
listItemValue: onenterlistitemvalue,
listOrdered: opener(list, onenterlistordered),
listUnordered: opener(list),
paragraph: opener(paragraph),
reference: onenterreference,
referenceString: buffer,
resourceDestinationString: buffer,
resourceTitleString: buffer,
setextHeading: opener(heading),
strong: opener(strong),
thematicBreak: opener(thematicBreak)
},
exit: {
atxHeading: closer(),
atxHeadingSequence: onexitatxheadingsequence,
autolink: closer(),
autolinkEmail: onexitautolinkemail,
autolinkProtocol: onexitautolinkprotocol,
blockQuote: closer(),
characterEscapeValue: onexitdata,
characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker,
characterReferenceMarkerNumeric: onexitcharacterreferencemarker,
characterReferenceValue: onexitcharacterreferencevalue,
codeFenced: closer(onexitcodefenced),
codeFencedFence: onexitcodefencedfence,
codeFencedFenceInfo: onexitcodefencedfenceinfo,
codeFencedFenceMeta: onexitcodefencedfencemeta,
codeFlowValue: onexitdata,
codeIndented: closer(onexitcodeindented),
codeText: closer(onexitcodetext),
codeTextData: onexitdata,
data: onexitdata,
definition: closer(),
definitionDestinationString: onexitdefinitiondestinationstring,
definitionLabelString: onexitdefinitionlabelstring,
definitionTitleString: onexitdefinitiontitlestring,
emphasis: closer(),
hardBreakEscape: closer(onexithardbreak),
hardBreakTrailing: closer(onexithardbreak),
htmlFlow: closer(onexithtmlflow),
htmlFlowData: onexitdata,
htmlText: closer(onexithtmltext),
htmlTextData: onexitdata,
image: closer(onexitimage),
label: onexitlabel,
labelText: onexitlabeltext,
lineEnding: onexitlineending,
link: closer(onexitlink),
listItem: closer(),
listOrdered: closer(),
listUnordered: closer(),
paragraph: closer(),
referenceString: onexitreferencestring,
resourceDestinationString: onexitresourcedestinationstring,
resourceTitleString: onexitresourcetitlestring,
resource: onexitresource,
setextHeading: closer(onexitsetextheading),
setextHeadingLineSequence: onexitsetextheadinglinesequence,
setextHeadingText: onexitsetextheadingtext,
strong: closer(),
thematicBreak: closer()
}
function compiler(options) {
/** @type {Config} */
const config = {
transforms: [],
canContainEols: ['emphasis', 'fragment', 'heading', 'paragraph', 'strong'],
enter: {
autolink: opener(link),
autolinkProtocol: onenterdata,
autolinkEmail: onenterdata,
atxHeading: opener(heading),
blockQuote: opener(blockQuote),
characterEscape: onenterdata,
characterReference: onenterdata,
codeFenced: opener(codeFlow),
codeFencedFenceInfo: buffer,
codeFencedFenceMeta: buffer,
codeIndented: opener(codeFlow, buffer),
codeText: opener(codeText, buffer),
codeTextData: onenterdata,
data: onenterdata,
codeFlowValue: onenterdata,
definition: opener(definition),
definitionDestinationString: buffer,
definitionLabelString: buffer,
definitionTitleString: buffer,
emphasis: opener(emphasis),
hardBreakEscape: opener(hardBreak),
hardBreakTrailing: opener(hardBreak),
htmlFlow: opener(html, buffer),
htmlFlowData: onenterdata,
htmlText: opener(html, buffer),
htmlTextData: onenterdata,
image: opener(image),
label: buffer,
link: opener(link),
listItem: opener(listItem),
listItemValue: onenterlistitemvalue,
listOrdered: opener(list, onenterlistordered),
listUnordered: opener(list),
paragraph: opener(paragraph),
reference: onenterreference,
referenceString: buffer,
resourceDestinationString: buffer,
resourceTitleString: buffer,
setextHeading: opener(heading),
strong: opener(strong),
thematicBreak: opener(thematicBreak)
},
options.mdastExtensions || []
)
exit: {
atxHeading: closer(),
atxHeadingSequence: onexitatxheadingsequence,
autolink: closer(),
autolinkEmail: onexitautolinkemail,
autolinkProtocol: onexitautolinkprotocol,
blockQuote: closer(),
characterEscapeValue: onexitdata,
characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker,
characterReferenceMarkerNumeric: onexitcharacterreferencemarker,
characterReferenceValue: onexitcharacterreferencevalue,
codeFenced: closer(onexitcodefenced),
codeFencedFence: onexitcodefencedfence,
codeFencedFenceInfo: onexitcodefencedfenceinfo,
codeFencedFenceMeta: onexitcodefencedfencemeta,
codeFlowValue: onexitdata,
codeIndented: closer(onexitcodeindented),
codeText: closer(onexitcodetext),
codeTextData: onexitdata,
data: onexitdata,
definition: closer(),
definitionDestinationString: onexitdefinitiondestinationstring,
definitionLabelString: onexitdefinitionlabelstring,
definitionTitleString: onexitdefinitiontitlestring,
emphasis: closer(),
hardBreakEscape: closer(onexithardbreak),
hardBreakTrailing: closer(onexithardbreak),
htmlFlow: closer(onexithtmlflow),
htmlFlowData: onexitdata,
htmlText: closer(onexithtmltext),
htmlTextData: onexitdata,
image: closer(onexitimage),
label: onexitlabel,
labelText: onexitlabeltext,
lineEnding: onexitlineending,
link: closer(onexitlink),
listItem: closer(),
listOrdered: closer(),
listUnordered: closer(),
paragraph: closer(),
referenceString: onexitreferencestring,
resourceDestinationString: onexitresourcedestinationstring,
resourceTitleString: onexitresourcetitlestring,
resource: onexitresource,
setextHeading: closer(onexitsetextheading),
setextHeadingLineSequence: onexitsetextheadinglinesequence,
setextHeadingText: onexitsetextheadingtext,
strong: closer(),
thematicBreak: closer()
}
}
configure(config, (options || {}).mdastExtensions || [])
/** @type {CompileData} */

@@ -264,4 +340,8 @@ const data = {}

/**
* Turn micromark events into an mdast tree.
*
* @param {Array<Event>} events
* Events.
* @returns {Root}
* mdast tree.
*/

@@ -271,12 +351,6 @@ function compile(events) {

let tree = {type: 'root', children: []}
/** @type {CompileContext['stack']} */
const stack = [tree]
/** @type {CompileContext['tokenStack']} */
const tokenStack = []
/** @type {Array<number>} */
const listStack = []
/** @type {Omit<CompileContext, 'sliceSerialize'>} */
const context = {
stack,
tokenStack,
stack: [tree],
tokenStack: [],
config,

@@ -290,2 +364,4 @@ enter,

}
/** @type {Array<number>} */
const listStack = []
let index = -1

@@ -326,4 +402,5 @@

if (tokenStack.length > 0) {
const tail = tokenStack[tokenStack.length - 1]
// Handle tokens still being open.
if (context.tokenStack.length > 0) {
const tail = context.tokenStack[context.tokenStack.length - 1]
const handler = tail[1] || defaultOnError

@@ -345,2 +422,3 @@ handler.call(context, undefined, tail[0])

// Call transforms.
index = -1

@@ -364,9 +442,9 @@ while (++index < config.transforms.length) {

let listSpread = false
/** @type {Token|undefined} */
/** @type {Token | undefined} */
let listItem
/** @type {number|undefined} */
/** @type {number | undefined} */
let lineIndex
/** @type {number|undefined} */
/** @type {number | undefined} */
let firstBlankLineIndex
/** @type {boolean|undefined} */
/** @type {boolean | undefined} */
let atMarker

@@ -499,4 +577,12 @@

/**
* @type {CompileContext['setData']}
* @param [value]
* Set data.
*
* @template {keyof CompileData} Key
* Field type.
* @param {Key} key
* Key of field.
* @param {CompileData[Key]} [value]
* New value.
* @returns {void}
* Nothing.
*/

@@ -508,6 +594,10 @@ function setData(key, value) {

/**
* @type {CompileContext['getData']}
* @template {string} K
* @param {K} key
* @returns {CompileData[K]}
* Get data.
*
* @template {keyof CompileData} Key
* Field type.
* @param {Key} key
* Key of field.
* @returns {CompileData[Key]}
* Value.
*/

@@ -519,13 +609,10 @@ function getData(key) {

/**
* @param {Point} d
* @returns {Point}
*/
function point(d) {
return {line: d.line, column: d.column, offset: d.offset}
}
/**
* Create an opener handle.
*
* @param {(token: Token) => Node} create
* Create a node.
* @param {Handle} [and]
* Optional function to also run.
* @returns {Handle}
* Handle.
*/
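A rough sketch of the handles produced by the `opener` factory above and the `closer` factory shown further below (simplified; the real implementations also forward the optional error handlers):

function opener(create, and) {
  return function (token) {
    this.enter(create(token), token)
    if (and) and.call(this, token)
  }
}

function closer(and) {
  return function (token) {
    if (and) and.call(this, token)
    this.exit(token)
  }
}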

@@ -546,3 +633,6 @@ function opener(create, and) {

/** @type {CompileContext['buffer']} */
/**
* @this {CompileContext}
* @returns {void}
*/
function buffer() {

@@ -553,9 +643,14 @@ this.stack.push({type: 'fragment', children: []})

/**
* @type {CompileContext['enter']}
* @template {Node} N
* @template {Node} Kind
* Node type.
* @this {CompileContext}
* @param {N} node
* Context.
* @param {Kind} node
* Node to enter.
* @param {Token} token
* @param {OnEnterError} [errorHandler]
* @returns {N}
* Corresponding token.
* @param {OnEnterError | undefined} [errorHandler]
* Handle the case where this token is open, but it is closed by something else.
* @returns {Kind}
* The given node.
*/

@@ -576,4 +671,8 @@ function enter(node, token, errorHandler) {

/**
* Create a closer handle.
*
* @param {Handle} [and]
* Optional function to also run.
* @returns {Handle}
* Handle.
*/

@@ -595,7 +694,10 @@ function closer(and) {

/**
* @type {CompileContext['exit']}
* @this {CompileContext}
* Context.
* @param {Token} token
* @param {OnExitError} [onExitError]
* Corresponding token.
* @param {OnExitError | undefined} [onExitError]
* Handle the case where another token is open.
* @returns {Node}
* The closed node.
*/

@@ -642,3 +744,6 @@ function exit(token, onExitError) {

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onenterlistordered() {

@@ -648,6 +753,11 @@ setData('expectingFirstListItemValue', true)

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onenterlistitemvalue(token) {
if (getData('expectingFirstListItemValue')) {
const ancestor = /** @type {List} */ (this.stack[this.stack.length - 2])
const ancestor = this.stack[this.stack.length - 2]
assert(ancestor, 'expected nodes on stack')
assert(ancestor.type === 'list', 'expected list on stack')
ancestor.start = Number.parseInt(

@@ -661,17 +771,30 @@ this.sliceSerialize(token),

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodefencedfenceinfo() {
const data = this.resume()
const node = /** @type {Code} */ (this.stack[this.stack.length - 1])
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(node.type === 'code', 'expected code on stack')
node.lang = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodefencedfencemeta() {
const data = this.resume()
const node = /** @type {Code} */ (this.stack[this.stack.length - 1])
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(node.type === 'code', 'expected code on stack')
node.meta = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodefencedfence() {

@@ -684,16 +807,25 @@ // Exit if this is the closing fence.

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodefenced() {
const data = this.resume()
const node = /** @type {Code} */ (this.stack[this.stack.length - 1])
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(node.type === 'code', 'expected code on stack')
node.value = data.replace(/^(\r?\n|\r)|(\r?\n|\r)$/g, '')
setData('flowCodeInside')
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodeindented() {
const data = this.resume()
const node = /** @type {Code} */ (this.stack[this.stack.length - 1])
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(node.type === 'code', 'expected code on stack')

@@ -703,7 +835,12 @@ node.value = data.replace(/(\r?\n|\r)$/g, '')

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitdefinitionlabelstring(token) {
// Discard label, use the source content instead.
const label = this.resume()
const node = /** @type {Definition} */ (this.stack[this.stack.length - 1])
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(node.type === 'definition', 'expected definition on stack')
node.label = label

@@ -715,19 +852,37 @@ node.identifier = normalizeIdentifier(

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitdefinitiontitlestring() {
const data = this.resume()
const node = /** @type {Definition} */ (this.stack[this.stack.length - 1])
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(node.type === 'definition', 'expected definition on stack')
node.title = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitdefinitiondestinationstring() {
const data = this.resume()
const node = /** @type {Definition} */ (this.stack[this.stack.length - 1])
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(node.type === 'definition', 'expected definition on stack')
node.url = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitatxheadingsequence(token) {
const node = /** @type {Heading} */ (this.stack[this.stack.length - 1])
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(node.type === 'heading', 'expected heading on stack')
if (!node.depth) {

@@ -750,3 +905,6 @@ const depth = this.sliceSerialize(token).length

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitsetextheadingtext() {

@@ -756,5 +914,10 @@ setData('setextHeadingSlurpLineEnding', true)

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitsetextheadinglinesequence(token) {
const node = /** @type {Heading} */ (this.stack[this.stack.length - 1])
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(node.type === 'heading', 'expected heading on stack')

@@ -765,3 +928,6 @@ node.depth =

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitsetextheading() {

@@ -771,8 +937,14 @@ setData('setextHeadingSlurpLineEnding')

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onenterdata(token) {
const parent = /** @type {Parent} */ (this.stack[this.stack.length - 1])
/** @type {Node} */
let tail = parent.children[parent.children.length - 1]
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert('children' in node, 'expected parent on stack')
let tail = node.children[node.children.length - 1]
if (!tail || tail.type !== 'text') {

@@ -784,3 +956,3 @@ // Add a new text node.

// @ts-expect-error: Assume `parent` accepts `text`.
parent.children.push(tail)
node.children.push(tail)
}

@@ -791,3 +963,7 @@

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitdata(token) {

@@ -802,3 +978,7 @@ const tail = this.stack.pop()

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitlineending(token) {

@@ -827,3 +1007,7 @@ const context = this.stack[this.stack.length - 1]

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexithardbreak() {

@@ -833,42 +1017,73 @@ setData('atHardBreak', true)

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexithtmlflow() {
const data = this.resume()
const node = /** @type {HTML} */ (this.stack[this.stack.length - 1])
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(node.type === 'html', 'expected html on stack')
node.value = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexithtmltext() {
const data = this.resume()
const node = /** @type {HTML} */ (this.stack[this.stack.length - 1])
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(node.type === 'html', 'expected html on stack')
node.value = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodetext() {
const data = this.resume()
const node = /** @type {InlineCode} */ (this.stack[this.stack.length - 1])
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(node.type === 'inlineCode', 'expected inline code on stack')
node.value = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitlink() {
const context = /** @type {Link & {identifier: string, label: string}} */ (
this.stack[this.stack.length - 1]
)
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(node.type === 'link', 'expected link on stack')
// Note: there are also `identifier` and `label` fields on this link node!
// These are used / cleaned here.
// To do: clean.
if (getData('inReference')) {
context.type += 'Reference'
/** @type {ReferenceType} */
const referenceType = getData('referenceType') || 'shortcut'
node.type += 'Reference'
// @ts-expect-error: mutate.
context.referenceType = getData('referenceType') || 'shortcut'
node.referenceType = referenceType
// @ts-expect-error: mutate.
delete context.url
delete context.title
delete node.url
delete node.title
} else {
// @ts-expect-error: mutate.
delete context.identifier
delete node.identifier
// @ts-expect-error: mutate.
delete context.label
delete node.label
}
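For context on the reference branch above: when `inReference` is set, the plain `link` node is rewritten into a `linkReference`, with the reference type defaulting to `'shortcut'`. For example, `fromMarkdown('[alpha][bravo]\n\n[bravo]: https://example.com')` should yield, inside the first paragraph, roughly this node (positions omitted; shape per the mdast spec, not verified against this exact version):

{
  type: 'linkReference',
  identifier: 'bravo',
  label: 'bravo',
  referenceType: 'full',
  children: [{type: 'text', value: 'alpha'}]
}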

@@ -879,21 +1094,31 @@

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitimage() {
const context = /** @type {Image & {identifier: string, label: string}} */ (
this.stack[this.stack.length - 1]
)
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(node.type === 'image', 'expected image on stack')
// Note: there are also `identifier` and `label` fields on this link node!
// These are used / cleaned here.
// To do: clean.
if (getData('inReference')) {
context.type += 'Reference'
/** @type {ReferenceType} */
const referenceType = getData('referenceType') || 'shortcut'
node.type += 'Reference'
// @ts-expect-error: mutate.
context.referenceType = getData('referenceType') || 'shortcut'
node.referenceType = referenceType
// @ts-expect-error: mutate.
delete context.url
delete context.title
delete node.url
delete node.title
} else {
// @ts-expect-error: mutate.
delete context.identifier
delete node.identifier
// @ts-expect-error: mutate.
delete context.label
delete node.label
}

@@ -904,22 +1129,39 @@

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitlabeltext(token) {
const ancestor =
/** @type {(Link|Image) & {identifier: string, label: string}} */ (
this.stack[this.stack.length - 2]
)
const string = this.sliceSerialize(token)
const ancestor = this.stack[this.stack.length - 2]
assert(ancestor, 'expected ancestor on stack')
assert(
ancestor.type === 'image' || ancestor.type === 'link',
'expected image or link on stack'
)
// @ts-expect-error: stash this on the node, as it might become a reference
// later.
ancestor.label = decodeString(string)
// @ts-expect-error: same as above.
ancestor.identifier = normalizeIdentifier(string).toLowerCase()
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitlabel() {
const fragment = /** @type {Fragment} */ (this.stack[this.stack.length - 1])
const fragment = this.stack[this.stack.length - 1]
assert(fragment, 'expected node on stack')
assert(fragment.type === 'fragment', 'expected fragment on stack')
const value = this.resume()
const node =
/** @type {(Link|Image) & {identifier: string, label: string}} */ (
this.stack[this.stack.length - 1]
)
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(
node.type === 'image' || node.type === 'link',
'expected image or link on stack'
)

@@ -930,4 +1172,7 @@ // Assume a reference.

if (node.type === 'link') {
/** @type {Array<StaticPhrasingContent>} */
// @ts-expect-error: Assume static phrasing content.
node.children = fragment.children
const children = fragment.children
node.children = children
} else {

@@ -938,17 +1183,39 @@ node.alt = value

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitresourcedestinationstring() {
const data = this.resume()
const node = /** @type {Link|Image} */ (this.stack[this.stack.length - 1])
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(
node.type === 'image' || node.type === 'link',
'expected image or link on stack'
)
node.url = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitresourcetitlestring() {
const data = this.resume()
const node = /** @type {Link|Image} */ (this.stack[this.stack.length - 1])
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(
node.type === 'image' || node.type === 'link',
'expected image or link on stack'
)
node.title = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitresource() {

@@ -958,3 +1225,7 @@ setData('inReference')

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onenterreference() {

@@ -964,9 +1235,20 @@ setData('referenceType', 'collapsed')

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitreferencestring(token) {
const label = this.resume()
const node = /** @type {LinkReference|ImageReference} */ (
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(
node.type === 'image' || node.type === 'link',
'expected image reference or link reference on stack'
)
// @ts-expect-error: stash this on the node, as it might become a reference
// later.
node.label = label
// @ts-expect-error: same as above.
node.identifier = normalizeIdentifier(

@@ -978,8 +1260,19 @@ this.sliceSerialize(token)

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcharacterreferencemarker(token) {
assert(
token.type === 'characterReferenceMarkerNumeric' ||
token.type === 'characterReferenceMarkerHexadecimal'
)
setData('characterReferenceType', token.type)
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcharacterreferencevalue(token) {

@@ -1000,6 +1293,5 @@ const data = this.sliceSerialize(token)

} else {
// @ts-expect-error `decodeNamedCharacterReference` can return false for
// invalid named character references, but everything we’ve tokenized is
// valid.
value = decodeNamedCharacterReference(data)
const result = decodeNamedCharacterReference(data)
assert(result !== false, 'expected reference to decode')
value = result
}
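A small illustration of the library call asserted on above. `decodeNamedCharacterReference` comes from the `decode-named-character-reference` package and returns the decoded string, or `false` for unknown names, which is what the new assertion guards against:

decodeNamedCharacterReference('amp')  // => '&'
decodeNamedCharacterReference('copy') // => '©'
decodeNamedCharacterReference('nope') // => false (never expected here, hence the assert)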

@@ -1015,13 +1307,25 @@

/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitautolinkprotocol(token) {
onexitdata.call(this, token)
const node = /** @type {Link} */ (this.stack[this.stack.length - 1])
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(node.type === 'link', 'expected link on stack')
node.url = this.sliceSerialize(token)
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitautolinkemail(token) {
onexitdata.call(this, token)
const node = /** @type {Link} */ (this.stack[this.stack.length - 1])
const node = this.stack[this.stack.length - 1]
assert(node, 'expected node on stack')
assert(node.type === 'link', 'expected link on stack')
node.url = 'mailto:' + this.sliceSerialize(token)

@@ -1142,6 +1446,18 @@ }

/**
* @param {Extension} combined
* @param {Array<Extension|Array<Extension>>} extensions
* @returns {Extension}
* Copy a point-like value.
*
* @param {Point} d
* Point-like value.
* @returns {Point}
* unist point.
*/
function point(d) {
return {line: d.line, column: d.column, offset: d.offset}
}
/**
* @param {Config} combined
* @param {Array<Extension | Array<Extension>>} extensions
* @returns {void}
*/
function configure(combined, extensions) {

@@ -1159,8 +1475,6 @@ let index = -1

}
return combined
}
/**
* @param {Extension} combined
* @param {Config} combined
* @param {Extension} extension

@@ -1170,3 +1484,3 @@ * @returns {void}

function extension(combined, extension) {
/** @type {string} */
/** @type {keyof Extension} */
let key

@@ -1176,15 +1490,17 @@

if (own.call(extension, key)) {
const list = key === 'canContainEols' || key === 'transforms'
const maybe = own.call(combined, key) ? combined[key] : undefined
/* c8 ignore next */
const left = maybe || (combined[key] = list ? [] : {})
const right = extension[key]
if (right) {
if (list) {
// @ts-expect-error: `left` is an array.
combined[key] = [...left, ...right]
} else {
Object.assign(left, right)
if (key === 'canContainEols') {
const right = extension[key]
if (right) {
combined[key].push(...right)
}
} else if (key === 'transforms') {
const right = extension[key]
if (right) {
combined[key].push(...right)
}
} else if (key === 'enter' || key === 'exit') {
const right = extension[key]
if (right) {
Object.assign(combined[key], right)
}
}
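The rewritten merge keeps the same observable behaviour as before: list-like fields are concatenated and handle maps are shallowly merged. Roughly, for each extension folded into `combined`:

combined.canContainEols.push(...(extension.canContainEols || []))
combined.transforms.push(...(extension.transforms || []))
Object.assign(combined.enter, extension.enter || {})
Object.assign(combined.exit, extension.exit || {})
// Later extensions win when `enter`/`exit` keys clash.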

@@ -1191,0 +1507,0 @@ }

export {fromMarkdown} from './lib/index.js'
export type Value = import('./lib/index.js').Value
export type CompileContext = import('./lib/index.js').CompileContext
export type Encoding = import('./lib/index.js').Encoding
export type Options = import('./lib/index.js').Options
export type Extension = import('./lib/index.js').Extension
export type Handle = import('./lib/index.js').Handle
export type Transform = import('./lib/index.js').Transform
export type Token = import('./lib/index.js').Token
export type CompileContext = import('./lib/index.js').CompileContext
export type OnEnterError = import('./lib/index.js').OnEnterError
export type OnExitError = import('./lib/index.js').OnExitError
export type Options = import('./lib/index.js').Options
export type Token = import('./lib/index.js').Token
export type Transform = import('./lib/index.js').Transform
export type Value = import('./lib/index.js').Value
/**
* To do: deprecate next major.
* To do: next major: remove.
*/
export type OnError = import('./lib/index.js').OnEnterError
/**
* @typedef {import('./lib/index.js').Value} Value
* @typedef {import('./lib/index.js').CompileContext} CompileContext
* @typedef {import('./lib/index.js').Encoding} Encoding
* @typedef {import('./lib/index.js').Options} Options
* @typedef {import('./lib/index.js').Extension} Extension
* @typedef {import('./lib/index.js').Handle} Handle
* @typedef {import('./lib/index.js').Transform} Transform
* @typedef {import('./lib/index.js').Token} Token
* @typedef {import('./lib/index.js').CompileContext} CompileContext
* @typedef {import('./lib/index.js').OnEnterError} OnEnterError
* @typedef {import('./lib/index.js').OnExitError} OnExitError
*
* @typedef {import('./lib/index.js').Options} Options
* @typedef {import('./lib/index.js').Token} Token
* @typedef {import('./lib/index.js').Transform} Transform
* @typedef {import('./lib/index.js').Value} Value
*/
/**
* @typedef {import('./lib/index.js').OnEnterError} OnError
* To do: deprecate next major.
* To do: next major: remove.
*/
export {fromMarkdown} from './lib/index.js'
/**
* @param value Markdown to parse (`string` or `Buffer`).
* @param [encoding] Character encoding to understand `value` as when it’s a `Buffer` (`string`, default: `'utf8'`).
* @param [options] Configuration
* @param value
* Markdown to parse.
* @param encoding
* Character encoding for when `value` is `Buffer`.
* @param options
* Configuration.
* @returns
* mdast tree.
*/

@@ -9,5 +14,5 @@ export const fromMarkdown: ((

encoding: Encoding,
options?: Options | undefined
options?: Options | null | undefined
) => Root) &
((value: Value, options?: Options | undefined) => Root)
((value: Value, options?: Options | null | undefined) => Root)
export type Encoding = import('micromark-util-types').Encoding

@@ -22,5 +27,4 @@ export type Event = import('micromark-util-types').Event

export type PhrasingContent = import('mdast').PhrasingContent
export type StaticPhrasingContent = import('mdast').StaticPhrasingContent
export type Content = import('mdast').Content
export type Node = Root | Content
export type Parent = Extract<Node, UnistParent>
export type Break = import('mdast').Break

@@ -45,19 +49,52 @@ export type Blockquote = import('mdast').Blockquote

export type ThematicBreak = import('mdast').ThematicBreak
export type Fragment = UnistParent & {
export type ReferenceType = import('mdast').ReferenceType
export type Node = Root | Content
export type Parent = Extract<Node, UnistParent>
export type Fragment = Omit<UnistParent, 'type' | 'children'> & {
type: 'fragment'
children: Array<PhrasingContent>
}
export type _CompileDataFields = {
expectingFirstListItemValue: boolean | undefined
flowCodeInside: boolean | undefined
setextHeadingSlurpLineEnding: boolean | undefined
atHardBreak: boolean | undefined
referenceType: 'collapsed' | 'full'
inReference: boolean | undefined
characterReferenceType:
/**
* State.
*/
export type CompileData = {
/**
* Whether we’re inside a hard break.
*/
atHardBreak?: boolean | undefined
/**
* Current character reference type.
*/
characterReferenceType?:
| 'characterReferenceMarkerHexadecimal'
| 'characterReferenceMarkerNumeric'
| undefined
/**
* Whether a first list item value (`1` in `1. a`) is expected.
*/
expectingFirstListItemValue?: boolean | undefined
/**
* Whether we’re in flow code.
*/
flowCodeInside?: boolean | undefined
/**
* Whether we’re in a reference.
*/
inReference?: boolean | undefined
/**
* Whether we’re expecting a line ending from a setext heading, which can be slurped.
*/
setextHeadingSlurpLineEnding?: boolean | undefined
/**
* Current reference.
*/
referenceType?: 'collapsed' | 'full' | undefined
}
export type CompileData = Record<string, unknown> & Partial<_CompileDataFields>
export type Transform = (tree: Root) => Root | void
/**
* Extra transform, to change the AST afterwards.
*/
export type Transform = (tree: Root) => Root | undefined | null | void
/**
* Handle a token.
*/
export type Handle = (this: CompileContext, token: Token) => void

@@ -68,15 +105,6 @@ /**

export type Handles = Record<string, Handle>
export type NormalizedExtension = Record<
string,
Record<string, unknown> | Array<unknown>
> & {
canContainEols: Array<string>
transforms: Array<Transform>
enter: Handles
exit: Handles
}
/**
* An mdast extension changes how markdown tokens are turned into mdast.
* Handle the case where the `right` token is open, but it is closed (by the
* `left` token) or because we reached the end of the document.
*/
export type Extension = Partial<NormalizedExtension>
export type OnEnterError = (

@@ -87,2 +115,6 @@ this: Omit<CompileContext, 'sliceSerialize'>,

) => void
/**
* Handle the case where the `right` token is open but it is closed by
* exiting the `left` token.
*/
export type OnExitError = (

@@ -94,52 +126,98 @@ this: Omit<CompileContext, 'sliceSerialize'>,

/**
* mdast compiler context
* Open token on the stack, with an optional error handler for when
* that token isn’t closed properly.
*/
export type TokenTuple = [Token, OnEnterError | undefined]
/**
* Configuration.
*
* We have our defaults, but extensions will add more.
*/
export type Config = {
/**
* Token types where line endings are used.
*/
canContainEols: Array<string>
/**
* Opening handles.
*/
enter: Handles
/**
* Closing handles.
*/
exit: Handles
/**
* Tree transforms.
*/
transforms: Array<Transform>
}
/**
* Change how markdown tokens from micromark are turned into mdast.
*/
export type Extension = Partial<Config>
/**
* mdast compiler context.
*/
export type CompileContext = {
/**
* Stack of nodes.
*/
stack: Array<Node | Fragment>
tokenStack: Array<[Token, OnEnterError | undefined]>
/**
* Set data into the key-value store.
* Stack of tokens.
*/
setData: (key: string, value?: unknown) => void
tokenStack: Array<TokenTuple>
/**
* Get data from the key-value store.
* Get data from the key/value store.
*/
getData: <K extends string>(key: K) => CompileData[K]
getData: <Key extends keyof CompileData>(key: Key) => CompileData[Key]
/**
* Capture some of the output data.
* Set data into the key/value store.
*/
setData: <Key_1 extends keyof CompileData>(
key: Key_1,
value?: CompileData[Key_1] | undefined
) => void
/**
* Capture some of the output data.
*/
buffer: (this: CompileContext) => void
/**
* Stop capturing and access the output data.
* Stop capturing and access the output data.
*/
resume: (this: CompileContext) => string
/**
* Enter a token.
* Enter a token.
*/
enter: <N extends Node>(
enter: <Kind extends Node>(
this: CompileContext,
node: N,
node: Kind,
token: Token,
onError?: OnEnterError | undefined
) => N
onError?: OnEnterError
) => Kind
/**
* Exit a token.
* Exit a token.
*/
exit: (
this: CompileContext,
token: Token,
onError?: OnExitError | undefined
) => Node
exit: (this: CompileContext, token: Token, onError?: OnExitError) => Node
/**
* Get the string value of a token.
* Get the string value of a token.
*/
sliceSerialize: TokenizeContext['sliceSerialize']
/**
* Configuration.
* Configuration.
*/
config: NormalizedExtension
config: Config
}
/**
* Configuration for how to build mdast.
*/
export type FromMarkdownOptions = {
mdastExtensions?: Array<Extension | Array<Extension>>
/**
* Extensions for this utility to change how tokens are turned into a tree.
*/
mdastExtensions?: Array<Extension | Array<Extension>> | null | undefined
}
/**
* Configuration.
*/
export type Options = ParseOptions & FromMarkdownOptions

@@ -8,8 +8,9 @@ /**

* @typedef {import('micromark-util-types').Value} Value
*
* @typedef {import('unist').Parent} UnistParent
* @typedef {import('unist').Point} Point
*
* @typedef {import('mdast').PhrasingContent} PhrasingContent
* @typedef {import('mdast').StaticPhrasingContent} StaticPhrasingContent
* @typedef {import('mdast').Content} Content
* @typedef {Root|Content} Node
* @typedef {Extract<Node, UnistParent>} Parent
* @typedef {import('mdast').Break} Break

@@ -34,37 +35,105 @@ * @typedef {import('mdast').Blockquote} Blockquote

* @typedef {import('mdast').ThematicBreak} ThematicBreak
* @typedef {import('mdast').ReferenceType} ReferenceType
*/
/**
* @typedef {Root | Content} Node
* @typedef {Extract<Node, UnistParent>} Parent
*
* @typedef {UnistParent & {type: 'fragment', children: Array<PhrasingContent>}} Fragment
* @typedef {Omit<UnistParent, 'type' | 'children'> & {type: 'fragment', children: Array<PhrasingContent>}} Fragment
*/
/**
* @typedef _CompileDataFields
* @property {boolean|undefined} expectingFirstListItemValue
* @property {boolean|undefined} flowCodeInside
* @property {boolean|undefined} setextHeadingSlurpLineEnding
* @property {boolean|undefined} atHardBreak
* @property {'collapsed'|'full'} referenceType
* @property {boolean|undefined} inReference
* @property {'characterReferenceMarkerHexadecimal'|'characterReferenceMarkerNumeric'} characterReferenceType
* @typedef CompileData
* State.
* @property {boolean | undefined} [atHardBreak]
* Whether we’re inside a hard break.
* @property {'characterReferenceMarkerHexadecimal' | 'characterReferenceMarkerNumeric' | undefined} [characterReferenceType]
* Current character reference type.
* @property {boolean | undefined} [expectingFirstListItemValue]
* Whether a first list item value (`1` in `1. a`) is expected.
* @property {boolean | undefined} [flowCodeInside]
* Whether we’re in flow code.
* @property {boolean | undefined} [inReference]
* Whether we’re in a reference.
* @property {boolean | undefined} [setextHeadingSlurpLineEnding]
* Whether we’re expecting a line ending from a setext heading, which can be slurped.
* @property {'collapsed' | 'full' | undefined} [referenceType]
* Current reference.
*
* @typedef {Record<string, unknown> & Partial<_CompileDataFields>} CompileData
* @callback Transform
* Extra transform, to change the AST afterwards.
* @param {Root} tree
* Tree to transform.
* @returns {Root | undefined | null | void}
* New tree or nothing (in which case the current tree is used).
*
* @typedef {(tree: Root) => Root|void} Transform
* @typedef {(this: CompileContext, token: Token) => void} Handle
* @callback Handle
* Handle a token.
* @param {CompileContext} this
* Context.
* @param {Token} token
* Current token.
* @returns {void}
* Nothing.
*
* @typedef {Record<string, Handle>} Handles
* Token types mapping to handles
* @typedef {Record<string, Record<string, unknown>|Array<unknown>> & {canContainEols: Array<string>, transforms: Array<Transform>, enter: Handles, exit: Handles}} NormalizedExtension
* @typedef {Partial<NormalizedExtension>} Extension
* An mdast extension changes how markdown tokens are turned into mdast.
*
* @typedef {(this: Omit<CompileContext, 'sliceSerialize'>, left: Token|undefined, right: Token) => void} OnEnterError
* @typedef {(this: Omit<CompileContext, 'sliceSerialize'>, left: Token, right: Token) => void} OnExitError
* @callback OnEnterError
* Handle the case where the `right` token is open, but it is closed (by the
* `left` token) or because we reached the end of the document.
* @param {Omit<CompileContext, 'sliceSerialize'>} this
* Context.
* @param {Token | undefined} left
* Left token.
* @param {Token} right
* Right token.
* @returns {void}
* Nothing.
*
* @callback OnExitError
* Handle the case where the `right` token is open but it is closed by
* exiting the `left` token.
* @param {Omit<CompileContext, 'sliceSerialize'>} this
* Context.
* @param {Token} left
* Left token.
* @param {Token} right
* Right token.
* @returns {void}
* Nothing.
*
* @typedef {[Token, OnEnterError | undefined]} TokenTuple
* Open token on the stack, with an optional error handler for when
* that token isn’t closed properly.
*/
/**
* @typedef Config
* Configuration.
*
* We have our defaults, but extensions will add more.
* @property {Array<string>} canContainEols
* Token types where line endings are used.
* @property {Handles} enter
* Opening handles.
* @property {Handles} exit
* Closing handles.
* @property {Array<Transform>} transforms
* Tree transforms.
*
* @typedef {Partial<Config>} Extension
* Change how markdown tokens from micromark are turned into mdast.
*
* @typedef CompileContext
* mdast compiler context
* mdast compiler context.
* @property {Array<Node | Fragment>} stack
* @property {Array<[Token, OnEnterError|undefined]>} tokenStack
* @property {(key: string, value?: unknown) => void} setData
* Set data into the key-value store.
* @property {<K extends string>(key: K) => CompileData[K]} getData
* Get data from the key-value store.
* Stack of nodes.
* @property {Array<TokenTuple>} tokenStack
* Stack of tokens.
* @property {<Key extends keyof CompileData>(key: Key) => CompileData[Key]} getData
* Get data from the key/value store.
* @property {<Key extends keyof CompileData>(key: Key, value?: CompileData[Key]) => void} setData
* Set data into the key/value store.
* @property {(this: CompileContext) => void} buffer

@@ -74,3 +143,3 @@ * Capture some of the output data.

* Stop capturing and access the output data.
* @property {<N extends Node>(this: CompileContext, node: N, token: Token, onError?: OnEnterError) => N} enter
* @property {<Kind extends Node>(this: CompileContext, node: Kind, token: Token, onError?: OnEnterError) => Kind} enter
* Enter a token.

@@ -81,8 +150,18 @@ * @property {(this: CompileContext, token: Token, onError?: OnExitError) => Node} exit

* Get the string value of a token.
* @property {NormalizedExtension} config
* @property {Config} config
* Configuration.
*
* @typedef {{mdastExtensions?: Array<Extension|Array<Extension>>}} FromMarkdownOptions
* @typedef FromMarkdownOptions
* Configuration for how to build mdast.
* @property {Array<Extension | Array<Extension>> | null | undefined} [mdastExtensions]
* Extensions for this utility to change how tokens are turned into a tree.
*
* @typedef {ParseOptions & FromMarkdownOptions} Options
* Configuration.
*/
// To do: micromark: create a registry of tokens?
// To do: next major: don’t return given `Node` from `enter`.
// To do: next major: remove setter/getter.
import {toString} from 'mdast-util-to-string'

@@ -98,13 +177,18 @@ import {parse} from 'micromark/lib/parse.js'

const own = {}.hasOwnProperty
/**
* @param value Markdown to parse (`string` or `Buffer`).
* @param [encoding] Character encoding to understand `value` as when it’s a `Buffer` (`string`, default: `'utf8'`).
* @param [options] Configuration
* @param value
* Markdown to parse.
* @param encoding
* Character encoding for when `value` is `Buffer`.
* @param options
* Configuration.
* @returns
* mdast tree.
*/
export const fromMarkdown =
/**
* @type {(
* ((value: Value, encoding: Encoding, options?: Options) => Root) &
* ((value: Value, options?: Options) => Root)
* ((value: Value, encoding: Encoding, options?: Options | null | undefined) => Root) &
* ((value: Value, options?: Options | null | undefined) => Root)
* )}

@@ -115,4 +199,4 @@ */

* @param {Value} value
* @param {Encoding} [encoding]
* @param {Options} [options]
* @param {Encoding | Options | null | undefined} [encoding]
* @param {Options | null | undefined} [options]
* @returns {Root}

@@ -125,5 +209,5 @@ */

}
return compiler(options)(
postprocess(
// @ts-expect-error: micromark types need to accept `null`.
parse(options).document().write(preprocess()(value, encoding, true))

@@ -133,126 +217,121 @@ )

}
/**
 * Note this compiler only understands complete buffering, not streaming.
*
* @param {Options} [options]
* @param {Options | null | undefined} [options]
*/
function compiler(options) {
/** @type {Config} */
const config = {
transforms: [],
canContainEols: ['emphasis', 'fragment', 'heading', 'paragraph', 'strong'],
enter: {
autolink: opener(link),
autolinkProtocol: onenterdata,
autolinkEmail: onenterdata,
atxHeading: opener(heading),
blockQuote: opener(blockQuote),
characterEscape: onenterdata,
characterReference: onenterdata,
codeFenced: opener(codeFlow),
codeFencedFenceInfo: buffer,
codeFencedFenceMeta: buffer,
codeIndented: opener(codeFlow, buffer),
codeText: opener(codeText, buffer),
codeTextData: onenterdata,
data: onenterdata,
codeFlowValue: onenterdata,
definition: opener(definition),
definitionDestinationString: buffer,
definitionLabelString: buffer,
definitionTitleString: buffer,
emphasis: opener(emphasis),
hardBreakEscape: opener(hardBreak),
hardBreakTrailing: opener(hardBreak),
htmlFlow: opener(html, buffer),
htmlFlowData: onenterdata,
htmlText: opener(html, buffer),
htmlTextData: onenterdata,
image: opener(image),
label: buffer,
link: opener(link),
listItem: opener(listItem),
listItemValue: onenterlistitemvalue,
listOrdered: opener(list, onenterlistordered),
listUnordered: opener(list),
paragraph: opener(paragraph),
reference: onenterreference,
referenceString: buffer,
resourceDestinationString: buffer,
resourceTitleString: buffer,
setextHeading: opener(heading),
strong: opener(strong),
thematicBreak: opener(thematicBreak)
},
exit: {
atxHeading: closer(),
atxHeadingSequence: onexitatxheadingsequence,
autolink: closer(),
autolinkEmail: onexitautolinkemail,
autolinkProtocol: onexitautolinkprotocol,
blockQuote: closer(),
characterEscapeValue: onexitdata,
characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker,
characterReferenceMarkerNumeric: onexitcharacterreferencemarker,
characterReferenceValue: onexitcharacterreferencevalue,
codeFenced: closer(onexitcodefenced),
codeFencedFence: onexitcodefencedfence,
codeFencedFenceInfo: onexitcodefencedfenceinfo,
codeFencedFenceMeta: onexitcodefencedfencemeta,
codeFlowValue: onexitdata,
codeIndented: closer(onexitcodeindented),
codeText: closer(onexitcodetext),
codeTextData: onexitdata,
data: onexitdata,
definition: closer(),
definitionDestinationString: onexitdefinitiondestinationstring,
definitionLabelString: onexitdefinitionlabelstring,
definitionTitleString: onexitdefinitiontitlestring,
emphasis: closer(),
hardBreakEscape: closer(onexithardbreak),
hardBreakTrailing: closer(onexithardbreak),
htmlFlow: closer(onexithtmlflow),
htmlFlowData: onexitdata,
htmlText: closer(onexithtmltext),
htmlTextData: onexitdata,
image: closer(onexitimage),
label: onexitlabel,
labelText: onexitlabeltext,
lineEnding: onexitlineending,
link: closer(onexitlink),
listItem: closer(),
listOrdered: closer(),
listUnordered: closer(),
paragraph: closer(),
referenceString: onexitreferencestring,
resourceDestinationString: onexitresourcedestinationstring,
resourceTitleString: onexitresourcetitlestring,
resource: onexitresource,
setextHeading: closer(onexitsetextheading),
setextHeadingLineSequence: onexitsetextheadinglinesequence,
setextHeadingText: onexitsetextheadingtext,
strong: closer(),
thematicBreak: closer()
}
}
configure(config, (options || {}).mdastExtensions || [])
function compiler(options = {}) {
/** @type {NormalizedExtension} */
// @ts-expect-error: our base has all required fields, so the result will too.
const config = configure(
{
transforms: [],
canContainEols: [
'emphasis',
'fragment',
'heading',
'paragraph',
'strong'
],
enter: {
autolink: opener(link),
autolinkProtocol: onenterdata,
autolinkEmail: onenterdata,
atxHeading: opener(heading),
blockQuote: opener(blockQuote),
characterEscape: onenterdata,
characterReference: onenterdata,
codeFenced: opener(codeFlow),
codeFencedFenceInfo: buffer,
codeFencedFenceMeta: buffer,
codeIndented: opener(codeFlow, buffer),
codeText: opener(codeText, buffer),
codeTextData: onenterdata,
data: onenterdata,
codeFlowValue: onenterdata,
definition: opener(definition),
definitionDestinationString: buffer,
definitionLabelString: buffer,
definitionTitleString: buffer,
emphasis: opener(emphasis),
hardBreakEscape: opener(hardBreak),
hardBreakTrailing: opener(hardBreak),
htmlFlow: opener(html, buffer),
htmlFlowData: onenterdata,
htmlText: opener(html, buffer),
htmlTextData: onenterdata,
image: opener(image),
label: buffer,
link: opener(link),
listItem: opener(listItem),
listItemValue: onenterlistitemvalue,
listOrdered: opener(list, onenterlistordered),
listUnordered: opener(list),
paragraph: opener(paragraph),
reference: onenterreference,
referenceString: buffer,
resourceDestinationString: buffer,
resourceTitleString: buffer,
setextHeading: opener(heading),
strong: opener(strong),
thematicBreak: opener(thematicBreak)
},
exit: {
atxHeading: closer(),
atxHeadingSequence: onexitatxheadingsequence,
autolink: closer(),
autolinkEmail: onexitautolinkemail,
autolinkProtocol: onexitautolinkprotocol,
blockQuote: closer(),
characterEscapeValue: onexitdata,
characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker,
characterReferenceMarkerNumeric: onexitcharacterreferencemarker,
characterReferenceValue: onexitcharacterreferencevalue,
codeFenced: closer(onexitcodefenced),
codeFencedFence: onexitcodefencedfence,
codeFencedFenceInfo: onexitcodefencedfenceinfo,
codeFencedFenceMeta: onexitcodefencedfencemeta,
codeFlowValue: onexitdata,
codeIndented: closer(onexitcodeindented),
codeText: closer(onexitcodetext),
codeTextData: onexitdata,
data: onexitdata,
definition: closer(),
definitionDestinationString: onexitdefinitiondestinationstring,
definitionLabelString: onexitdefinitionlabelstring,
definitionTitleString: onexitdefinitiontitlestring,
emphasis: closer(),
hardBreakEscape: closer(onexithardbreak),
hardBreakTrailing: closer(onexithardbreak),
htmlFlow: closer(onexithtmlflow),
htmlFlowData: onexitdata,
htmlText: closer(onexithtmltext),
htmlTextData: onexitdata,
image: closer(onexitimage),
label: onexitlabel,
labelText: onexitlabeltext,
lineEnding: onexitlineending,
link: closer(onexitlink),
listItem: closer(),
listOrdered: closer(),
listUnordered: closer(),
paragraph: closer(),
referenceString: onexitreferencestring,
resourceDestinationString: onexitresourcedestinationstring,
resourceTitleString: onexitresourcetitlestring,
resource: onexitresource,
setextHeading: closer(onexitsetextheading),
setextHeadingLineSequence: onexitsetextheadinglinesequence,
setextHeadingText: onexitsetextheadingtext,
strong: closer(),
thematicBreak: closer()
}
},
options.mdastExtensions || []
)
/** @type {CompileData} */
const data = {}
return compile
/**
* Turn micromark events into an mdast tree.
*
* @param {Array<Event>} events
* Events.
* @returns {Root}
* mdast tree.
*/
function compile(events) {

@@ -264,16 +343,6 @@ /** @type {Root} */

}
/** @type {CompileContext['stack']} */
const stack = [tree]
/** @type {CompileContext['tokenStack']} */
const tokenStack = []
/** @type {Array<number>} */
const listStack = []
/** @type {Omit<CompileContext, 'sliceSerialize'>} */
const context = {
stack,
tokenStack,
stack: [tree],
tokenStack: [],
config,

@@ -287,4 +356,5 @@ enter,

}
/** @type {Array<number>} */
const listStack = []
let index = -1
while (++index < events.length) {

@@ -305,8 +375,5 @@ // We preprocess lists to add `listItem` tokens, and to infer whether

}
index = -1
while (++index < events.length) {
const handler = config[events[index][0]]
if (own.call(handler, events[index][1].type)) {

@@ -325,8 +392,10 @@ handler[events[index][1].type].call(

if (tokenStack.length > 0) {
const tail = tokenStack[tokenStack.length - 1]
// Handle tokens still being open.
if (context.tokenStack.length > 0) {
const tail = context.tokenStack[context.tokenStack.length - 1]
const handler = tail[1] || defaultOnError
handler.call(context, undefined, tail[0])
} // Figure out `root` position.
}
// Figure out `root` position.
tree.position = {

@@ -352,10 +421,11 @@ start: point(

}
// Call transforms.
index = -1
while (++index < config.transforms.length) {
tree = config.transforms[index](tree) || tree
}
return tree
}
/**

@@ -367,3 +437,2 @@ * @param {Array<Event>} events

*/
function prepareList(events, start, length) {

@@ -373,18 +442,12 @@ let index = start - 1

let listSpread = false
/** @type {Token|undefined} */
/** @type {Token | undefined} */
let listItem
/** @type {number|undefined} */
/** @type {number | undefined} */
let lineIndex
/** @type {number|undefined} */
/** @type {number | undefined} */
let firstBlankLineIndex
/** @type {boolean|undefined} */
/** @type {boolean | undefined} */
let atMarker
while (++index <= length) {
const event = events[index]
if (

@@ -400,3 +463,2 @@ event[1].type === 'listUnordered' ||

}
atMarker = undefined

@@ -413,3 +475,2 @@ } else if (event[1].type === 'lineEndingBlank') {

}
atMarker = undefined

@@ -428,3 +489,2 @@ }

}
if (

@@ -442,6 +502,4 @@ (!containerBalance &&

lineIndex = undefined
while (tailIndex--) {
const tailEvent = events[tailIndex]
if (

@@ -452,3 +510,2 @@ tailEvent[1].type === 'lineEnding' ||

if (tailEvent[0] === 'exit') continue
if (lineIndex) {

@@ -458,3 +515,2 @@ events[lineIndex][1].type = 'lineEndingBlank'

}
tailEvent[1].type = 'lineEnding'

@@ -474,3 +530,2 @@ lineIndex = tailIndex

}
if (

@@ -482,4 +537,5 @@ firstBlankLineIndex &&

listItem._spread = true
} // Fix position.
}
// Fix position.
listItem.end = Object.assign(

@@ -492,4 +548,5 @@ {},

length++
} // Create a new list item.
}
// Create a new list item.
if (event[1].type === 'listItemPrefix') {

@@ -501,4 +558,4 @@ listItem = {

start: Object.assign({}, event[1].start)
} // @ts-expect-error: `listItem` is most definitely defined, TS...
}
// @ts-expect-error: `listItem` is most definitely defined, TS...
events.splice(index, 0, ['enter', listItem, event[2]])

@@ -511,45 +568,52 @@ index++

}
} // @ts-expect-error Patched.
}
// @ts-expect-error Patched.
events[start][1]._spread = listSpread
return length
}
/**
* @type {CompileContext['setData']}
* @param [value]
* Set data.
*
* @template {keyof CompileData} Key
* Field type.
* @param {Key} key
* Key of field.
* @param {CompileData[Key]} [value]
* New value.
* @returns {void}
* Nothing.
*/
function setData(key, value) {
data[key] = value
}
/**
* @type {CompileContext['getData']}
* @template {string} K
* @param {K} key
* @returns {CompileData[K]}
* Get data.
*
* @template {keyof CompileData} Key
* Field type.
* @param {Key} key
* Key of field.
* @returns {CompileData[Key]}
* Value.
*/
function getData(key) {
return data[key]
}
/**
* @param {Point} d
* @returns {Point}
*/
function point(d) {
return {
line: d.line,
column: d.column,
offset: d.offset
}
}
/**
* Create an opener handle.
*
* @param {(token: Token) => Node} create
* Create a node.
* @param {Handle} [and]
* Optional function to also run.
* @returns {Handle}
* Handle.
*/
function opener(create, and) {
return open
/**

@@ -560,3 +624,2 @@ * @this {CompileContext}

*/
function open(token) {

@@ -567,4 +630,7 @@ enter.call(this, create(token), token)

}
/** @type {CompileContext['buffer']} */
/**
* @this {CompileContext}
* @returns {void}
*/
function buffer() {

@@ -576,12 +642,17 @@ this.stack.push({

}
/**
* @type {CompileContext['enter']}
* @template {Node} N
* @template {Node} Kind
* Node type.
* @this {CompileContext}
* @param {N} node
* Context.
* @param {Kind} node
* Node to enter.
* @param {Token} token
* @param {OnEnterError} [errorHandler]
* @returns {N}
* Corresponding token.
* @param {OnEnterError | undefined} [errorHandler]
* Handle the case where this token is open, but it is closed by something else.
* @returns {Kind}
* The given node.
*/
function enter(node, token, errorHandler) {

@@ -592,4 +663,4 @@ const parent = this.stack[this.stack.length - 1]

this.stack.push(node)
this.tokenStack.push([token, errorHandler]) // @ts-expect-error: `end` will be patched later.
this.tokenStack.push([token, errorHandler])
// @ts-expect-error: `end` will be patched later.
node.position = {

@@ -600,9 +671,14 @@ start: point(token.start)

}
/**
* Create a closer handle.
*
* @param {Handle} [and]
* Optional function to also run.
* @returns {Handle}
* Handle.
*/
function closer(and) {
return close
/**

@@ -613,3 +689,2 @@ * @this {CompileContext}

*/
function close(token) {

@@ -620,14 +695,16 @@ if (and) and.call(this, token)

}
/**
* @type {CompileContext['exit']}
* @this {CompileContext}
* Context.
* @param {Token} token
* @param {OnExitError} [onExitError]
* Corresponding token.
* @param {OnExitError | undefined} [onExitError]
* Handle the case where another token is open.
* @returns {Node}
* The closed node.
*/
function exit(token, onExitError) {
const node = this.stack.pop()
const open = this.tokenStack.pop()
if (!open) {

@@ -652,6 +729,6 @@ throw new Error(

}
node.position.end = point(token.end)
return node
}
/**

@@ -661,21 +738,25 @@ * @this {CompileContext}

*/
function resume() {
return toString(this.stack.pop())
} //
}
//
// Handlers.
//
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onenterlistordered() {
setData('expectingFirstListItemValue', true)
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onenterlistitemvalue(token) {
if (getData('expectingFirstListItemValue')) {
const ancestor =
/** @type {List} */
this.stack[this.stack.length - 2]
const ancestor = this.stack[this.stack.length - 2]
ancestor.start = Number.parseInt(this.sliceSerialize(token), 10)

@@ -685,22 +766,27 @@ setData('expectingFirstListItemValue')

}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodefencedfenceinfo() {
const data = this.resume()
const node =
/** @type {Code} */
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
node.lang = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodefencedfencemeta() {
const data = this.resume()
const node =
/** @type {Code} */
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
node.meta = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodefencedfence() {

@@ -712,29 +798,31 @@ // Exit if this is the closing fence.

}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodefenced() {
const data = this.resume()
const node =
/** @type {Code} */
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
node.value = data.replace(/^(\r?\n|\r)|(\r?\n|\r)$/g, '')
setData('flowCodeInside')
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodeindented() {
const data = this.resume()
const node =
/** @type {Code} */
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
node.value = data.replace(/(\r?\n|\r)$/g, '')
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitdefinitionlabelstring(token) {
// Discard label, use the source content instead.
const label = this.resume()
const node =
/** @type {Definition} */
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
node.label = label

@@ -745,27 +833,29 @@ node.identifier = normalizeIdentifier(

}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitdefinitiontitlestring() {
const data = this.resume()
const node =
/** @type {Definition} */
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
node.title = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitdefinitiondestinationstring() {
const data = this.resume()
const node =
/** @type {Definition} */
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
node.url = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitatxheadingsequence(token) {
const node =
/** @type {Heading} */
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
if (!node.depth) {

@@ -776,45 +866,54 @@ const depth = this.sliceSerialize(token).length

}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitsetextheadingtext() {
setData('setextHeadingSlurpLineEnding', true)
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitsetextheadinglinesequence(token) {
const node =
/** @type {Heading} */
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
node.depth = this.sliceSerialize(token).charCodeAt(0) === 61 ? 1 : 2
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitsetextheading() {
setData('setextHeadingSlurpLineEnding')
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onenterdata(token) {
const parent =
/** @type {Parent} */
this.stack[this.stack.length - 1]
/** @type {Node} */
let tail = parent.children[parent.children.length - 1]
const node = this.stack[this.stack.length - 1]
let tail = node.children[node.children.length - 1]
if (!tail || tail.type !== 'text') {
// Add a new text node.
tail = text() // @ts-expect-error: we’ll add `end` later.
tail = text()
// @ts-expect-error: we’ll add `end` later.
tail.position = {
start: point(token.start)
} // @ts-expect-error: Assume `parent` accepts `text`.
parent.children.push(tail)
}
// @ts-expect-error: Assume `parent` accepts `text`.
node.children.push(tail)
}
this.stack.push(tail)
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitdata(token) {

@@ -825,7 +924,10 @@ const tail = this.stack.pop()

}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitlineending(token) {
const context = this.stack[this.stack.length - 1]
// If we’re at a hard break, include the line ending in there.

@@ -838,3 +940,2 @@ if (getData('atHardBreak')) {

}
if (

@@ -848,106 +949,134 @@ !getData('setextHeadingSlurpLineEnding') &&

}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexithardbreak() {
setData('atHardBreak', true)
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexithtmlflow() {
const data = this.resume()
const node =
/** @type {HTML} */
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
node.value = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexithtmltext() {
const data = this.resume()
const node =
/** @type {HTML} */
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
node.value = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcodetext() {
const data = this.resume()
const node =
/** @type {InlineCode} */
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
node.value = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitlink() {
const context =
/** @type {Link & {identifier: string, label: string}} */
this.stack[this.stack.length - 1] // To do: clean.
const node = this.stack[this.stack.length - 1]
// Note: there are also `identifier` and `label` fields on this link node!
// These are used / cleaned here.
// To do: clean.
if (getData('inReference')) {
context.type += 'Reference' // @ts-expect-error: mutate.
context.referenceType = getData('referenceType') || 'shortcut' // @ts-expect-error: mutate.
delete context.url
delete context.title
/** @type {ReferenceType} */
const referenceType = getData('referenceType') || 'shortcut'
node.type += 'Reference'
// @ts-expect-error: mutate.
node.referenceType = referenceType
// @ts-expect-error: mutate.
delete node.url
delete node.title
} else {
// @ts-expect-error: mutate.
delete context.identifier // @ts-expect-error: mutate.
delete context.label
delete node.identifier
// @ts-expect-error: mutate.
delete node.label
}
setData('referenceType')
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitimage() {
const context =
/** @type {Image & {identifier: string, label: string}} */
this.stack[this.stack.length - 1] // To do: clean.
const node = this.stack[this.stack.length - 1]
    // Note: there are also `identifier` and `label` fields on this image node!
// These are used / cleaned here.
// To do: clean.
if (getData('inReference')) {
context.type += 'Reference' // @ts-expect-error: mutate.
context.referenceType = getData('referenceType') || 'shortcut' // @ts-expect-error: mutate.
delete context.url
delete context.title
/** @type {ReferenceType} */
const referenceType = getData('referenceType') || 'shortcut'
node.type += 'Reference'
// @ts-expect-error: mutate.
node.referenceType = referenceType
// @ts-expect-error: mutate.
delete node.url
delete node.title
} else {
// @ts-expect-error: mutate.
delete context.identifier // @ts-expect-error: mutate.
delete context.label
delete node.identifier
// @ts-expect-error: mutate.
delete node.label
}
setData('referenceType')
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitlabeltext(token) {
const ancestor =
/** @type {(Link|Image) & {identifier: string, label: string}} */
this.stack[this.stack.length - 2]
const string = this.sliceSerialize(token)
const ancestor = this.stack[this.stack.length - 2]
// @ts-expect-error: stash this on the node, as it might become a reference
// later.
ancestor.label = decodeString(string)
// @ts-expect-error: same as above.
ancestor.identifier = normalizeIdentifier(string).toLowerCase()
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitlabel() {
const fragment =
/** @type {Fragment} */
this.stack[this.stack.length - 1]
const fragment = this.stack[this.stack.length - 1]
const value = this.resume()
const node =
/** @type {(Link|Image) & {identifier: string, label: string}} */
this.stack[this.stack.length - 1] // Assume a reference.
const node = this.stack[this.stack.length - 1]
// Assume a reference.
setData('inReference', true)
if (node.type === 'link') {
/** @type {Array<StaticPhrasingContent>} */
// @ts-expect-error: Assume static phrasing content.
node.children = fragment.children
const children = fragment.children
node.children = children
} else {

@@ -957,38 +1086,55 @@ node.alt = value

}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitresourcedestinationstring() {
const data = this.resume()
const node =
/** @type {Link|Image} */
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
node.url = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitresourcetitlestring() {
const data = this.resume()
const node =
/** @type {Link|Image} */
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
node.title = data
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitresource() {
setData('inReference')
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onenterreference() {
setData('referenceType', 'collapsed')
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitreferencestring(token) {
const label = this.resume()
const node =
/** @type {LinkReference|ImageReference} */
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
// @ts-expect-error: stash this on the node, as it might become a reference
// later.
node.label = label
// @ts-expect-error: same as above.
node.identifier = normalizeIdentifier(

@@ -999,9 +1145,16 @@ this.sliceSerialize(token)

}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcharacterreferencemarker(token) {
setData('characterReferenceType', token.type)
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitcharacterreferencevalue(token) {

@@ -1011,5 +1164,3 @@ const data = this.sliceSerialize(token)

/** @type {string} */
let value
if (type) {

@@ -1022,8 +1173,5 @@ value = decodeNumericCharacterReference(

} else {
// @ts-expect-error `decodeNamedCharacterReference` can return false for
// invalid named character references, but everything we’ve tokenized is
// valid.
value = decodeNamedCharacterReference(data)
const result = decodeNamedCharacterReference(data)
value = result
}
const tail = this.stack.pop()

@@ -1033,20 +1181,24 @@ tail.value += value

}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitautolinkprotocol(token) {
onexitdata.call(this, token)
const node =
/** @type {Link} */
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
node.url = this.sliceSerialize(token)
}
/** @type {Handle} */
/**
* @this {CompileContext}
* @type {Handle}
*/
function onexitautolinkemail(token) {
onexitdata.call(this, token)
const node =
/** @type {Link} */
this.stack[this.stack.length - 1]
const node = this.stack[this.stack.length - 1]
node.url = 'mailto:' + this.sliceSerialize(token)
} //
}
//
  // Creators.

@@ -1056,3 +1208,2 @@ //

/** @returns {Blockquote} */
function blockQuote() {

@@ -1064,4 +1215,4 @@ return {

}
/** @returns {Code} */
function codeFlow() {

@@ -1075,4 +1226,4 @@ return {

}
/** @returns {InlineCode} */
function codeText() {

@@ -1084,4 +1235,4 @@ return {

}
/** @returns {Definition} */
function definition() {

@@ -1096,4 +1247,4 @@ return {

}
/** @returns {Emphasis} */
function emphasis() {

@@ -1105,4 +1256,4 @@ return {

}
/** @returns {Heading} */
function heading() {

@@ -1116,4 +1267,4 @@ // @ts-expect-error `depth` will be set later.

}
/** @returns {Break} */
function hardBreak() {

@@ -1124,4 +1275,4 @@ return {

}
/** @returns {HTML} */
function html() {

@@ -1133,4 +1284,4 @@ return {

}
/** @returns {Image} */
function image() {

@@ -1144,4 +1295,4 @@ return {

}
/** @returns {Link} */
function link() {

@@ -1155,2 +1306,3 @@ return {

}
/**

@@ -1160,3 +1312,2 @@ * @param {Token} token

*/
function list(token) {

@@ -1172,2 +1323,3 @@ return {

}
/**

@@ -1177,3 +1329,2 @@ * @param {Token} token

*/
function listItem(token) {

@@ -1188,4 +1339,4 @@ return {

}
/** @returns {Paragraph} */
function paragraph() {

@@ -1197,4 +1348,4 @@ return {

}
/** @returns {Strong} */
function strong() {

@@ -1206,4 +1357,4 @@ return {

}
/** @returns {Text} */
function text() {

@@ -1215,4 +1366,4 @@ return {

}
/** @returns {ThematicBreak} */
function thematicBreak() {

@@ -1224,14 +1375,28 @@ return {

}
/**
* @param {Extension} combined
* @param {Array<Extension|Array<Extension>>} extensions
* @returns {Extension}
* Copy a point-like value.
*
* @param {Point} d
* Point-like value.
* @returns {Point}
* unist point.
*/
function point(d) {
return {
line: d.line,
column: d.column,
offset: d.offset
}
}
/**
* @param {Config} combined
* @param {Array<Extension | Array<Extension>>} extensions
* @returns {void}
*/
function configure(combined, extensions) {
let index = -1
while (++index < extensions.length) {
const value = extensions[index]
if (Array.isArray(value)) {

@@ -1243,31 +1408,29 @@ configure(combined, value)

}
}
return combined
}
/**
* @param {Extension} combined
* @param {Config} combined
* @param {Extension} extension
* @returns {void}
*/
function extension(combined, extension) {
/** @type {string} */
/** @type {keyof Extension} */
let key
for (key in extension) {
if (own.call(extension, key)) {
const list = key === 'canContainEols' || key === 'transforms'
const maybe = own.call(combined, key) ? combined[key] : undefined
/* c8 ignore next */
const left = maybe || (combined[key] = list ? [] : {})
const right = extension[key]
if (right) {
if (list) {
// @ts-expect-error: `left` is an array.
combined[key] = [...left, ...right]
} else {
Object.assign(left, right)
if (key === 'canContainEols') {
const right = extension[key]
if (right) {
combined[key].push(...right)
}
} else if (key === 'transforms') {
const right = extension[key]
if (right) {
combined[key].push(...right)
}
} else if (key === 'enter' || key === 'exit') {
const right = extension[key]
if (right) {
Object.assign(combined[key], right)
}
}

@@ -1277,4 +1440,4 @@ }

}
/** @type {OnEnterError} */
function defaultOnError(left, right) {

@@ -1281,0 +1444,0 @@ if (left) {

{
"name": "mdast-util-from-markdown",
"version": "1.2.0",
"version": "1.2.1",
"description": "mdast utility to parse markdown",

@@ -58,8 +58,8 @@ "license": "MIT",

"devDependencies": {
"@types/power-assert": "^1.0.0",
"@types/tape": "^4.0.0",
"@types/node": "^18.0.0",
"c8": "^7.0.0",
"commonmark.json": "^0.30.0",
"esbuild": "^0.13.0",
"esbuild": "^0.17.0",
"gzip-size-cli": "^5.0.0",
"hast-util-from-html": "^1.0.0",
"hast-util-to-html": "^8.0.0",

@@ -69,19 +69,15 @@ "mdast-util-to-hast": "^12.0.0",

"prettier": "^2.0.0",
"rehype-parse": "^8.0.0",
"rehype-stringify": "^9.0.0",
"remark-cli": "^10.0.0",
"remark-cli": "^11.0.0",
"remark-preset-wooorm": "^9.0.0",
"rimraf": "^3.0.0",
"tape": "^5.0.0",
"terser": "^5.0.0",
"type-coverage": "^2.0.0",
"typescript": "^4.0.0",
"unified": "^10.0.0",
"xo": "^0.46.0"
"xo": "^0.53.0"
},
"scripts": {
"build": "rimraf \"dev/**/*.d.ts\" \"test/**/*.d.ts\" && tsc && type-coverage && micromark-build && esbuild . --bundle --minify | terser | gzip-size --raw",
"prepack": "npm run build && npm run format",
"build": "tsc --build --clean && tsc --build && type-coverage && micromark-build && esbuild . --bundle --minify | terser | gzip-size --raw",
"format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix",
"test-api": "node --conditions development test/index.js",
"test-coverage": "c8 --check-coverage --branches 100 --functions 100 --lines 100 --statements 100 --reporter lcov node --conditions development test/index.js",
"test-coverage": "c8 --check-coverage --100 --reporter lcov npm run test-api",
"test": "npm run build && npm run format && npm run test-coverage"

@@ -101,6 +97,15 @@ },

"complexity": "off",
"node/file-extension-in-import": "off",
"n/file-extension-in-import": "off",
"unicorn/prefer-code-point": "off",
"unicorn/prefer-switch": "off",
"unicorn/prefer-node-protocol": "off"
}
},
"overrides": [
{
"files": "test/**/*.js",
"rules": {
"no-await-in-loop": "off"
}
}
]
},

@@ -107,0 +112,0 @@ "remarkConfig": {

@@ -11,17 +11,63 @@ # mdast-util-from-markdown

**[mdast][]** utility to parse markdown.
**[mdast][]** utility that turns markdown into a syntax tree.
## When to use this
## Contents
Use this if you want to use **[micromark][]** but need an AST.
Use **[remark][]** instead, which includes both to provide a nice interface and
hundreds of plugins.
* [What is this?](#what-is-this)
* [When should I use this?](#when-should-i-use-this)
* [Install](#install)
* [Use](#use)
* [API](#api)
* [`fromMarkdown(value[, encoding][, options])`](#frommarkdownvalue-encoding-options)
* [`CompileContext`](#compilecontext)
* [`Encoding`](#encoding)
* [`Extension`](#extension)
* [`Handle`](#handle)
* [`OnEnterError`](#onentererror)
* [`OnExitError`](#onexiterror)
* [`Options`](#options)
* [`Token`](#token)
* [`Transform`](#transform)
* [`Value`](#value)
* [List of extensions](#list-of-extensions)
* [Syntax](#syntax)
* [Syntax tree](#syntax-tree)
* [Types](#types)
* [Compatibility](#compatibility)
* [Security](#security)
* [Related](#related)
* [Contribute](#contribute)
* [License](#license)
## What is this?
This package is a utility that takes markdown input and turns it into an
[mdast][] syntax tree.
This utility uses [`micromark`][micromark], which turns markdown into tokens,
and then turns those tokens into nodes.
This package is used inside [`remark-parse`][remark-parse], which focusses on
making it easier to transform content by abstracting these internals away.
## When should I use this?
If you want to handle syntax trees manually, use this.
When you *just* want to turn markdown into HTML, use [`micromark`][micromark]
instead.
For an easier time processing content, use the **[remark][]** ecosystem instead.
You can combine this package with other packages to add syntax extensions to
markdown.
Notable examples that deeply integrate with this package are
[`mdast-util-gfm`][mdast-util-gfm],
[`mdast-util-mdx`][mdast-util-mdx],
[`mdast-util-frontmatter`][mdast-util-frontmatter],
[`mdast-util-math`][mdast-util-math], and
[`mdast-util-directive`][mdast-util-directive].
## Install
This package is [ESM only](https://gist.github.com/sindresorhus/a39789f98801d908bbc7ff3ecc99d99c):
Node 12+ is needed to use it and it must be `import`ed instead of `require`d.
This package is [ESM only][esm].
In Node.js (version 14.14+ and 16.0+), install with [npm][]:
[npm][]:
```sh

@@ -31,5 +77,19 @@ npm install mdast-util-from-markdown

In Deno with [`esm.sh`][esmsh]:
```js
import {fromMarkdown} from 'https://esm.sh/mdast-util-from-markdown@1'
```
In browsers with [`esm.sh`][esmsh]:
```html
<script type="module">
import {fromMarkdown} from 'https://esm.sh/mdast-util-from-markdown@1?bundle'
</script>
```
## Use
Say we have the following markdown file, `example.md`:
Say we have the following markdown file `example.md`:

@@ -40,10 +100,9 @@ ```markdown

And our script, `example.js`, looks as follows:
…and our module `example.js` looks as follows:
```js
import fs from 'node:fs'
import fs from 'node:fs/promises'
import {fromMarkdown} from 'mdast-util-from-markdown'
const doc = fs.readFileSync('example.md')
const doc = await fs.readFile('example.md')
const tree = fromMarkdown(doc)

@@ -54,3 +113,3 @@

Now, running `node example` yields (positional info removed for brevity):
…now running `node example.js` yields (positional info removed for brevity):

@@ -66,6 +125,3 @@ ```js

{type: 'text', value: 'Hello, '},
{
type: 'emphasis',
children: [{type: 'text', value: 'World'}]
},
{type: 'emphasis', children: [{type: 'text', value: 'World'}]},
{type: 'text', value: '!'}

@@ -80,67 +136,254 @@ ]

This package exports the following identifier: `fromMarkdown`.
This package exports the identifier [`fromMarkdown`][api-frommarkdown].
There is no default export.
The export map supports the endorsed
[`development` condition](https://nodejs.org/api/packages.html#packages_resolving_user_conditions).
Run `node --conditions development module.js` to get instrumented dev code.
The export map supports the [`development` condition][development].
Run `node --conditions development example.js` to get instrumented dev code.
Without this condition, production code is loaded.
### `fromMarkdown(doc[, encoding][, options])`
### `fromMarkdown(value[, encoding][, options])`
Parse markdown to a **[mdast][]** tree.
Turn markdown into a syntax tree.
##### Parameters
###### Overloads
###### `doc`
* `(value: Value, encoding: Encoding, options?: Options) => Root`
* `(value: Value, options?: Options) => Root`
Value to parse (`string` or [`Buffer`][buffer]).
###### Parameters
###### `encoding`
* `value` ([`Value`][api-value])
— markdown to parse
* `encoding` ([`Encoding`][api-encoding], default: `'utf8'`)
— [character encoding][character-encoding] for when `value` is
[`Buffer`][buffer]
* `options` ([`Options`][api-options], optional)
— configuration
[Character encoding][encoding] to understand `doc` as when it’s a
[`Buffer`][buffer] (`string`, default: `'utf8'`).
###### Returns
###### `options.extensions`
mdast tree ([`Root`][root]).
Array of syntax extensions (`Array<MicromarkSyntaxExtension>`, default: `[]`).
Passed to [`micromark` as `extensions`][micromark-extensions].
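As a short sketch of how the two call signatures above can be used (it reuses the `example.md` file from the Use section; the empty `mdastExtensions` array is only there to show where options go):

```js
import fs from 'node:fs/promises'
import {fromMarkdown} from 'mdast-util-from-markdown'

// `value` is a Buffer, so a character encoding may be given as the second
// argument; for UTF-8 files the two calls below produce the same tree.
const value = await fs.readFile('example.md')

const tree = fromMarkdown(value, 'utf8')
const sameTree = fromMarkdown(value, {mdastExtensions: []})
```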
### `CompileContext`
###### `options.mdastExtensions`
mdast compiler context (TypeScript type).
Array of mdast extensions (`Array<MdastExtension>`, default: `[]`).
###### Fields
##### Returns
* `stack` ([`Array<Node>`][node])
— stack of nodes
* `tokenStack` (`Array<[Token, OnEnterError | undefined]>`)
— stack of tokens
* `getData` (`(key: string) => unknown`)
— get data from the key/value store
* `setData` (`(key: string, value?: unknown) => void`)
— set data into the key/value store
* `buffer` (`() => void`)
— capture some of the output data
* `resume` (`() => string`)
— stop capturing and access the output data
* `enter` (`(node: Node, token: Token, onError?: OnEnterError) => Node`)
— enter a token
* `exit` (`(token: Token, onError?: OnExitError) => Node`)
— exit a token
* `sliceSerialize` (`(token: Token, expandTabs?: boolean) => string`)
— get the string value of a token
* `config` (`Required<Extension>`)
— configuration
[`Root`][root].
### `Encoding`
Encodings supported by the [`Buffer`][buffer] class (TypeScript type).
<!-- To do: link to micromark type, when documented. -->
See [`micromark`](https://github.com/micromark/micromark#api) for more info.
###### Type
```ts
type Encoding = 'utf8' | /* … */
```
### `Extension`
Change how markdown tokens from micromark are turned into mdast (TypeScript
type).
###### Properties
* `canContainEols` (`Array<string>`, optional)
— token types where line endings are used
* `enter` ([`Record<string, Handle>`][api-handle], optional)
— opening handles
* `exit` ([`Record<string, Handle>`][api-handle], optional)
— closing handles
* `transforms` ([`Array<Transform>`][api-transform], optional)
— tree transforms
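A minimal sketch of what an extension object can look like; the token type `underline` and its mapping to an `emphasis` node are made up purely for illustration:

```js
/** @type {import('mdast-util-from-markdown').Extension} */
const underlineFromMarkdown = {
  enter: {
    // Runs when micromark emits an `enter` event for (hypothetical)
    // `underline` tokens: open an `emphasis` node.
    underline(token) {
      this.enter({type: 'emphasis', children: []}, token)
    }
  },
  exit: {
    // Runs for the matching `exit` event: close the node opened above.
    underline(token) {
      this.exit(token)
    }
  }
}
```

Such an object is passed in `mdastExtensions` (see [`Options`][api-options]).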
### `Handle`
Handle a token (TypeScript type).
###### Parameters
* `this` ([`CompileContext`][api-compilecontext])
— context
* `token` ([`Token`][api-token])
— current token
###### Returns
Nothing (`void`).
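As a sketch, a pair of handles that use the compile context to capture text: the enter handle starts buffering, the exit handle reads the captured string back and stores it on the current node (assuming that node is a literal with a `value` field):

```js
/** @type {import('mdast-util-from-markdown').Handle} */
function enterValue(token) {
  // Start capturing output until the matching exit handle runs.
  this.buffer()
}

/** @type {import('mdast-util-from-markdown').Handle} */
function exitValue(token) {
  // Stop capturing and read the captured string back.
  const value = this.resume()
  const node = this.stack[this.stack.length - 1]
  // Assumption: the node currently on top of the stack has a `value` field.
  node.value = value
}
```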
### `OnEnterError`
Handle the case where the `right` token is open, but it is closed by the
`left` token or because we reached the end of the document (TypeScript type).
###### Parameters
* `this` ([`CompileContext`][api-compilecontext])
— context
* `left` ([`Token`][api-token] or `undefined`)
— left token
* `right` ([`Token`][api-token])
— right token
###### Returns
Nothing (`void`).
### `OnExitError`
Handle the case where the `right` token is open but it is closed by
exiting the `left` token (TypeScript type).
###### Parameters
* `this` ([`CompileContext`][api-compilecontext])
— context
* `left` ([`Token`][api-token])
— left token
* `right` ([`Token`][api-token])
— right token
###### Returns
Nothing (`void`).
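For illustration, an error handler with the `OnEnterError` shape could be passed as the third argument of `enter` inside a handle; the message text here is made up:

```js
/** @type {import('mdast-util-from-markdown').OnEnterError} */
function onMismatch(left, right) {
  throw new Error(
    'Token `' + right.type + '` is still open ' +
      (left ? 'when `' + left.type + '` closes' : 'at the end of the document')
  )
}

// Inside a `Handle`:
//   this.enter({type: 'emphasis', children: []}, token, onMismatch)
```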
### `Options`
Configuration (TypeScript type).
###### Properties
* `extensions` ([`Array<MicromarkExtension>`][micromark-extension], optional)
— micromark extensions to change how markdown is parsed
* `mdastExtensions` ([`Array<Extension | Array<Extension>>`][api-extension],
optional)
— extensions for this utility to change how tokens are turned into a tree
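For example, the GFM packages pair a micromark syntax extension with an mdast extension (this assumes `micromark-extension-gfm` and `mdast-util-gfm` are installed):

```js
import {fromMarkdown} from 'mdast-util-from-markdown'
import {gfm} from 'micromark-extension-gfm'
import {gfmFromMarkdown} from 'mdast-util-gfm'

const tree = fromMarkdown('~~strikethrough~~', {
  // Teach micromark to parse the GFM syntax…
  extensions: [gfm()],
  // …and teach this utility how to turn the resulting tokens into mdast.
  mdastExtensions: [gfmFromMarkdown()]
})
```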
### `Token`
Token from micromark (TypeScript type).
<!-- To do: link to micromark type, when documented. -->
See [`micromark`](https://github.com/micromark/micromark#api) for more info.
###### Type
```ts
type Token = { /* … */ }
```
### `Transform`
Extra transform, to change the AST afterwards (TypeScript type).
###### Parameters
* `tree` ([`Root`][root])
— tree to transform
###### Returns
New tree ([`Root`][root]) or nothing (in which case the current tree is used).
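A small sketch of a transform; the rule it applies (clamping heading depth) is invented purely for illustration:

```js
/** @type {import('mdast-util-from-markdown').Transform} */
function clampHeadings(tree) {
  for (const node of tree.children) {
    // Made-up rule: no top-level heading deeper than 3.
    if (node.type === 'heading' && node.depth > 3) {
      node.depth = 3
    }
  }
  // Returning nothing would also be fine; the current tree is then reused.
  return tree
}

// Usage: fromMarkdown(doc, {mdastExtensions: [{transforms: [clampHeadings]}]})
```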
### `Value`
Contents of the file (TypeScript type).
<!-- To do: link to micromark type, when documented. -->
See [`micromark`](https://github.com/micromark/micromark#api) for more info.
###### Type
```ts
type Value = string | Uint8Array
```
## List of extensions
* [`syntax-tree/mdast-util-directive`](https://github.com/syntax-tree/mdast-util-directive)
— parse directives
— directives
* [`syntax-tree/mdast-util-frontmatter`](https://github.com/syntax-tree/mdast-util-frontmatter)
— parse frontmatter (YAML, TOML, more)
— frontmatter (YAML, TOML, more)
* [`syntax-tree/mdast-util-gfm`](https://github.com/syntax-tree/mdast-util-gfm)
— parse GFM
— GFM
* [`syntax-tree/mdast-util-gfm-autolink-literal`](https://github.com/syntax-tree/mdast-util-gfm-autolink-literal)
— parse GFM autolink literals
— GFM autolink literals
* [`syntax-tree/mdast-util-gfm-footnote`](https://github.com/syntax-tree/mdast-util-gfm-footnote)
— parse GFM footnotes
— GFM footnotes
* [`syntax-tree/mdast-util-gfm-strikethrough`](https://github.com/syntax-tree/mdast-util-gfm-strikethrough)
— parse GFM strikethrough
— GFM strikethrough
* [`syntax-tree/mdast-util-gfm-table`](https://github.com/syntax-tree/mdast-util-gfm-table)
— parse GFM tables
— GFM tables
* [`syntax-tree/mdast-util-gfm-task-list-item`](https://github.com/syntax-tree/mdast-util-gfm-task-list-item)
— parse GFM task list items
— GFM task list items
* [`syntax-tree/mdast-util-math`](https://github.com/syntax-tree/mdast-util-math)
— parse math
— math
* [`syntax-tree/mdast-util-mdx`](https://github.com/syntax-tree/mdast-util-mdx)
— parse MDX or MDX.js
— MDX
* [`syntax-tree/mdast-util-mdx-expression`](https://github.com/syntax-tree/mdast-util-mdx-expression)
— parse MDX or MDX.js expressions
— MDX expressions
* [`syntax-tree/mdast-util-mdx-jsx`](https://github.com/syntax-tree/mdast-util-mdx-jsx)
— parse MDX or MDX.js JSX
— MDX JSX
* [`syntax-tree/mdast-util-mdxjs-esm`](https://github.com/syntax-tree/mdast-util-mdxjs-esm)
— parse MDX.js ESM
— MDX ESM
## Syntax
Markdown is parsed according to CommonMark.
Extensions can add support for other syntax.
If you’re interested in extending markdown,
[more information is available in micromark’s readme][micromark-extend].
## Syntax tree
The syntax tree is [mdast][].
## Types
This package is fully typed with [TypeScript][].
It exports the additional types [`CompileContext`][api-compilecontext],
[`Encoding`][api-encoding],
[`Extension`][api-extension],
[`Handle`][api-handle],
[`OnEnterError`][api-onentererror],
[`OnExitError`][api-onexiterror],
[`Options`][api-options],
[`Token`][api-token],
[`Transform`][api-transform], and
[`Value`][api-value].
## Compatibility
Projects maintained by the unified collective are compatible with all maintained
versions of Node.js.
As of now, that is Node.js 14.14+ and 16.0+.
Our projects sometimes work with older versions, but this is not guaranteed.
## Security

@@ -152,17 +395,17 @@

When going to HTML, use this utility in combination with
[`hast-util-sanitize`][sanitize] to make the tree safe.
[`hast-util-sanitize`][hast-util-sanitize] to make the tree safe.
## Related
* [`syntax-tree/mdast-util-to-markdown`](https://github.com/syntax-tree/mdast-util-to-markdown)
— serialize mdast as markdown
* [`micromark/micromark`](https://github.com/micromark/micromark)
— the smallest commonmark-compliant markdown parser that exists
— parse markdown
* [`remarkjs/remark`](https://github.com/remarkjs/remark)
— markdown processor powered by plugins
* [`syntax-tree/mdast-util-to-markdown`](https://github.com/syntax-tree/mdast-util-to-markdown)
— serialize mdast to markdown
— process markdown
## Contribute
See [`contributing.md` in `syntax-tree/.github`][contributing] for ways to get
started.
See [`contributing.md`][contributing] in [`syntax-tree/.github`][health] for
ways to get started.
See [`support.md`][support] for ways to get help.

@@ -208,2 +451,4 @@

[esmsh]: https://esm.sh
[license]: license

@@ -213,13 +458,31 @@

[contributing]: https://github.com/syntax-tree/.github/blob/HEAD/contributing.md
[health]: https://github.com/syntax-tree/.github
[support]: https://github.com/syntax-tree/.github/blob/HEAD/support.md
[contributing]: https://github.com/syntax-tree/.github/blob/main/contributing.md
[coc]: https://github.com/syntax-tree/.github/blob/HEAD/code-of-conduct.md
[support]: https://github.com/syntax-tree/.github/blob/main/support.md
[coc]: https://github.com/syntax-tree/.github/blob/main/code-of-conduct.md
[esm]: https://gist.github.com/sindresorhus/a39789f98801d908bbc7ff3ecc99d99c
[typescript]: https://www.typescriptlang.org
[mdast]: https://github.com/syntax-tree/mdast
[node]: https://github.com/syntax-tree/mdast#nodes
[mdast-util-gfm]: https://github.com/syntax-tree/mdast-util-gfm
[mdast-util-mdx]: https://github.com/syntax-tree/mdast-util-mdx
[mdast-util-frontmatter]: https://github.com/syntax-tree/mdast-util-frontmatter
[mdast-util-math]: https://github.com/syntax-tree/mdast-util-math
[mdast-util-directive]: https://github.com/syntax-tree/mdast-util-directive
[root]: https://github.com/syntax-tree/mdast#root
[encoding]: https://nodejs.org/api/buffer.html#buffer_buffers_and_character_encodings
[character-encoding]: https://nodejs.org/api/buffer.html#buffer_buffers_and_character_encodings

@@ -230,8 +493,36 @@ [buffer]: https://nodejs.org/api/buffer.html

[sanitize]: https://github.com/syntax-tree/hast-util-sanitize
[hast-util-sanitize]: https://github.com/syntax-tree/hast-util-sanitize
[micromark]: https://github.com/micromark/micromark
[micromark-extensions]: https://github.com/micromark/micromark#optionsextensions
[micromark-extension]: https://github.com/micromark/micromark#optionsextensions
[micromark-extend]: https://github.com/micromark/micromark#extensions
[remark]: https://github.com/remarkjs/remark
[remark-parse]: https://github.com/remarkjs/remark/tree/main/packages/remark-parse
[development]: https://nodejs.org/api/packages.html#packages_resolving_user_conditions
[api-frommarkdown]: #frommarkdownvalue-encoding-options
[api-compilecontext]: #compilecontext
[api-encoding]: #encoding
[api-extension]: #extension
[api-handle]: #handle
[api-onentererror]: #onentererror
[api-onexiterror]: #onexiterror
[api-options]: #options
[api-token]: #token
[api-transform]: #transform
[api-value]: #value