micromark - npm package version comparison

Comparing version 2.11.4 to 3.0.0-alpha.1

dev/index.d.ts

index.d.ts

@@ -1,5 +0,17 @@

// Minimum TypeScript Version: 3.0
import buffer from './buffer'
export default buffer
/**
* @param value Markdown to parse (`string` or `Buffer`).
* @param [encoding] Character encoding to understand `value` as when it’s a `Buffer` (`string`, default: `'utf8'`).
* @param [options] Configuration
*/
export const micromark: ((
value: Value,
encoding: Encoding,
options?: import('micromark-util-types').Options | undefined
) => string) &
((
value: Value,
options?: import('micromark-util-types').Options | undefined
) => string)
export type Options = import('micromark-util-types').Options
export type Value = import('micromark-util-types').Value
export type Encoding = import('micromark-util-types').Encoding
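
For orientation, a minimal usage sketch of the signature declared above (not part of the diff; it assumes the named `micromark` export introduced in 3.0.0-alpha.1, whereas version 2 exposed a default export, as the removed lines show):

import {micromark} from 'micromark'

// Plain string input.
micromark('## Hello, *world*!')
// => '<h2>Hello, <em>world</em>!</h2>'

// Buffer input with an explicit character encoding (the optional second argument).
micromark(Buffer.from('# hi', 'utf8'), 'utf8')
// => '<h1>hi</h1>'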

@@ -1,3 +0,40 @@

'use strict'
/**
* @typedef {import('micromark-util-types').Options} Options
* @typedef {import('micromark-util-types').Value} Value
* @typedef {import('micromark-util-types').Encoding} Encoding
*/
import {compile} from './lib/compile.js'
import {parse} from './lib/parse.js'
import {postprocess} from './lib/postprocess.js'
import {preprocess} from './lib/preprocess.js'
/**
* @param value Markdown to parse (`string` or `Buffer`).
* @param [encoding] Character encoding to understand `value` as when it’s a `Buffer` (`string`, default: `'utf8'`).
* @param [options] Configuration
*/
module.exports = require('./buffer.js')
export const micromark =
/**
* @type {(
* ((value: Value, encoding: Encoding, options?: Options) => string) &
* ((value: Value, options?: Options) => string)
* )}
*/
/**
* @param {Value} value
* @param {Encoding} [encoding]
* @param {Options} [options]
*/
function (value, encoding, options) {
if (typeof encoding !== 'string') {
options = encoding
encoding = undefined
}
return compile(options)(
postprocess(
parse(options).document().write(preprocess()(value, encoding, true))
)
)
}
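
The rewritten entry point above composes four steps inline. As a hedged sketch (not part of the diff; it assumes the `micromark/lib/*` subpath exports declared in the new package.json further below, and that `compile` and `parse` accept an empty options object), the same pipeline split into named steps:

import {compile} from 'micromark/lib/compile'
import {parse} from 'micromark/lib/parse'
import {postprocess} from 'micromark/lib/postprocess'
import {preprocess} from 'micromark/lib/preprocess'

const chunks = preprocess()('*hi*', undefined, true) // markdown -> chunks
const events = postprocess(parse({}).document().write(chunks)) // chunks -> events
const html = compile({})(events) // events -> HTML
// html === '<p><em>hi</em></p>'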

@@ -1,98 +0,100 @@

'use strict'
/**
* @typedef {import('micromark-util-types').Extension} Extension
*/
import {
attention,
autolink,
blockQuote,
characterEscape,
characterReference,
codeFenced,
codeIndented,
codeText,
definition,
hardBreakEscape,
headingAtx,
htmlFlow,
htmlText,
labelEnd,
labelStartImage,
labelStartLink,
lineEnding,
list,
setextUnderline,
thematicBreak
} from 'micromark-core-commonmark'
import {resolver as resolveText} from './initialize/text.js'
/** @type {Extension['document']} */
Object.defineProperty(exports, '__esModule', {value: true})
export const document = {
[42]: list,
[43]: list,
[45]: list,
[48]: list,
[49]: list,
[50]: list,
[51]: list,
[52]: list,
[53]: list,
[54]: list,
[55]: list,
[56]: list,
[57]: list,
[62]: blockQuote
}
/** @type {Extension['contentInitial']} */
var text$1 = require('./initialize/text.js')
var attention = require('./tokenize/attention.js')
var autolink = require('./tokenize/autolink.js')
var blockQuote = require('./tokenize/block-quote.js')
var characterEscape = require('./tokenize/character-escape.js')
var characterReference = require('./tokenize/character-reference.js')
var codeFenced = require('./tokenize/code-fenced.js')
var codeIndented = require('./tokenize/code-indented.js')
var codeText = require('./tokenize/code-text.js')
var definition = require('./tokenize/definition.js')
var hardBreakEscape = require('./tokenize/hard-break-escape.js')
var headingAtx = require('./tokenize/heading-atx.js')
var htmlFlow = require('./tokenize/html-flow.js')
var htmlText = require('./tokenize/html-text.js')
var labelEnd = require('./tokenize/label-end.js')
var labelStartImage = require('./tokenize/label-start-image.js')
var labelStartLink = require('./tokenize/label-start-link.js')
var lineEnding = require('./tokenize/line-ending.js')
var list = require('./tokenize/list.js')
var setextUnderline = require('./tokenize/setext-underline.js')
var thematicBreak = require('./tokenize/thematic-break.js')
var document = {
42: list, // Asterisk
43: list, // Plus sign
45: list, // Dash
48: list, // 0
49: list, // 1
50: list, // 2
51: list, // 3
52: list, // 4
53: list, // 5
54: list, // 6
55: list, // 7
56: list, // 8
57: list, // 9
62: blockQuote // Greater than
export const contentInitial = {
[91]: definition
}
/** @type {Extension['flowInitial']} */
var contentInitial = {
91: definition // Left square bracket
export const flowInitial = {
[-2]: codeIndented,
[-1]: codeIndented,
[32]: codeIndented
}
/** @type {Extension['flow']} */
var flowInitial = {
'-2': codeIndented, // Horizontal tab
'-1': codeIndented, // Virtual space
32: codeIndented // Space
export const flow = {
[35]: headingAtx,
[42]: thematicBreak,
[45]: [setextUnderline, thematicBreak],
[60]: htmlFlow,
[61]: setextUnderline,
[95]: thematicBreak,
[96]: codeFenced,
[126]: codeFenced
}
/** @type {Extension['string']} */
var flow = {
35: headingAtx, // Number sign
42: thematicBreak, // Asterisk
45: [setextUnderline, thematicBreak], // Dash
60: htmlFlow, // Less than
61: setextUnderline, // Equals to
95: thematicBreak, // Underscore
96: codeFenced, // Grave accent
126: codeFenced // Tilde
export const string = {
[38]: characterReference,
[92]: characterEscape
}
/** @type {Extension['text']} */
var string = {
38: characterReference, // Ampersand
92: characterEscape // Backslash
export const text = {
[-5]: lineEnding,
[-4]: lineEnding,
[-3]: lineEnding,
[33]: labelStartImage,
[38]: characterReference,
[42]: attention,
[60]: [autolink, htmlText],
[91]: labelStartLink,
[92]: [hardBreakEscape, characterEscape],
[93]: labelEnd,
[95]: attention,
[96]: codeText
}
/** @type {Extension['insideSpan']} */
var text = {
'-5': lineEnding, // Carriage return
'-4': lineEnding, // Line feed
'-3': lineEnding, // Carriage return + line feed
33: labelStartImage, // Exclamation mark
38: characterReference, // Ampersand
42: attention, // Asterisk
60: [autolink, htmlText], // Less than
91: labelStartLink, // Left square bracket
92: [hardBreakEscape, characterEscape], // Backslash
93: labelEnd, // Right square bracket
95: attention, // Underscore
96: codeText // Grave accent
export const insideSpan = {
null: [attention, resolveText]
}
/** @type {Extension['disable']} */
var insideSpan = {
null: [attention, text$1.resolver]
export const disable = {
null: []
}
var disable = {null: []}
exports.contentInitial = contentInitial
exports.disable = disable
exports.document = document
exports.flow = flow
exports.flowInitial = flowInitial
exports.insideSpan = insideSpan
exports.string = string
exports.text = text
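
The numeric keys in these maps are character codes (42 is U+002A `*`, 62 is U+003E `>`, and so on, per the comments in the removed lines; the negative values are virtual characters produced by the preprocessor). Syntax extensions use the same shape and are merged with these defaults by `combineExtensions` in lib/parse.js below. A hedged sketch with a deliberately trivial, hypothetical construct that is not part of micromark:

import {micromark} from 'micromark'

// Hypothetical construct: consumes a single character and emits it as a `data` token.
const mention = {
  tokenize(effects, ok) {
    return start
    function start(code) {
      effects.enter('data')
      effects.consume(code) // `code` is 64 (U+0040 `@`) because of the key below
      effects.exit('data')
      return ok
    }
  }
}

// An extension object mirrors the shape of the default constructs above.
const syntaxExtension = {text: {64: mention}}

micromark('hi @user', {extensions: [syntaxExtension]})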

@@ -1,22 +0,18 @@

'use strict'
/**
* @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct
* @typedef {import('micromark-util-types').Initializer} Initializer
* @typedef {import('micromark-util-types').Token} Token
* @typedef {import('micromark-util-types').State} State
*/
import {factorySpace} from 'micromark-factory-space'
import {markdownLineEnding} from 'micromark-util-character'
Object.defineProperty(exports, '__esModule', {value: true})
var assert = require('assert')
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
var factorySpace = require('../tokenize/factory-space.js')
function _interopDefaultLegacy(e) {
return e && typeof e === 'object' && 'default' in e ? e : {default: e}
/** @type {InitialConstruct} */
export const content = {
tokenize: initializeContent
}
/** @type {Initializer} */
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
var tokenize = initializeContent
function initializeContent(effects) {
var contentStart = effects.attempt(
const contentStart = effects.attempt(
this.parser.constructs.contentInitial,

@@ -26,13 +22,10 @@ afterContentStartConstruct,

)
var previous
/** @type {Token} */
let previous
return contentStart
/** @type {State} */
function afterContentStartConstruct(code) {
assert__default['default'](
code === codes.eof || markdownLineEnding(code),
'expected eol or eof'
)
if (code === codes.eof) {
if (code === null) {
effects.consume(code)

@@ -42,21 +35,19 @@ return

effects.enter(types.lineEnding)
effects.enter('lineEnding')
effects.consume(code)
effects.exit(types.lineEnding)
return factorySpace(effects, contentStart, types.linePrefix)
effects.exit('lineEnding')
return factorySpace(effects, contentStart, 'linePrefix')
}
/** @type {State} */
function paragraphInitial(code) {
assert__default['default'](
code !== codes.eof && !markdownLineEnding(code),
'expected anything other than a line ending or EOF'
)
effects.enter(types.paragraph)
effects.enter('paragraph')
return lineStart(code)
}
/** @type {State} */
function lineStart(code) {
var token = effects.enter(types.chunkText, {
contentType: constants.contentTypeText,
previous: previous
const token = effects.enter('chunkText', {
contentType: 'text',
previous
})

@@ -69,10 +60,10 @@

previous = token
return data(code)
}
/** @type {State} */
function data(code) {
if (code === codes.eof) {
effects.exit(types.chunkText)
effects.exit(types.paragraph)
if (code === null) {
effects.exit('chunkText')
effects.exit('paragraph')
effects.consume(code)

@@ -84,7 +75,6 @@ return

effects.consume(code)
effects.exit(types.chunkText)
effects.exit('chunkText')
return lineStart
}
} // Data.
// Data.
effects.consume(code)

@@ -94,3 +84,1 @@ return data

}
exports.tokenize = tokenize

@@ -1,33 +0,67 @@

'use strict'
/**
* @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct
* @typedef {import('micromark-util-types').Initializer} Initializer
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
* @typedef {import('micromark-util-types').Token} Token
* @typedef {import('micromark-util-types').State} State
*/
Object.defineProperty(exports, '__esModule', {value: true})
/**
* @typedef {Record<string, unknown>} StackState
* @typedef {[Construct, StackState]} StackItem
*
* @typedef {{flowContinue: boolean, lazy: boolean, continued: number, flowEnd: boolean}} Result
*/
import {blankLine} from 'micromark-core-commonmark'
import {factorySpace} from 'micromark-factory-space'
import {markdownLineEnding} from 'micromark-util-character'
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
var factorySpace = require('../tokenize/factory-space.js')
var partialBlankLine = require('../tokenize/partial-blank-line.js')
/** @type {InitialConstruct} */
export const document = {
tokenize: initializeDocument
}
/** @type {Construct} */
var tokenize = initializeDocument
const containerConstruct = {
tokenize: tokenizeContainer
}
/** @type {Construct} */
var containerConstruct = {tokenize: tokenizeContainer}
var lazyFlowConstruct = {tokenize: tokenizeLazyFlow}
const lazyFlowConstruct = {
tokenize: tokenizeLazyFlow
}
/** @type {Initializer} */
function initializeDocument(effects) {
var self = this
var stack = []
var continued = 0
var inspectConstruct = {tokenize: tokenizeInspect, partial: true}
var inspectResult
var childFlow
var childToken
const self = this
/** @type {StackItem[]} */
const stack = []
/** @type {Construct} */
const inspectConstruct = {
tokenize: tokenizeInspect,
partial: true
}
let continued = 0
/** @type {Result|undefined} */
let inspectResult
/** @type {TokenizeContext|undefined} */
let childFlow
/** @type {Token|undefined} */
let childToken
return start
/** @type {State} */
function start(code) {
if (continued < stack.length) {
self.containerState = stack[continued][1]
const item = stack[continued]
self.containerState = item[1]
return effects.attempt(
stack[continued][0].continuation,
item[0].continuation,
documentContinue,

@@ -40,2 +74,3 @@ documentContinued

}
/** @type {State} */

@@ -46,2 +81,3 @@ function documentContinue(code) {

}
/** @type {State} */

@@ -66,2 +102,3 @@ function documentContinued(code) {

}
/** @type {State} */

@@ -73,5 +110,6 @@ function containerContinue(code) {

}
/** @type {State} */
function flowStart(code) {
if (code === codes.eof) {
if (code === null) {
exitContainers(0, true)

@@ -83,15 +121,14 @@ effects.consume(code)

childFlow = childFlow || self.parser.flow(self.now())
effects.enter(types.chunkFlow, {
contentType: constants.contentTypeFlow,
effects.enter('chunkFlow', {
contentType: 'flow',
previous: childToken,
_tokenizer: childFlow
})
return flowContinue(code)
}
/** @type {State} */
function flowContinue(code) {
if (code === codes.eof) {
continueFlow(effects.exit(types.chunkFlow))
if (code === null) {
continueFlow(effects.exit('chunkFlow'))
return flowStart(code)

@@ -102,3 +139,3 @@ }

effects.consume(code)
continueFlow(effects.exit(types.chunkFlow))
continueFlow(effects.exit('chunkFlow'))
return effects.check(inspectConstruct, documentAfterPeek)

@@ -110,11 +147,13 @@ }

}
/** @type {State} */
function documentAfterPeek(code) {
exitContainers(
inspectResult.continued,
inspectResult && inspectResult.flowEnd
)
exitContainers(inspectResult.continued, inspectResult.flowEnd)
continued = 0
return start(code)
}
/**
* @param {Token} token
* @returns {void}
*/

@@ -124,20 +163,25 @@ function continueFlow(token) {

childToken = token
childFlow.lazy = inspectResult && inspectResult.lazy
childFlow.lazy = inspectResult ? inspectResult.lazy : false
childFlow.defineSkip(token.start)
childFlow.write(self.sliceStream(token))
}
/**
* @param {number} size
* @param {boolean} end
* @returns {void}
*/
function exitContainers(size, end) {
var index = stack.length
let index = stack.length // Close the flow.
// Close the flow.
if (childFlow && end) {
childFlow.write([codes.eof])
childToken = childFlow = undefined
}
childFlow.write([null])
childToken = undefined
childFlow = undefined
} // Exit open containers.
// Exit open containers.
while (index-- > size) {
self.containerState = stack[index][1]
stack[index][0].exit.call(self, effects)
const entry = stack[index]
entry[0].exit.call(self, effects)
}

@@ -147,15 +191,21 @@

}
/** @type {Tokenizer} */
function tokenizeInspect(effects, ok) {
var subcontinued = 0
inspectResult = {}
let subcontinued = 0
inspectResult = {
flowContinue: false,
lazy: false,
continued: 0,
flowEnd: false
}
return inspectStart
/** @type {State} */
function inspectStart(code) {
if (subcontinued < stack.length) {
self.containerState = stack[subcontinued][1]
const entry = stack[subcontinued]
self.containerState = entry[1]
return effects.attempt(
stack[subcontinued][0].continuation,
entry[0].continuation,
inspectContinue,

@@ -182,2 +232,3 @@ inspectLess

}
/** @type {State} */

@@ -190,2 +241,3 @@ function inspectContinue(code) {

}
/** @type {State} */

@@ -198,15 +250,14 @@ function inspectLess(code) {

containerConstruct,
inspectFlowEnd,
// Maybe flow, or a blank line?
inspectFlowEnd, // Maybe flow, or a blank line?
effects.attempt(
lazyFlowConstruct,
inspectFlowEnd,
effects.check(partialBlankLine, inspectFlowEnd, inspectLazy)
effects.check(blankLine, inspectFlowEnd, inspectLazy)
)
)(code)
}
} // Otherwise we’re interrupting.
// Otherwise we’re interrupting.
return inspectFlowEnd(code)
}
/** @type {State} */

@@ -219,5 +270,6 @@ function inspectLazy(code) {

return inspectDone(code)
}
} // We’re done with flow if we have more containers, or an interruption.
// We’re done with flow if we have more containers, or an interruption.
/** @type {State} */
function inspectFlowEnd(code) {

@@ -227,6 +279,8 @@ inspectResult.flowEnd = true

}
/** @type {State} */
function inspectDone(code) {
inspectResult.continued = subcontinued
self.interrupt = self.containerState = undefined
self.interrupt = undefined
self.containerState = undefined
return ok(code)

@@ -236,2 +290,3 @@ }

}
/** @type {Tokenizer} */

@@ -242,8 +297,7 @@ function tokenizeContainer(effects, ok, nok) {

effects.attempt(this.parser.constructs.document, ok, nok),
types.linePrefix,
this.parser.constructs.disable.null.indexOf('codeIndented') > -1
? undefined
: constants.tabSize
'linePrefix',
this.parser.constructs.disable.null.includes('codeIndented') ? undefined : 4
)
}
/** @type {Tokenizer} */

@@ -254,9 +308,5 @@ function tokenizeLazyFlow(effects, ok, nok) {

effects.lazy(this.parser.constructs.flow, ok, nok),
types.linePrefix,
this.parser.constructs.disable.null.indexOf('codeIndented') > -1
? undefined
: constants.tabSize
'linePrefix',
this.parser.constructs.disable.null.includes('codeIndented') ? undefined : 4
)
}
exports.tokenize = tokenize

@@ -1,28 +0,22 @@

'use strict'
/**
* @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct
* @typedef {import('micromark-util-types').Initializer} Initializer
* @typedef {import('micromark-util-types').State} State
*/
import {blankLine, content} from 'micromark-core-commonmark'
import {factorySpace} from 'micromark-factory-space'
import {markdownLineEnding} from 'micromark-util-character'
Object.defineProperty(exports, '__esModule', {value: true})
var assert = require('assert')
var codes = require('../character/codes.js')
var markdownLineEnding = require('../character/markdown-line-ending.js')
var types = require('../constant/types.js')
var content = require('../tokenize/content.js')
var factorySpace = require('../tokenize/factory-space.js')
var partialBlankLine = require('../tokenize/partial-blank-line.js')
function _interopDefaultLegacy(e) {
return e && typeof e === 'object' && 'default' in e ? e : {default: e}
/** @type {InitialConstruct} */
export const flow = {
tokenize: initializeFlow
}
/** @type {Initializer} */
var assert__default = /*#__PURE__*/ _interopDefaultLegacy(assert)
var tokenize = initializeFlow
function initializeFlow(effects) {
var self = this
var initial = effects.attempt(
const self = this
const initial = effects.attempt(
// Try to parse a blank line.
partialBlankLine,
atBlankEnding,
// Try to parse initial flow (essentially, only code).
blankLine,
atBlankEnding, // Try to parse initial flow (essentially, only code).
effects.attempt(

@@ -38,16 +32,11 @@ this.parser.constructs.flowInitial,

),
types.linePrefix
'linePrefix'
)
)
)
return initial
/** @type {State} */
function atBlankEnding(code) {
assert__default['default'](
code === codes.eof || markdownLineEnding(code),
'expected eol or eof'
)
if (code === codes.eof) {
if (code === null) {
effects.consume(code)

@@ -57,16 +46,12 @@ return

effects.enter(types.lineEndingBlank)
effects.enter('lineEndingBlank')
effects.consume(code)
effects.exit(types.lineEndingBlank)
effects.exit('lineEndingBlank')
self.currentConstruct = undefined
return initial
}
/** @type {State} */
function afterConstruct(code) {
assert__default['default'](
code === codes.eof || markdownLineEnding(code),
'expected eol or eof'
)
if (code === codes.eof) {
if (code === null) {
effects.consume(code)

@@ -76,5 +61,5 @@ return

effects.enter(types.lineEnding)
effects.enter('lineEnding')
effects.consume(code)
effects.exit(types.lineEnding)
effects.exit('lineEnding')
self.currentConstruct = undefined

@@ -84,3 +69,1 @@ return initial

}
exports.tokenize = tokenize

@@ -1,15 +0,19 @@

'use strict'
/**
* @typedef {import('micromark-util-types').Resolver} Resolver
* @typedef {import('micromark-util-types').Initializer} Initializer
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').Code} Code
*/
export const resolver = {
resolveAll: createResolver()
}
export const string = initializeFactory('string')
export const text = initializeFactory('text')
/**
* @param {'string'|'text'} field
* @returns {InitialConstruct}
*/
Object.defineProperty(exports, '__esModule', {value: true})
var codes = require('../character/codes.js')
var assign = require('../constant/assign.js')
var constants = require('../constant/constants.js')
var types = require('../constant/types.js')
var shallow = require('../util/shallow.js')
var text = initializeFactory('text')
var string = initializeFactory('string')
var resolver = {resolveAll: createResolver()}
function initializeFactory(field) {

@@ -22,9 +26,10 @@ return {

}
/** @type {Initializer} */
function initializeText(effects) {
var self = this
var constructs = this.parser.constructs[field]
var text = effects.attempt(constructs, start, notText)
const self = this
const constructs = this.parser.constructs[field]
const text = effects.attempt(constructs, start, notText)
return start
/** @type {State} */

@@ -34,5 +39,6 @@ function start(code) {

}
/** @type {State} */
function notText(code) {
if (code === codes.eof) {
if (code === null) {
effects.consume(code)

@@ -42,32 +48,35 @@ return

effects.enter(types.data)
effects.enter('data')
effects.consume(code)
return data
}
/** @type {State} */
function data(code) {
if (atBreak(code)) {
effects.exit(types.data)
effects.exit('data')
return text(code)
}
} // Data.
// Data.
effects.consume(code)
return data
}
/**
* @param {Code} code
* @returns {boolean}
*/
function atBreak(code) {
var list = constructs[code]
var index = -1
if (code === codes.eof) {
if (code === null) {
return true
}
const list = constructs[code]
let index = -1
if (list) {
while (++index < list.length) {
if (
!list[index].previous ||
list[index].previous.call(self, self.previous)
) {
const item = list[index]
if (!item.previous || item.previous.call(self, self.previous)) {
return true

@@ -77,22 +86,30 @@ }

}
return false
}
}
}
/**
* @param {Resolver} [extraResolver]
* @returns {Resolver}
*/
function createResolver(extraResolver) {
return resolveAllText
/** @type {Resolver} */
function resolveAllText(events, context) {
var index = -1
var enter
let index = -1
/** @type {number|undefined} */
// A rather boring computation (to merge adjacent `data` events) which
let enter // A rather boring computation (to merge adjacent `data` events) which
// improves mm performance by 29%.
while (++index <= events.length) {
if (enter === undefined) {
if (events[index] && events[index][1].type === types.data) {
if (events[index] && events[index][1].type === 'data') {
enter = index
index++
}
} else if (!events[index] || events[index][1].type !== types.data) {
} else if (!events[index] || events[index][1].type !== 'data') {
// Don’t do anything if there is one data token.

@@ -112,20 +129,16 @@ if (index !== enter + 2) {

}
/**
* A rather ugly set of instructions which again looks at chunks in the input
* stream.
* The reason to do this here is that it is *much* faster to parse in reverse.
* And that we can’t hook into `null` to split the line suffix before an EOF.
* To do: figure out if we can make this into a clean utility, or even in core.
* As it will be useful for GFMs literal autolink extension (and maybe even
* tables?)
*
* @type {Resolver}
*/
// A rather ugly set of instructions which again looks at chunks in the input
// stream.
// The reason to do this here is that it is *much* faster to parse in reverse.
// And that we can’t hook into `null` to split the line suffix before an EOF.
// To do: figure out if we can make this into a clean utility, or even in core.
// As it will be useful for GFMs literal autolink extension (and maybe even
// tables?)
function resolveAllLineSuffixes(events, context) {
var eventIndex = -1
var chunks
var data
var chunk
var index
var bufferIndex
var size
var tabs
var token
let eventIndex = -1

@@ -135,14 +148,16 @@ while (++eventIndex <= events.length) {

(eventIndex === events.length ||
events[eventIndex][1].type === types.lineEnding) &&
events[eventIndex - 1][1].type === types.data
events[eventIndex][1].type === 'lineEnding') &&
events[eventIndex - 1][1].type === 'data'
) {
data = events[eventIndex - 1][1]
chunks = context.sliceStream(data)
index = chunks.length
bufferIndex = -1
size = 0
tabs = undefined
const data = events[eventIndex - 1][1]
const chunks = context.sliceStream(data)
let index = chunks.length
let bufferIndex = -1
let size = 0
/** @type {boolean|undefined} */
let tabs
while (index--) {
chunk = chunks[index]
const chunk = chunks[index]

@@ -152,3 +167,3 @@ if (typeof chunk === 'string') {

while (chunk.charCodeAt(bufferIndex - 1) === codes.space) {
while (chunk.charCodeAt(bufferIndex - 1) === 32) {
size++

@@ -160,9 +175,9 @@ bufferIndex--

bufferIndex = -1
}
// Number
else if (chunk === codes.horizontalTab) {
} // Number
else if (chunk === -2) {
tabs = true
size++
} else if (chunk === codes.virtualSpace);
else {
} else if (chunk === -1) {
// Empty
} else {
// Replacement character, exit.

@@ -175,9 +190,7 @@ index++

if (size) {
token = {
const token = {
type:
eventIndex === events.length ||
tabs ||
size < constants.hardBreakPrefixSizeMin
? types.lineSuffix
: types.hardBreakTrailing,
eventIndex === events.length || tabs || size < 2
? 'lineSuffix'
: 'hardBreakTrailing',
start: {

@@ -192,9 +205,8 @@ line: data.end.line,

},
end: shallow(data.end)
end: Object.assign({}, data.end)
}
data.end = Object.assign({}, token.start)
data.end = shallow(token.start)
if (data.start.offset === data.end.offset) {
assign(data, token)
Object.assign(data, token)
} else {

@@ -217,5 +229,1 @@ events.splice(

}
exports.resolver = resolver
exports.string = string
exports.text = text
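
The `resolveAllLineSuffixes` resolver above classifies trailing whitespace before a line ending: two or more trailing spaces (and no tabs) become a `hardBreakTrailing` token, anything shorter a plain `lineSuffix`. A small illustration of the observable effect (not part of the diff; output per CommonMark):

import {micromark} from 'micromark'

micromark('a  \nb') // two trailing spaces: hard break
// => '<p>a<br />\nb</p>'

micromark('a \nb') // one trailing space: soft break, suffix is dropped
// => '<p>a\nb</p>'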

@@ -1,5 +0,13 @@

import {ParseOptions, Parser} from './shared-types'
declare function createParser(options?: ParseOptions): Parser
export default createParser
/**
* @param {ParseOptions} [options]
* @returns {ParseContext}
*/
export function parse(
options?: import('micromark-util-types').ParseOptions | undefined
): ParseContext
export type InitialConstruct = import('micromark-util-types').InitialConstruct
export type FullNormalizedExtension =
import('micromark-util-types').FullNormalizedExtension
export type ParseOptions = import('micromark-util-types').ParseOptions
export type ParseContext = import('micromark-util-types').ParseContext
export type Create = import('micromark-util-types').Create

@@ -1,36 +0,51 @@

'use strict'
/**
* @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct
* @typedef {import('micromark-util-types').FullNormalizedExtension} FullNormalizedExtension
* @typedef {import('micromark-util-types').ParseOptions} ParseOptions
* @typedef {import('micromark-util-types').ParseContext} ParseContext
* @typedef {import('micromark-util-types').Create} Create
*/
import {combineExtensions} from 'micromark-util-combine-extensions'
import {content} from './initialize/content.js'
import {document} from './initialize/document.js'
import {flow} from './initialize/flow.js'
import {text, string} from './initialize/text.js'
import {createTokenizer} from './create-tokenizer.js'
import * as defaultConstructs from './constructs.js'
/**
* @param {ParseOptions} [options]
* @returns {ParseContext}
*/
var content = require('./initialize/content.js')
var document = require('./initialize/document.js')
var flow = require('./initialize/flow.js')
var text = require('./initialize/text.js')
var combineExtensions = require('./util/combine-extensions.js')
var createTokenizer = require('./util/create-tokenizer.js')
var miniflat = require('./util/miniflat.js')
var constructs = require('./constructs.js')
export function parse(options = {}) {
/** @type {FullNormalizedExtension} */
// @ts-expect-error `defaultConstructs` is full, so the result will be too.
const constructs = combineExtensions(
// @ts-expect-error Same as above.
[defaultConstructs].concat(options.extensions || [])
)
/** @type {ParseContext} */
function parse(options) {
var settings = options || {}
var parser = {
const parser = {
defined: [],
constructs: combineExtensions(
[constructs].concat(miniflat(settings.extensions))
),
constructs,
content: create(content),
document: create(document),
flow: create(flow),
string: create(text.string),
text: create(text.text)
string: create(string),
text: create(text)
}
return parser
/**
* @param {InitialConstruct} initial
*/
function create(initializer) {
function create(initial) {
return creator
/** @type {Create} */
function creator(from) {
return createTokenizer(parser, initializer, from)
return createTokenizer(parser, initial, from)
}
}
}
module.exports = parse

@@ -1,5 +0,8 @@

import {Event} from './shared-types'
declare function postprocess(events: Event[]): Event[]
export default postprocess
/**
* @param {Event[]} events
* @returns {Event[]}
*/
export function postprocess(
events: import('micromark-util-types').Event[]
): import('micromark-util-types').Event[]
export type Event = import('micromark-util-types').Event

@@ -1,6 +0,11 @@

'use strict'
/**
* @typedef {import('micromark-util-types').Event} Event
*/
import {subtokenize} from 'micromark-util-subtokenize'
/**
* @param {Event[]} events
* @returns {Event[]}
*/
var subtokenize = require('./util/subtokenize.js')
function postprocess(events) {
export function postprocess(events) {
while (!subtokenize(events)) {

@@ -12,3 +17,1 @@ // Empty

}
module.exports = postprocess

@@ -1,11 +0,13 @@

import {BufferEncoding} from './shared-types'
type PreprocessReturn = (
value: string,
encoding: BufferEncoding,
end?: boolean
) => string[]
declare function preprocess(): PreprocessReturn
export default preprocess
/**
* @returns {Preprocessor}
*/
export function preprocess(): Preprocessor
export type Encoding = import('micromark-util-types').Encoding
export type Value = import('micromark-util-types').Value
export type Chunk = import('micromark-util-types').Chunk
export type Code = import('micromark-util-types').Code
export type Preprocessor = (
value: Value,
encoding?: import('micromark-util-types').Encoding | undefined,
end?: boolean | undefined
) => Chunk[]

@@ -1,24 +0,51 @@

'use strict'
/**
* @typedef {import('micromark-util-types').Encoding} Encoding
* @typedef {import('micromark-util-types').Value} Value
* @typedef {import('micromark-util-types').Chunk} Chunk
* @typedef {import('micromark-util-types').Code} Code
*/
var codes = require('./character/codes.js')
var constants = require('./constant/constants.js')
/**
* @callback Preprocessor
* @param {Value} value
* @param {Encoding} [encoding]
* @param {boolean} [end=false]
* @returns {Chunk[]}
*/
const search = /[\0\t\n\r]/g
/**
* @returns {Preprocessor}
*/
var search = /[\0\t\n\r]/g
export function preprocess() {
let column = 1
let buffer = ''
/** @type {boolean|undefined} */
function preprocess() {
var start = true
var column = 1
var buffer = ''
var atCarriageReturn
let start = true
/** @type {boolean|undefined} */
let atCarriageReturn
return preprocessor
/** @type {Preprocessor} */
function preprocessor(value, encoding, end) {
var chunks = []
var match
var next
var startPosition
var endPosition
var code
/** @type {Chunk[]} */
const chunks = []
/** @type {RegExpMatchArray|null} */
let match
/** @type {number} */
let next
/** @type {number} */
let startPosition
/** @type {number} */
let endPosition
/** @type {Code} */
let code // @ts-expect-error `Buffer` does allow an encoding.
value = buffer + value.toString(encoding)

@@ -29,3 +56,3 @@ startPosition = 0

if (start) {
if (value.charCodeAt(0) === codes.byteOrderMarker) {
if (value.charCodeAt(0) === 65279) {
startPosition++

@@ -40,3 +67,4 @@ }

match = search.exec(value)
endPosition = match ? match.index : value.length
endPosition =
match && match.index !== undefined ? match.index : value.length
code = value.charCodeAt(endPosition)

@@ -49,12 +77,8 @@

if (
code === codes.lf &&
startPosition === endPosition &&
atCarriageReturn
) {
chunks.push(codes.carriageReturnLineFeed)
if (code === 10 && startPosition === endPosition && atCarriageReturn) {
chunks.push(-3)
atCarriageReturn = undefined
} else {
if (atCarriageReturn) {
chunks.push(codes.carriageReturn)
chunks.push(-5)
atCarriageReturn = undefined

@@ -68,18 +92,29 @@ }

if (code === codes.nul) {
chunks.push(codes.replacementCharacter)
column++
} else if (code === codes.ht) {
next = Math.ceil(column / constants.tabSize) * constants.tabSize
chunks.push(codes.horizontalTab)
while (column++ < next) chunks.push(codes.virtualSpace)
} else if (code === codes.lf) {
chunks.push(codes.lineFeed)
column = 1
switch (code) {
case 0: {
chunks.push(65533)
column++
break
}
case 9: {
next = Math.ceil(column / 4) * 4
chunks.push(-2)
while (column++ < next) chunks.push(-1)
break
}
case 10: {
chunks.push(-4)
column = 1
break
}
default: {
atCarriageReturn = true
column = 1
}
}
// Must be carriage return.
else {
atCarriageReturn = true
column = 1
}
}

@@ -91,5 +126,5 @@

if (end) {
if (atCarriageReturn) chunks.push(codes.carriageReturn)
if (atCarriageReturn) chunks.push(-5)
if (buffer) chunks.push(buffer)
chunks.push(codes.eof)
chunks.push(null)
}

@@ -100,3 +135,1 @@

}
module.exports = preprocess
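
For reference (not part of the package), a map collecting the numeric values that the rewritten preprocessor and tokenizers above inline in place of the old `codes` constants:

const chunkCodes = {
  eof: null, // end of the input stream
  carriageReturn: -5,
  lineFeed: -4,
  carriageReturnLineFeed: -3,
  horizontalTab: -2,
  virtualSpace: -1, // filler emitted when expanding tabs to a tab stop of 4
  replacementCharacter: 65533, // emitted in place of NUL (0)
  byteOrderMark: 65279 // stripped from the start of the input
}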
{
"name": "micromark",
"version": "2.11.4",
"version": "3.0.0-alpha.1",
"description": "small commonmark compliant markdown parser with positional info and concrete tokens",

@@ -26,3 +26,3 @@ "license": "MIT",

],
"repository": "micromark/micromark",
"repository": "https://github.com/micromark/micromark/tree/main/packages/micromark",
"bugs": "https://github.com/micromark/micromark/issues",

@@ -41,170 +41,86 @@ "funding": [

"contributors": [
"Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)",
"Bogdan Chadkin <trysound@yandex.ru>",
"Christian Murphy <christian.murphy.42@gmail.com>",
"Marouane Fazouane <fazouanem3@gmail.com>",
"John Otander <johnotander@gmail.com> (https://johno.com)",
"Stephan Schneider <stephanschndr@gmail.com>",
"Victor Felder <victor@draft.li> (https://draft.li)",
"Mudit Ameta <zeusdeux@gmail.com> (https://mudit.xyz)",
"Merlijn Vos <merlijn@soverin.net>"
"Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)"
],
"type": "module",
"main": "index.js",
"types": "dev/index.d.ts",
"files": [
"dist/",
"dev/",
"lib/",
"buffer.d.ts",
"buffer.js",
"buffer.mjs",
"index.js",
"index.mjs",
"index.d.ts",
"stream.d.ts",
"stream.js",
"stream.mjs"
"index.js"
],
"main": "./index.js",
"types": "index.d.ts",
"exports": {
".": {
"development": "./dev/index.js",
"default": "./index.js"
},
"./stream": {
"development": "./dev/stream.js",
"default": "./stream.js"
},
"./stream.js": {
"development": "./dev/stream.js",
"default": "./stream.js"
},
"./lib/compile": {
"development": "./dev/lib/compile.js",
"default": "./lib/compile.js"
},
"./lib/compile.js": {
"development": "./dev/lib/compile.js",
"default": "./lib/compile.js"
},
"./lib/parse": {
"development": "./dev/lib/parse.js",
"default": "./lib/parse.js"
},
"./lib/parse.js": {
"development": "./dev/lib/parse.js",
"default": "./lib/parse.js"
},
"./lib/postprocess": {
"development": "./dev/lib/postprocess.js",
"default": "./lib/postprocess.js"
},
"./lib/postprocess.js": {
"development": "./dev/lib/postprocess.js",
"default": "./lib/postprocess.js"
},
"./lib/preprocess": {
"development": "./dev/lib/preprocess.js",
"default": "./lib/preprocess.js"
},
"./lib/preprocess.js": {
"development": "./dev/lib/preprocess.js",
"default": "./lib/preprocess.js"
}
},
"dependencies": {
"@types/debug": "^4.0.0",
"debug": "^4.0.0",
"parse-entities": "^2.0.0"
"micromark-core-commonmark": "1.0.0-alpha.1",
"micromark-factory-space": "1.0.0-alpha.1",
"micromark-util-character": "1.0.0-alpha.1",
"micromark-util-chunked": "1.0.0-alpha.1",
"micromark-util-combine-extensions": "1.0.0-alpha.1",
"micromark-util-encode": "1.0.0-alpha.1",
"micromark-util-normalize-identifier": "1.0.0-alpha.1",
"micromark-util-resolve-all": "1.0.0-alpha.1",
"micromark-util-sanitize-uri": "1.0.0-alpha.1",
"micromark-util-subtokenize": "1.0.0-alpha.1",
"micromark-util-symbol": "1.0.0-alpha.1",
"micromark-util-types": "1.0.0-alpha.1",
"parse-entities": "^3.0.0"
},
"devDependencies": {
"@babel/core": "^7.0.0",
"@rollup/plugin-babel": "^5.0.0",
"@rollup/plugin-commonjs": "^17.0.0",
"@rollup/plugin-node-resolve": "^11.0.0",
"@types/events": "^3.0.0",
"@unicode/unicode-13.0.0": "^1.0.0",
"babel-plugin-inline-constants": "^1.0.0",
"babel-plugin-unassert": "^3.0.0",
"babel-plugin-undebug": "^1.0.0",
"c8": "^7.0.0",
"character-entities": "^1.0.0",
"commonmark.json": "^0.29.0",
"concat-stream": "^2.0.0",
"cross-env": "^7.0.0",
"dtslint": "^4.0.0",
"eslint-plugin-es": "^4.0.0",
"eslint-plugin-security": "^1.0.0",
"esm": "^3.0.0",
"glob": "^7.0.0",
"gzip-size-cli": "^4.0.0",
"jsfuzz": "1.0.14",
"ms": "^2.0.0",
"patch-package": "^6.0.0",
"prettier": "^2.0.0",
"regenerate": "^1.0.0",
"remark-cli": "^9.0.0",
"remark-preset-wooorm": "^8.0.0",
"resolve-from": "^5.0.0",
"rollup": "^2.0.0",
"rollup-plugin-terser": "^7.0.0",
"tape": "^5.0.0",
"xo": "^0.37.0"
},
"scripts": {
"generate-lib-types": "node --experimental-modules script/generate-constant-typings.mjs",
"generate-lib-expressions": "node --experimental-modules script/generate-expressions.mjs",
"generate-lib-cjs": "rollup -c --silent",
"generate-lib": "npm run generate-lib-types && npm run generate-lib-expressions && npm run generate-lib-cjs",
"generate-dist-types": "node --experimental-modules script/copy-dict.mjs",
"generate-dist-js": "cross-env BUILD=dist rollup -c --silent",
"generate-dist": "npm run generate-dist-types && npm run generate-dist-js",
"generate-size": "cross-env BUILD=size rollup -c --silent && gzip-size micromark.min.js && gzip-size --raw micromark.min.js",
"generate": "npm run generate-lib && npm run generate-dist && npm run generate-size",
"format": "remark . -qfo && prettier . -w --loglevel warn && xo --fix",
"pretest-fuzz": "patch-package --patch-dir script/patches && node script/generate-fixtures.mjs",
"test-fuzz": "cross-env NODE_OPTIONS=\"-r esm\" timeout 15m jsfuzz test/fuzz.js test/fixtures",
"test-api": "node --experimental-modules test/index.mjs",
"test-coverage": "c8 --check-coverage --lines 100 --functions 100 --branches 100 --reporter lcov node --experimental-modules test/index.mjs",
"test-types": "dtslint .",
"test": "npm run generate && npm run format && npm run test-coverage && npm run test-types"
"build": "rimraf \"*.d.ts\" \"{dev/,lib/}**/*.d.ts\" && tsc && micromark-build && type-coverage"
},
"prettier": {
"tabWidth": 2,
"useTabs": false,
"singleQuote": true,
"bracketSpacing": false,
"semi": false,
"trailingComma": "none"
},
"xo": {
"esnext": false,
"extensions": [
"mjs"
],
"prettier": true,
"envs": [
"shared-node-browser"
],
"rules": {
"import/extensions": [
"error",
"always"
]
},
"overrides": [
{
"files": [
"lib/**/*.{js,mjs}"
],
"plugin": [
"es"
],
"extends": [
"plugin:es/no-new-in-es2015",
"plugin:security/recommended"
],
"rules": {
"complexity": "off",
"es/no-array-from": "off",
"es/no-object-assign": "off",
"es/no-modules": "off",
"import/no-mutable-exports": "off",
"import/no-anonymous-default-export": "off",
"guard-for-in": "off",
"max-depth": "off",
"no-multi-assign": "off",
"no-unmodified-loop-condition": "off",
"security/detect-object-injection": "off",
"unicorn/explicit-length-check": "off",
"unicorn/prefer-includes": "off",
"unicorn/prefer-number-properties": "off"
}
},
{
"files": [
"**/*.d.ts"
],
"rules": {
"import/extensions": [
"error",
"never"
]
}
},
{
"files": [
"test/**/*.{js,mjs}"
],
"rules": {
"import/no-unassigned-import": "off"
}
}
],
"ignores": [
"dist/",
"lib/**/*.js",
"micromark.test.ts"
]
},
"remarkConfig": {
"plugins": [
"preset-wooorm",
[
"lint-no-html",
false
]
]
"xo": false,
"typeCoverage": {
"atLeast": 100,
"detail": true,
"strict": true,
"ignoreCatch": true
}
}