Comparing micromark version 2.0.0 to 2.1.0
@@ -34,3 +34,3 @@ module.exports = compileHtml
   codeIndented: onentercodeindented,
-  codeSpan: onentercodespan,
+  codeText: onentercodetext,
   content: onentercontent,
@@ -43,3 +43,3 @@ definition: onenterdefinition,
   htmlFlow: onenterhtmlflow,
-  htmlSpan: onenterhtml,
+  htmlText: onenterhtml,
   image: onenterimage,
@@ -65,3 +65,3 @@ label: buffer,
   autolinkEmail: onexitautolinkemail,
-  autolinkUri: onexitautolinkuri,
+  autolinkProtocol: onexitautolinkprotocol,
   blockQuote: onexitblockquote,
@@ -78,3 +78,3 @@ characterEscapeValue: onexitcharacterescapevalue,
   codeIndented: onexitflowcode,
-  codeSpan: onexitcodespan,
+  codeText: onexitcodetext,
   data: onexitdata,
@@ -89,3 +89,3 @@ definition: onexitdefinition,
   htmlFlow: onexithtml,
-  htmlSpan: onexithtml,
+  htmlText: onexithtml,
   image: onexitmedia,
@@ -120,3 +120,3 @@ label: onexitlabel,
   var characterReferenceType
-  var inCodeSpan
+  var inCodeText
   var ignoreEncode
@@ -623,3 +623,3 @@ var media
-    if (inCodeSpan) {
+    if (inCodeText) {
       raw(' ')
@@ -664,9 +664,9 @@ return
-  function onentercodespan() {
-    inCodeSpan = true
+  function onentercodetext() {
+    inCodeText = true
     tag('<code>')
   }
-  function onexitcodespan() {
-    inCodeSpan = undefined
+  function onexitcodetext() {
+    inCodeText = undefined
     tag('</code>')
@@ -713,3 +713,3 @@ }
-  function onexitautolinkuri(token) {
+  function onexitautolinkprotocol(token) {
     var uri = this.sliceSerialize(token)
@@ -716,0 +716,0 @@ tag('<a href="' + url(uri, protocolHref) + '">')
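
The compiler hunks above are mechanical renames: the codeSpan token type becomes codeText, htmlSpan becomes htmlText, and autolinkUri becomes autolinkProtocol, so the enter/exit handler maps are re-keyed and the inCodeSpan flag becomes inCodeText. That flag exists because a line ending inside code text must compile to a space (the `raw(' ')` call above). A quick check of that behavior through micromark's public buffer API; the printed output is what CommonMark specifies, an expected value rather than a captured one:

var micromark = require('micromark')

// A line ending inside code text compiles to a single space, which is
// what the compiler's inCodeText flag is for.
console.log(micromark('`alpha\nbravo`'))
// expected: <p><code>alpha bravo</code></p>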
@@ -44,3 +44,3 @@ // This module is compiled away!
 // Whole autolink (`<https://example.com>` or `<admin@example.com>`)
-// Includes `autolinkMarker` and `autolinkUri` or `autolinkEmail`.
+// Includes `autolinkMarker` and `autolinkProtocol` or `autolinkEmail`.
 exports.autolink = 'autolink'
@@ -51,7 +51,7 @@
-// Marker around an `autolinkUri` or `autolinkEmail` (`<` or `>`).
+// Marker around an `autolinkProtocol` or `autolinkEmail` (`<` or `>`).
 exports.autolinkMarker = 'autolinkMarker'
-// URI autolink w/o markers (`https://example.com`)
-exports.autolinkUri = 'autolinkUri'
+// Protocol autolink w/o markers (`https://example.com`)
+exports.autolinkProtocol = 'autolinkProtocol'
@@ -120,15 +120,15 @@ // A whole character escape (`\-`).
-// A code span (``` `alpha` ```).
-// Includes `codeSpanSequence`, `data`, `lineEnding`, and can include
-// `codeSpanPaddingWhitespace` and `codeSpanPaddingLineEnding`.
-exports.codeSpan = 'codeSpan'
+// A text code (``` `alpha` ```).
+// Includes `codeTextSequence`, `data`, `lineEnding`, and can include
+// `codeTextPaddingWhitespace` and `codeTextPaddingLineEnding`.
+exports.codeText = 'codeText'
 // A space right after or before a tick.
-exports.codeSpanPaddingWhitespace = 'codeSpanPaddingWhitespace'
+exports.codeTextPaddingWhitespace = 'codeTextPaddingWhitespace'
 // A line ending right after or before a tick.
-exports.codeSpanPaddingLineEnding = 'codeSpanPaddingLineEnding'
+exports.codeTextPaddingLineEnding = 'codeTextPaddingLineEnding'
-// A code span fence (` `` `).
-exports.codeSpanSequence = 'codeSpanSequence'
+// A text code fence (` `` `).
+exports.codeTextSequence = 'codeTextSequence'
@@ -235,5 +235,5 @@ // Whole content:
-// HTML in content (the tag in `a <i> b`).
+// HTML in text (the tag in `a <i> b`).
 // Includes `lineEnding`, `data`.
-exports.htmlSpan = 'htmlSpan'
+exports.htmlText = 'htmlText'
@@ -240,0 +240,0 @@ // Whole image (`![alpha](bravo)`, `![alpha][bravo]`, `![alpha][]`, or
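
The token-type constants get the same renames, including the padding and sequence sub-types; note the module's own comment that it is "compiled away", which is why one build copy of the tokenizer changes (further down in this diff) carries plain string literals instead. A sketch of how a consumer reads these constants; the require path is an assumption for illustration:

// Hypothetical consumer of the renamed constants (path assumed).
var types = require('micromark/lib/constant/types')

console.log(types.codeText) // 'codeText'
console.log(types.autolinkProtocol) // 'autolinkProtocol'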
@@ -8,3 +8,3 @@ module.exports = createParser
 var attention = require('./tokenize/attention')
-var atxHeading = require('./tokenize/atx-heading')
+var headingAtx = require('./tokenize/heading-atx')
 var autolink = require('./tokenize/autolink')
@@ -17,7 +17,7 @@ var list = require('./tokenize/list')
 var codeIndented = require('./tokenize/code-indented')
-var codeSpan = require('./tokenize/code-span')
+var codeText = require('./tokenize/code-text')
 var definition = require('./tokenize/definition')
 var hardBreakEscape = require('./tokenize/hard-break-escape')
 var htmlFlow = require('./tokenize/html-flow')
-var htmlSpan = require('./tokenize/html-span')
+var htmlText = require('./tokenize/html-text')
 var labelEnd = require('./tokenize/label-end')
@@ -62,3 +62,3 @@ var labelImage = require('./tokenize/label-start-image')
-  flow[35] = atxHeading
+  flow[35] = headingAtx
   flow[42] = thematicBreak
@@ -83,3 +83,3 @@ flow[45] = [setextUnderline, thematicBreak]
   text[42] = attention
-  text[60] = [autolink, htmlSpan]
+  text[60] = [autolink, htmlText]
   text[91] = labelLink
@@ -89,3 +89,3 @@ text[92] = [hardBreakEscape, characterEscape]
   text[95] = attention
-  text[96] = codeSpan
+  text[96] = codeText
@@ -92,0 +92,0 @@ parser = {
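
The parser wires each construct into maps indexed by the character code that can start it, so the renames only touch requires and map values: 35 is `#` (ATX headings), 60 is `<` (autolinks and HTML), 96 is the backtick (code text). A toy version of that dispatch idea, with plain objects standing in for real construct records:

// Sketch of code-indexed construct dispatch (stand-in constructs).
var text = {}
text[96] = {name: 'codeText'} // '`'.charCodeAt(0) === 96
text[60] = [{name: 'autolink'}, {name: 'htmlText'}] // '<'

console.log(text['`'.charCodeAt(0)].name) // codeText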
@@ -6,5 +6,6 @@ exports.tokenize = tokenizeAttention
 var classifyCharacter = require('../util/classify-character')
+var movePoint = require('../util/move-point')
-var attention = 'attention'
+// Internal type for markers that could turn into emphasis or strong sequences.
+var attentionSequence = 'attentionSequence'
@@ -29,4 +30,4 @@ // Take all events and resolve attention to emphasis or strong.
   var sequenceToken
-  var dataToken
-  var point
+  var textToken
   var marker
@@ -38,3 +39,3 @@
-    if (token.type === attention && token._events) {
+    if (token.type === attentionSequence && token._events) {
       previous = lastSequence
@@ -106,18 +107,17 @@ lastSequence = {token: token, size: token._size, previous: previous}
       // New token.
-      attentionToken = {
-        type: use > 1 ? 'strong' : 'emphasis',
-        start: openerToken.start,
-        end: token.end
+      sequenceToken = {
+        type: use > 1 ? 'strongSequence' : 'emphasisSequence',
+        start: movePoint(shallow(openerToken.end), -use),
+        end: shallow(openerToken.end)
       }
-      point = context.shiftPoint(shallow(openerToken.start), use)
-      dataToken = {
+      textToken = {
         type: use > 1 ? 'strongText' : 'emphasisText',
-        start: shallow(point)
+        start: shallow(openerToken.end),
+        end: shallow(token.start)
       }
-      sequenceToken = {
-        type: use > 1 ? 'strongSequence' : 'emphasisSequence',
-        start: shallow(openerToken.start),
-        end: shallow(point)
+      attentionToken = {
+        type: use > 1 ? 'strong' : 'emphasis',
+        start: shallow(sequenceToken.start)
       }
@@ -129,18 +129,17 @@
         ['exit', sequenceToken, context],
-        ['enter', dataToken, context]
+        ['enter', textToken, context]
       )
-      openerToken.start = point
+      openerToken.end = shallow(sequenceToken.start)
       openerToken._side = 1
-      point = context.shiftPoint(shallow(token.end), -use)
-      dataToken.end = shallow(point)
       sequenceToken = {
         type: sequenceToken.type,
-        start: shallow(point),
-        end: shallow(token.end)
+        start: token.start,
+        end: movePoint(shallow(token.start), use)
       }
+      attentionToken.end = shallow(sequenceToken.end)
       token._events.push(
-        ['exit', dataToken, context],
+        ['exit', textToken, context],
         ['enter', sequenceToken, context],
@@ -151,3 +150,3 @@ ['exit', sequenceToken, context],
-      token.end = point
+      token.start = shallow(sequenceToken.end)
       token._side = 2
@@ -190,3 +189,3 @@
-    if (token.type === attention && token._events) {
+    if (token.type === attentionSequence && token._events) {
       subevents = token._events
@@ -246,3 +245,3 @@
-      effects.enter(attention)
+      effects.enter(attentionSequence)
       marker = code
@@ -265,3 +264,3 @@ return more(code)
-      token = effects.exit(attention)
+      token = effects.exit(attentionSequence)
       after = classifyCharacter(code)
@@ -268,0 +267,0 @@ open = !after || (before && after === 2)
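
The attention tokenizer changes the most: its internal marker type is renamed from attention to attentionSequence, and the resolver now derives the sequence, text, and wrapper tokens from existing token boundaries with the simplified movePoint helper (see its own hunks below) instead of the removed context.shiftPoint. The event shape it resolves `**a**` into looks roughly like this; a hand-written sketch, not captured output:

// Assumed event stream for `**a**` after resolution: a wrapping strong
// token, a sequence token per `**`, and a text token in between.
var events = [
  ['enter', {type: 'strong'}],
  ['enter', {type: 'strongSequence'}],
  ['exit', {type: 'strongSequence'}],
  ['enter', {type: 'strongText'}],
  ['exit', {type: 'strongText'}],
  ['enter', {type: 'strongSequence'}],
  ['exit', {type: 'strongSequence'}],
  ['exit', {type: 'strong'}]
]

console.log(events.length) // 8 events for one strong span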
@@ -24,3 +24,3 @@ exports.tokenize = tokenizeAutolink
     effects.exit('autolinkMarker')
-    token = effects.enter('autolinkUri')
+    token = effects.enter('autolinkProtocol')
     return open
@@ -44,3 +44,3 @@ }
-    return asciiAtext(code) ? emailAtext(code) : nok(code)
+    return code === 64 || asciiAtext(code) ? emailAtext(code) : nok(code)
   }
@@ -67,3 +67,3 @@
     if (code === 62) {
-      effects.exit('autolinkUri')
+      effects.exit('autolinkProtocol')
       return end(code)
@@ -70,0 +70,0 @@ }
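
Besides the autolinkUri to autolinkProtocol rename there is one behavior change here: 64 is `@`, so the extra check lets the state switch to the email branch as soon as it meets an at sign where previously only atext characters were accepted. This appears to fix email autolinks that would otherwise dead-end at the `@`, such as one with a single-character local part; treat the output as an expectation, not a captured result:

var micromark = require('micromark')

// With the fix, an at sign immediately after the first character
// should no longer kill the autolink.
console.log(micromark('<a@example.com>'))
// expected: <p><a href="mailto:a@example.com">a@example.com</a></p>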
@@ -21,6 +21,8 @@ exports.tokenize = tokenizeListStart
+var listItemValueSizelimit = 10 - 1
 function tokenizeListStart(effects, ok, nok) {
   var self = this
+  var token
   var initialSize
-  var size
@@ -57,3 +59,4 @@ return start
-      effects.enter('listItemPrefix')
+      token = effects.enter('listItemPrefix')
+      token._size = 0
       return atMarker(code)
@@ -72,6 +75,6 @@ }
-      effects.enter('listItemPrefix')
+      token = effects.enter('listItemPrefix')
       effects.enter('listItemValue')
       effects.consume(code)
-      size = 1
+      token._size = 1
       return self.interrupt ? afterValue : inside
@@ -81,4 +84,5 @@ }
   function inside(code) {
-    if (++size < 10 && asciiDigit(code)) {
+    if (token._size < listItemValueSizelimit && asciiDigit(code)) {
       effects.consume(code)
+      token._size++
       return inside
@@ -103,2 +107,3 @@ }
     effects.exit('listItemMarker')
+    token._size++
     return effects.check(
@@ -127,3 +132,3 @@ blank,
     if (markdownSpace(code)) {
-      effects.enter('listItemPrefixWhitespace')
+      effects.enter('listItemPrefixWhitespace')._size = 1
       effects.consume(code)
@@ -138,5 +143,5 @@ effects.exit('listItemPrefixWhitespace')
   function endOfPrefix(code) {
+    token._size += prefixSize(self.events, 'listItemPrefixWhitespace')
+    self.containerState.size = initialSize + token._size
     effects.exit('listItemPrefix')
-    self.containerState.size =
-      initialSize + prefixSize(self.events, 'listItemPrefix')
     return ok(code)
@@ -158,7 +163,7 @@ }
-  return effects.check(blank, onBlank, checkContent)
+  return effects.check(blank, onBlank, notBlank)
   function onBlank(code) {
     if (self.containerState.initialBlankLine) {
-      self.containerState.furtherBlankLine = true
+      self.containerState.furtherBlankLines = true
     }
@@ -169,7 +174,11 @@
-  function checkContent(code) {
-    if (self.containerState.furtherBlankLine || !markdownSpace(code)) {
+  function notBlank(code) {
+    if (self.containerState.furtherBlankLines || !markdownSpace(code)) {
+      self.containerState.initialBlankLine = undefined
+      self.containerState.furtherBlankLines = undefined
       return effects.attempt(nextItem, onItem, nok)(code)
     }
     self.containerState.initialBlankLine = undefined
-    self.containerState.furtherBlankLine = undefined
+    self.containerState.furtherBlankLines = undefined
     return effects.attempt(
@@ -176,0 +185,0 @@ indent,
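
The list tokenizer stops deriving prefix widths from positions and instead counts consumed codes on the token itself: listItemPrefix carries a `_size` that is bumped for each value digit, the marker, and the prefix whitespace, and containerState.size is computed from it. The digit bound is precomputed as `10 - 1`, at most nine digits in an ordered-list start number, matching CommonMark. The blank-line state is also renamed (furtherBlankLine to furtherBlankLines, checkContent to notBlank) and is now reset on both branches. The counting idea in isolation:

// Sketch: an explicit per-token counter replaces position arithmetic.
var token = {type: 'listItemPrefix', _size: 0}
token._size++ // the marker, e.g. `-`
token._size++ // one space of prefix whitespace

console.log(token._size) // 2, the width this prefix occupies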
@@ -8,2 +8,4 @@ exports.tokenize = tokenizeSpaceOrLineEnding
 function tokenizeSpaceOrLineEnding(effects, ok) {
+  var token
   return start
@@ -20,5 +22,5 @@
     if (markdownSpace(code)) {
-      effects.enter('whitespace')
-      effects.consume(code)
-      return whitespace
+      token = effects.enter('whitespace')
+      token._size = 0
+      return whitespace(code)
     }
@@ -32,2 +34,3 @@
       effects.consume(code)
+      token._size++
       return whitespace
@@ -34,0 +37,0 @@ }
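
The whitespace tokenizer gains the same `_size` bookkeeping, with a small control-flow change: `start` no longer consumes the first space itself but re-dispatches the code into the `whitespace` state, so consumption and counting happen in one place. The shape of that pattern, with stubs standing in for micromark's real effects object:

// Stand-in stubs; a real tokenizer's effects object does much more.
var token
function enter(type) { return {type: type} }
function consume(code) {} // would advance the input position

function start(code) {
  token = enter('whitespace')
  token._size = 0
  return whitespace(code) // re-dispatch the current code
}

function whitespace(code) {
  consume(code)
  token._size++
  return whitespace
}

start(32) // feed one space (code 32)
console.log(token._size) // 1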
@@ -6,3 +6,3 @@ module.exports = createSpaceTokenizer
 function createSpaceTokenizer(type, max) {
-  var limit = max || Infinity
+  var limit = (max || Infinity) - 1
@@ -12,3 +12,3 @@ return {tokenize: tokenizeSpace, partial: true}
   function tokenizeSpace(effects, ok) {
-    var size = 0
+    var token
@@ -19,3 +19,4 @@ return start
       if (markdownSpace(code)) {
-        effects.enter(type)
+        token = effects.enter(type)
+        token._size = 0
         return prefix(code)
@@ -28,3 +29,4 @@ }
     function prefix(code) {
-      if (++size < limit && markdownSpace(code)) {
+      if (token._size < limit && markdownSpace(code)) {
+        token._size++
         effects.consume(code)
@@ -31,0 +33,0 @@ return prefix
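
The factory's `- 1` is not a behavior change: the old guard incremented before comparing (`++size < limit`) while the new one compares before incrementing, so lowering the limit by one preserves the bound of at most `max - 1` consumed spaces. Checking the loop arithmetic with max = 4 (presumably the tab size, giving CommonMark's "up to three spaces"):

// Loop-bound check for the new guard with limit = max - 1.
var max = 4
var limit = (max || Infinity) - 1 // 3
var size = 0
var consumed = 0

while (size < limit) {
  size++
  consumed++
}

console.log(consumed) // 3, same as the old `++size < limit` with limit 4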
@@ -10,3 +10,2 @@ module.exports = createTokenizer
 var sliceChunks = require('../util/slice-chunks')
-var movePoint = require('../util/move-point')
@@ -43,3 +42,2 @@ function createTokenizer(parser, initialize, from) {
     sliceSerialize: sliceSerialize,
-    shiftPoint: shiftPoint,
     now: now,
@@ -91,6 +89,2 @@ defineSkip: defineSkip,
-  function shiftPoint(point, offset) {
-    return movePoint(chunks, point, offset)
-  }
   function sliceSerialize(token) {
@@ -166,3 +160,3 @@ return serializeChunks(sliceChunks(chunks, token))
     // Anything else.
-    else {
+    else if (code !== -1) {
       point.column++
@@ -184,3 +178,6 @@ }
-    point.offset += code === -3 ? 2 : 1
+    if (code !== -1) {
+      point.offset += code === -3 ? 2 : 1
+    }
     point._bufferIndex = bufferIndex
@@ -187,0 +184,0 @@ point._index = index
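
With shiftPoint gone (its one consumer, the attention resolver, now calls movePoint directly), the tokenizer also changes how positions advance: code -1, presumably the virtual-space filler produced when a tab is expanded, no longer moves the column or offset, since it corresponds to no character in the input, while -3 (CR+LF) still advances the offset by two. A stripped-down version of the advance logic as the hunks show it:

// Sketch of position advancement; code values taken from the diff:
// -3 is CR+LF (two characters in the source), -1 a virtual space.
function advance(point, code) {
  if (code !== -1) {
    point.column++
    point.offset += code === -3 ? 2 : 1
  }
  return point
}

console.log(advance({column: 1, offset: 0}, -1)) // { column: 1, offset: 0 }
console.log(advance({column: 1, offset: 0}, -3)) // { column: 2, offset: 2 }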
@@ -5,20 +5,7 @@ module.exports = movePoint
 // chunks (replacement characters, tabs, or line endings).
-function movePoint(chunks, point, offset) {
+function movePoint(point, offset) {
   point.column += offset
   point.offset += offset
-  if (offset > -1) {
-    if (point._bufferIndex < 0) {
-      point._bufferIndex = 0
-    }
-  } else {
-    if (point._bufferIndex < 0) {
-      point._index--
-      point._bufferIndex = chunks[point._index].length
-    }
-  }
   point._bufferIndex += offset
   return point
 }
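
movePoint loses its chunk-boundary fix-ups along with the chunks parameter, leaving a plain shift of column, offset, and _bufferIndex. That is only safe when callers move across ordinary one-character codes, which holds for its remaining caller, the attention resolver, where the shift spans a run of `*` or `_` markers. The new helper in full, with a worked call:

// The simplified helper as it lands in 2.1.0.
function movePoint(point, offset) {
  point.column += offset
  point.offset += offset
  point._bufferIndex += offset
  return point
}

console.log(movePoint({line: 1, column: 5, offset: 4, _bufferIndex: 4}, -2))
// { line: 1, column: 3, offset: 2, _bufferIndex: 2 }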
@@ -6,5 +6,5 @@ module.exports = prefixSize
   var tail = events[events.length - 1]
-  return tail && tail[1].type === kind
-    ? tail[1].end.column - tail[1].start.column
-    : 0
+  if (!tail || tail[1].type !== kind) return 0
+  return tail[1]._size
 }
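
prefixSize switches from column arithmetic to the new `_size` counters, presumably because columns and consumed codes disagree once tabs, which span several columns, are involved. An illustration of the discrepancy avoided; the numbers are invented for the example:

// A single tab can advance the column by 4 while consuming 1 code.
var tail = [
  'enter',
  {
    type: 'listItemPrefixWhitespace',
    start: {column: 1},
    end: {column: 5},
    _size: 1
  }
]

console.log(tail[1].end.column - tail[1].start.column) // 4 (old answer)
console.log(tail[1]._size) // 1 (new answer)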
@@ -69,2 +69,11 @@ module.exports = subtokenize
+      if (lineIndex) {
+        tailIndex = lineIndex
+        while (
+          tailIndex-- &&
+          events[tailIndex][1].end.offset > events[lineIndex][1].start.offset
+        ) {
+          events[tailIndex][1].end = shallow(events[lineIndex][1].start)
+        }
+      }
       // Fix position.
@@ -135,3 +144,3 @@ event[1].end = shallow(events[lineIndex][1].start)
       seenEnter &&
-      (events[lineIndex][1].type === 'codeSpanPaddingLineEnding' ||
+      (events[lineIndex][1].type === 'codeTextPaddingLineEnding' ||
         events[lineIndex][1].type === 'lineEnding' ||
@@ -138,0 +147,0 @@ events[lineIndex][1].type === 'lineEndingBlank' ||
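
Subtokenization gains a clean-up pass: when the line ending at lineIndex is repositioned, any earlier event whose token still claims to end past the line ending's new start is clipped back to it. A toy version of that loop over plain objects, with Object.assign standing in for shallow:

// Toy data: two tokens wrongly ending at offset 12 while the line
// ending starts at offset 10; the loop clips the stale end points.
var events = [
  ['enter', {type: 'chunkText', end: {offset: 12}}],
  ['exit', {type: 'data', end: {offset: 12}}],
  ['enter', {type: 'lineEnding', start: {offset: 10}}]
]
var lineIndex = 2
var tailIndex = lineIndex

while (
  tailIndex-- &&
  events[tailIndex][1].end.offset > events[lineIndex][1].start.offset
) {
  events[tailIndex][1].end = Object.assign({}, events[lineIndex][1].start)
}

console.log(events[0][1].end.offset, events[1][1].end.offset) // 10 10

micromark evidently ships these sources twice, an uncompiled copy and a build where the constant modules are inlined, so the hunks from here on repeat the changes above in the other copy: constants appear by name (codes.*, types.*, constants.*) rather than as the inlined numbers and string literals used above.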
@@ -36,3 +36,3 @@ module.exports = compileHtml
   codeIndented: onentercodeindented,
-  codeSpan: onentercodespan,
+  codeText: onentercodetext,
   content: onentercontent,
@@ -45,3 +45,3 @@ definition: onenterdefinition,
   htmlFlow: onenterhtmlflow,
-  htmlSpan: onenterhtml,
+  htmlText: onenterhtml,
   image: onenterimage,
@@ -66,3 +66,3 @@ label: buffer,
   autolinkEmail: onexitautolinkemail,
-  autolinkUri: onexitautolinkuri,
+  autolinkProtocol: onexitautolinkprotocol,
   blockQuote: onexitblockquote,
@@ -79,3 +79,3 @@ characterEscapeValue: onexitcharacterescapevalue,
   codeIndented: onexitflowcode,
-  codeSpan: onexitcodespan,
+  codeText: onexitcodetext,
   data: onexitdata,
@@ -90,3 +90,3 @@ definition: onexitdefinition,
   htmlFlow: onexithtml,
-  htmlSpan: onexithtml,
+  htmlText: onexithtml,
   image: onexitmedia,
@@ -121,3 +121,3 @@ label: onexitlabel,
   var characterReferenceType
-  var inCodeSpan
+  var inCodeText
   var ignoreEncode
@@ -631,3 +631,3 @@ var media
-    if (inCodeSpan) {
+    if (inCodeText) {
      raw(' ')
@@ -672,9 +672,9 @@ return
-  function onentercodespan() {
-    inCodeSpan = true
+  function onentercodetext() {
+    inCodeText = true
     tag('<code>')
   }
-  function onexitcodespan() {
-    inCodeSpan = undefined
+  function onexitcodetext() {
+    inCodeText = undefined
     tag('</code>')
@@ -723,3 +723,3 @@ }
-  function onexitautolinkuri(token) {
+  function onexitautolinkprotocol(token) {
     var uri = this.sliceSerialize(token)
@@ -726,0 +726,0 @@ tag('<a href="' + url(uri, protocolHref) + '">')
@@ -44,3 +44,3 @@ // This module is compiled away!
 // Whole autolink (`<https://example.com>` or `<admin@example.com>`)
-// Includes `autolinkMarker` and `autolinkUri` or `autolinkEmail`.
+// Includes `autolinkMarker` and `autolinkProtocol` or `autolinkEmail`.
 exports.autolink = 'autolink'
@@ -51,7 +51,7 @@
-// Marker around an `autolinkUri` or `autolinkEmail` (`<` or `>`).
+// Marker around an `autolinkProtocol` or `autolinkEmail` (`<` or `>`).
 exports.autolinkMarker = 'autolinkMarker'
-// URI autolink w/o markers (`https://example.com`)
-exports.autolinkUri = 'autolinkUri'
+// Protocol autolink w/o markers (`https://example.com`)
+exports.autolinkProtocol = 'autolinkProtocol'
@@ -120,15 +120,15 @@ // A whole character escape (`\-`).
-// A code span (``` `alpha` ```).
-// Includes `codeSpanSequence`, `data`, `lineEnding`, and can include
-// `codeSpanPaddingWhitespace` and `codeSpanPaddingLineEnding`.
-exports.codeSpan = 'codeSpan'
+// A text code (``` `alpha` ```).
+// Includes `codeTextSequence`, `data`, `lineEnding`, and can include
+// `codeTextPaddingWhitespace` and `codeTextPaddingLineEnding`.
+exports.codeText = 'codeText'
 // A space right after or before a tick.
-exports.codeSpanPaddingWhitespace = 'codeSpanPaddingWhitespace'
+exports.codeTextPaddingWhitespace = 'codeTextPaddingWhitespace'
 // A line ending right after or before a tick.
-exports.codeSpanPaddingLineEnding = 'codeSpanPaddingLineEnding'
+exports.codeTextPaddingLineEnding = 'codeTextPaddingLineEnding'
-// A code span fence (` `` `).
-exports.codeSpanSequence = 'codeSpanSequence'
+// A text code fence (` `` `).
+exports.codeTextSequence = 'codeTextSequence'
@@ -235,5 +235,5 @@ // Whole content:
-// HTML in content (the tag in `a <i> b`).
+// HTML in text (the tag in `a <i> b`).
 // Includes `lineEnding`, `data`.
-exports.htmlSpan = 'htmlSpan'
+exports.htmlText = 'htmlText'
@@ -240,0 +240,0 @@ // Whole image (`![alpha](bravo)`, `![alpha][bravo]`, `![alpha][]`, or
@@ -9,3 +9,3 @@ module.exports = createParser
 var attention = require('./tokenize/attention')
-var atxHeading = require('./tokenize/atx-heading')
+var headingAtx = require('./tokenize/heading-atx')
 var autolink = require('./tokenize/autolink')
@@ -18,7 +18,7 @@ var list = require('./tokenize/list')
 var codeIndented = require('./tokenize/code-indented')
-var codeSpan = require('./tokenize/code-span')
+var codeText = require('./tokenize/code-text')
 var definition = require('./tokenize/definition')
 var hardBreakEscape = require('./tokenize/hard-break-escape')
 var htmlFlow = require('./tokenize/html-flow')
-var htmlSpan = require('./tokenize/html-span')
+var htmlText = require('./tokenize/html-text')
 var labelEnd = require('./tokenize/label-end')
@@ -63,3 +63,3 @@ var labelImage = require('./tokenize/label-start-image')
-  flow[codes.numberSign] = atxHeading
+  flow[codes.numberSign] = headingAtx
   flow[codes.asterisk] = thematicBreak
@@ -84,3 +84,3 @@ flow[codes.dash] = [setextUnderline, thematicBreak]
   text[codes.asterisk] = attention
-  text[codes.lessThan] = [autolink, htmlSpan]
+  text[codes.lessThan] = [autolink, htmlText]
   text[codes.leftSquareBracket] = labelLink
@@ -90,3 +90,3 @@ text[codes.backslash] = [hardBreakEscape, characterEscape]
   text[codes.underscore] = attention
-  text[codes.graveAccent] = codeSpan
+  text[codes.graveAccent] = codeText
@@ -93,0 +93,0 @@ parser = {
@@ -9,5 +9,6 @@ exports.tokenize = tokenizeAttention
 var classifyCharacter = require('../util/classify-character')
+var movePoint = require('../util/move-point')
-var attention = 'attention'
+// Internal type for markers that could turn into emphasis or strong sequences.
+var attentionSequence = 'attentionSequence'
@@ -32,4 +33,4 @@ // Take all events and resolve attention to emphasis or strong.
   var sequenceToken
-  var dataToken
-  var point
+  var textToken
   var marker
@@ -41,3 +42,3 @@
-    if (token.type === attention && token._events) {
+    if (token.type === attentionSequence && token._events) {
       previous = lastSequence
@@ -109,17 +110,15 @@ lastSequence = {token: token, size: token._size, previous: previous}
       // New token.
-      attentionToken = {
-        type: use > 1 ? types.strong : types.emphasis,
-        start: openerToken.start,
-        end: token.end
+      sequenceToken = {
+        type: use > 1 ? types.strongSequence : types.emphasisSequence,
+        start: movePoint(shallow(openerToken.end), -use),
+        end: shallow(openerToken.end)
       }
-      point = context.shiftPoint(shallow(openerToken.start), use)
-      dataToken = {
+      textToken = {
         type: use > 1 ? types.strongText : types.emphasisText,
-        start: shallow(point)
+        start: shallow(openerToken.end),
+        end: shallow(token.start)
       }
-      sequenceToken = {
-        type: use > 1 ? types.strongSequence : types.emphasisSequence,
-        start: shallow(openerToken.start),
-        end: shallow(point)
+      attentionToken = {
+        type: use > 1 ? types.strong : types.emphasis,
+        start: shallow(sequenceToken.start)
       }
@@ -130,16 +129,15 @@ openerToken._events.unshift(
         ['exit', sequenceToken, context],
-        ['enter', dataToken, context]
+        ['enter', textToken, context]
       )
-      openerToken.start = point
+      openerToken.end = shallow(sequenceToken.start)
       openerToken._side = constants.attentionSideBefore
-      point = context.shiftPoint(shallow(token.end), -use)
-      dataToken.end = shallow(point)
       sequenceToken = {
         type: sequenceToken.type,
-        start: shallow(point),
-        end: shallow(token.end)
+        start: token.start,
+        end: movePoint(shallow(token.start), use)
       }
+      attentionToken.end = shallow(sequenceToken.end)
       token._events.push(
-        ['exit', dataToken, context],
+        ['exit', textToken, context],
         ['enter', sequenceToken, context],
@@ -149,3 +147,3 @@ ['exit', sequenceToken, context],
       )
-      token.end = point
+      token.start = shallow(sequenceToken.end)
       token._side = constants.attentionSideAfter
@@ -188,3 +186,3 @@
-    if (token.type === attention && token._events) {
+    if (token.type === attentionSequence && token._events) {
       subevents = token._events
@@ -243,3 +241,3 @@
-      effects.enter(attention)
+      effects.enter(attentionSequence)
       marker = code
@@ -262,3 +260,3 @@ return more(code)
-      token = effects.exit(attention)
+      token = effects.exit(attentionSequence)
       after = classifyCharacter(code)
@@ -265,0 +263,0 @@ open = !after || (before && after === constants.characterGroupPunctuation)
@@ -28,3 +28,3 @@ exports.tokenize = tokenizeAutolink
     effects.exit(types.autolinkMarker)
-    token = effects.enter(types.autolinkUri)
+    token = effects.enter(types.autolinkProtocol)
     return open
@@ -53,3 +53,5 @@ }
-    return asciiAtext(code) ? emailAtext(code) : nok(code)
+    return code === codes.atSign || asciiAtext(code)
+      ? emailAtext(code)
+      : nok(code)
   }
@@ -81,3 +83,3 @@
     if (code === codes.greaterThan) {
-      effects.exit(types.autolinkUri)
+      effects.exit(types.autolinkProtocol)
       return end(code)
@@ -84,0 +86,0 @@ }
@@ -23,6 +23,8 @@ exports.tokenize = tokenizeListStart
+var listItemValueSizelimit = constants.listItemValueSizeMax - 1
 function tokenizeListStart(effects, ok, nok) {
   var self = this
+  var token
   var initialSize
-  var size
@@ -61,3 +63,4 @@ return start
-      effects.enter(types.listItemPrefix)
+      token = effects.enter(types.listItemPrefix)
+      token._size = 0
       return atMarker(code)
@@ -80,6 +83,6 @@ }
-      effects.enter(types.listItemPrefix)
+      token = effects.enter(types.listItemPrefix)
       effects.enter(types.listItemValue)
       effects.consume(code)
-      size = 1
+      token._size = 1
       return self.interrupt ? afterValue : inside
@@ -89,4 +92,5 @@ }
   function inside(code) {
-    if (++size < constants.listItemValueSizeMax && asciiDigit(code)) {
+    if (token._size < listItemValueSizelimit && asciiDigit(code)) {
      effects.consume(code)
+      token._size++
      return inside
@@ -122,2 +126,3 @@ }
     effects.exit(types.listItemMarker)
+    token._size++
     return effects.check(
@@ -146,3 +151,3 @@ blank,
     if (markdownSpace(code)) {
-      effects.enter(types.listItemPrefixWhitespace)
+      effects.enter(types.listItemPrefixWhitespace)._size = 1
       effects.consume(code)
@@ -157,5 +162,5 @@ effects.exit(types.listItemPrefixWhitespace)
   function endOfPrefix(code) {
+    token._size += prefixSize(self.events, types.listItemPrefixWhitespace)
+    self.containerState.size = initialSize + token._size
     effects.exit(types.listItemPrefix)
-    self.containerState.size =
-      initialSize + prefixSize(self.events, types.listItemPrefix)
     return ok(code)
@@ -177,7 +182,7 @@ }
-  return effects.check(blank, onBlank, checkContent)
+  return effects.check(blank, onBlank, notBlank)
   function onBlank(code) {
     if (self.containerState.initialBlankLine) {
-      self.containerState.furtherBlankLine = true
+      self.containerState.furtherBlankLines = true
     }
@@ -188,7 +193,11 @@
-  function checkContent(code) {
-    if (self.containerState.furtherBlankLine || !markdownSpace(code)) {
+  function notBlank(code) {
+    if (self.containerState.furtherBlankLines || !markdownSpace(code)) {
+      self.containerState.initialBlankLine = undefined
+      self.containerState.furtherBlankLines = undefined
       return effects.attempt(nextItem, onItem, nok)(code)
     }
     self.containerState.initialBlankLine = undefined
-    self.containerState.furtherBlankLine = undefined
+    self.containerState.furtherBlankLines = undefined
     return effects.attempt(
@@ -195,0 +204,0 @@ indent,
@@ -9,2 +9,4 @@ exports.tokenize = tokenizeSpaceOrLineEnding
 function tokenizeSpaceOrLineEnding(effects, ok) {
+  var token
   return start
@@ -21,5 +23,5 @@
     if (markdownSpace(code)) {
-      effects.enter(types.whitespace)
-      effects.consume(code)
-      return whitespace
+      token = effects.enter(types.whitespace)
+      token._size = 0
+      return whitespace(code)
     }
@@ -33,2 +35,3 @@
       effects.consume(code)
+      token._size++
       return whitespace
@@ -35,0 +38,0 @@ }
@@ -6,3 +6,3 @@ module.exports = createSpaceTokenizer
 function createSpaceTokenizer(type, max) {
-  var limit = max || Infinity
+  var limit = (max || Infinity) - 1
@@ -12,3 +12,3 @@ return {tokenize: tokenizeSpace, partial: true}
   function tokenizeSpace(effects, ok) {
-    var size = 0
+    var token
@@ -19,3 +19,4 @@ return start
       if (markdownSpace(code)) {
-        effects.enter(type)
+        token = effects.enter(type)
+        token._size = 0
         return prefix(code)
@@ -28,3 +29,4 @@ }
     function prefix(code) {
-      if (++size < limit && markdownSpace(code)) {
+      if (token._size < limit && markdownSpace(code)) {
+        token._size++
        effects.consume(code)
@@ -31,0 +33,0 @@ return prefix
@@ -12,3 +12,2 @@ module.exports = createTokenizer
 var sliceChunks = require('../util/slice-chunks')
-var movePoint = require('../util/move-point')
@@ -45,3 +44,2 @@ function createTokenizer(parser, initialize, from) {
     sliceSerialize: sliceSerialize,
-    shiftPoint: shiftPoint,
     now: now,
@@ -93,6 +91,2 @@ defineSkip: defineSkip,
-  function shiftPoint(point, offset) {
-    return movePoint(chunks, point, offset)
-  }
   function sliceSerialize(token) {
@@ -185,3 +179,3 @@ return serializeChunks(sliceChunks(chunks, token))
     // Anything else.
-    else {
+    else if (code !== codes.virtualSpace) {
       point.column++
@@ -203,3 +197,6 @@ }
-    point.offset += code === codes.carriageReturnLineFeed ? 2 : 1
+    if (code !== codes.virtualSpace) {
+      point.offset += code === codes.carriageReturnLineFeed ? 2 : 1
+    }
     point._bufferIndex = bufferIndex
@@ -206,0 +203,0 @@ point._index = index
@@ -5,20 +5,7 @@ module.exports = movePoint
 // chunks (replacement characters, tabs, or line endings).
-function movePoint(chunks, point, offset) {
+function movePoint(point, offset) {
   point.column += offset
   point.offset += offset
-  if (offset > -1) {
-    if (point._bufferIndex < 0) {
-      point._bufferIndex = 0
-    }
-  } else {
-    if (point._bufferIndex < 0) {
-      point._index--
-      point._bufferIndex = chunks[point._index].length
-    }
-  }
   point._bufferIndex += offset
   return point
 }
 module.exports = prefixSize
+var assert = require('assert')
 var types = require('../constant/types')
@@ -8,5 +9,5 @@
   var tail = events[events.length - 1]
-  return tail && tail[1].type === kind
-    ? tail[1].end.column - tail[1].start.column
-    : 0
+  if (!tail || tail[1].type !== kind) return 0
+  assert(typeof tail[1]._size === 'number', 'expected size')
+  return tail[1]._size
 }
@@ -77,2 +77,11 @@ module.exports = subtokenize
+      if (lineIndex) {
+        tailIndex = lineIndex
+        while (
+          tailIndex-- &&
+          events[tailIndex][1].end.offset > events[lineIndex][1].start.offset
+        ) {
+          events[tailIndex][1].end = shallow(events[lineIndex][1].start)
+        }
+      }
       // Fix position.
@@ -144,3 +153,3 @@ event[1].end = shallow(events[lineIndex][1].start)
       seenEnter &&
-      (events[lineIndex][1].type === types.codeSpanPaddingLineEnding ||
+      (events[lineIndex][1].type === types.codeTextPaddingLineEnding ||
         events[lineIndex][1].type === types.lineEnding ||
@@ -147,0 +156,0 @@ events[lineIndex][1].type === types.lineEndingBlank ||
 {
   "name": "micromark",
-  "version": "2.0.0",
+  "version": "2.1.0",
   "description": "smol markdown parser that’s different",
@@ -5,0 +5,0 @@ "license": "MIT",