micromark-util-subtokenize
Comparing version 2.0.0 to 2.0.1
dev/index.d.ts
/**
 * Tokenize subcontent.
 *
 * @param {Array<Event>} events
 * @param {Array<Event>} eventsArray
 * List of events.
@@ -9,7 +9,7 @@ * @returns {boolean}
 */
export function subtokenize(
events: Array<import('micromark-util-types').Event>
): boolean
export type Chunk = import('micromark-util-types').Chunk
export type Event = import('micromark-util-types').Event
export type Token = import('micromark-util-types').Token
export function subtokenize(eventsArray: Array<import("micromark-util-types").Event>): boolean;
export { SpliceBuffer } from "./lib/splice-buffer.js";
export type Chunk = import('micromark-util-types').Chunk;
export type Event = import('micromark-util-types').Event;
export type Token = import('micromark-util-types').Token;
//# sourceMappingURL=index.d.ts.map
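The typings above describe a single exported function that mutates the event list in place and reports when it is done. As a hedged sketch (not taken from this package's docs), this is the kind of loop a caller runs until the function returns `true`; the implementation returns `!more`, meaning nothing was left to expand on the last pass, and `events` is assumed to come from a micromark tokenizer:

// Hedged sketch, not part of this package: drive `subtokenize` until it
// reports that no more subtokens were expanded.
import {subtokenize} from 'micromark-util-subtokenize'

/**
 * @param {Array<import('micromark-util-types').Event>} events
 * @returns {Array<import('micromark-util-types').Event>}
 */
function expandAll(events) {
  while (!subtokenize(events)) {
    // Keep going: the previous pass expanded at least one subtoken.
  }

  return events
}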
dev/index.js
@@ -10,7 +10,11 @@ /**
import {ok as assert} from 'devlop'
import {SpliceBuffer} from './lib/splice-buffer.js'
// Hidden API exposed for testing.
export {SpliceBuffer} from './lib/splice-buffer.js'
/**
 * Tokenize subcontent.
 *
 * @param {Array<Event>} events
 * @param {Array<Event>} eventsArray
 * List of events.
@@ -21,3 +25,3 @@ * @returns {boolean}
// eslint-disable-next-line complexity
export function subtokenize(events) {
export function subtokenize(eventsArray) {
/** @type {Record<string, number>} */
@@ -40,2 +44,3 @@ const jumps = {}
let more
const events = new SpliceBuffer(eventsArray)
@@ -47,3 +52,3 @@ while (++index < events.length) {
event = events[index]
event = events.get(index)
@@ -55,3 +60,3 @@ // Add a hook for the GFM tasklist extension, which needs to know if text
event[1].type === types.chunkFlow &&
events[index - 1][1].type === types.listItemPrefix
events.get(index - 1)[1].type === types.listItemPrefix
) {
@@ -100,3 +105,3 @@ assert(event[1]._tokenizer, 'expected `_tokenizer` on subtokens')
while (otherIndex--) {
otherEvent = events[otherIndex]
otherEvent = events.get(otherIndex)
@@ -109,3 +114,3 @@ if (
if (lineIndex) {
events[lineIndex][1].type = types.lineEndingBlank
events.get(lineIndex)[1].type = types.lineEndingBlank
}
@@ -123,3 +128,3 @@
// Fix position.
event[1].end = Object.assign({}, events[lineIndex][1].start)
event[1].end = Object.assign({}, events.get(lineIndex)[1].start)
@@ -129,3 +134,3 @@ // Switch container exit w/ line endings.
parameters.unshift(event)
splice(events, lineIndex, index - lineIndex + 1, parameters)
events.splice(lineIndex, index - lineIndex + 1, parameters)
}
@@ -135,2 +140,4 @@ }
// The changes to the `events` buffer must be copied back into the eventsArray
splice(eventsArray, 0, Number.POSITIVE_INFINITY, events.slice(0))
return !more
@@ -142,3 +149,3 @@ }
 *
 * @param {Array<Event>} events
 * @param {SpliceBuffer<Event>} events
 * @param {number} eventIndex
@@ -148,4 +155,4 @@ * @returns {Record<string, number>}
function subcontent(events, eventIndex) {
const token = events[eventIndex][1]
const context = events[eventIndex][2]
const token = events.get(eventIndex)[1]
const context = events.get(eventIndex)[2]
let startPosition = eventIndex - 1
@@ -177,3 +184,3 @@ /** @type {Array<number>} */
// Find the position of the event for this token.
while (events[++startPosition][1] !== current) {
while (events.get(++startPosition)[1] !== current) {
// Empty.
@@ -262,6 +269,7 @@ }
assert(start !== undefined, 'expected a start position when splicing')
jumps.unshift([start, start + slice.length - 1])
splice(events, start, 2, slice)
jumps.push([start, start + slice.length - 1])
events.splice(start, 2, slice)
}
jumps.reverse()
index = -1
@@ -268,0 +276,0 @@
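The central change in 2.0.1 is visible above: the plain `events` array is wrapped in a `SpliceBuffer`, indexing goes through `.get()`, mutation goes through the buffer's own `.splice()`, and the result is copied back into `eventsArray` at the end. The implementation of `lib/splice-buffer.js` is not part of this diff; the class below is only a simplified, hypothetical illustration of the underlying idea (a gap-buffer-like structure that keeps items split around a cursor so repeated splices near that cursor stay cheap), not the real code:

// Hypothetical, simplified illustration; the real `lib/splice-buffer.js` is
// not shown in this diff and differs in detail.
class TinySpliceBuffer {
  /** @param {Array<unknown>} initial */
  constructor(initial) {
    this.left = [...initial] // items before the cursor
    this.right = [] // items after the cursor, stored in reverse order
  }

  get length() {
    return this.left.length + this.right.length
  }

  /** Move the cursor so that exactly `index` items sit in `left`. */
  setCursor(index) {
    while (this.left.length > index) this.right.push(this.left.pop())
    while (this.left.length < index && this.right.length > 0) {
      this.left.push(this.right.pop())
    }
  }

  /** @param {number} index */
  get(index) {
    return index < this.left.length
      ? this.left[index]
      : this.right[this.right.length - 1 - (index - this.left.length)]
  }

  /** Remove `deleteCount` items at `start`, then insert the `items` array there. */
  splice(start, deleteCount, items) {
    this.setCursor(start)
    let remaining = deleteCount
    while (remaining-- > 0 && this.right.length > 0) this.right.pop()
    let i = items.length
    while (i-- > 0) this.right.push(items[i])
  }

  /** Copy everything back out into a plain array. */
  slice(start) {
    return [...this.left, ...this.right.slice().reverse()].slice(start)
  }
}

const buffer = new TinySpliceBuffer(['a', 'b', 'e'])
buffer.splice(2, 0, ['c', 'd'])
console.log(buffer.slice(0)) // ['a', 'b', 'c', 'd', 'e']

The point of such a structure is that when `subtokenize` walks forward and splices expanded subevents in near its current index, only the elements between the cursor and the splice point move, rather than every element after the splice point as with `Array.prototype.splice` on a large event list.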
index.d.ts
/**
 * Tokenize subcontent.
 *
 * @param {Array<Event>} events
 * @param {Array<Event>} eventsArray
 * List of events.
@@ -9,7 +9,7 @@ * @returns {boolean}
 */
export function subtokenize(
events: Array<import('micromark-util-types').Event>
): boolean
export type Chunk = import('micromark-util-types').Chunk
export type Event = import('micromark-util-types').Event
export type Token = import('micromark-util-types').Token
export function subtokenize(eventsArray: Array<import("micromark-util-types").Event>): boolean;
export { SpliceBuffer } from "./lib/splice-buffer.js";
export type Chunk = import('micromark-util-types').Chunk;
export type Event = import('micromark-util-types').Event;
export type Token = import('micromark-util-types').Token;
//# sourceMappingURL=index.d.ts.map
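Besides `subtokenize`, the 2.0.1 typings re-export `SpliceBuffer` as a hidden API for testing. A hedged usage sketch follows; the method names and signatures (`get`, `length`, `splice` taking an array of items, `slice`) are inferred from how `index.js` uses the buffer in this diff, and the expected output is an assumption under those semantics rather than documented behaviour:

import {SpliceBuffer} from 'micromark-util-subtokenize'

// Construct from a plain array, as `new SpliceBuffer(eventsArray)` does above.
const buffer = new SpliceBuffer(['enter', 'data', 'exit'])

console.log(buffer.length) // 3
console.log(buffer.get(1)) // 'data'

// Unlike `Array.prototype.splice`, the replacement items are passed as one array.
buffer.splice(1, 1, ['lineEnding', 'lineEndingBlank'])

// Copy the contents back out as a plain array, as `events.slice(0)` does above.
console.log(buffer.slice(0)) // expected: ['enter', 'lineEnding', 'lineEndingBlank', 'exit']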
index.js
@@ -7,61 +7,58 @@ /**
import {splice} from 'micromark-util-chunked'
import { splice } from 'micromark-util-chunked';
import { SpliceBuffer } from './lib/splice-buffer.js';
// Hidden API exposed for testing.
export { SpliceBuffer } from './lib/splice-buffer.js';
/**
 * Tokenize subcontent.
 *
 * @param {Array<Event>} events
 * @param {Array<Event>} eventsArray
 * List of events.
 * @returns {boolean}
 * Whether subtokens were found.
 */ // eslint-disable-next-line complexity
export function subtokenize(events) {
 */
// eslint-disable-next-line complexity
export function subtokenize(eventsArray) {
/** @type {Record<string, number>} */
const jumps = {}
let index = -1
const jumps = {};
let index = -1;
/** @type {Event} */
let event
let event;
/** @type {number | undefined} */
let lineIndex
let lineIndex;
/** @type {number} */
let otherIndex
let otherIndex;
/** @type {Event} */
let otherEvent
let otherEvent;
/** @type {Array<Event>} */
let parameters
let parameters;
/** @type {Array<Event>} */
let subevents
let subevents;
/** @type {boolean | undefined} */
let more
let more;
const events = new SpliceBuffer(eventsArray);
while (++index < events.length) {
while (index in jumps) {
index = jumps[index]
index = jumps[index];
}
event = events[index]
event = events.get(index);
// Add a hook for the GFM tasklist extension, which needs to know if text
// is in the first content of a list item.
if (
index &&
event[1].type === 'chunkFlow' &&
events[index - 1][1].type === 'listItemPrefix'
) {
subevents = event[1]._tokenizer.events
otherIndex = 0
if (
otherIndex < subevents.length &&
subevents[otherIndex][1].type === 'lineEndingBlank'
) {
otherIndex += 2
if (index && event[1].type === "chunkFlow" && events.get(index - 1)[1].type === "listItemPrefix") {
subevents = event[1]._tokenizer.events;
otherIndex = 0;
if (otherIndex < subevents.length && subevents[otherIndex][1].type === "lineEndingBlank") {
otherIndex += 2;
}
if (
otherIndex < subevents.length &&
subevents[otherIndex][1].type === 'content'
) {
if (otherIndex < subevents.length && subevents[otherIndex][1].type === "content") {
while (++otherIndex < subevents.length) {
if (subevents[otherIndex][1].type === 'content') {
break
if (subevents[otherIndex][1].type === "content") {
break;
}
if (subevents[otherIndex][1].type === 'chunkText') {
subevents[otherIndex][1]._isInFirstContentOfListItem = true
otherIndex++
if (subevents[otherIndex][1].type === "chunkText") {
subevents[otherIndex][1]._isInFirstContentOfListItem = true;
otherIndex++;
}
@@ -75,5 +72,5 @@ }
if (event[1].contentType) {
Object.assign(jumps, subcontent(events, index))
index = jumps[index]
more = true
Object.assign(jumps, subcontent(events, index));
index = jumps[index];
more = true;
}
@@ -83,19 +80,16 @@ }
else if (event[1]._container) {
otherIndex = index
lineIndex = undefined
otherIndex = index;
lineIndex = undefined;
while (otherIndex--) {
otherEvent = events[otherIndex]
if (
otherEvent[1].type === 'lineEnding' ||
otherEvent[1].type === 'lineEndingBlank'
) {
otherEvent = events.get(otherIndex);
if (otherEvent[1].type === "lineEnding" || otherEvent[1].type === "lineEndingBlank") {
if (otherEvent[0] === 'enter') {
if (lineIndex) {
events[lineIndex][1].type = 'lineEndingBlank'
events.get(lineIndex)[1].type = "lineEndingBlank";
}
otherEvent[1].type = 'lineEnding'
lineIndex = otherIndex
otherEvent[1].type = "lineEnding";
lineIndex = otherIndex;
}
} else {
break
break;
}
@@ -105,12 +99,15 @@ }
// Fix position.
event[1].end = Object.assign({}, events[lineIndex][1].start)
event[1].end = Object.assign({}, events.get(lineIndex)[1].start);
// Switch container exit w/ line endings.
parameters = events.slice(lineIndex, index)
parameters.unshift(event)
splice(events, lineIndex, index - lineIndex + 1, parameters)
parameters = events.slice(lineIndex, index);
parameters.unshift(event);
events.splice(lineIndex, index - lineIndex + 1, parameters);
}
}
}
return !more
// The changes to the `events` buffer must be copied back into the eventsArray
splice(eventsArray, 0, Number.POSITIVE_INFINITY, events.slice(0));
return !more;
}
@@ -121,3 +118,3 @@
 *
 * @param {Array<Event>} events
 * @param {SpliceBuffer<Event>} events
 * @param {number} eventIndex
@@ -127,24 +124,23 @@ * @returns {Record<string, number>}
function subcontent(events, eventIndex) {
const token = events[eventIndex][1]
const context = events[eventIndex][2]
let startPosition = eventIndex - 1
const token = events.get(eventIndex)[1];
const context = events.get(eventIndex)[2];
let startPosition = eventIndex - 1;
/** @type {Array<number>} */
const startPositions = []
const tokenizer =
token._tokenizer || context.parser[token.contentType](token.start)
const childEvents = tokenizer.events
const startPositions = [];
const tokenizer = token._tokenizer || context.parser[token.contentType](token.start);
const childEvents = tokenizer.events;
/** @type {Array<[number, number]>} */
const jumps = []
const jumps = [];
/** @type {Record<string, number>} */
const gaps = {}
const gaps = {};
/** @type {Array<Chunk>} */
let stream
let stream;
/** @type {Token | undefined} */
let previous
let index = -1
let previous;
let index = -1;
/** @type {Token | undefined} */
let current = token
let adjust = 0
let start = 0
const breaks = [start]
let current = token;
let adjust = 0;
let start = 0;
const breaks = [start];
@@ -155,20 +151,20 @@ // Loop forward through the linked tokens to pass them in order to the
// Find the position of the event for this token.
while (events[++startPosition][1] !== current) {
while (events.get(++startPosition)[1] !== current) {
// Empty.
}
startPositions.push(startPosition)
startPositions.push(startPosition);
if (!current._tokenizer) {
stream = context.sliceStream(current)
stream = context.sliceStream(current);
if (!current.next) {
stream.push(null)
stream.push(null);
}
if (previous) {
tokenizer.defineSkip(current.start)
tokenizer.defineSkip(current.start);
}
if (current._isInFirstContentOfListItem) {
tokenizer._gfmTasklistFirstContentOfListItem = true
tokenizer._gfmTasklistFirstContentOfListItem = true;
}
tokenizer.write(stream)
tokenizer.write(stream);
if (current._isInFirstContentOfListItem) {
tokenizer._gfmTasklistFirstContentOfListItem = undefined
tokenizer._gfmTasklistFirstContentOfListItem = undefined;
}
@@ -178,4 +174,4 @@ }
// Unravel the next token.
previous = current
current = current.next
previous = current;
current = current.next;
}
@@ -185,17 +181,13 @@
// parts belong where.
current = token
current = token;
while (++index < childEvents.length) {
if (
// Find a void token that includes a break.
childEvents[index][0] === 'exit' &&
childEvents[index - 1][0] === 'enter' &&
childEvents[index][1].type === childEvents[index - 1][1].type &&
childEvents[index][1].start.line !== childEvents[index][1].end.line
) {
start = index + 1
breaks.push(start)
// Find a void token that includes a break.
childEvents[index][0] === 'exit' && childEvents[index - 1][0] === 'enter' && childEvents[index][1].type === childEvents[index - 1][1].type && childEvents[index][1].start.line !== childEvents[index][1].end.line) {
start = index + 1;
breaks.push(start);
// Help GC.
current._tokenizer = undefined
current.previous = undefined
current = current.next
current._tokenizer = undefined;
current.previous = undefined;
current = current.next;
}
@@ -205,3 +197,3 @@ }
// Help GC.
tokenizer.events = []
tokenizer.events = [];
@@ -213,6 +205,6 @@ // If there’s one more token (which is the cases for lines that end in an
// Help GC.
current._tokenizer = undefined
current.previous = undefined
current._tokenizer = undefined;
current.previous = undefined;
} else {
breaks.pop()
breaks.pop();
}
@@ -222,15 +214,16 @@
// moving back to front so that splice indices aren’t affected.
index = breaks.length
index = breaks.length;
while (index--) {
const slice = childEvents.slice(breaks[index], breaks[index + 1])
const start = startPositions.pop()
jumps.unshift([start, start + slice.length - 1])
splice(events, start, 2, slice)
const slice = childEvents.slice(breaks[index], breaks[index + 1]);
const start = startPositions.pop();
jumps.push([start, start + slice.length - 1]);
events.splice(start, 2, slice);
}
index = -1
jumps.reverse();
index = -1;
while (++index < jumps.length) {
gaps[adjust + jumps[index][0]] = adjust + jumps[index][1]
adjust += jumps[index][1] - jumps[index][0] - 1
gaps[adjust + jumps[index][0]] = adjust + jumps[index][1];
adjust += jumps[index][1] - jumps[index][0] - 1;
}
return gaps
}
return gaps;
}
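One small but representative change in `subcontent` is that the `[start, end]` pairs are now collected with `push` and reversed once after the loop, instead of `unshift`-ing each pair to the front while walking back to front. The toy comparison below (not from the package) only shows that the two approaches yield the same front-to-back order, while the new one avoids re-shifting the existing elements on every call the way `unshift` does:

// Toy data standing in for the [start, end] pairs built in `subcontent`,
// produced back to front.
const pairs = [[12, 14], [7, 9], [2, 5]]

// 2.0.0 style: prepend each pair as it is produced.
const viaUnshift = []
for (const pair of pairs) viaUnshift.unshift(pair)

// 2.0.1 style: append each pair, then reverse once at the end.
const viaPush = []
for (const pair of pairs) viaPush.push(pair)
viaPush.reverse()

console.log(JSON.stringify(viaUnshift) === JSON.stringify(viaPush)) // true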
package.json
{
"name": "micromark-util-subtokenize",
"version": "2.0.0",
"version": "2.0.1",
"description": "micromark utility to tokenize subtokens",
@@ -32,2 +32,4 @@ "license": "MIT",
"dev/",
"lib/",
"index.d.ts.map",
"index.d.ts",
@@ -49,3 +51,12 @@ "index.js"
},
"xo": false
"xo": {
"envs": [
"shared-node-browser"
],
"prettier": true,
"rules": {
"max-depth": "off",
"unicorn/prefer-code-point": "off"
}
}
}
readme.md
@@ -15,13 +15,13 @@ # micromark-util-subtokenize
* [What is this?](#what-is-this)
* [When should I use this?](#when-should-i-use-this)
* [Install](#install)
* [Use](#use)
* [API](#api)
* [`subtokenize(events)`](#subtokenizeevents)
* [Types](#types)
* [Compatibility](#compatibility)
* [Security](#security)
* [Contribute](#contribute)
* [License](#license)
* [What is this?](#what-is-this)
* [When should I use this?](#when-should-i-use-this)
* [Install](#install)
* [Use](#use)
* [API](#api)
* [`subtokenize(events)`](#subtokenizeevents)
* [Types](#types)
* [Compatibility](#compatibility)
* [Security](#security)
* [Contribute](#contribute)
* [License](#license)
@@ -87,4 +87,4 @@ ## What is this?
* `events` (`Array<Event>`)
— list of events
* `events` (`Array<Event>`)
— list of events
@@ -91,0 +91,0 @@ ###### Returns
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package