Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

es-html-parser

Package Overview
Dependencies
Maintainers
1
Versions
15
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

es-html-parser - npm Package Compare versions

Comparing version 0.0.10 to 1.0.0-alpha.0

dist/tokenizer/tokenizer-state.d.ts

3

dist/parser/parse.js

@@ -10,3 +10,4 @@ "use strict";

const tokenAdapter = (options && options.tokenAdapter) || token_adapter_1.defaultTokenAdapter;
const { tokens } = (0, tokenizer_1.tokenize)(html, tokenAdapter);
const templateRanges = (options && options.templateRanges) || [];
const { tokens } = (0, tokenizer_1.tokenize)(html, tokenAdapter, templateRanges);
const { ast } = (0, tree_constructor_1.constructTree)(tokens, undefined);

@@ -13,0 +14,0 @@ return {

import type { TokenizerState } from "../../types";
export declare function parse(chars: string, state: TokenizerState): void;
export declare function parse(chars: string, state: TokenizerState, charIndex: number): void;

@@ -7,3 +7,7 @@ "use strict";

const utils_1 = require("../../utils");
function parse(chars, state) {
function parse(chars, state, charIndex) {
const range = state.consumeTemplateRangeAt(charIndex);
if (range) {
return parseTemplate(state, range);
}
if (isKeyBreak(chars)) {

@@ -26,2 +30,3 @@ return parseKeyEnd(state);

loc: position.loc,
isTemplate: false,
});

@@ -32,1 +37,16 @@ state.accumulatedContent = "";

}
// Emit one template AttributeKey token spanning [start, end) of the source,
// then clear the tokenizer scratch buffers, jump the caret past the template
// expression, and resume scanning in the Attributes context.
function parseTemplate(state, [templateStart, templateEnd]) {
    const tokenRange = [templateStart, templateEnd];
    const token = {
        type: token_types_1.TokenTypes.AttributeKey,
        value: state.source.slice(templateStart, templateEnd),
        range: tokenRange,
        loc: (0, utils_1.calculateTokenLocation)(state.source, tokenRange),
        isTemplate: true,
    };
    state.tokens.push(token);
    state.accumulatedContent = "";
    state.decisionBuffer = "";
    state.caretPosition = templateEnd;
    state.currentContext = constants_1.TokenizerContextTypes.Attributes;
}
import type { TokenizerState } from "../../types";
export declare function parseValueEnd(state: TokenizerState): void;
export declare function parse(chars: string, state: TokenizerState): void;
export declare function parse(chars: string, state: TokenizerState, charIndex: number): void;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.parseValueEnd = parseValueEnd;
exports.parse = parse;

@@ -14,2 +13,3 @@ const constants_1 = require("../../constants");

loc: position.loc,
isTemplate: false,
});

@@ -20,3 +20,22 @@ state.accumulatedContent = "";

}
function parse(chars, state) {
// Emit one template AttributeValue token for the range [start, end), reset
// the scratch buffers, advance the caret beyond the template expression, and
// hand control back to the Attributes context.
function parseTemplate(state, [templateStart, templateEnd]) {
    const tokenRange = [templateStart, templateEnd];
    state.tokens.push({
        type: constants_1.TokenTypes.AttributeValue,
        value: state.source.slice(templateStart, templateEnd),
        range: tokenRange,
        loc: (0, utils_1.calculateTokenLocation)(state.source, tokenRange),
        isTemplate: true,
    });
    state.accumulatedContent = "";
    state.decisionBuffer = "";
    state.caretPosition = templateEnd;
    state.currentContext = constants_1.TokenizerContextTypes.Attributes;
}
function parse(chars, state, charIndex) {
const range = state.consumeTemplateRangeAt(charIndex);
if (range) {
return parseTemplate(state, range);
}
if ((0, utils_1.isWhitespace)(chars) || chars === ">" || chars === "/") {

@@ -23,0 +42,0 @@ return parseValueEnd(state);

import type { TokenizerState } from "../../types";
export declare function parse(chars: string, state: TokenizerState): void;
export declare function parse(chars: string, state: TokenizerState, charIndex: number): void;

@@ -6,5 +6,9 @@ "use strict";

const utils_1 = require("../../utils");
function parse(chars, state) {
function parse(chars, state, charIndex) {
var _a;
const wrapperChar = (_a = state.contextParams[constants_1.TokenizerContextTypes.AttributeValueWrapped]) === null || _a === void 0 ? void 0 : _a.wrapper;
const range = state.consumeTemplateRangeAt(charIndex);
if (range) {
return parseTemplate(state, range);
}
if (chars === wrapperChar) {

@@ -20,8 +24,11 @@ return parseWrapper(state);

const endWrapperPosition = position.range[1];
state.tokens.push({
type: constants_1.TokenTypes.AttributeValue,
value: state.accumulatedContent,
range: position.range,
loc: position.loc,
});
if (state.accumulatedContent) {
state.tokens.push({
type: constants_1.TokenTypes.AttributeValue,
value: state.accumulatedContent,
range: position.range,
loc: position.loc,
isTemplate: false,
});
}
const range = [endWrapperPosition, endWrapperPosition + 1];

@@ -41,1 +48,15 @@ const loc = (0, utils_1.calculateTokenLocation)(state.source, range);

}
// Emit one template AttributeValue token for the range [start, end) inside a
// wrapped (quoted) attribute value, then reset the scratch buffers and move
// the caret past the template. The current context is deliberately left
// unchanged so scanning continues inside the wrapped value.
function parseTemplate(state, [templateStart, templateEnd]) {
    const tokenRange = [templateStart, templateEnd];
    const token = {
        type: constants_1.TokenTypes.AttributeValue,
        isTemplate: true,
        value: state.source.slice(templateStart, templateEnd),
        range: tokenRange,
        loc: (0, utils_1.calculateTokenLocation)(state.source, tokenRange),
    };
    state.tokens.push(token);
    state.accumulatedContent = "";
    state.decisionBuffer = "";
    state.caretPosition = templateEnd;
}
import type { TokenizerState } from "../../types";
export declare function parse(chars: string, state: TokenizerState): void;
export declare function parse(chars: string, state: TokenizerState, charIndex: number): void;

@@ -6,3 +6,3 @@ "use strict";

const utils_1 = require("../../utils");
function parse(chars, state) {
function parse(chars, state, charIndex) {
if (chars === '"' || chars === "'") {

@@ -14,3 +14,3 @@ return parseWrapper(state);

}
if (!(0, utils_1.isWhitespace)(chars)) {
if (!(0, utils_1.isWhitespace)(chars) || state.getTemplateRangeAt(charIndex)) {
return parseBare(state);

@@ -17,0 +17,0 @@ }

import type { TokenizerState } from "../../types";
export declare function parse(chars: string, state: TokenizerState): void;
export declare function parse(chars: string, state: TokenizerState, charIndex: number): void;

@@ -7,3 +7,7 @@ "use strict";

const COMMENT_END = "-->";
function parse(chars, state) {
function parse(chars, state, charIndex) {
const range = state.consumeTemplateRangeAt(charIndex);
if (range) {
return parseTemplate(state, range);
}
if (chars === "-" || chars === "--") {

@@ -27,9 +31,12 @@ state.caretPosition++;

const endLoc = (0, utils_1.calculateTokenLocation)(state.source, endRange);
if (state.accumulatedContent.length !== 0) {
state.tokens.push({
type: constants_1.TokenTypes.CommentContent,
value: state.accumulatedContent,
range: position.range,
loc: position.loc,
isTemplate: false,
});
}
state.tokens.push({
type: constants_1.TokenTypes.CommentContent,
value: state.accumulatedContent,
range: position.range,
loc: position.loc,
});
state.tokens.push({
type: constants_1.TokenTypes.CommentClose,

@@ -45,1 +52,25 @@ value: state.decisionBuffer,

}
// Handle a template expression inside a comment: first flush any plain
// comment text accumulated so far as a non-template CommentContent token,
// then emit a template CommentContent token for [start, end), clear the
// scratch buffers, and advance the caret past the template.
function parseTemplate(state, [templateStart, templateEnd]) {
    const pendingText = state.accumulatedContent;
    if (pendingText.length !== 0) {
        const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: false });
        state.tokens.push({
            type: constants_1.TokenTypes.CommentContent,
            value: pendingText,
            range: position.range,
            loc: position.loc,
            isTemplate: false,
        });
    }
    const tokenRange = [templateStart, templateEnd];
    state.tokens.push({
        type: constants_1.TokenTypes.CommentContent,
        value: state.source.slice(templateStart, templateEnd),
        range: tokenRange,
        loc: (0, utils_1.calculateTokenLocation)(state.source, tokenRange),
        isTemplate: true,
    });
    state.accumulatedContent = "";
    state.decisionBuffer = "";
    state.caretPosition = templateEnd;
}
import { TokenizerState } from "../../types";
export declare function parse(chars: string, state: TokenizerState): void;
export declare function parse(chars: string, state: TokenizerState, charIndex: number): void;
export declare function handleContentEnd(state: TokenizerState): void;

@@ -9,3 +9,7 @@ "use strict";

const OPEN_TAG_START_PATTERN = /^<\w/;
function parse(chars, state) {
function parse(chars, state, charIndex) {
const range = state.consumeTemplateRangeAt(charIndex);
if (range) {
return parseTemplate(state, range);
}
if (OPEN_TAG_START_PATTERN.test(chars)) {

@@ -44,2 +48,3 @@ return parseOpeningCornerBraceWithText(state);

loc: position.loc,
isTemplate: false,
});

@@ -55,2 +60,3 @@ }

loc: position.loc,
isTemplate: false,
};

@@ -115,1 +121,18 @@ }

}
// Handle a template expression in data context: flush any buffered plain
// text first (via the sibling generateTextToken helper), then emit a
// template Text token for [start, end), clear the scratch buffers, and
// advance the caret past the template.
function parseTemplate(state, [templateStart, templateEnd]) {
    if (state.accumulatedContent.length !== 0) {
        state.tokens.push(generateTextToken(state));
    }
    const tokenRange = [templateStart, templateEnd];
    const token = {
        type: constants_1.TokenTypes.Text,
        value: state.source.slice(templateStart, templateEnd),
        range: tokenRange,
        loc: (0, utils_1.calculateTokenLocation)(state.source, tokenRange),
        isTemplate: true,
    };
    state.tokens.push(token);
    state.accumulatedContent = "";
    state.decisionBuffer = "";
    state.caretPosition = templateEnd;
}

@@ -37,5 +37,6 @@ "use strict";

state.currentContext =
contextsMap[tagName || "default"] || contextsMap["default"];
contextsMap[tagName || "default"] ||
contextsMap["default"];
state.caretPosition++;
state.contextParams[constants_1.TokenizerContextTypes.OpenTagEnd] = undefined;
}
import { TokenizerState } from "../../types";
export declare function parse(chars: string, state: TokenizerState): void;
export declare function parse(chars: string, state: TokenizerState, charIndex: number): void;

@@ -6,3 +6,3 @@ "use strict";

const utils_1 = require("../../utils");
function parse(chars, state) {
function parse(chars, state, charIndex) {
if (chars === "<" ||

@@ -17,2 +17,6 @@ chars === "</" ||

}
const range = state.consumeTemplateRangeAt(charIndex);
if (range) {
return parseTemplate(state, range);
}
state.accumulatedContent += state.decisionBuffer;

@@ -30,2 +34,3 @@ state.decisionBuffer = "";

loc: position.loc,
isTemplate: false,
});

@@ -49,1 +54,25 @@ }

}
// Handle a template expression inside a <script> body: flush any buffered
// script text as a non-template ScriptTagContent token, then emit a template
// ScriptTagContent token for [start, end), clear the scratch buffers, and
// advance the caret past the template.
function parseTemplate(state, [templateStart, templateEnd]) {
    const pendingText = state.accumulatedContent;
    if (pendingText.length !== 0) {
        const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: false });
        state.tokens.push({
            type: constants_1.TokenTypes.ScriptTagContent,
            value: pendingText,
            range: position.range,
            loc: position.loc,
            isTemplate: false,
        });
    }
    const tokenRange = [templateStart, templateEnd];
    state.tokens.push({
        type: constants_1.TokenTypes.ScriptTagContent,
        value: state.source.slice(templateStart, templateEnd),
        range: tokenRange,
        loc: (0, utils_1.calculateTokenLocation)(state.source, tokenRange),
        isTemplate: true,
    });
    state.accumulatedContent = "";
    state.decisionBuffer = "";
    state.caretPosition = templateEnd;
}
import { TokenizerState } from "../../types";
export declare function parse(chars: string, state: TokenizerState): void;
export declare function parse(chars: string, state: TokenizerState, charIndex: number): void;

@@ -7,3 +7,3 @@ "use strict";

const CLOSING_STYLE_TAG_PATTERN = /<\/style\s*>/i;
function parse(chars, state) {
function parse(chars, state, charIndex) {
if (chars === "<" ||

@@ -18,2 +18,6 @@ chars === "</" ||

}
const range = state.consumeTemplateRangeAt(charIndex);
if (range) {
return parseTemplate(state, range);
}
state.accumulatedContent += state.decisionBuffer;

@@ -31,2 +35,3 @@ state.decisionBuffer = "";

loc: position.loc,
isTemplate: false,
});

@@ -50,1 +55,25 @@ }

}
// Handle a template expression inside a <style> body: flush any buffered
// style text as a non-template StyleTagContent token, then emit a template
// StyleTagContent token for [start, end), clear the scratch buffers, and
// advance the caret past the template.
function parseTemplate(state, [templateStart, templateEnd]) {
    const pendingText = state.accumulatedContent;
    if (pendingText.length !== 0) {
        const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: false });
        state.tokens.push({
            type: constants_1.TokenTypes.StyleTagContent,
            value: pendingText,
            range: position.range,
            loc: position.loc,
            isTemplate: false,
        });
    }
    const tokenRange = [templateStart, templateEnd];
    state.tokens.push({
        type: constants_1.TokenTypes.StyleTagContent,
        value: state.source.slice(templateStart, templateEnd),
        range: tokenRange,
        loc: (0, utils_1.calculateTokenLocation)(state.source, tokenRange),
        isTemplate: true,
    });
    state.accumulatedContent = "";
    state.decisionBuffer = "";
    state.caretPosition = templateEnd;
}

@@ -1,7 +0,6 @@

import { AnyToken, TokenAdapter, TokenizerState } from "../types";
export declare function tokenize(source: string | undefined, tokenAdapter: TokenAdapter, { isFinalChunk, }?: {
import { AnyToken, Range, TokenAdapter } from "../types";
export declare function tokenize(source: string | undefined, tokenAdapter: TokenAdapter, templateRanges: Range[], { isFinalChunk, }?: {
isFinalChunk?: boolean;
}): {
state: TokenizerState;
tokens: AnyToken[];
};

@@ -6,2 +6,3 @@ "use strict";

const handlers_1 = require("./handlers");
const tokenizer_state_1 = require("./tokenizer-state");
const contextHandlers = {

@@ -28,14 +29,9 @@ [constants_1.TokenizerContextTypes.Data]: handlers_1.data,

};
function tokenizeChars(chars, state, tokens, { isFinalChunk, positionOffset, }) {
let charIndex = state.caretPosition - positionOffset;
let charIndexBefore = charIndex;
function tokenizeChars(chars, state, { isFinalChunk, }) {
let charIndex = state.caretPosition;
while (charIndex < chars.length) {
const handler = contextHandlers[state.currentContext];
state.decisionBuffer += chars[charIndex];
if (charIndexBefore !== charIndex && chars[charIndex] === "\n") {
state.linePosition++;
}
charIndexBefore = charIndex;
handler.parse(state.decisionBuffer, state, tokens);
charIndex = state.caretPosition - positionOffset;
handler.parse(state.decisionBuffer, state, charIndex);
charIndex = state.caretPosition;
}

@@ -46,30 +42,14 @@ if (isFinalChunk) {

if (handler.handleContentEnd !== undefined) {
handler.handleContentEnd(state, tokens);
handler.handleContentEnd(state);
}
}
}
function tokenize(source = "", tokenAdapter, { isFinalChunk, } = {}) {
function tokenize(source = "", tokenAdapter, templateRanges, { isFinalChunk, } = {}) {
isFinalChunk = isFinalChunk === undefined ? true : isFinalChunk;
const tokens = [];
const state = {
currentContext: constants_1.TokenizerContextTypes.Data,
contextParams: {},
decisionBuffer: "",
accumulatedContent: "",
caretPosition: 0,
linePosition: 1,
source,
tokens: {
push(token) {
tokens.push(Object.assign(Object.assign({}, token), { range: tokenAdapter.finalizeRange(token), loc: tokenAdapter.finalizeLocation(token) }));
},
},
};
const state = new tokenizer_state_1.HTMLTokenizerState(source, tokenAdapter, templateRanges);
const chars = state.decisionBuffer + source;
const positionOffset = state.caretPosition - state.decisionBuffer.length;
tokenizeChars(chars, state, tokens, {
tokenizeChars(chars, state, {
isFinalChunk,
positionOffset,
});
return { state, tokens };
return { tokens: state.getTokens() };
}

@@ -12,3 +12,4 @@ "use strict";

function handleCommentContent(state, token) {
state.currentNode.value = (0, utils_1.createNodeFrom)(token);
const node = (0, utils_1.createNodeFrom)(token);
state.currentNode.children.push(node);
state.caretPosition++;

@@ -15,0 +16,0 @@ return state;

@@ -30,3 +30,4 @@ "use strict";

function handleScriptContent(state, token) {
state.currentNode.value = (0, utils_1.createNodeFrom)(token);
const node = (0, utils_1.createNodeFrom)(token);
state.currentNode.children.push(node);
(0, utils_1.updateNodeEnd)(state.currentNode, token);

@@ -33,0 +34,0 @@ state.caretPosition++;

@@ -30,3 +30,4 @@ "use strict";

function handleStyleContent(state, token) {
state.currentNode.value = (0, utils_1.createNodeFrom)(token);
const node = (0, utils_1.createNodeFrom)(token);
state.currentNode.children.push(node);
(0, utils_1.updateNodeEnd)(state.currentNode, token);

@@ -33,0 +34,0 @@ state.caretPosition++;

@@ -40,2 +40,3 @@ "use strict";

loc: (0, utils_1.cloneLocation)(token.loc),
children: [],
};

@@ -84,2 +85,3 @@ state.currentNode.children.push(commentNode);

attributes: [],
children: [],
};

@@ -103,2 +105,3 @@ state.currentNode.children.push(scriptNode);

attributes: [],
children: [],
};

@@ -105,0 +108,0 @@ state.currentNode.children.push(styleNode);

@@ -6,6 +6,6 @@ import { CommentNode, DoctypeNode, ScriptTagNode, StyleTagNode, TagNode, DocumentNode, AnyNode, AttributeNode } from "./node";

};
export type ContextualScriptTagNode = ContextualNode<ScriptTagNode, "close" | "openStart" | "value" | "openEnd">;
export type ContextualStyleTagNode = ContextualNode<StyleTagNode, "openStart" | "openEnd" | "value" | "close">;
export type ContextualScriptTagNode = ContextualNode<ScriptTagNode, "close" | "openStart" | "openEnd">;
export type ContextualStyleTagNode = ContextualNode<StyleTagNode, "openStart" | "openEnd" | "close">;
export type ContextualDoctypeNode = ContextualNode<DoctypeNode, "open" | "close">;
export type ContextualCommentNode = ContextualNode<CommentNode, "open" | "close" | "value">;
export type ContextualCommentNode = ContextualNode<CommentNode, "open" | "close">;
export type ContextualTagNode = ContextualNode<TagNode, "close" | "selfClosing" | "name" | "openEnd" | "openStart"> & {

@@ -12,0 +12,0 @@ children: Array<ContextualScriptTagNode | ContextualStyleTagNode | ContextualDoctypeNode | ContextualCommentNode | ContextualTagNode | TagNode["children"][number]>;

import { NodeTypes } from "../constants";
import { SimpleNode } from "./simple-node";
import { BaseNode } from "./base-node";
interface SimpleNode<T extends NodeTypes> extends BaseNode {
type: T;
value: string;
}
interface TemplatableNode<T extends NodeTypes> extends BaseNode {
type: T;
value: string;
}
export interface DocumentNode extends BaseNode {

@@ -8,3 +15,3 @@ type: NodeTypes.Document;

}
export type TextNode = SimpleNode<NodeTypes.Text>;
export type TextNode = TemplatableNode<NodeTypes.Text>;
export interface TagNode extends BaseNode {

@@ -30,4 +37,4 @@ type: NodeTypes.Tag;

}
export type AttributeKeyNode = SimpleNode<NodeTypes.AttributeKey>;
export type AttributeValueNode = SimpleNode<NodeTypes.AttributeValue>;
export type AttributeKeyNode = TemplatableNode<NodeTypes.AttributeKey>;
export type AttributeValueNode = TemplatableNode<NodeTypes.AttributeValue>;
export type AttributeValueWrapperStartNode = SimpleNode<NodeTypes.AttributeValueWrapperStart>;

@@ -41,3 +48,3 @@ export type AttributeValueWrapperEndNode = SimpleNode<NodeTypes.AttributeValueWrapperEnd>;

close: CloseScriptTagNode;
value?: ScriptTagContentNode;
children: ScriptTagContentNode[];
}

@@ -47,3 +54,3 @@ export type OpenScriptTagStartNode = SimpleNode<NodeTypes.OpenScriptTagStart>;

export type OpenScriptTagEndNode = SimpleNode<NodeTypes.OpenScriptTagEnd>;
export type ScriptTagContentNode = SimpleNode<NodeTypes.ScriptTagContent>;
export type ScriptTagContentNode = TemplatableNode<NodeTypes.ScriptTagContent>;
export interface StyleTagNode extends BaseNode {

@@ -55,7 +62,7 @@ type: NodeTypes.StyleTag;

close: CloseStyleTagNode;
value?: StyleTagContentNode;
children: StyleTagContentNode[];
}
export type OpenStyleTagStartNode = SimpleNode<NodeTypes.OpenStyleTagStart>;
export type OpenStyleTagEndNode = SimpleNode<NodeTypes.OpenStyleTagEnd>;
export type StyleTagContentNode = SimpleNode<NodeTypes.StyleTagContent>;
export type StyleTagContentNode = TemplatableNode<NodeTypes.StyleTagContent>;
export type CloseStyleTagNode = SimpleNode<NodeTypes.CloseStyleTag>;

@@ -66,7 +73,7 @@ export interface CommentNode extends BaseNode {

close: CommentCloseNode;
value: CommentContentNode;
children: CommentContentNode[];
}
export type CommentOpenNode = SimpleNode<NodeTypes.CommentOpen>;
export type CommentCloseNode = SimpleNode<NodeTypes.CommentClose>;
export type CommentContentNode = SimpleNode<NodeTypes.CommentContent>;
export type CommentContentNode = TemplatableNode<NodeTypes.CommentContent>;
export interface DoctypeNode extends BaseNode {

@@ -90,1 +97,2 @@ type: NodeTypes.Doctype;

export type AnyNode = DocumentNode | TextNode | TagNode | OpenTagStartNode | OpenTagEndNode | CloseTagNode | AttributeNode | AttributeKeyNode | AttributeValueNode | AttributeValueWrapperStartNode | AttributeValueWrapperEndNode | ScriptTagNode | OpenScriptTagStartNode | CloseScriptTagNode | OpenScriptTagEndNode | ScriptTagContentNode | StyleTagNode | OpenStyleTagStartNode | OpenStyleTagEndNode | StyleTagContentNode | CloseStyleTagNode | CommentNode | CommentOpenNode | CommentCloseNode | CommentContentNode | DoctypeNode | DoctypeOpenNode | DoctypeCloseNode | DoctypeAttributeNode | DoctypeAttributeValueNode | DoctypeAttributeWrapperStartNode | DoctypeAttributeWrapperEndNode;
export {};
import { DocumentNode } from "./node";
import { TokenAdapter } from "./token-adapter";
import { AnyToken } from "./token";
import { Range } from "./range";
export type ParseResult = {

@@ -9,3 +10,4 @@ ast: DocumentNode;

export type Options = {
tokenAdapter: TokenAdapter;
tokenAdapter?: TokenAdapter;
templateRanges?: Range[];
};

@@ -10,2 +10,5 @@ import { TokenTypes } from "../constants";

}
export type AnyToken = Token<TokenTypes.Text> | Token<TokenTypes.OpenTagStart> | Token<TokenTypes.OpenTagEnd> | Token<TokenTypes.CloseTag> | Token<TokenTypes.AttributeKey> | Token<TokenTypes.AttributeAssignment> | Token<TokenTypes.AttributeValueWrapperStart> | Token<TokenTypes.AttributeValue> | Token<TokenTypes.AttributeValueWrapperEnd> | Token<TokenTypes.DoctypeOpen> | Token<TokenTypes.DoctypeAttributeValue> | Token<TokenTypes.DoctypeAttributeWrapperStart> | Token<TokenTypes.DoctypeAttributeWrapperEnd> | Token<TokenTypes.DoctypeClose> | Token<TokenTypes.CommentOpen> | Token<TokenTypes.CommentContent> | Token<TokenTypes.CommentClose> | Token<TokenTypes.OpenScriptTagStart> | Token<TokenTypes.OpenScriptTagEnd> | Token<TokenTypes.ScriptTagContent> | Token<TokenTypes.CloseScriptTag> | Token<TokenTypes.OpenStyleTagStart> | Token<TokenTypes.OpenStyleTagEnd> | Token<TokenTypes.StyleTagContent> | Token<TokenTypes.CloseStyleTag>;
export type TemplatableToken<T extends TokenTypes> = Token<T> & {
isTemplate: boolean;
};
export type AnyToken = TemplatableToken<TokenTypes.Text> | Token<TokenTypes.OpenTagStart> | Token<TokenTypes.OpenTagEnd> | Token<TokenTypes.CloseTag> | TemplatableToken<TokenTypes.AttributeKey> | Token<TokenTypes.AttributeAssignment> | Token<TokenTypes.AttributeValueWrapperStart> | TemplatableToken<TokenTypes.AttributeValue> | Token<TokenTypes.AttributeValueWrapperEnd> | Token<TokenTypes.DoctypeOpen> | Token<TokenTypes.DoctypeAttributeValue> | Token<TokenTypes.DoctypeAttributeWrapperStart> | Token<TokenTypes.DoctypeAttributeWrapperEnd> | Token<TokenTypes.DoctypeClose> | Token<TokenTypes.CommentOpen> | TemplatableToken<TokenTypes.CommentContent> | Token<TokenTypes.CommentClose> | Token<TokenTypes.OpenScriptTagStart> | Token<TokenTypes.OpenScriptTagEnd> | TemplatableToken<TokenTypes.ScriptTagContent> | Token<TokenTypes.CloseScriptTag> | Token<TokenTypes.OpenStyleTagStart> | Token<TokenTypes.OpenStyleTagEnd> | TemplatableToken<TokenTypes.StyleTagContent> | Token<TokenTypes.CloseStyleTag>;

@@ -1,6 +0,5 @@

import { AnyToken } from "./token";
import { TokenizerState } from "./tokenizer-state";
export interface TokenizeHandler {
parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void;
handleContentEnd?: (state: TokenizerState, tokens: AnyToken[]) => void;
parse(chars: string, state: TokenizerState, charIndex: number): void;
handleContentEnd?: (state: TokenizerState) => void;
}
import { TokenizerContextTypes } from "../constants";
import { Range } from "./range";
import { AnyToken } from "./token";
type ContextParams = {
export type ContextParams = {
[TokenizerContextTypes.AttributeValueWrapped]?: {

@@ -17,3 +18,3 @@ wrapper: string;

};
export type TokenizerState = {
export interface TokenizerState {
currentContext: TokenizerContextTypes;

@@ -29,3 +30,4 @@ contextParams: ContextParams;

};
};
export {};
consumeTemplateRangeAt(charIndex: number): Range | null;
getTemplateRangeAt(charIndex: number): Range | null;
}

@@ -9,3 +9,3 @@ "use strict";

const range = (0, clone_range_1.cloneRange)(token.range);
return {
const ret = {
type: token.type,

@@ -15,3 +15,9 @@ value: token.value,

range,
// @ts-ignore
isTemplate: token["isTemplate"],
};
if (ret.isTemplate === undefined) {
delete ret.isTemplate;
}
return ret;
}
{
"name": "es-html-parser",
"version": "0.0.10",
"version": "1.0.0-alpha.0",
"main": "dist/index.js",

@@ -5,0 +5,0 @@ "license": "MIT",

@@ -473,3 +473,3 @@ # ES HTML Parser

close: CloseScriptTagNode;
value?: ScriptTagContentNode;
children: ScriptTagContentNode[];
}

@@ -533,3 +533,3 @@ ```

close: CloseStyleTagNode;
value?: StyleTagContentNode;
children: StyleTagContentNode[];
}

@@ -591,3 +591,3 @@ ```

close: CommentCloseNode;
value: CommentContentNode;
children: CommentContentNode[];
}

@@ -594,0 +594,0 @@ ```

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc