es-html-parser
Comparing version 0.0.9 to 0.0.10
@@ -13,2 +13,2 @@ "use strict"; | ||
AstTypes["Style"] = "Style"; | ||
})(AstTypes = exports.AstTypes || (exports.AstTypes = {})); | ||
})(AstTypes || (exports.AstTypes = AstTypes = {})); |
@@ -18,2 +18,2 @@ "use strict"; | ||
ConstructTreeContextTypes["StyleTag"] = "StyleTag"; | ||
})(ConstructTreeContextTypes = exports.ConstructTreeContextTypes || (exports.ConstructTreeContextTypes = {})); | ||
})(ConstructTreeContextTypes || (exports.ConstructTreeContextTypes = ConstructTreeContextTypes = {})); |
@@ -38,2 +38,2 @@ "use strict"; | ||
NodeTypes["DoctypeAttributeWrapperEnd"] = "DoctypeAttributeWrapperEnd"; | ||
})(NodeTypes = exports.NodeTypes || (exports.NodeTypes = {})); | ||
})(NodeTypes || (exports.NodeTypes = NodeTypes = {})); |
@@ -31,2 +31,2 @@ "use strict"; | ||
TokenTypes["CloseStyleTag"] = "CloseStyleTag"; | ||
})(TokenTypes = exports.TokenTypes || (exports.TokenTypes = {})); | ||
})(TokenTypes || (exports.TokenTypes = TokenTypes = {})); |
@@ -25,2 +25,2 @@ "use strict"; | ||
TokenizerContextTypes["CommentClose"] = "CommentClose"; | ||
})(TokenizerContextTypes = exports.TokenizerContextTypes || (exports.TokenizerContextTypes = {})); | ||
})(TokenizerContextTypes || (exports.TokenizerContextTypes = TokenizerContextTypes = {})); |
import { ParseResult } from "../types"; | ||
export declare function parse(html: string): ParseResult; | ||
import { Options } from "../types/parse"; | ||
export declare function parse(html: string, options?: Options): ParseResult; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parse = void 0; | ||
exports.parse = parse; | ||
const tree_constructor_1 = require("../tree-constructor"); | ||
const tokenizer_1 = require("../tokenizer"); | ||
const utils_1 = require("../utils"); | ||
function parse(html) { | ||
const { tokens } = (0, tokenizer_1.tokenize)(html, undefined); | ||
const token_adapter_1 = require("../token-adapter"); | ||
function parse(html, options) { | ||
const tokenAdapter = (options && options.tokenAdapter) || token_adapter_1.defaultTokenAdapter; | ||
const { tokens } = (0, tokenizer_1.tokenize)(html, tokenAdapter); | ||
const { ast } = (0, tree_constructor_1.constructTree)(tokens, undefined); | ||
@@ -15,2 +17,1 @@ return { | ||
} | ||
exports.parse = parse; |
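
The hunks above change the public entry point: `parse` now accepts an optional second argument and resolves a token adapter (falling back to `defaultTokenAdapter` from `../token-adapter`) before calling `tokenize`. A minimal usage sketch, assuming the package root re-exports `parse` as documented in the README further down:

```ts
import { parse } from "es-html-parser";

// The 0.0.9-style call still works: with no options, parse() falls back to the
// built-in defaultTokenAdapter, so ranges and locations come straight from the tokenizer.
const { ast, tokens } = parse("<div class='a'>text</div>");

console.log(ast.type);      // root DocumentNode of the ParseResult
console.log(tokens.length); // AnyToken[] from the same ParseResult
```
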
@@ -1,2 +0,2 @@ | ||
import { AnyToken, TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void; | ||
import type { TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState): void; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parse = void 0; | ||
exports.parse = parse; | ||
const constants_1 = require("../../constants"); | ||
const token_types_1 = require("../../constants/token-types"); | ||
const utils_1 = require("../../utils"); | ||
function parse(chars, state, tokens) { | ||
function parse(chars, state) { | ||
if (isKeyBreak(chars)) { | ||
return parseKeyEnd(state, tokens); | ||
return parseKeyEnd(state); | ||
} | ||
@@ -15,9 +15,8 @@ state.accumulatedContent += state.decisionBuffer; | ||
} | ||
exports.parse = parse; | ||
function isKeyBreak(chars) { | ||
return chars === "=" || chars === "/" || chars === ">" || (0, utils_1.isWhitespace)(chars); | ||
} | ||
function parseKeyEnd(state, tokens) { | ||
function parseKeyEnd(state) { | ||
const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: false }); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: token_types_1.TokenTypes.AttributeKey, | ||
@@ -24,0 +23,0 @@ value: state.accumulatedContent, |
@@ -1,3 +0,3 @@ | ||
import { AnyToken, TokenizerState } from "../../types"; | ||
export declare function parseValueEnd(state: TokenizerState, tokens: AnyToken[]): void; | ||
export declare function parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void; | ||
import type { TokenizerState } from "../../types"; | ||
export declare function parseValueEnd(state: TokenizerState): void; | ||
export declare function parse(chars: string, state: TokenizerState): void; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parse = exports.parseValueEnd = void 0; | ||
exports.parseValueEnd = parseValueEnd; | ||
exports.parse = parse; | ||
const constants_1 = require("../../constants"); | ||
const utils_1 = require("../../utils"); | ||
function parseValueEnd(state, tokens) { | ||
function parseValueEnd(state) { | ||
const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: false }); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.AttributeValue, | ||
@@ -18,6 +19,5 @@ value: state.accumulatedContent, | ||
} | ||
exports.parseValueEnd = parseValueEnd; | ||
function parse(chars, state, tokens) { | ||
function parse(chars, state) { | ||
if ((0, utils_1.isWhitespace)(chars) || chars === ">" || chars === "/") { | ||
return parseValueEnd(state, tokens); | ||
return parseValueEnd(state); | ||
} | ||
@@ -28,2 +28,1 @@ state.accumulatedContent += state.decisionBuffer; | ||
} | ||
exports.parse = parse; |
@@ -1,2 +0,2 @@ | ||
import { AnyToken, TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void; | ||
import type { TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState): void; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parse = void 0; | ||
exports.parse = parse; | ||
const constants_1 = require("../../constants"); | ||
const utils_1 = require("../../utils"); | ||
function parse(chars, state, tokens) { | ||
function parse(chars, state) { | ||
var _a; | ||
const wrapperChar = (_a = state.contextParams[constants_1.TokenizerContextTypes.AttributeValueWrapped]) === null || _a === void 0 ? void 0 : _a.wrapper; | ||
if (chars === wrapperChar) { | ||
return parseWrapper(state, tokens); | ||
return parseWrapper(state); | ||
} | ||
@@ -16,7 +16,6 @@ state.accumulatedContent += state.decisionBuffer; | ||
} | ||
exports.parse = parse; | ||
function parseWrapper(state, tokens) { | ||
function parseWrapper(state) { | ||
const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: false }); | ||
const endWrapperPosition = position.range[1]; | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.AttributeValue, | ||
@@ -29,3 +28,3 @@ value: state.accumulatedContent, | ||
const loc = (0, utils_1.calculateTokenLocation)(state.source, range); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.AttributeValueWrapperEnd, | ||
@@ -32,0 +31,0 @@ value: state.decisionBuffer, |
@@ -1,2 +0,2 @@ | ||
import { TokenizerState, AnyToken } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void; | ||
import type { TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState): void; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parse = void 0; | ||
exports.parse = parse; | ||
const constants_1 = require("../../constants"); | ||
const utils_1 = require("../../utils"); | ||
function parse(chars, state, tokens) { | ||
function parse(chars, state) { | ||
if (chars === '"' || chars === "'") { | ||
return parseWrapper(state, tokens); | ||
return parseWrapper(state); | ||
} | ||
@@ -19,8 +19,7 @@ if (chars === ">" || chars === "/") { | ||
} | ||
exports.parse = parse; | ||
function parseWrapper(state, tokens) { | ||
function parseWrapper(state) { | ||
const wrapper = state.decisionBuffer; | ||
const range = [state.caretPosition, state.caretPosition + 1]; | ||
const loc = (0, utils_1.calculateTokenLocation)(state.source, range); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.AttributeValueWrapperStart, | ||
@@ -27,0 +26,0 @@ value: wrapper, |
@@ -1,2 +0,2 @@ | ||
import { AnyToken, TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void; | ||
import type { TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState): void; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parse = void 0; | ||
exports.parse = parse; | ||
const constants_1 = require("../../constants"); | ||
const utils_1 = require("../../utils"); | ||
function parse(chars, state, tokens) { | ||
function parse(chars, state) { | ||
if (chars === ">" || chars === "/") { | ||
@@ -11,3 +11,3 @@ return parseTagEnd(state); | ||
if (chars === "=") { | ||
return parseEqual(state, tokens); | ||
return parseEqual(state); | ||
} | ||
@@ -20,3 +20,2 @@ if (!(0, utils_1.isWhitespace)(chars)) { | ||
} | ||
exports.parse = parse; | ||
function parseTagEnd(state) { | ||
@@ -37,5 +36,5 @@ var _a; | ||
} | ||
function parseEqual(state, tokens) { | ||
function parseEqual(state) { | ||
const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: true }); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.AttributeAssignment, | ||
@@ -42,0 +41,0 @@ value: state.decisionBuffer, |
@@ -1,2 +0,2 @@ | ||
import { AnyToken, TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void; | ||
import type { TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState): void; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parse = void 0; | ||
exports.parse = parse; | ||
const constants_1 = require("../../constants"); | ||
const utils_1 = require("../../utils"); | ||
function parse(chars, state, tokens) { | ||
function parse(chars, state) { | ||
if (chars === ">") { | ||
return parseClosingCornerBrace(state, tokens); | ||
return parseClosingCornerBrace(state); | ||
} | ||
@@ -14,6 +14,5 @@ state.accumulatedContent += state.decisionBuffer; | ||
} | ||
exports.parse = parse; | ||
function parseClosingCornerBrace(state, tokens) { | ||
function parseClosingCornerBrace(state) { | ||
const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: true }); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.CloseTag, | ||
@@ -20,0 +19,0 @@ value: state.accumulatedContent + state.decisionBuffer, |
@@ -1,2 +0,2 @@ | ||
import { AnyToken, TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void; | ||
import type { TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState): void; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parse = void 0; | ||
exports.parse = parse; | ||
const constants_1 = require("../../constants"); | ||
const utils_1 = require("../../utils"); | ||
const COMMENT_END = "-->"; | ||
function parse(chars, state, tokens) { | ||
function parse(chars, state) { | ||
if (chars === "-" || chars === "--") { | ||
@@ -13,3 +13,3 @@ state.caretPosition++; | ||
if (chars === COMMENT_END) { | ||
return parseCommentClose(state, tokens); | ||
return parseCommentClose(state); | ||
} | ||
@@ -20,4 +20,3 @@ state.accumulatedContent += state.decisionBuffer; | ||
} | ||
exports.parse = parse; | ||
function parseCommentClose(state, tokens) { | ||
function parseCommentClose(state) { | ||
const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: false }); | ||
@@ -29,3 +28,3 @@ const endRange = [ | ||
const endLoc = (0, utils_1.calculateTokenLocation)(state.source, endRange); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.CommentContent, | ||
@@ -35,3 +34,4 @@ value: state.accumulatedContent, | ||
loc: position.loc, | ||
}, { | ||
}); | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.CommentClose, | ||
@@ -38,0 +38,0 @@ value: state.decisionBuffer, |
@@ -1,3 +0,3 @@ | ||
import { AnyToken, TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void; | ||
export declare function handleContentEnd(state: TokenizerState, tokens: AnyToken[]): void; | ||
import { TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState): void; | ||
export declare function handleContentEnd(state: TokenizerState): void; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.handleContentEnd = exports.parse = void 0; | ||
exports.parse = parse; | ||
exports.handleContentEnd = handleContentEnd; | ||
const constants_1 = require("../../constants"); | ||
@@ -8,8 +9,8 @@ const utils_1 = require("../../utils"); | ||
const OPEN_TAG_START_PATTERN = /^<\w/; | ||
function parse(chars, state, tokens) { | ||
function parse(chars, state) { | ||
if (OPEN_TAG_START_PATTERN.test(chars)) { | ||
return parseOpeningCornerBraceWithText(state, tokens); | ||
return parseOpeningCornerBraceWithText(state); | ||
} | ||
if (chars === "</") { | ||
return parseOpeningCornerBraceWithSlash(state, tokens); | ||
return parseOpeningCornerBraceWithSlash(state); | ||
} | ||
@@ -21,3 +22,3 @@ if (chars === "<" || chars === "<!" || chars === "<!-") { | ||
if (chars === COMMENT_START) { | ||
return parseCommentOpen(state, tokens); | ||
return parseCommentOpen(state); | ||
} | ||
@@ -29,3 +30,3 @@ if (isIncompleteDoctype(chars)) { | ||
if (chars.toUpperCase() === "<!DOCTYPE") { | ||
return parseDoctypeOpen(state, tokens); | ||
return parseDoctypeOpen(state); | ||
} | ||
@@ -36,8 +37,7 @@ state.accumulatedContent += state.decisionBuffer; | ||
} | ||
exports.parse = parse; | ||
function handleContentEnd(state, tokens) { | ||
function handleContentEnd(state) { | ||
const textContent = state.accumulatedContent + state.decisionBuffer; | ||
if (textContent.length !== 0) { | ||
const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: false }); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.Text, | ||
@@ -50,3 +50,2 @@ value: textContent, | ||
} | ||
exports.handleContentEnd = handleContentEnd; | ||
function generateTextToken(state) { | ||
@@ -61,5 +60,5 @@ const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: false }); | ||
} | ||
function parseOpeningCornerBraceWithText(state, tokens) { | ||
function parseOpeningCornerBraceWithText(state) { | ||
if (state.accumulatedContent.length !== 0) { | ||
tokens.push(generateTextToken(state)); | ||
state.tokens.push(generateTextToken(state)); | ||
} | ||
@@ -71,5 +70,5 @@ state.accumulatedContent = state.decisionBuffer; | ||
} | ||
function parseOpeningCornerBraceWithSlash(state, tokens) { | ||
function parseOpeningCornerBraceWithSlash(state) { | ||
if (state.accumulatedContent.length !== 0) { | ||
tokens.push(generateTextToken(state)); | ||
state.tokens.push(generateTextToken(state)); | ||
} | ||
@@ -91,5 +90,5 @@ state.accumulatedContent = state.decisionBuffer; | ||
} | ||
function parseCommentOpen(state, tokens) { | ||
function parseCommentOpen(state) { | ||
if (state.accumulatedContent.length !== 0) { | ||
tokens.push(generateTextToken(state)); | ||
state.tokens.push(generateTextToken(state)); | ||
} | ||
@@ -101,3 +100,3 @@ const range = [ | ||
const loc = (0, utils_1.calculateTokenLocation)(state.source, range); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.CommentOpen, | ||
@@ -113,5 +112,5 @@ value: state.decisionBuffer, | ||
} | ||
function parseDoctypeOpen(state, tokens) { | ||
function parseDoctypeOpen(state) { | ||
if (state.accumulatedContent.length !== 0) { | ||
tokens.push(generateTextToken(state)); | ||
state.tokens.push(generateTextToken(state)); | ||
} | ||
@@ -118,0 +117,0 @@ state.accumulatedContent = state.decisionBuffer; |
@@ -1,2 +0,2 @@ | ||
import { AnyToken, TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void; | ||
import type { TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState): void; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parse = void 0; | ||
exports.parse = parse; | ||
const constants_1 = require("../../constants"); | ||
const utils_1 = require("../../utils"); | ||
function parse(chars, state, tokens) { | ||
function parse(chars, state) { | ||
if ((0, utils_1.isWhitespace)(chars) || chars === ">") { | ||
return parseAttributeEnd(state, tokens); | ||
return parseAttributeEnd(state); | ||
} | ||
@@ -14,6 +14,5 @@ state.accumulatedContent += state.decisionBuffer; | ||
} | ||
exports.parse = parse; | ||
function parseAttributeEnd(state, tokens) { | ||
function parseAttributeEnd(state) { | ||
const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: false }); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.DoctypeAttributeValue, | ||
@@ -20,0 +19,0 @@ value: state.accumulatedContent, |
@@ -1,2 +0,2 @@ | ||
import { AnyToken, TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void; | ||
import type { TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState): void; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parse = void 0; | ||
exports.parse = parse; | ||
const constants_1 = require("../../constants"); | ||
const utils_1 = require("../../utils"); | ||
function parse(chars, state, tokens) { | ||
function parse(chars, state) { | ||
var _a; | ||
const wrapperChar = (_a = state.contextParams[constants_1.TokenizerContextTypes.DoctypeAttributeWrapped]) === null || _a === void 0 ? void 0 : _a.wrapper; | ||
if (chars === wrapperChar) { | ||
return parseWrapper(state, tokens); | ||
return parseWrapper(state); | ||
} | ||
@@ -16,7 +16,6 @@ state.accumulatedContent += state.decisionBuffer; | ||
} | ||
exports.parse = parse; | ||
function parseWrapper(state, tokens) { | ||
function parseWrapper(state) { | ||
const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: false }); | ||
const endWrapperPosition = position.range[1]; | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.DoctypeAttributeValue, | ||
@@ -29,3 +28,3 @@ value: state.accumulatedContent, | ||
const loc = (0, utils_1.calculateTokenLocation)(state.source, range); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.DoctypeAttributeWrapperEnd, | ||
@@ -32,0 +31,0 @@ value: state.decisionBuffer, |
@@ -1,2 +0,2 @@ | ||
import { AnyToken, TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void; | ||
import type { TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState): void; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parse = void 0; | ||
exports.parse = parse; | ||
const constants_1 = require("../../constants"); | ||
const utils_1 = require("../../utils"); | ||
function parse(chars, state, tokens) { | ||
function parse(chars, state) { | ||
if (chars === '"' || chars === "'") { | ||
return parseWrapper(state, tokens); | ||
return parseWrapper(state); | ||
} | ||
@@ -19,4 +19,3 @@ if (chars === ">") { | ||
} | ||
exports.parse = parse; | ||
function parseWrapper(state, tokens) { | ||
function parseWrapper(state) { | ||
const wrapper = state.decisionBuffer; | ||
@@ -28,3 +27,3 @@ const range = [ | ||
const loc = (0, utils_1.calculateTokenLocation)(state.source, range); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.DoctypeAttributeWrapperStart, | ||
@@ -31,0 +30,0 @@ value: wrapper, |
@@ -1,2 +0,2 @@ | ||
import { AnyToken, TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void; | ||
import type { TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState): void; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parse = void 0; | ||
exports.parse = parse; | ||
const constants_1 = require("../../constants"); | ||
const utils_1 = require("../../utils"); | ||
function parse(chars, state, tokens) { | ||
function parse(chars, state) { | ||
const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: true }); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.DoctypeClose, | ||
@@ -19,2 +19,1 @@ value: state.decisionBuffer, | ||
} | ||
exports.parse = parse; |
@@ -1,2 +0,2 @@ | ||
import { AnyToken, TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void; | ||
import { TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState): void; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parse = void 0; | ||
exports.parse = parse; | ||
const constants_1 = require("../../constants"); | ||
const utils_1 = require("../../utils"); | ||
function parse(chars, state, tokens) { | ||
function parse(chars, state) { | ||
if ((0, utils_1.isWhitespace)(chars)) { | ||
return parseWhitespace(state, tokens); | ||
return parseWhitespace(state); | ||
} | ||
if (chars === ">") { | ||
return parseClosingCornerBrace(state, tokens); | ||
return parseClosingCornerBrace(state); | ||
} | ||
@@ -16,3 +16,2 @@ state.decisionBuffer = ""; | ||
} | ||
exports.parse = parse; | ||
function generateDoctypeOpenToken(state) { | ||
@@ -27,4 +26,4 @@ const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: false }); | ||
} | ||
function parseWhitespace(state, tokens) { | ||
tokens.push(generateDoctypeOpenToken(state)); | ||
function parseWhitespace(state) { | ||
state.tokens.push(generateDoctypeOpenToken(state)); | ||
state.accumulatedContent = ""; | ||
@@ -34,4 +33,4 @@ state.decisionBuffer = ""; | ||
} | ||
function parseClosingCornerBrace(state, tokens) { | ||
tokens.push(generateDoctypeOpenToken(state)); | ||
function parseClosingCornerBrace(state) { | ||
state.tokens.push(generateDoctypeOpenToken(state)); | ||
state.accumulatedContent = ""; | ||
@@ -38,0 +37,0 @@ state.decisionBuffer = ""; |
@@ -1,2 +0,2 @@ | ||
import { AnyToken, TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void; | ||
import type { TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState): void; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parse = void 0; | ||
exports.parse = parse; | ||
const constants_1 = require("../../constants"); | ||
@@ -16,5 +16,5 @@ const utils_1 = require("../../utils"); | ||
}; | ||
function parse(chars, state, tokens) { | ||
function parse(chars, state) { | ||
if (chars === ">") { | ||
return parseClosingCornerBrace(state, tokens); | ||
return parseClosingCornerBrace(state); | ||
} | ||
@@ -25,8 +25,7 @@ state.accumulatedContent += state.decisionBuffer; | ||
} | ||
exports.parse = parse; | ||
function parseClosingCornerBrace(state, tokens) { | ||
function parseClosingCornerBrace(state) { | ||
var _a; | ||
const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: true }); | ||
const tagName = (_a = state.contextParams[constants_1.TokenizerContextTypes.OpenTagEnd]) === null || _a === void 0 ? void 0 : _a.tagName; | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: tokensMap[tagName] || tokensMap.default, | ||
@@ -33,0 +32,0 @@ value: state.accumulatedContent + state.decisionBuffer, |
@@ -1,2 +0,2 @@ | ||
import { AnyToken, TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void; | ||
import type { TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState): void; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parse = void 0; | ||
exports.parse = parse; | ||
const constants_1 = require("../../constants"); | ||
@@ -11,8 +11,8 @@ const utils_1 = require("../../utils"); | ||
}; | ||
function parse(chars, state, tokens) { | ||
function parse(chars, state) { | ||
if (chars === ">" || chars === "/") { | ||
return parseTagEnd(state, tokens); | ||
return parseTagEnd(state); | ||
} | ||
if ((0, utils_1.isWhitespace)(chars)) { | ||
return parseWhitespace(state, tokens); | ||
return parseWhitespace(state); | ||
} | ||
@@ -23,7 +23,6 @@ state.accumulatedContent += state.decisionBuffer; | ||
} | ||
exports.parse = parse; | ||
function parseWhitespace(state, tokens) { | ||
function parseWhitespace(state) { | ||
const tagName = (0, utils_1.parseOpenTagName)(state.accumulatedContent); | ||
const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: false }); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: tokensMap[tagName] || tokensMap.default, | ||
@@ -40,6 +39,6 @@ value: state.accumulatedContent, | ||
} | ||
function parseTagEnd(state, tokens) { | ||
function parseTagEnd(state) { | ||
const tagName = (0, utils_1.parseOpenTagName)(state.accumulatedContent); | ||
const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: false }); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: tokensMap[tagName] || tokensMap.default, | ||
@@ -46,0 +45,0 @@ value: state.accumulatedContent, |
@@ -1,2 +0,2 @@ | ||
import { AnyToken, TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void; | ||
import { TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState): void; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parse = void 0; | ||
exports.parse = parse; | ||
const constants_1 = require("../../constants"); | ||
const utils_1 = require("../../utils"); | ||
function parse(chars, state, tokens) { | ||
function parse(chars, state) { | ||
if (chars === "<" || | ||
@@ -14,3 +14,3 @@ chars === "</" || | ||
if (constants_1.CLOSING_SCRIPT_TAG_PATTERN.test(chars)) { | ||
return parseClosingScriptTag(state, tokens); | ||
return parseClosingScriptTag(state); | ||
} | ||
@@ -21,7 +21,6 @@ state.accumulatedContent += state.decisionBuffer; | ||
} | ||
exports.parse = parse; | ||
function parseClosingScriptTag(state, tokens) { | ||
function parseClosingScriptTag(state) { | ||
if (state.accumulatedContent !== "") { | ||
const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: false }); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.ScriptTagContent, | ||
@@ -38,3 +37,3 @@ value: state.accumulatedContent, | ||
const loc = (0, utils_1.calculateTokenLocation)(state.source, range); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.CloseScriptTag, | ||
@@ -41,0 +40,0 @@ value: state.decisionBuffer, |
@@ -1,2 +0,2 @@ | ||
import { AnyToken, TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState, tokens: AnyToken[]): void; | ||
import { TokenizerState } from "../../types"; | ||
export declare function parse(chars: string, state: TokenizerState): void; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parse = void 0; | ||
exports.parse = parse; | ||
const constants_1 = require("../../constants"); | ||
const utils_1 = require("../../utils"); | ||
const CLOSING_STYLE_TAG_PATTERN = /<\/style\s*>/i; | ||
function parse(chars, state, tokens) { | ||
function parse(chars, state) { | ||
if (chars === "<" || | ||
@@ -15,3 +15,3 @@ chars === "</" || | ||
if (CLOSING_STYLE_TAG_PATTERN.test(chars)) { | ||
return parseClosingStyleTag(state, tokens); | ||
return parseClosingStyleTag(state); | ||
} | ||
@@ -22,7 +22,6 @@ state.accumulatedContent += state.decisionBuffer; | ||
} | ||
exports.parse = parse; | ||
function parseClosingStyleTag(state, tokens) { | ||
function parseClosingStyleTag(state) { | ||
if (state.accumulatedContent !== "") { | ||
const position = (0, utils_1.calculateTokenPosition)(state, { keepBuffer: false }); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.StyleTagContent, | ||
@@ -39,3 +38,3 @@ value: state.accumulatedContent, | ||
const loc = (0, utils_1.calculateTokenLocation)(state.source, range); | ||
tokens.push({ | ||
state.tokens.push({ | ||
type: constants_1.TokenTypes.CloseStyleTag, | ||
@@ -42,0 +41,0 @@ value: state.decisionBuffer, |
@@ -1,3 +0,3 @@ | ||
import { AnyToken, TokenizerState } from "../types"; | ||
export declare function tokenize(source?: string, { isFinalChunk, }?: { | ||
import { AnyToken, TokenAdapter, TokenizerState } from "../types"; | ||
export declare function tokenize(source: string | undefined, tokenAdapter: TokenAdapter, { isFinalChunk, }?: { | ||
isFinalChunk?: boolean; | ||
@@ -4,0 +4,0 @@ }): { |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.tokenize = void 0; | ||
exports.tokenize = tokenize; | ||
const constants_1 = require("../constants"); | ||
@@ -48,4 +48,5 @@ const handlers_1 = require("./handlers"); | ||
} | ||
function tokenize(source = "", { isFinalChunk, } = {}) { | ||
function tokenize(source = "", tokenAdapter, { isFinalChunk, } = {}) { | ||
isFinalChunk = isFinalChunk === undefined ? true : isFinalChunk; | ||
const tokens = []; | ||
const state = { | ||
@@ -59,5 +60,9 @@ currentContext: constants_1.TokenizerContextTypes.Data, | ||
source, | ||
tokens: { | ||
push(token) { | ||
tokens.push(Object.assign(Object.assign({}, token), { range: tokenAdapter.finalizeRange(token), loc: tokenAdapter.finalizeLocation(token) })); | ||
}, | ||
}, | ||
}; | ||
const chars = state.decisionBuffer + source; | ||
const tokens = []; | ||
const positionOffset = state.caretPosition - state.decisionBuffer.length; | ||
@@ -70,2 +75,1 @@ tokenizeChars(chars, state, tokens, { | ||
} | ||
exports.tokenize = tokenize; |
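
The new `tokenize` signature threads a `tokenAdapter` through the tokenizer: every token pushed into `state.tokens` is finalized via `tokenAdapter.finalizeRange(token)` and `tokenAdapter.finalizeLocation(token)`. A sketch of the contract this implies, with stand-in types reconstructed from those calls (the package's real `TokenAdapter` declaration lives in its types and may differ in detail):

```ts
// Stand-in shapes inferred from dist/tokenize.js above; not the package's own declarations.
type Range = [number, number];
interface TokenLike {
  type: string;
  value: string;
  range: Range;
  loc: unknown;
}

interface AssumedTokenAdapter {
  finalizeRange(token: TokenLike): Range;
  finalizeLocation(token: TokenLike): TokenLike["loc"];
}

// A pass-through adapter of this shape leaves ranges and locations exactly as the
// tokenizer computed them; whether defaultTokenAdapter does precisely this is not
// shown in the diff.
const passThroughAdapter: AssumedTokenAdapter = {
  finalizeRange: (token) => token.range,
  finalizeLocation: (token) => token.loc,
};
```
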
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.constructTree = void 0; | ||
exports.constructTree = constructTree; | ||
const constants_1 = require("../constants"); | ||
@@ -68,3 +68,2 @@ const utils_1 = require("../utils"); | ||
} | ||
exports.constructTree = constructTree; | ||
function processTokens(tokens, state, positionOffset) { | ||
@@ -75,2 +74,3 @@ let tokenIndex = state.caretPosition - positionOffset; | ||
const handler = contextHandlers[state.currentContext.type].construct; | ||
// @ts-ignore | ||
state = handler(token, state); | ||
@@ -77,0 +77,0 @@ tokenIndex = state.caretPosition - positionOffset; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.construct = void 0; | ||
exports.construct = construct; | ||
const constants_1 = require("../../constants"); | ||
@@ -61,2 +61,1 @@ const utils_1 = require("../../utils"); | ||
} | ||
exports.construct = construct; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.construct = void 0; | ||
exports.construct = construct; | ||
const constants_1 = require("../../constants"); | ||
@@ -55,2 +55,1 @@ const utils_1 = require("../../utils"); | ||
} | ||
exports.construct = construct; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.construct = void 0; | ||
exports.construct = construct; | ||
const constants_1 = require("../../constants"); | ||
@@ -43,2 +43,1 @@ const utils_1 = require("../../utils"); | ||
} | ||
exports.construct = construct; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.construct = void 0; | ||
exports.construct = construct; | ||
const constants_1 = require("../../constants"); | ||
@@ -37,2 +37,1 @@ const utils_1 = require("../../utils"); | ||
} | ||
exports.construct = construct; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.construct = void 0; | ||
exports.construct = construct; | ||
const constants_1 = require("../../constants"); | ||
@@ -62,2 +62,1 @@ const utils_1 = require("../../utils"); | ||
} | ||
exports.construct = construct; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.construct = void 0; | ||
exports.construct = construct; | ||
const constants_1 = require("../../constants"); | ||
@@ -38,2 +38,1 @@ const utils_1 = require("../../utils"); | ||
} | ||
exports.construct = construct; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.construct = void 0; | ||
exports.construct = construct; | ||
const constants_1 = require("../../constants"); | ||
@@ -43,2 +43,1 @@ const utils_1 = require("../../utils"); | ||
} | ||
exports.construct = construct; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.construct = void 0; | ||
exports.construct = construct; | ||
const constants_1 = require("../../constants"); | ||
@@ -62,2 +62,1 @@ const utils_1 = require("../../utils"); | ||
} | ||
exports.construct = construct; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.construct = void 0; | ||
exports.construct = construct; | ||
const constants_1 = require("../../constants"); | ||
@@ -62,2 +62,1 @@ const utils_1 = require("../../utils"); | ||
} | ||
exports.construct = construct; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.construct = void 0; | ||
exports.construct = construct; | ||
const constants_1 = require("../../constants"); | ||
@@ -136,2 +136,1 @@ const utils_1 = require("../../utils"); | ||
} | ||
exports.construct = construct; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.construct = void 0; | ||
exports.construct = construct; | ||
const constants_1 = require("../../constants"); | ||
@@ -18,2 +18,1 @@ const utils_1 = require("../../utils"); | ||
} | ||
exports.construct = construct; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.construct = void 0; | ||
exports.construct = construct; | ||
const constants_1 = require("../../constants"); | ||
@@ -86,2 +86,1 @@ const utils_1 = require("../../utils"); | ||
} | ||
exports.construct = construct; |
import { ConstructTreeContextTypes } from "../constants"; | ||
import { DocumentNode } from "./node"; | ||
import { AnyContextualNode } from "./contextual-node"; | ||
export declare type ConstructTreeState<N extends AnyContextualNode> = { | ||
export type ConstructTreeState<N extends AnyContextualNode> = { | ||
caretPosition: number; | ||
@@ -6,0 +6,0 @@ currentContext: { |
import { CommentNode, DoctypeNode, ScriptTagNode, StyleTagNode, TagNode, DocumentNode, AnyNode, AttributeNode } from "./node"; | ||
declare type PartialBy<T, K extends keyof T> = Omit<T, K> & Partial<Pick<T, K>>; | ||
export declare type ContextualNode<T extends AnyNode, K extends keyof T> = PartialBy<T, K> & { | ||
type PartialBy<T, K extends keyof T> = Omit<T, K> & Partial<Pick<T, K>>; | ||
export type ContextualNode<T extends AnyNode, K extends keyof T> = PartialBy<T, K> & { | ||
parentRef?: any; | ||
}; | ||
export declare type ContextualScriptTagNode = ContextualNode<ScriptTagNode, "close" | "openStart" | "value" | "openEnd">; | ||
export declare type ContextualStyleTagNode = ContextualNode<StyleTagNode, "openStart" | "openEnd" | "value" | "close">; | ||
export declare type ContextualDoctypeNode = ContextualNode<DoctypeNode, "open" | "close">; | ||
export declare type ContextualCommentNode = ContextualNode<CommentNode, "open" | "close" | "value">; | ||
export declare type ContextualTagNode = ContextualNode<TagNode, "close" | "selfClosing" | "name" | "openEnd" | "openStart"> & { | ||
export type ContextualScriptTagNode = ContextualNode<ScriptTagNode, "close" | "openStart" | "value" | "openEnd">; | ||
export type ContextualStyleTagNode = ContextualNode<StyleTagNode, "openStart" | "openEnd" | "value" | "close">; | ||
export type ContextualDoctypeNode = ContextualNode<DoctypeNode, "open" | "close">; | ||
export type ContextualCommentNode = ContextualNode<CommentNode, "open" | "close" | "value">; | ||
export type ContextualTagNode = ContextualNode<TagNode, "close" | "selfClosing" | "name" | "openEnd" | "openStart"> & { | ||
children: Array<ContextualScriptTagNode | ContextualStyleTagNode | ContextualDoctypeNode | ContextualCommentNode | ContextualTagNode | TagNode["children"][number]>; | ||
attributes: ContextualAttributeNode[]; | ||
}; | ||
export declare type ContextualAttributeNode = ContextualNode<AttributeNode, "key">; | ||
export declare type ContextualDocumentNode = Omit<ContextualNode<DocumentNode, never>, "children"> & { | ||
export type ContextualAttributeNode = ContextualNode<AttributeNode, "key">; | ||
export type ContextualDocumentNode = Omit<ContextualNode<DocumentNode, never>, "children"> & { | ||
children: Array<Exclude<AnyContextualNode, ContextualDocumentNode> | DocumentNode["children"][number]>; | ||
}; | ||
export declare type AnyContextualNode = ContextualScriptTagNode | ContextualStyleTagNode | ContextualDoctypeNode | ContextualCommentNode | ContextualTagNode | ContextualDocumentNode; | ||
export type AnyContextualNode = ContextualScriptTagNode | ContextualStyleTagNode | ContextualDoctypeNode | ContextualCommentNode | ContextualTagNode | ContextualDocumentNode; | ||
export {}; |
@@ -11,1 +11,2 @@ export * from "./token"; | ||
export * from "./parse-result"; | ||
export * from "./token-adapter"; |
@@ -27,1 +27,2 @@ "use strict"; | ||
__exportStar(require("./parse-result"), exports); | ||
__exportStar(require("./token-adapter"), exports); |
@@ -8,3 +8,3 @@ import { NodeTypes } from "../constants"; | ||
} | ||
export declare type TextNode = SimpleNode<NodeTypes.Text>; | ||
export type TextNode = SimpleNode<NodeTypes.Text>; | ||
export interface TagNode extends BaseNode { | ||
@@ -20,5 +20,5 @@ type: NodeTypes.Tag; | ||
} | ||
export declare type OpenTagStartNode = SimpleNode<NodeTypes.OpenTagStart>; | ||
export declare type OpenTagEndNode = SimpleNode<NodeTypes.OpenTagEnd>; | ||
export declare type CloseTagNode = SimpleNode<NodeTypes.CloseTag>; | ||
export type OpenTagStartNode = SimpleNode<NodeTypes.OpenTagStart>; | ||
export type OpenTagEndNode = SimpleNode<NodeTypes.OpenTagEnd>; | ||
export type CloseTagNode = SimpleNode<NodeTypes.CloseTag>; | ||
export interface AttributeNode extends BaseNode { | ||
@@ -31,6 +31,6 @@ type: NodeTypes.Attribute; | ||
} | ||
export declare type AttributeKeyNode = SimpleNode<NodeTypes.AttributeKey>; | ||
export declare type AttributeValueNode = SimpleNode<NodeTypes.AttributeValue>; | ||
export declare type AttributeValueWrapperStartNode = SimpleNode<NodeTypes.AttributeValueWrapperStart>; | ||
export declare type AttributeValueWrapperEndNode = SimpleNode<NodeTypes.AttributeValueWrapperEnd>; | ||
export type AttributeKeyNode = SimpleNode<NodeTypes.AttributeKey>; | ||
export type AttributeValueNode = SimpleNode<NodeTypes.AttributeValue>; | ||
export type AttributeValueWrapperStartNode = SimpleNode<NodeTypes.AttributeValueWrapperStart>; | ||
export type AttributeValueWrapperEndNode = SimpleNode<NodeTypes.AttributeValueWrapperEnd>; | ||
export interface ScriptTagNode extends BaseNode { | ||
@@ -44,6 +44,6 @@ type: NodeTypes.ScriptTag; | ||
} | ||
export declare type OpenScriptTagStartNode = SimpleNode<NodeTypes.OpenScriptTagStart>; | ||
export declare type CloseScriptTagNode = SimpleNode<NodeTypes.CloseScriptTag>; | ||
export declare type OpenScriptTagEndNode = SimpleNode<NodeTypes.OpenScriptTagEnd>; | ||
export declare type ScriptTagContentNode = SimpleNode<NodeTypes.ScriptTagContent>; | ||
export type OpenScriptTagStartNode = SimpleNode<NodeTypes.OpenScriptTagStart>; | ||
export type CloseScriptTagNode = SimpleNode<NodeTypes.CloseScriptTag>; | ||
export type OpenScriptTagEndNode = SimpleNode<NodeTypes.OpenScriptTagEnd>; | ||
export type ScriptTagContentNode = SimpleNode<NodeTypes.ScriptTagContent>; | ||
export interface StyleTagNode extends BaseNode { | ||
@@ -57,6 +57,6 @@ type: NodeTypes.StyleTag; | ||
} | ||
export declare type OpenStyleTagStartNode = SimpleNode<NodeTypes.OpenStyleTagStart>; | ||
export declare type OpenStyleTagEndNode = SimpleNode<NodeTypes.OpenStyleTagEnd>; | ||
export declare type StyleTagContentNode = SimpleNode<NodeTypes.StyleTagContent>; | ||
export declare type CloseStyleTagNode = SimpleNode<NodeTypes.CloseStyleTag>; | ||
export type OpenStyleTagStartNode = SimpleNode<NodeTypes.OpenStyleTagStart>; | ||
export type OpenStyleTagEndNode = SimpleNode<NodeTypes.OpenStyleTagEnd>; | ||
export type StyleTagContentNode = SimpleNode<NodeTypes.StyleTagContent>; | ||
export type CloseStyleTagNode = SimpleNode<NodeTypes.CloseStyleTag>; | ||
export interface CommentNode extends BaseNode { | ||
@@ -68,5 +68,5 @@ type: NodeTypes.Comment; | ||
} | ||
export declare type CommentOpenNode = SimpleNode<NodeTypes.CommentOpen>; | ||
export declare type CommentCloseNode = SimpleNode<NodeTypes.CommentClose>; | ||
export declare type CommentContentNode = SimpleNode<NodeTypes.CommentContent>; | ||
export type CommentOpenNode = SimpleNode<NodeTypes.CommentOpen>; | ||
export type CommentCloseNode = SimpleNode<NodeTypes.CommentClose>; | ||
export type CommentContentNode = SimpleNode<NodeTypes.CommentContent>; | ||
export interface DoctypeNode extends BaseNode { | ||
@@ -78,4 +78,4 @@ type: NodeTypes.Doctype; | ||
} | ||
export declare type DoctypeOpenNode = SimpleNode<NodeTypes.DoctypeOpen>; | ||
export declare type DoctypeCloseNode = SimpleNode<NodeTypes.DoctypeClose>; | ||
export type DoctypeOpenNode = SimpleNode<NodeTypes.DoctypeOpen>; | ||
export type DoctypeCloseNode = SimpleNode<NodeTypes.DoctypeClose>; | ||
export interface DoctypeAttributeNode extends BaseNode { | ||
@@ -87,5 +87,5 @@ type: NodeTypes.DoctypeAttribute; | ||
} | ||
export declare type DoctypeAttributeValueNode = SimpleNode<NodeTypes.DoctypeAttributeValue>; | ||
export declare type DoctypeAttributeWrapperStartNode = SimpleNode<NodeTypes.DoctypeAttributeWrapperStart>; | ||
export declare type DoctypeAttributeWrapperEndNode = SimpleNode<NodeTypes.DoctypeAttributeWrapperEnd>; | ||
export declare type AnyNode = DocumentNode | TextNode | TagNode | OpenTagStartNode | OpenTagEndNode | CloseTagNode | AttributeNode | AttributeKeyNode | AttributeValueNode | AttributeValueWrapperStartNode | AttributeValueWrapperEndNode | ScriptTagNode | OpenScriptTagStartNode | CloseScriptTagNode | OpenScriptTagEndNode | ScriptTagContentNode | StyleTagNode | OpenStyleTagStartNode | OpenStyleTagEndNode | StyleTagContentNode | CloseStyleTagNode | CommentNode | CommentOpenNode | CommentCloseNode | CommentContentNode | DoctypeNode | DoctypeOpenNode | DoctypeCloseNode | DoctypeAttributeNode | DoctypeAttributeValueNode | DoctypeAttributeWrapperStartNode | DoctypeAttributeWrapperEndNode; | ||
export type DoctypeAttributeValueNode = SimpleNode<NodeTypes.DoctypeAttributeValue>; | ||
export type DoctypeAttributeWrapperStartNode = SimpleNode<NodeTypes.DoctypeAttributeWrapperStart>; | ||
export type DoctypeAttributeWrapperEndNode = SimpleNode<NodeTypes.DoctypeAttributeWrapperEnd>; | ||
export type AnyNode = DocumentNode | TextNode | TagNode | OpenTagStartNode | OpenTagEndNode | CloseTagNode | AttributeNode | AttributeKeyNode | AttributeValueNode | AttributeValueWrapperStartNode | AttributeValueWrapperEndNode | ScriptTagNode | OpenScriptTagStartNode | CloseScriptTagNode | OpenScriptTagEndNode | ScriptTagContentNode | StyleTagNode | OpenStyleTagStartNode | OpenStyleTagEndNode | StyleTagContentNode | CloseStyleTagNode | CommentNode | CommentOpenNode | CommentCloseNode | CommentContentNode | DoctypeNode | DoctypeOpenNode | DoctypeCloseNode | DoctypeAttributeNode | DoctypeAttributeValueNode | DoctypeAttributeWrapperStartNode | DoctypeAttributeWrapperEndNode; |
import { DocumentNode } from "./node"; | ||
import { AnyToken } from "./token"; | ||
export declare type ParseResult = { | ||
export type ParseResult = { | ||
ast: DocumentNode; | ||
tokens: AnyToken[]; | ||
}; |
@@ -1,1 +0,1 @@ | ||
export declare type Range = [number, number]; | ||
export type Range = [number, number]; |
@@ -10,2 +10,2 @@ import { TokenTypes } from "../constants"; | ||
} | ||
export declare type AnyToken = Token<TokenTypes.Text> | Token<TokenTypes.OpenTagStart> | Token<TokenTypes.OpenTagEnd> | Token<TokenTypes.CloseTag> | Token<TokenTypes.AttributeKey> | Token<TokenTypes.AttributeAssignment> | Token<TokenTypes.AttributeValueWrapperStart> | Token<TokenTypes.AttributeValue> | Token<TokenTypes.AttributeValueWrapperEnd> | Token<TokenTypes.DoctypeOpen> | Token<TokenTypes.DoctypeAttributeValue> | Token<TokenTypes.DoctypeAttributeWrapperStart> | Token<TokenTypes.DoctypeAttributeWrapperEnd> | Token<TokenTypes.DoctypeClose> | Token<TokenTypes.CommentOpen> | Token<TokenTypes.CommentContent> | Token<TokenTypes.CommentClose> | Token<TokenTypes.OpenScriptTagStart> | Token<TokenTypes.OpenScriptTagEnd> | Token<TokenTypes.ScriptTagContent> | Token<TokenTypes.CloseScriptTag> | Token<TokenTypes.OpenStyleTagStart> | Token<TokenTypes.OpenStyleTagEnd> | Token<TokenTypes.StyleTagContent> | Token<TokenTypes.CloseStyleTag>; | ||
export type AnyToken = Token<TokenTypes.Text> | Token<TokenTypes.OpenTagStart> | Token<TokenTypes.OpenTagEnd> | Token<TokenTypes.CloseTag> | Token<TokenTypes.AttributeKey> | Token<TokenTypes.AttributeAssignment> | Token<TokenTypes.AttributeValueWrapperStart> | Token<TokenTypes.AttributeValue> | Token<TokenTypes.AttributeValueWrapperEnd> | Token<TokenTypes.DoctypeOpen> | Token<TokenTypes.DoctypeAttributeValue> | Token<TokenTypes.DoctypeAttributeWrapperStart> | Token<TokenTypes.DoctypeAttributeWrapperEnd> | Token<TokenTypes.DoctypeClose> | Token<TokenTypes.CommentOpen> | Token<TokenTypes.CommentContent> | Token<TokenTypes.CommentClose> | Token<TokenTypes.OpenScriptTagStart> | Token<TokenTypes.OpenScriptTagEnd> | Token<TokenTypes.ScriptTagContent> | Token<TokenTypes.CloseScriptTag> | Token<TokenTypes.OpenStyleTagStart> | Token<TokenTypes.OpenStyleTagEnd> | Token<TokenTypes.StyleTagContent> | Token<TokenTypes.CloseStyleTag>; |
import { TokenizerContextTypes } from "../constants"; | ||
declare type ContextParams = { | ||
import { AnyToken } from "./token"; | ||
type ContextParams = { | ||
[TokenizerContextTypes.AttributeValueWrapped]?: { | ||
@@ -16,3 +17,3 @@ wrapper: string; | ||
}; | ||
export declare type TokenizerState = { | ||
export type TokenizerState = { | ||
currentContext: TokenizerContextTypes; | ||
@@ -25,3 +26,6 @@ contextParams: ContextParams; | ||
source: string; | ||
tokens: { | ||
push(token: AnyToken): void; | ||
}; | ||
}; | ||
export {}; |
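
This `TokenizerState` change is the type-level counterpart of the handler diffs above: handlers no longer receive a `tokens: AnyToken[]` argument and instead emit through the `push` sink on the state, which is where `tokenize` applies the token adapter. A trimmed illustration of the pattern (the types are simplified stand-ins and the handler body is illustrative only, not code from the package):

```ts
type Range = [number, number];
interface SketchToken { type: string; value: string; range: Range; loc: unknown; }

// Only the fields needed for the illustration; the real TokenizerState carries more.
interface SketchState {
  accumulatedContent: string;
  decisionBuffer: string;
  tokens: { push(token: SketchToken): void };
}

// 0.0.9: parse(chars, state, tokens) pushed into the array it was handed.
// 0.0.10: parse(chars, state) pushes into the sink that now lives on the state.
function emitAccumulatedText(state: SketchState, range: Range, loc: unknown): void {
  state.tokens.push({
    type: "Text",
    value: state.accumulatedContent + state.decisionBuffer,
    range,
    loc,
  });
}
```
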
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.calculateTokenCharactersRange = void 0; | ||
exports.calculateTokenCharactersRange = calculateTokenCharactersRange; | ||
function calculateTokenCharactersRange(state, { keepBuffer }) { | ||
@@ -17,2 +17,1 @@ const startPosition = state.caretPosition - | ||
} | ||
exports.calculateTokenCharactersRange = calculateTokenCharactersRange; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.calculateTokenLocation = void 0; | ||
exports.calculateTokenLocation = calculateTokenLocation; | ||
const get_line_info_1 = require("./get-line-info"); | ||
@@ -11,2 +11,1 @@ function calculateTokenLocation(source, range) { | ||
} | ||
exports.calculateTokenLocation = calculateTokenLocation; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.calculateTokenPosition = void 0; | ||
exports.calculateTokenPosition = calculateTokenPosition; | ||
const calculate_token_characters_range_1 = require("./calculate-token-characters-range"); | ||
@@ -14,2 +14,1 @@ const calculate_token_location_1 = require("./calculate-token-location"); | ||
} | ||
exports.calculateTokenPosition = calculateTokenPosition; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.clearParent = void 0; | ||
exports.clearParent = clearParent; | ||
function clearParent(ast) { | ||
@@ -15,2 +15,1 @@ const cleanAst = ast; | ||
} | ||
exports.clearParent = clearParent; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.cloneLocation = void 0; | ||
exports.cloneLocation = cloneLocation; | ||
function cloneLocation(loc) { | ||
@@ -16,2 +16,1 @@ return { | ||
} | ||
exports.cloneLocation = cloneLocation; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.cloneRange = void 0; | ||
exports.cloneRange = cloneRange; | ||
function cloneRange(range) { | ||
return [range[0], range[1]]; | ||
} | ||
exports.cloneRange = cloneRange; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.createNodeFrom = void 0; | ||
exports.createNodeFrom = createNodeFrom; | ||
const clone_location_1 = require("./clone-location"); | ||
@@ -16,2 +16,1 @@ const clone_range_1 = require("./clone-range"); | ||
} | ||
exports.createNodeFrom = createNodeFrom; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.first = void 0; | ||
exports.first = first; | ||
function first(items) { | ||
return items[0]; | ||
} | ||
exports.first = first; |
"use strict"; | ||
//https://github.com/acornjs/acorn/blob/master/acorn/src/whitespace.js | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.getLineInfo = void 0; | ||
exports.getLineInfo = getLineInfo; | ||
function isNewLine(code) { | ||
@@ -27,2 +27,1 @@ return code === 10 || code === 13 || code === 0x2028 || code === 0x2029; | ||
} | ||
exports.getLineInfo = getLineInfo; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.initAttributesIfNone = exports.initChildrenIfNone = void 0; | ||
exports.initChildrenIfNone = initChildrenIfNone; | ||
exports.initAttributesIfNone = initAttributesIfNone; | ||
function initChildrenIfNone(node) { | ||
@@ -10,3 +11,2 @@ /* istanbul ignore next */ | ||
} | ||
exports.initChildrenIfNone = initChildrenIfNone; | ||
function initAttributesIfNone(node) { | ||
@@ -18,2 +18,1 @@ /* istanbul ignore next */ | ||
} | ||
exports.initAttributesIfNone = initAttributesIfNone; |
@@ -1,1 +0,1 @@ | ||
export declare function isWhitespace(char: string): boolean; | ||
export declare function isWhitespace(char: string): char is " " | "\n" | "\t" | "\r"; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.isWhitespace = void 0; | ||
exports.isWhitespace = isWhitespace; | ||
function isWhitespace(char) { | ||
return char === " " || char === "\n" || char === "\t" || char === "\r"; | ||
} | ||
exports.isWhitespace = isWhitespace; |
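
The `isWhitespace` declaration is now a type guard, so a successful check narrows the argument to the four whitespace literals. A small illustration (the `declare` line mirrors the new `.d.ts` above; `classify` is a made-up caller):

```ts
declare function isWhitespace(char: string): char is " " | "\n" | "\t" | "\r";

function classify(char: string): "whitespace" | "other" {
  if (isWhitespace(char)) {
    // Inside this branch, char is narrowed to " " | "\n" | "\t" | "\r".
    return "whitespace";
  }
  return "other";
}
```
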
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.last = void 0; | ||
exports.last = last; | ||
function last(items) { | ||
return items[items.length - 1]; | ||
} | ||
exports.last = last; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parseCloseTagName = void 0; | ||
exports.parseCloseTagName = parseCloseTagName; | ||
const constants_1 = require("../constants"); | ||
@@ -13,2 +13,1 @@ function parseCloseTagName(closeTagTokenContent) { | ||
} | ||
exports.parseCloseTagName = parseCloseTagName; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parseOpenTagName = void 0; | ||
exports.parseOpenTagName = parseOpenTagName; | ||
const constants_1 = require("../constants"); | ||
@@ -13,2 +13,1 @@ function parseOpenTagName(openTagStartTokenContent) { | ||
} | ||
exports.parseOpenTagName = parseOpenTagName; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.updateNodeEnd = void 0; | ||
exports.updateNodeEnd = updateNodeEnd; | ||
function updateNodeEnd(node, token) { | ||
@@ -8,2 +8,1 @@ node.range[1] = token.range[1]; | ||
} | ||
exports.updateNodeEnd = updateNodeEnd; |
{ | ||
"name": "es-html-parser", | ||
"version": "0.0.9", | ||
"version": "0.0.10", | ||
"main": "dist/index.js", | ||
@@ -13,3 +13,3 @@ "license": "MIT", | ||
"check:ts": "tsc --noEmit", | ||
"check:format": "prettier --list-different .", | ||
"check:format": "prettier . --check", | ||
"prepublish": "yarn check:lint && yarn check:ts && yarn check:format && yarn build" | ||
@@ -26,3 +26,3 @@ }, | ||
"ts-jest": "^29.1.1", | ||
"typescript": "^4.8.2" | ||
"typescript": "^5.6.3" | ||
}, | ||
@@ -33,3 +33,4 @@ "files": [ | ||
"dist" | ||
] | ||
], | ||
"packageManager": "yarn@4.0.2" | ||
} |
@@ -11,2 +11,5 @@ # ES HTML Parser | ||
</a> | ||
<a href="https://www.npmjs.com/package/es-html-parser"> | ||
<img src="https://img.shields.io/npm/dw/es-html-parser"/> | ||
</a> | ||
</p> | ||
@@ -61,3 +64,3 @@ | ||
```ts | ||
parse(html: string): ParseResult; | ||
parse(html: string, options?: Options): ParseResult; | ||
``` | ||
@@ -68,2 +71,4 @@ | ||
- `html`: HTML string to parse. | ||
- `options (optional)` | ||
- `tokenAdapter`: An optional adapter used to adjust the range and location information attached to the produced tokens. | ||
@@ -70,0 +75,0 @@ **Returns** |
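
The README addition documents the new `options` parameter. A hedged usage sketch passing a custom `tokenAdapter` (the adapter shape follows the `finalizeRange`/`finalizeLocation` calls in the `dist/tokenize.js` diff above; the 100-character offset is an arbitrary illustration):

```ts
import { parse } from "es-html-parser";

const offset = 100; // pretend the HTML snippet starts at character 100 of a larger file

const { ast, tokens } = parse("<p>Hello</p>", {
  tokenAdapter: {
    // Shift every token range by the offset; keep line/column locations untouched.
    finalizeRange: (token) => [token.range[0] + offset, token.range[1] + offset],
    finalizeLocation: (token) => token.loc,
  },
});
```
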