@johanneslumpe/basic-lexer
Comparing version 0.1.0 to 0.2.0
@@ -1,1 +0,1 @@
-export { Lexer, ILexToken, EOS } from './lexer';
+export { Lexer, IBaseLexToken, EOS } from './lexer';
 export declare const EOS: unique symbol;
-export interface ILexToken<T> {
+export interface IBaseLexToken<T, D> {
     /**
@@ -11,4 +11,17 @@ * The lexed value of the token
     type: T;
+    /**
+     * The position where token extraction started
+     */
+    startPos: number;
+    /**
+     * The position where token extraction ended.
+     * This is the position excluding the token.
+     */
+    endPos: number;
+    /**
+     * Additional custom meta data
+     */
+    data?: D;
 }
-export declare class Lexer<TokenType> {
+export declare class Lexer<TokenTypes, AdditionalTokenData = {}> {
     /**
@@ -58,3 +71,3 @@ * The current reading position
      */
-    readonly emittedTokens: ILexToken<TokenType>[];
+    readonly emittedTokens: IBaseLexToken<TokenTypes, AdditionalTokenData>[];
     /**
@@ -103,3 +116,3 @@ * Increases the current bracket depth.
      */
-    lookBehind(): ILexToken<TokenType> | undefined;
+    lookBehind(): IBaseLexToken<TokenTypes, AdditionalTokenData> | undefined;
     /**
@@ -111,9 +124,9 @@ * Emits a token for the passed in type, based on the
      */
-    emit(type: TokenType): void;
+    emit(type: TokenTypes, tokenData?: AdditionalTokenData): void;
     /**
      * Pushes an error token onto the tokens array
      * @param type error type
-     * @param message error messagde
+     * @param message error message
      */
-    emitError(type: TokenType, message: string): void;
+    emitError(type: TokenTypes, message: string): void;
     /**
@@ -129,3 +142,3 @@ * Advances the current position as long as `predicate` returns true
      */
-    lookBehindForTypes(...types: TokenType[]): ILexToken<TokenType> | undefined;
+    lookBehindForTypes(...types: TokenTypes[]): IBaseLexToken<TokenTypes, AdditionalTokenData> | undefined;
 }
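
Taken together, the declaration changes above rename ILexToken to IBaseLexToken, add startPos, endPos and data to the token shape, and give Lexer a second type parameter for per-token metadata. A minimal sketch of how the 0.2.0 API might be used, based only on the declarations shown in this diff; the token enum, the metadata interface and the lexer instance are hypothetical:

import { IBaseLexToken, Lexer } from '@johanneslumpe/basic-lexer';

// Hypothetical token types and per-token metadata shape, for illustration only.
enum MyTokenType {
  Ident = 'Ident',
  Number = 'Number',
}

interface NumberData {
  parsed: number;
}

// The new second type parameter types the optional `data` carried by each token.
type NumberAwareLexer = Lexer<MyTokenType, NumberData>;

// How the lexer instance is constructed is not part of this diff, so it is only declared here.
declare const lexer: NumberAwareLexer;

// emit() now accepts an optional tokenData argument typed as NumberData.
lexer.emit(MyTokenType.Number, { parsed: 42 });

// Emitted tokens expose the new startPos/endPos fields and the optional data.
const last: IBaseLexToken<MyTokenType, NumberData> | undefined = lexer.lookBehind();
if (last !== undefined) {
  console.log(last.type, last.value, last.startPos, last.endPos, last.data);
}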
@@ -158,7 +158,12 @@ export var EOS = Symbol('EOS');
      */
-    Lexer.prototype.emit = function (type) {
-        this.tokens.push({
+    Lexer.prototype.emit = function (type, tokenData) {
+        var endPos = this.pos - this.start;
+        var t = {
+            data: tokenData,
+            endPos: this.pos,
+            startPos: this.start,
             type: type,
-            value: this.str.substr(this.start, this.pos - this.start),
-        });
+            value: this.str.substr(this.start, endPos),
+        };
+        this.tokens.push(t);
         this.start = this.pos;
@@ -169,6 +174,8 @@ };
      * @param type error type
-     * @param message error messagde
+     * @param message error message
      */
     Lexer.prototype.emitError = function (type, message) {
         this.tokens.push({
+            endPos: this.pos,
+            startPos: this.start,
             type: type,
@@ -175,0 +182,0 @@ value: message,
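
The compiled emit shown above now records the start and end positions plus any metadata on the token it pushes. Purely as an illustration of the resulting token shape, assuming emit() were called with this.start === 10 and this.pos === 12 over a source string containing "42" at that range (the values below are made up):

import { IBaseLexToken } from '@johanneslumpe/basic-lexer';

const example: IBaseLexToken<'Number', { parsed: number }> = {
  data: { parsed: 42 },  // tokenData argument forwarded by emit()
  endPos: 12,            // this.pos, the position just past the lexed value
  startPos: 10,          // this.start, where extraction began
  type: 'Number',
  value: '42',           // this.str.substr(this.start, this.pos - this.start)
};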
@@ -1,1 +0,1 @@
-export { Lexer, ILexToken, EOS } from './lexer';
+export { Lexer, IBaseLexToken, EOS } from './lexer';
 export declare const EOS: unique symbol;
-export interface ILexToken<T> {
+export interface IBaseLexToken<T, D> {
     /**
@@ -11,4 +11,17 @@ * The lexed value of the token
     type: T;
+    /**
+     * The position where token extraction started
+     */
+    startPos: number;
+    /**
+     * The position where token extraction ended.
+     * This is the position excluding the token.
+     */
+    endPos: number;
+    /**
+     * Additional custom meta data
+     */
+    data?: D;
 }
-export declare class Lexer<TokenType> {
+export declare class Lexer<TokenTypes, AdditionalTokenData = {}> {
     /**
@@ -58,3 +71,3 @@ * The current reading position
      */
-    readonly emittedTokens: ILexToken<TokenType>[];
+    readonly emittedTokens: IBaseLexToken<TokenTypes, AdditionalTokenData>[];
     /**
@@ -103,3 +116,3 @@ * Increases the current bracket depth.
      */
-    lookBehind(): ILexToken<TokenType> | undefined;
+    lookBehind(): IBaseLexToken<TokenTypes, AdditionalTokenData> | undefined;
     /**
@@ -111,9 +124,9 @@ * Emits a token for the passed in type, based on the
      */
-    emit(type: TokenType): void;
+    emit(type: TokenTypes, tokenData?: AdditionalTokenData): void;
     /**
      * Pushes an error token onto the tokens array
      * @param type error type
-     * @param message error messagde
+     * @param message error message
      */
-    emitError(type: TokenType, message: string): void;
+    emitError(type: TokenTypes, message: string): void;
     /**
@@ -129,3 +142,3 @@ * Advances the current position as long as `predicate` returns true
      */
-    lookBehindForTypes(...types: TokenType[]): ILexToken<TokenType> | undefined;
+    lookBehindForTypes(...types: TokenTypes[]): IBaseLexToken<TokenTypes, AdditionalTokenData> | undefined;
 }
@@ -160,7 +160,12 @@ "use strict";
      */
-    Lexer.prototype.emit = function (type) {
-        this.tokens.push({
+    Lexer.prototype.emit = function (type, tokenData) {
+        var endPos = this.pos - this.start;
+        var t = {
+            data: tokenData,
+            endPos: this.pos,
+            startPos: this.start,
             type: type,
-            value: this.str.substr(this.start, this.pos - this.start),
-        });
+            value: this.str.substr(this.start, endPos),
+        };
+        this.tokens.push(t);
         this.start = this.pos;
@@ -171,6 +176,8 @@ };
      * @param type error type
-     * @param message error messagde
+     * @param message error message
      */
     Lexer.prototype.emitError = function (type, message) {
         this.tokens.push({
+            endPos: this.pos,
+            startPos: this.start,
             type: type,
@@ -177,0 +184,0 @@ value: message,
 {
   "name": "@johanneslumpe/basic-lexer",
-  "version": "0.1.0",
+  "version": "0.2.0",
   "description": "A generic lexer base class",
@@ -5,0 +5,0 @@ "main": "lib/index.js",