@dbpath/parser
Comparing version 0.3.13 to 0.3.14
@@ -22,4 +22,5 @@ import { Token } from "./tokeniser";
 export declare function parseCommaSeparated<C extends ParserContext, R>(c: C, comma: string, parser: Parser<C, R>): ResultAndContext<C, R[]>;
+export declare function brackets<C extends ParserContext, R>(c: C, open: string, parser: Parser<C, R>, close: string): ResultAndContext<C, R>;
 export declare function parseBracketedCommaSeparated<C extends ParserContext, R>(c: C, open: string, comma: string, parser: Parser<C, R>, close: string): ResultAndContext<C, R[]>;
 export declare function parserErrorMessage(s: string, c: ParserContext, errors: string[]): string[];
 /** To help tests */

 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.parserErrorMessage = exports.parseBracketedCommaSeparated = exports.parseCommaSeparated = exports.gotForError = exports.nextChar = exports.foldNextChar = exports.isNextChar = exports.liftError = exports.lift = exports.validateAndReturn = exports.mapParser = exports.identifier = void 0;
+exports.parserErrorMessage = exports.parseBracketedCommaSeparated = exports.brackets = exports.parseCommaSeparated = exports.gotForError = exports.nextChar = exports.foldNextChar = exports.isNextChar = exports.liftError = exports.lift = exports.validateAndReturn = exports.mapParser = exports.identifier = void 0;
 const identifier = (type) => (context) => {
@@ -70,2 +70,6 @@ var pos = context.pos;
 exports.parseCommaSeparated = parseCommaSeparated;
+function brackets(c, open, parser, close) {
+    return (0, exports.mapParser)(nextChar(c, open), c => (0, exports.mapParser)(parser(c), (c, r) => (0, exports.mapParser)(nextChar(c, close), c => lift(c, r))));
+}
+exports.brackets = brackets;
 function parseBracketedCommaSeparated(c, open, comma, parser, close) {
@@ -72,0 +76,0 @@ return (0, exports.foldNextChar)(c, open, c => (0, exports.mapParser)(nextChar(c, open), (c) => (0, exports.mapParser)(parseCommaSeparated(c, comma, parser), (c, ids) => (0, exports.mapParser)(nextChar(c, close), c => {
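The new brackets combinator runs an inner parser between an opening and a closing token and keeps the inner result, and parseBracketedCommaSeparated composes it with parseCommaSeparated to read a bracketed, comma-separated list. A minimal usage sketch in TypeScript follows; it assumes that brackets, parseBracketedCommaSeparated, identifier and the ParserContext type are re-exported from the package root, and that identifier("string") is a suitable inner Parser, since the diff only shows the declarations and the compiled output:

    import { brackets, parseBracketedCommaSeparated, identifier, ParserContext } from "@dbpath/parser";

    // Contexts positioned at the inputs "[a]" and "[a,b,c]" respectively; how a
    // ParserContext is built is not shown in this diff, so they are only declared here.
    declare const atSingle: ParserContext;
    declare const atList: ParserContext;

    // brackets: match "[", run the inner parser, match "]", keep the inner parser's result.
    const one = brackets(atSingle, "[", identifier("string"), "]");

    // parseBracketedCommaSeparated: "[" item ("," item)* "]", collecting the inner
    // results into an array and advancing the returned context past the closing "]".
    const many = parseBracketedCommaSeparated(atList, "[", ",", identifier("string"), "]");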
@@ -5,2 +5,3 @@ export interface TokeniserContext {
     thisToken: string;
+    specials: string;
 }
@@ -28,2 +29,2 @@ export interface CharToken {
 export declare function tokeniseNext(context: TokeniserContext): TokenAndContext;
-export declare const tokenise: (path: string) => Token[];
+export declare const tokenise: (specials: string) => (path: string) => Token[];

 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.tokenise = exports.tokeniseNext = void 0;
-const specials = "[]{},.=:`";
 function tokeniseNext(context) {
+    const specials = context.specials;
     const initialPos = context.pos;
@@ -30,7 +30,8 @@ var pos = context.pos;
 exports.tokeniseNext = tokeniseNext;
-const tokenise = (path) => {
+const tokenise = (specials) => (path) => {
     var context = {
         path,
         pos: 0,
-        thisToken: ''
+        thisToken: '',
+        specials
     };
@@ -37,0 +38,0 @@ const tokens = [];
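The tokeniser no longer hard-codes its special characters: tokenise is now curried over a specials string, and the tokeniser context carries that string along. A migration sketch, assuming tokenise is re-exported from the package root (the diff only shows the compiled dist files); the specials value is the one the package's own tests use below:

    import { tokenise } from "@dbpath/parser";

    // 0.3.13: const tokens = tokenise("[a]b{c}");
    // 0.3.14: the special characters are supplied explicitly first.
    const tokens = tokenise("[]{},.=:`")("[a]b{c}");
    // As in the tests below, this yields char tokens for "[", "]", "{" and "}"
    // and string tokens for "a", "b" and "c", each carrying its position.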
"use strict"; | ||
 Object.defineProperty(exports, "__esModule", { value: true });
 const tokeniser_1 = require("./tokeniser");
+const specials = "[]{},.=:`";
 describe('tokeniserNext', function () {
@@ -9,3 +10,4 @@ function makeContext(thisBit) {
         pos: 6,
-        thisToken: ''
+        thisToken: '',
+        specials
     };
@@ -54,6 +56,6 @@ }
     it("for string", () => {
-        expect((0, tokeniser_1.tokenise)("hello")).toEqual([{ type: 'string', value: 'hello', pos: 0 }]);
+        expect((0, tokeniser_1.tokenise)(specials)("hello")).toEqual([{ type: 'string', value: 'hello', pos: 0 }]);
     });
     it("for string,string", () => {
-        expect((0, tokeniser_1.tokenise)("hello,world")).toEqual([
+        expect((0, tokeniser_1.tokenise)(specials)("hello,world")).toEqual([
             { "pos": 0, "type": "string", "value": "hello" },
@@ -65,3 +67,3 @@ { "pos": 5, "type": "char", "value": "," },
     it("for escaped", () => {
-        expect((0, tokeniser_1.tokenise)("`hello,world`[a]")).toEqual([
+        expect((0, tokeniser_1.tokenise)(specials)("`hello,world`[a]")).toEqual([
             { "pos": 0, "type": "string", "value": "`hello,world`" },
@@ -74,3 +76,3 @@ { "pos": 13, "type": "char", "value": "[" },
     it("for a few characters and strings", () => {
-        expect((0, tokeniser_1.tokenise)("[a]b{c}")).toEqual([
+        expect((0, tokeniser_1.tokenise)(specials)("[a]b{c}")).toEqual([
             { "pos": 0, "type": "char", "value": "[" },
@@ -86,3 +88,3 @@ { "pos": 1, "type": "string", "value": "a" },
     it("for a mixed set", () => {
-        expect((0, tokeniser_1.tokenise)("[a]b{`c[]{},`}")).toEqual([
+        expect((0, tokeniser_1.tokenise)(specials)("[a]b{`c[]{},`}")).toEqual([
             { "pos": 0, "type": "char", "value": "[" },
@@ -89,0 +91,0 @@ { "pos": 1, "type": "string", "value": "a" },
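Because specials is now a parameter rather than a module constant, callers that tokenise many paths can partially apply tokenise once and reuse the resulting function, which is effectively what the updated tests do with their shared specials constant. A sketch under the same package-root export assumption as above:

    import { tokenise } from "@dbpath/parser";

    const specials = "[]{},.=:`";              // copied from the test file above
    const tok = tokenise(specials);            // (path: string) => Token[]

    const plain = tok("hello,world");          // "hello", ",", "world"
    const escaped = tok("`hello,world`[a]");   // the backtick-escaped part stays a single string token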
 {
   "name": "@dbpath/parser",
   "description": "",
-  "version": "0.3.13",
+  "version": "0.3.14",
   "main": "dist/index",
@@ -6,0 +6,0 @@ "types": "dist/index",

License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package