Socket
Socket
Sign in · Demo · Install

lezer

Package Overview
Dependencies
Maintainers
1
Versions
37
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

lezer - npm Package Compare versions

Comparing version 0.7.1 to 0.8.0

.rpt2_cache/rpt2_7701c6e64bb6c56ec842cf648d1ad8f417be9894/code/cache/4303de26f2a9ece936f3b4d0c72b26bfb52ad8d3

10

CHANGELOG.md

@@ -0,1 +1,11 @@

## 0.8.0 (2020-02-03)
### Breaking changes
The serialized parser format changed.
### New features
Add support for multiple `@top` rules through the `top` parse option.
## 0.7.1 (2020-01-23)

@@ -2,0 +12,0 @@

3

dist/constants.d.ts

@@ -20,4 +20,3 @@ export declare const enum Action {

export declare const enum Term {
Err = 0,
Top = 1
Err = 0
}

@@ -24,0 +23,0 @@ export declare const enum Seq {

@@ -99,5 +99,5 @@ 'use strict';

/// @internal
Stack.start = function (cx, pos) {
Stack.start = function (cx, state, pos) {
if (pos === void 0) { pos = 0; }
return new Stack(cx, [], cx.parser.states[0], pos, pos, 0, [], 0, null);
return new Stack(cx, [], state, pos, pos, 0, [], 0, null);
};

@@ -366,3 +366,3 @@ // Push a state onto the stack, tracking its start position as well

group: this.cx.parser.group,
topID: 1 /* Top */,
topID: this.cx.topTerm,
maxBufferLength: this.cx.maxBufferLength,

@@ -754,3 +754,3 @@ reused: this.cx.reused,

var StackContext = /** @class */ (function () {
function StackContext(parser, maxBufferLength, input, parent, wrapType // Set to -2 when a stack descending from this nesting event finishes
function StackContext(parser, maxBufferLength, input, topTerm, parent, wrapType // Set to -2 when a stack descending from this nesting event finishes
) {

@@ -762,2 +762,3 @@ if (parent === void 0) { parent = null; }

this.input = input;
this.topTerm = topTerm;
this.parent = parent;

@@ -777,3 +778,3 @@ this.wrapType = wrapType;

function ParseContext(parser, input, _a) {
var _b = _a === void 0 ? {} : _a, _c = _b.cache, cache = _c === void 0 ? undefined : _c, _d = _b.strict, strict = _d === void 0 ? false : _d, _e = _b.bufferLength, bufferLength = _e === void 0 ? lezerTree.DefaultBufferLength : _e;
var _b = _a === void 0 ? {} : _a, _c = _b.cache, cache = _c === void 0 ? undefined : _c, _d = _b.strict, strict = _d === void 0 ? false : _d, _e = _b.bufferLength, bufferLength = _e === void 0 ? lezerTree.DefaultBufferLength : _e, _f = _b.top, top = _f === void 0 ? undefined : _f;
// The position to which the parse has advanced.

@@ -783,3 +784,6 @@ this.pos = 0;

this.tokenCount = 0;
this.stacks = [Stack.start(new StackContext(parser, bufferLength, input))];
var topInfo = parser.topRules[top || Object.keys(parser.topRules)[0]];
if (!topInfo)
throw new RangeError("Invalid top rule name " + top);
this.stacks = [Stack.start(new StackContext(parser, bufferLength, input, topInfo[1]), topInfo[0])];
this.strict = strict;

@@ -904,3 +908,3 @@ this.cache = cache ? new CacheCursor(cache) : null;

var _b = parser.nested[nest], grammar = _b.grammar, endToken = _b.end, placeholder = _b.placeholder;
var filterEnd = undefined, parseNode = null, nested = void 0, wrapType = undefined;
var filterEnd = undefined, parseNode = null, nested = void 0, top = void 0, wrapType = undefined;
if (typeof grammar == "function") {

@@ -910,3 +914,3 @@ var query = grammar(input, stack);

break maybeNest;
(parseNode = query.parseNode, nested = query.parser, filterEnd = query.filterEnd, wrapType = query.wrapType);
(parseNode = query.parseNode, nested = query.parser, top = query.top, filterEnd = query.filterEnd, wrapType = query.wrapType);
}

@@ -928,3 +932,4 @@ else {

else {
var newStack = Stack.start(new StackContext(nested, stack.cx.maxBufferLength, clippedInput, stack, wrapType), stack.pos);
var topInfo = nested.topRules[top || Object.keys(nested.topRules)[0]];
var newStack = Stack.start(new StackContext(nested, stack.cx.maxBufferLength, clippedInput, topInfo[1], stack, wrapType), topInfo[0], stack.pos);
if (verbose)

@@ -1064,2 +1069,4 @@ console.log(base + newStack + " (nested)");

tokenizers,
/// Maps top rule names to [state ID, top term ID] pairs.
topRules,
/// Metadata about nested grammars used in this grammar @internal

@@ -1088,2 +1095,3 @@ nested,

this.tokenizers = tokenizers;
this.topRules = topRules;
this.nested = nested;

@@ -1204,3 +1212,3 @@ this.specializeTable = specializeTable;

Parser.prototype.withNested = function (spec) {
return new Parser(this.states, this.data, this.goto, this.group, this.minRepeatTerm, this.tokenizers, this.nested.map(function (obj) {
return new Parser(this.states, this.data, this.goto, this.group, this.minRepeatTerm, this.tokenizers, this.topRules, this.nested.map(function (obj) {
if (!Object.prototype.hasOwnProperty.call(spec, obj.name))

@@ -1220,3 +1228,3 @@ return obj;

}
return new Parser(this.states, this.data, this.goto, (_a = this.group).extend.apply(_a, props), this.minRepeatTerm, this.tokenizers, this.nested, this.specializeTable, this.specializations, this.tokenPrecTable, this.termNames);
return new Parser(this.states, this.data, this.goto, (_a = this.group).extend.apply(_a, props), this.minRepeatTerm, this.tokenizers, this.topRules, this.nested, this.specializeTable, this.specializations, this.tokenPrecTable, this.termNames);
};

@@ -1266,3 +1274,3 @@ /// Returns the name associated with a given term. This will only

var group = new lezerTree.NodeGroup(nodeNames.map(function (name, i) { return new lezerTree.NodeType(name, nodeProps[i], i); }));
return new Parser(decodeArray(spec.states, Uint32Array), decodeArray(spec.stateData), decodeArray(spec.goto), group, minRepeatTerm, spec.tokenizers.map(function (value) { return typeof value == "number" ? new TokenGroup(tokenArray, value) : value; }), (spec.nested || []).map(function (_a) {
return new Parser(decodeArray(spec.states, Uint32Array), decodeArray(spec.stateData), decodeArray(spec.goto), group, minRepeatTerm, spec.tokenizers.map(function (value) { return typeof value == "number" ? new TokenGroup(tokenArray, value) : value; }), spec.topRules, (spec.nested || []).map(function (_a) {
var name = _a[0], grammar = _a[1], endToken = _a[2], placeholder = _a[3];

@@ -1269,0 +1277,0 @@ return ({ name: name, grammar: grammar, end: new TokenGroup(decodeArray(endToken), 0), placeholder: placeholder });

@@ -7,2 +7,3 @@ import { Stack } from "./stack";

parser?: Parser;
top?: string;
stay?: boolean;

@@ -32,2 +33,3 @@ parseNode?: (input: InputStream, start: number) => Tree;

bufferLength?: number;
top?: string;
}

@@ -38,2 +40,3 @@ export declare class StackContext {

readonly input: InputStream;
readonly topTerm: number;
readonly parent: Stack | null;

@@ -43,3 +46,3 @@ wrapType: number;

tokens: TokenCache;
constructor(parser: Parser, maxBufferLength: number, input: InputStream, parent?: Stack | null, wrapType?: number);
constructor(parser: Parser, maxBufferLength: number, input: InputStream, topTerm: number, parent?: Stack | null, wrapType?: number);
}

@@ -53,3 +56,3 @@ export declare class ParseContext {

private strict;
constructor(parser: Parser, input: InputStream, { cache, strict, bufferLength }?: ParseOptions);
constructor(parser: Parser, input: InputStream, { cache, strict, bufferLength, top }?: ParseOptions);
putStack(stack: Stack): void;

@@ -71,2 +74,5 @@ advance(): Tree;

readonly tokenizers: readonly Tokenizer[];
readonly topRules: {
[name: string]: [number, number];
};
readonly nested: readonly {

@@ -89,3 +95,5 @@ name: string;

private nextStateCache;
constructor(states: Readonly<Uint32Array>, data: Readonly<Uint16Array>, goto: Readonly<Uint16Array>, group: NodeGroup, minRepeatTerm: number, tokenizers: readonly Tokenizer[], nested: readonly {
constructor(states: Readonly<Uint32Array>, data: Readonly<Uint16Array>, goto: Readonly<Uint16Array>, group: NodeGroup, minRepeatTerm: number, tokenizers: readonly Tokenizer[], topRules: {
[name: string]: [number, number];
}, nested: readonly {
name: string;

@@ -126,2 +134,5 @@ grammar: NestedGrammar;

tokenizers: (Tokenizer | number)[];
topRules: {
[name: string]: [number, number];
};
nested?: [string, null | NestedGrammar, string, number][];

@@ -128,0 +139,0 @@ specializeTable: number;

@@ -15,3 +15,3 @@ import { StackContext } from "./parse";

toString(): string;
static start(cx: StackContext, pos?: number): Stack;
static start(cx: StackContext, state: number, pos?: number): Stack;
pushState(state: number, start: number): void;

@@ -18,0 +18,0 @@ reduce(action: number): void;

{
"name": "lezer",
"version": "0.7.1",
"version": "0.8.0",
"description": "Incremental parser",

@@ -9,2 +9,6 @@ "main": "dist/index.js",

"license": "MIT",
"repository": {
"type" : "git",
"url" : "https://github.com/lezer-parser/lezer.git"
},
"devDependencies": {

@@ -18,3 +22,3 @@ "rollup": "^1.6.0",

"dependencies": {
"lezer-tree": "^0.7.1"
"lezer-tree": "^0.8.0"
},

@@ -21,0 +25,0 @@ "scripts": {

@@ -71,6 +71,5 @@ // This file defines some constants that are needed both in this

export const enum Term {
// The values of the error and top terms are hard coded, the others
// are allocated per grammar.
Err = 0,
Top = 1
// The value of the error term is hard coded, the others are
// allocated per grammar.
Err = 0
}

@@ -77,0 +76,0 @@

@@ -23,2 +23,5 @@ import {Stack, Recover} from "./stack"

parser?: Parser
/// When `parser` is given, this can be used to configure which top
/// rule to parse with it.
top?: string
/// This being true means that the outer grammar should use

@@ -197,3 +200,6 @@ /// the fallback expression provided for the nesting to parse the

/// tree. Defaults to 1024.
bufferLength?: number
bufferLength?: number,
/// The name of the @top declaration to parse from. If not
/// specified, the first @top declaration is used.
top?: string
}

@@ -208,2 +214,3 @@

readonly input: InputStream,
readonly topTerm: number,
readonly parent: Stack | null = null,

@@ -234,4 +241,6 @@ public wrapType: number = -1 // Set to -2 when a stack descending from this nesting event finishes

input: InputStream,
{cache = undefined, strict = false, bufferLength = DefaultBufferLength}: ParseOptions = {}) {
this.stacks = [Stack.start(new StackContext(parser, bufferLength, input))]
{cache = undefined, strict = false, bufferLength = DefaultBufferLength, top = undefined}: ParseOptions = {}) {
let topInfo = parser.topRules[top || Object.keys(parser.topRules)[0]]
if (!topInfo) throw new RangeError(`Invalid top rule name ${top}`)
this.stacks = [Stack.start(new StackContext(parser, bufferLength, input, topInfo[1]), topInfo[0])]
this.strict = strict

@@ -347,7 +356,7 @@ this.cache = cache ? new CacheCursor(cache) : null

let {grammar, end: endToken, placeholder} = parser.nested[nest]
let filterEnd = undefined, parseNode = null, nested, wrapType = undefined
let filterEnd = undefined, parseNode = null, nested, top, wrapType = undefined
if (typeof grammar == "function") {
let query = grammar(input, stack)
if (query.stay) break maybeNest
;({parseNode, parser: nested, filterEnd, wrapType} = query)
;({parseNode, parser: nested, top, filterEnd, wrapType} = query)
} else {

@@ -365,3 +374,5 @@ nested = grammar

} else {
let newStack = Stack.start(new StackContext(nested, stack.cx.maxBufferLength, clippedInput, stack, wrapType), stack.pos)
let topInfo = nested.topRules[top || Object.keys(nested.topRules)[0]]
let newStack = Stack.start(new StackContext(nested, stack.cx.maxBufferLength, clippedInput, topInfo[1], stack, wrapType),
topInfo[0], stack.pos)
if (verbose) console.log(base + newStack + ` (nested)`)

@@ -500,2 +511,4 @@ return newStack

readonly tokenizers: readonly Tokenizer[],
/// Maps top rule names to [state ID, top term ID] pairs.
readonly topRules: {[name: string]: [number, number]},
/// Metadata about nested grammars used in this grammar @internal

@@ -633,3 +646,3 @@ readonly nested: readonly {

withNested(spec: {[name: string]: NestedGrammar | null}) {
return new Parser(this.states, this.data, this.goto, this.group, this.minRepeatTerm, this.tokenizers,
return new Parser(this.states, this.data, this.goto, this.group, this.minRepeatTerm, this.tokenizers, this.topRules,
this.nested.map(obj => {

@@ -647,3 +660,3 @@ if (!Object.prototype.hasOwnProperty.call(spec, obj.name)) return obj

return new Parser(this.states, this.data, this.goto, this.group.extend(...props), this.minRepeatTerm,
this.tokenizers, this.nested,
this.tokenizers, this.topRules, this.nested,
this.specializeTable, this.specializations, this.tokenPrecTable, this.termNames)

@@ -677,2 +690,3 @@ }

tokenizers: (Tokenizer | number)[],
topRules: {[name: string]: [number, number]},
nested?: [string, null | NestedGrammar, string, number][],

@@ -704,2 +718,3 @@ specializeTable: number,

spec.tokenizers.map(value => typeof value == "number" ? new TokenGroup(tokenArray, value) : value),
spec.topRules,
(spec.nested || []).map(([name, grammar, endToken, placeholder]) =>

@@ -706,0 +721,0 @@ ({name, grammar, end: new TokenGroup(decodeArray(endToken), 0), placeholder})),

@@ -61,4 +61,4 @@ import {Action, Term, StateFlag, ParseState} from "./constants"

/// @internal
static start(cx: StackContext, pos = 0) {
return new Stack(cx, [], cx.parser.states[0], pos, pos, 0, [], 0, null)
static start(cx: StackContext, state: number, pos = 0) {
return new Stack(cx, [], state, pos, pos, 0, [], 0, null)
}

@@ -312,3 +312,3 @@

group: this.cx.parser.group,
topID: Term.Top,
topID: this.cx.topTerm,
maxBufferLength: this.cx.maxBufferLength,

@@ -315,0 +315,0 @@ reused: this.cx.reused,

Sorry, the diff of this file is not supported yet

Socket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc