lezer - npm Package Compare versions

Comparing version 0.6.0 to 0.7.0

.rpt2_cache/rpt2_0948bd7b8fcfb5dfcefcf110e10b03d9379ed0cf/code/cache/1f06184555f1bb80335b6c872aa8bdcb62c8a66a

CHANGELOG.md

@@ -0,1 +1,7 @@

+## 0.7.0 (2020-01-20)
+
+### Breaking changes
+
+This now consumes the adjusted parser output of lezer-generator 0.7.0.
+
 ## 0.6.0 (2020-01-15)

@@ -2,0 +8,0 @@
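For downstream users, the practical impact of the breaking change is that the runtime and lezer-generator have to move together: a grammar compiled with lezer-generator 0.6.x still tags repeat nodes the old way (see the NodeProp.repeated removals in dist/index.js below), which this runtime no longer understands. The following is a minimal upgrade sketch in TypeScript, assuming a project with its own grammar; the grammar path, the generated module name, and the exact CLI invocation are illustrative assumptions, not taken from this package.

// Hypothetical consumer-side upgrade for the lezer 0.7.0 breaking change.
// Assumption: package.json pairs "lezer": "^0.7.0" with "lezer-generator": "^0.7.0",
// and the grammar is regenerated so its tables use the new repeat-term layout, e.g.
//
//   npx lezer-generator src/mylang.grammar -o src/mylang-parser.js
//
import {Parser} from "lezer"            // the 0.7.0 runtime compared on this page
import {parser} from "./mylang-parser"  // illustrative path to the regenerated module

// The regenerated module exports a Parser instance whose term ids match the
// minRepeatTerm/maxRepeatWrap bookkeeping introduced in this release.
console.log(parser instanceof Parser)   // expected: true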

dist/index.js

@@ -118,3 +118,3 @@ 'use strict';

 // the stack without popping anything off.
-if (type <= parser.maxNode)
+if (type < parser.minRepeatTerm)
 this.storeNode(type, this.reducePos, this.reducePos, 4, true);

@@ -132,3 +132,5 @@ this.pushState(parser.getGoto(this.state, type, true), this.reducePos);

 var bufferBase = this.stack[base - 1], count = this.bufferBase + this.buffer.length - bufferBase;
-if (type <= parser.maxNode && ((action & 131072 /* RepeatFlag */) || !parser.group.types[type].prop(lezerTree.NodeProp.repeated))) {
+if (type < parser.minRepeatTerm || // Normal term
+(action & 131072 /* RepeatFlag */) || // Inner repeat marker
+(type > parser.maxNode && type <= parser.maxRepeatWrap)) { // Repeat wrapper
 var pos = parser.stateFlag(this.state, 1 /* Skipped */) ? this.pos : this.reducePos;

@@ -363,3 +365,8 @@ this.storeNode(type, start, pos, count + 4, true);

 Stack.prototype.toTree = function () {
-return lezerTree.Tree.build(StackBufferCursor.create(this), this.cx.parser.group, 1 /* Top */, this.cx.maxBufferLength, this.cx.reused);
+return lezerTree.Tree.build({ buffer: StackBufferCursor.create(this),
+group: this.cx.parser.group,
+topID: 1 /* Top */,
+maxBufferLength: this.cx.maxBufferLength,
+reused: this.cx.reused,
+minRepeatType: this.cx.parser.minRepeatTerm });
 };

@@ -623,2 +630,4 @@ return Stack;

 var start = this.start[last] + top.positions[index];
+if (start >= pos)
+return start == pos ? next : null;
 if (next instanceof lezerTree.TreeBuffer) {

@@ -628,5 +637,2 @@ this.index[last]++;

 }
-else if (start >= pos) {
-return start == pos ? next : null;
-}
 else {

@@ -670,4 +676,5 @@ this.index[last]++;

 var parser = stack.cx.parser, tokenizers = parser.tokenizers;
+var mask = parser.stateSlot(stack.state, 3 /* TokenizerMask */);
 for (var i = 0; i < tokenizers.length; i++) {
-if (((1 << i) & parser.stateSlot(stack.state, 3 /* TokenizerMask */)) == 0)
+if (((1 << i) & mask) == 0)
 continue;

@@ -684,3 +691,2 @@ var tokenizer = tokenizers[i], token = void 0;

 this.tokens.push(token = new CachedToken(tokenizer));
-var mask = parser.stateSlot(stack.state, 3 /* TokenizerMask */);
 if (tokenizer.contextual || token.start != stack.pos || token.mask != mask) {

@@ -893,3 +899,3 @@ this.updateCachedToken(token, stack, input);

 }
-if (cached.children.length == 0 || cached.positions[0] > 0)
+if (!(cached instanceof lezerTree.Tree) || cached.children.length == 0 || cached.positions[0] > 0)
 break;

@@ -1059,2 +1065,4 @@ var inner = cached.children[0];

 group,
+/// The first repeat-related term id @internal
+minRepeatTerm,
 /// The tokenizer objects used by the grammar @internal

@@ -1083,2 +1091,3 @@ tokenizers,

 this.group = group;
+this.minRepeatTerm = minRepeatTerm;
 this.tokenizers = tokenizers;

@@ -1092,2 +1101,3 @@ this.nested = nested;

 this.maxNode = this.group.types.length - 1;
+this.maxRepeatWrap = this.group.types.length + (this.group.types.length - minRepeatTerm) - 1;
 for (var i = 0, l = this.states.length / 6 /* Size */; i < l; i++)

@@ -1161,7 +1171,6 @@ this.nextStateCache[i] = null;

 if (this.data[i] == 65535 /* End */)
-return 0;
+return false;
 if (action == (this.data[i + 1] | (this.data[i + 2] << 16)))
 return true;
 }
-return false;
 };

@@ -1202,3 +1211,3 @@ /// Get the states that can follow this one through shift actions or

 Parser.prototype.withNested = function (spec) {
-return new Parser(this.states, this.data, this.goto, this.group, this.tokenizers, this.nested.map(function (obj) {
+return new Parser(this.states, this.data, this.goto, this.group, this.minRepeatTerm, this.tokenizers, this.nested.map(function (obj) {
 if (!Object.prototype.hasOwnProperty.call(spec, obj.name))

@@ -1218,3 +1227,3 @@ return obj;

 }
-return new Parser(this.states, this.data, this.goto, (_a = this.group).extend.apply(_a, props), this.tokenizers, this.nested, this.specializeTable, this.specializations, this.tokenPrecTable, this.termNames);
+return new Parser(this.states, this.data, this.goto, (_a = this.group).extend.apply(_a, props), this.minRepeatTerm, this.tokenizers, this.nested, this.specializeTable, this.specializations, this.tokenPrecTable, this.termNames);
 };

@@ -1231,3 +1240,3 @@ /// Returns the name associated with a given term. This will only

 /// types. @internal
-get: function () { return this.maxNode + 1; },
+get: function () { return this.maxRepeatWrap + 1; },
 enumerable: true,

@@ -1245,3 +1254,3 @@ configurable: true

 var tokenArray = decodeArray(spec.tokenData);
-var nodeNames = spec.nodeNames.split(" ");
+var nodeNames = spec.nodeNames.split(" "), minRepeatTerm = nodeNames.length;
 for (var i = 0; i < spec.repeatNodeCount; i++)

@@ -1258,4 +1267,2 @@ nodeNames.push("");

 setProp(0, lezerTree.NodeProp.error, "");
-for (var i = nodeProps.length - spec.repeatNodeCount; i < nodeProps.length; i++)
-setProp(i, lezerTree.NodeProp.repeated, "");
 if (spec.nodeProps)

@@ -1269,3 +1276,3 @@ for (var _i = 0, _a = spec.nodeProps; _i < _a.length; _i++) {

 var group = new lezerTree.NodeGroup(nodeNames.map(function (name, i) { return new lezerTree.NodeType(name, nodeProps[i], i); }));
-return new Parser(decodeArray(spec.states, Uint32Array), decodeArray(spec.stateData), decodeArray(spec.goto), group, spec.tokenizers.map(function (value) { return typeof value == "number" ? new TokenGroup(tokenArray, value) : value; }), (spec.nested || []).map(function (_a) {
+return new Parser(decodeArray(spec.states, Uint32Array), decodeArray(spec.stateData), decodeArray(spec.goto), group, minRepeatTerm, spec.tokenizers.map(function (value) { return typeof value == "number" ? new TokenGroup(tokenArray, value) : value; }), (spec.nested || []).map(function (_a) {
 var name = _a[0], grammar = _a[1], endToken = _a[2], placeholder = _a[3];

@@ -1304,3 +1311,3 @@ return ({ name: name, grammar: grammar, end: new TokenGroup(decodeArray(endToken), 0), placeholder: placeholder });

},
leave: function (type) { doneStart = true; }
leave: function () { doneStart = true; }
});

@@ -1314,3 +1321,3 @@ if (!fragile)

},
leave: function (type) { doneEnd = true; }
leave: function () { doneEnd = true; }
});

@@ -1317,0 +1324,0 @@ return fragile;

 import { Stack } from "./stack";
 import { InputStream, Token, Tokenizer, TokenGroup } from "./token";
-import { Tree, NodeGroup, NodeProp, NodePropSource } from "lezer-tree";
+import { Tree, TreeBuffer, NodeGroup, NodeProp, NodePropSource } from "lezer-tree";
 export declare type NestedGrammar = null | Parser | ((input: InputStream, stack: Stack) => NestedGrammarSpec);

@@ -39,3 +39,3 @@ export interface NestedGrammarSpec {

 wrapType: number;
-reused: Tree[];
+reused: (Tree | TreeBuffer)[];
 tokens: TokenCache;

@@ -66,2 +66,3 @@ constructor(parser: Parser, maxBufferLength: number, input: InputStream, parent?: Stack | null, wrapType?: number);

 readonly group: NodeGroup;
+readonly minRepeatTerm: number;
 readonly tokenizers: readonly Tokenizer[];

@@ -83,4 +84,5 @@ readonly nested: readonly {

 maxNode: number;
+maxRepeatWrap: number;
 private nextStateCache;
-constructor(states: Readonly<Uint32Array>, data: Readonly<Uint16Array>, goto: Readonly<Uint16Array>, group: NodeGroup, tokenizers: readonly Tokenizer[], nested: readonly {
+constructor(states: Readonly<Uint32Array>, data: Readonly<Uint16Array>, goto: Readonly<Uint16Array>, group: NodeGroup, minRepeatTerm: number, tokenizers: readonly Tokenizer[], nested: readonly {
 name: string;

@@ -102,3 +104,3 @@ grammar: NestedGrammar;

 startNested(state: number): number;
-validAction(state: number, action: number): boolean | 0;
+validAction(state: number, action: number): boolean;
 nextStates(state: number): readonly number[];

@@ -105,0 +107,0 @@ overrides(token: number, prev: number): boolean;

 import { StackContext } from "./parse";
-import { Tree } from "lezer-tree";
+import { Tree, TreeBuffer } from "lezer-tree";
 export declare class Stack {

@@ -21,3 +21,3 @@ readonly cx: StackContext;

 apply(action: number, next: number, nextEnd: number): void;
-useNode(value: Tree, next: number): void;
+useNode(value: Tree | TreeBuffer, next: number): void;
 split(): Stack;

@@ -24,0 +24,0 @@ recoverByDelete(next: number, nextEnd: number): void;

 {
 "name": "lezer",
-"version": "0.6.0",
+"version": "0.7.0",
 "description": "Incremental parser",

@@ -17,3 +17,3 @@ "main": "dist/index.js",

"dependencies": {
"lezer-tree": "^0.5.1"
"lezer-tree": "^0.7.0"
},

@@ -20,0 +20,0 @@ "scripts": {

@@ -49,3 +49,3 @@ import {Stack} from "./stack"

 // `pos` must be >= any previously given `pos` for this cursor
-nodeAt(pos: number): Tree | null {
+nodeAt(pos: number): Tree | TreeBuffer | null {
 if (pos < this.nextStart) return null

@@ -68,7 +68,6 @@

 let start = this.start[last] + top.positions[index]
+if (start >= pos) return start == pos ? next : null
 if (next instanceof TreeBuffer) {
 this.index[last]++
 this.nextStart = start + next.length
-} else if (start >= pos) {
-return start == pos ? next : null
 } else {

@@ -111,8 +110,8 @@ this.index[last]++

+let mask = parser.stateSlot(stack.state, ParseState.TokenizerMask)
 for (let i = 0; i < tokenizers.length; i++) {
-if (((1 << i) & parser.stateSlot(stack.state, ParseState.TokenizerMask)) == 0) continue
+if (((1 << i) & mask) == 0) continue
 let tokenizer = tokenizers[i], token
 for (let t of this.tokens) if (t.tokenizer == tokenizer) { token = t; break }
 if (!token) this.tokens.push(token = new CachedToken(tokenizer))
-let mask = parser.stateSlot(stack.state, ParseState.TokenizerMask)
 if (tokenizer.contextual || token.start != stack.pos || token.mask != mask) {

@@ -204,3 +203,3 @@ this.updateCachedToken(token, stack, input)

 export class StackContext {
-reused: Tree[] = []
+reused: (Tree | TreeBuffer)[] = []
 tokens = new TokenCache

@@ -335,3 +334,3 @@ constructor(

 }
-if (cached.children.length == 0 || cached.positions[0] > 0) break
+if (!(cached instanceof Tree) || cached.children.length == 0 || cached.positions[0] > 0) break
 let inner = cached.children[0]

@@ -477,2 +476,4 @@ if (inner instanceof Tree) cached = inner

 maxNode: number
+/// @internal
+maxRepeatWrap: number
 private nextStateCache: (readonly number[] | null)[] = []

@@ -492,2 +493,4 @@

 readonly group: NodeGroup,
+/// The first repeat-related term id @internal
+readonly minRepeatTerm: number,
 /// The tokenizer objects used by the grammar @internal

@@ -523,2 +526,3 @@ readonly tokenizers: readonly Tokenizer[],

 this.maxNode = this.group.types.length - 1
+this.maxRepeatWrap = this.group.types.length + (this.group.types.length - minRepeatTerm) - 1
 for (let i = 0, l = this.states.length / ParseState.Size; i < l; i++) this.nextStateCache[i] = null

@@ -589,6 +593,5 @@ }

 for (let i = this.stateSlot(state, ParseState.Actions);; i += 3) {
-if (this.data[i] == Seq.End) return 0
+if (this.data[i] == Seq.End) return false
 if (action == (this.data[i + 1] | (this.data[i + 2] << 16))) return true
 }
-return false
 }

@@ -629,3 +632,3 @@

 withNested(spec: {[name: string]: NestedGrammar | null}) {
-return new Parser(this.states, this.data, this.goto, this.group, this.tokenizers,
+return new Parser(this.states, this.data, this.goto, this.group, this.minRepeatTerm, this.tokenizers,
 this.nested.map(obj => {

@@ -642,3 +645,4 @@ if (!Object.prototype.hasOwnProperty.call(spec, obj.name)) return obj

 withProps(...props: NodePropSource[]) {
-return new Parser(this.states, this.data, this.goto, this.group.extend(...props), this.tokenizers, this.nested,
+return new Parser(this.states, this.data, this.goto, this.group.extend(...props), this.minRepeatTerm,
+this.tokenizers, this.nested,
 this.specializeTable, this.specializations, this.tokenPrecTable, this.termNames)

@@ -657,3 +661,3 @@ }

 /// types. @internal
-get eofTerm() { return this.maxNode + 1 }
+get eofTerm() { return this.maxRepeatWrap + 1 }

@@ -680,3 +684,3 @@ /// Tells you whether this grammar has any nested grammars.

 let tokenArray = decodeArray(spec.tokenData)
-let nodeNames = spec.nodeNames.split(" ")
+let nodeNames = spec.nodeNames.split(" "), minRepeatTerm = nodeNames.length
 for (let i = 0; i < spec.repeatNodeCount; i++) nodeNames.push("")

@@ -690,3 +694,2 @@ let nodeProps: {[id: number]: any}[] = []

 setProp(0, NodeProp.error, "")
-for (let i = nodeProps.length - spec.repeatNodeCount; i < nodeProps.length; i++) setProp(i, NodeProp.repeated, "")
 if (spec.nodeProps) for (let propSpec of spec.nodeProps) {

@@ -700,3 +703,3 @@ let prop = propSpec[0]

 return new Parser(decodeArray(spec.states, Uint32Array), decodeArray(spec.stateData),
-decodeArray(spec.goto), group,
+decodeArray(spec.goto), group, minRepeatTerm,
 spec.tokenizers.map(value => typeof value == "number" ? new TokenGroup(tokenArray, value) : value),

@@ -729,3 +732,3 @@ (spec.nested || []).map(([name, grammar, endToken, placeholder]) =>

 // case we shouldn't reuse it.
-function isFragile(node: Tree) {
+function isFragile(node: Tree | TreeBuffer) {
 let doneStart = false, doneEnd = false, fragile = node.type.id == Term.Err

@@ -736,3 +739,3 @@ if (!fragile) node.iterate({

},
leave(type) { doneStart = true }
leave() { doneStart = true }
})

@@ -745,3 +748,3 @@ if (!fragile) node.iterate({

},
leave(type) { doneEnd = true }
leave() { doneEnd = true }
})

@@ -748,0 +751,0 @@ return fragile

 import {Action, Term, StateFlag, ParseState} from "./constants"
 import {StackContext} from "./parse"
-import {Tree, BufferCursor, NodeProp} from "lezer-tree"
+import {Tree, TreeBuffer, BufferCursor} from "lezer-tree"

@@ -81,3 +81,3 @@ /// A parse stack. These are used internally by the parser to track

 // the stack without popping anything off.
-if (type <= parser.maxNode) this.storeNode(type, this.reducePos, this.reducePos, 4, true)
+if (type < parser.minRepeatTerm) this.storeNode(type, this.reducePos, this.reducePos, 4, true)
 this.pushState(parser.getGoto(this.state, type, true), this.reducePos)

@@ -95,3 +95,5 @@ return

 let bufferBase = this.stack[base - 1], count = this.bufferBase + this.buffer.length - bufferBase
-if (type <= parser.maxNode && ((action & Action.RepeatFlag) || !parser.group.types[type].prop(NodeProp.repeated))) {
+if (type < parser.minRepeatTerm || // Normal term
+(action & Action.RepeatFlag) || // Inner repeat marker
+(type > parser.maxNode && type <= parser.maxRepeatWrap)) { // Repeat wrapper
 let pos = parser.stateFlag(this.state, StateFlag.Skipped) ? this.pos : this.reducePos

@@ -173,3 +175,3 @@ this.storeNode(type, start, pos, count + 4, true)

 /// @internal
-useNode(value: Tree, next: number) {
+useNode(value: Tree | TreeBuffer, next: number) {
 let index = this.cx.reused.length - 1

@@ -310,3 +312,8 @@ if (index < 0 || this.cx.reused[index] != value) {

 toTree(): Tree {
-return Tree.build(StackBufferCursor.create(this), this.cx.parser.group, Term.Top, this.cx.maxBufferLength, this.cx.reused)
+return Tree.build({buffer: StackBufferCursor.create(this),
+group: this.cx.parser.group,
+topID: Term.Top,
+maxBufferLength: this.cx.maxBufferLength,
+reused: this.cx.reused,
+minRepeatType: this.cx.parser.minRepeatTerm})
 }

@@ -313,0 +320,0 @@ }
