Socket
Socket
Sign inDemoInstall

lezer

Package Overview
Dependencies
Maintainers
1
Versions
37
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

lezer - npm Package Compare versions

Comparing version 0.7.0 to 0.7.1

.rpt2_cache/rpt2_98b3e60b2a4e1105e3048ec94bb2146068dafff2/code/cache/606031e63f7be1716f1fa2a635b8397fb469a743

8

CHANGELOG.md

@@ -0,1 +1,9 @@

## 0.7.1 (2020-01-23)
### Bug fixes
Tweak recovery cost for forced reductions to prefer those to other recovery strategies.
More aggressively reuse cached nodes.
## 0.7.0 (2020-01-20)

@@ -2,0 +10,0 @@

66

dist/index.js

@@ -261,3 +261,3 @@ 'use strict';

this.pos = this.reducePos = nextEnd;
this.recovered++;
this.recovered += 2 /* Token */;
};

@@ -337,3 +337,3 @@ /// Check if the given term would be able to be shifted (optionally

stack.pushState(nextStates[i], this.pos);
stack.recovered++;
stack.recovered += 2 /* Token */;
result.push(stack);

@@ -352,3 +352,3 @@ }

this.storeNode(0 /* Err */, this.reducePos, this.reducePos, 4, true);
this.recovered++;
this.recovered += 1 /* Reduce */;
}

@@ -377,2 +377,4 @@ this.reduce(reduce);

(function (Recover) {
Recover[Recover["Token"] = 2] = "Token";
Recover[Recover["Reduce"] = 1] = "Reduce";
Recover[Recover["MaxNext"] = 4] = "MaxNext";

@@ -651,5 +653,4 @@ })(Recover || (Recover = {}));

__extends(CachedToken, _super);
function CachedToken(tokenizer) {
var _this = _super.call(this) || this;
_this.tokenizer = tokenizer;
function CachedToken() {
var _this = _super !== null && _super.apply(this, arguments) || this;
_this.extended = -1;

@@ -667,6 +668,7 @@ _this.mask = 0;

var TokenCache = /** @class */ (function () {
function TokenCache() {
function TokenCache(parser) {
this.tokens = [];
this.mainToken = dummyToken;
this.actions = [];
this.tokens = parser.tokenizers.map(function (_) { return new CachedToken; });
}

@@ -681,14 +683,5 @@ TokenCache.prototype.getActions = function (stack, input) {

continue;
var tokenizer = tokenizers[i], token = void 0;
for (var _i = 0, _a = this.tokens; _i < _a.length; _i++) {
var t = _a[_i];
if (t.tokenizer == tokenizer) {
token = t;
break;
}
}
if (!token)
this.tokens.push(token = new CachedToken(tokenizer));
var tokenizer = tokenizers[i], token = this.tokens[i];
if (tokenizer.contextual || token.start != stack.pos || token.mask != mask) {
this.updateCachedToken(token, stack, input);
this.updateCachedToken(token, tokenizer, stack, input);
token.mask = mask;

@@ -720,5 +713,5 @@ }

};
TokenCache.prototype.updateCachedToken = function (token, stack, input) {
TokenCache.prototype.updateCachedToken = function (token, tokenizer, stack, input) {
token.clear(stack.pos);
token.tokenizer.token(input, token, stack);
tokenizer.token(input, token, stack);
if (token.value > -1) {

@@ -777,3 +770,3 @@ var parser = stack.cx.parser;

this.reused = [];
this.tokens = new TokenCache;
this.tokens = new TokenCache(parser);
}

@@ -798,2 +791,3 @@ return StackContext;

}
/// @internal
ParseContext.prototype.putStack = function (stack) {

@@ -896,3 +890,3 @@ this.stacks.push(stack);

var match = parser.group.types[cached.type.id] == cached.type ? parser.getGoto(stack.state, cached.type.id) : -1;
if (match > -1 && !isFragile(cached)) {
if (match > -1) {
stack.useNode(cached, match);

@@ -1015,6 +1009,6 @@ if (verbose)

if (verbose)
console.log(base + (" (via recover-delete " + stack.cx.parser.getName(token) + ")"));
console.log(base + stack + (" (via recover-delete " + stack.cx.parser.getName(token) + ")"));
this.putStack(stack);
}
else if (!finished || finished.recovered > stack.recovered) {
else if (!stack.cx.parent && (!finished || finished.recovered > stack.recovered)) {
finished = stack;

@@ -1036,3 +1030,3 @@ }

get: function () {
return this.stacks[0].recovered / this.tokenCount;
return this.stacks[0].recovered * 2 /* Token */ / this.tokenCount;
},

@@ -1297,24 +1291,2 @@ enumerable: true,

}
// Checks whether a node starts or ends with an error node, in which
// case we shouldn't reuse it.
function isFragile(node) {
var doneStart = false, doneEnd = false, fragile = node.type.id == 0 /* Err */;
if (!fragile)
node.iterate({
enter: function (type) {
return doneStart || (type.id == 0 /* Err */ ? fragile = doneStart = true : undefined);
},
leave: function () { doneStart = true; }
});
if (!fragile)
node.iterate({
from: node.length,
to: 0,
enter: function (type) {
return doneEnd || (type.id == 0 /* Err */ ? fragile = doneEnd = true : undefined);
},
leave: function () { doneEnd = true; }
});
return fragile;
}
function findFinished(stacks) {

@@ -1321,0 +1293,0 @@ var best = null;

@@ -13,6 +13,4 @@ import { Stack } from "./stack";

declare class CachedToken extends Token {
readonly tokenizer: Tokenizer;
extended: number;
mask: number;
constructor(tokenizer: Tokenizer);
clear(start: number): void;

@@ -24,4 +22,5 @@ }

actions: number[];
constructor(parser: Parser);
getActions(stack: Stack, input: InputStream): number[];
updateCachedToken(token: CachedToken, stack: Stack, input: InputStream): void;
updateCachedToken(token: CachedToken, tokenizer: Tokenizer, stack: Stack, input: InputStream): void;
putAction(action: number, token: number, end: number, index: number): number;

@@ -28,0 +27,0 @@ addActions(stack: Stack, token: number, end: number, index: number): number;

@@ -32,1 +32,6 @@ import { StackContext } from "./parse";

}
export declare const enum Recover {
Token = 2,
Reduce = 1,
MaxNext = 4
}
{
"name": "lezer",
"version": "0.7.0",
"version": "0.7.1",
"description": "Incremental parser",

@@ -17,3 +17,3 @@ "main": "dist/index.js",

"dependencies": {
"lezer-tree": "^0.7.0"
"lezer-tree": "^0.7.1"
},

@@ -20,0 +20,0 @@ "scripts": {

@@ -1,2 +0,2 @@

import {Stack} from "./stack"
import {Stack, Recover} from "./stack"
import {Action, Specialize, Term, Seq, StateFlag, ParseState} from "./constants"

@@ -87,4 +87,2 @@ import {InputStream, Token, StringStream, Tokenizer, TokenGroup} from "./token"

constructor(readonly tokenizer: Tokenizer) { super() }
clear(start: number) {

@@ -104,2 +102,6 @@ this.start = start

constructor(parser: Parser) {
this.tokens = parser.tokenizers.map(_ => new CachedToken)
}
getActions(stack: Stack, input: InputStream) {

@@ -113,7 +115,5 @@ let actionIndex = 0

if (((1 << i) & mask) == 0) continue
let tokenizer = tokenizers[i], token
for (let t of this.tokens) if (t.tokenizer == tokenizer) { token = t; break }
if (!token) this.tokens.push(token = new CachedToken(tokenizer))
let tokenizer = tokenizers[i], token = this.tokens[i]
if (tokenizer.contextual || token.start != stack.pos || token.mask != mask) {
this.updateCachedToken(token, stack, input)
this.updateCachedToken(token, tokenizer, stack, input)
token.mask = mask

@@ -143,5 +143,5 @@ }

updateCachedToken(token: CachedToken, stack: Stack, input: InputStream) {
updateCachedToken(token: CachedToken, tokenizer: Tokenizer, stack: Stack, input: InputStream) {
token.clear(stack.pos)
token.tokenizer.token(input, token, stack)
tokenizer.token(input, token, stack)
if (token.value > -1) {

@@ -205,3 +205,3 @@ let {parser} = stack.cx

reused: (Tree | TreeBuffer)[] = []
tokens = new TokenCache
tokens: TokenCache
constructor(

@@ -213,3 +213,5 @@ readonly parser: Parser,

public wrapType: number = -1 // Set to -2 when a stack descending from this nesting event finishes
) {}
) {
this.tokens = new TokenCache(parser)
}
}

@@ -241,2 +243,3 @@

/// @internal
putStack(stack: Stack) {

@@ -332,3 +335,3 @@ this.stacks.push(stack)

let match = parser.group.types[cached.type.id] == cached.type ? parser.getGoto(stack.state, cached.type.id) : -1
if (match > -1 && !isFragile(cached)) {
if (match > -1) {
stack.useNode(cached, match)

@@ -439,5 +442,5 @@ if (verbose) console.log(base + stack + ` (via reuse of ${parser.getName(cached.type.id)})`)

stack.recoverByDelete(token, tokenEnd)
if (verbose) console.log(base + ` (via recover-delete ${stack.cx.parser.getName(token)})`)
if (verbose) console.log(base + stack + ` (via recover-delete ${stack.cx.parser.getName(token)})`)
this.putStack(stack)
} else if (!finished || finished.recovered > stack.recovered) {
} else if (!stack.cx.parent && (!finished || finished.recovered > stack.recovered)) {
finished = stack

@@ -460,3 +463,3 @@ }

get badness() {
return this.stacks[0].recovered / this.tokenCount
return this.stacks[0].recovered * Recover.Token / this.tokenCount
}

@@ -725,23 +728,2 @@

// Checks whether a node starts or ends with an error node, in which
// case we shouldn't reuse it.
function isFragile(node: Tree | TreeBuffer) {
let doneStart = false, doneEnd = false, fragile = node.type.id == Term.Err
if (!fragile) node.iterate({
enter(type) {
return doneStart || (type.id == Term.Err ? fragile = doneStart = true : undefined)
},
leave() { doneStart = true }
})
if (!fragile) node.iterate({
from: node.length,
to: 0,
enter(type) {
return doneEnd || (type.id == Term.Err ? fragile = doneEnd = true : undefined)
},
leave() { doneEnd = true }
})
return fragile
}
function findFinished(stacks: Stack[]) {

@@ -748,0 +730,0 @@ let best: Stack | null = null

@@ -211,3 +211,3 @@ import {Action, Term, StateFlag, ParseState} from "./constants"

this.pos = this.reducePos = nextEnd
this.recovered++
this.recovered += Recover.Token
}

@@ -281,3 +281,3 @@

stack.pushState(nextStates[i], this.pos)
stack.recovered++
stack.recovered += Recover.Token
result.push(stack)

@@ -296,3 +296,3 @@ }

this.storeNode(Term.Err, this.reducePos, this.reducePos, 4, true)
this.recovered++
this.recovered += Recover.Reduce
}

@@ -321,3 +321,5 @@ this.reduce(reduce)

const enum Recover {
export const enum Recover {
Token = 2,
Reduce = 1,
MaxNext = 4

@@ -324,0 +326,0 @@ }

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc