Comparing version 0.10.1 to 0.10.2
@@ -0,1 +1,15 @@

## 0.10.2 (2020-09-02)

### Bug fixes
`Stack.ruleStart` will now ignore repeat rules and node-less rules when determining the innermost rule.

Work around a failure mode where error recovery got stuck in an end-of-grammar state, and thus could not meaningfully continue parsing, by restarting such states back to their initial state.
### New features

External tokenizers can now provide an `extend` flag to allow their tokens to be used alongside tokens produced by other tokenizers.
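A minimal sketch of such a tokenizer (the `HashName` term id, its import path, and the `#`-matching logic are illustrative assumptions; only the `{extend: true}` option is what this release adds):

```js
import {ExternalTokenizer} from "lezer"
// Hypothetical term id exported by a generated parser.
import {HashName} from "./parser.terms.js"

// Accepts `#word` tokens. With `extend: true`, these tokens are offered
// *alongside* whatever the built-in tokenizers produce at the same
// position, instead of replacing them.
export const hashTokens = new ExternalTokenizer(function (input, token) {
  if (input.get(token.start) != 35 /* '#' */) return
  var pos = token.start + 1
  while (/\w/.test(String.fromCharCode(input.get(pos)))) pos++
  token.accept(HashName, pos)
}, {extend: true})
```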
Add support for dynamic precedences, which adjust a parse stack's score so that, when an ambiguity forces the parser to split, the higher-precedence interpretation wins.
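The same `score` field now also carries error-recovery penalties, using the rescaled `Recover` costs visible further down in this diff. A toy illustration of the arithmetic (the +1 dynamic precedence is a made-up grammar-side value, e.g. from a `@dynamicPrecedence` prop, which is an assumption about the generator side):

```js
// Costs from the new Recover enum in this release (Token = 200, Reduce = 100).
var Recover = {Token: 200, Reduce: 100}

var score = 0
score += 1              // reduce of a node carrying dynamic precedence 1
score -= Recover.Reduce // one forced error reduce: -100
score -= Recover.Token  // one recover-by-delete of a token: -200
console.log(score)      // -299; higher-scoring stacks survive pruning
```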
## 0.10.1 (2020-08-20)

@@ -2,0 +16,0 @@
 export declare function decodeArray<T extends {
 [i: number]: number;
-} = Uint16Array>(input: string, Type?: {
+} = Uint16Array>(input: string | T, Type?: {
 new (n: number): T;
 }): T;
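The widened `input: string | T` signature pairs with the runtime guard added below (`if (typeof input != "string") return input;`): already-decoded arrays are now passed through untouched. A minimal sketch of that guard's effect (the decode loop is elided and stubbed, so this is illustrative, not the library's implementation):

```js
function decodeArray(input, Type) {
  if (Type === void 0) Type = Uint16Array
  // New in 0.10.2: non-string inputs are returned as-is.
  if (typeof input != "string") return input
  /* ...the existing string-decoding loop, unchanged, would go here... */
  return new Type(0) // stub so the sketch runs standalone
}

var arr = new Uint16Array([1, 2, 3])
console.log(decodeArray(arr) === arr) // true — passthrough
```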
@@ -40,12 +40,11 @@ import { Tree, NodeProp, NodeGroup, NodeType, DefaultBufferLength, TreeBuffer } from 'lezer-tree';
 function Stack(
-// A group of values that the stack will share with all
-// split instances
+/// A group of values that the stack will share with all
+/// split instances
+/// @internal
 cx,
-// Holds state, pos, value stack pos (15 bits array index, 15 bits
-// buffer index) triplets for all but the top state
+/// Holds state, pos, value stack pos (15 bits array index, 15 bits
+/// buffer index) triplets for all but the top state
+/// @internal
 stack,
-// The current parse state
-/// @internal
+/// The current parse state @internal
 state,
@@ -58,7 +57,8 @@ // The position at which the next reduce should take place. This
 reducePos,
-// The input position up to which this stack has parsed.
+/// The input position up to which this stack has parsed.
 pos,
-// The amount of error-recovery that happened on this stack
-recovered,
+/// The dynamic score of the stack, including dynamic precedence
+/// and error-recovery penalties
+/// @internal
+score,
 // The output buffer. Holds (type, start, end, size) quads
@@ -87,3 +87,3 @@ // representing nodes created by the parser, where `size` is
 this.pos = pos;
-this.recovered = recovered;
+this.score = score;
 this.buffer = buffer;
@@ -95,3 +95,3 @@ this.bufferBase = bufferBase;
 Stack.prototype.toString = function () {
-return "[" + this.stack.filter(function (_, i) { return i % 3 == 0; }).concat(this.state) + "]@" + this.pos + (this.recovered ? "!" + this.recovered : "");
+return "[" + this.stack.filter(function (_, i) { return i % 3 == 0; }).concat(this.state) + "]@" + this.pos + (this.score ? "!" + this.score : "");
 };
@@ -116,2 +116,5 @@ // Start an empty stack
 var parser = this.cx.parser;
+var dPrec = parser.dynamicPrecedence(type);
+if (dPrec)
+this.score += dPrec;
 if (depth == 0) {
@@ -253,3 +256,3 @@ // Zero-depth reductions are a special case—they add stuff to
 parent = parent.parent;
-return new Stack(this.cx, this.stack.slice(), this.state, this.reducePos, this.pos, this.recovered, buffer, base, parent);
+return new Stack(this.cx, this.stack.slice(), this.state, this.reducePos, this.pos, this.score, buffer, base, parent);
 };
@@ -264,3 +267,3 @@ // Try to recover from an error by 'deleting' (ignoring) one token.
 this.pos = this.reducePos = nextEnd;
-this.recovered += 2 /* Token */;
+this.score -= 200 /* Token */;
 };
@@ -284,7 +287,11 @@ /// Check if the given term would be able to be shifted (optionally
 get: function () {
-var force = this.cx.parser.stateSlot(this.state, 5 /* ForcedReduce */);
-if (!(force & 65536 /* ReduceFlag */))
-return 0;
-var base = this.stack.length - (3 * (force >> 19 /* ReduceDepthShift */));
-return this.stack[base + 1];
+for (var state = this.state, base = this.stack.length;;) {
+var force = this.cx.parser.stateSlot(state, 5 /* ForcedReduce */);
+if (!(force & 65536 /* ReduceFlag */))
+return 0;
+base -= 3 * (force >> 19 /* ReduceDepthShift */);
+if ((force & 65535 /* ValueMask */) < this.cx.parser.minRepeatTerm)
+return this.stack[base + 1];
+state = this.stack[base];
+}
 },
@@ -354,3 +361,3 @@ enumerable: true,
 stack.pushState(nextStates[i], this.pos);
-stack.recovered += 2 /* Token */;
+stack.score -= 200 /* Token */;
 result.push(stack);
@@ -369,3 +376,3 @@ }
 this.storeNode(0 /* Err */, this.reducePos, this.reducePos, 4, true);
-this.recovered += 1 /* Reduce */;
+this.score -= 100 /* Reduce */;
 }
@@ -380,2 +387,32 @@ this.reduce(reduce);
 };
+Object.defineProperty(Stack.prototype, "deadEnd", {
+/// Check whether this state has no further actions (assumed to be a direct descendant of the
+/// top state, since any other states must be able to continue
+/// somehow). @internal
+get: function () {
+if (this.stack.length != 3)
+return false;
+var parser = this.cx.parser;
+return parser.data[parser.stateSlot(this.state, 1 /* Actions */)] == 65535 /* End */ &&
+!parser.stateSlot(this.state, 4 /* DefaultReduce */);
+},
+enumerable: true,
+configurable: true
+});
+/// Restart the stack (put it back in its start state). Only safe
+/// when this.stack.length == 3 (state is directly below the top
+/// state). @internal
+Stack.prototype.restart = function () {
+this.state = this.stack[0];
+this.stack.length = 0;
+};
+/// @internal
+Stack.prototype.sameState = function (other) {
+if (this.state != other.state || this.stack.length != other.stack.length)
+return false;
+for (var i = 0; i < this.stack.length; i += 3)
+if (this.stack[i] != other.stack[i])
+return false;
+return true;
+};
 // Convert the stack's buffer to a syntax tree.
@@ -404,4 +441,4 @@ /// @internal
 (function (Recover) {
-Recover[Recover["Token"] = 2] = "Token";
-Recover[Recover["Reduce"] = 1] = "Reduce";
+Recover[Recover["Token"] = 200] = "Token";
+Recover[Recover["Reduce"] = 100] = "Reduce";
 Recover[Recover["MaxNext"] = 4] = "MaxNext";
@@ -534,3 +571,3 @@ Recover[Recover["MaxInsertStackDepth"] = 300] = "MaxInsertStackDepth";
 }());
-TokenGroup.prototype.contextual = TokenGroup.prototype.fallback = false;
+TokenGroup.prototype.contextual = TokenGroup.prototype.fallback = TokenGroup.prototype.extend = false;
 /// Exports that are used for `@external tokens` in the grammar should
@@ -551,2 +588,3 @@ /// export an instance of this class.
 this.fallback = !!options.fallback;
+this.extend = !!options.extend;
 }
@@ -616,2 +654,4 @@ return ExternalTokenizer;
 if (Type === void 0) { Type = Uint16Array; }
+if (typeof input != "string")
+return input;
 var array = null;
@@ -650,2 +690,3 @@ for (var pos = 0, out = 0; pos < input.length;) {
 var verbose = typeof process != "undefined" && /\bparse\b/.test(process.env.LOG);
+var stackIDs = null;
 var CacheCursor = /** @class */ (function () {
@@ -733,3 +774,2 @@ function CacheCursor(tree) {
 if (token.value != 0 /* Err */) {
-main = token;
 var startIndex = actionIndex;
@@ -739,4 +779,7 @@ if (token.extended > -1)
 actionIndex = this.addActions(stack, token.value, token.end, actionIndex);
-if (actionIndex > startIndex)
-break;
+if (!tokenizer.extend) {
+main = token;
+if (actionIndex > startIndex)
+break;
+}
 }
@@ -832,2 +875,3 @@ }
 this.tokenCount = 0;
+this.nextStackID = 0x2654;
 var _a = options.cache, cache = _a === void 0 ? undefined : _a, _b = options.strict, strict = _b === void 0 ? false : _b, _c = options.bufferLength, bufferLength = _c === void 0 ? DefaultBufferLength : _c, _d = options.top, top = _d === void 0 ? undefined : _d, dialect = options.dialect;
@@ -847,2 +891,14 @@ var topInfo = top ? parser.topRules[top] : parser.defaultTop;
 };
+/// @internal
+ParseContext.prototype.putStackDedup = function (stack) {
+for (var i = 0; i < this.stacks.length; i++) {
+var other = this.stacks[i];
+if (other.pos == stack.pos && other.sameState(stack)) {
+if (this.stacks[i].score < stack.score)
+this.stacks[i] = stack;
+return;
+}
+}
+this.putStack(stack);
+};
 /// Move the parser forward. This will process all parse stacks at
@@ -906,3 +962,3 @@ /// `this.pos` and try to advance them to a further position. If no
 if (this.stacks.length > maxRemaining) {
-this.stacks.sort(function (a, b) { return a.recovered - b.recovered; });
+this.stacks.sort(function (a, b) { return b.score - a.score; });
 this.stacks.length = maxRemaining;
@@ -913,16 +969,22 @@ }
 }
-else if (this.stacks.length > 1 && this.stacks[0].buffer.length > minBufferLengthPrune) {
-// Prune stacks that have been running without splitting for a
-// while, to avoid getting stuck with multiple successful stacks
-// running endlessly on.
-var minLen = 1e9, minI = -1;
-for (var i = 0; i < this.stacks.length; i++) {
+else if (this.stacks.length > 1) {
+// Prune stacks that are in the same state, or that have been
+// running without splitting for a while, to avoid getting stuck
+// with multiple successful stacks running endlessly on.
+outer: for (var i = 0; i < this.stacks.length - 1; i++) {
 var stack = this.stacks[i];
-if (stack.buffer.length < minLen) {
-minLen = stack.buffer.length;
-minI = i;
+for (var j = i + 1; j < this.stacks.length; j++) {
+var other = this.stacks[j];
+if (stack.sameState(other) ||
+stack.buffer.length > minBufferLengthPrune && other.buffer.length > minBufferLengthPrune) {
+if (((stack.score - other.score) || (stack.buffer.length - other.buffer.length)) > 0) {
+this.stacks.splice(j--, 1);
+}
+else {
+this.stacks.splice(i--, 1);
+continue outer;
+}
+}
 }
 }
+}
-if (minLen > minBufferLengthPrune)
-this.stacks.splice(minI, 1);
 }
@@ -938,3 +1000,3 @@ this.tokenCount++;
 var start = stack.pos, _a = stack.cx, input = _a.input, parser = _a.parser;
-var base = verbose ? stack + " -> " : "";
+var base = verbose ? this.stackID(stack) + " -> " : "";
 if (this.cache) {
@@ -946,3 +1008,3 @@ for (var cached = this.cache.nodeAt(start); cached;) {
 if (verbose)
-console.log(base + stack + (" (via reuse of " + parser.getName(cached.type.id) + ")"));
+console.log(base + this.stackID(stack) + (" (via reuse of " + parser.getName(cached.type.id) + ")"));
 return stack;
@@ -987,3 +1049,3 @@ }
 if (verbose)
-console.log(base + newStack + " (nested)");
+console.log(base + this.stackID(newStack) + " (nested)");
 return newStack;
@@ -996,3 +1058,3 @@ }
 if (verbose)
-console.log(base + stack + (" (via always-reduce " + parser.getName(defaultReduce & 65535 /* ValueMask */) + ")"));
+console.log(base + this.stackID(stack) + (" (via always-reduce " + parser.getName(defaultReduce & 65535 /* ValueMask */) + ")"));
 return stack;
@@ -1007,3 +1069,3 @@ }
 if (verbose)
-console.log(base + localStack + (" (via " + ((action & 65536 /* ReduceFlag */) == 0 ? "shift"
+console.log(base + this.stackID(localStack) + (" (via " + ((action & 65536 /* ReduceFlag */) == 0 ? "shift"
 : "reduce of " + parser.getName(action & 65535 /* ValueMask */)) + " for " + parser.getName(term) + " @ " + start + (localStack == stack ? "" : ", split") + ")"));
@@ -1018,3 +1080,3 @@ if (last)
 if (stack.cx.parent && stack.pos == input.length)
-return finishNested(stack);
+return this.finishNested(stack);
 return null;
@@ -1024,3 +1086,3 @@ };
 // (possibly updated) stack if it got stuck, or null if it moved
-// forward and was given to `putStack`.
+// forward and was given to `putStackDedup`.
 ParseContext.prototype.advanceFully = function (stack) {
@@ -1033,3 +1095,3 @@ var pos = stack.pos;
 if (result.pos > pos) {
-this.putStack(result);
+this.putStackDedup(result);
 return null;
@@ -1041,10 +1103,23 @@ }
 ParseContext.prototype.runRecovery = function (stacks, tokens) {
-var finished = null;
+var finished = null, restarted = false;
 for (var i = 0; i < stacks.length; i++) {
 var stack = stacks[i], token = tokens[i << 1], tokenEnd = tokens[(i << 1) + 1];
-var base = verbose ? stack + " -> " : "";
+var base = verbose ? this.stackID(stack) + " -> " : "";
+if (stack.deadEnd) {
+if (restarted)
+continue;
+restarted = true;
+stack.restart();
+if (verbose)
+console.log(base + this.stackID(stack) + " (restarted)");
+var stopped = this.advanceFully(stack);
+if (stopped)
+stack = stopped;
+else
+continue;
+}
 var force = stack.split(), forceBase = base;
 for (var j = 0; force.forceReduce() && j < forceReduceLimit; j++) {
 if (verbose)
-console.log(forceBase + force + " (via force-reduce)");
+console.log(forceBase + this.stackID(force) + " (via force-reduce)");
 var stopped = this.advanceFully(force);
@@ -1055,3 +1130,3 @@ if (!stopped)
 if (verbose)
-forceBase = stopped + " -> ";
+forceBase = this.stackID(stopped) + " -> ";
 }
@@ -1061,3 +1136,3 @@ for (var _i = 0, _a = stack.recoverByInsert(token); _i < _a.length; _i++) {
 if (verbose)
-console.log(base + insert + " (via recover-insert)");
+console.log(base + this.stackID(insert) + " (via recover-insert)");
 this.advanceFully(insert);
@@ -1072,6 +1147,6 @@ }
 if (verbose)
-console.log(base + stack + (" (via recover-delete " + stack.cx.parser.getName(token) + ")"));
-this.putStack(stack);
+console.log(base + this.stackID(stack) + (" (via recover-delete " + stack.cx.parser.getName(token) + ")"));
+this.putStackDedup(stack);
 }
-else if (!stack.cx.parent && (!finished || finished.recovered > stack.recovered)) {
+else if (!stack.cx.parent && (!finished || finished.score < stack.score)) {
 finished = stack;
@@ -1093,3 +1168,3 @@ }
 get: function () {
-return this.stacks[0].recovered * 2 /* Token */ / this.tokenCount;
+return -(this.stacks[0].score * 200 /* Token */ / this.tokenCount);
 },
@@ -1110,2 +1185,22 @@ enumerable: true,
 };
+ParseContext.prototype.finishNested = function (stack) {
+if (stack.cx.wrapType == -2)
+return null; // Another nested stack already finished
+var parent = stack.cx.parent, tree = stack.forceAll().toTree();
+var parentParser = parent.cx.parser, info = parentParser.nested[parentParser.startNested(parent.state)];
+tree = new Tree(tree.type, tree.children, tree.positions.map(function (p) { return p - parent.pos; }), stack.pos - parent.pos);
+if (stack.cx.wrapType > -1)
+tree = new Tree(parentParser.group.types[stack.cx.wrapType], [tree], [0], tree.length);
+stack.cx.wrapType = -2;
+parent.useNode(tree, parentParser.getGoto(parent.state, info.placeholder, true));
+if (verbose)
+console.log(this.stackID(parent) + (" (via unnest " + (stack.cx.wrapType > -1 ? parentParser.getName(stack.cx.wrapType) : tree.type.name) + ")"));
+return parent;
+};
+ParseContext.prototype.stackID = function (stack) {
+var id = (stackIDs || (stackIDs = new WeakMap)).get(stack);
+if (!id)
+stackIDs.set(stack, id = String.fromCodePoint(this.nextStackID++));
+return id + stack;
+};
 return ParseContext;
@@ -1126,55 +1221,50 @@ }());
 /// @internal
-function Parser(
-/// The parse states for this grammar @internal
-states,
-/// A blob of data that the parse states, as well as some
-/// of `Parser`'s fields, point into @internal
-data,
-/// The goto table. See `computeGotoTable` in
-/// lezer-generator for details on the format @internal
-goto,
-/// A node group with the node types used by this parser.
-group,
-/// The highest term id @internal
-maxTerm,
-/// The first repeat-related term id @internal
-minRepeatTerm,
-/// The tokenizer objects used by the grammar @internal
-tokenizers,
-/// Maps top rule names to [state ID, top term ID] pairs.
-topRules,
-/// Metadata about nested grammars used in this grammar @internal
-nested,
-/// A mapping from dialect names to the tokens that are exclusive
-/// to them. @internal
-dialects,
-/// The token types have specializers (in this.specializers) @internal
-specialized,
-/// The specializer functions for the token types in specialized @internal
-specializers,
-/// Points into this.data at an array that holds the
-/// precedence order (higher precedence first) for ambiguous
-/// tokens @internal
-tokenPrecTable,
-/// An optional object mapping term ids to name strings @internal
-termNames) {
-if (termNames === void 0) { termNames = null; }
-this.states = states;
-this.data = data;
-this.goto = goto;
-this.group = group;
-this.maxTerm = maxTerm;
-this.minRepeatTerm = minRepeatTerm;
-this.tokenizers = tokenizers;
-this.topRules = topRules;
-this.nested = nested;
-this.dialects = dialects;
-this.specialized = specialized;
-this.specializers = specializers;
-this.tokenPrecTable = tokenPrecTable;
-this.termNames = termNames;
+function Parser(spec) {
+this.nextStateCache = [];
+this.cachedDialect = null;
+var tokenArray = decodeArray(spec.tokenData);
+var nodeNames = spec.nodeNames.split(" ");
+this.minRepeatTerm = nodeNames.length;
+for (var i = 0; i < spec.repeatNodeCount; i++)
+nodeNames.push("");
+var nodeProps = [];
+for (var i = 0; i < nodeNames.length; i++)
+nodeProps.push(noProps);
+function setProp(nodeID, prop, value) {
+if (nodeProps[nodeID] == noProps)
+nodeProps[nodeID] = Object.create(null);
+prop.set(nodeProps[nodeID], prop.deserialize(String(value)));
+}
+setProp(0, NodeProp.error, "");
+if (spec.nodeProps)
+for (var _i = 0, _a = spec.nodeProps; _i < _a.length; _i++) {
+var propSpec = _a[_i];
+var prop = propSpec[0];
+for (var i = 1; i < propSpec.length; i += 2)
+setProp(propSpec[i], prop, propSpec[i + 1]);
+}
+this.specialized = new Uint16Array(spec.specialized ? spec.specialized.length : 0);
+this.specializers = [];
+if (spec.specialized)
+for (var i = 0; i < spec.specialized.length; i++) {
+this.specialized[i] = spec.specialized[i].term;
+this.specializers[i] = spec.specialized[i].get;
+}
+this.states = decodeArray(spec.states, Uint32Array);
+this.data = decodeArray(spec.stateData);
+this.goto = decodeArray(spec.goto);
+this.group = new NodeGroup(nodeNames.map(function (name, i) { return new NodeType(name, nodeProps[i], i); }));
+this.maxTerm = spec.maxTerm;
+this.tokenizers = spec.tokenizers.map(function (value) { return typeof value == "number" ? new TokenGroup(tokenArray, value) : value; });
+this.topRules = spec.topRules;
+this.nested = (spec.nested || []).map(function (_a) {
+var name = _a[0], grammar = _a[1], endToken = _a[2], placeholder = _a[3];
+return { name: name, grammar: grammar, end: new TokenGroup(decodeArray(endToken), 0), placeholder: placeholder };
+});
+this.dialects = spec.dialects || {};
+this.dynamicPrecedences = spec.dynamicPrecedences || null;
+this.tokenPrecTable = spec.tokenPrec;
+this.termNames = spec.termNames || null;
 this.maxNode = this.group.types.length - 1;
-this.maxRepeatWrap = this.group.types.length + (this.group.types.length - minRepeatTerm) - 1;
+this.maxRepeatWrap = this.group.types.length + (this.group.types.length - this.minRepeatTerm) - 1;
 for (var i = 0, l = this.states.length / 6 /* Size */; i < l; i++)
@@ -1287,7 +1377,7 @@ this.nextStateCache[i] = null;
 Parser.prototype.withNested = function (spec) {
-return new Parser(this.states, this.data, this.goto, this.group, this.maxTerm, this.minRepeatTerm, this.tokenizers, this.topRules, this.nested.map(function (obj) {
-if (!Object.prototype.hasOwnProperty.call(spec, obj.name))
-return obj;
-return { name: obj.name, grammar: spec[obj.name], end: obj.end, placeholder: obj.placeholder };
-}), this.dialects, this.specialized, this.specializers, this.tokenPrecTable, this.termNames);
+return this.copy({ nested: this.nested.map(function (obj) {
+if (!Object.prototype.hasOwnProperty.call(spec, obj.name))
+return obj;
+return { name: obj.name, grammar: spec[obj.name], end: obj.end, placeholder: obj.placeholder };
+}) });
 };
@@ -1303,4 +1393,14 @@ /// Create a new `Parser` instance whose node types have the given
 }
-return new Parser(this.states, this.data, this.goto, (_a = this.group).extend.apply(_a, props), this.maxTerm, this.minRepeatTerm, this.tokenizers, this.topRules, this.nested, this.dialects, this.specialized, this.specializers, this.tokenPrecTable, this.termNames);
+return this.copy({ group: (_a = this.group).extend.apply(_a, props) });
 };
+Parser.prototype.copy = function (props) {
+// Hideous reflection-based kludge to make it easy to create a
+// slightly modified copy of a parser.
+var obj = Object.create(Parser.prototype);
+for (var _i = 0, _a = Object.keys(this); _i < _a.length; _i++) {
+var key = _a[_i];
+obj[key] = key in props ? props[key] : this[key];
+}
+return obj;
+};
 /// Returns the name associated with a given term. This will only
@@ -1332,2 +1432,7 @@ /// work for all terms when the parser was generated with the
 });
+/// @internal
+Parser.prototype.dynamicPrecedence = function (term) {
+var prec = this.dynamicPrecedences;
+return prec == null ? 0 : prec[term] || 0;
+};
 Object.defineProperty(Parser.prototype, "topType", {
@@ -1361,34 +1466,3 @@ /// The node type produced by the default top rule.
 Parser.deserialize = function (spec) {
-var tokenArray = decodeArray(spec.tokenData);
-var nodeNames = spec.nodeNames.split(" "), minRepeatTerm = nodeNames.length;
-for (var i = 0; i < spec.repeatNodeCount; i++)
-nodeNames.push("");
-var nodeProps = [];
-for (var i = 0; i < nodeNames.length; i++)
-nodeProps.push(noProps);
-function setProp(nodeID, prop, value) {
-if (nodeProps[nodeID] == noProps)
-nodeProps[nodeID] = Object.create(null);
-prop.set(nodeProps[nodeID], prop.deserialize(String(value)));
-}
-setProp(0, NodeProp.error, "");
-if (spec.nodeProps)
-for (var _i = 0, _a = spec.nodeProps; _i < _a.length; _i++) {
-var propSpec = _a[_i];
-var prop = propSpec[0];
-for (var i = 1; i < propSpec.length; i += 2)
-setProp(propSpec[i], prop, propSpec[i + 1]);
-}
-var group = new NodeGroup(nodeNames.map(function (name, i) { return new NodeType(name, nodeProps[i], i); }));
-var specialized = new Uint16Array(spec.specialized ? spec.specialized.length : 0);
-var specializers = [];
-if (spec.specialized)
-for (var i = 0; i < spec.specialized.length; i++) {
-specialized[i] = spec.specialized[i].term;
-specializers[i] = spec.specialized[i].get;
-}
-return new Parser(decodeArray(spec.states, Uint32Array), decodeArray(spec.stateData), decodeArray(spec.goto), group, spec.maxTerm, minRepeatTerm, spec.tokenizers.map(function (value) { return typeof value == "number" ? new TokenGroup(tokenArray, value) : value; }), spec.topRules, (spec.nested || []).map(function (_a) {
-var name = _a[0], grammar = _a[1], endToken = _a[2], placeholder = _a[3];
-return ({ name: name, grammar: grammar, end: new TokenGroup(decodeArray(endToken), 0), placeholder: placeholder });
-}), spec.dialects || {}, specialized, specializers, spec.tokenPrec, spec.termNames);
+return new Parser(spec);
 };
@@ -1411,3 +1485,3 @@ return Parser;
 stack.cx.parser.stateFlag(stack.state, 2 /* Accepting */) &&
-(!best || best.recovered > stack.recovered))
+(!best || best.score < stack.score))
 best = stack;
@@ -1417,18 +1491,4 @@ }
 }
-function finishNested(stack) {
-if (stack.cx.wrapType == -2)
-return null; // Another nested stack already finished
-var parent = stack.cx.parent, tree = stack.forceAll().toTree();
-var parentParser = parent.cx.parser, info = parentParser.nested[parentParser.startNested(parent.state)];
-tree = new Tree(tree.type, tree.children, tree.positions.map(function (p) { return p - parent.pos; }), stack.pos - parent.pos);
-if (stack.cx.wrapType > -1)
-tree = new Tree(parentParser.group.types[stack.cx.wrapType], [tree], [0], tree.length);
-stack.cx.wrapType = -2;
-parent.useNode(tree, parentParser.getGoto(parent.state, info.placeholder, true));
-if (verbose)
-console.log(parent + (" (via unnest " + (stack.cx.wrapType > -1 ? parentParser.getName(stack.cx.wrapType) : tree.type.name) + ")"));
-return parent;
-}
 export { ExternalTokenizer, ParseContext, Parser, Stack, Token };
 //# sourceMappingURL=index.es.js.map
@@ -55,2 +55,3 @@ import { Stack } from "./stack";
 private strict;
+private nextStackID;
 advance(): Tree;
@@ -63,2 +64,4 @@ private advanceStack;
 private scanForNestEnd;
+private finishNested;
+private stackID;
 }
@@ -85,2 +88,3 @@ export declare class Dialect {
 withProps(...props: NodePropSource[]): Parser;
+private copy;
 getName(term: number): string;
@@ -87,0 +91,0 @@ get hasNested(): boolean;
@@ -12,4 +12,4 @@ import { StackContext } from "./parse";
 export declare const enum Recover {
-Token = 2,
-Reduce = 1,
+Token = 200,
+Reduce = 100,
 MaxNext = 4,
@@ -16,0 +16,0 @@ MaxInsertStackDepth = 300,
@@ -26,2 +26,3 @@ import { Stack } from "./stack";
 fallback: boolean;
+extend: boolean;
 }
@@ -31,2 +32,3 @@ interface ExternalOptions {
 fallback?: boolean;
+extend?: boolean;
 }
@@ -33,0 +35,0 @@ export declare class ExternalTokenizer {
 {
 "name": "lezer",
-"version": "0.10.1",
+"version": "0.10.2",
 "description": "Incremental parser",
@@ -5,0 +5,0 @@ "main": "dist/index.cjs",