Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign inDemoInstall
Socket

chevrotain

Package Overview
Dependencies
Maintainers
1
Versions
167
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

chevrotain - npm Package Compare versions

Comparing version 10.2.0 to 10.3.0

7

lib/src/api.js

@@ -63,9 +63,10 @@ "use strict";

Object.defineProperty(exports, "createSyntaxDiagramsCode", { enumerable: true, get: function () { return render_public_1.createSyntaxDiagramsCode; } });
class Parser {
constructor() {
var Parser = /** @class */ (function () {
function Parser() {
throw new Error("The Parser class has been deprecated, use CstParser or EmbeddedActionsParser instead.\t\n" +
"See: https://chevrotain.io/docs/changes/BREAKING_CHANGES.html#_7-0-0");
}
}
return Parser;
}());
exports.Parser = Parser;
//# sourceMappingURL=api.js.map
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createSyntaxDiagramsCode = void 0;
const version_1 = require("../version");
function createSyntaxDiagramsCode(grammar, { resourceBase = `https://unpkg.com/chevrotain@${version_1.VERSION}/diagrams/`, css = `https://unpkg.com/chevrotain@${version_1.VERSION}/diagrams/diagrams.css` } = {}) {
const header = `
<!-- This is a generated file -->
<!DOCTYPE html>
<meta charset="utf-8">
<style>
body {
background-color: hsl(30, 20%, 95%)
}
</style>
`;
const cssHtml = `
<link rel='stylesheet' href='${css}'>
`;
const scripts = `
<script src='${resourceBase}vendor/railroad-diagrams.js'></script>
<script src='${resourceBase}src/diagrams_builder.js'></script>
<script src='${resourceBase}src/diagrams_behavior.js'></script>
<script src='${resourceBase}src/main.js'></script>
`;
const diagramsDiv = `
<div id="diagrams" align="center"></div>
`;
const serializedGrammar = `
<script>
window.serializedGrammar = ${JSON.stringify(grammar, null, " ")};
</script>
`;
const initLogic = `
<script>
var diagramsDiv = document.getElementById("diagrams");
main.drawDiagramsFromSerializedGrammar(serializedGrammar, diagramsDiv);
</script>
`;
var version_1 = require("../version");
function createSyntaxDiagramsCode(grammar, _a) {
var _b = _a === void 0 ? {} : _a, _c = _b.resourceBase, resourceBase = _c === void 0 ? "https://unpkg.com/chevrotain@".concat(version_1.VERSION, "/diagrams/") : _c, _d = _b.css, css = _d === void 0 ? "https://unpkg.com/chevrotain@".concat(version_1.VERSION, "/diagrams/diagrams.css") : _d;
var header = "\n<!-- This is a generated file -->\n<!DOCTYPE html>\n<meta charset=\"utf-8\">\n<style>\n body {\n background-color: hsl(30, 20%, 95%)\n }\n</style>\n\n";
var cssHtml = "\n<link rel='stylesheet' href='".concat(css, "'>\n");
var scripts = "\n<script src='".concat(resourceBase, "vendor/railroad-diagrams.js'></script>\n<script src='").concat(resourceBase, "src/diagrams_builder.js'></script>\n<script src='").concat(resourceBase, "src/diagrams_behavior.js'></script>\n<script src='").concat(resourceBase, "src/main.js'></script>\n");
var diagramsDiv = "\n<div id=\"diagrams\" align=\"center\"></div> \n";
var serializedGrammar = "\n<script>\n window.serializedGrammar = ".concat(JSON.stringify(grammar, null, " "), ";\n</script>\n");
var initLogic = "\n<script>\n var diagramsDiv = document.getElementById(\"diagrams\");\n main.drawDiagramsFromSerializedGrammar(serializedGrammar, diagramsDiv);\n</script>\n";
return (header + cssHtml + scripts + diagramsDiv + serializedGrammar + initLogic);

@@ -41,0 +14,0 @@ }

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.defineNameProp = void 0;
const NAME = "name";
var NAME = "name";
function defineNameProp(obj, nameValue) {

@@ -6,0 +6,0 @@ Object.defineProperty(obj, NAME, {

@@ -6,23 +6,22 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.validateRedundantMethods = exports.validateMissingCstMethods = exports.validateVisitor = exports.CstVisitorDefinitionError = exports.createBaseVisitorConstructorWithDefaults = exports.createBaseSemanticVisitorConstructor = exports.defaultVisit = void 0;
const isEmpty_1 = __importDefault(require("lodash/isEmpty"));
const compact_1 = __importDefault(require("lodash/compact"));
const isArray_1 = __importDefault(require("lodash/isArray"));
const map_1 = __importDefault(require("lodash/map"));
const forEach_1 = __importDefault(require("lodash/forEach"));
const filter_1 = __importDefault(require("lodash/filter"));
const keys_1 = __importDefault(require("lodash/keys"));
const isFunction_1 = __importDefault(require("lodash/isFunction"));
const isUndefined_1 = __importDefault(require("lodash/isUndefined"));
const includes_1 = __importDefault(require("lodash/includes"));
const lang_extensions_1 = require("../../lang/lang_extensions");
exports.validateMissingCstMethods = exports.validateVisitor = exports.CstVisitorDefinitionError = exports.createBaseVisitorConstructorWithDefaults = exports.createBaseSemanticVisitorConstructor = exports.defaultVisit = void 0;
var isEmpty_1 = __importDefault(require("lodash/isEmpty"));
var compact_1 = __importDefault(require("lodash/compact"));
var isArray_1 = __importDefault(require("lodash/isArray"));
var map_1 = __importDefault(require("lodash/map"));
var forEach_1 = __importDefault(require("lodash/forEach"));
var filter_1 = __importDefault(require("lodash/filter"));
var keys_1 = __importDefault(require("lodash/keys"));
var isFunction_1 = __importDefault(require("lodash/isFunction"));
var isUndefined_1 = __importDefault(require("lodash/isUndefined"));
var lang_extensions_1 = require("../../lang/lang_extensions");
function defaultVisit(ctx, param) {
const childrenNames = (0, keys_1.default)(ctx);
const childrenNamesLength = childrenNames.length;
for (let i = 0; i < childrenNamesLength; i++) {
const currChildName = childrenNames[i];
const currChildArray = ctx[currChildName];
const currChildArrayLength = currChildArray.length;
for (let j = 0; j < currChildArrayLength; j++) {
const currChild = currChildArray[j];
var childrenNames = (0, keys_1.default)(ctx);
var childrenNamesLength = childrenNames.length;
for (var i = 0; i < childrenNamesLength; i++) {
var currChildName = childrenNames[i];
var currChildArray = ctx[currChildName];
var currChildArrayLength = currChildArray.length;
for (var j = 0; j < currChildArrayLength; j++) {
var currChild = currChildArray[j];
// distinction between Tokens Children and CstNode children

@@ -38,3 +37,3 @@ if (currChild.tokenTypeIdx === undefined) {

function createBaseSemanticVisitorConstructor(grammarName, ruleNames) {
const derivedConstructor = function () { };
var derivedConstructor = function () { };
// can be overwritten according to:

@@ -44,3 +43,3 @@ // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/

(0, lang_extensions_1.defineNameProp)(derivedConstructor, grammarName + "BaseSemantics");
const semanticProto = {
var semanticProto = {
visit: function (cstNode, param) {

@@ -60,7 +59,7 @@ // enables writing more concise visitor methods when CstNode has only a single child

validateVisitor: function () {
const semanticDefinitionErrors = validateVisitor(this, ruleNames);
var semanticDefinitionErrors = validateVisitor(this, ruleNames);
if (!(0, isEmpty_1.default)(semanticDefinitionErrors)) {
const errorMessages = (0, map_1.default)(semanticDefinitionErrors, (currDefError) => currDefError.msg);
throw Error(`Errors Detected in CST Visitor <${this.constructor.name}>:\n\t` +
`${errorMessages.join("\n\n").replace(/\n/g, "\n\t")}`);
var errorMessages = (0, map_1.default)(semanticDefinitionErrors, function (currDefError) { return currDefError.msg; });
throw Error("Errors Detected in CST Visitor <".concat(this.constructor.name, ">:\n\t") +
"".concat(errorMessages.join("\n\n").replace(/\n/g, "\n\t")));
}

@@ -76,3 +75,3 @@ }

function createBaseVisitorConstructorWithDefaults(grammarName, ruleNames, baseConstructor) {
const derivedConstructor = function () { };
var derivedConstructor = function () { };
// can be overwritten according to:

@@ -82,4 +81,4 @@ // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/

(0, lang_extensions_1.defineNameProp)(derivedConstructor, grammarName + "BaseSemanticsWithDefaults");
const withDefaultsProto = Object.create(baseConstructor.prototype);
(0, forEach_1.default)(ruleNames, (ruleName) => {
var withDefaultsProto = Object.create(baseConstructor.prototype);
(0, forEach_1.default)(ruleNames, function (ruleName) {
withDefaultsProto[ruleName] = defaultVisit;

@@ -98,14 +97,13 @@ });

function validateVisitor(visitorInstance, ruleNames) {
const missingErrors = validateMissingCstMethods(visitorInstance, ruleNames);
const redundantErrors = validateRedundantMethods(visitorInstance, ruleNames);
return missingErrors.concat(redundantErrors);
var missingErrors = validateMissingCstMethods(visitorInstance, ruleNames);
return missingErrors;
}
exports.validateVisitor = validateVisitor;
function validateMissingCstMethods(visitorInstance, ruleNames) {
const missingRuleNames = (0, filter_1.default)(ruleNames, (currRuleName) => {
var missingRuleNames = (0, filter_1.default)(ruleNames, function (currRuleName) {
return (0, isFunction_1.default)(visitorInstance[currRuleName]) === false;
});
const errors = (0, map_1.default)(missingRuleNames, (currRuleName) => {
var errors = (0, map_1.default)(missingRuleNames, function (currRuleName) {
return {
msg: `Missing visitor method: <${currRuleName}> on ${(visitorInstance.constructor.name)} CST Visitor.`,
msg: "Missing visitor method: <".concat(currRuleName, "> on ").concat((visitorInstance.constructor.name), " CST Visitor."),
type: CstVisitorDefinitionError.MISSING_METHOD,

@@ -118,21 +116,2 @@ methodName: currRuleName

exports.validateMissingCstMethods = validateMissingCstMethods;
// Method names that are always allowed on a visitor and never count as redundant.
const VALID_PROP_NAMES = ["constructor", "visit", "validateVisitor"];
/**
 * Scans the visitor's prototype for methods that do not correspond to any
 * grammar rule (and are not one of the built-in visitor properties) and
 * reports each one as a REDUNDANT_METHOD definition error.
 *
 * @param visitorInstance - the CST visitor instance under validation.
 * @param ruleNames - the grammar rule names a visitor method may legally match.
 * @returns an array of { msg, type, methodName } error descriptors (empty when valid).
 */
function validateRedundantMethods(visitorInstance, ruleNames) {
    const errors = [];
    const prototypeProps = Object.getOwnPropertyNames(visitorInstance.constructor.prototype);
    for (const prop of prototypeProps) {
        // A prop is redundant only if it is a function that is neither a
        // built-in visitor member nor a known grammar rule name.
        if (!(0, isFunction_1.default)(visitorInstance[prop])) {
            continue;
        }
        if ((0, includes_1.default)(VALID_PROP_NAMES, prop)) {
            continue;
        }
        if ((0, includes_1.default)(ruleNames, prop)) {
            continue;
        }
        errors.push({
            msg: `Redundant visitor method: <${prop}> on ${(visitorInstance.constructor.name)} CST Visitor\n` +
                `There is no Grammar Rule corresponding to this method's name.\n`,
            type: CstVisitorDefinitionError.REDUNDANT_METHOD,
            methodName: prop
        });
    }
    return errors;
}
exports.validateRedundantMethods = validateRedundantMethods;
//# sourceMappingURL=cst_visitor.js.map

@@ -7,25 +7,28 @@ "use strict";

exports.defaultGrammarValidatorErrorProvider = exports.defaultGrammarResolverErrorProvider = exports.defaultParserErrorProvider = void 0;
const tokens_public_1 = require("../scan/tokens_public");
const first_1 = __importDefault(require("lodash/first"));
const map_1 = __importDefault(require("lodash/map"));
const reduce_1 = __importDefault(require("lodash/reduce"));
const gast_1 = require("@chevrotain/gast");
const gast_2 = require("@chevrotain/gast");
var tokens_public_1 = require("../scan/tokens_public");
var first_1 = __importDefault(require("lodash/first"));
var map_1 = __importDefault(require("lodash/map"));
var reduce_1 = __importDefault(require("lodash/reduce"));
var gast_1 = require("@chevrotain/gast");
var gast_2 = require("@chevrotain/gast");
exports.defaultParserErrorProvider = {
buildMismatchTokenMessage({ expected, actual, previous, ruleName }) {
const hasLabel = (0, tokens_public_1.hasTokenLabel)(expected);
const expectedMsg = hasLabel
? `--> ${(0, tokens_public_1.tokenLabel)(expected)} <--`
: `token of type --> ${expected.name} <--`;
const msg = `Expecting ${expectedMsg} but found --> '${actual.image}' <--`;
buildMismatchTokenMessage: function (_a) {
var expected = _a.expected, actual = _a.actual, previous = _a.previous, ruleName = _a.ruleName;
var hasLabel = (0, tokens_public_1.hasTokenLabel)(expected);
var expectedMsg = hasLabel
? "--> ".concat((0, tokens_public_1.tokenLabel)(expected), " <--")
: "token of type --> ".concat(expected.name, " <--");
var msg = "Expecting ".concat(expectedMsg, " but found --> '").concat(actual.image, "' <--");
return msg;
},
buildNotAllInputParsedMessage({ firstRedundant, ruleName }) {
buildNotAllInputParsedMessage: function (_a) {
var firstRedundant = _a.firstRedundant, ruleName = _a.ruleName;
return "Redundant input, expecting EOF but found: " + firstRedundant.image;
},
buildNoViableAltMessage({ expectedPathsPerAlt, actual, previous, customUserDescription, ruleName }) {
const errPrefix = "Expecting: ";
buildNoViableAltMessage: function (_a) {
var expectedPathsPerAlt = _a.expectedPathsPerAlt, actual = _a.actual, previous = _a.previous, customUserDescription = _a.customUserDescription, ruleName = _a.ruleName;
var errPrefix = "Expecting: ";
// TODO: issue: No Viable Alternative Error may have incomplete details. #502
const actualText = (0, first_1.default)(actual).image;
const errSuffix = "\nbut found: '" + actualText + "'";
var actualText = (0, first_1.default)(actual).image;
var errSuffix = "\nbut found: '" + actualText + "'";
if (customUserDescription) {

@@ -35,14 +38,17 @@ return errPrefix + customUserDescription + errSuffix;

else {
const allLookAheadPaths = (0, reduce_1.default)(expectedPathsPerAlt, (result, currAltPaths) => result.concat(currAltPaths), []);
const nextValidTokenSequences = (0, map_1.default)(allLookAheadPaths, (currPath) => `[${(0, map_1.default)(currPath, (currTokenType) => (0, tokens_public_1.tokenLabel)(currTokenType)).join(", ")}]`);
const nextValidSequenceItems = (0, map_1.default)(nextValidTokenSequences, (itemMsg, idx) => ` ${idx + 1}. ${itemMsg}`);
const calculatedDescription = `one of these possible Token sequences:\n${nextValidSequenceItems.join("\n")}`;
var allLookAheadPaths = (0, reduce_1.default)(expectedPathsPerAlt, function (result, currAltPaths) { return result.concat(currAltPaths); }, []);
var nextValidTokenSequences = (0, map_1.default)(allLookAheadPaths, function (currPath) {
return "[".concat((0, map_1.default)(currPath, function (currTokenType) { return (0, tokens_public_1.tokenLabel)(currTokenType); }).join(", "), "]");
});
var nextValidSequenceItems = (0, map_1.default)(nextValidTokenSequences, function (itemMsg, idx) { return " ".concat(idx + 1, ". ").concat(itemMsg); });
var calculatedDescription = "one of these possible Token sequences:\n".concat(nextValidSequenceItems.join("\n"));
return errPrefix + calculatedDescription + errSuffix;
}
},
buildEarlyExitMessage({ expectedIterationPaths, actual, customUserDescription, ruleName }) {
const errPrefix = "Expecting: ";
buildEarlyExitMessage: function (_a) {
var expectedIterationPaths = _a.expectedIterationPaths, actual = _a.actual, customUserDescription = _a.customUserDescription, ruleName = _a.ruleName;
var errPrefix = "Expecting: ";
// TODO: issue: No Viable Alternative Error may have incomplete details. #502
const actualText = (0, first_1.default)(actual).image;
const errSuffix = "\nbut found: '" + actualText + "'";
var actualText = (0, first_1.default)(actual).image;
var errSuffix = "\nbut found: '" + actualText + "'";
if (customUserDescription) {

@@ -52,5 +58,7 @@ return errPrefix + customUserDescription + errSuffix;

else {
const nextValidTokenSequences = (0, map_1.default)(expectedIterationPaths, (currPath) => `[${(0, map_1.default)(currPath, (currTokenType) => (0, tokens_public_1.tokenLabel)(currTokenType)).join(",")}]`);
const calculatedDescription = `expecting at least one iteration which starts with one of these possible Token sequences::\n ` +
`<${nextValidTokenSequences.join(" ,")}>`;
var nextValidTokenSequences = (0, map_1.default)(expectedIterationPaths, function (currPath) {
return "[".concat((0, map_1.default)(currPath, function (currTokenType) { return (0, tokens_public_1.tokenLabel)(currTokenType); }).join(","), "]");
});
var calculatedDescription = "expecting at least one iteration which starts with one of these possible Token sequences::\n " +
"<".concat(nextValidTokenSequences.join(" ,"), ">");
return errPrefix + calculatedDescription + errSuffix;

@@ -62,4 +70,4 @@ }

exports.defaultGrammarResolverErrorProvider = {
buildRuleNotFoundError(topLevelRule, undefinedRule) {
const msg = "Invalid grammar, reference to a rule which is not defined: ->" +
buildRuleNotFoundError: function (topLevelRule, undefinedRule) {
var msg = "Invalid grammar, reference to a rule which is not defined: ->" +
undefinedRule.nonTerminalName +

@@ -74,3 +82,3 @@ "<-\n" +

exports.defaultGrammarValidatorErrorProvider = {
buildDuplicateFoundError(topLevelRule, duplicateProds) {
buildDuplicateFoundError: function (topLevelRule, duplicateProds) {
function getExtraProductionArgument(prod) {

@@ -87,12 +95,9 @@ if (prod instanceof gast_1.Terminal) {

}
const topLevelName = topLevelRule.name;
const duplicateProd = (0, first_1.default)(duplicateProds);
const index = duplicateProd.idx;
const dslName = (0, gast_2.getProductionDslName)(duplicateProd);
const extraArgument = getExtraProductionArgument(duplicateProd);
const hasExplicitIndex = index > 0;
let msg = `->${dslName}${hasExplicitIndex ? index : ""}<- ${extraArgument ? `with argument: ->${extraArgument}<-` : ""}
appears more than once (${duplicateProds.length} times) in the top level rule: ->${topLevelName}<-.
For further details see: https://chevrotain.io/docs/FAQ.html#NUMERICAL_SUFFIXES
`;
var topLevelName = topLevelRule.name;
var duplicateProd = (0, first_1.default)(duplicateProds);
var index = duplicateProd.idx;
var dslName = (0, gast_2.getProductionDslName)(duplicateProd);
var extraArgument = getExtraProductionArgument(duplicateProd);
var hasExplicitIndex = index > 0;
var msg = "->".concat(dslName).concat(hasExplicitIndex ? index : "", "<- ").concat(extraArgument ? "with argument: ->".concat(extraArgument, "<-") : "", "\n appears more than once (").concat(duplicateProds.length, " times) in the top level rule: ->").concat(topLevelName, "<-. \n For further details see: https://chevrotain.io/docs/FAQ.html#NUMERICAL_SUFFIXES \n ");
// white space trimming time! better to trim afterwards as it allows to use WELL formatted multi line template strings...

@@ -103,39 +108,43 @@ msg = msg.replace(/[ \t]+/g, " ");

},
buildNamespaceConflictError(rule) {
const errMsg = `Namespace conflict found in grammar.\n` +
`The grammar has both a Terminal(Token) and a Non-Terminal(Rule) named: <${rule.name}>.\n` +
`To resolve this make sure each Terminal and Non-Terminal names are unique\n` +
`This is easy to accomplish by using the convention that Terminal names start with an uppercase letter\n` +
`and Non-Terminal names start with a lower case letter.`;
buildNamespaceConflictError: function (rule) {
var errMsg = "Namespace conflict found in grammar.\n" +
"The grammar has both a Terminal(Token) and a Non-Terminal(Rule) named: <".concat(rule.name, ">.\n") +
"To resolve this make sure each Terminal and Non-Terminal names are unique\n" +
"This is easy to accomplish by using the convention that Terminal names start with an uppercase letter\n" +
"and Non-Terminal names start with a lower case letter.";
return errMsg;
},
buildAlternationPrefixAmbiguityError(options) {
const pathMsg = (0, map_1.default)(options.prefixPath, (currTok) => (0, tokens_public_1.tokenLabel)(currTok)).join(", ");
const occurrence = options.alternation.idx === 0 ? "" : options.alternation.idx;
const errMsg = `Ambiguous alternatives: <${options.ambiguityIndices.join(" ,")}> due to common lookahead prefix\n` +
`in <OR${occurrence}> inside <${options.topLevelRule.name}> Rule,\n` +
`<${pathMsg}> may appears as a prefix path in all these alternatives.\n` +
`See: https://chevrotain.io/docs/guide/resolving_grammar_errors.html#COMMON_PREFIX\n` +
`For Further details.`;
buildAlternationPrefixAmbiguityError: function (options) {
var pathMsg = (0, map_1.default)(options.prefixPath, function (currTok) {
return (0, tokens_public_1.tokenLabel)(currTok);
}).join(", ");
var occurrence = options.alternation.idx === 0 ? "" : options.alternation.idx;
var errMsg = "Ambiguous alternatives: <".concat(options.ambiguityIndices.join(" ,"), "> due to common lookahead prefix\n") +
"in <OR".concat(occurrence, "> inside <").concat(options.topLevelRule.name, "> Rule,\n") +
"<".concat(pathMsg, "> may appears as a prefix path in all these alternatives.\n") +
"See: https://chevrotain.io/docs/guide/resolving_grammar_errors.html#COMMON_PREFIX\n" +
"For Further details.";
return errMsg;
},
buildAlternationAmbiguityError(options) {
const pathMsg = (0, map_1.default)(options.prefixPath, (currtok) => (0, tokens_public_1.tokenLabel)(currtok)).join(", ");
const occurrence = options.alternation.idx === 0 ? "" : options.alternation.idx;
let currMessage = `Ambiguous Alternatives Detected: <${options.ambiguityIndices.join(" ,")}> in <OR${occurrence}>` +
` inside <${options.topLevelRule.name}> Rule,\n` +
`<${pathMsg}> may appears as a prefix path in all these alternatives.\n`;
buildAlternationAmbiguityError: function (options) {
var pathMsg = (0, map_1.default)(options.prefixPath, function (currtok) {
return (0, tokens_public_1.tokenLabel)(currtok);
}).join(", ");
var occurrence = options.alternation.idx === 0 ? "" : options.alternation.idx;
var currMessage = "Ambiguous Alternatives Detected: <".concat(options.ambiguityIndices.join(" ,"), "> in <OR").concat(occurrence, ">") +
" inside <".concat(options.topLevelRule.name, "> Rule,\n") +
"<".concat(pathMsg, "> may appears as a prefix path in all these alternatives.\n");
currMessage =
currMessage +
`See: https://chevrotain.io/docs/guide/resolving_grammar_errors.html#AMBIGUOUS_ALTERNATIVES\n` +
`For Further details.`;
"See: https://chevrotain.io/docs/guide/resolving_grammar_errors.html#AMBIGUOUS_ALTERNATIVES\n" +
"For Further details.";
return currMessage;
},
buildEmptyRepetitionError(options) {
let dslName = (0, gast_2.getProductionDslName)(options.repetition);
buildEmptyRepetitionError: function (options) {
var dslName = (0, gast_2.getProductionDslName)(options.repetition);
if (options.repetition.idx !== 0) {
dslName += options.repetition.idx;
}
const errMsg = `The repetition <${dslName}> within Rule <${options.topLevelRule.name}> can never consume any tokens.\n` +
`This could lead to an infinite loop.`;
var errMsg = "The repetition <".concat(dslName, "> within Rule <").concat(options.topLevelRule.name, "> can never consume any tokens.\n") +
"This could lead to an infinite loop.";
return errMsg;

@@ -145,28 +154,28 @@ },

// once this method is fully removed from this file
buildTokenNameError(options) {
buildTokenNameError: function (options) {
/* istanbul ignore next */
return "deprecated";
},
buildEmptyAlternationError(options) {
const errMsg = `Ambiguous empty alternative: <${options.emptyChoiceIdx + 1}>` +
` in <OR${options.alternation.idx}> inside <${options.topLevelRule.name}> Rule.\n` +
`Only the last alternative may be an empty alternative.`;
buildEmptyAlternationError: function (options) {
var errMsg = "Ambiguous empty alternative: <".concat(options.emptyChoiceIdx + 1, ">") +
" in <OR".concat(options.alternation.idx, "> inside <").concat(options.topLevelRule.name, "> Rule.\n") +
"Only the last alternative may be an empty alternative.";
return errMsg;
},
buildTooManyAlternativesError(options) {
const errMsg = `An Alternation cannot have more than 256 alternatives:\n` +
`<OR${options.alternation.idx}> inside <${options.topLevelRule.name}> Rule.\n has ${options.alternation.definition.length + 1} alternatives.`;
buildTooManyAlternativesError: function (options) {
var errMsg = "An Alternation cannot have more than 256 alternatives:\n" +
"<OR".concat(options.alternation.idx, "> inside <").concat(options.topLevelRule.name, "> Rule.\n has ").concat(options.alternation.definition.length + 1, " alternatives.");
return errMsg;
},
buildLeftRecursionError(options) {
const ruleName = options.topLevelRule.name;
const pathNames = (0, map_1.default)(options.leftRecursionPath, (currRule) => currRule.name);
const leftRecursivePath = `${ruleName} --> ${pathNames
buildLeftRecursionError: function (options) {
var ruleName = options.topLevelRule.name;
var pathNames = (0, map_1.default)(options.leftRecursionPath, function (currRule) { return currRule.name; });
var leftRecursivePath = "".concat(ruleName, " --> ").concat(pathNames
.concat([ruleName])
.join(" --> ")}`;
const errMsg = `Left Recursion found in grammar.\n` +
`rule: <${ruleName}> can be invoked from itself (directly or indirectly)\n` +
`without consuming any Tokens. The grammar path that causes this is: \n ${leftRecursivePath}\n` +
` To fix this refactor your grammar to remove the left recursion.\n` +
`see: https://en.wikipedia.org/wiki/LL_parser#Left_factoring.`;
.join(" --> "));
var errMsg = "Left Recursion found in grammar.\n" +
"rule: <".concat(ruleName, "> can be invoked from itself (directly or indirectly)\n") +
"without consuming any Tokens. The grammar path that causes this is: \n ".concat(leftRecursivePath, "\n") +
" To fix this refactor your grammar to remove the left recursion.\n" +
"see: https://en.wikipedia.org/wiki/LL_parser#Left_factoring.";
return errMsg;

@@ -176,8 +185,8 @@ },

// once this method is fully removed from this file
buildInvalidRuleNameError(options) {
buildInvalidRuleNameError: function (options) {
/* istanbul ignore next */
return "deprecated";
},
buildDuplicateRuleNameError(options) {
let ruleName;
buildDuplicateRuleNameError: function (options) {
var ruleName;
if (options.topLevelRule instanceof gast_1.Rule) {

@@ -189,3 +198,3 @@ ruleName = options.topLevelRule.name;

}
const errMsg = `Duplicate definition, rule: ->${ruleName}<- is already defined in the grammar: ->${options.grammarName}<-`;
var errMsg = "Duplicate definition, rule: ->".concat(ruleName, "<- is already defined in the grammar: ->").concat(options.grammarName, "<-");
return errMsg;

@@ -192,0 +201,0 @@ }

"use strict";
// TypeScript-emitted helper (ES5 target): implements `class D extends B`.
// Reuses a pre-existing global `__extends` (e.g. from tslib) when available.
var __extends = (this && this.__extends) || (function () {
// Lazily selects the best prototype-linking strategy on first call, then
// replaces itself so subsequent calls skip the feature detection.
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
// Fallback 1: mutate __proto__ directly where the engine supports it.
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
// Fallback 2: shallow-copy own enumerable props (no real prototype chain).
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
// Link static (constructor-level) members.
extendStatics(d, b);
// Link instance prototypes without invoking the base constructor.
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -7,8 +22,8 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

exports.EarlyExitException = exports.NotAllInputParsedException = exports.NoViableAltException = exports.MismatchedTokenException = exports.isRecognitionException = void 0;
const includes_1 = __importDefault(require("lodash/includes"));
const MISMATCHED_TOKEN_EXCEPTION = "MismatchedTokenException";
const NO_VIABLE_ALT_EXCEPTION = "NoViableAltException";
const EARLY_EXIT_EXCEPTION = "EarlyExitException";
const NOT_ALL_INPUT_PARSED_EXCEPTION = "NotAllInputParsedException";
const RECOGNITION_EXCEPTION_NAMES = [
var includes_1 = __importDefault(require("lodash/includes"));
var MISMATCHED_TOKEN_EXCEPTION = "MismatchedTokenException";
var NO_VIABLE_ALT_EXCEPTION = "NoViableAltException";
var EARLY_EXIT_EXCEPTION = "EarlyExitException";
var NOT_ALL_INPUT_PARSED_EXCEPTION = "NotAllInputParsedException";
var RECOGNITION_EXCEPTION_NAMES = [
MISMATCHED_TOKEN_EXCEPTION,

@@ -26,46 +41,62 @@ NO_VIABLE_ALT_EXCEPTION,

exports.isRecognitionException = isRecognitionException;
class RecognitionException extends Error {
constructor(message, token) {
super(message);
this.token = token;
this.resyncedTokens = [];
var RecognitionException = /** @class */ (function (_super) {
__extends(RecognitionException, _super);
function RecognitionException(message, token) {
var _newTarget = this.constructor;
var _this = _super.call(this, message) || this;
_this.token = token;
_this.resyncedTokens = [];
// fix prototype chain when typescript target is ES5
Object.setPrototypeOf(this, new.target.prototype);
Object.setPrototypeOf(_this, _newTarget.prototype);
/* istanbul ignore next - V8 workaround to remove constructor from stacktrace when typescript target is ES5 */
if (Error.captureStackTrace) {
Error.captureStackTrace(this, this.constructor);
Error.captureStackTrace(_this, _this.constructor);
}
return _this;
}
}
class MismatchedTokenException extends RecognitionException {
constructor(message, token, previousToken) {
super(message, token);
this.previousToken = previousToken;
this.name = MISMATCHED_TOKEN_EXCEPTION;
return RecognitionException;
}(Error));
var MismatchedTokenException = /** @class */ (function (_super) {
__extends(MismatchedTokenException, _super);
function MismatchedTokenException(message, token, previousToken) {
var _this = _super.call(this, message, token) || this;
_this.previousToken = previousToken;
_this.name = MISMATCHED_TOKEN_EXCEPTION;
return _this;
}
}
return MismatchedTokenException;
}(RecognitionException));
exports.MismatchedTokenException = MismatchedTokenException;
class NoViableAltException extends RecognitionException {
constructor(message, token, previousToken) {
super(message, token);
this.previousToken = previousToken;
this.name = NO_VIABLE_ALT_EXCEPTION;
var NoViableAltException = /** @class */ (function (_super) {
__extends(NoViableAltException, _super);
function NoViableAltException(message, token, previousToken) {
var _this = _super.call(this, message, token) || this;
_this.previousToken = previousToken;
_this.name = NO_VIABLE_ALT_EXCEPTION;
return _this;
}
}
return NoViableAltException;
}(RecognitionException));
exports.NoViableAltException = NoViableAltException;
class NotAllInputParsedException extends RecognitionException {
constructor(message, token) {
super(message, token);
this.name = NOT_ALL_INPUT_PARSED_EXCEPTION;
var NotAllInputParsedException = /** @class */ (function (_super) {
__extends(NotAllInputParsedException, _super);
function NotAllInputParsedException(message, token) {
var _this = _super.call(this, message, token) || this;
_this.name = NOT_ALL_INPUT_PARSED_EXCEPTION;
return _this;
}
}
return NotAllInputParsedException;
}(RecognitionException));
exports.NotAllInputParsedException = NotAllInputParsedException;
class EarlyExitException extends RecognitionException {
constructor(message, token, previousToken) {
super(message, token);
this.previousToken = previousToken;
this.name = EARLY_EXIT_EXCEPTION;
var EarlyExitException = /** @class */ (function (_super) {
__extends(EarlyExitException, _super);
function EarlyExitException(message, token, previousToken) {
var _this = _super.call(this, message, token) || this;
_this.previousToken = previousToken;
_this.name = EARLY_EXIT_EXCEPTION;
return _this;
}
}
return EarlyExitException;
}(RecognitionException));
exports.EarlyExitException = EarlyExitException;
//# sourceMappingURL=exceptions_public.js.map
"use strict";
// TypeScript-emitted helper (ES5 target): implements `class D extends B`.
// Reuses a pre-existing global `__extends` (e.g. from tslib) when available.
var __extends = (this && this.__extends) || (function () {
// Lazily selects the best prototype-linking strategy on first call, then
// replaces itself so subsequent calls skip the feature detection.
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
// Fallback 1: mutate __proto__ directly where the engine supports it.
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
// Fallback 2: shallow-copy own enumerable props (no real prototype chain).
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
// Link static (constructor-level) members.
extendStatics(d, b);
// Link instance prototypes without invoking the base constructor.
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -7,43 +22,55 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

exports.checkPrefixAlternativesAmbiguities = exports.validateSomeNonEmptyLookaheadPath = exports.validateTooManyAlts = exports.RepetitionCollector = exports.validateAmbiguousAlternationAlternatives = exports.validateEmptyOrAlternative = exports.getFirstNoneTerminal = exports.validateNoLeftRecursion = exports.validateRuleIsOverridden = exports.validateRuleDoesNotAlreadyExist = exports.OccurrenceValidationCollector = exports.identifyProductionForDuplicates = exports.validateGrammar = void 0;
const first_1 = __importDefault(require("lodash/first"));
const isEmpty_1 = __importDefault(require("lodash/isEmpty"));
const drop_1 = __importDefault(require("lodash/drop"));
const flatten_1 = __importDefault(require("lodash/flatten"));
const filter_1 = __importDefault(require("lodash/filter"));
const reject_1 = __importDefault(require("lodash/reject"));
const difference_1 = __importDefault(require("lodash/difference"));
const map_1 = __importDefault(require("lodash/map"));
const forEach_1 = __importDefault(require("lodash/forEach"));
const groupBy_1 = __importDefault(require("lodash/groupBy"));
const reduce_1 = __importDefault(require("lodash/reduce"));
const pickBy_1 = __importDefault(require("lodash/pickBy"));
const values_1 = __importDefault(require("lodash/values"));
const includes_1 = __importDefault(require("lodash/includes"));
const flatMap_1 = __importDefault(require("lodash/flatMap"));
const clone_1 = __importDefault(require("lodash/clone"));
const parser_1 = require("../parser/parser");
const gast_1 = require("@chevrotain/gast");
const lookahead_1 = require("./lookahead");
const interpreter_1 = require("./interpreter");
const gast_2 = require("@chevrotain/gast");
const gast_3 = require("@chevrotain/gast");
const dropRight_1 = __importDefault(require("lodash/dropRight"));
const compact_1 = __importDefault(require("lodash/compact"));
const tokens_1 = require("../../scan/tokens");
var first_1 = __importDefault(require("lodash/first"));
var isEmpty_1 = __importDefault(require("lodash/isEmpty"));
var drop_1 = __importDefault(require("lodash/drop"));
var flatten_1 = __importDefault(require("lodash/flatten"));
var filter_1 = __importDefault(require("lodash/filter"));
var reject_1 = __importDefault(require("lodash/reject"));
var difference_1 = __importDefault(require("lodash/difference"));
var map_1 = __importDefault(require("lodash/map"));
var forEach_1 = __importDefault(require("lodash/forEach"));
var groupBy_1 = __importDefault(require("lodash/groupBy"));
var reduce_1 = __importDefault(require("lodash/reduce"));
var pickBy_1 = __importDefault(require("lodash/pickBy"));
var values_1 = __importDefault(require("lodash/values"));
var includes_1 = __importDefault(require("lodash/includes"));
var flatMap_1 = __importDefault(require("lodash/flatMap"));
var clone_1 = __importDefault(require("lodash/clone"));
var parser_1 = require("../parser/parser");
var gast_1 = require("@chevrotain/gast");
var lookahead_1 = require("./lookahead");
var interpreter_1 = require("./interpreter");
var gast_2 = require("@chevrotain/gast");
var gast_3 = require("@chevrotain/gast");
var dropRight_1 = __importDefault(require("lodash/dropRight"));
var compact_1 = __importDefault(require("lodash/compact"));
var tokens_1 = require("../../scan/tokens");
function validateGrammar(topLevels, globalMaxLookahead, tokenTypes, errMsgProvider, grammarName) {
const duplicateErrors = (0, flatMap_1.default)(topLevels, (currTopLevel) => validateDuplicateProductions(currTopLevel, errMsgProvider));
const leftRecursionErrors = (0, flatMap_1.default)(topLevels, (currTopRule) => validateNoLeftRecursion(currTopRule, currTopRule, errMsgProvider));
let emptyAltErrors = [];
let ambiguousAltsErrors = [];
let emptyRepetitionErrors = [];
var duplicateErrors = (0, flatMap_1.default)(topLevels, function (currTopLevel) {
return validateDuplicateProductions(currTopLevel, errMsgProvider);
});
var leftRecursionErrors = (0, flatMap_1.default)(topLevels, function (currTopRule) {
return validateNoLeftRecursion(currTopRule, currTopRule, errMsgProvider);
});
var emptyAltErrors = [];
var ambiguousAltsErrors = [];
var emptyRepetitionErrors = [];
// left recursion could cause infinite loops in the following validations.
// It is safest to first have the user fix the left recursion errors first and only then examine Further issues.
if ((0, isEmpty_1.default)(leftRecursionErrors)) {
emptyAltErrors = (0, flatMap_1.default)(topLevels, (currTopRule) => validateEmptyOrAlternative(currTopRule, errMsgProvider));
ambiguousAltsErrors = (0, flatMap_1.default)(topLevels, (currTopRule) => validateAmbiguousAlternationAlternatives(currTopRule, globalMaxLookahead, errMsgProvider));
emptyAltErrors = (0, flatMap_1.default)(topLevels, function (currTopRule) {
return validateEmptyOrAlternative(currTopRule, errMsgProvider);
});
ambiguousAltsErrors = (0, flatMap_1.default)(topLevels, function (currTopRule) {
return validateAmbiguousAlternationAlternatives(currTopRule, globalMaxLookahead, errMsgProvider);
});
emptyRepetitionErrors = validateSomeNonEmptyLookaheadPath(topLevels, globalMaxLookahead, errMsgProvider);
}
const termsNamespaceConflictErrors = checkTerminalAndNoneTerminalsNameSpace(topLevels, tokenTypes, errMsgProvider);
const tooManyAltsErrors = (0, flatMap_1.default)(topLevels, (curRule) => validateTooManyAlts(curRule, errMsgProvider));
const duplicateRulesError = (0, flatMap_1.default)(topLevels, (curRule) => validateRuleDoesNotAlreadyExist(curRule, topLevels, grammarName, errMsgProvider));
var termsNamespaceConflictErrors = checkTerminalAndNoneTerminalsNameSpace(topLevels, tokenTypes, errMsgProvider);
var tooManyAltsErrors = (0, flatMap_1.default)(topLevels, function (curRule) {
return validateTooManyAlts(curRule, errMsgProvider);
});
var duplicateRulesError = (0, flatMap_1.default)(topLevels, function (curRule) {
return validateRuleDoesNotAlreadyExist(curRule, topLevels, grammarName, errMsgProvider);
});
return duplicateErrors.concat(emptyRepetitionErrors, leftRecursionErrors, emptyAltErrors, ambiguousAltsErrors, termsNamespaceConflictErrors, tooManyAltsErrors, duplicateRulesError);

@@ -53,14 +80,14 @@ }

function validateDuplicateProductions(topLevelRule, errMsgProvider) {
const collectorVisitor = new OccurrenceValidationCollector();
var collectorVisitor = new OccurrenceValidationCollector();
topLevelRule.accept(collectorVisitor);
const allRuleProductions = collectorVisitor.allProductions;
const productionGroups = (0, groupBy_1.default)(allRuleProductions, identifyProductionForDuplicates);
const duplicates = (0, pickBy_1.default)(productionGroups, (currGroup) => {
var allRuleProductions = collectorVisitor.allProductions;
var productionGroups = (0, groupBy_1.default)(allRuleProductions, identifyProductionForDuplicates);
var duplicates = (0, pickBy_1.default)(productionGroups, function (currGroup) {
return currGroup.length > 1;
});
const errors = (0, map_1.default)((0, values_1.default)(duplicates), (currDuplicates) => {
const firstProd = (0, first_1.default)(currDuplicates);
const msg = errMsgProvider.buildDuplicateFoundError(topLevelRule, currDuplicates);
const dslName = (0, gast_1.getProductionDslName)(firstProd);
const defError = {
var errors = (0, map_1.default)((0, values_1.default)(duplicates), function (currDuplicates) {
var firstProd = (0, first_1.default)(currDuplicates);
var msg = errMsgProvider.buildDuplicateFoundError(topLevelRule, currDuplicates);
var dslName = (0, gast_1.getProductionDslName)(firstProd);
var defError = {
message: msg,

@@ -72,3 +99,3 @@ type: parser_1.ParserDefinitionErrorType.DUPLICATE_PRODUCTIONS,

};
const param = getExtraProductionArgument(firstProd);
var param = getExtraProductionArgument(firstProd);
if (param) {

@@ -82,3 +109,3 @@ defError.parameter = param;

function identifyProductionForDuplicates(prod) {
return `${(0, gast_1.getProductionDslName)(prod)}_#_${prod.idx}_#_${getExtraProductionArgument(prod)}`;
return "".concat((0, gast_1.getProductionDslName)(prod), "_#_").concat(prod.idx, "_#_").concat(getExtraProductionArgument(prod));
}

@@ -97,36 +124,39 @@ exports.identifyProductionForDuplicates = identifyProductionForDuplicates;

}
class OccurrenceValidationCollector extends gast_3.GAstVisitor {
constructor() {
super(...arguments);
this.allProductions = [];
var OccurrenceValidationCollector = /** @class */ (function (_super) {
__extends(OccurrenceValidationCollector, _super);
function OccurrenceValidationCollector() {
var _this = _super !== null && _super.apply(this, arguments) || this;
_this.allProductions = [];
return _this;
}
visitNonTerminal(subrule) {
OccurrenceValidationCollector.prototype.visitNonTerminal = function (subrule) {
this.allProductions.push(subrule);
}
visitOption(option) {
};
OccurrenceValidationCollector.prototype.visitOption = function (option) {
this.allProductions.push(option);
}
visitRepetitionWithSeparator(manySep) {
};
OccurrenceValidationCollector.prototype.visitRepetitionWithSeparator = function (manySep) {
this.allProductions.push(manySep);
}
visitRepetitionMandatory(atLeastOne) {
};
OccurrenceValidationCollector.prototype.visitRepetitionMandatory = function (atLeastOne) {
this.allProductions.push(atLeastOne);
}
visitRepetitionMandatoryWithSeparator(atLeastOneSep) {
};
OccurrenceValidationCollector.prototype.visitRepetitionMandatoryWithSeparator = function (atLeastOneSep) {
this.allProductions.push(atLeastOneSep);
}
visitRepetition(many) {
};
OccurrenceValidationCollector.prototype.visitRepetition = function (many) {
this.allProductions.push(many);
}
visitAlternation(or) {
};
OccurrenceValidationCollector.prototype.visitAlternation = function (or) {
this.allProductions.push(or);
}
visitTerminal(terminal) {
};
OccurrenceValidationCollector.prototype.visitTerminal = function (terminal) {
this.allProductions.push(terminal);
}
}
};
return OccurrenceValidationCollector;
}(gast_3.GAstVisitor));
exports.OccurrenceValidationCollector = OccurrenceValidationCollector;
function validateRuleDoesNotAlreadyExist(rule, allRules, className, errMsgProvider) {
const errors = [];
const occurrences = (0, reduce_1.default)(allRules, (result, curRule) => {
var errors = [];
var occurrences = (0, reduce_1.default)(allRules, function (result, curRule) {
if (curRule.name === rule.name) {

@@ -138,3 +168,3 @@ return result + 1;

if (occurrences > 1) {
const errMsg = errMsgProvider.buildDuplicateRuleNameError({
var errMsg = errMsgProvider.buildDuplicateRuleNameError({
topLevelRule: rule,

@@ -156,8 +186,8 @@ grammarName: className

function validateRuleIsOverridden(ruleName, definedRulesNames, className) {
const errors = [];
let errMsg;
var errors = [];
var errMsg;
if (!(0, includes_1.default)(definedRulesNames, ruleName)) {
errMsg =
`Invalid rule override, rule: ->${ruleName}<- cannot be overridden in the grammar: ->${className}<-` +
`as it is not defined in any of the super grammars `;
"Invalid rule override, rule: ->".concat(ruleName, "<- cannot be overridden in the grammar: ->").concat(className, "<-") +
"as it is not defined in any of the super grammars ";
errors.push({

@@ -172,5 +202,6 @@ message: errMsg,

exports.validateRuleIsOverridden = validateRuleIsOverridden;
function validateNoLeftRecursion(topRule, currRule, errMsgProvider, path = []) {
const errors = [];
const nextNonTerminals = getFirstNoneTerminal(currRule.definition);
function validateNoLeftRecursion(topRule, currRule, errMsgProvider, path) {
if (path === void 0) { path = []; }
var errors = [];
var nextNonTerminals = getFirstNoneTerminal(currRule.definition);
if ((0, isEmpty_1.default)(nextNonTerminals)) {

@@ -180,4 +211,4 @@ return [];

else {
const ruleName = topRule.name;
const foundLeftRecursion = (0, includes_1.default)(nextNonTerminals, topRule);
var ruleName = topRule.name;
var foundLeftRecursion = (0, includes_1.default)(nextNonTerminals, topRule);
if (foundLeftRecursion) {

@@ -195,5 +226,5 @@ errors.push({

// other cyclic paths are ignored, we still need this difference to avoid infinite loops...
const validNextSteps = (0, difference_1.default)(nextNonTerminals, path.concat([topRule]));
const errorsFromNextSteps = (0, flatMap_1.default)(validNextSteps, (currRefRule) => {
const newPath = (0, clone_1.default)(path);
var validNextSteps = (0, difference_1.default)(nextNonTerminals, path.concat([topRule]));
var errorsFromNextSteps = (0, flatMap_1.default)(validNextSteps, function (currRefRule) {
var newPath = (0, clone_1.default)(path);
newPath.push(currRefRule);

@@ -207,7 +238,7 @@ return validateNoLeftRecursion(topRule, currRefRule, errMsgProvider, newPath);

function getFirstNoneTerminal(definition) {
let result = [];
var result = [];
if ((0, isEmpty_1.default)(definition)) {
return result;
}
const firstProd = (0, first_1.default)(definition);
var firstProd = (0, first_1.default)(definition);
/* istanbul ignore else */

@@ -227,3 +258,5 @@ if (firstProd instanceof gast_2.NonTerminal) {

// each sub definition in alternation is a FLAT
result = (0, flatten_1.default)((0, map_1.default)(firstProd.definition, (currSubDef) => getFirstNoneTerminal(currSubDef.definition)));
result = (0, flatten_1.default)((0, map_1.default)(firstProd.definition, function (currSubDef) {
return getFirstNoneTerminal(currSubDef.definition);
}));
}

@@ -236,6 +269,6 @@ else if (firstProd instanceof gast_2.Terminal) {

}
const isFirstOptional = (0, gast_1.isOptionalProd)(firstProd);
const hasMore = definition.length > 1;
var isFirstOptional = (0, gast_1.isOptionalProd)(firstProd);
var hasMore = definition.length > 1;
if (isFirstOptional && hasMore) {
const rest = (0, drop_1.default)(definition);
var rest = (0, drop_1.default)(definition);
return result.concat(getFirstNoneTerminal(rest));

@@ -248,19 +281,22 @@ }

exports.getFirstNoneTerminal = getFirstNoneTerminal;
class OrCollector extends gast_3.GAstVisitor {
constructor() {
super(...arguments);
this.alternations = [];
var OrCollector = /** @class */ (function (_super) {
__extends(OrCollector, _super);
function OrCollector() {
var _this = _super !== null && _super.apply(this, arguments) || this;
_this.alternations = [];
return _this;
}
visitAlternation(node) {
OrCollector.prototype.visitAlternation = function (node) {
this.alternations.push(node);
}
}
};
return OrCollector;
}(gast_3.GAstVisitor));
function validateEmptyOrAlternative(topLevelRule, errMsgProvider) {
const orCollector = new OrCollector();
var orCollector = new OrCollector();
topLevelRule.accept(orCollector);
const ors = orCollector.alternations;
const errors = (0, flatMap_1.default)(ors, (currOr) => {
const exceptLast = (0, dropRight_1.default)(currOr.definition);
return (0, flatMap_1.default)(exceptLast, (currAlternative, currAltIdx) => {
const possibleFirstInAlt = (0, interpreter_1.nextPossibleTokensAfter)([currAlternative], [], tokens_1.tokenStructuredMatcher, 1);
var ors = orCollector.alternations;
var errors = (0, flatMap_1.default)(ors, function (currOr) {
var exceptLast = (0, dropRight_1.default)(currOr.definition);
return (0, flatMap_1.default)(exceptLast, function (currAlternative, currAltIdx) {
var possibleFirstInAlt = (0, interpreter_1.nextPossibleTokensAfter)([currAlternative], [], tokens_1.tokenStructuredMatcher, 1);
if ((0, isEmpty_1.default)(possibleFirstInAlt)) {

@@ -290,14 +326,14 @@ return [

function validateAmbiguousAlternationAlternatives(topLevelRule, globalMaxLookahead, errMsgProvider) {
const orCollector = new OrCollector();
var orCollector = new OrCollector();
topLevelRule.accept(orCollector);
let ors = orCollector.alternations;
var ors = orCollector.alternations;
// New Handling of ignoring ambiguities
// - https://github.com/chevrotain/chevrotain/issues/869
ors = (0, reject_1.default)(ors, (currOr) => currOr.ignoreAmbiguities === true);
const errors = (0, flatMap_1.default)(ors, (currOr) => {
const currOccurrence = currOr.idx;
const actualMaxLookahead = currOr.maxLookahead || globalMaxLookahead;
const alternatives = (0, lookahead_1.getLookaheadPathsForOr)(currOccurrence, topLevelRule, actualMaxLookahead, currOr);
const altsAmbiguityErrors = checkAlternativesAmbiguities(alternatives, currOr, topLevelRule, errMsgProvider);
const altsPrefixAmbiguityErrors = checkPrefixAlternativesAmbiguities(alternatives, currOr, topLevelRule, errMsgProvider);
ors = (0, reject_1.default)(ors, function (currOr) { return currOr.ignoreAmbiguities === true; });
var errors = (0, flatMap_1.default)(ors, function (currOr) {
var currOccurrence = currOr.idx;
var actualMaxLookahead = currOr.maxLookahead || globalMaxLookahead;
var alternatives = (0, lookahead_1.getLookaheadPathsForOr)(currOccurrence, topLevelRule, actualMaxLookahead, currOr);
var altsAmbiguityErrors = checkAlternativesAmbiguities(alternatives, currOr, topLevelRule, errMsgProvider);
var altsPrefixAmbiguityErrors = checkPrefixAlternativesAmbiguities(alternatives, currOr, topLevelRule, errMsgProvider);
return altsAmbiguityErrors.concat(altsPrefixAmbiguityErrors);

@@ -308,26 +344,29 @@ });

exports.validateAmbiguousAlternationAlternatives = validateAmbiguousAlternationAlternatives;
class RepetitionCollector extends gast_3.GAstVisitor {
constructor() {
super(...arguments);
this.allProductions = [];
var RepetitionCollector = /** @class */ (function (_super) {
__extends(RepetitionCollector, _super);
function RepetitionCollector() {
var _this = _super !== null && _super.apply(this, arguments) || this;
_this.allProductions = [];
return _this;
}
visitRepetitionWithSeparator(manySep) {
RepetitionCollector.prototype.visitRepetitionWithSeparator = function (manySep) {
this.allProductions.push(manySep);
}
visitRepetitionMandatory(atLeastOne) {
};
RepetitionCollector.prototype.visitRepetitionMandatory = function (atLeastOne) {
this.allProductions.push(atLeastOne);
}
visitRepetitionMandatoryWithSeparator(atLeastOneSep) {
};
RepetitionCollector.prototype.visitRepetitionMandatoryWithSeparator = function (atLeastOneSep) {
this.allProductions.push(atLeastOneSep);
}
visitRepetition(many) {
};
RepetitionCollector.prototype.visitRepetition = function (many) {
this.allProductions.push(many);
}
}
};
return RepetitionCollector;
}(gast_3.GAstVisitor));
exports.RepetitionCollector = RepetitionCollector;
function validateTooManyAlts(topLevelRule, errMsgProvider) {
const orCollector = new OrCollector();
var orCollector = new OrCollector();
topLevelRule.accept(orCollector);
const ors = orCollector.alternations;
const errors = (0, flatMap_1.default)(ors, (currOr) => {
var ors = orCollector.alternations;
var errors = (0, flatMap_1.default)(ors, function (currOr) {
if (currOr.definition.length > 255) {

@@ -354,15 +393,15 @@ return [

function validateSomeNonEmptyLookaheadPath(topLevelRules, maxLookahead, errMsgProvider) {
const errors = [];
(0, forEach_1.default)(topLevelRules, (currTopRule) => {
const collectorVisitor = new RepetitionCollector();
var errors = [];
(0, forEach_1.default)(topLevelRules, function (currTopRule) {
var collectorVisitor = new RepetitionCollector();
currTopRule.accept(collectorVisitor);
const allRuleProductions = collectorVisitor.allProductions;
(0, forEach_1.default)(allRuleProductions, (currProd) => {
const prodType = (0, lookahead_1.getProdType)(currProd);
const actualMaxLookahead = currProd.maxLookahead || maxLookahead;
const currOccurrence = currProd.idx;
const paths = (0, lookahead_1.getLookaheadPathsForOptionalProd)(currOccurrence, currTopRule, prodType, actualMaxLookahead);
const pathsInsideProduction = paths[0];
var allRuleProductions = collectorVisitor.allProductions;
(0, forEach_1.default)(allRuleProductions, function (currProd) {
var prodType = (0, lookahead_1.getProdType)(currProd);
var actualMaxLookahead = currProd.maxLookahead || maxLookahead;
var currOccurrence = currProd.idx;
var paths = (0, lookahead_1.getLookaheadPathsForOptionalProd)(currOccurrence, currTopRule, prodType, actualMaxLookahead);
var pathsInsideProduction = paths[0];
if ((0, isEmpty_1.default)((0, flatten_1.default)(pathsInsideProduction))) {
const errMsg = errMsgProvider.buildEmptyRepetitionError({
var errMsg = errMsgProvider.buildEmptyRepetitionError({
topLevelRule: currTopRule,

@@ -383,4 +422,4 @@ repetition: currProd

function checkAlternativesAmbiguities(alternatives, alternation, rule, errMsgProvider) {
const foundAmbiguousPaths = [];
const identicalAmbiguities = (0, reduce_1.default)(alternatives, (result, currAlt, currAltIdx) => {
var foundAmbiguousPaths = [];
var identicalAmbiguities = (0, reduce_1.default)(alternatives, function (result, currAlt, currAltIdx) {
// ignore (skip) ambiguities with this alternative

@@ -390,5 +429,5 @@ if (alternation.definition[currAltIdx].ignoreAmbiguities === true) {

}
(0, forEach_1.default)(currAlt, (currPath) => {
const altsCurrPathAppearsIn = [currAltIdx];
(0, forEach_1.default)(alternatives, (currOtherAlt, currOtherAltIdx) => {
(0, forEach_1.default)(currAlt, function (currPath) {
var altsCurrPathAppearsIn = [currAltIdx];
(0, forEach_1.default)(alternatives, function (currOtherAlt, currOtherAltIdx) {
if (currAltIdx !== currOtherAltIdx &&

@@ -412,5 +451,5 @@ (0, lookahead_1.containsPath)(currOtherAlt, currPath) &&

}, []);
const currErrors = (0, map_1.default)(identicalAmbiguities, (currAmbDescriptor) => {
const ambgIndices = (0, map_1.default)(currAmbDescriptor.alts, (currAltIdx) => currAltIdx + 1);
const currMessage = errMsgProvider.buildAlternationAmbiguityError({
var currErrors = (0, map_1.default)(identicalAmbiguities, function (currAmbDescriptor) {
var ambgIndices = (0, map_1.default)(currAmbDescriptor.alts, function (currAltIdx) { return currAltIdx + 1; });
var currMessage = errMsgProvider.buildAlternationAmbiguityError({
topLevelRule: rule,

@@ -433,4 +472,4 @@ alternation: alternation,

// flatten
const pathsAndIndices = (0, reduce_1.default)(alternatives, (result, currAlt, idx) => {
const currPathsAndIdx = (0, map_1.default)(currAlt, (currPath) => {
var pathsAndIndices = (0, reduce_1.default)(alternatives, function (result, currAlt, idx) {
var currPathsAndIdx = (0, map_1.default)(currAlt, function (currPath) {
return { idx: idx, path: currPath };

@@ -440,4 +479,4 @@ });

}, []);
const errors = (0, compact_1.default)((0, flatMap_1.default)(pathsAndIndices, (currPathAndIdx) => {
const alternativeGast = alternation.definition[currPathAndIdx.idx];
var errors = (0, compact_1.default)((0, flatMap_1.default)(pathsAndIndices, function (currPathAndIdx) {
var alternativeGast = alternation.definition[currPathAndIdx.idx];
// ignore (skip) ambiguities with this alternative

@@ -447,5 +486,5 @@ if (alternativeGast.ignoreAmbiguities === true) {

}
const targetIdx = currPathAndIdx.idx;
const targetPath = currPathAndIdx.path;
const prefixAmbiguitiesPathsAndIndices = (0, filter_1.default)(pathsAndIndices, (searchPathAndIdx) => {
var targetIdx = currPathAndIdx.idx;
var targetPath = currPathAndIdx.path;
var prefixAmbiguitiesPathsAndIndices = (0, filter_1.default)(pathsAndIndices, function (searchPathAndIdx) {
// prefix ambiguity can only be created from lower idx (higher priority) path

@@ -461,6 +500,6 @@ return (

});
const currPathPrefixErrors = (0, map_1.default)(prefixAmbiguitiesPathsAndIndices, (currAmbPathAndIdx) => {
const ambgIndices = [currAmbPathAndIdx.idx + 1, targetIdx + 1];
const occurrence = alternation.idx === 0 ? "" : alternation.idx;
const message = errMsgProvider.buildAlternationPrefixAmbiguityError({
var currPathPrefixErrors = (0, map_1.default)(prefixAmbiguitiesPathsAndIndices, function (currAmbPathAndIdx) {
var ambgIndices = [currAmbPathAndIdx.idx + 1, targetIdx + 1];
var occurrence = alternation.idx === 0 ? "" : alternation.idx;
var message = errMsgProvider.buildAlternationPrefixAmbiguityError({
topLevelRule: rule,

@@ -485,8 +524,8 @@ alternation: alternation,

function checkTerminalAndNoneTerminalsNameSpace(topLevels, tokenTypes, errMsgProvider) {
const errors = [];
const tokenNames = (0, map_1.default)(tokenTypes, (currToken) => currToken.name);
(0, forEach_1.default)(topLevels, (currRule) => {
const currRuleName = currRule.name;
var errors = [];
var tokenNames = (0, map_1.default)(tokenTypes, function (currToken) { return currToken.name; });
(0, forEach_1.default)(topLevels, function (currRule) {
var currRuleName = currRule.name;
if ((0, includes_1.default)(tokenNames, currRuleName)) {
const errMsg = errMsgProvider.buildNamespaceConflictError(currRule);
var errMsg = errMsgProvider.buildNamespaceConflictError(currRule);
errors.push({

@@ -493,0 +532,0 @@ message: errMsg,

@@ -7,7 +7,7 @@ "use strict";

exports.firstForTerminal = exports.firstForBranching = exports.firstForSequence = exports.first = void 0;
const flatten_1 = __importDefault(require("lodash/flatten"));
const uniq_1 = __importDefault(require("lodash/uniq"));
const map_1 = __importDefault(require("lodash/map"));
const gast_1 = require("@chevrotain/gast");
const gast_2 = require("@chevrotain/gast");
var flatten_1 = __importDefault(require("lodash/flatten"));
var uniq_1 = __importDefault(require("lodash/uniq"));
var map_1 = __importDefault(require("lodash/map"));
var gast_1 = require("@chevrotain/gast");
var gast_2 = require("@chevrotain/gast");
function first(prod) {

@@ -41,9 +41,9 @@ /* istanbul ignore else */

function firstForSequence(prod) {
let firstSet = [];
const seq = prod.definition;
let nextSubProdIdx = 0;
let hasInnerProdsRemaining = seq.length > nextSubProdIdx;
let currSubProd;
var firstSet = [];
var seq = prod.definition;
var nextSubProdIdx = 0;
var hasInnerProdsRemaining = seq.length > nextSubProdIdx;
var currSubProd;
// so we enter the loop at least once (if the definition is not empty
let isLastInnerProdOptional = true;
var isLastInnerProdOptional = true;
// scan a sequence until it's end or until we have found a NONE optional production in it

@@ -61,3 +61,3 @@ while (hasInnerProdsRemaining && isLastInnerProdOptional) {

function firstForBranching(prod) {
const allAlternativesFirsts = (0, map_1.default)(prod.definition, (innerProd) => {
var allAlternativesFirsts = (0, map_1.default)(prod.definition, function (innerProd) {
return first(innerProd);

@@ -64,0 +64,0 @@ });

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -7,37 +22,40 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

exports.buildInProdFollowPrefix = exports.buildBetweenProdsFollowPrefix = exports.computeAllProdsFollows = exports.ResyncFollowsWalker = void 0;
const rest_1 = require("./rest");
const first_1 = require("./first");
const forEach_1 = __importDefault(require("lodash/forEach"));
const assign_1 = __importDefault(require("lodash/assign"));
const constants_1 = require("../constants");
const gast_1 = require("@chevrotain/gast");
var rest_1 = require("./rest");
var first_1 = require("./first");
var forEach_1 = __importDefault(require("lodash/forEach"));
var assign_1 = __importDefault(require("lodash/assign"));
var constants_1 = require("../constants");
var gast_1 = require("@chevrotain/gast");
// This ResyncFollowsWalker computes all of the follows required for RESYNC
// (skipping reference production).
class ResyncFollowsWalker extends rest_1.RestWalker {
constructor(topProd) {
super();
this.topProd = topProd;
this.follows = {};
var ResyncFollowsWalker = /** @class */ (function (_super) {
__extends(ResyncFollowsWalker, _super);
function ResyncFollowsWalker(topProd) {
var _this = _super.call(this) || this;
_this.topProd = topProd;
_this.follows = {};
return _this;
}
startWalking() {
ResyncFollowsWalker.prototype.startWalking = function () {
this.walk(this.topProd);
return this.follows;
}
walkTerminal(terminal, currRest, prevRest) {
};
ResyncFollowsWalker.prototype.walkTerminal = function (terminal, currRest, prevRest) {
// do nothing! just like in the public sector after 13:00
}
walkProdRef(refProd, currRest, prevRest) {
const followName = buildBetweenProdsFollowPrefix(refProd.referencedRule, refProd.idx) +
};
ResyncFollowsWalker.prototype.walkProdRef = function (refProd, currRest, prevRest) {
var followName = buildBetweenProdsFollowPrefix(refProd.referencedRule, refProd.idx) +
this.topProd.name;
const fullRest = currRest.concat(prevRest);
const restProd = new gast_1.Alternative({ definition: fullRest });
const t_in_topProd_follows = (0, first_1.first)(restProd);
var fullRest = currRest.concat(prevRest);
var restProd = new gast_1.Alternative({ definition: fullRest });
var t_in_topProd_follows = (0, first_1.first)(restProd);
this.follows[followName] = t_in_topProd_follows;
}
}
};
return ResyncFollowsWalker;
}(rest_1.RestWalker));
exports.ResyncFollowsWalker = ResyncFollowsWalker;
function computeAllProdsFollows(topProductions) {
const reSyncFollows = {};
(0, forEach_1.default)(topProductions, (topProd) => {
const currRefsFollow = new ResyncFollowsWalker(topProd).startWalking();
var reSyncFollows = {};
(0, forEach_1.default)(topProductions, function (topProd) {
var currRefsFollow = new ResyncFollowsWalker(topProd).startWalking();
(0, assign_1.default)(reSyncFollows, currRefsFollow);

@@ -53,3 +71,3 @@ });

function buildInProdFollowPrefix(terminal) {
const terminalName = terminal.terminalType.name;
var terminalName = terminal.terminalType.name;
return terminalName + terminal.idx + constants_1.IN;

@@ -56,0 +74,0 @@ }

@@ -7,13 +7,13 @@ "use strict";

exports.validateGrammar = exports.resolveGrammar = void 0;
const forEach_1 = __importDefault(require("lodash/forEach"));
const defaults_1 = __importDefault(require("lodash/defaults"));
const resolver_1 = require("../resolver");
const checks_1 = require("../checks");
const errors_public_1 = require("../../errors_public");
var forEach_1 = __importDefault(require("lodash/forEach"));
var defaults_1 = __importDefault(require("lodash/defaults"));
var resolver_1 = require("../resolver");
var checks_1 = require("../checks");
var errors_public_1 = require("../../errors_public");
function resolveGrammar(options) {
const actualOptions = (0, defaults_1.default)(options, {
var actualOptions = (0, defaults_1.default)(options, {
errMsgProvider: errors_public_1.defaultGrammarResolverErrorProvider
});
const topRulesTable = {};
(0, forEach_1.default)(options.rules, (rule) => {
var topRulesTable = {};
(0, forEach_1.default)(options.rules, function (rule) {
topRulesTable[rule.name] = rule;

@@ -20,0 +20,0 @@ });

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -7,24 +22,26 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

exports.nextPossibleTokensAfter = exports.possiblePathsFrom = exports.NextTerminalAfterAtLeastOneSepWalker = exports.NextTerminalAfterAtLeastOneWalker = exports.NextTerminalAfterManySepWalker = exports.NextTerminalAfterManyWalker = exports.AbstractNextTerminalAfterProductionWalker = exports.NextAfterTokenWalker = exports.AbstractNextPossibleTokensWalker = void 0;
const rest_1 = require("./rest");
const first_1 = __importDefault(require("lodash/first"));
const isEmpty_1 = __importDefault(require("lodash/isEmpty"));
const dropRight_1 = __importDefault(require("lodash/dropRight"));
const drop_1 = __importDefault(require("lodash/drop"));
const last_1 = __importDefault(require("lodash/last"));
const forEach_1 = __importDefault(require("lodash/forEach"));
const clone_1 = __importDefault(require("lodash/clone"));
const first_2 = require("./first");
const gast_1 = require("@chevrotain/gast");
class AbstractNextPossibleTokensWalker extends rest_1.RestWalker {
constructor(topProd, path) {
super();
this.topProd = topProd;
this.path = path;
this.possibleTokTypes = [];
this.nextProductionName = "";
this.nextProductionOccurrence = 0;
this.found = false;
this.isAtEndOfPath = false;
var rest_1 = require("./rest");
var first_1 = __importDefault(require("lodash/first"));
var isEmpty_1 = __importDefault(require("lodash/isEmpty"));
var dropRight_1 = __importDefault(require("lodash/dropRight"));
var drop_1 = __importDefault(require("lodash/drop"));
var last_1 = __importDefault(require("lodash/last"));
var forEach_1 = __importDefault(require("lodash/forEach"));
var clone_1 = __importDefault(require("lodash/clone"));
var first_2 = require("./first");
var gast_1 = require("@chevrotain/gast");
var AbstractNextPossibleTokensWalker = /** @class */ (function (_super) {
__extends(AbstractNextPossibleTokensWalker, _super);
function AbstractNextPossibleTokensWalker(topProd, path) {
var _this = _super.call(this) || this;
_this.topProd = topProd;
_this.path = path;
_this.possibleTokTypes = [];
_this.nextProductionName = "";
_this.nextProductionOccurrence = 0;
_this.found = false;
_this.isAtEndOfPath = false;
return _this;
}
startWalking() {
AbstractNextPossibleTokensWalker.prototype.startWalking = function () {
this.found = false;

@@ -43,19 +60,20 @@ if (this.path.ruleStack[0] !== this.topProd.name) {

return this.possibleTokTypes;
}
walk(prod, prevRest = []) {
};
AbstractNextPossibleTokensWalker.prototype.walk = function (prod, prevRest) {
if (prevRest === void 0) { prevRest = []; }
// stop scanning once we found the path
if (!this.found) {
super.walk(prod, prevRest);
_super.prototype.walk.call(this, prod, prevRest);
}
}
walkProdRef(refProd, currRest, prevRest) {
};
AbstractNextPossibleTokensWalker.prototype.walkProdRef = function (refProd, currRest, prevRest) {
// found the next production, need to keep walking in it
if (refProd.referencedRule.name === this.nextProductionName &&
refProd.idx === this.nextProductionOccurrence) {
const fullRest = currRest.concat(prevRest);
var fullRest = currRest.concat(prevRest);
this.updateExpectedNext();
this.walk(refProd.referencedRule, fullRest);
}
}
updateExpectedNext() {
};
AbstractNextPossibleTokensWalker.prototype.updateExpectedNext = function () {
// need to consume the Terminal

@@ -73,15 +91,18 @@ if ((0, isEmpty_1.default)(this.ruleStack)) {

}
}
}
};
return AbstractNextPossibleTokensWalker;
}(rest_1.RestWalker));
exports.AbstractNextPossibleTokensWalker = AbstractNextPossibleTokensWalker;
class NextAfterTokenWalker extends AbstractNextPossibleTokensWalker {
constructor(topProd, path) {
super(topProd, path);
this.path = path;
this.nextTerminalName = "";
this.nextTerminalOccurrence = 0;
this.nextTerminalName = this.path.lastTok.name;
this.nextTerminalOccurrence = this.path.lastTokOccurrence;
var NextAfterTokenWalker = /** @class */ (function (_super) {
__extends(NextAfterTokenWalker, _super);
function NextAfterTokenWalker(topProd, path) {
var _this = _super.call(this, topProd, path) || this;
_this.path = path;
_this.nextTerminalName = "";
_this.nextTerminalOccurrence = 0;
_this.nextTerminalName = _this.path.lastTok.name;
_this.nextTerminalOccurrence = _this.path.lastTokOccurrence;
return _this;
}
walkTerminal(terminal, currRest, prevRest) {
NextAfterTokenWalker.prototype.walkTerminal = function (terminal, currRest, prevRest) {
if (this.isAtEndOfPath &&

@@ -91,9 +112,10 @@ terminal.terminalType.name === this.nextTerminalName &&

!this.found) {
const fullRest = currRest.concat(prevRest);
const restProd = new gast_1.Alternative({ definition: fullRest });
var fullRest = currRest.concat(prevRest);
var restProd = new gast_1.Alternative({ definition: fullRest });
this.possibleTokTypes = (0, first_2.first)(restProd);
this.found = true;
}
}
}
};
return NextAfterTokenWalker;
}(AbstractNextPossibleTokensWalker));
exports.NextAfterTokenWalker = NextAfterTokenWalker;

@@ -104,8 +126,9 @@ /**

*/
class AbstractNextTerminalAfterProductionWalker extends rest_1.RestWalker {
constructor(topRule, occurrence) {
super();
this.topRule = topRule;
this.occurrence = occurrence;
this.result = {
var AbstractNextTerminalAfterProductionWalker = /** @class */ (function (_super) {
__extends(AbstractNextTerminalAfterProductionWalker, _super);
function AbstractNextTerminalAfterProductionWalker(topRule, occurrence) {
var _this = _super.call(this) || this;
_this.topRule = topRule;
_this.occurrence = occurrence;
_this.result = {
token: undefined,

@@ -115,13 +138,19 @@ occurrence: undefined,

};
return _this;
}
startWalking() {
AbstractNextTerminalAfterProductionWalker.prototype.startWalking = function () {
this.walk(this.topRule);
return this.result;
};
return AbstractNextTerminalAfterProductionWalker;
}(rest_1.RestWalker));
exports.AbstractNextTerminalAfterProductionWalker = AbstractNextTerminalAfterProductionWalker;
var NextTerminalAfterManyWalker = /** @class */ (function (_super) {
__extends(NextTerminalAfterManyWalker, _super);
function NextTerminalAfterManyWalker() {
return _super !== null && _super.apply(this, arguments) || this;
}
}
exports.AbstractNextTerminalAfterProductionWalker = AbstractNextTerminalAfterProductionWalker;
class NextTerminalAfterManyWalker extends AbstractNextTerminalAfterProductionWalker {
walkMany(manyProd, currRest, prevRest) {
NextTerminalAfterManyWalker.prototype.walkMany = function (manyProd, currRest, prevRest) {
if (manyProd.idx === this.occurrence) {
const firstAfterMany = (0, first_1.default)(currRest.concat(prevRest));
var firstAfterMany = (0, first_1.default)(currRest.concat(prevRest));
this.result.isEndOfRule = firstAfterMany === undefined;

@@ -134,11 +163,16 @@ if (firstAfterMany instanceof gast_1.Terminal) {

else {
super.walkMany(manyProd, currRest, prevRest);
_super.prototype.walkMany.call(this, manyProd, currRest, prevRest);
}
};
return NextTerminalAfterManyWalker;
}(AbstractNextTerminalAfterProductionWalker));
exports.NextTerminalAfterManyWalker = NextTerminalAfterManyWalker;
var NextTerminalAfterManySepWalker = /** @class */ (function (_super) {
__extends(NextTerminalAfterManySepWalker, _super);
function NextTerminalAfterManySepWalker() {
return _super !== null && _super.apply(this, arguments) || this;
}
}
exports.NextTerminalAfterManyWalker = NextTerminalAfterManyWalker;
class NextTerminalAfterManySepWalker extends AbstractNextTerminalAfterProductionWalker {
walkManySep(manySepProd, currRest, prevRest) {
NextTerminalAfterManySepWalker.prototype.walkManySep = function (manySepProd, currRest, prevRest) {
if (manySepProd.idx === this.occurrence) {
const firstAfterManySep = (0, first_1.default)(currRest.concat(prevRest));
var firstAfterManySep = (0, first_1.default)(currRest.concat(prevRest));
this.result.isEndOfRule = firstAfterManySep === undefined;

@@ -151,11 +185,16 @@ if (firstAfterManySep instanceof gast_1.Terminal) {

else {
super.walkManySep(manySepProd, currRest, prevRest);
_super.prototype.walkManySep.call(this, manySepProd, currRest, prevRest);
}
};
return NextTerminalAfterManySepWalker;
}(AbstractNextTerminalAfterProductionWalker));
exports.NextTerminalAfterManySepWalker = NextTerminalAfterManySepWalker;
var NextTerminalAfterAtLeastOneWalker = /** @class */ (function (_super) {
__extends(NextTerminalAfterAtLeastOneWalker, _super);
function NextTerminalAfterAtLeastOneWalker() {
return _super !== null && _super.apply(this, arguments) || this;
}
}
exports.NextTerminalAfterManySepWalker = NextTerminalAfterManySepWalker;
class NextTerminalAfterAtLeastOneWalker extends AbstractNextTerminalAfterProductionWalker {
walkAtLeastOne(atLeastOneProd, currRest, prevRest) {
NextTerminalAfterAtLeastOneWalker.prototype.walkAtLeastOne = function (atLeastOneProd, currRest, prevRest) {
if (atLeastOneProd.idx === this.occurrence) {
const firstAfterAtLeastOne = (0, first_1.default)(currRest.concat(prevRest));
var firstAfterAtLeastOne = (0, first_1.default)(currRest.concat(prevRest));
this.result.isEndOfRule = firstAfterAtLeastOne === undefined;

@@ -168,12 +207,17 @@ if (firstAfterAtLeastOne instanceof gast_1.Terminal) {

else {
super.walkAtLeastOne(atLeastOneProd, currRest, prevRest);
_super.prototype.walkAtLeastOne.call(this, atLeastOneProd, currRest, prevRest);
}
}
}
};
return NextTerminalAfterAtLeastOneWalker;
}(AbstractNextTerminalAfterProductionWalker));
exports.NextTerminalAfterAtLeastOneWalker = NextTerminalAfterAtLeastOneWalker;
// TODO: reduce code duplication in the AfterWalkers
class NextTerminalAfterAtLeastOneSepWalker extends AbstractNextTerminalAfterProductionWalker {
walkAtLeastOneSep(atleastOneSepProd, currRest, prevRest) {
var NextTerminalAfterAtLeastOneSepWalker = /** @class */ (function (_super) {
__extends(NextTerminalAfterAtLeastOneSepWalker, _super);
function NextTerminalAfterAtLeastOneSepWalker() {
return _super !== null && _super.apply(this, arguments) || this;
}
NextTerminalAfterAtLeastOneSepWalker.prototype.walkAtLeastOneSep = function (atleastOneSepProd, currRest, prevRest) {
if (atleastOneSepProd.idx === this.occurrence) {
const firstAfterfirstAfterAtLeastOneSep = (0, first_1.default)(currRest.concat(prevRest));
var firstAfterfirstAfterAtLeastOneSep = (0, first_1.default)(currRest.concat(prevRest));
this.result.isEndOfRule = firstAfterfirstAfterAtLeastOneSep === undefined;

@@ -186,12 +230,14 @@ if (firstAfterfirstAfterAtLeastOneSep instanceof gast_1.Terminal) {

else {
super.walkAtLeastOneSep(atleastOneSepProd, currRest, prevRest);
_super.prototype.walkAtLeastOneSep.call(this, atleastOneSepProd, currRest, prevRest);
}
}
}
};
return NextTerminalAfterAtLeastOneSepWalker;
}(AbstractNextTerminalAfterProductionWalker));
exports.NextTerminalAfterAtLeastOneSepWalker = NextTerminalAfterAtLeastOneSepWalker;
function possiblePathsFrom(targetDef, maxLength, currPath = []) {
function possiblePathsFrom(targetDef, maxLength, currPath) {
if (currPath === void 0) { currPath = []; }
// avoid side effects
currPath = (0, clone_1.default)(currPath);
let result = [];
let i = 0;
var result = [];
var i = 0;
// TODO: avoid inner funcs

@@ -203,3 +249,3 @@ function remainingPathWith(nextDef) {

function getAlternativesForProd(definition) {
const alternatives = possiblePathsFrom(remainingPathWith(definition), maxLength, currPath);
var alternatives = possiblePathsFrom(remainingPathWith(definition), maxLength, currPath);
return result.concat(alternatives);

@@ -215,3 +261,3 @@ }

while (currPath.length < maxLength && i < targetDef.length) {
const prod = targetDef[i];
var prod = targetDef[i];
/* istanbul ignore else */

@@ -228,3 +274,3 @@ if (prod instanceof gast_1.Alternative) {

else if (prod instanceof gast_1.RepetitionMandatory) {
const newDef = prod.definition.concat([
var newDef = prod.definition.concat([
new gast_1.Repetition({

@@ -237,3 +283,3 @@ definition: prod.definition

else if (prod instanceof gast_1.RepetitionMandatoryWithSeparator) {
const newDef = [
var newDef = [
new gast_1.Alternative({ definition: prod.definition }),

@@ -247,3 +293,3 @@ new gast_1.Repetition({

else if (prod instanceof gast_1.RepetitionWithSeparator) {
const newDef = prod.definition.concat([
var newDef = prod.definition.concat([
new gast_1.Repetition({

@@ -256,3 +302,3 @@ definition: [new gast_1.Terminal({ terminalType: prod.separator })].concat(prod.definition)

else if (prod instanceof gast_1.Repetition) {
const newDef = prod.definition.concat([
var newDef = prod.definition.concat([
new gast_1.Repetition({

@@ -265,3 +311,3 @@ definition: prod.definition

else if (prod instanceof gast_1.Alternation) {
(0, forEach_1.default)(prod.definition, (currAlt) => {
(0, forEach_1.default)(prod.definition, function (currAlt) {
// TODO: this is a limited check for empty alternatives

@@ -292,11 +338,11 @@ // It would prevent a common case of infinite loops during parser initialization.

function nextPossibleTokensAfter(initialDef, tokenVector, tokMatcher, maxLookAhead) {
const EXIT_NON_TERMINAL = "EXIT_NONE_TERMINAL";
var EXIT_NON_TERMINAL = "EXIT_NONE_TERMINAL";
// to avoid creating a new Array each time.
const EXIT_NON_TERMINAL_ARR = [EXIT_NON_TERMINAL];
const EXIT_ALTERNATIVE = "EXIT_ALTERNATIVE";
let foundCompletePath = false;
const tokenVectorLength = tokenVector.length;
const minimalAlternativesIndex = tokenVectorLength - maxLookAhead - 1;
const result = [];
const possiblePaths = [];
var EXIT_NON_TERMINAL_ARR = [EXIT_NON_TERMINAL];
var EXIT_ALTERNATIVE = "EXIT_ALTERNATIVE";
var foundCompletePath = false;
var tokenVectorLength = tokenVector.length;
var minimalAlternativesIndex = tokenVectorLength - maxLookAhead - 1;
var result = [];
var possiblePaths = [];
possiblePaths.push({

@@ -309,3 +355,3 @@ idx: -1,

while (!(0, isEmpty_1.default)(possiblePaths)) {
const currPath = possiblePaths.pop();
var currPath = possiblePaths.pop();
// skip alternatives if no more results can be found (assuming deterministic grammar with fixed lookahead)

@@ -320,6 +366,6 @@ if (currPath === EXIT_ALTERNATIVE) {

}
const currDef = currPath.def;
const currIdx = currPath.idx;
const currRuleStack = currPath.ruleStack;
const currOccurrenceStack = currPath.occurrenceStack;
var currDef = currPath.def;
var currIdx = currPath.idx;
var currRuleStack = currPath.ruleStack;
var currOccurrenceStack = currPath.occurrenceStack;
// For Example: an empty path could exist in a valid grammar in the case of an EMPTY_ALT

@@ -329,6 +375,6 @@ if ((0, isEmpty_1.default)(currDef)) {

}
const prod = currDef[0];
var prod = currDef[0];
/* istanbul ignore else */
if (prod === EXIT_NON_TERMINAL) {
const nextPath = {
var nextPath = {
idx: currIdx,

@@ -344,6 +390,6 @@ def: (0, drop_1.default)(currDef),

if (currIdx < tokenVectorLength - 1) {
const nextIdx = currIdx + 1;
const actualToken = tokenVector[nextIdx];
var nextIdx = currIdx + 1;
var actualToken = tokenVector[nextIdx];
if (tokMatcher(actualToken, prod.terminalType)) {
const nextPath = {
var nextPath = {
idx: nextIdx,

@@ -373,7 +419,7 @@ def: (0, drop_1.default)(currDef),

else if (prod instanceof gast_1.NonTerminal) {
const newRuleStack = (0, clone_1.default)(currRuleStack);
var newRuleStack = (0, clone_1.default)(currRuleStack);
newRuleStack.push(prod.nonTerminalName);
const newOccurrenceStack = (0, clone_1.default)(currOccurrenceStack);
var newOccurrenceStack = (0, clone_1.default)(currOccurrenceStack);
newOccurrenceStack.push(prod.idx);
const nextPath = {
var nextPath = {
idx: currIdx,

@@ -388,3 +434,3 @@ def: prod.definition.concat(EXIT_NON_TERMINAL_ARR, (0, drop_1.default)(currDef)),

// the order of alternatives is meaningful, FILO (Last path will be traversed first).
const nextPathWithout = {
var nextPathWithout = {
idx: currIdx,

@@ -398,3 +444,3 @@ def: (0, drop_1.default)(currDef),

possiblePaths.push(EXIT_ALTERNATIVE);
const nextPathWith = {
var nextPathWith = {
idx: currIdx,

@@ -409,8 +455,8 @@ def: prod.definition.concat((0, drop_1.default)(currDef)),

// TODO:(THE NEW operators here take a while...) (convert once?)
const secondIteration = new gast_1.Repetition({
var secondIteration = new gast_1.Repetition({
definition: prod.definition,
idx: prod.idx
});
const nextDef = prod.definition.concat([secondIteration], (0, drop_1.default)(currDef));
const nextPath = {
var nextDef = prod.definition.concat([secondIteration], (0, drop_1.default)(currDef));
var nextPath = {
idx: currIdx,

@@ -425,11 +471,11 @@ def: nextDef,

// TODO:(THE NEW operators here take a while...) (convert once?)
const separatorGast = new gast_1.Terminal({
var separatorGast = new gast_1.Terminal({
terminalType: prod.separator
});
const secondIteration = new gast_1.Repetition({
var secondIteration = new gast_1.Repetition({
definition: [separatorGast].concat(prod.definition),
idx: prod.idx
});
const nextDef = prod.definition.concat([secondIteration], (0, drop_1.default)(currDef));
const nextPath = {
var nextDef = prod.definition.concat([secondIteration], (0, drop_1.default)(currDef));
var nextPath = {
idx: currIdx,

@@ -444,3 +490,3 @@ def: nextDef,

// the order of alternatives is meaningful, FILO (Last path will be traversed first).
const nextPathWithout = {
var nextPathWithout = {
idx: currIdx,

@@ -454,11 +500,11 @@ def: (0, drop_1.default)(currDef),

possiblePaths.push(EXIT_ALTERNATIVE);
const separatorGast = new gast_1.Terminal({
var separatorGast = new gast_1.Terminal({
terminalType: prod.separator
});
const nthRepetition = new gast_1.Repetition({
var nthRepetition = new gast_1.Repetition({
definition: [separatorGast].concat(prod.definition),
idx: prod.idx
});
const nextDef = prod.definition.concat([nthRepetition], (0, drop_1.default)(currDef));
const nextPathWith = {
var nextDef = prod.definition.concat([nthRepetition], (0, drop_1.default)(currDef));
var nextPathWith = {
idx: currIdx,

@@ -473,3 +519,3 @@ def: nextDef,

// the order of alternatives is meaningful, FILO (Last path will be traversed first).
const nextPathWithout = {
var nextPathWithout = {
idx: currIdx,

@@ -484,8 +530,8 @@ def: (0, drop_1.default)(currDef),

// TODO: an empty repetition will cause infinite loops here, will the parser detect this in selfAnalysis?
const nthRepetition = new gast_1.Repetition({
var nthRepetition = new gast_1.Repetition({
definition: prod.definition,
idx: prod.idx
});
const nextDef = prod.definition.concat([nthRepetition], (0, drop_1.default)(currDef));
const nextPathWith = {
var nextDef = prod.definition.concat([nthRepetition], (0, drop_1.default)(currDef));
var nextPathWith = {
idx: currIdx,

@@ -500,5 +546,5 @@ def: nextDef,

// the order of alternatives is meaningful, FILO (Last path will be traversed first).
for (let i = prod.definition.length - 1; i >= 0; i--) {
const currAlt = prod.definition[i];
const currAltPath = {
for (var i = prod.definition.length - 1; i >= 0; i--) {
var currAlt = prod.definition[i];
var currAltPath = {
idx: currIdx,

@@ -533,5 +579,5 @@ def: currAlt.definition.concat((0, drop_1.default)(currDef)),

function expandTopLevelRule(topRule, currIdx, currRuleStack, currOccurrenceStack) {
const newRuleStack = (0, clone_1.default)(currRuleStack);
var newRuleStack = (0, clone_1.default)(currRuleStack);
newRuleStack.push(topRule.name);
const newCurrOccurrenceStack = (0, clone_1.default)(currOccurrenceStack);
var newCurrOccurrenceStack = (0, clone_1.default)(currOccurrenceStack);
// top rule is always assumed to have been called with occurrence index 1

@@ -538,0 +584,0 @@ newCurrOccurrenceStack.push(1);

@@ -29,3 +29,3 @@ "use strict";

exports.getKeyForAutomaticLookahead = getKeyForAutomaticLookahead;
const BITS_START_FOR_ALT_IDX = 32 - exports.BITS_FOR_ALT_IDX;
var BITS_START_FOR_ALT_IDX = 32 - exports.BITS_FOR_ALT_IDX;
//# sourceMappingURL=keys.js.map
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -7,14 +22,14 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

exports.areTokenCategoriesNotUsed = exports.isStrictPrefixOfPath = exports.containsPath = exports.getLookaheadPathsForOptionalProd = exports.getLookaheadPathsForOr = exports.lookAheadSequenceFromAlternatives = exports.buildSingleAlternativeLookaheadFunction = exports.buildAlternativesLookAheadFunc = exports.buildLookaheadFuncForOptionalProd = exports.buildLookaheadFuncForOr = exports.getProdType = exports.PROD_TYPE = void 0;
const isEmpty_1 = __importDefault(require("lodash/isEmpty"));
const flatten_1 = __importDefault(require("lodash/flatten"));
const every_1 = __importDefault(require("lodash/every"));
const map_1 = __importDefault(require("lodash/map"));
const forEach_1 = __importDefault(require("lodash/forEach"));
const has_1 = __importDefault(require("lodash/has"));
const reduce_1 = __importDefault(require("lodash/reduce"));
const interpreter_1 = require("./interpreter");
const rest_1 = require("./rest");
const tokens_1 = require("../../scan/tokens");
const gast_1 = require("@chevrotain/gast");
const gast_2 = require("@chevrotain/gast");
var isEmpty_1 = __importDefault(require("lodash/isEmpty"));
var flatten_1 = __importDefault(require("lodash/flatten"));
var every_1 = __importDefault(require("lodash/every"));
var map_1 = __importDefault(require("lodash/map"));
var forEach_1 = __importDefault(require("lodash/forEach"));
var has_1 = __importDefault(require("lodash/has"));
var reduce_1 = __importDefault(require("lodash/reduce"));
var interpreter_1 = require("./interpreter");
var rest_1 = require("./rest");
var tokens_1 = require("../../scan/tokens");
var gast_1 = require("@chevrotain/gast");
var gast_2 = require("@chevrotain/gast");
var PROD_TYPE;

@@ -55,4 +70,4 @@ (function (PROD_TYPE) {

function buildLookaheadFuncForOr(occurrence, ruleGrammar, maxLookahead, hasPredicates, dynamicTokensEnabled, laFuncBuilder) {
const lookAheadPaths = getLookaheadPathsForOr(occurrence, ruleGrammar, maxLookahead);
const tokenMatcher = areTokenCategoriesNotUsed(lookAheadPaths)
var lookAheadPaths = getLookaheadPathsForOr(occurrence, ruleGrammar, maxLookahead);
var tokenMatcher = areTokenCategoriesNotUsed(lookAheadPaths)
? tokens_1.tokenStructuredMatcherNoCategories

@@ -76,4 +91,4 @@ : tokens_1.tokenStructuredMatcher;

function buildLookaheadFuncForOptionalProd(occurrence, ruleGrammar, k, dynamicTokensEnabled, prodType, lookaheadBuilder) {
const lookAheadPaths = getLookaheadPathsForOptionalProd(occurrence, ruleGrammar, prodType, k);
const tokenMatcher = areTokenCategoriesNotUsed(lookAheadPaths)
var lookAheadPaths = getLookaheadPathsForOptionalProd(occurrence, ruleGrammar, prodType, k);
var tokenMatcher = areTokenCategoriesNotUsed(lookAheadPaths)
? tokens_1.tokenStructuredMatcherNoCategories

@@ -85,5 +100,5 @@ : tokens_1.tokenStructuredMatcher;

function buildAlternativesLookAheadFunc(alts, hasPredicates, tokenMatcher, dynamicTokensEnabled) {
const numOfAlts = alts.length;
const areAllOneTokenLookahead = (0, every_1.default)(alts, (currAlt) => {
return (0, every_1.default)(currAlt, (currPath) => {
var numOfAlts = alts.length;
var areAllOneTokenLookahead = (0, every_1.default)(alts, function (currAlt) {
return (0, every_1.default)(currAlt, function (currPath) {
return currPath.length === 1;

@@ -101,7 +116,7 @@ });

// note that in the common case of no predicates, no cpu time will be wasted on this (see else block)
const predicates = (0, map_1.default)(orAlts, (currAlt) => currAlt.GATE);
for (let t = 0; t < numOfAlts; t++) {
const currAlt = alts[t];
const currNumOfPaths = currAlt.length;
const currPredicate = predicates[t];
var predicates = (0, map_1.default)(orAlts, function (currAlt) { return currAlt.GATE; });
for (var t = 0; t < numOfAlts; t++) {
var currAlt = alts[t];
var currNumOfPaths = currAlt.length;
var currPredicate = predicates[t];
if (currPredicate !== undefined && currPredicate.call(this) === false) {

@@ -111,7 +126,7 @@ // if the predicate does not match there is no point in checking the paths

}
nextPath: for (let j = 0; j < currNumOfPaths; j++) {
const currPath = currAlt[j];
const currPathLength = currPath.length;
for (let i = 0; i < currPathLength; i++) {
const nextToken = this.LA(i + 1);
nextPath: for (var j = 0; j < currNumOfPaths; j++) {
var currPath = currAlt[j];
var currPathLength = currPath.length;
for (var i = 0; i < currPathLength; i++) {
var nextToken = this.LA(i + 1);
if (tokenMatcher(nextToken, currPath[i]) === false) {

@@ -137,11 +152,11 @@ // mismatch in current path

// a single token lookahead. These Optimizations cannot work if dynamically defined Tokens are used.
const singleTokenAlts = (0, map_1.default)(alts, (currAlt) => {
var singleTokenAlts = (0, map_1.default)(alts, function (currAlt) {
return (0, flatten_1.default)(currAlt);
});
const choiceToAlt = (0, reduce_1.default)(singleTokenAlts, (result, currAlt, idx) => {
(0, forEach_1.default)(currAlt, (currTokType) => {
var choiceToAlt_1 = (0, reduce_1.default)(singleTokenAlts, function (result, currAlt, idx) {
(0, forEach_1.default)(currAlt, function (currTokType) {
if (!(0, has_1.default)(result, currTokType.tokenTypeIdx)) {
result[currTokType.tokenTypeIdx] = idx;
}
(0, forEach_1.default)(currTokType.categoryMatches, (currExtendingType) => {
(0, forEach_1.default)(currTokType.categoryMatches, function (currExtendingType) {
if (!(0, has_1.default)(result, currExtendingType)) {

@@ -158,4 +173,4 @@ result[currExtendingType] = idx;

return function () {
const nextToken = this.LA(1);
return choiceToAlt[nextToken.tokenTypeIdx];
var nextToken = this.LA(1);
return choiceToAlt_1[nextToken.tokenTypeIdx];
};

@@ -170,10 +185,10 @@ }

return function () {
for (let t = 0; t < numOfAlts; t++) {
const currAlt = alts[t];
const currNumOfPaths = currAlt.length;
nextPath: for (let j = 0; j < currNumOfPaths; j++) {
const currPath = currAlt[j];
const currPathLength = currPath.length;
for (let i = 0; i < currPathLength; i++) {
const nextToken = this.LA(i + 1);
for (var t = 0; t < numOfAlts; t++) {
var currAlt = alts[t];
var currNumOfPaths = currAlt.length;
nextPath: for (var j = 0; j < currNumOfPaths; j++) {
var currPath = currAlt[j];
var currPathLength = currPath.length;
for (var i = 0; i < currPathLength; i++) {
var nextToken = this.LA(i + 1);
if (tokenMatcher(nextToken, currPath[i]) === false) {

@@ -199,22 +214,22 @@ // mismatch in current path

function buildSingleAlternativeLookaheadFunction(alt, tokenMatcher, dynamicTokensEnabled) {
const areAllOneTokenLookahead = (0, every_1.default)(alt, (currPath) => {
var areAllOneTokenLookahead = (0, every_1.default)(alt, function (currPath) {
return currPath.length === 1;
});
const numOfPaths = alt.length;
var numOfPaths = alt.length;
// optimized (common) case of all the lookaheads paths requiring only
// a single token lookahead.
if (areAllOneTokenLookahead && !dynamicTokensEnabled) {
const singleTokensTypes = (0, flatten_1.default)(alt);
var singleTokensTypes = (0, flatten_1.default)(alt);
if (singleTokensTypes.length === 1 &&
(0, isEmpty_1.default)(singleTokensTypes[0].categoryMatches)) {
const expectedTokenType = singleTokensTypes[0];
const expectedTokenUniqueKey = expectedTokenType.tokenTypeIdx;
var expectedTokenType = singleTokensTypes[0];
var expectedTokenUniqueKey_1 = expectedTokenType.tokenTypeIdx;
return function () {
return this.LA(1).tokenTypeIdx === expectedTokenUniqueKey;
return this.LA(1).tokenTypeIdx === expectedTokenUniqueKey_1;
};
}
else {
const choiceToAlt = (0, reduce_1.default)(singleTokensTypes, (result, currTokType, idx) => {
var choiceToAlt_2 = (0, reduce_1.default)(singleTokensTypes, function (result, currTokType, idx) {
result[currTokType.tokenTypeIdx] = true;
(0, forEach_1.default)(currTokType.categoryMatches, (currExtendingType) => {
(0, forEach_1.default)(currTokType.categoryMatches, function (currExtendingType) {
result[currExtendingType] = true;

@@ -225,4 +240,4 @@ });

return function () {
const nextToken = this.LA(1);
return choiceToAlt[nextToken.tokenTypeIdx] === true;
var nextToken = this.LA(1);
return choiceToAlt_2[nextToken.tokenTypeIdx] === true;
};

@@ -233,7 +248,7 @@ }

return function () {
nextPath: for (let j = 0; j < numOfPaths; j++) {
const currPath = alt[j];
const currPathLength = currPath.length;
for (let i = 0; i < currPathLength; i++) {
const nextToken = this.LA(i + 1);
nextPath: for (var j = 0; j < numOfPaths; j++) {
var currPath = alt[j];
var currPathLength = currPath.length;
for (var i = 0; i < currPathLength; i++) {
var nextToken = this.LA(i + 1);
if (tokenMatcher(nextToken, currPath[i]) === false) {

@@ -254,14 +269,16 @@ // mismatch in current path

exports.buildSingleAlternativeLookaheadFunction = buildSingleAlternativeLookaheadFunction;
class RestDefinitionFinderWalker extends rest_1.RestWalker {
constructor(topProd, targetOccurrence, targetProdType) {
super();
this.topProd = topProd;
this.targetOccurrence = targetOccurrence;
this.targetProdType = targetProdType;
var RestDefinitionFinderWalker = /** @class */ (function (_super) {
__extends(RestDefinitionFinderWalker, _super);
function RestDefinitionFinderWalker(topProd, targetOccurrence, targetProdType) {
var _this = _super.call(this) || this;
_this.topProd = topProd;
_this.targetOccurrence = targetOccurrence;
_this.targetProdType = targetProdType;
return _this;
}
startWalking() {
RestDefinitionFinderWalker.prototype.startWalking = function () {
this.walk(this.topProd);
return this.restDef;
}
checkIsTarget(node, expectedProdType, currRest, prevRest) {
};
RestDefinitionFinderWalker.prototype.checkIsTarget = function (node, expectedProdType, currRest, prevRest) {
if (node.idx === this.targetOccurrence &&

@@ -274,41 +291,44 @@ this.targetProdType === expectedProdType) {

return false;
}
walkOption(optionProd, currRest, prevRest) {
};
RestDefinitionFinderWalker.prototype.walkOption = function (optionProd, currRest, prevRest) {
if (!this.checkIsTarget(optionProd, PROD_TYPE.OPTION, currRest, prevRest)) {
super.walkOption(optionProd, currRest, prevRest);
_super.prototype.walkOption.call(this, optionProd, currRest, prevRest);
}
}
walkAtLeastOne(atLeastOneProd, currRest, prevRest) {
};
RestDefinitionFinderWalker.prototype.walkAtLeastOne = function (atLeastOneProd, currRest, prevRest) {
if (!this.checkIsTarget(atLeastOneProd, PROD_TYPE.REPETITION_MANDATORY, currRest, prevRest)) {
super.walkOption(atLeastOneProd, currRest, prevRest);
_super.prototype.walkOption.call(this, atLeastOneProd, currRest, prevRest);
}
}
walkAtLeastOneSep(atLeastOneSepProd, currRest, prevRest) {
};
RestDefinitionFinderWalker.prototype.walkAtLeastOneSep = function (atLeastOneSepProd, currRest, prevRest) {
if (!this.checkIsTarget(atLeastOneSepProd, PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR, currRest, prevRest)) {
super.walkOption(atLeastOneSepProd, currRest, prevRest);
_super.prototype.walkOption.call(this, atLeastOneSepProd, currRest, prevRest);
}
}
walkMany(manyProd, currRest, prevRest) {
};
RestDefinitionFinderWalker.prototype.walkMany = function (manyProd, currRest, prevRest) {
if (!this.checkIsTarget(manyProd, PROD_TYPE.REPETITION, currRest, prevRest)) {
super.walkOption(manyProd, currRest, prevRest);
_super.prototype.walkOption.call(this, manyProd, currRest, prevRest);
}
}
walkManySep(manySepProd, currRest, prevRest) {
};
RestDefinitionFinderWalker.prototype.walkManySep = function (manySepProd, currRest, prevRest) {
if (!this.checkIsTarget(manySepProd, PROD_TYPE.REPETITION_WITH_SEPARATOR, currRest, prevRest)) {
super.walkOption(manySepProd, currRest, prevRest);
_super.prototype.walkOption.call(this, manySepProd, currRest, prevRest);
}
}
}
};
return RestDefinitionFinderWalker;
}(rest_1.RestWalker));
/**
* Returns the definition of a target production in a top level level rule.
*/
class InsideDefinitionFinderVisitor extends gast_2.GAstVisitor {
constructor(targetOccurrence, targetProdType, targetRef) {
super();
this.targetOccurrence = targetOccurrence;
this.targetProdType = targetProdType;
this.targetRef = targetRef;
this.result = [];
var InsideDefinitionFinderVisitor = /** @class */ (function (_super) {
__extends(InsideDefinitionFinderVisitor, _super);
function InsideDefinitionFinderVisitor(targetOccurrence, targetProdType, targetRef) {
var _this = _super.call(this) || this;
_this.targetOccurrence = targetOccurrence;
_this.targetProdType = targetProdType;
_this.targetRef = targetRef;
_this.result = [];
return _this;
}
checkIsTarget(node, expectedProdName) {
InsideDefinitionFinderVisitor.prototype.checkIsTarget = function (node, expectedProdName) {
if (node.idx === this.targetOccurrence &&

@@ -319,25 +339,26 @@ this.targetProdType === expectedProdName &&

}
}
visitOption(node) {
};
InsideDefinitionFinderVisitor.prototype.visitOption = function (node) {
this.checkIsTarget(node, PROD_TYPE.OPTION);
}
visitRepetition(node) {
};
InsideDefinitionFinderVisitor.prototype.visitRepetition = function (node) {
this.checkIsTarget(node, PROD_TYPE.REPETITION);
}
visitRepetitionMandatory(node) {
};
InsideDefinitionFinderVisitor.prototype.visitRepetitionMandatory = function (node) {
this.checkIsTarget(node, PROD_TYPE.REPETITION_MANDATORY);
}
visitRepetitionMandatoryWithSeparator(node) {
};
InsideDefinitionFinderVisitor.prototype.visitRepetitionMandatoryWithSeparator = function (node) {
this.checkIsTarget(node, PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR);
}
visitRepetitionWithSeparator(node) {
};
InsideDefinitionFinderVisitor.prototype.visitRepetitionWithSeparator = function (node) {
this.checkIsTarget(node, PROD_TYPE.REPETITION_WITH_SEPARATOR);
}
visitAlternation(node) {
};
InsideDefinitionFinderVisitor.prototype.visitAlternation = function (node) {
this.checkIsTarget(node, PROD_TYPE.ALTERNATION);
}
}
};
return InsideDefinitionFinderVisitor;
}(gast_2.GAstVisitor));
function initializeArrayOfArrays(size) {
const result = new Array(size);
for (let i = 0; i < size; i++) {
var result = new Array(size);
for (var i = 0; i < size; i++) {
result[i] = [];

@@ -353,11 +374,11 @@ }

function pathToHashKeys(path) {
let keys = [""];
for (let i = 0; i < path.length; i++) {
const tokType = path[i];
const longerKeys = [];
for (let j = 0; j < keys.length; j++) {
const currShorterKey = keys[j];
var keys = [""];
for (var i = 0; i < path.length; i++) {
var tokType = path[i];
var longerKeys = [];
for (var j = 0; j < keys.length; j++) {
var currShorterKey = keys[j];
longerKeys.push(currShorterKey + "_" + tokType.tokenTypeIdx);
for (let t = 0; t < tokType.categoryMatches.length; t++) {
const categoriesKeySuffix = "_" + tokType.categoryMatches[t];
for (var t = 0; t < tokType.categoryMatches.length; t++) {
var categoriesKeySuffix = "_" + tokType.categoryMatches[t];
longerKeys.push(currShorterKey + categoriesKeySuffix);

@@ -374,3 +395,3 @@ }

function isUniquePrefixHash(altKnownPathsKeys, searchPathKeys, idx) {
for (let currAltIdx = 0; currAltIdx < altKnownPathsKeys.length; currAltIdx++) {
for (var currAltIdx = 0; currAltIdx < altKnownPathsKeys.length; currAltIdx++) {
// We only want to test vs the other alternatives

@@ -380,5 +401,5 @@ if (currAltIdx === idx) {

}
const otherAltKnownPathsKeys = altKnownPathsKeys[currAltIdx];
for (let searchIdx = 0; searchIdx < searchPathKeys.length; searchIdx++) {
const searchKey = searchPathKeys[searchIdx];
var otherAltKnownPathsKeys = altKnownPathsKeys[currAltIdx];
for (var searchIdx = 0; searchIdx < searchPathKeys.length; searchIdx++) {
var searchKey = searchPathKeys[searchIdx];
if (otherAltKnownPathsKeys[searchKey] === true) {

@@ -393,9 +414,11 @@ return false;

function lookAheadSequenceFromAlternatives(altsDefs, k) {
const partialAlts = (0, map_1.default)(altsDefs, (currAlt) => (0, interpreter_1.possiblePathsFrom)([currAlt], 1));
const finalResult = initializeArrayOfArrays(partialAlts.length);
const altsHashes = (0, map_1.default)(partialAlts, (currAltPaths) => {
const dict = {};
(0, forEach_1.default)(currAltPaths, (item) => {
const keys = pathToHashKeys(item.partialPath);
(0, forEach_1.default)(keys, (currKey) => {
var partialAlts = (0, map_1.default)(altsDefs, function (currAlt) {
return (0, interpreter_1.possiblePathsFrom)([currAlt], 1);
});
var finalResult = initializeArrayOfArrays(partialAlts.length);
var altsHashes = (0, map_1.default)(partialAlts, function (currAltPaths) {
var dict = {};
(0, forEach_1.default)(currAltPaths, function (item) {
var keys = pathToHashKeys(item.partialPath);
(0, forEach_1.default)(keys, function (currKey) {
dict[currKey] = true;

@@ -406,19 +429,18 @@ });

});
let newData = partialAlts;
var newData = partialAlts;
// maxLookahead loop
for (let pathLength = 1; pathLength <= k; pathLength++) {
const currDataset = newData;
for (var pathLength = 1; pathLength <= k; pathLength++) {
var currDataset = newData;
newData = initializeArrayOfArrays(currDataset.length);
// alternatives loop
for (let altIdx = 0; altIdx < currDataset.length; altIdx++) {
const currAltPathsAndSuffixes = currDataset[altIdx];
var _loop_1 = function (altIdx) {
var currAltPathsAndSuffixes = currDataset[altIdx];
// paths in current alternative loop
for (let currPathIdx = 0; currPathIdx < currAltPathsAndSuffixes.length; currPathIdx++) {
const currPathPrefix = currAltPathsAndSuffixes[currPathIdx].partialPath;
const suffixDef = currAltPathsAndSuffixes[currPathIdx].suffixDef;
const prefixKeys = pathToHashKeys(currPathPrefix);
const isUnique = isUniquePrefixHash(altsHashes, prefixKeys, altIdx);
for (var currPathIdx = 0; currPathIdx < currAltPathsAndSuffixes.length; currPathIdx++) {
var currPathPrefix = currAltPathsAndSuffixes[currPathIdx].partialPath;
var suffixDef = currAltPathsAndSuffixes[currPathIdx].suffixDef;
var prefixKeys = pathToHashKeys(currPathPrefix);
var isUnique = isUniquePrefixHash(altsHashes, prefixKeys, altIdx);
// End of the line for this path.
if (isUnique || (0, isEmpty_1.default)(suffixDef) || currPathPrefix.length === k) {
const currAltResult = finalResult[altIdx];
var currAltResult = finalResult[altIdx];
// TODO: Can we implement a containsPath using Maps/Dictionaries?

@@ -428,4 +450,4 @@ if (containsPath(currAltResult, currPathPrefix) === false) {

// Update all new keys for the current path.
for (let j = 0; j < prefixKeys.length; j++) {
const currKey = prefixKeys[j];
for (var j = 0; j < prefixKeys.length; j++) {
var currKey = prefixKeys[j];
altsHashes[altIdx][currKey] = true;

@@ -437,8 +459,8 @@ }

else {
const newPartialPathsAndSuffixes = (0, interpreter_1.possiblePathsFrom)(suffixDef, pathLength + 1, currPathPrefix);
var newPartialPathsAndSuffixes = (0, interpreter_1.possiblePathsFrom)(suffixDef, pathLength + 1, currPathPrefix);
newData[altIdx] = newData[altIdx].concat(newPartialPathsAndSuffixes);
// Update keys for new known paths
(0, forEach_1.default)(newPartialPathsAndSuffixes, (item) => {
const prefixKeys = pathToHashKeys(item.partialPath);
(0, forEach_1.default)(prefixKeys, (key) => {
(0, forEach_1.default)(newPartialPathsAndSuffixes, function (item) {
var prefixKeys = pathToHashKeys(item.partialPath);
(0, forEach_1.default)(prefixKeys, function (key) {
altsHashes[altIdx][key] = true;

@@ -449,2 +471,6 @@ });

}
};
// alternatives loop
for (var altIdx = 0; altIdx < currDataset.length; altIdx++) {
_loop_1(altIdx);
}

@@ -456,3 +482,3 @@ }

function getLookaheadPathsForOr(occurrence, ruleGrammar, k, orProd) {
const visitor = new InsideDefinitionFinderVisitor(occurrence, PROD_TYPE.ALTERNATION, orProd);
var visitor = new InsideDefinitionFinderVisitor(occurrence, PROD_TYPE.ALTERNATION, orProd);
ruleGrammar.accept(visitor);

@@ -463,9 +489,9 @@ return lookAheadSequenceFromAlternatives(visitor.result, k);

function getLookaheadPathsForOptionalProd(occurrence, ruleGrammar, prodType, k) {
const insideDefVisitor = new InsideDefinitionFinderVisitor(occurrence, prodType);
var insideDefVisitor = new InsideDefinitionFinderVisitor(occurrence, prodType);
ruleGrammar.accept(insideDefVisitor);
const insideDef = insideDefVisitor.result;
const afterDefWalker = new RestDefinitionFinderWalker(ruleGrammar, occurrence, prodType);
const afterDef = afterDefWalker.startWalking();
const insideFlat = new gast_1.Alternative({ definition: insideDef });
const afterFlat = new gast_1.Alternative({ definition: afterDef });
var insideDef = insideDefVisitor.result;
var afterDefWalker = new RestDefinitionFinderWalker(ruleGrammar, occurrence, prodType);
var afterDef = afterDefWalker.startWalking();
var insideFlat = new gast_1.Alternative({ definition: insideDef });
var afterFlat = new gast_1.Alternative({ definition: afterDef });
return lookAheadSequenceFromAlternatives([insideFlat, afterFlat], k);

@@ -475,11 +501,11 @@ }

function containsPath(alternative, searchPath) {
compareOtherPath: for (let i = 0; i < alternative.length; i++) {
const otherPath = alternative[i];
compareOtherPath: for (var i = 0; i < alternative.length; i++) {
var otherPath = alternative[i];
if (otherPath.length !== searchPath.length) {
continue;
}
for (let j = 0; j < otherPath.length; j++) {
const searchTok = searchPath[j];
const otherTok = otherPath[j];
const matchingTokens = searchTok === otherTok ||
for (var j = 0; j < otherPath.length; j++) {
var searchTok = searchPath[j];
var otherTok = otherPath[j];
var matchingTokens = searchTok === otherTok ||
otherTok.categoryMatchesMap[searchTok.tokenTypeIdx] !== undefined;

@@ -497,4 +523,4 @@ if (matchingTokens === false) {

return (prefix.length < other.length &&
(0, every_1.default)(prefix, (tokType, idx) => {
const otherTokType = other[idx];
(0, every_1.default)(prefix, function (tokType, idx) {
var otherTokType = other[idx];
return (tokType === otherTokType ||

@@ -506,5 +532,9 @@ otherTokType.categoryMatchesMap[tokType.tokenTypeIdx]);

function areTokenCategoriesNotUsed(lookAheadPaths) {
return (0, every_1.default)(lookAheadPaths, (singleAltPaths) => (0, every_1.default)(singleAltPaths, (singlePath) => (0, every_1.default)(singlePath, (token) => (0, isEmpty_1.default)(token.categoryMatches))));
return (0, every_1.default)(lookAheadPaths, function (singleAltPaths) {
return (0, every_1.default)(singleAltPaths, function (singlePath) {
return (0, every_1.default)(singlePath, function (token) { return (0, isEmpty_1.default)(token.categoryMatches); });
});
});
}
exports.areTokenCategoriesNotUsed = areTokenCategoriesNotUsed;
//# sourceMappingURL=lookahead.js.map
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -7,8 +22,8 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

exports.GastRefResolverVisitor = exports.resolveGrammar = void 0;
const parser_1 = require("../parser/parser");
const forEach_1 = __importDefault(require("lodash/forEach"));
const values_1 = __importDefault(require("lodash/values"));
const gast_1 = require("@chevrotain/gast");
var parser_1 = require("../parser/parser");
var forEach_1 = __importDefault(require("lodash/forEach"));
var values_1 = __importDefault(require("lodash/values"));
var gast_1 = require("@chevrotain/gast");
function resolveGrammar(topLevels, errMsgProvider) {
const refResolver = new GastRefResolverVisitor(topLevels, errMsgProvider);
var refResolver = new GastRefResolverVisitor(topLevels, errMsgProvider);
refResolver.resolveRefs();

@@ -18,19 +33,22 @@ return refResolver.errors;

exports.resolveGrammar = resolveGrammar;
class GastRefResolverVisitor extends gast_1.GAstVisitor {
constructor(nameToTopRule, errMsgProvider) {
super();
this.nameToTopRule = nameToTopRule;
this.errMsgProvider = errMsgProvider;
this.errors = [];
var GastRefResolverVisitor = /** @class */ (function (_super) {
__extends(GastRefResolverVisitor, _super);
function GastRefResolverVisitor(nameToTopRule, errMsgProvider) {
var _this = _super.call(this) || this;
_this.nameToTopRule = nameToTopRule;
_this.errMsgProvider = errMsgProvider;
_this.errors = [];
return _this;
}
resolveRefs() {
(0, forEach_1.default)((0, values_1.default)(this.nameToTopRule), (prod) => {
this.currTopLevel = prod;
prod.accept(this);
GastRefResolverVisitor.prototype.resolveRefs = function () {
var _this = this;
(0, forEach_1.default)((0, values_1.default)(this.nameToTopRule), function (prod) {
_this.currTopLevel = prod;
prod.accept(_this);
});
}
visitNonTerminal(node) {
const ref = this.nameToTopRule[node.nonTerminalName];
};
GastRefResolverVisitor.prototype.visitNonTerminal = function (node) {
var ref = this.nameToTopRule[node.nonTerminalName];
if (!ref) {
const msg = this.errMsgProvider.buildRuleNotFoundError(this.currTopLevel, node);
var msg = this.errMsgProvider.buildRuleNotFoundError(this.currTopLevel, node);
this.errors.push({

@@ -46,5 +64,6 @@ message: msg,

}
}
}
};
return GastRefResolverVisitor;
}(gast_1.GAstVisitor));
exports.GastRefResolverVisitor = GastRefResolverVisitor;
//# sourceMappingURL=resolver.js.map

@@ -7,39 +7,43 @@ "use strict";

exports.RestWalker = void 0;
const drop_1 = __importDefault(require("lodash/drop"));
const forEach_1 = __importDefault(require("lodash/forEach"));
const gast_1 = require("@chevrotain/gast");
var drop_1 = __importDefault(require("lodash/drop"));
var forEach_1 = __importDefault(require("lodash/forEach"));
var gast_1 = require("@chevrotain/gast");
/**
* A Grammar Walker that computes the "remaining" grammar "after" a productions in the grammar.
*/
class RestWalker {
walk(prod, prevRest = []) {
(0, forEach_1.default)(prod.definition, (subProd, index) => {
const currRest = (0, drop_1.default)(prod.definition, index + 1);
var RestWalker = /** @class */ (function () {
function RestWalker() {
}
RestWalker.prototype.walk = function (prod, prevRest) {
var _this = this;
if (prevRest === void 0) { prevRest = []; }
(0, forEach_1.default)(prod.definition, function (subProd, index) {
var currRest = (0, drop_1.default)(prod.definition, index + 1);
/* istanbul ignore else */
if (subProd instanceof gast_1.NonTerminal) {
this.walkProdRef(subProd, currRest, prevRest);
_this.walkProdRef(subProd, currRest, prevRest);
}
else if (subProd instanceof gast_1.Terminal) {
this.walkTerminal(subProd, currRest, prevRest);
_this.walkTerminal(subProd, currRest, prevRest);
}
else if (subProd instanceof gast_1.Alternative) {
this.walkFlat(subProd, currRest, prevRest);
_this.walkFlat(subProd, currRest, prevRest);
}
else if (subProd instanceof gast_1.Option) {
this.walkOption(subProd, currRest, prevRest);
_this.walkOption(subProd, currRest, prevRest);
}
else if (subProd instanceof gast_1.RepetitionMandatory) {
this.walkAtLeastOne(subProd, currRest, prevRest);
_this.walkAtLeastOne(subProd, currRest, prevRest);
}
else if (subProd instanceof gast_1.RepetitionMandatoryWithSeparator) {
this.walkAtLeastOneSep(subProd, currRest, prevRest);
_this.walkAtLeastOneSep(subProd, currRest, prevRest);
}
else if (subProd instanceof gast_1.RepetitionWithSeparator) {
this.walkManySep(subProd, currRest, prevRest);
_this.walkManySep(subProd, currRest, prevRest);
}
else if (subProd instanceof gast_1.Repetition) {
this.walkMany(subProd, currRest, prevRest);
_this.walkMany(subProd, currRest, prevRest);
}
else if (subProd instanceof gast_1.Alternation) {
this.walkOr(subProd, currRest, prevRest);
_this.walkOr(subProd, currRest, prevRest);
}

@@ -50,55 +54,57 @@ else {

});
}
walkTerminal(terminal, currRest, prevRest) { }
walkProdRef(refProd, currRest, prevRest) { }
walkFlat(flatProd, currRest, prevRest) {
};
RestWalker.prototype.walkTerminal = function (terminal, currRest, prevRest) { };
RestWalker.prototype.walkProdRef = function (refProd, currRest, prevRest) { };
RestWalker.prototype.walkFlat = function (flatProd, currRest, prevRest) {
// ABCDEF => after the D the rest is EF
const fullOrRest = currRest.concat(prevRest);
var fullOrRest = currRest.concat(prevRest);
this.walk(flatProd, fullOrRest);
}
walkOption(optionProd, currRest, prevRest) {
};
RestWalker.prototype.walkOption = function (optionProd, currRest, prevRest) {
// ABC(DE)?F => after the (DE)? the rest is F
const fullOrRest = currRest.concat(prevRest);
var fullOrRest = currRest.concat(prevRest);
this.walk(optionProd, fullOrRest);
}
walkAtLeastOne(atLeastOneProd, currRest, prevRest) {
};
RestWalker.prototype.walkAtLeastOne = function (atLeastOneProd, currRest, prevRest) {
// ABC(DE)+F => after the (DE)+ the rest is (DE)?F
const fullAtLeastOneRest = [
var fullAtLeastOneRest = [
new gast_1.Option({ definition: atLeastOneProd.definition })
].concat(currRest, prevRest);
this.walk(atLeastOneProd, fullAtLeastOneRest);
}
walkAtLeastOneSep(atLeastOneSepProd, currRest, prevRest) {
};
RestWalker.prototype.walkAtLeastOneSep = function (atLeastOneSepProd, currRest, prevRest) {
// ABC DE(,DE)* F => after the (,DE)+ the rest is (,DE)?F
const fullAtLeastOneSepRest = restForRepetitionWithSeparator(atLeastOneSepProd, currRest, prevRest);
var fullAtLeastOneSepRest = restForRepetitionWithSeparator(atLeastOneSepProd, currRest, prevRest);
this.walk(atLeastOneSepProd, fullAtLeastOneSepRest);
}
walkMany(manyProd, currRest, prevRest) {
};
RestWalker.prototype.walkMany = function (manyProd, currRest, prevRest) {
// ABC(DE)*F => after the (DE)* the rest is (DE)?F
const fullManyRest = [
var fullManyRest = [
new gast_1.Option({ definition: manyProd.definition })
].concat(currRest, prevRest);
this.walk(manyProd, fullManyRest);
}
walkManySep(manySepProd, currRest, prevRest) {
};
RestWalker.prototype.walkManySep = function (manySepProd, currRest, prevRest) {
// ABC (DE(,DE)*)? F => after the (,DE)* the rest is (,DE)?F
const fullManySepRest = restForRepetitionWithSeparator(manySepProd, currRest, prevRest);
var fullManySepRest = restForRepetitionWithSeparator(manySepProd, currRest, prevRest);
this.walk(manySepProd, fullManySepRest);
}
walkOr(orProd, currRest, prevRest) {
};
RestWalker.prototype.walkOr = function (orProd, currRest, prevRest) {
var _this = this;
// ABC(D|E|F)G => when finding the (D|E|F) the rest is G
const fullOrRest = currRest.concat(prevRest);
var fullOrRest = currRest.concat(prevRest);
// walk all different alternatives
(0, forEach_1.default)(orProd.definition, (alt) => {
(0, forEach_1.default)(orProd.definition, function (alt) {
// wrapping each alternative in a single definition wrapper
// to avoid errors in computing the rest of that alternative in the invocation to computeInProdFollows
// (otherwise for OR([alt1,alt2]) alt2 will be considered in 'rest' of alt1
const prodWrapper = new gast_1.Alternative({ definition: [alt] });
this.walk(prodWrapper, fullOrRest);
var prodWrapper = new gast_1.Alternative({ definition: [alt] });
_this.walk(prodWrapper, fullOrRest);
});
}
}
};
return RestWalker;
}());
exports.RestWalker = RestWalker;
function restForRepetitionWithSeparator(repSepProd, currRest, prevRest) {
const repSepRest = [
var repSepRest = [
new gast_1.Option({

@@ -110,5 +116,5 @@ definition: [

];
const fullRepSepRest = repSepRest.concat(currRest, prevRest);
var fullRepSepRest = repSepRest.concat(currRest, prevRest);
return fullRepSepRest;
}
//# sourceMappingURL=rest.js.map
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -7,24 +22,24 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

exports.EmbeddedActionsParser = exports.CstParser = exports.Parser = exports.EMPTY_ALT = exports.ParserDefinitionErrorType = exports.DEFAULT_RULE_CONFIG = exports.DEFAULT_PARSER_CONFIG = exports.END_OF_FILE = void 0;
const isEmpty_1 = __importDefault(require("lodash/isEmpty"));
const map_1 = __importDefault(require("lodash/map"));
const forEach_1 = __importDefault(require("lodash/forEach"));
const values_1 = __importDefault(require("lodash/values"));
const has_1 = __importDefault(require("lodash/has"));
const clone_1 = __importDefault(require("lodash/clone"));
const utils_1 = require("@chevrotain/utils");
const follow_1 = require("../grammar/follow");
const tokens_public_1 = require("../../scan/tokens_public");
const errors_public_1 = require("../errors_public");
const gast_resolver_public_1 = require("../grammar/gast/gast_resolver_public");
const recoverable_1 = require("./traits/recoverable");
const looksahead_1 = require("./traits/looksahead");
const tree_builder_1 = require("./traits/tree_builder");
const lexer_adapter_1 = require("./traits/lexer_adapter");
const recognizer_api_1 = require("./traits/recognizer_api");
const recognizer_engine_1 = require("./traits/recognizer_engine");
const error_handler_1 = require("./traits/error_handler");
const context_assist_1 = require("./traits/context_assist");
const gast_recorder_1 = require("./traits/gast_recorder");
const perf_tracer_1 = require("./traits/perf_tracer");
const apply_mixins_1 = require("./utils/apply_mixins");
var isEmpty_1 = __importDefault(require("lodash/isEmpty"));
var map_1 = __importDefault(require("lodash/map"));
var forEach_1 = __importDefault(require("lodash/forEach"));
var values_1 = __importDefault(require("lodash/values"));
var has_1 = __importDefault(require("lodash/has"));
var clone_1 = __importDefault(require("lodash/clone"));
var utils_1 = require("@chevrotain/utils");
var follow_1 = require("../grammar/follow");
var tokens_public_1 = require("../../scan/tokens_public");
var errors_public_1 = require("../errors_public");
var gast_resolver_public_1 = require("../grammar/gast/gast_resolver_public");
var recoverable_1 = require("./traits/recoverable");
var looksahead_1 = require("./traits/looksahead");
var tree_builder_1 = require("./traits/tree_builder");
var lexer_adapter_1 = require("./traits/lexer_adapter");
var recognizer_api_1 = require("./traits/recognizer_api");
var recognizer_engine_1 = require("./traits/recognizer_engine");
var error_handler_1 = require("./traits/error_handler");
var context_assist_1 = require("./traits/context_assist");
var gast_recorder_1 = require("./traits/gast_recorder");
var perf_tracer_1 = require("./traits/perf_tracer");
var apply_mixins_1 = require("./utils/apply_mixins");
exports.END_OF_FILE = (0, tokens_public_1.createTokenInstance)(tokens_public_1.EOF, "", NaN, NaN, NaN, NaN, NaN, NaN);

@@ -43,3 +58,3 @@ Object.freeze(exports.END_OF_FILE);

exports.DEFAULT_RULE_CONFIG = Object.freeze({
recoveryValueFunc: () => undefined,
recoveryValueFunc: function () { return undefined; },
resyncEnabled: true

@@ -63,3 +78,4 @@ });

})(ParserDefinitionErrorType = exports.ParserDefinitionErrorType || (exports.ParserDefinitionErrorType = {}));
function EMPTY_ALT(value = undefined) {
function EMPTY_ALT(value) {
if (value === void 0) { value = undefined; }
return function () {

@@ -70,7 +86,7 @@ return value;

exports.EMPTY_ALT = EMPTY_ALT;
class Parser {
constructor(tokenVocabulary, config) {
var Parser = /** @class */ (function () {
function Parser(tokenVocabulary, config) {
this.definitionErrors = [];
this.selfAnalysisDone = false;
const that = this;
var that = this;
that.initErrorHandler(config);

@@ -98,85 +114,87 @@ that.initLexerAdapter();

*/
static performSelfAnalysis(parserInstance) {
Parser.performSelfAnalysis = function (parserInstance) {
throw Error("The **static** `performSelfAnalysis` method has been deprecated." +
"\t\nUse the **instance** method with the same name instead.");
}
performSelfAnalysis() {
this.TRACE_INIT("performSelfAnalysis", () => {
let defErrorsMsgs;
this.selfAnalysisDone = true;
const className = this.className;
this.TRACE_INIT("toFastProps", () => {
};
Parser.prototype.performSelfAnalysis = function () {
var _this = this;
this.TRACE_INIT("performSelfAnalysis", function () {
var defErrorsMsgs;
_this.selfAnalysisDone = true;
var className = _this.className;
_this.TRACE_INIT("toFastProps", function () {
// Without this voodoo magic the parser would be x3-x4 slower
// It seems it is better to invoke `toFastProperties` **before**
// Any manipulations of the `this` object done during the recording phase.
(0, utils_1.toFastProperties)(this);
(0, utils_1.toFastProperties)(_this);
});
this.TRACE_INIT("Grammar Recording", () => {
_this.TRACE_INIT("Grammar Recording", function () {
try {
this.enableRecording();
_this.enableRecording();
// Building the GAST
(0, forEach_1.default)(this.definedRulesNames, (currRuleName) => {
const wrappedRule = this[currRuleName];
const originalGrammarAction = wrappedRule["originalGrammarAction"];
let recordedRuleGast;
this.TRACE_INIT(`${currRuleName} Rule`, () => {
recordedRuleGast = this.topLevelRuleRecord(currRuleName, originalGrammarAction);
(0, forEach_1.default)(_this.definedRulesNames, function (currRuleName) {
var wrappedRule = _this[currRuleName];
var originalGrammarAction = wrappedRule["originalGrammarAction"];
var recordedRuleGast;
_this.TRACE_INIT("".concat(currRuleName, " Rule"), function () {
recordedRuleGast = _this.topLevelRuleRecord(currRuleName, originalGrammarAction);
});
this.gastProductionsCache[currRuleName] = recordedRuleGast;
_this.gastProductionsCache[currRuleName] = recordedRuleGast;
});
}
finally {
this.disableRecording();
_this.disableRecording();
}
});
let resolverErrors = [];
this.TRACE_INIT("Grammar Resolving", () => {
var resolverErrors = [];
_this.TRACE_INIT("Grammar Resolving", function () {
resolverErrors = (0, gast_resolver_public_1.resolveGrammar)({
rules: (0, values_1.default)(this.gastProductionsCache)
rules: (0, values_1.default)(_this.gastProductionsCache)
});
this.definitionErrors = this.definitionErrors.concat(resolverErrors);
_this.definitionErrors = _this.definitionErrors.concat(resolverErrors);
});
this.TRACE_INIT("Grammar Validations", () => {
_this.TRACE_INIT("Grammar Validations", function () {
// only perform additional grammar validations IFF no resolving errors have occurred.
// as unresolved grammar may lead to unhandled runtime exceptions in the follow up validations.
if ((0, isEmpty_1.default)(resolverErrors) && this.skipValidations === false) {
const validationErrors = (0, gast_resolver_public_1.validateGrammar)({
rules: (0, values_1.default)(this.gastProductionsCache),
maxLookahead: this.maxLookahead,
tokenTypes: (0, values_1.default)(this.tokensMap),
if ((0, isEmpty_1.default)(resolverErrors) && _this.skipValidations === false) {
var validationErrors = (0, gast_resolver_public_1.validateGrammar)({
rules: (0, values_1.default)(_this.gastProductionsCache),
maxLookahead: _this.maxLookahead,
tokenTypes: (0, values_1.default)(_this.tokensMap),
errMsgProvider: errors_public_1.defaultGrammarValidatorErrorProvider,
grammarName: className
});
this.definitionErrors = this.definitionErrors.concat(validationErrors);
_this.definitionErrors = _this.definitionErrors.concat(validationErrors);
}
});
// this analysis may fail if the grammar is not perfectly valid
if ((0, isEmpty_1.default)(this.definitionErrors)) {
if ((0, isEmpty_1.default)(_this.definitionErrors)) {
// The results of these computations are not needed unless error recovery is enabled.
if (this.recoveryEnabled) {
this.TRACE_INIT("computeAllProdsFollows", () => {
const allFollows = (0, follow_1.computeAllProdsFollows)((0, values_1.default)(this.gastProductionsCache));
this.resyncFollows = allFollows;
if (_this.recoveryEnabled) {
_this.TRACE_INIT("computeAllProdsFollows", function () {
var allFollows = (0, follow_1.computeAllProdsFollows)((0, values_1.default)(_this.gastProductionsCache));
_this.resyncFollows = allFollows;
});
}
this.TRACE_INIT("ComputeLookaheadFunctions", () => {
this.preComputeLookaheadFunctions((0, values_1.default)(this.gastProductionsCache));
_this.TRACE_INIT("ComputeLookaheadFunctions", function () {
_this.preComputeLookaheadFunctions((0, values_1.default)(_this.gastProductionsCache));
});
}
if (!Parser.DEFER_DEFINITION_ERRORS_HANDLING &&
!(0, isEmpty_1.default)(this.definitionErrors)) {
defErrorsMsgs = (0, map_1.default)(this.definitionErrors, (defError) => defError.message);
throw new Error(`Parser Definition Errors detected:\n ${defErrorsMsgs.join("\n-------------------------------\n")}`);
!(0, isEmpty_1.default)(_this.definitionErrors)) {
defErrorsMsgs = (0, map_1.default)(_this.definitionErrors, function (defError) { return defError.message; });
throw new Error("Parser Definition Errors detected:\n ".concat(defErrorsMsgs.join("\n-------------------------------\n")));
}
});
}
}
};
// Set this flag to true if you don't want the Parser to throw error when problems in it's definition are detected.
// (normally during the parser's constructor).
// This is a design time flag, it will not affect the runtime error handling of the parser, just design time errors,
// for example: duplicate rule names, referencing an unresolved subrule, ect...
// This flag should not be enabled during normal usage, it is used in special situations, for example when
// needing to display the parser definition errors in some GUI(online playground).
Parser.DEFER_DEFINITION_ERRORS_HANDLING = false;
return Parser;
}());
exports.Parser = Parser;
// Set this flag to true if you don't want the Parser to throw error when problems in it's definition are detected.
// (normally during the parser's constructor).
// This is a design time flag, it will not affect the runtime error handling of the parser, just design time errors,
// for example: duplicate rule names, referencing an unresolved subrule, ect...
// This flag should not be enabled during normal usage, it is used in special situations, for example when
// needing to display the parser definition errors in some GUI(online playground).
Parser.DEFER_DEFINITION_ERRORS_HANDLING = false;
(0, apply_mixins_1.applyMixins)(Parser, [

@@ -194,18 +212,24 @@ recoverable_1.Recoverable,

]);
class CstParser extends Parser {
constructor(tokenVocabulary, config = exports.DEFAULT_PARSER_CONFIG) {
const configClone = (0, clone_1.default)(config);
var CstParser = /** @class */ (function (_super) {
__extends(CstParser, _super);
function CstParser(tokenVocabulary, config) {
if (config === void 0) { config = exports.DEFAULT_PARSER_CONFIG; }
var configClone = (0, clone_1.default)(config);
configClone.outputCst = true;
super(tokenVocabulary, configClone);
return _super.call(this, tokenVocabulary, configClone) || this;
}
}
return CstParser;
}(Parser));
exports.CstParser = CstParser;
class EmbeddedActionsParser extends Parser {
constructor(tokenVocabulary, config = exports.DEFAULT_PARSER_CONFIG) {
const configClone = (0, clone_1.default)(config);
var EmbeddedActionsParser = /** @class */ (function (_super) {
__extends(EmbeddedActionsParser, _super);
function EmbeddedActionsParser(tokenVocabulary, config) {
if (config === void 0) { config = exports.DEFAULT_PARSER_CONFIG; }
var configClone = (0, clone_1.default)(config);
configClone.outputCst = false;
super(tokenVocabulary, configClone);
return _super.call(this, tokenVocabulary, configClone) || this;
}
}
return EmbeddedActionsParser;
}(Parser));
exports.EmbeddedActionsParser = EmbeddedActionsParser;
//# sourceMappingURL=parser.js.map

@@ -7,25 +7,28 @@ "use strict";

exports.ContentAssist = void 0;
const interpreter_1 = require("../../grammar/interpreter");
const first_1 = __importDefault(require("lodash/first"));
const isUndefined_1 = __importDefault(require("lodash/isUndefined"));
class ContentAssist {
initContentAssist() { }
computeContentAssist(startRuleName, precedingInput) {
const startRuleGast = this.gastProductionsCache[startRuleName];
var interpreter_1 = require("../../grammar/interpreter");
var first_1 = __importDefault(require("lodash/first"));
var isUndefined_1 = __importDefault(require("lodash/isUndefined"));
var ContentAssist = /** @class */ (function () {
function ContentAssist() {
}
ContentAssist.prototype.initContentAssist = function () { };
ContentAssist.prototype.computeContentAssist = function (startRuleName, precedingInput) {
var startRuleGast = this.gastProductionsCache[startRuleName];
if ((0, isUndefined_1.default)(startRuleGast)) {
throw Error(`Rule ->${startRuleName}<- does not exist in this grammar.`);
throw Error("Rule ->".concat(startRuleName, "<- does not exist in this grammar."));
}
return (0, interpreter_1.nextPossibleTokensAfter)([startRuleGast], precedingInput, this.tokenMatcher, this.maxLookahead);
}
};
// TODO: should this be a member method or a utility? it does not have any state or usage of 'this'...
// TODO: should this be more explicitly part of the public API?
getNextPossibleTokenTypes(grammarPath) {
const topRuleName = (0, first_1.default)(grammarPath.ruleStack);
const gastProductions = this.getGAstProductions();
const topProduction = gastProductions[topRuleName];
const nextPossibleTokenTypes = new interpreter_1.NextAfterTokenWalker(topProduction, grammarPath).startWalking();
ContentAssist.prototype.getNextPossibleTokenTypes = function (grammarPath) {
var topRuleName = (0, first_1.default)(grammarPath.ruleStack);
var gastProductions = this.getGAstProductions();
var topProduction = gastProductions[topRuleName];
var nextPossibleTokenTypes = new interpreter_1.NextAfterTokenWalker(topProduction, grammarPath).startWalking();
return nextPossibleTokenTypes;
}
}
};
return ContentAssist;
}());
exports.ContentAssist = ContentAssist;
//# sourceMappingURL=context_assist.js.map

@@ -7,12 +7,14 @@ "use strict";

exports.ErrorHandler = void 0;
const exceptions_public_1 = require("../../exceptions_public");
const has_1 = __importDefault(require("lodash/has"));
const clone_1 = __importDefault(require("lodash/clone"));
const lookahead_1 = require("../../grammar/lookahead");
const parser_1 = require("../parser");
var exceptions_public_1 = require("../../exceptions_public");
var has_1 = __importDefault(require("lodash/has"));
var clone_1 = __importDefault(require("lodash/clone"));
var lookahead_1 = require("../../grammar/lookahead");
var parser_1 = require("../parser");
/**
* Trait responsible for runtime parsing errors.
*/
class ErrorHandler {
initErrorHandler(config) {
var ErrorHandler = /** @class */ (function () {
function ErrorHandler() {
}
ErrorHandler.prototype.initErrorHandler = function (config) {
this._errors = [];

@@ -22,4 +24,4 @@ this.errorMessageProvider = (0, has_1.default)(config, "errorMessageProvider")

: parser_1.DEFAULT_PARSER_CONFIG.errorMessageProvider;
}
SAVE_ERROR(error) {
};
ErrorHandler.prototype.SAVE_ERROR = function (error) {
if ((0, exceptions_public_1.isRecognitionException)(error)) {

@@ -36,20 +38,24 @@ error.context = {

}
}
get errors() {
return (0, clone_1.default)(this._errors);
}
set errors(newErrors) {
this._errors = newErrors;
}
};
Object.defineProperty(ErrorHandler.prototype, "errors", {
get: function () {
return (0, clone_1.default)(this._errors);
},
set: function (newErrors) {
this._errors = newErrors;
},
enumerable: false,
configurable: true
});
// TODO: consider caching the error message computed information
raiseEarlyExitException(occurrence, prodType, userDefinedErrMsg) {
const ruleName = this.getCurrRuleFullName();
const ruleGrammar = this.getGAstProductions()[ruleName];
const lookAheadPathsPerAlternative = (0, lookahead_1.getLookaheadPathsForOptionalProd)(occurrence, ruleGrammar, prodType, this.maxLookahead);
const insideProdPaths = lookAheadPathsPerAlternative[0];
const actualTokens = [];
for (let i = 1; i <= this.maxLookahead; i++) {
ErrorHandler.prototype.raiseEarlyExitException = function (occurrence, prodType, userDefinedErrMsg) {
var ruleName = this.getCurrRuleFullName();
var ruleGrammar = this.getGAstProductions()[ruleName];
var lookAheadPathsPerAlternative = (0, lookahead_1.getLookaheadPathsForOptionalProd)(occurrence, ruleGrammar, prodType, this.maxLookahead);
var insideProdPaths = lookAheadPathsPerAlternative[0];
var actualTokens = [];
for (var i = 1; i <= this.maxLookahead; i++) {
actualTokens.push(this.LA(i));
}
const msg = this.errorMessageProvider.buildEarlyExitMessage({
var msg = this.errorMessageProvider.buildEarlyExitMessage({
expectedIterationPaths: insideProdPaths,

@@ -62,15 +68,15 @@ actual: actualTokens,

throw this.SAVE_ERROR(new exceptions_public_1.EarlyExitException(msg, this.LA(1), this.LA(0)));
}
};
// TODO: consider caching the error message computed information
raiseNoAltException(occurrence, errMsgTypes) {
const ruleName = this.getCurrRuleFullName();
const ruleGrammar = this.getGAstProductions()[ruleName];
ErrorHandler.prototype.raiseNoAltException = function (occurrence, errMsgTypes) {
var ruleName = this.getCurrRuleFullName();
var ruleGrammar = this.getGAstProductions()[ruleName];
// TODO: getLookaheadPathsForOr can be slow for large enough maxLookahead and certain grammars, consider caching ?
const lookAheadPathsPerAlternative = (0, lookahead_1.getLookaheadPathsForOr)(occurrence, ruleGrammar, this.maxLookahead);
const actualTokens = [];
for (let i = 1; i <= this.maxLookahead; i++) {
var lookAheadPathsPerAlternative = (0, lookahead_1.getLookaheadPathsForOr)(occurrence, ruleGrammar, this.maxLookahead);
var actualTokens = [];
for (var i = 1; i <= this.maxLookahead; i++) {
actualTokens.push(this.LA(i));
}
const previousToken = this.LA(0);
const errMsg = this.errorMessageProvider.buildNoViableAltMessage({
var previousToken = this.LA(0);
var errMsg = this.errorMessageProvider.buildNoViableAltMessage({
expectedPathsPerAlt: lookAheadPathsPerAlternative,

@@ -83,5 +89,6 @@ actual: actualTokens,

throw this.SAVE_ERROR(new exceptions_public_1.NoViableAltException(errMsg, this.LA(1), previousToken));
}
}
};
return ErrorHandler;
}());
exports.ErrorHandler = ErrorHandler;
//# sourceMappingURL=error_handler.js.map

@@ -7,23 +7,23 @@ "use strict";

exports.GastRecorder = void 0;
const last_1 = __importDefault(require("lodash/last"));
const isArray_1 = __importDefault(require("lodash/isArray"));
const some_1 = __importDefault(require("lodash/some"));
const forEach_1 = __importDefault(require("lodash/forEach"));
const isFunction_1 = __importDefault(require("lodash/isFunction"));
const has_1 = __importDefault(require("lodash/has"));
const gast_1 = require("@chevrotain/gast");
const lexer_public_1 = require("../../../scan/lexer_public");
const tokens_1 = require("../../../scan/tokens");
const tokens_public_1 = require("../../../scan/tokens_public");
const parser_1 = require("../parser");
const keys_1 = require("../../grammar/keys");
const RECORDING_NULL_OBJECT = {
var last_1 = __importDefault(require("lodash/last"));
var isArray_1 = __importDefault(require("lodash/isArray"));
var some_1 = __importDefault(require("lodash/some"));
var forEach_1 = __importDefault(require("lodash/forEach"));
var isFunction_1 = __importDefault(require("lodash/isFunction"));
var has_1 = __importDefault(require("lodash/has"));
var gast_1 = require("@chevrotain/gast");
var lexer_public_1 = require("../../../scan/lexer_public");
var tokens_1 = require("../../../scan/tokens");
var tokens_public_1 = require("../../../scan/tokens_public");
var parser_1 = require("../parser");
var keys_1 = require("../../grammar/keys");
var RECORDING_NULL_OBJECT = {
description: "This Object indicates the Parser is during Recording Phase"
};
Object.freeze(RECORDING_NULL_OBJECT);
const HANDLE_SEPARATOR = true;
const MAX_METHOD_IDX = Math.pow(2, keys_1.BITS_FOR_OCCURRENCE_IDX) - 1;
const RFT = (0, tokens_public_1.createToken)({ name: "RECORDING_PHASE_TOKEN", pattern: lexer_public_1.Lexer.NA });
var HANDLE_SEPARATOR = true;
var MAX_METHOD_IDX = Math.pow(2, keys_1.BITS_FOR_OCCURRENCE_IDX) - 1;
var RFT = (0, tokens_public_1.createToken)({ name: "RECORDING_PHASE_TOKEN", pattern: lexer_public_1.Lexer.NA });
(0, tokens_1.augmentTokenTypes)([RFT]);
const RECORDING_PHASE_TOKEN = (0, tokens_public_1.createTokenInstance)(RFT, "This IToken indicates the Parser is in Recording Phase\n\t" +
var RECORDING_PHASE_TOKEN = (0, tokens_public_1.createTokenInstance)(RFT, "This IToken indicates the Parser is in Recording Phase\n\t" +
"" +

@@ -35,3 +35,3 @@ "See: https://chevrotain.io/docs/guide/internals.html#grammar-recording for details",

Object.freeze(RECORDING_PHASE_TOKEN);
const RECORDING_PHASE_CSTNODE = {
var RECORDING_PHASE_CSTNODE = {
name: "This CSTNode indicates the Parser is in Recording Phase\n\t" +

@@ -44,71 +44,78 @@ "See: https://chevrotain.io/docs/guide/internals.html#grammar-recording for details",

*/
class GastRecorder {
initGastRecorder(config) {
var GastRecorder = /** @class */ (function () {
function GastRecorder() {
}
GastRecorder.prototype.initGastRecorder = function (config) {
this.recordingProdStack = [];
this.RECORDING_PHASE = false;
}
enableRecording() {
};
GastRecorder.prototype.enableRecording = function () {
var _this = this;
this.RECORDING_PHASE = true;
this.TRACE_INIT("Enable Recording", () => {
/**
* Warning Dark Voodoo Magic upcoming!
* We are "replacing" the public parsing DSL methods API
* With **new** alternative implementations on the Parser **instance**
*
* So far this is the only way I've found to avoid performance regressions during parsing time.
* - Approx 30% performance regression was measured on Chrome 75 Canary when attempting to replace the "internal"
* implementations directly instead.
*/
for (let i = 0; i < 10; i++) {
const idx = i > 0 ? i : "";
this[`CONSUME${idx}`] = function (arg1, arg2) {
this.TRACE_INIT("Enable Recording", function () {
var _loop_1 = function (i) {
var idx = i > 0 ? i : "";
_this["CONSUME".concat(idx)] = function (arg1, arg2) {
return this.consumeInternalRecord(arg1, i, arg2);
};
this[`SUBRULE${idx}`] = function (arg1, arg2) {
_this["SUBRULE".concat(idx)] = function (arg1, arg2) {
return this.subruleInternalRecord(arg1, i, arg2);
};
this[`OPTION${idx}`] = function (arg1) {
_this["OPTION".concat(idx)] = function (arg1) {
return this.optionInternalRecord(arg1, i);
};
this[`OR${idx}`] = function (arg1) {
_this["OR".concat(idx)] = function (arg1) {
return this.orInternalRecord(arg1, i);
};
this[`MANY${idx}`] = function (arg1) {
_this["MANY".concat(idx)] = function (arg1) {
this.manyInternalRecord(i, arg1);
};
this[`MANY_SEP${idx}`] = function (arg1) {
_this["MANY_SEP".concat(idx)] = function (arg1) {
this.manySepFirstInternalRecord(i, arg1);
};
this[`AT_LEAST_ONE${idx}`] = function (arg1) {
_this["AT_LEAST_ONE".concat(idx)] = function (arg1) {
this.atLeastOneInternalRecord(i, arg1);
};
this[`AT_LEAST_ONE_SEP${idx}`] = function (arg1) {
_this["AT_LEAST_ONE_SEP".concat(idx)] = function (arg1) {
this.atLeastOneSepFirstInternalRecord(i, arg1);
};
};
/**
* Warning Dark Voodoo Magic upcoming!
* We are "replacing" the public parsing DSL methods API
* With **new** alternative implementations on the Parser **instance**
*
* So far this is the only way I've found to avoid performance regressions during parsing time.
* - Approx 30% performance regression was measured on Chrome 75 Canary when attempting to replace the "internal"
* implementations directly instead.
*/
for (var i = 0; i < 10; i++) {
_loop_1(i);
}
// DSL methods with the idx(suffix) as an argument
this[`consume`] = function (idx, arg1, arg2) {
_this["consume"] = function (idx, arg1, arg2) {
return this.consumeInternalRecord(arg1, idx, arg2);
};
this[`subrule`] = function (idx, arg1, arg2) {
_this["subrule"] = function (idx, arg1, arg2) {
return this.subruleInternalRecord(arg1, idx, arg2);
};
this[`option`] = function (idx, arg1) {
_this["option"] = function (idx, arg1) {
return this.optionInternalRecord(arg1, idx);
};
this[`or`] = function (idx, arg1) {
_this["or"] = function (idx, arg1) {
return this.orInternalRecord(arg1, idx);
};
this[`many`] = function (idx, arg1) {
_this["many"] = function (idx, arg1) {
this.manyInternalRecord(idx, arg1);
};
this[`atLeastOne`] = function (idx, arg1) {
_this["atLeastOne"] = function (idx, arg1) {
this.atLeastOneInternalRecord(idx, arg1);
};
this.ACTION = this.ACTION_RECORD;
this.BACKTRACK = this.BACKTRACK_RECORD;
this.LA = this.LA_RECORD;
_this.ACTION = _this.ACTION_RECORD;
_this.BACKTRACK = _this.BACKTRACK_RECORD;
_this.LA = _this.LA_RECORD;
});
}
disableRecording() {
};
GastRecorder.prototype.disableRecording = function () {
var _this = this;
this.RECORDING_PHASE = false;

@@ -119,21 +126,21 @@ // By deleting these **instance** properties, any future invocation

// do during the recording phase.
this.TRACE_INIT("Deleting Recording methods", () => {
const that = this;
for (let i = 0; i < 10; i++) {
const idx = i > 0 ? i : "";
delete that[`CONSUME${idx}`];
delete that[`SUBRULE${idx}`];
delete that[`OPTION${idx}`];
delete that[`OR${idx}`];
delete that[`MANY${idx}`];
delete that[`MANY_SEP${idx}`];
delete that[`AT_LEAST_ONE${idx}`];
delete that[`AT_LEAST_ONE_SEP${idx}`];
this.TRACE_INIT("Deleting Recording methods", function () {
var that = _this;
for (var i = 0; i < 10; i++) {
var idx = i > 0 ? i : "";
delete that["CONSUME".concat(idx)];
delete that["SUBRULE".concat(idx)];
delete that["OPTION".concat(idx)];
delete that["OR".concat(idx)];
delete that["MANY".concat(idx)];
delete that["MANY_SEP".concat(idx)];
delete that["AT_LEAST_ONE".concat(idx)];
delete that["AT_LEAST_ONE_SEP".concat(idx)];
}
delete that[`consume`];
delete that[`subrule`];
delete that[`option`];
delete that[`or`];
delete that[`many`];
delete that[`atLeastOne`];
delete that["consume"];
delete that["subrule"];
delete that["option"];
delete that["or"];
delete that["many"];
delete that["atLeastOne"];
delete that.ACTION;

@@ -143,23 +150,23 @@ delete that.BACKTRACK;

});
}
};
// Parser methods are called inside an ACTION?
// Maybe try/catch/finally on ACTIONS while disabling the recorders state changes?
// @ts-expect-error -- noop place holder
ACTION_RECORD(impl) {
GastRecorder.prototype.ACTION_RECORD = function (impl) {
// NO-OP during recording
}
};
// Executing backtracking logic will break our recording logic assumptions
BACKTRACK_RECORD(grammarRule, args) {
return () => true;
}
GastRecorder.prototype.BACKTRACK_RECORD = function (grammarRule, args) {
return function () { return true; };
};
// LA is part of the official API and may be used for custom lookahead logic
// by end users who may forget to wrap it in ACTION or inside a GATE
LA_RECORD(howMuch) {
GastRecorder.prototype.LA_RECORD = function (howMuch) {
// We cannot use the RECORD_PHASE_TOKEN here because someone may depend
// On LA return EOF at the end of the input so an infinite loop may occur.
return parser_1.END_OF_FILE;
}
topLevelRuleRecord(name, def) {
};
GastRecorder.prototype.topLevelRuleRecord = function (name, def) {
try {
const newTopLevelRule = new gast_1.Rule({ definition: [], name: name });
var newTopLevelRule = new gast_1.Rule({ definition: [], name: name });
newTopLevelRule.name = name;

@@ -186,34 +193,34 @@ this.recordingProdStack.push(newTopLevelRule);

}
}
};
// Implementation of parsing DSL
optionInternalRecord(actionORMethodDef, occurrence) {
GastRecorder.prototype.optionInternalRecord = function (actionORMethodDef, occurrence) {
return recordProd.call(this, gast_1.Option, actionORMethodDef, occurrence);
}
atLeastOneInternalRecord(occurrence, actionORMethodDef) {
};
GastRecorder.prototype.atLeastOneInternalRecord = function (occurrence, actionORMethodDef) {
recordProd.call(this, gast_1.RepetitionMandatory, actionORMethodDef, occurrence);
}
atLeastOneSepFirstInternalRecord(occurrence, options) {
};
GastRecorder.prototype.atLeastOneSepFirstInternalRecord = function (occurrence, options) {
recordProd.call(this, gast_1.RepetitionMandatoryWithSeparator, options, occurrence, HANDLE_SEPARATOR);
}
manyInternalRecord(occurrence, actionORMethodDef) {
};
GastRecorder.prototype.manyInternalRecord = function (occurrence, actionORMethodDef) {
recordProd.call(this, gast_1.Repetition, actionORMethodDef, occurrence);
}
manySepFirstInternalRecord(occurrence, options) {
};
GastRecorder.prototype.manySepFirstInternalRecord = function (occurrence, options) {
recordProd.call(this, gast_1.RepetitionWithSeparator, options, occurrence, HANDLE_SEPARATOR);
}
orInternalRecord(altsOrOpts, occurrence) {
};
GastRecorder.prototype.orInternalRecord = function (altsOrOpts, occurrence) {
return recordOrProd.call(this, altsOrOpts, occurrence);
}
subruleInternalRecord(ruleToCall, occurrence, options) {
};
GastRecorder.prototype.subruleInternalRecord = function (ruleToCall, occurrence, options) {
assertMethodIdxIsValid(occurrence);
if (!ruleToCall || (0, has_1.default)(ruleToCall, "ruleName") === false) {
const error = new Error(`<SUBRULE${getIdxSuffix(occurrence)}> argument is invalid` +
` expecting a Parser method reference but got: <${JSON.stringify(ruleToCall)}>` +
`\n inside top level rule: <${this.recordingProdStack[0].name}>`);
var error = new Error("<SUBRULE".concat(getIdxSuffix(occurrence), "> argument is invalid") +
" expecting a Parser method reference but got: <".concat(JSON.stringify(ruleToCall), ">") +
"\n inside top level rule: <".concat(this.recordingProdStack[0].name, ">"));
error.KNOWN_RECORDER_ERROR = true;
throw error;
}
const prevProd = (0, last_1.default)(this.recordingProdStack);
const ruleName = ruleToCall.ruleName;
const newNoneTerminal = new gast_1.NonTerminal({
var prevProd = (0, last_1.default)(this.recordingProdStack);
var ruleName = ruleToCall.ruleName;
var newNoneTerminal = new gast_1.NonTerminal({
idx: occurrence,

@@ -227,14 +234,14 @@ nonTerminalName: ruleName,

return this.outputCst ? RECORDING_PHASE_CSTNODE : RECORDING_NULL_OBJECT;
}
consumeInternalRecord(tokType, occurrence, options) {
};
GastRecorder.prototype.consumeInternalRecord = function (tokType, occurrence, options) {
assertMethodIdxIsValid(occurrence);
if (!(0, tokens_1.hasShortKeyProperty)(tokType)) {
const error = new Error(`<CONSUME${getIdxSuffix(occurrence)}> argument is invalid` +
` expecting a TokenType reference but got: <${JSON.stringify(tokType)}>` +
`\n inside top level rule: <${this.recordingProdStack[0].name}>`);
var error = new Error("<CONSUME".concat(getIdxSuffix(occurrence), "> argument is invalid") +
" expecting a TokenType reference but got: <".concat(JSON.stringify(tokType), ">") +
"\n inside top level rule: <".concat(this.recordingProdStack[0].name, ">"));
error.KNOWN_RECORDER_ERROR = true;
throw error;
}
const prevProd = (0, last_1.default)(this.recordingProdStack);
const newNoneTerminal = new gast_1.Terminal({
var prevProd = (0, last_1.default)(this.recordingProdStack);
var newNoneTerminal = new gast_1.Terminal({
idx: occurrence,

@@ -246,10 +253,12 @@ terminalType: tokType,

return RECORDING_PHASE_TOKEN;
}
}
};
return GastRecorder;
}());
exports.GastRecorder = GastRecorder;
function recordProd(prodConstructor, mainProdArg, occurrence, handleSep = false) {
function recordProd(prodConstructor, mainProdArg, occurrence, handleSep) {
if (handleSep === void 0) { handleSep = false; }
assertMethodIdxIsValid(occurrence);
const prevProd = (0, last_1.default)(this.recordingProdStack);
const grammarAction = (0, isFunction_1.default)(mainProdArg) ? mainProdArg : mainProdArg.DEF;
const newProd = new prodConstructor({ definition: [], idx: occurrence });
var prevProd = (0, last_1.default)(this.recordingProdStack);
var grammarAction = (0, isFunction_1.default)(mainProdArg) ? mainProdArg : mainProdArg.DEF;
var newProd = new prodConstructor({ definition: [], idx: occurrence });
if (handleSep) {

@@ -268,8 +277,9 @@ newProd.separator = mainProdArg.SEP;

function recordOrProd(mainProdArg, occurrence) {
var _this = this;
assertMethodIdxIsValid(occurrence);
const prevProd = (0, last_1.default)(this.recordingProdStack);
var prevProd = (0, last_1.default)(this.recordingProdStack);
// Only an array of alternatives
const hasOptions = (0, isArray_1.default)(mainProdArg) === false;
const alts = hasOptions === false ? mainProdArg : mainProdArg.DEF;
const newOrProd = new gast_1.Alternation({
var hasOptions = (0, isArray_1.default)(mainProdArg) === false;
var alts = hasOptions === false ? mainProdArg : mainProdArg.DEF;
var newOrProd = new gast_1.Alternation({
definition: [],

@@ -282,7 +292,7 @@ idx: occurrence,

}
const hasPredicates = (0, some_1.default)(alts, (currAlt) => (0, isFunction_1.default)(currAlt.GATE));
var hasPredicates = (0, some_1.default)(alts, function (currAlt) { return (0, isFunction_1.default)(currAlt.GATE); });
newOrProd.hasPredicates = hasPredicates;
prevProd.definition.push(newOrProd);
(0, forEach_1.default)(alts, (currAlt) => {
const currAltFlat = new gast_1.Alternative({ definition: [] });
(0, forEach_1.default)(alts, function (currAlt) {
var currAltFlat = new gast_1.Alternative({ definition: [] });
newOrProd.definition.push(currAltFlat);

@@ -296,5 +306,5 @@ if ((0, has_1.default)(currAlt, "IGNORE_AMBIGUITIES")) {

}
this.recordingProdStack.push(currAltFlat);
currAlt.ALT.call(this);
this.recordingProdStack.pop();
_this.recordingProdStack.push(currAltFlat);
currAlt.ALT.call(_this);
_this.recordingProdStack.pop();
});

@@ -304,10 +314,10 @@ return RECORDING_NULL_OBJECT;

function getIdxSuffix(idx) {
return idx === 0 ? "" : `${idx}`;
return idx === 0 ? "" : "".concat(idx);
}
function assertMethodIdxIsValid(idx) {
if (idx < 0 || idx > MAX_METHOD_IDX) {
const error = new Error(
var error = new Error(
// The stack trace will contain all the needed details
`Invalid DSL Method idx value: <${idx}>\n\t` +
`Idx value must be a none negative value smaller than ${MAX_METHOD_IDX + 1}`);
"Invalid DSL Method idx value: <".concat(idx, ">\n\t") +
"Idx value must be a none negative value smaller than ".concat(MAX_METHOD_IDX + 1));
error.KNOWN_RECORDER_ERROR = true;

@@ -314,0 +324,0 @@ throw error;

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.LexerAdapter = void 0;
const parser_1 = require("../parser");
var parser_1 = require("../parser");
/**

@@ -12,25 +12,31 @@ * Trait responsible abstracting over the interaction with Lexer output (Token vector).

*/
class LexerAdapter {
initLexerAdapter() {
var LexerAdapter = /** @class */ (function () {
function LexerAdapter() {
}
LexerAdapter.prototype.initLexerAdapter = function () {
this.tokVector = [];
this.tokVectorLength = 0;
this.currIdx = -1;
}
set input(newInput) {
// @ts-ignore - `this parameter` not supported in setters/getters
// - https://www.typescriptlang.org/docs/handbook/functions.html#this-parameters
if (this.selfAnalysisDone !== true) {
throw Error(`Missing <performSelfAnalysis> invocation at the end of the Parser's constructor.`);
}
// @ts-ignore - `this parameter` not supported in setters/getters
// - https://www.typescriptlang.org/docs/handbook/functions.html#this-parameters
this.reset();
this.tokVector = newInput;
this.tokVectorLength = newInput.length;
}
get input() {
return this.tokVector;
}
};
Object.defineProperty(LexerAdapter.prototype, "input", {
get: function () {
return this.tokVector;
},
set: function (newInput) {
// @ts-ignore - `this parameter` not supported in setters/getters
// - https://www.typescriptlang.org/docs/handbook/functions.html#this-parameters
if (this.selfAnalysisDone !== true) {
throw Error("Missing <performSelfAnalysis> invocation at the end of the Parser's constructor.");
}
// @ts-ignore - `this parameter` not supported in setters/getters
// - https://www.typescriptlang.org/docs/handbook/functions.html#this-parameters
this.reset();
this.tokVector = newInput;
this.tokVectorLength = newInput.length;
},
enumerable: false,
configurable: true
});
// skips a token and returns the next token
SKIP_TOKEN() {
LexerAdapter.prototype.SKIP_TOKEN = function () {
if (this.currIdx <= this.tokVector.length - 2) {

@@ -43,7 +49,7 @@ this.consumeToken();

}
}
};
// Lexer (accessing Token vector) related methods which can be overridden to implement lazy lexers
// or lexers dependent on parser context.
LA(howMuch) {
const soughtIdx = this.currIdx + howMuch;
LexerAdapter.prototype.LA = function (howMuch) {
var soughtIdx = this.currIdx + howMuch;
if (soughtIdx < 0 || this.tokVectorLength <= soughtIdx) {

@@ -55,23 +61,24 @@ return parser_1.END_OF_FILE;

}
}
consumeToken() {
};
LexerAdapter.prototype.consumeToken = function () {
this.currIdx++;
}
exportLexerState() {
};
LexerAdapter.prototype.exportLexerState = function () {
return this.currIdx;
}
importLexerState(newState) {
};
LexerAdapter.prototype.importLexerState = function (newState) {
this.currIdx = newState;
}
resetLexerState() {
};
LexerAdapter.prototype.resetLexerState = function () {
this.currIdx = -1;
}
moveToTerminatedState() {
};
LexerAdapter.prototype.moveToTerminatedState = function () {
this.currIdx = this.tokVector.length - 1;
}
getLexerPosition() {
};
LexerAdapter.prototype.getLexerPosition = function () {
return this.exportLexerState();
}
}
};
return LexerAdapter;
}());
exports.LexerAdapter = LexerAdapter;
//# sourceMappingURL=lexer_adapter.js.map
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -7,14 +22,16 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

exports.collectMethods = exports.LooksAhead = void 0;
const lookahead_1 = require("../../grammar/lookahead");
const forEach_1 = __importDefault(require("lodash/forEach"));
const has_1 = __importDefault(require("lodash/has"));
const parser_1 = require("../parser");
const keys_1 = require("../../grammar/keys");
const gast_1 = require("@chevrotain/gast");
const gast_2 = require("@chevrotain/gast");
var lookahead_1 = require("../../grammar/lookahead");
var forEach_1 = __importDefault(require("lodash/forEach"));
var has_1 = __importDefault(require("lodash/has"));
var parser_1 = require("../parser");
var keys_1 = require("../../grammar/keys");
var gast_1 = require("@chevrotain/gast");
var gast_2 = require("@chevrotain/gast");
/**
* Trait responsible for the lookahead related utilities and optimizations.
*/
class LooksAhead {
initLooksAhead(config) {
var LooksAhead = /** @class */ (function () {
function LooksAhead() {
}
LooksAhead.prototype.initLooksAhead = function (config) {
this.dynamicTokensEnabled = (0, has_1.default)(config, "dynamicTokensEnabled")

@@ -27,64 +44,68 @@ ? config.dynamicTokensEnabled // assumes end user provides the correct config value/type

this.lookAheadFuncsCache = new Map();
}
preComputeLookaheadFunctions(rules) {
(0, forEach_1.default)(rules, (currRule) => {
this.TRACE_INIT(`${currRule.name} Rule Lookahead`, () => {
const { alternation, repetition, option, repetitionMandatory, repetitionMandatoryWithSeparator, repetitionWithSeparator } = collectMethods(currRule);
(0, forEach_1.default)(alternation, (currProd) => {
const prodIdx = currProd.idx === 0 ? "" : currProd.idx;
this.TRACE_INIT(`${(0, gast_2.getProductionDslName)(currProd)}${prodIdx}`, () => {
const laFunc = (0, lookahead_1.buildLookaheadFuncForOr)(currProd.idx, currRule, currProd.maxLookahead || this.maxLookahead, currProd.hasPredicates, this.dynamicTokensEnabled, this.lookAheadBuilderForAlternatives);
const key = (0, keys_1.getKeyForAutomaticLookahead)(this.fullRuleNameToShort[currRule.name], keys_1.OR_IDX, currProd.idx);
this.setLaFuncCache(key, laFunc);
};
LooksAhead.prototype.preComputeLookaheadFunctions = function (rules) {
var _this = this;
(0, forEach_1.default)(rules, function (currRule) {
_this.TRACE_INIT("".concat(currRule.name, " Rule Lookahead"), function () {
var _a = collectMethods(currRule), alternation = _a.alternation, repetition = _a.repetition, option = _a.option, repetitionMandatory = _a.repetitionMandatory, repetitionMandatoryWithSeparator = _a.repetitionMandatoryWithSeparator, repetitionWithSeparator = _a.repetitionWithSeparator;
(0, forEach_1.default)(alternation, function (currProd) {
var prodIdx = currProd.idx === 0 ? "" : currProd.idx;
_this.TRACE_INIT("".concat((0, gast_2.getProductionDslName)(currProd)).concat(prodIdx), function () {
var laFunc = (0, lookahead_1.buildLookaheadFuncForOr)(currProd.idx, currRule, currProd.maxLookahead || _this.maxLookahead, currProd.hasPredicates, _this.dynamicTokensEnabled, _this.lookAheadBuilderForAlternatives);
var key = (0, keys_1.getKeyForAutomaticLookahead)(_this.fullRuleNameToShort[currRule.name], keys_1.OR_IDX, currProd.idx);
_this.setLaFuncCache(key, laFunc);
});
});
(0, forEach_1.default)(repetition, (currProd) => {
this.computeLookaheadFunc(currRule, currProd.idx, keys_1.MANY_IDX, lookahead_1.PROD_TYPE.REPETITION, currProd.maxLookahead, (0, gast_2.getProductionDslName)(currProd));
(0, forEach_1.default)(repetition, function (currProd) {
_this.computeLookaheadFunc(currRule, currProd.idx, keys_1.MANY_IDX, lookahead_1.PROD_TYPE.REPETITION, currProd.maxLookahead, (0, gast_2.getProductionDslName)(currProd));
});
(0, forEach_1.default)(option, (currProd) => {
this.computeLookaheadFunc(currRule, currProd.idx, keys_1.OPTION_IDX, lookahead_1.PROD_TYPE.OPTION, currProd.maxLookahead, (0, gast_2.getProductionDslName)(currProd));
(0, forEach_1.default)(option, function (currProd) {
_this.computeLookaheadFunc(currRule, currProd.idx, keys_1.OPTION_IDX, lookahead_1.PROD_TYPE.OPTION, currProd.maxLookahead, (0, gast_2.getProductionDslName)(currProd));
});
(0, forEach_1.default)(repetitionMandatory, (currProd) => {
this.computeLookaheadFunc(currRule, currProd.idx, keys_1.AT_LEAST_ONE_IDX, lookahead_1.PROD_TYPE.REPETITION_MANDATORY, currProd.maxLookahead, (0, gast_2.getProductionDslName)(currProd));
(0, forEach_1.default)(repetitionMandatory, function (currProd) {
_this.computeLookaheadFunc(currRule, currProd.idx, keys_1.AT_LEAST_ONE_IDX, lookahead_1.PROD_TYPE.REPETITION_MANDATORY, currProd.maxLookahead, (0, gast_2.getProductionDslName)(currProd));
});
(0, forEach_1.default)(repetitionMandatoryWithSeparator, (currProd) => {
this.computeLookaheadFunc(currRule, currProd.idx, keys_1.AT_LEAST_ONE_SEP_IDX, lookahead_1.PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR, currProd.maxLookahead, (0, gast_2.getProductionDslName)(currProd));
(0, forEach_1.default)(repetitionMandatoryWithSeparator, function (currProd) {
_this.computeLookaheadFunc(currRule, currProd.idx, keys_1.AT_LEAST_ONE_SEP_IDX, lookahead_1.PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR, currProd.maxLookahead, (0, gast_2.getProductionDslName)(currProd));
});
(0, forEach_1.default)(repetitionWithSeparator, (currProd) => {
this.computeLookaheadFunc(currRule, currProd.idx, keys_1.MANY_SEP_IDX, lookahead_1.PROD_TYPE.REPETITION_WITH_SEPARATOR, currProd.maxLookahead, (0, gast_2.getProductionDslName)(currProd));
(0, forEach_1.default)(repetitionWithSeparator, function (currProd) {
_this.computeLookaheadFunc(currRule, currProd.idx, keys_1.MANY_SEP_IDX, lookahead_1.PROD_TYPE.REPETITION_WITH_SEPARATOR, currProd.maxLookahead, (0, gast_2.getProductionDslName)(currProd));
});
});
});
}
computeLookaheadFunc(rule, prodOccurrence, prodKey, prodType, prodMaxLookahead, dslMethodName) {
this.TRACE_INIT(`${dslMethodName}${prodOccurrence === 0 ? "" : prodOccurrence}`, () => {
const laFunc = (0, lookahead_1.buildLookaheadFuncForOptionalProd)(prodOccurrence, rule, prodMaxLookahead || this.maxLookahead, this.dynamicTokensEnabled, prodType, this.lookAheadBuilderForOptional);
const key = (0, keys_1.getKeyForAutomaticLookahead)(this.fullRuleNameToShort[rule.name], prodKey, prodOccurrence);
this.setLaFuncCache(key, laFunc);
};
LooksAhead.prototype.computeLookaheadFunc = function (rule, prodOccurrence, prodKey, prodType, prodMaxLookahead, dslMethodName) {
var _this = this;
this.TRACE_INIT("".concat(dslMethodName).concat(prodOccurrence === 0 ? "" : prodOccurrence), function () {
var laFunc = (0, lookahead_1.buildLookaheadFuncForOptionalProd)(prodOccurrence, rule, prodMaxLookahead || _this.maxLookahead, _this.dynamicTokensEnabled, prodType, _this.lookAheadBuilderForOptional);
var key = (0, keys_1.getKeyForAutomaticLookahead)(_this.fullRuleNameToShort[rule.name], prodKey, prodOccurrence);
_this.setLaFuncCache(key, laFunc);
});
}
lookAheadBuilderForOptional(alt, tokenMatcher, dynamicTokensEnabled) {
};
LooksAhead.prototype.lookAheadBuilderForOptional = function (alt, tokenMatcher, dynamicTokensEnabled) {
return (0, lookahead_1.buildSingleAlternativeLookaheadFunction)(alt, tokenMatcher, dynamicTokensEnabled);
}
lookAheadBuilderForAlternatives(alts, hasPredicates, tokenMatcher, dynamicTokensEnabled) {
};
LooksAhead.prototype.lookAheadBuilderForAlternatives = function (alts, hasPredicates, tokenMatcher, dynamicTokensEnabled) {
return (0, lookahead_1.buildAlternativesLookAheadFunc)(alts, hasPredicates, tokenMatcher, dynamicTokensEnabled);
}
};
// this actually returns a number, but it is always used as a string (object prop key)
getKeyForAutomaticLookahead(dslMethodIdx, occurrence) {
const currRuleShortName = this.getLastExplicitRuleShortName();
LooksAhead.prototype.getKeyForAutomaticLookahead = function (dslMethodIdx, occurrence) {
var currRuleShortName = this.getLastExplicitRuleShortName();
return (0, keys_1.getKeyForAutomaticLookahead)(currRuleShortName, dslMethodIdx, occurrence);
}
getLaFuncFromCache(key) {
};
LooksAhead.prototype.getLaFuncFromCache = function (key) {
return this.lookAheadFuncsCache.get(key);
}
};
/* istanbul ignore next */
setLaFuncCache(key, value) {
LooksAhead.prototype.setLaFuncCache = function (key, value) {
this.lookAheadFuncsCache.set(key, value);
}
}
};
return LooksAhead;
}());
exports.LooksAhead = LooksAhead;
class DslMethodsCollectorVisitor extends gast_1.GAstVisitor {
constructor() {
super(...arguments);
this.dslMethods = {
var DslMethodsCollectorVisitor = /** @class */ (function (_super) {
__extends(DslMethodsCollectorVisitor, _super);
function DslMethodsCollectorVisitor() {
var _this = _super !== null && _super.apply(this, arguments) || this;
_this.dslMethods = {
option: [],

@@ -97,4 +118,5 @@ alternation: [],

};
return _this;
}
reset() {
DslMethodsCollectorVisitor.prototype.reset = function () {
this.dslMethods = {

@@ -108,27 +130,28 @@ option: [],

};
}
visitOption(option) {
};
DslMethodsCollectorVisitor.prototype.visitOption = function (option) {
this.dslMethods.option.push(option);
}
visitRepetitionWithSeparator(manySep) {
};
DslMethodsCollectorVisitor.prototype.visitRepetitionWithSeparator = function (manySep) {
this.dslMethods.repetitionWithSeparator.push(manySep);
}
visitRepetitionMandatory(atLeastOne) {
};
DslMethodsCollectorVisitor.prototype.visitRepetitionMandatory = function (atLeastOne) {
this.dslMethods.repetitionMandatory.push(atLeastOne);
}
visitRepetitionMandatoryWithSeparator(atLeastOneSep) {
};
DslMethodsCollectorVisitor.prototype.visitRepetitionMandatoryWithSeparator = function (atLeastOneSep) {
this.dslMethods.repetitionMandatoryWithSeparator.push(atLeastOneSep);
}
visitRepetition(many) {
};
DslMethodsCollectorVisitor.prototype.visitRepetition = function (many) {
this.dslMethods.repetition.push(many);
}
visitAlternation(or) {
};
DslMethodsCollectorVisitor.prototype.visitAlternation = function (or) {
this.dslMethods.alternation.push(or);
}
}
const collectorVisitor = new DslMethodsCollectorVisitor();
};
return DslMethodsCollectorVisitor;
}(gast_1.GAstVisitor));
var collectorVisitor = new DslMethodsCollectorVisitor();
function collectMethods(rule) {
collectorVisitor.reset();
rule.accept(collectorVisitor);
const dslMethods = collectorVisitor.dslMethods;
var dslMethods = collectorVisitor.dslMethods;
// avoid uncleaned references

@@ -135,0 +158,0 @@ collectorVisitor.reset();

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.EmbeddedActionsParser = exports.CstParser = void 0;
const parser_1 = require("../parser");
var parser_1 = require("../parser");
exports.CstParser = (parser_1.CstParser);
exports.EmbeddedActionsParser = parser_1.EmbeddedActionsParser;
//# sourceMappingURL=parser_traits.js.map

@@ -7,13 +7,15 @@ "use strict";

exports.PerformanceTracer = void 0;
const has_1 = __importDefault(require("lodash/has"));
const utils_1 = require("@chevrotain/utils");
const parser_1 = require("../parser");
var has_1 = __importDefault(require("lodash/has"));
var utils_1 = require("@chevrotain/utils");
var parser_1 = require("../parser");
/**
* Trait responsible for runtime parsing errors.
*/
class PerformanceTracer {
initPerformanceTracer(config) {
var PerformanceTracer = /** @class */ (function () {
function PerformanceTracer() {
}
PerformanceTracer.prototype.initPerformanceTracer = function (config) {
if ((0, has_1.default)(config, "traceInitPerf")) {
const userTraceInitPerf = config.traceInitPerf;
const traceIsNumber = typeof userTraceInitPerf === "number";
var userTraceInitPerf = config.traceInitPerf;
var traceIsNumber = typeof userTraceInitPerf === "number";
this.traceInitMaxIdent = traceIsNumber

@@ -31,4 +33,4 @@ ? userTraceInitPerf

this.traceInitIndent = -1;
}
TRACE_INIT(phaseDesc, phaseImpl) {
};
PerformanceTracer.prototype.TRACE_INIT = function (phaseDesc, phaseImpl) {
// No need to optimize this using NOOP pattern because

@@ -38,11 +40,11 @@ // It is not called in a hot spot...

this.traceInitIndent++;
const indent = new Array(this.traceInitIndent + 1).join("\t");
var indent = new Array(this.traceInitIndent + 1).join("\t");
if (this.traceInitIndent < this.traceInitMaxIdent) {
console.log(`${indent}--> <${phaseDesc}>`);
console.log("".concat(indent, "--> <").concat(phaseDesc, ">"));
}
const { time, value } = (0, utils_1.timer)(phaseImpl);
var _a = (0, utils_1.timer)(phaseImpl), time = _a.time, value = _a.value;
/* istanbul ignore next - Difficult to reproduce specific performance behavior (>10ms) in tests */
const traceMethod = time > 10 ? console.warn : console.log;
var traceMethod = time > 10 ? console.warn : console.log;
if (this.traceInitIndent < this.traceInitMaxIdent) {
traceMethod(`${indent}<-- <${phaseDesc}> time: ${time}ms`);
traceMethod("".concat(indent, "<-- <").concat(phaseDesc, "> time: ").concat(time, "ms"));
}

@@ -55,5 +57,6 @@ this.traceInitIndent--;

}
}
}
};
return PerformanceTracer;
}());
exports.PerformanceTracer = PerformanceTracer;
//# sourceMappingURL=perf_tracer.js.map

@@ -7,9 +7,9 @@ "use strict";

exports.RecognizerApi = void 0;
const values_1 = __importDefault(require("lodash/values"));
const includes_1 = __importDefault(require("lodash/includes"));
const exceptions_public_1 = require("../../exceptions_public");
const parser_1 = require("../parser");
const errors_public_1 = require("../../errors_public");
const checks_1 = require("../../grammar/checks");
const gast_1 = require("@chevrotain/gast");
var values_1 = __importDefault(require("lodash/values"));
var includes_1 = __importDefault(require("lodash/includes"));
var exceptions_public_1 = require("../../exceptions_public");
var parser_1 = require("../parser");
var errors_public_1 = require("../../errors_public");
var checks_1 = require("../../grammar/checks");
var gast_1 = require("@chevrotain/gast");
/**

@@ -23,271 +23,274 @@ * This trait is responsible for implementing the public API

*/
class RecognizerApi {
ACTION(impl) {
var RecognizerApi = /** @class */ (function () {
function RecognizerApi() {
}
RecognizerApi.prototype.ACTION = function (impl) {
return impl.call(this);
}
consume(idx, tokType, options) {
};
RecognizerApi.prototype.consume = function (idx, tokType, options) {
return this.consumeInternal(tokType, idx, options);
}
subrule(idx, ruleToCall, options) {
};
RecognizerApi.prototype.subrule = function (idx, ruleToCall, options) {
return this.subruleInternal(ruleToCall, idx, options);
}
option(idx, actionORMethodDef) {
};
RecognizerApi.prototype.option = function (idx, actionORMethodDef) {
return this.optionInternal(actionORMethodDef, idx);
}
or(idx, altsOrOpts) {
};
RecognizerApi.prototype.or = function (idx, altsOrOpts) {
return this.orInternal(altsOrOpts, idx);
}
many(idx, actionORMethodDef) {
};
RecognizerApi.prototype.many = function (idx, actionORMethodDef) {
return this.manyInternal(idx, actionORMethodDef);
}
atLeastOne(idx, actionORMethodDef) {
};
RecognizerApi.prototype.atLeastOne = function (idx, actionORMethodDef) {
return this.atLeastOneInternal(idx, actionORMethodDef);
}
CONSUME(tokType, options) {
};
RecognizerApi.prototype.CONSUME = function (tokType, options) {
return this.consumeInternal(tokType, 0, options);
}
CONSUME1(tokType, options) {
};
RecognizerApi.prototype.CONSUME1 = function (tokType, options) {
return this.consumeInternal(tokType, 1, options);
}
CONSUME2(tokType, options) {
};
RecognizerApi.prototype.CONSUME2 = function (tokType, options) {
return this.consumeInternal(tokType, 2, options);
}
CONSUME3(tokType, options) {
};
RecognizerApi.prototype.CONSUME3 = function (tokType, options) {
return this.consumeInternal(tokType, 3, options);
}
CONSUME4(tokType, options) {
};
RecognizerApi.prototype.CONSUME4 = function (tokType, options) {
return this.consumeInternal(tokType, 4, options);
}
CONSUME5(tokType, options) {
};
RecognizerApi.prototype.CONSUME5 = function (tokType, options) {
return this.consumeInternal(tokType, 5, options);
}
CONSUME6(tokType, options) {
};
RecognizerApi.prototype.CONSUME6 = function (tokType, options) {
return this.consumeInternal(tokType, 6, options);
}
CONSUME7(tokType, options) {
};
RecognizerApi.prototype.CONSUME7 = function (tokType, options) {
return this.consumeInternal(tokType, 7, options);
}
CONSUME8(tokType, options) {
};
RecognizerApi.prototype.CONSUME8 = function (tokType, options) {
return this.consumeInternal(tokType, 8, options);
}
CONSUME9(tokType, options) {
};
RecognizerApi.prototype.CONSUME9 = function (tokType, options) {
return this.consumeInternal(tokType, 9, options);
}
SUBRULE(ruleToCall, options) {
};
RecognizerApi.prototype.SUBRULE = function (ruleToCall, options) {
return this.subruleInternal(ruleToCall, 0, options);
}
SUBRULE1(ruleToCall, options) {
};
RecognizerApi.prototype.SUBRULE1 = function (ruleToCall, options) {
return this.subruleInternal(ruleToCall, 1, options);
}
SUBRULE2(ruleToCall, options) {
};
RecognizerApi.prototype.SUBRULE2 = function (ruleToCall, options) {
return this.subruleInternal(ruleToCall, 2, options);
}
SUBRULE3(ruleToCall, options) {
};
RecognizerApi.prototype.SUBRULE3 = function (ruleToCall, options) {
return this.subruleInternal(ruleToCall, 3, options);
}
SUBRULE4(ruleToCall, options) {
};
RecognizerApi.prototype.SUBRULE4 = function (ruleToCall, options) {
return this.subruleInternal(ruleToCall, 4, options);
}
SUBRULE5(ruleToCall, options) {
};
RecognizerApi.prototype.SUBRULE5 = function (ruleToCall, options) {
return this.subruleInternal(ruleToCall, 5, options);
}
SUBRULE6(ruleToCall, options) {
};
RecognizerApi.prototype.SUBRULE6 = function (ruleToCall, options) {
return this.subruleInternal(ruleToCall, 6, options);
}
SUBRULE7(ruleToCall, options) {
};
RecognizerApi.prototype.SUBRULE7 = function (ruleToCall, options) {
return this.subruleInternal(ruleToCall, 7, options);
}
SUBRULE8(ruleToCall, options) {
};
RecognizerApi.prototype.SUBRULE8 = function (ruleToCall, options) {
return this.subruleInternal(ruleToCall, 8, options);
}
SUBRULE9(ruleToCall, options) {
};
RecognizerApi.prototype.SUBRULE9 = function (ruleToCall, options) {
return this.subruleInternal(ruleToCall, 9, options);
}
OPTION(actionORMethodDef) {
};
RecognizerApi.prototype.OPTION = function (actionORMethodDef) {
return this.optionInternal(actionORMethodDef, 0);
}
OPTION1(actionORMethodDef) {
};
RecognizerApi.prototype.OPTION1 = function (actionORMethodDef) {
return this.optionInternal(actionORMethodDef, 1);
}
OPTION2(actionORMethodDef) {
};
RecognizerApi.prototype.OPTION2 = function (actionORMethodDef) {
return this.optionInternal(actionORMethodDef, 2);
}
OPTION3(actionORMethodDef) {
};
RecognizerApi.prototype.OPTION3 = function (actionORMethodDef) {
return this.optionInternal(actionORMethodDef, 3);
}
OPTION4(actionORMethodDef) {
};
RecognizerApi.prototype.OPTION4 = function (actionORMethodDef) {
return this.optionInternal(actionORMethodDef, 4);
}
OPTION5(actionORMethodDef) {
};
RecognizerApi.prototype.OPTION5 = function (actionORMethodDef) {
return this.optionInternal(actionORMethodDef, 5);
}
OPTION6(actionORMethodDef) {
};
RecognizerApi.prototype.OPTION6 = function (actionORMethodDef) {
return this.optionInternal(actionORMethodDef, 6);
}
OPTION7(actionORMethodDef) {
};
RecognizerApi.prototype.OPTION7 = function (actionORMethodDef) {
return this.optionInternal(actionORMethodDef, 7);
}
OPTION8(actionORMethodDef) {
};
RecognizerApi.prototype.OPTION8 = function (actionORMethodDef) {
return this.optionInternal(actionORMethodDef, 8);
}
OPTION9(actionORMethodDef) {
};
RecognizerApi.prototype.OPTION9 = function (actionORMethodDef) {
return this.optionInternal(actionORMethodDef, 9);
}
OR(altsOrOpts) {
};
RecognizerApi.prototype.OR = function (altsOrOpts) {
return this.orInternal(altsOrOpts, 0);
}
OR1(altsOrOpts) {
};
RecognizerApi.prototype.OR1 = function (altsOrOpts) {
return this.orInternal(altsOrOpts, 1);
}
OR2(altsOrOpts) {
};
RecognizerApi.prototype.OR2 = function (altsOrOpts) {
return this.orInternal(altsOrOpts, 2);
}
OR3(altsOrOpts) {
};
RecognizerApi.prototype.OR3 = function (altsOrOpts) {
return this.orInternal(altsOrOpts, 3);
}
OR4(altsOrOpts) {
};
RecognizerApi.prototype.OR4 = function (altsOrOpts) {
return this.orInternal(altsOrOpts, 4);
}
OR5(altsOrOpts) {
};
RecognizerApi.prototype.OR5 = function (altsOrOpts) {
return this.orInternal(altsOrOpts, 5);
}
OR6(altsOrOpts) {
};
RecognizerApi.prototype.OR6 = function (altsOrOpts) {
return this.orInternal(altsOrOpts, 6);
}
OR7(altsOrOpts) {
};
RecognizerApi.prototype.OR7 = function (altsOrOpts) {
return this.orInternal(altsOrOpts, 7);
}
OR8(altsOrOpts) {
};
RecognizerApi.prototype.OR8 = function (altsOrOpts) {
return this.orInternal(altsOrOpts, 8);
}
OR9(altsOrOpts) {
};
RecognizerApi.prototype.OR9 = function (altsOrOpts) {
return this.orInternal(altsOrOpts, 9);
}
MANY(actionORMethodDef) {
};
RecognizerApi.prototype.MANY = function (actionORMethodDef) {
this.manyInternal(0, actionORMethodDef);
}
MANY1(actionORMethodDef) {
};
RecognizerApi.prototype.MANY1 = function (actionORMethodDef) {
this.manyInternal(1, actionORMethodDef);
}
MANY2(actionORMethodDef) {
};
RecognizerApi.prototype.MANY2 = function (actionORMethodDef) {
this.manyInternal(2, actionORMethodDef);
}
MANY3(actionORMethodDef) {
};
RecognizerApi.prototype.MANY3 = function (actionORMethodDef) {
this.manyInternal(3, actionORMethodDef);
}
MANY4(actionORMethodDef) {
};
RecognizerApi.prototype.MANY4 = function (actionORMethodDef) {
this.manyInternal(4, actionORMethodDef);
}
MANY5(actionORMethodDef) {
};
RecognizerApi.prototype.MANY5 = function (actionORMethodDef) {
this.manyInternal(5, actionORMethodDef);
}
MANY6(actionORMethodDef) {
};
RecognizerApi.prototype.MANY6 = function (actionORMethodDef) {
this.manyInternal(6, actionORMethodDef);
}
MANY7(actionORMethodDef) {
};
RecognizerApi.prototype.MANY7 = function (actionORMethodDef) {
this.manyInternal(7, actionORMethodDef);
}
MANY8(actionORMethodDef) {
};
RecognizerApi.prototype.MANY8 = function (actionORMethodDef) {
this.manyInternal(8, actionORMethodDef);
}
MANY9(actionORMethodDef) {
};
RecognizerApi.prototype.MANY9 = function (actionORMethodDef) {
this.manyInternal(9, actionORMethodDef);
}
MANY_SEP(options) {
};
RecognizerApi.prototype.MANY_SEP = function (options) {
this.manySepFirstInternal(0, options);
}
MANY_SEP1(options) {
};
RecognizerApi.prototype.MANY_SEP1 = function (options) {
this.manySepFirstInternal(1, options);
}
MANY_SEP2(options) {
};
RecognizerApi.prototype.MANY_SEP2 = function (options) {
this.manySepFirstInternal(2, options);
}
MANY_SEP3(options) {
};
RecognizerApi.prototype.MANY_SEP3 = function (options) {
this.manySepFirstInternal(3, options);
}
MANY_SEP4(options) {
};
RecognizerApi.prototype.MANY_SEP4 = function (options) {
this.manySepFirstInternal(4, options);
}
MANY_SEP5(options) {
};
RecognizerApi.prototype.MANY_SEP5 = function (options) {
this.manySepFirstInternal(5, options);
}
MANY_SEP6(options) {
};
RecognizerApi.prototype.MANY_SEP6 = function (options) {
this.manySepFirstInternal(6, options);
}
MANY_SEP7(options) {
};
RecognizerApi.prototype.MANY_SEP7 = function (options) {
this.manySepFirstInternal(7, options);
}
MANY_SEP8(options) {
};
RecognizerApi.prototype.MANY_SEP8 = function (options) {
this.manySepFirstInternal(8, options);
}
MANY_SEP9(options) {
};
RecognizerApi.prototype.MANY_SEP9 = function (options) {
this.manySepFirstInternal(9, options);
}
AT_LEAST_ONE(actionORMethodDef) {
};
RecognizerApi.prototype.AT_LEAST_ONE = function (actionORMethodDef) {
this.atLeastOneInternal(0, actionORMethodDef);
}
AT_LEAST_ONE1(actionORMethodDef) {
};
RecognizerApi.prototype.AT_LEAST_ONE1 = function (actionORMethodDef) {
return this.atLeastOneInternal(1, actionORMethodDef);
}
AT_LEAST_ONE2(actionORMethodDef) {
};
RecognizerApi.prototype.AT_LEAST_ONE2 = function (actionORMethodDef) {
this.atLeastOneInternal(2, actionORMethodDef);
}
AT_LEAST_ONE3(actionORMethodDef) {
};
RecognizerApi.prototype.AT_LEAST_ONE3 = function (actionORMethodDef) {
this.atLeastOneInternal(3, actionORMethodDef);
}
AT_LEAST_ONE4(actionORMethodDef) {
};
RecognizerApi.prototype.AT_LEAST_ONE4 = function (actionORMethodDef) {
this.atLeastOneInternal(4, actionORMethodDef);
}
AT_LEAST_ONE5(actionORMethodDef) {
};
RecognizerApi.prototype.AT_LEAST_ONE5 = function (actionORMethodDef) {
this.atLeastOneInternal(5, actionORMethodDef);
}
AT_LEAST_ONE6(actionORMethodDef) {
};
RecognizerApi.prototype.AT_LEAST_ONE6 = function (actionORMethodDef) {
this.atLeastOneInternal(6, actionORMethodDef);
}
AT_LEAST_ONE7(actionORMethodDef) {
};
RecognizerApi.prototype.AT_LEAST_ONE7 = function (actionORMethodDef) {
this.atLeastOneInternal(7, actionORMethodDef);
}
AT_LEAST_ONE8(actionORMethodDef) {
};
RecognizerApi.prototype.AT_LEAST_ONE8 = function (actionORMethodDef) {
this.atLeastOneInternal(8, actionORMethodDef);
}
AT_LEAST_ONE9(actionORMethodDef) {
};
RecognizerApi.prototype.AT_LEAST_ONE9 = function (actionORMethodDef) {
this.atLeastOneInternal(9, actionORMethodDef);
}
AT_LEAST_ONE_SEP(options) {
};
RecognizerApi.prototype.AT_LEAST_ONE_SEP = function (options) {
this.atLeastOneSepFirstInternal(0, options);
}
AT_LEAST_ONE_SEP1(options) {
};
RecognizerApi.prototype.AT_LEAST_ONE_SEP1 = function (options) {
this.atLeastOneSepFirstInternal(1, options);
}
AT_LEAST_ONE_SEP2(options) {
};
RecognizerApi.prototype.AT_LEAST_ONE_SEP2 = function (options) {
this.atLeastOneSepFirstInternal(2, options);
}
AT_LEAST_ONE_SEP3(options) {
};
RecognizerApi.prototype.AT_LEAST_ONE_SEP3 = function (options) {
this.atLeastOneSepFirstInternal(3, options);
}
AT_LEAST_ONE_SEP4(options) {
};
RecognizerApi.prototype.AT_LEAST_ONE_SEP4 = function (options) {
this.atLeastOneSepFirstInternal(4, options);
}
AT_LEAST_ONE_SEP5(options) {
};
RecognizerApi.prototype.AT_LEAST_ONE_SEP5 = function (options) {
this.atLeastOneSepFirstInternal(5, options);
}
AT_LEAST_ONE_SEP6(options) {
};
RecognizerApi.prototype.AT_LEAST_ONE_SEP6 = function (options) {
this.atLeastOneSepFirstInternal(6, options);
}
AT_LEAST_ONE_SEP7(options) {
};
RecognizerApi.prototype.AT_LEAST_ONE_SEP7 = function (options) {
this.atLeastOneSepFirstInternal(7, options);
}
AT_LEAST_ONE_SEP8(options) {
};
RecognizerApi.prototype.AT_LEAST_ONE_SEP8 = function (options) {
this.atLeastOneSepFirstInternal(8, options);
}
AT_LEAST_ONE_SEP9(options) {
};
RecognizerApi.prototype.AT_LEAST_ONE_SEP9 = function (options) {
this.atLeastOneSepFirstInternal(9, options);
}
RULE(name, implementation, config = parser_1.DEFAULT_RULE_CONFIG) {
};
RecognizerApi.prototype.RULE = function (name, implementation, config) {
if (config === void 0) { config = parser_1.DEFAULT_RULE_CONFIG; }
if ((0, includes_1.default)(this.definedRulesNames, name)) {
const errMsg = errors_public_1.defaultGrammarValidatorErrorProvider.buildDuplicateRuleNameError({
var errMsg = errors_public_1.defaultGrammarValidatorErrorProvider.buildDuplicateRuleNameError({
topLevelRule: name,
grammarName: this.className
});
const error = {
var error = {
message: errMsg,

@@ -300,18 +303,19 @@ type: parser_1.ParserDefinitionErrorType.DUPLICATE_RULE_NAME,

this.definedRulesNames.push(name);
const ruleImplementation = this.defineRule(name, implementation, config);
var ruleImplementation = this.defineRule(name, implementation, config);
this[name] = ruleImplementation;
return ruleImplementation;
}
OVERRIDE_RULE(name, impl, config = parser_1.DEFAULT_RULE_CONFIG) {
const ruleErrors = (0, checks_1.validateRuleIsOverridden)(name, this.definedRulesNames, this.className);
};
RecognizerApi.prototype.OVERRIDE_RULE = function (name, impl, config) {
if (config === void 0) { config = parser_1.DEFAULT_RULE_CONFIG; }
var ruleErrors = (0, checks_1.validateRuleIsOverridden)(name, this.definedRulesNames, this.className);
this.definitionErrors = this.definitionErrors.concat(ruleErrors);
const ruleImplementation = this.defineRule(name, impl, config);
var ruleImplementation = this.defineRule(name, impl, config);
this[name] = ruleImplementation;
return ruleImplementation;
}
BACKTRACK(grammarRule, args) {
};
RecognizerApi.prototype.BACKTRACK = function (grammarRule, args) {
return function () {
// save org state
this.isBackTrackingStack.push(1);
const orgState = this.saveRecogState();
var orgState = this.saveRecogState();
try {

@@ -335,12 +339,13 @@ grammarRule.apply(this, args);

};
}
};
// GAST export APIs
getGAstProductions() {
RecognizerApi.prototype.getGAstProductions = function () {
return this.gastProductionsCache;
}
getSerializedGastProductions() {
};
RecognizerApi.prototype.getSerializedGastProductions = function () {
return (0, gast_1.serializeGrammar)((0, values_1.default)(this.gastProductionsCache));
}
}
};
return RecognizerApi;
}());
exports.RecognizerApi = RecognizerApi;
//# sourceMappingURL=recognizer_api.js.map

@@ -7,20 +7,20 @@ "use strict";

exports.RecognizerEngine = void 0;
const isEmpty_1 = __importDefault(require("lodash/isEmpty"));
const isArray_1 = __importDefault(require("lodash/isArray"));
const flatten_1 = __importDefault(require("lodash/flatten"));
const every_1 = __importDefault(require("lodash/every"));
const uniq_1 = __importDefault(require("lodash/uniq"));
const isObject_1 = __importDefault(require("lodash/isObject"));
const has_1 = __importDefault(require("lodash/has"));
const values_1 = __importDefault(require("lodash/values"));
const reduce_1 = __importDefault(require("lodash/reduce"));
const clone_1 = __importDefault(require("lodash/clone"));
const keys_1 = require("../../grammar/keys");
const exceptions_public_1 = require("../../exceptions_public");
const lookahead_1 = require("../../grammar/lookahead");
const interpreter_1 = require("../../grammar/interpreter");
const parser_1 = require("../parser");
const recoverable_1 = require("./recoverable");
const tokens_public_1 = require("../../../scan/tokens_public");
const tokens_1 = require("../../../scan/tokens");
var isEmpty_1 = __importDefault(require("lodash/isEmpty"));
var isArray_1 = __importDefault(require("lodash/isArray"));
var flatten_1 = __importDefault(require("lodash/flatten"));
var every_1 = __importDefault(require("lodash/every"));
var uniq_1 = __importDefault(require("lodash/uniq"));
var isObject_1 = __importDefault(require("lodash/isObject"));
var has_1 = __importDefault(require("lodash/has"));
var values_1 = __importDefault(require("lodash/values"));
var reduce_1 = __importDefault(require("lodash/reduce"));
var clone_1 = __importDefault(require("lodash/clone"));
var keys_1 = require("../../grammar/keys");
var exceptions_public_1 = require("../../exceptions_public");
var lookahead_1 = require("../../grammar/lookahead");
var interpreter_1 = require("../../grammar/interpreter");
var parser_1 = require("../parser");
var recoverable_1 = require("./recoverable");
var tokens_public_1 = require("../../../scan/tokens_public");
var tokens_1 = require("../../../scan/tokens");
/**

@@ -30,4 +30,6 @@ * This trait is responsible for the runtime parsing engine

*/
class RecognizerEngine {
initRecognizerEngine(tokenVocabulary, config) {
var RecognizerEngine = /** @class */ (function () {
function RecognizerEngine() {
}
RecognizerEngine.prototype.initRecognizerEngine = function (tokenVocabulary, config) {
this.className = this.constructor.name;

@@ -67,3 +69,3 @@ // TODO: would using an ES6 Map or plain object be faster (CST building scenario)

if ((0, isArray_1.default)(tokenVocabulary)) {
this.tokensMap = (0, reduce_1.default)(tokenVocabulary, (acc, tokType) => {
this.tokensMap = (0, reduce_1.default)(tokenVocabulary, function (acc, tokType) {
acc[tokType.name] = tokType;

@@ -75,5 +77,5 @@ return acc;

(0, every_1.default)((0, flatten_1.default)((0, values_1.default)(tokenVocabulary.modes)), tokens_1.isTokenType)) {
const allTokenTypes = (0, flatten_1.default)((0, values_1.default)(tokenVocabulary.modes));
const uniqueTokens = (0, uniq_1.default)(allTokenTypes);
this.tokensMap = (0, reduce_1.default)(uniqueTokens, (acc, tokType) => {
var allTokenTypes_1 = (0, flatten_1.default)((0, values_1.default)(tokenVocabulary.modes));
var uniqueTokens = (0, uniq_1.default)(allTokenTypes_1);
this.tokensMap = (0, reduce_1.default)(uniqueTokens, function (acc, tokType) {
acc[tokType.name] = tokType;

@@ -93,6 +95,8 @@ return acc;

this.tokensMap["EOF"] = tokens_public_1.EOF;
const allTokenTypes = (0, has_1.default)(tokenVocabulary, "modes")
var allTokenTypes = (0, has_1.default)(tokenVocabulary, "modes")
? (0, flatten_1.default)((0, values_1.default)(tokenVocabulary.modes))
: (0, values_1.default)(tokenVocabulary);
const noTokenCategoriesUsed = (0, every_1.default)(allTokenTypes, (tokenConstructor) => (0, isEmpty_1.default)(tokenConstructor.categoryMatches));
var noTokenCategoriesUsed = (0, every_1.default)(allTokenTypes, function (tokenConstructor) {
return (0, isEmpty_1.default)(tokenConstructor.categoryMatches);
});
this.tokenMatcher = noTokenCategoriesUsed

@@ -105,12 +109,12 @@ ? tokens_1.tokenStructuredMatcherNoCategories

(0, tokens_1.augmentTokenTypes)((0, values_1.default)(this.tokensMap));
}
defineRule(ruleName, impl, config) {
};
RecognizerEngine.prototype.defineRule = function (ruleName, impl, config) {
if (this.selfAnalysisDone) {
throw Error(`Grammar rule <${ruleName}> may not be defined after the 'performSelfAnalysis' method has been called'\n` +
`Make sure that all grammar rule definitions are done before 'performSelfAnalysis' is called.`);
throw Error("Grammar rule <".concat(ruleName, "> may not be defined after the 'performSelfAnalysis' method has been called'\n") +
"Make sure that all grammar rule definitions are done before 'performSelfAnalysis' is called.");
}
const resyncEnabled = (0, has_1.default)(config, "resyncEnabled")
var resyncEnabled = (0, has_1.default)(config, "resyncEnabled")
? config.resyncEnabled // assumes end user provides the correct config value/type
: parser_1.DEFAULT_RULE_CONFIG.resyncEnabled;
const recoveryValueFunc = (0, has_1.default)(config, "recoveryValueFunc")
var recoveryValueFunc = (0, has_1.default)(config, "recoveryValueFunc")
? config.recoveryValueFunc // assumes end user provides the correct config value/type

@@ -120,15 +124,19 @@ : parser_1.DEFAULT_RULE_CONFIG.recoveryValueFunc;

// this greatly improves Map access time (as much as 8% for some performance benchmarks).
const shortName = this.ruleShortNameIdx << (keys_1.BITS_FOR_METHOD_TYPE + keys_1.BITS_FOR_OCCURRENCE_IDX);
var shortName = this.ruleShortNameIdx << (keys_1.BITS_FOR_METHOD_TYPE + keys_1.BITS_FOR_OCCURRENCE_IDX);
this.ruleShortNameIdx++;
this.shortRuleNameToFull[shortName] = ruleName;
this.fullRuleNameToShort[ruleName] = shortName;
let invokeRuleWithTry;
var invokeRuleWithTry;
// Micro optimization, only check the condition **once** on rule definition
// instead of **every single** rule invocation.
if (this.outputCst === true) {
invokeRuleWithTry = function invokeRuleWithTry(...args) {
invokeRuleWithTry = function invokeRuleWithTry() {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i] = arguments[_i];
}
try {
this.ruleInvocationStateUpdate(shortName, ruleName, this.subruleIdx);
impl.apply(this, args);
const cst = this.CST_STACK[this.CST_STACK.length - 1];
var cst = this.CST_STACK[this.CST_STACK.length - 1];
this.cstPostRule(cst);

@@ -146,3 +154,7 @@ return cst;

else {
invokeRuleWithTry = function invokeRuleWithTryCst(...args) {
invokeRuleWithTry = function invokeRuleWithTryCst() {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i] = arguments[_i];
}
try {

@@ -160,7 +172,7 @@ this.ruleInvocationStateUpdate(shortName, ruleName, this.subruleIdx);

}
const wrappedGrammarRule = Object.assign(invokeRuleWithTry, { ruleName, originalGrammarAction: impl });
var wrappedGrammarRule = Object.assign(invokeRuleWithTry, { ruleName: ruleName, originalGrammarAction: impl });
return wrappedGrammarRule;
}
invokeRuleCatch(e, resyncEnabledConfig, recoveryValueFunc) {
const isFirstInvokedRule = this.RULE_STACK.length === 1;
};
RecognizerEngine.prototype.invokeRuleCatch = function (e, resyncEnabledConfig, recoveryValueFunc) {
var isFirstInvokedRule = this.RULE_STACK.length === 1;
// note the reSync is always enabled for the first rule invocation, because we must always be able to

@@ -170,11 +182,11 @@ // reSync with EOF and just output some INVALID ParseTree

// path is really the most valid one
const reSyncEnabled = resyncEnabledConfig && !this.isBackTracking() && this.recoveryEnabled;
var reSyncEnabled = resyncEnabledConfig && !this.isBackTracking() && this.recoveryEnabled;
if ((0, exceptions_public_1.isRecognitionException)(e)) {
const recogError = e;
var recogError = e;
if (reSyncEnabled) {
const reSyncTokType = this.findReSyncTokenType();
var reSyncTokType = this.findReSyncTokenType();
if (this.isInCurrentRuleReSyncSet(reSyncTokType)) {
recogError.resyncedTokens = this.reSyncTo(reSyncTokType);
if (this.outputCst) {
const partialCstResult = this.CST_STACK[this.CST_STACK.length - 1];
var partialCstResult = this.CST_STACK[this.CST_STACK.length - 1];
partialCstResult.recoveredNode = true;

@@ -189,3 +201,3 @@ return partialCstResult;

if (this.outputCst) {
const partialCstResult = this.CST_STACK[this.CST_STACK.length - 1];
var partialCstResult = this.CST_STACK[this.CST_STACK.length - 1];
partialCstResult.recoveredNode = true;

@@ -214,19 +226,20 @@ recogError.partialCstResult = partialCstResult;

}
}
};
// Implementation of parsing DSL
optionInternal(actionORMethodDef, occurrence) {
const key = this.getKeyForAutomaticLookahead(keys_1.OPTION_IDX, occurrence);
RecognizerEngine.prototype.optionInternal = function (actionORMethodDef, occurrence) {
var key = this.getKeyForAutomaticLookahead(keys_1.OPTION_IDX, occurrence);
return this.optionInternalLogic(actionORMethodDef, occurrence, key);
}
optionInternalLogic(actionORMethodDef, occurrence, key) {
let lookAheadFunc = this.getLaFuncFromCache(key);
let action;
};
RecognizerEngine.prototype.optionInternalLogic = function (actionORMethodDef, occurrence, key) {
var _this = this;
var lookAheadFunc = this.getLaFuncFromCache(key);
var action;
if (typeof actionORMethodDef !== "function") {
action = actionORMethodDef.DEF;
const predicate = actionORMethodDef.GATE;
var predicate_1 = actionORMethodDef.GATE;
// predicate present
if (predicate !== undefined) {
const orgLookaheadFunction = lookAheadFunc;
lookAheadFunc = () => {
return predicate.call(this) && orgLookaheadFunction.call(this);
if (predicate_1 !== undefined) {
var orgLookaheadFunction_1 = lookAheadFunc;
lookAheadFunc = function () {
return predicate_1.call(_this) && orgLookaheadFunction_1.call(_this);
};

@@ -242,18 +255,19 @@ }

return undefined;
}
atLeastOneInternal(prodOccurrence, actionORMethodDef) {
const laKey = this.getKeyForAutomaticLookahead(keys_1.AT_LEAST_ONE_IDX, prodOccurrence);
};
RecognizerEngine.prototype.atLeastOneInternal = function (prodOccurrence, actionORMethodDef) {
var laKey = this.getKeyForAutomaticLookahead(keys_1.AT_LEAST_ONE_IDX, prodOccurrence);
return this.atLeastOneInternalLogic(prodOccurrence, actionORMethodDef, laKey);
}
atLeastOneInternalLogic(prodOccurrence, actionORMethodDef, key) {
let lookAheadFunc = this.getLaFuncFromCache(key);
let action;
};
RecognizerEngine.prototype.atLeastOneInternalLogic = function (prodOccurrence, actionORMethodDef, key) {
var _this = this;
var lookAheadFunc = this.getLaFuncFromCache(key);
var action;
if (typeof actionORMethodDef !== "function") {
action = actionORMethodDef.DEF;
const predicate = actionORMethodDef.GATE;
var predicate_2 = actionORMethodDef.GATE;
// predicate present
if (predicate !== undefined) {
const orgLookaheadFunction = lookAheadFunc;
lookAheadFunc = () => {
return predicate.call(this) && orgLookaheadFunction.call(this);
if (predicate_2 !== undefined) {
var orgLookaheadFunction_2 = lookAheadFunc;
lookAheadFunc = function () {
return predicate_2.call(_this) && orgLookaheadFunction_2.call(_this);
};

@@ -266,3 +280,3 @@ }

if (lookAheadFunc.call(this) === true) {
let notStuck = this.doSingleRepetition(action);
var notStuck = this.doSingleRepetition(action);
while (lookAheadFunc.call(this) === true &&

@@ -281,11 +295,12 @@ notStuck === true) {

this.attemptInRepetitionRecovery(this.atLeastOneInternal, [prodOccurrence, actionORMethodDef], lookAheadFunc, keys_1.AT_LEAST_ONE_IDX, prodOccurrence, interpreter_1.NextTerminalAfterAtLeastOneWalker);
}
atLeastOneSepFirstInternal(prodOccurrence, options) {
const laKey = this.getKeyForAutomaticLookahead(keys_1.AT_LEAST_ONE_SEP_IDX, prodOccurrence);
};
RecognizerEngine.prototype.atLeastOneSepFirstInternal = function (prodOccurrence, options) {
var laKey = this.getKeyForAutomaticLookahead(keys_1.AT_LEAST_ONE_SEP_IDX, prodOccurrence);
this.atLeastOneSepFirstInternalLogic(prodOccurrence, options, laKey);
}
atLeastOneSepFirstInternalLogic(prodOccurrence, options, key) {
const action = options.DEF;
const separator = options.SEP;
const firstIterationLookaheadFunc = this.getLaFuncFromCache(key);
};
RecognizerEngine.prototype.atLeastOneSepFirstInternalLogic = function (prodOccurrence, options, key) {
var _this = this;
var action = options.DEF;
var separator = options.SEP;
var firstIterationLookaheadFunc = this.getLaFuncFromCache(key);
// 1st iteration

@@ -297,4 +312,4 @@ if (firstIterationLookaheadFunc.call(this) === true) {

// because it is only needed in error recovery scenarios.
const separatorLookAheadFunc = () => {
return this.tokenMatcher(this.LA(1), separator);
var separatorLookAheadFunc = function () {
return _this.tokenMatcher(_this.LA(1), separator);
};

@@ -320,18 +335,19 @@ // 2nd..nth iterations

}
}
manyInternal(prodOccurrence, actionORMethodDef) {
const laKey = this.getKeyForAutomaticLookahead(keys_1.MANY_IDX, prodOccurrence);
};
RecognizerEngine.prototype.manyInternal = function (prodOccurrence, actionORMethodDef) {
var laKey = this.getKeyForAutomaticLookahead(keys_1.MANY_IDX, prodOccurrence);
return this.manyInternalLogic(prodOccurrence, actionORMethodDef, laKey);
}
manyInternalLogic(prodOccurrence, actionORMethodDef, key) {
let lookaheadFunction = this.getLaFuncFromCache(key);
let action;
};
RecognizerEngine.prototype.manyInternalLogic = function (prodOccurrence, actionORMethodDef, key) {
var _this = this;
var lookaheadFunction = this.getLaFuncFromCache(key);
var action;
if (typeof actionORMethodDef !== "function") {
action = actionORMethodDef.DEF;
const predicate = actionORMethodDef.GATE;
var predicate_3 = actionORMethodDef.GATE;
// predicate present
if (predicate !== undefined) {
const orgLookaheadFunction = lookaheadFunction;
lookaheadFunction = () => {
return predicate.call(this) && orgLookaheadFunction.call(this);
if (predicate_3 !== undefined) {
var orgLookaheadFunction_3 = lookaheadFunction;
lookaheadFunction = function () {
return predicate_3.call(_this) && orgLookaheadFunction_3.call(_this);
};

@@ -343,3 +359,3 @@ }

}
let notStuck = true;
var notStuck = true;
while (lookaheadFunction.call(this) === true && notStuck === true) {

@@ -356,16 +372,17 @@ notStuck = this.doSingleRepetition(action);

notStuck);
}
manySepFirstInternal(prodOccurrence, options) {
const laKey = this.getKeyForAutomaticLookahead(keys_1.MANY_SEP_IDX, prodOccurrence);
};
RecognizerEngine.prototype.manySepFirstInternal = function (prodOccurrence, options) {
var laKey = this.getKeyForAutomaticLookahead(keys_1.MANY_SEP_IDX, prodOccurrence);
this.manySepFirstInternalLogic(prodOccurrence, options, laKey);
}
manySepFirstInternalLogic(prodOccurrence, options, key) {
const action = options.DEF;
const separator = options.SEP;
const firstIterationLaFunc = this.getLaFuncFromCache(key);
};
RecognizerEngine.prototype.manySepFirstInternalLogic = function (prodOccurrence, options, key) {
var _this = this;
var action = options.DEF;
var separator = options.SEP;
var firstIterationLaFunc = this.getLaFuncFromCache(key);
// 1st iteration
if (firstIterationLaFunc.call(this) === true) {
action.call(this);
const separatorLookAheadFunc = () => {
return this.tokenMatcher(this.LA(1), separator);
var separatorLookAheadFunc = function () {
return _this.tokenMatcher(_this.LA(1), separator);
};

@@ -389,4 +406,4 @@ // 2nd..nth iterations

}
}
repetitionSepSecondInternal(prodOccurrence, separator, separatorLookAheadFunc, action, nextTerminalAfterWalker) {
};
RecognizerEngine.prototype.repetitionSepSecondInternal = function (prodOccurrence, separator, separatorLookAheadFunc, action, nextTerminalAfterWalker) {
while (separatorLookAheadFunc()) {

@@ -411,23 +428,23 @@ // note that this CONSUME will never enter recovery because

], separatorLookAheadFunc, keys_1.AT_LEAST_ONE_SEP_IDX, prodOccurrence, nextTerminalAfterWalker);
}
doSingleRepetition(action) {
const beforeIteration = this.getLexerPosition();
};
RecognizerEngine.prototype.doSingleRepetition = function (action) {
var beforeIteration = this.getLexerPosition();
action.call(this);
const afterIteration = this.getLexerPosition();
var afterIteration = this.getLexerPosition();
// This boolean will indicate if this repetition progressed
// or if we are "stuck" (potential infinite loop in the repetition).
return afterIteration > beforeIteration;
}
orInternal(altsOrOpts, occurrence) {
const laKey = this.getKeyForAutomaticLookahead(keys_1.OR_IDX, occurrence);
const alts = (0, isArray_1.default)(altsOrOpts) ? altsOrOpts : altsOrOpts.DEF;
const laFunc = this.getLaFuncFromCache(laKey);
const altIdxToTake = laFunc.call(this, alts);
};
RecognizerEngine.prototype.orInternal = function (altsOrOpts, occurrence) {
var laKey = this.getKeyForAutomaticLookahead(keys_1.OR_IDX, occurrence);
var alts = (0, isArray_1.default)(altsOrOpts) ? altsOrOpts : altsOrOpts.DEF;
var laFunc = this.getLaFuncFromCache(laKey);
var altIdxToTake = laFunc.call(this, alts);
if (altIdxToTake !== undefined) {
const chosenAlternative = alts[altIdxToTake];
var chosenAlternative = alts[altIdxToTake];
return chosenAlternative.ALT.call(this);
}
this.raiseNoAltException(occurrence, altsOrOpts.ERR_MSG);
}
ruleFinallyStateUpdate() {
};
RecognizerEngine.prototype.ruleFinallyStateUpdate = function () {
this.RULE_STACK.pop();

@@ -438,4 +455,4 @@ this.RULE_OCCURRENCE_STACK.pop();

if (this.RULE_STACK.length === 0 && this.isAtEndOfInput() === false) {
const firstRedundantTok = this.LA(1);
const errMsg = this.errorMessageProvider.buildNotAllInputParsedMessage({
var firstRedundantTok = this.LA(1);
var errMsg = this.errorMessageProvider.buildNotAllInputParsedMessage({
firstRedundant: firstRedundantTok,

@@ -446,7 +463,7 @@ ruleName: this.getCurrRuleFullName()

}
}
subruleInternal(ruleToCall, idx, options) {
let ruleResult;
};
RecognizerEngine.prototype.subruleInternal = function (ruleToCall, idx, options) {
var ruleResult;
try {
const args = options !== undefined ? options.ARGS : undefined;
var args = options !== undefined ? options.ARGS : undefined;
this.subruleIdx = idx;

@@ -462,4 +479,4 @@ ruleResult = ruleToCall.apply(this, args);

}
}
subruleInternalError(e, options, ruleName) {
};
RecognizerEngine.prototype.subruleInternalError = function (e, options, ruleName) {
if ((0, exceptions_public_1.isRecognitionException)(e) && e.partialCstResult !== undefined) {

@@ -472,7 +489,7 @@ this.cstPostNonTerminal(e.partialCstResult, options !== undefined && options.LABEL !== undefined

throw e;
}
consumeInternal(tokType, idx, options) {
let consumedToken;
};
RecognizerEngine.prototype.consumeInternal = function (tokType, idx, options) {
var consumedToken;
try {
const nextToken = this.LA(1);
var nextToken = this.LA(1);
if (this.tokenMatcher(nextToken, tokType) === true) {

@@ -493,6 +510,6 @@ this.consumeToken();

return consumedToken;
}
consumeInternalError(tokType, nextToken, options) {
let msg;
const previousToken = this.LA(0);
};
RecognizerEngine.prototype.consumeInternalError = function (tokType, nextToken, options) {
var msg;
var previousToken = this.LA(0);
if (options !== undefined && options.ERR_MSG) {

@@ -510,4 +527,4 @@ msg = options.ERR_MSG;

throw this.SAVE_ERROR(new exceptions_public_1.MismatchedTokenException(msg, nextToken, previousToken));
}
consumeInternalRecovery(tokType, idx, eFromConsumption) {
};
RecognizerEngine.prototype.consumeInternalRecovery = function (tokType, idx, eFromConsumption) {
// no recovery allowed during backtracking, otherwise backtracking may recover invalid syntax and accept it

@@ -519,3 +536,3 @@ // but the original syntax could have been parsed successfully without any backtracking + recovery

!this.isBackTracking()) {
const follows = this.getFollowsForInRuleRecovery(tokType, idx);
var follows = this.getFollowsForInRuleRecovery(tokType, idx);
try {

@@ -538,7 +555,7 @@ return this.tryInRuleRecovery(tokType, follows);

}
}
saveRecogState() {
};
RecognizerEngine.prototype.saveRecogState = function () {
// errors is a getter which will clone the errors array
const savedErrors = this.errors;
const savedRuleStack = (0, clone_1.default)(this.RULE_STACK);
var savedErrors = this.errors;
var savedRuleStack = (0, clone_1.default)(this.RULE_STACK);
return {

@@ -550,9 +567,9 @@ errors: savedErrors,

};
}
reloadRecogState(newState) {
};
RecognizerEngine.prototype.reloadRecogState = function (newState) {
this.errors = newState.errors;
this.importLexerState(newState.lexerState);
this.RULE_STACK = newState.RULE_STACK;
}
ruleInvocationStateUpdate(shortName, fullName, idxInCallingRule) {
};
RecognizerEngine.prototype.ruleInvocationStateUpdate = function (shortName, fullName, idxInCallingRule) {
this.RULE_OCCURRENCE_STACK.push(idxInCallingRule);

@@ -562,17 +579,17 @@ this.RULE_STACK.push(shortName);

this.cstInvocationStateUpdate(fullName);
}
isBackTracking() {
};
RecognizerEngine.prototype.isBackTracking = function () {
return this.isBackTrackingStack.length !== 0;
}
getCurrRuleFullName() {
const shortName = this.getLastExplicitRuleShortName();
};
RecognizerEngine.prototype.getCurrRuleFullName = function () {
var shortName = this.getLastExplicitRuleShortName();
return this.shortRuleNameToFull[shortName];
}
shortRuleNameToFullName(shortName) {
};
RecognizerEngine.prototype.shortRuleNameToFullName = function (shortName) {
return this.shortRuleNameToFull[shortName];
}
isAtEndOfInput() {
};
RecognizerEngine.prototype.isAtEndOfInput = function () {
return this.tokenMatcher(this.LA(1), tokens_public_1.EOF);
}
reset() {
};
RecognizerEngine.prototype.reset = function () {
this.resetLexerState();

@@ -586,5 +603,6 @@ this.subruleIdx = 0;

this.RULE_OCCURRENCE_STACK = [];
}
}
};
return RecognizerEngine;
}());
exports.RecognizerEngine = RecognizerEngine;
//# sourceMappingURL=recognizer_engine.js.map
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -7,22 +22,25 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

exports.attemptInRepetitionRecovery = exports.Recoverable = exports.InRuleRecoveryException = exports.IN_RULE_RECOVERY_EXCEPTION = exports.EOF_FOLLOW_KEY = void 0;
const tokens_public_1 = require("../../../scan/tokens_public");
const isEmpty_1 = __importDefault(require("lodash/isEmpty"));
const dropRight_1 = __importDefault(require("lodash/dropRight"));
const flatten_1 = __importDefault(require("lodash/flatten"));
const map_1 = __importDefault(require("lodash/map"));
const find_1 = __importDefault(require("lodash/find"));
const has_1 = __importDefault(require("lodash/has"));
const includes_1 = __importDefault(require("lodash/includes"));
const clone_1 = __importDefault(require("lodash/clone"));
const exceptions_public_1 = require("../../exceptions_public");
const constants_1 = require("../../constants");
const parser_1 = require("../parser");
var tokens_public_1 = require("../../../scan/tokens_public");
var isEmpty_1 = __importDefault(require("lodash/isEmpty"));
var dropRight_1 = __importDefault(require("lodash/dropRight"));
var flatten_1 = __importDefault(require("lodash/flatten"));
var map_1 = __importDefault(require("lodash/map"));
var find_1 = __importDefault(require("lodash/find"));
var has_1 = __importDefault(require("lodash/has"));
var includes_1 = __importDefault(require("lodash/includes"));
var clone_1 = __importDefault(require("lodash/clone"));
var exceptions_public_1 = require("../../exceptions_public");
var constants_1 = require("../../constants");
var parser_1 = require("../parser");
exports.EOF_FOLLOW_KEY = {};
exports.IN_RULE_RECOVERY_EXCEPTION = "InRuleRecoveryException";
class InRuleRecoveryException extends Error {
constructor(message) {
super(message);
this.name = exports.IN_RULE_RECOVERY_EXCEPTION;
var InRuleRecoveryException = /** @class */ (function (_super) {
__extends(InRuleRecoveryException, _super);
function InRuleRecoveryException(message) {
var _this = _super.call(this, message) || this;
_this.name = exports.IN_RULE_RECOVERY_EXCEPTION;
return _this;
}
}
return InRuleRecoveryException;
}(Error));
exports.InRuleRecoveryException = InRuleRecoveryException;

@@ -32,4 +50,6 @@ /**

*/
class Recoverable {
initRecoverable(config) {
var Recoverable = /** @class */ (function () {
function Recoverable() {
}
Recoverable.prototype.initRecoverable = function (config) {
this.firstAfterRepMap = {};

@@ -46,36 +66,37 @@ this.resyncFollows = {};

}
}
getTokenToInsert(tokType) {
const tokToInsert = (0, tokens_public_1.createTokenInstance)(tokType, "", NaN, NaN, NaN, NaN, NaN, NaN);
};
Recoverable.prototype.getTokenToInsert = function (tokType) {
var tokToInsert = (0, tokens_public_1.createTokenInstance)(tokType, "", NaN, NaN, NaN, NaN, NaN, NaN);
tokToInsert.isInsertedInRecovery = true;
return tokToInsert;
}
canTokenTypeBeInsertedInRecovery(tokType) {
};
Recoverable.prototype.canTokenTypeBeInsertedInRecovery = function (tokType) {
return true;
}
canTokenTypeBeDeletedInRecovery(tokType) {
};
Recoverable.prototype.canTokenTypeBeDeletedInRecovery = function (tokType) {
return true;
}
tryInRepetitionRecovery(grammarRule, grammarRuleArgs, lookAheadFunc, expectedTokType) {
};
Recoverable.prototype.tryInRepetitionRecovery = function (grammarRule, grammarRuleArgs, lookAheadFunc, expectedTokType) {
var _this = this;
// TODO: can the resyncTokenType be cached?
const reSyncTokType = this.findReSyncTokenType();
const savedLexerState = this.exportLexerState();
const resyncedTokens = [];
let passedResyncPoint = false;
const nextTokenWithoutResync = this.LA(1);
let currToken = this.LA(1);
const generateErrorMessage = () => {
const previousToken = this.LA(0);
var reSyncTokType = this.findReSyncTokenType();
var savedLexerState = this.exportLexerState();
var resyncedTokens = [];
var passedResyncPoint = false;
var nextTokenWithoutResync = this.LA(1);
var currToken = this.LA(1);
var generateErrorMessage = function () {
var previousToken = _this.LA(0);
// we are preemptively re-syncing before an error has been detected, therefor we must reproduce
// the error that would have been thrown
const msg = this.errorMessageProvider.buildMismatchTokenMessage({
var msg = _this.errorMessageProvider.buildMismatchTokenMessage({
expected: expectedTokType,
actual: nextTokenWithoutResync,
previous: previousToken,
ruleName: this.getCurrRuleFullName()
ruleName: _this.getCurrRuleFullName()
});
const error = new exceptions_public_1.MismatchedTokenException(msg, nextTokenWithoutResync, this.LA(0));
var error = new exceptions_public_1.MismatchedTokenException(msg, nextTokenWithoutResync, _this.LA(0));
// the first token here will be the original cause of the error, this is not part of the resyncedTokens property.
error.resyncedTokens = (0, dropRight_1.default)(resyncedTokens);
this.SAVE_ERROR(error);
_this.SAVE_ERROR(error);
};

@@ -107,4 +128,4 @@ while (!passedResyncPoint) {

this.importLexerState(savedLexerState);
}
shouldInRepetitionRecoveryBeTried(expectTokAfterLastMatch, nextTokIdx, notStuck) {
};
Recoverable.prototype.shouldInRepetitionRecoveryBeTried = function (expectTokAfterLastMatch, nextTokIdx, notStuck) {
// Edge case of arriving from a MANY repetition which is stuck

@@ -131,16 +152,16 @@ // Attempting recovery in this case could cause an infinite loop

return true;
}
};
// Error Recovery functionality
getFollowsForInRuleRecovery(tokType, tokIdxInRule) {
const grammarPath = this.getCurrentGrammarPath(tokType, tokIdxInRule);
const follows = this.getNextPossibleTokenTypes(grammarPath);
Recoverable.prototype.getFollowsForInRuleRecovery = function (tokType, tokIdxInRule) {
var grammarPath = this.getCurrentGrammarPath(tokType, tokIdxInRule);
var follows = this.getNextPossibleTokenTypes(grammarPath);
return follows;
}
tryInRuleRecovery(expectedTokType, follows) {
};
Recoverable.prototype.tryInRuleRecovery = function (expectedTokType, follows) {
if (this.canRecoverWithSingleTokenInsertion(expectedTokType, follows)) {
const tokToInsert = this.getTokenToInsert(expectedTokType);
var tokToInsert = this.getTokenToInsert(expectedTokType);
return tokToInsert;
}
if (this.canRecoverWithSingleTokenDeletion(expectedTokType)) {
const nextTok = this.SKIP_TOKEN();
var nextTok = this.SKIP_TOKEN();
this.consumeToken();

@@ -150,8 +171,9 @@ return nextTok;

throw new InRuleRecoveryException("sad sad panda");
}
canPerformInRuleRecovery(expectedToken, follows) {
};
Recoverable.prototype.canPerformInRuleRecovery = function (expectedToken, follows) {
return (this.canRecoverWithSingleTokenInsertion(expectedToken, follows) ||
this.canRecoverWithSingleTokenDeletion(expectedToken));
}
canRecoverWithSingleTokenInsertion(expectedTokType, follows) {
};
Recoverable.prototype.canRecoverWithSingleTokenInsertion = function (expectedTokType, follows) {
var _this = this;
if (!this.canTokenTypeBeInsertedInRecovery(expectedTokType)) {

@@ -164,28 +186,28 @@ return false;

}
const mismatchedTok = this.LA(1);
const isMisMatchedTokInFollows = (0, find_1.default)(follows, (possibleFollowsTokType) => {
return this.tokenMatcher(mismatchedTok, possibleFollowsTokType);
var mismatchedTok = this.LA(1);
var isMisMatchedTokInFollows = (0, find_1.default)(follows, function (possibleFollowsTokType) {
return _this.tokenMatcher(mismatchedTok, possibleFollowsTokType);
}) !== undefined;
return isMisMatchedTokInFollows;
}
canRecoverWithSingleTokenDeletion(expectedTokType) {
};
Recoverable.prototype.canRecoverWithSingleTokenDeletion = function (expectedTokType) {
if (!this.canTokenTypeBeDeletedInRecovery(expectedTokType)) {
return false;
}
const isNextTokenWhatIsExpected = this.tokenMatcher(this.LA(2), expectedTokType);
var isNextTokenWhatIsExpected = this.tokenMatcher(this.LA(2), expectedTokType);
return isNextTokenWhatIsExpected;
}
isInCurrentRuleReSyncSet(tokenTypeIdx) {
const followKey = this.getCurrFollowKey();
const currentRuleReSyncSet = this.getFollowSetFromFollowKey(followKey);
};
Recoverable.prototype.isInCurrentRuleReSyncSet = function (tokenTypeIdx) {
var followKey = this.getCurrFollowKey();
var currentRuleReSyncSet = this.getFollowSetFromFollowKey(followKey);
return (0, includes_1.default)(currentRuleReSyncSet, tokenTypeIdx);
}
findReSyncTokenType() {
const allPossibleReSyncTokTypes = this.flattenFollowSet();
};
Recoverable.prototype.findReSyncTokenType = function () {
var allPossibleReSyncTokTypes = this.flattenFollowSet();
// this loop will always terminate as EOF is always in the follow stack and also always (virtually) in the input
let nextToken = this.LA(1);
let k = 2;
var nextToken = this.LA(1);
var k = 2;
while (true) {
const foundMatch = (0, find_1.default)(allPossibleReSyncTokTypes, (resyncTokType) => {
const canMatch = (0, tokens_public_1.tokenMatcher)(nextToken, resyncTokType);
var foundMatch = (0, find_1.default)(allPossibleReSyncTokTypes, function (resyncTokType) {
var canMatch = (0, tokens_public_1.tokenMatcher)(nextToken, resyncTokType);
return canMatch;

@@ -199,4 +221,4 @@ });

}
}
getCurrFollowKey() {
};
Recoverable.prototype.getCurrFollowKey = function () {
// the length is at least one as we always add the ruleName to the stack before invoking the rule.

@@ -206,5 +228,5 @@ if (this.RULE_STACK.length === 1) {

}
const currRuleShortName = this.getLastExplicitRuleShortName();
const currRuleIdx = this.getLastExplicitRuleOccurrenceIndex();
const prevRuleShortName = this.getPreviousExplicitRuleShortName();
var currRuleShortName = this.getLastExplicitRuleShortName();
var currRuleIdx = this.getLastExplicitRuleOccurrenceIndex();
var prevRuleShortName = this.getPreviousExplicitRuleShortName();
return {

@@ -215,7 +237,8 @@ ruleName: this.shortRuleNameToFullName(currRuleShortName),

};
}
buildFullFollowKeyStack() {
const explicitRuleStack = this.RULE_STACK;
const explicitOccurrenceStack = this.RULE_OCCURRENCE_STACK;
return (0, map_1.default)(explicitRuleStack, (ruleName, idx) => {
};
Recoverable.prototype.buildFullFollowKeyStack = function () {
var _this = this;
var explicitRuleStack = this.RULE_STACK;
var explicitOccurrenceStack = this.RULE_OCCURRENCE_STACK;
return (0, map_1.default)(explicitRuleStack, function (ruleName, idx) {
if (idx === 0) {

@@ -225,24 +248,25 @@ return exports.EOF_FOLLOW_KEY;

return {
ruleName: this.shortRuleNameToFullName(ruleName),
ruleName: _this.shortRuleNameToFullName(ruleName),
idxInCallingRule: explicitOccurrenceStack[idx],
inRule: this.shortRuleNameToFullName(explicitRuleStack[idx - 1])
inRule: _this.shortRuleNameToFullName(explicitRuleStack[idx - 1])
};
});
}
flattenFollowSet() {
const followStack = (0, map_1.default)(this.buildFullFollowKeyStack(), (currKey) => {
return this.getFollowSetFromFollowKey(currKey);
};
Recoverable.prototype.flattenFollowSet = function () {
var _this = this;
var followStack = (0, map_1.default)(this.buildFullFollowKeyStack(), function (currKey) {
return _this.getFollowSetFromFollowKey(currKey);
});
return (0, flatten_1.default)(followStack);
}
getFollowSetFromFollowKey(followKey) {
};
Recoverable.prototype.getFollowSetFromFollowKey = function (followKey) {
if (followKey === exports.EOF_FOLLOW_KEY) {
return [tokens_public_1.EOF];
}
const followName = followKey.ruleName + followKey.idxInCallingRule + constants_1.IN + followKey.inRule;
var followName = followKey.ruleName + followKey.idxInCallingRule + constants_1.IN + followKey.inRule;
return this.resyncFollows[followName];
}
};
// It does not make any sense to include a virtual EOF token in the list of resynced tokens
// as EOF does not really exist and thus does not contain any useful information (line/column numbers)
addToResyncTokens(token, resyncTokens) {
Recoverable.prototype.addToResyncTokens = function (token, resyncTokens) {
if (!this.tokenMatcher(token, tokens_public_1.EOF)) {

@@ -252,6 +276,6 @@ resyncTokens.push(token);

return resyncTokens;
}
reSyncTo(tokType) {
const resyncedTokens = [];
let nextTok = this.LA(1);
};
Recoverable.prototype.reSyncTo = function (tokType) {
var resyncedTokens = [];
var nextTok = this.LA(1);
while (this.tokenMatcher(nextTok, tokType) === false) {

@@ -263,11 +287,11 @@ nextTok = this.SKIP_TOKEN();

return (0, dropRight_1.default)(resyncedTokens);
}
attemptInRepetitionRecovery(prodFunc, args, lookaheadFunc, dslMethodIdx, prodOccurrence, nextToksWalker, notStuck) {
};
Recoverable.prototype.attemptInRepetitionRecovery = function (prodFunc, args, lookaheadFunc, dslMethodIdx, prodOccurrence, nextToksWalker, notStuck) {
// by default this is a NO-OP
// The actual implementation is with the function(not method) below
}
getCurrentGrammarPath(tokType, tokIdxInRule) {
const pathRuleStack = this.getHumanReadableRuleStack();
const pathOccurrenceStack = (0, clone_1.default)(this.RULE_OCCURRENCE_STACK);
const grammarPath = {
};
Recoverable.prototype.getCurrentGrammarPath = function (tokType, tokIdxInRule) {
var pathRuleStack = this.getHumanReadableRuleStack();
var pathOccurrenceStack = (0, clone_1.default)(this.RULE_OCCURRENCE_STACK);
var grammarPath = {
ruleStack: pathRuleStack,

@@ -279,21 +303,25 @@ occurrenceStack: pathOccurrenceStack,

return grammarPath;
}
getHumanReadableRuleStack() {
return (0, map_1.default)(this.RULE_STACK, (currShortName) => this.shortRuleNameToFullName(currShortName));
}
}
};
Recoverable.prototype.getHumanReadableRuleStack = function () {
var _this = this;
return (0, map_1.default)(this.RULE_STACK, function (currShortName) {
return _this.shortRuleNameToFullName(currShortName);
});
};
return Recoverable;
}());
exports.Recoverable = Recoverable;
function attemptInRepetitionRecovery(prodFunc, args, lookaheadFunc, dslMethodIdx, prodOccurrence, nextToksWalker, notStuck) {
const key = this.getKeyForAutomaticLookahead(dslMethodIdx, prodOccurrence);
let firstAfterRepInfo = this.firstAfterRepMap[key];
var key = this.getKeyForAutomaticLookahead(dslMethodIdx, prodOccurrence);
var firstAfterRepInfo = this.firstAfterRepMap[key];
if (firstAfterRepInfo === undefined) {
const currRuleName = this.getCurrRuleFullName();
const ruleGrammar = this.getGAstProductions()[currRuleName];
const walker = new nextToksWalker(ruleGrammar, prodOccurrence);
var currRuleName = this.getCurrRuleFullName();
var ruleGrammar = this.getGAstProductions()[currRuleName];
var walker = new nextToksWalker(ruleGrammar, prodOccurrence);
firstAfterRepInfo = walker.startWalking();
this.firstAfterRepMap[key] = firstAfterRepInfo;
}
let expectTokAfterLastMatch = firstAfterRepInfo.token;
let nextTokIdx = firstAfterRepInfo.occurrence;
const isEndOfRule = firstAfterRepInfo.isEndOfRule;
var expectTokAfterLastMatch = firstAfterRepInfo.token;
var nextTokIdx = firstAfterRepInfo.occurrence;
var isEndOfRule = firstAfterRepInfo.isEndOfRule;
// special edge case of a TOP most repetition after which the input should END.

@@ -300,0 +328,0 @@ // this will force an attempt for inRule recovery in that scenario.

@@ -7,14 +7,16 @@ "use strict";

exports.TreeBuilder = void 0;
const cst_1 = require("../../cst/cst");
const noop_1 = __importDefault(require("lodash/noop"));
const has_1 = __importDefault(require("lodash/has"));
const keys_1 = __importDefault(require("lodash/keys"));
const isUndefined_1 = __importDefault(require("lodash/isUndefined"));
const cst_visitor_1 = require("../../cst/cst_visitor");
const parser_1 = require("../parser");
var cst_1 = require("../../cst/cst");
var noop_1 = __importDefault(require("lodash/noop"));
var has_1 = __importDefault(require("lodash/has"));
var keys_1 = __importDefault(require("lodash/keys"));
var isUndefined_1 = __importDefault(require("lodash/isUndefined"));
var cst_visitor_1 = require("../../cst/cst_visitor");
var parser_1 = require("../parser");
/**
* This trait is responsible for the CST building logic.
*/
class TreeBuilder {
initTreeBuilder(config) {
var TreeBuilder = /** @class */ (function () {
function TreeBuilder() {
}
TreeBuilder.prototype.initTreeBuilder = function (config) {
this.CST_STACK = [];

@@ -71,7 +73,7 @@ // outputCst is no longer exposed/defined in the pubic API

else {
throw Error(`Invalid <nodeLocationTracking> config option: "${config.nodeLocationTracking}"`);
throw Error("Invalid <nodeLocationTracking> config option: \"".concat(config.nodeLocationTracking, "\""));
}
}
}
setInitialNodeLocationOnlyOffsetRecovery(cstNode) {
};
TreeBuilder.prototype.setInitialNodeLocationOnlyOffsetRecovery = function (cstNode) {
cstNode.location = {

@@ -81,4 +83,4 @@ startOffset: NaN,

};
}
setInitialNodeLocationOnlyOffsetRegular(cstNode) {
};
TreeBuilder.prototype.setInitialNodeLocationOnlyOffsetRegular = function (cstNode) {
cstNode.location = {

@@ -92,4 +94,4 @@ // without error recovery the starting Location of a new CstNode is guaranteed

};
}
setInitialNodeLocationFullRecovery(cstNode) {
};
TreeBuilder.prototype.setInitialNodeLocationFullRecovery = function (cstNode) {
cstNode.location = {

@@ -103,3 +105,3 @@ startOffset: NaN,

};
}
};
/**

@@ -110,4 +112,4 @@ * @see setInitialNodeLocationOnlyOffsetRegular for explanation why this work

*/
setInitialNodeLocationFullRegular(cstNode) {
const nextToken = this.LA(1);
TreeBuilder.prototype.setInitialNodeLocationFullRegular = function (cstNode) {
var nextToken = this.LA(1);
cstNode.location = {

@@ -121,5 +123,5 @@ startOffset: nextToken.startOffset,

};
}
cstInvocationStateUpdate(fullRuleName) {
const cstNode = {
};
TreeBuilder.prototype.cstInvocationStateUpdate = function (fullRuleName) {
var cstNode = {
name: fullRuleName,

@@ -130,10 +132,10 @@ children: Object.create(null)

this.CST_STACK.push(cstNode);
}
cstFinallyStateUpdate() {
};
TreeBuilder.prototype.cstFinallyStateUpdate = function () {
this.CST_STACK.pop();
}
cstPostRuleFull(ruleCstNode) {
};
TreeBuilder.prototype.cstPostRuleFull = function (ruleCstNode) {
// casts to `required<CstNodeLocation>` are safe because `cstPostRuleFull` should only be invoked when full location is enabled
const prevToken = this.LA(0);
const loc = ruleCstNode.location;
var prevToken = this.LA(0);
var loc = ruleCstNode.location;
// If this condition is true it means we consumed at least one Token

@@ -152,7 +154,7 @@ // In this CstNode.

}
}
cstPostRuleOnlyOffset(ruleCstNode) {
const prevToken = this.LA(0);
};
TreeBuilder.prototype.cstPostRuleOnlyOffset = function (ruleCstNode) {
var prevToken = this.LA(0);
// `location' is not null because `cstPostRuleOnlyOffset` will only be invoked when location tracking is enabled.
const loc = ruleCstNode.location;
var loc = ruleCstNode.location;
// If this condition is true it means we consumed at least one Token

@@ -167,18 +169,18 @@ // In this CstNode.

}
}
cstPostTerminal(key, consumedToken) {
const rootCst = this.CST_STACK[this.CST_STACK.length - 1];
};
TreeBuilder.prototype.cstPostTerminal = function (key, consumedToken) {
var rootCst = this.CST_STACK[this.CST_STACK.length - 1];
(0, cst_1.addTerminalToCst)(rootCst, consumedToken, key);
// This is only used when **both** error recovery and CST Output are enabled.
this.setNodeLocationFromToken(rootCst.location, consumedToken);
}
cstPostNonTerminal(ruleCstResult, ruleName) {
const preCstNode = this.CST_STACK[this.CST_STACK.length - 1];
};
TreeBuilder.prototype.cstPostNonTerminal = function (ruleCstResult, ruleName) {
var preCstNode = this.CST_STACK[this.CST_STACK.length - 1];
(0, cst_1.addNoneTerminalToCst)(preCstNode, ruleName, ruleCstResult);
// This is only used when **both** error recovery and CST Output are enabled.
this.setNodeLocationFromNode(preCstNode.location, ruleCstResult.location);
}
getBaseCstVisitorConstructor() {
};
TreeBuilder.prototype.getBaseCstVisitorConstructor = function () {
if ((0, isUndefined_1.default)(this.baseCstVisitorConstructor)) {
const newBaseCstVisitorConstructor = (0, cst_visitor_1.createBaseSemanticVisitorConstructor)(this.className, (0, keys_1.default)(this.gastProductionsCache));
var newBaseCstVisitorConstructor = (0, cst_visitor_1.createBaseSemanticVisitorConstructor)(this.className, (0, keys_1.default)(this.gastProductionsCache));
this.baseCstVisitorConstructor = newBaseCstVisitorConstructor;

@@ -188,6 +190,6 @@ return newBaseCstVisitorConstructor;

return this.baseCstVisitorConstructor;
}
getBaseCstVisitorConstructorWithDefaults() {
};
TreeBuilder.prototype.getBaseCstVisitorConstructorWithDefaults = function () {
if ((0, isUndefined_1.default)(this.baseCstVisitorWithDefaultsConstructor)) {
const newConstructor = (0, cst_visitor_1.createBaseVisitorConstructorWithDefaults)(this.className, (0, keys_1.default)(this.gastProductionsCache), this.getBaseCstVisitorConstructor());
var newConstructor = (0, cst_visitor_1.createBaseVisitorConstructorWithDefaults)(this.className, (0, keys_1.default)(this.gastProductionsCache), this.getBaseCstVisitorConstructor());
this.baseCstVisitorWithDefaultsConstructor = newConstructor;

@@ -197,17 +199,18 @@ return newConstructor;

return this.baseCstVisitorWithDefaultsConstructor;
}
getLastExplicitRuleShortName() {
const ruleStack = this.RULE_STACK;
};
TreeBuilder.prototype.getLastExplicitRuleShortName = function () {
var ruleStack = this.RULE_STACK;
return ruleStack[ruleStack.length - 1];
}
getPreviousExplicitRuleShortName() {
const ruleStack = this.RULE_STACK;
};
TreeBuilder.prototype.getPreviousExplicitRuleShortName = function () {
var ruleStack = this.RULE_STACK;
return ruleStack[ruleStack.length - 2];
}
getLastExplicitRuleOccurrenceIndex() {
const occurrenceStack = this.RULE_OCCURRENCE_STACK;
};
TreeBuilder.prototype.getLastExplicitRuleOccurrenceIndex = function () {
var occurrenceStack = this.RULE_OCCURRENCE_STACK;
return occurrenceStack[occurrenceStack.length - 1];
}
}
};
return TreeBuilder;
}());
exports.TreeBuilder = TreeBuilder;
//# sourceMappingURL=tree_builder.js.map

@@ -5,9 +5,9 @@ "use strict";

function applyMixins(derivedCtor, baseCtors) {
baseCtors.forEach((baseCtor) => {
const baseProto = baseCtor.prototype;
Object.getOwnPropertyNames(baseProto).forEach((propName) => {
baseCtors.forEach(function (baseCtor) {
var baseProto = baseCtor.prototype;
Object.getOwnPropertyNames(baseProto).forEach(function (propName) {
if (propName === "constructor") {
return;
}
const basePropDescriptor = Object.getOwnPropertyDescriptor(baseProto, propName);
var basePropDescriptor = Object.getOwnPropertyDescriptor(baseProto, propName);
// Handle Accessors

@@ -14,0 +14,0 @@ if (basePropDescriptor &&

@@ -5,9 +5,9 @@ "use strict";

exports.defaultLexerErrorProvider = {
buildUnableToPopLexerModeMessage(token) {
return `Unable to pop Lexer Mode after encountering Token ->${token.image}<- The Mode Stack is empty`;
buildUnableToPopLexerModeMessage: function (token) {
return "Unable to pop Lexer Mode after encountering Token ->".concat(token.image, "<- The Mode Stack is empty");
},
buildUnexpectedCharactersMessage(fullText, startOffset, length, line, column) {
return (`unexpected character: ->${fullText.charAt(startOffset)}<- at offset: ${startOffset},` + ` skipped ${length} characters.`);
buildUnexpectedCharactersMessage: function (fullText, startOffset, length, line, column) {
return ("unexpected character: ->".concat(fullText.charAt(startOffset), "<- at offset: ").concat(startOffset, ",") + " skipped ".concat(length, " characters."));
}
};
//# sourceMappingURL=lexer_errors_public.js.map

@@ -7,20 +7,20 @@ "use strict";

exports.Lexer = exports.LexerDefinitionErrorType = void 0;
const lexer_1 = require("./lexer");
const noop_1 = __importDefault(require("lodash/noop"));
const isEmpty_1 = __importDefault(require("lodash/isEmpty"));
const isArray_1 = __importDefault(require("lodash/isArray"));
const last_1 = __importDefault(require("lodash/last"));
const reject_1 = __importDefault(require("lodash/reject"));
const map_1 = __importDefault(require("lodash/map"));
const forEach_1 = __importDefault(require("lodash/forEach"));
const keys_1 = __importDefault(require("lodash/keys"));
const isUndefined_1 = __importDefault(require("lodash/isUndefined"));
const identity_1 = __importDefault(require("lodash/identity"));
const assign_1 = __importDefault(require("lodash/assign"));
const reduce_1 = __importDefault(require("lodash/reduce"));
const clone_1 = __importDefault(require("lodash/clone"));
const utils_1 = require("@chevrotain/utils");
const tokens_1 = require("./tokens");
const lexer_errors_public_1 = require("./lexer_errors_public");
const reg_exp_parser_1 = require("./reg_exp_parser");
var lexer_1 = require("./lexer");
var noop_1 = __importDefault(require("lodash/noop"));
var isEmpty_1 = __importDefault(require("lodash/isEmpty"));
var isArray_1 = __importDefault(require("lodash/isArray"));
var last_1 = __importDefault(require("lodash/last"));
var reject_1 = __importDefault(require("lodash/reject"));
var map_1 = __importDefault(require("lodash/map"));
var forEach_1 = __importDefault(require("lodash/forEach"));
var keys_1 = __importDefault(require("lodash/keys"));
var isUndefined_1 = __importDefault(require("lodash/isUndefined"));
var identity_1 = __importDefault(require("lodash/identity"));
var assign_1 = __importDefault(require("lodash/assign"));
var reduce_1 = __importDefault(require("lodash/reduce"));
var clone_1 = __importDefault(require("lodash/clone"));
var utils_1 = require("@chevrotain/utils");
var tokens_1 = require("./tokens");
var lexer_errors_public_1 = require("./lexer_errors_public");
var reg_exp_parser_1 = require("./reg_exp_parser");
var LexerDefinitionErrorType;

@@ -47,3 +47,3 @@ (function (LexerDefinitionErrorType) {

})(LexerDefinitionErrorType = exports.LexerDefinitionErrorType || (exports.LexerDefinitionErrorType = {}));
const DEFAULT_LEXER_CONFIG = {
var DEFAULT_LEXER_CONFIG = {
deferDefinitionErrorsHandling: false,

@@ -61,4 +61,6 @@ positionTracking: "full",

Object.freeze(DEFAULT_LEXER_CONFIG);
class Lexer {
constructor(lexerDefinition, config = DEFAULT_LEXER_CONFIG) {
var Lexer = /** @class */ (function () {
function Lexer(lexerDefinition, config) {
if (config === void 0) { config = DEFAULT_LEXER_CONFIG; }
var _this = this;
this.lexerDefinition = lexerDefinition;

@@ -77,18 +79,18 @@ this.lexerDefinitionErrors = [];

// of the lexer to a separate package.
this.TRACE_INIT = (phaseDesc, phaseImpl) => {
this.TRACE_INIT = function (phaseDesc, phaseImpl) {
// No need to optimize this using NOOP pattern because
// It is not called in a hot spot...
if (this.traceInitPerf === true) {
this.traceInitIndent++;
const indent = new Array(this.traceInitIndent + 1).join("\t");
if (this.traceInitIndent < this.traceInitMaxIdent) {
console.log(`${indent}--> <${phaseDesc}>`);
if (_this.traceInitPerf === true) {
_this.traceInitIndent++;
var indent = new Array(_this.traceInitIndent + 1).join("\t");
if (_this.traceInitIndent < _this.traceInitMaxIdent) {
console.log("".concat(indent, "--> <").concat(phaseDesc, ">"));
}
const { time, value } = (0, utils_1.timer)(phaseImpl);
var _a = (0, utils_1.timer)(phaseImpl), time = _a.time, value = _a.value;
/* istanbul ignore next - Difficult to reproduce specific performance behavior (>10ms) in tests */
const traceMethod = time > 10 ? console.warn : console.log;
if (this.traceInitIndent < this.traceInitMaxIdent) {
traceMethod(`${indent}<-- <${phaseDesc}> time: ${time}ms`);
var traceMethod = time > 10 ? console.warn : console.log;
if (_this.traceInitIndent < _this.traceInitMaxIdent) {
traceMethod("".concat(indent, "<-- <").concat(phaseDesc, "> time: ").concat(time, "ms"));
}
this.traceInitIndent--;
_this.traceInitIndent--;
return value;

@@ -106,3 +108,3 @@ }

this.config = (0, assign_1.default)({}, DEFAULT_LEXER_CONFIG, config);
const traceInitVal = this.config.traceInitPerf;
var traceInitVal = this.config.traceInitPerf;
if (traceInitVal === true) {

@@ -117,13 +119,13 @@ this.traceInitMaxIdent = Infinity;

this.traceInitIndent = -1;
this.TRACE_INIT("Lexer Constructor", () => {
let actualDefinition;
let hasOnlySingleMode = true;
this.TRACE_INIT("Lexer Config handling", () => {
if (this.config.lineTerminatorsPattern ===
this.TRACE_INIT("Lexer Constructor", function () {
var actualDefinition;
var hasOnlySingleMode = true;
_this.TRACE_INIT("Lexer Config handling", function () {
if (_this.config.lineTerminatorsPattern ===
DEFAULT_LEXER_CONFIG.lineTerminatorsPattern) {
// optimized built-in implementation for the defaults definition of lineTerminators
this.config.lineTerminatorsPattern = lexer_1.LineTerminatorOptimizedTester;
_this.config.lineTerminatorsPattern = lexer_1.LineTerminatorOptimizedTester;
}
else {
if (this.config.lineTerminatorCharacters ===
if (_this.config.lineTerminatorCharacters ===
DEFAULT_LEXER_CONFIG.lineTerminatorCharacters) {

@@ -137,4 +139,4 @@ throw Error("Error: Missing <lineTerminatorCharacters> property on the Lexer config.\n" +

}
this.trackStartLines = /full|onlyStart/i.test(this.config.positionTracking);
this.trackEndLines = /full/i.test(this.config.positionTracking);
_this.trackStartLines = /full|onlyStart/i.test(_this.config.positionTracking);
_this.trackEndLines = /full/i.test(_this.config.positionTracking);
// Convert SingleModeLexerDefinition into a IMultiModeLexerDefinition.

@@ -153,8 +155,8 @@ if ((0, isArray_1.default)(lexerDefinition)) {

});
if (this.config.skipValidations === false) {
this.TRACE_INIT("performRuntimeChecks", () => {
this.lexerDefinitionErrors = this.lexerDefinitionErrors.concat((0, lexer_1.performRuntimeChecks)(actualDefinition, this.trackStartLines, this.config.lineTerminatorCharacters));
if (_this.config.skipValidations === false) {
_this.TRACE_INIT("performRuntimeChecks", function () {
_this.lexerDefinitionErrors = _this.lexerDefinitionErrors.concat((0, lexer_1.performRuntimeChecks)(actualDefinition, _this.trackStartLines, _this.config.lineTerminatorCharacters));
});
this.TRACE_INIT("performWarningRuntimeChecks", () => {
this.lexerDefinitionWarning = this.lexerDefinitionWarning.concat((0, lexer_1.performWarningRuntimeChecks)(actualDefinition, this.trackStartLines, this.config.lineTerminatorCharacters));
_this.TRACE_INIT("performWarningRuntimeChecks", function () {
_this.lexerDefinitionWarning = _this.lexerDefinitionWarning.concat((0, lexer_1.performWarningRuntimeChecks)(actualDefinition, _this.trackStartLines, _this.config.lineTerminatorCharacters));
});

@@ -168,12 +170,12 @@ }

// this transformation is to increase robustness in the case of partially invalid lexer definition.
(0, forEach_1.default)(actualDefinition.modes, (currModeValue, currModeName) => {
actualDefinition.modes[currModeName] = (0, reject_1.default)(currModeValue, (currTokType) => (0, isUndefined_1.default)(currTokType));
(0, forEach_1.default)(actualDefinition.modes, function (currModeValue, currModeName) {
actualDefinition.modes[currModeName] = (0, reject_1.default)(currModeValue, function (currTokType) { return (0, isUndefined_1.default)(currTokType); });
});
const allModeNames = (0, keys_1.default)(actualDefinition.modes);
(0, forEach_1.default)(actualDefinition.modes, (currModDef, currModName) => {
this.TRACE_INIT(`Mode: <${currModName}> processing`, () => {
this.modes.push(currModName);
if (this.config.skipValidations === false) {
this.TRACE_INIT(`validatePatterns`, () => {
this.lexerDefinitionErrors = this.lexerDefinitionErrors.concat((0, lexer_1.validatePatterns)(currModDef, allModeNames));
var allModeNames = (0, keys_1.default)(actualDefinition.modes);
(0, forEach_1.default)(actualDefinition.modes, function (currModDef, currModName) {
_this.TRACE_INIT("Mode: <".concat(currModName, "> processing"), function () {
_this.modes.push(currModName);
if (_this.config.skipValidations === false) {
_this.TRACE_INIT("validatePatterns", function () {
_this.lexerDefinitionErrors = _this.lexerDefinitionErrors.concat((0, lexer_1.validatePatterns)(currModDef, allModeNames));
});

@@ -184,39 +186,39 @@ }

// to performing the analysis anyhow...
if ((0, isEmpty_1.default)(this.lexerDefinitionErrors)) {
if ((0, isEmpty_1.default)(_this.lexerDefinitionErrors)) {
(0, tokens_1.augmentTokenTypes)(currModDef);
let currAnalyzeResult;
this.TRACE_INIT(`analyzeTokenTypes`, () => {
currAnalyzeResult = (0, lexer_1.analyzeTokenTypes)(currModDef, {
lineTerminatorCharacters: this.config.lineTerminatorCharacters,
var currAnalyzeResult_1;
_this.TRACE_INIT("analyzeTokenTypes", function () {
currAnalyzeResult_1 = (0, lexer_1.analyzeTokenTypes)(currModDef, {
lineTerminatorCharacters: _this.config.lineTerminatorCharacters,
positionTracking: config.positionTracking,
ensureOptimizations: config.ensureOptimizations,
safeMode: config.safeMode,
tracer: this.TRACE_INIT
tracer: _this.TRACE_INIT
});
});
this.patternIdxToConfig[currModName] =
currAnalyzeResult.patternIdxToConfig;
this.charCodeToPatternIdxToConfig[currModName] =
currAnalyzeResult.charCodeToPatternIdxToConfig;
this.emptyGroups = (0, assign_1.default)({}, this.emptyGroups, currAnalyzeResult.emptyGroups);
this.hasCustom = currAnalyzeResult.hasCustom || this.hasCustom;
this.canModeBeOptimized[currModName] =
currAnalyzeResult.canBeOptimized;
_this.patternIdxToConfig[currModName] =
currAnalyzeResult_1.patternIdxToConfig;
_this.charCodeToPatternIdxToConfig[currModName] =
currAnalyzeResult_1.charCodeToPatternIdxToConfig;
_this.emptyGroups = (0, assign_1.default)({}, _this.emptyGroups, currAnalyzeResult_1.emptyGroups);
_this.hasCustom = currAnalyzeResult_1.hasCustom || _this.hasCustom;
_this.canModeBeOptimized[currModName] =
currAnalyzeResult_1.canBeOptimized;
}
});
});
this.defaultMode = actualDefinition.defaultMode;
if (!(0, isEmpty_1.default)(this.lexerDefinitionErrors) &&
!this.config.deferDefinitionErrorsHandling) {
const allErrMessages = (0, map_1.default)(this.lexerDefinitionErrors, (error) => {
_this.defaultMode = actualDefinition.defaultMode;
if (!(0, isEmpty_1.default)(_this.lexerDefinitionErrors) &&
!_this.config.deferDefinitionErrorsHandling) {
var allErrMessages = (0, map_1.default)(_this.lexerDefinitionErrors, function (error) {
return error.message;
});
const allErrMessagesString = allErrMessages.join("-----------------------\n");
var allErrMessagesString = allErrMessages.join("-----------------------\n");
throw new Error("Errors detected in definition of Lexer:\n" + allErrMessagesString);
}
// Only print warning if there are no errors, This will avoid pl
(0, forEach_1.default)(this.lexerDefinitionWarning, (warningDescriptor) => {
(0, forEach_1.default)(_this.lexerDefinitionWarning, function (warningDescriptor) {
(0, utils_1.PRINT_WARNING)(warningDescriptor.message);
});
this.TRACE_INIT("Choosing sub-methods implementations", () => {
_this.TRACE_INIT("Choosing sub-methods implementations", function () {
// Choose the relevant internal implementations for this specific parser.

@@ -226,41 +228,41 @@ // These implementations should be in-lined by the JavaScript engine

if (lexer_1.SUPPORT_STICKY) {
this.chopInput = identity_1.default;
this.match = this.matchWithTest;
_this.chopInput = identity_1.default;
_this.match = _this.matchWithTest;
}
else {
this.updateLastIndex = noop_1.default;
this.match = this.matchWithExec;
_this.updateLastIndex = noop_1.default;
_this.match = _this.matchWithExec;
}
if (hasOnlySingleMode) {
this.handleModes = noop_1.default;
_this.handleModes = noop_1.default;
}
if (this.trackStartLines === false) {
this.computeNewColumn = identity_1.default;
if (_this.trackStartLines === false) {
_this.computeNewColumn = identity_1.default;
}
if (this.trackEndLines === false) {
this.updateTokenEndLineColumnLocation = noop_1.default;
if (_this.trackEndLines === false) {
_this.updateTokenEndLineColumnLocation = noop_1.default;
}
if (/full/i.test(this.config.positionTracking)) {
this.createTokenInstance = this.createFullToken;
if (/full/i.test(_this.config.positionTracking)) {
_this.createTokenInstance = _this.createFullToken;
}
else if (/onlyStart/i.test(this.config.positionTracking)) {
this.createTokenInstance = this.createStartOnlyToken;
else if (/onlyStart/i.test(_this.config.positionTracking)) {
_this.createTokenInstance = _this.createStartOnlyToken;
}
else if (/onlyOffset/i.test(this.config.positionTracking)) {
this.createTokenInstance = this.createOffsetOnlyToken;
else if (/onlyOffset/i.test(_this.config.positionTracking)) {
_this.createTokenInstance = _this.createOffsetOnlyToken;
}
else {
throw Error(`Invalid <positionTracking> config option: "${this.config.positionTracking}"`);
throw Error("Invalid <positionTracking> config option: \"".concat(_this.config.positionTracking, "\""));
}
if (this.hasCustom) {
this.addToken = this.addTokenUsingPush;
this.handlePayload = this.handlePayloadWithCustom;
if (_this.hasCustom) {
_this.addToken = _this.addTokenUsingPush;
_this.handlePayload = _this.handlePayloadWithCustom;
}
else {
this.addToken = this.addTokenUsingMemberAccess;
this.handlePayload = this.handlePayloadNoCustom;
_this.addToken = _this.addTokenUsingMemberAccess;
_this.handlePayload = _this.handlePayloadNoCustom;
}
});
this.TRACE_INIT("Failed Optimization Warnings", () => {
const unOptimizedModes = (0, reduce_1.default)(this.canModeBeOptimized, (cannotBeOptimized, canBeOptimized, modeName) => {
_this.TRACE_INIT("Failed Optimization Warnings", function () {
var unOptimizedModes = (0, reduce_1.default)(_this.canModeBeOptimized, function (cannotBeOptimized, canBeOptimized, modeName) {
if (canBeOptimized === false) {

@@ -272,3 +274,3 @@ cannotBeOptimized.push(modeName);

if (config.ensureOptimizations && !(0, isEmpty_1.default)(unOptimizedModes)) {
throw Error(`Lexer Modes: < ${unOptimizedModes.join(", ")} > cannot be optimized.\n` +
throw Error("Lexer Modes: < ".concat(unOptimizedModes.join(", "), " > cannot be optimized.\n") +
'\t Disable the "ensureOptimizations" lexer config flag to silently ignore this and run the lexer in an un-optimized mode.\n' +

@@ -278,16 +280,17 @@ "\t Or inspect the console log for details on how to resolve these issues.");

});
this.TRACE_INIT("clearRegExpParserCache", () => {
_this.TRACE_INIT("clearRegExpParserCache", function () {
(0, reg_exp_parser_1.clearRegExpParserCache)();
});
this.TRACE_INIT("toFastProperties", () => {
(0, utils_1.toFastProperties)(this);
_this.TRACE_INIT("toFastProperties", function () {
(0, utils_1.toFastProperties)(_this);
});
});
}
tokenize(text, initialMode = this.defaultMode) {
Lexer.prototype.tokenize = function (text, initialMode) {
if (initialMode === void 0) { initialMode = this.defaultMode; }
if (!(0, isEmpty_1.default)(this.lexerDefinitionErrors)) {
const allErrMessages = (0, map_1.default)(this.lexerDefinitionErrors, (error) => {
var allErrMessages = (0, map_1.default)(this.lexerDefinitionErrors, function (error) {
return error.message;
});
const allErrMessagesString = allErrMessages.join("-----------------------\n");
var allErrMessagesString = allErrMessages.join("-----------------------\n");
throw new Error("Unable to Tokenize because Errors detected in definition of Lexer:\n" +

@@ -297,3 +300,3 @@ allErrMessagesString);

return this.tokenizeInternal(text, initialMode);
}
};
// There is quite a bit of duplication between this and "tokenizeInternalLazy"

@@ -303,8 +306,9 @@ // This is intentional due to performance considerations.

// for `tsc` to always understand it is "safe"
tokenizeInternal(text, initialMode) {
let i, j, k, matchAltImage, longerAlt, matchedImage, payload, altPayload, imageLength, group, tokType, newToken, errLength, droppedChar, msg, match;
const orgText = text;
const orgLength = orgText.length;
let offset = 0;
let matchedTokensIndex = 0;
Lexer.prototype.tokenizeInternal = function (text, initialMode) {
var _this = this;
var i, j, k, matchAltImage, longerAlt, matchedImage, payload, altPayload, imageLength, group, tokType, newToken, errLength, droppedChar, msg, match;
var orgText = text;
var orgLength = orgText.length;
var offset = 0;
var matchedTokensIndex = 0;
// initializing the tokensArray to the "guessed" size.

@@ -314,19 +318,19 @@ // guessing too little will still reduce the number of array re-sizes on pushes.

// but would still have a faster runtime by avoiding (All but one) array resizing.
const guessedNumberOfTokens = this.hasCustom
var guessedNumberOfTokens = this.hasCustom
? 0 // will break custom token pattern APIs the matchedTokens array will contain undefined elements.
: Math.floor(text.length / 10);
const matchedTokens = new Array(guessedNumberOfTokens);
const errors = [];
let line = this.trackStartLines ? 1 : undefined;
let column = this.trackStartLines ? 1 : undefined;
const groups = (0, lexer_1.cloneEmptyGroups)(this.emptyGroups);
const trackLines = this.trackStartLines;
const lineTerminatorPattern = this.config.lineTerminatorsPattern;
let currModePatternsLength = 0;
let patternIdxToConfig = [];
let currCharCodeToPatternIdxToConfig = [];
const modeStack = [];
const emptyArray = [];
var matchedTokens = new Array(guessedNumberOfTokens);
var errors = [];
var line = this.trackStartLines ? 1 : undefined;
var column = this.trackStartLines ? 1 : undefined;
var groups = (0, lexer_1.cloneEmptyGroups)(this.emptyGroups);
var trackLines = this.trackStartLines;
var lineTerminatorPattern = this.config.lineTerminatorsPattern;
var currModePatternsLength = 0;
var patternIdxToConfig = [];
var currCharCodeToPatternIdxToConfig = [];
var modeStack = [];
var emptyArray = [];
Object.freeze(emptyArray);
let getPossiblePatterns;
var getPossiblePatterns;
function getPossiblePatternsSlow() {

@@ -336,4 +340,4 @@ return patternIdxToConfig;

function getPossiblePatternsOptimized(charCode) {
const optimizedCharIdx = (0, lexer_1.charCodeToOptimizedIndex)(charCode);
const possiblePatterns = currCharCodeToPatternIdxToConfig[optimizedCharIdx];
var optimizedCharIdx = (0, lexer_1.charCodeToOptimizedIndex)(charCode);
var possiblePatterns = currCharCodeToPatternIdxToConfig[optimizedCharIdx];
if (possiblePatterns === undefined) {

@@ -346,3 +350,3 @@ return emptyArray;

}
const pop_mode = (popToken) => {
var pop_mode = function (popToken) {
// TODO: perhaps avoid this error in the edge case there is no more input?

@@ -355,3 +359,3 @@ if (modeStack.length === 1 &&

// thus the pop is ignored, an error will be created and the lexer will continue parsing in the previous mode.
const msg = this.config.errorMessageProvider.buildUnableToPopLexerModeMessage(popToken);
var msg_1 = _this.config.errorMessageProvider.buildUnableToPopLexerModeMessage(popToken);
errors.push({

@@ -362,3 +366,3 @@ offset: popToken.startOffset,

length: popToken.image.length,
message: msg
message: msg_1
});

@@ -368,8 +372,8 @@ }

modeStack.pop();
const newMode = (0, last_1.default)(modeStack);
patternIdxToConfig = this.patternIdxToConfig[newMode];
var newMode = (0, last_1.default)(modeStack);
patternIdxToConfig = _this.patternIdxToConfig[newMode];
currCharCodeToPatternIdxToConfig =
this.charCodeToPatternIdxToConfig[newMode];
_this.charCodeToPatternIdxToConfig[newMode];
currModePatternsLength = patternIdxToConfig.length;
const modeCanBeOptimized = this.canModeBeOptimized[newMode] && this.config.safeMode === false;
var modeCanBeOptimized = _this.canModeBeOptimized[newMode] && _this.config.safeMode === false;
if (currCharCodeToPatternIdxToConfig && modeCanBeOptimized) {

@@ -390,3 +394,3 @@ getPossiblePatterns = getPossiblePatternsOptimized;

currModePatternsLength = patternIdxToConfig.length;
const modeCanBeOptimized = this.canModeBeOptimized[newMode] && this.config.safeMode === false;
var modeCanBeOptimized = this.canModeBeOptimized[newMode] && this.config.safeMode === false;
if (currCharCodeToPatternIdxToConfig && modeCanBeOptimized) {

@@ -402,15 +406,15 @@ getPossiblePatterns = getPossiblePatternsOptimized;

push_mode.call(this, initialMode);
let currConfig;
const recoveryEnabled = this.config.recoveryEnabled;
var currConfig;
var recoveryEnabled = this.config.recoveryEnabled;
while (offset < orgLength) {
matchedImage = null;
const nextCharCode = orgText.charCodeAt(offset);
const chosenPatternIdxToConfig = getPossiblePatterns(nextCharCode);
const chosenPatternsLength = chosenPatternIdxToConfig.length;
var nextCharCode = orgText.charCodeAt(offset);
var chosenPatternIdxToConfig = getPossiblePatterns(nextCharCode);
var chosenPatternsLength = chosenPatternIdxToConfig.length;
for (i = 0; i < chosenPatternsLength; i++) {
currConfig = chosenPatternIdxToConfig[i];
const currPattern = currConfig.pattern;
var currPattern = currConfig.pattern;
payload = null;
// manually in-lined because > 600 chars won't be in-lined in V8
const singleCharCode = currConfig.short;
var singleCharCode = currConfig.short;
if (singleCharCode !== false) {

@@ -445,6 +449,6 @@ if (nextCharCode === singleCharCode) {

// by saving/linking longerAlt on the original config?
const longerAltLength = longerAlt.length;
var longerAltLength = longerAlt.length;
for (k = 0; k < longerAltLength; k++) {
const longerAltConfig = patternIdxToConfig[longerAlt[k]];
const longerAltPattern = longerAltConfig.pattern;
var longerAltConfig = patternIdxToConfig[longerAlt[k]];
var longerAltPattern = longerAltConfig.pattern;
altPayload = null;

@@ -505,5 +509,5 @@ // single Char can never be a longer alt so no need to test it.

if (trackLines === true && currConfig.canLineTerminator === true) {
let numOfLTsInMatch = 0;
let foundTerminator;
let lastLTEndOffset;
var numOfLTsInMatch = 0;
var foundTerminator = void 0;
var lastLTEndOffset = void 0;
lineTerminatorPattern.lastIndex = 0;

@@ -528,6 +532,6 @@ do {

// error recovery, drop characters until we identify a valid token's start point
const errorStartOffset = offset;
const errorLine = line;
const errorColumn = column;
let foundResyncPoint = recoveryEnabled === false;
var errorStartOffset = offset;
var errorLine = line;
var errorColumn = column;
var foundResyncPoint = recoveryEnabled === false;
while (foundResyncPoint === false && offset < orgLength) {

@@ -538,6 +542,6 @@ // Identity Func (when sticky flag is enabled)

for (j = 0; j < currModePatternsLength; j++) {
const currConfig = patternIdxToConfig[j];
const currPattern = currConfig.pattern;
var currConfig_1 = patternIdxToConfig[j];
var currPattern = currConfig_1.pattern;
// manually in-lined because > 600 chars won't be in-lined in V8
const singleCharCode = currConfig.short;
var singleCharCode = currConfig_1.short;
if (singleCharCode !== false) {

@@ -549,3 +553,3 @@ if (orgText.charCodeAt(offset) === singleCharCode) {

}
else if (currConfig.isCustom === true) {
else if (currConfig_1.isCustom === true) {
foundResyncPoint =

@@ -589,8 +593,8 @@ currPattern.exec(orgText, offset, matchedTokens, groups) !== null;

};
}
handleModes(config, pop_mode, push_mode, newToken) {
};
Lexer.prototype.handleModes = function (config, pop_mode, push_mode, newToken) {
if (config.pop === true) {
// need to save the PUSH_MODE property as if the mode is popped
// patternIdxToPopMode is updated to reflect the new mode after popping the stack
const pushMode = config.push;
var pushMode = config.push;
pop_mode(newToken);

@@ -604,12 +608,12 @@ if (pushMode !== undefined) {

}
}
chopInput(text, length) {
};
Lexer.prototype.chopInput = function (text, length) {
return text.substring(length);
}
updateLastIndex(regExp, newLastIndex) {
};
Lexer.prototype.updateLastIndex = function (regExp, newLastIndex) {
regExp.lastIndex = newLastIndex;
}
};
// TODO: decrease this under 600 characters? inspect stripping comments option in TSC compiler
updateTokenEndLineColumnLocation(newToken, group, lastLTIdx, numOfLTsInMatch, line, column, imageLength) {
let lastCharIsLT, fixForEndingInLT;
Lexer.prototype.updateTokenEndLineColumnLocation = function (newToken, group, lastLTIdx, numOfLTsInMatch, line, column, imageLength) {
var lastCharIsLT, fixForEndingInLT;
if (group !== undefined) {

@@ -628,54 +632,54 @@ // a none skipped multi line Token, need to update endLine/endColumn

}
}
computeNewColumn(oldColumn, imageLength) {
};
Lexer.prototype.computeNewColumn = function (oldColumn, imageLength) {
return oldColumn + imageLength;
}
createOffsetOnlyToken(image, startOffset, tokenTypeIdx, tokenType) {
};
Lexer.prototype.createOffsetOnlyToken = function (image, startOffset, tokenTypeIdx, tokenType) {
return {
image,
startOffset,
tokenTypeIdx,
tokenType
image: image,
startOffset: startOffset,
tokenTypeIdx: tokenTypeIdx,
tokenType: tokenType
};
}
createStartOnlyToken(image, startOffset, tokenTypeIdx, tokenType, startLine, startColumn) {
};
Lexer.prototype.createStartOnlyToken = function (image, startOffset, tokenTypeIdx, tokenType, startLine, startColumn) {
return {
image,
startOffset,
startLine,
startColumn,
tokenTypeIdx,
tokenType
image: image,
startOffset: startOffset,
startLine: startLine,
startColumn: startColumn,
tokenTypeIdx: tokenTypeIdx,
tokenType: tokenType
};
}
createFullToken(image, startOffset, tokenTypeIdx, tokenType, startLine, startColumn, imageLength) {
};
Lexer.prototype.createFullToken = function (image, startOffset, tokenTypeIdx, tokenType, startLine, startColumn, imageLength) {
return {
image,
startOffset,
image: image,
startOffset: startOffset,
endOffset: startOffset + imageLength - 1,
startLine,
startLine: startLine,
endLine: startLine,
startColumn,
startColumn: startColumn,
endColumn: startColumn + imageLength - 1,
tokenTypeIdx,
tokenType
tokenTypeIdx: tokenTypeIdx,
tokenType: tokenType
};
}
addTokenUsingPush(tokenVector, index, tokenToAdd) {
};
Lexer.prototype.addTokenUsingPush = function (tokenVector, index, tokenToAdd) {
tokenVector.push(tokenToAdd);
return index;
}
addTokenUsingMemberAccess(tokenVector, index, tokenToAdd) {
};
Lexer.prototype.addTokenUsingMemberAccess = function (tokenVector, index, tokenToAdd) {
tokenVector[index] = tokenToAdd;
index++;
return index;
}
handlePayloadNoCustom(token, payload) { }
handlePayloadWithCustom(token, payload) {
};
Lexer.prototype.handlePayloadNoCustom = function (token, payload) { };
Lexer.prototype.handlePayloadWithCustom = function (token, payload) {
if (payload !== null) {
token.payload = payload;
}
}
matchWithTest(pattern, text, offset) {
const found = pattern.test(text);
};
Lexer.prototype.matchWithTest = function (pattern, text, offset) {
var found = pattern.test(text);
if (found === true) {

@@ -685,12 +689,13 @@ return text.substring(offset, pattern.lastIndex);

return null;
}
matchWithExec(pattern, text) {
const regExpArray = pattern.exec(text);
};
Lexer.prototype.matchWithExec = function (pattern, text) {
var regExpArray = pattern.exec(text);
return regExpArray !== null ? regExpArray[0] : null;
}
}
};
Lexer.SKIPPED = "This marks a skipped Token pattern, this means each token identified by it will" +
"be consumed and then thrown into oblivion, this can be used to for example to completely ignore whitespace.";
Lexer.NA = /NOT_APPLICABLE/;
return Lexer;
}());
exports.Lexer = Lexer;
Lexer.SKIPPED = "This marks a skipped Token pattern, this means each token identified by it will" +
"be consumed and then thrown into oblivion, this can be used to for example to completely ignore whitespace.";
Lexer.NA = /NOT_APPLICABLE/;
//# sourceMappingURL=lexer_public.js.map
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -7,30 +22,30 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

exports.charCodeToOptimizedIndex = exports.minOptimizationVal = exports.buildLineBreakIssueMessage = exports.LineTerminatorOptimizedTester = exports.isShortPattern = exports.isCustomPattern = exports.cloneEmptyGroups = exports.performWarningRuntimeChecks = exports.performRuntimeChecks = exports.addStickyFlag = exports.addStartOfInput = exports.findUnreachablePatterns = exports.findModesThatDoNotExist = exports.findInvalidGroupType = exports.findDuplicatePatterns = exports.findUnsupportedFlags = exports.findStartOfInputAnchor = exports.findEmptyMatchRegExps = exports.findEndOfInputAnchor = exports.findInvalidPatterns = exports.findMissingPatterns = exports.validatePatterns = exports.analyzeTokenTypes = exports.enableSticky = exports.disableSticky = exports.SUPPORT_STICKY = exports.MODES = exports.DEFAULT_MODE = void 0;
const regexp_to_ast_1 = require("regexp-to-ast");
const lexer_public_1 = require("./lexer_public");
const first_1 = __importDefault(require("lodash/first"));
const isEmpty_1 = __importDefault(require("lodash/isEmpty"));
const compact_1 = __importDefault(require("lodash/compact"));
const isArray_1 = __importDefault(require("lodash/isArray"));
const values_1 = __importDefault(require("lodash/values"));
const flatten_1 = __importDefault(require("lodash/flatten"));
const reject_1 = __importDefault(require("lodash/reject"));
const difference_1 = __importDefault(require("lodash/difference"));
const indexOf_1 = __importDefault(require("lodash/indexOf"));
const map_1 = __importDefault(require("lodash/map"));
const forEach_1 = __importDefault(require("lodash/forEach"));
const isString_1 = __importDefault(require("lodash/isString"));
const isFunction_1 = __importDefault(require("lodash/isFunction"));
const isUndefined_1 = __importDefault(require("lodash/isUndefined"));
const find_1 = __importDefault(require("lodash/find"));
const has_1 = __importDefault(require("lodash/has"));
const keys_1 = __importDefault(require("lodash/keys"));
const isRegExp_1 = __importDefault(require("lodash/isRegExp"));
const filter_1 = __importDefault(require("lodash/filter"));
const defaults_1 = __importDefault(require("lodash/defaults"));
const reduce_1 = __importDefault(require("lodash/reduce"));
const includes_1 = __importDefault(require("lodash/includes"));
const utils_1 = require("@chevrotain/utils");
const reg_exp_1 = require("./reg_exp");
const reg_exp_parser_1 = require("./reg_exp_parser");
const PATTERN = "PATTERN";
var regexp_to_ast_1 = require("regexp-to-ast");
var lexer_public_1 = require("./lexer_public");
var first_1 = __importDefault(require("lodash/first"));
var isEmpty_1 = __importDefault(require("lodash/isEmpty"));
var compact_1 = __importDefault(require("lodash/compact"));
var isArray_1 = __importDefault(require("lodash/isArray"));
var values_1 = __importDefault(require("lodash/values"));
var flatten_1 = __importDefault(require("lodash/flatten"));
var reject_1 = __importDefault(require("lodash/reject"));
var difference_1 = __importDefault(require("lodash/difference"));
var indexOf_1 = __importDefault(require("lodash/indexOf"));
var map_1 = __importDefault(require("lodash/map"));
var forEach_1 = __importDefault(require("lodash/forEach"));
var isString_1 = __importDefault(require("lodash/isString"));
var isFunction_1 = __importDefault(require("lodash/isFunction"));
var isUndefined_1 = __importDefault(require("lodash/isUndefined"));
var find_1 = __importDefault(require("lodash/find"));
var has_1 = __importDefault(require("lodash/has"));
var keys_1 = __importDefault(require("lodash/keys"));
var isRegExp_1 = __importDefault(require("lodash/isRegExp"));
var filter_1 = __importDefault(require("lodash/filter"));
var defaults_1 = __importDefault(require("lodash/defaults"));
var reduce_1 = __importDefault(require("lodash/reduce"));
var includes_1 = __importDefault(require("lodash/includes"));
var utils_1 = require("@chevrotain/utils");
var reg_exp_1 = require("./reg_exp");
var reg_exp_parser_1 = require("./reg_exp_parser");
var PATTERN = "PATTERN";
exports.DEFAULT_MODE = "defaultMode";

@@ -54,23 +69,23 @@ exports.MODES = "modes";

lineTerminatorCharacters: ["\r", "\n"],
tracer: (msg, action) => action()
tracer: function (msg, action) { return action(); }
});
const tracer = options.tracer;
tracer("initCharCodeToOptimizedIndexMap", () => {
var tracer = options.tracer;
tracer("initCharCodeToOptimizedIndexMap", function () {
initCharCodeToOptimizedIndexMap();
});
let onlyRelevantTypes;
tracer("Reject Lexer.NA", () => {
onlyRelevantTypes = (0, reject_1.default)(tokenTypes, (currType) => {
var onlyRelevantTypes;
tracer("Reject Lexer.NA", function () {
onlyRelevantTypes = (0, reject_1.default)(tokenTypes, function (currType) {
return currType[PATTERN] === lexer_public_1.Lexer.NA;
});
});
let hasCustom = false;
let allTransformedPatterns;
tracer("Transform Patterns", () => {
var hasCustom = false;
var allTransformedPatterns;
tracer("Transform Patterns", function () {
hasCustom = false;
allTransformedPatterns = (0, map_1.default)(onlyRelevantTypes, (currType) => {
const currPattern = currType[PATTERN];
allTransformedPatterns = (0, map_1.default)(onlyRelevantTypes, function (currType) {
var currPattern = currType[PATTERN];
/* istanbul ignore else */
if ((0, isRegExp_1.default)(currPattern)) {
const regExpSource = currPattern.source;
var regExpSource = currPattern.source;
if (regExpSource.length === 1 &&

@@ -131,4 +146,4 @@ // only these regExp meta characters which can appear in a length one regExp

else {
const escapedRegExpString = currPattern.replace(/[\\^$.*+?()[\]{}|]/g, "\\$&");
const wrappedRegExp = new RegExp(escapedRegExpString);
var escapedRegExpString = currPattern.replace(/[\\^$.*+?()[\]{}|]/g, "\\$&");
var wrappedRegExp = new RegExp(escapedRegExpString);
return options.useSticky

@@ -144,11 +159,11 @@ ? addStickyFlag(wrappedRegExp)

});
let patternIdxToType;
let patternIdxToGroup;
let patternIdxToLongerAltIdxArr;
let patternIdxToPushMode;
let patternIdxToPopMode;
tracer("misc mapping", () => {
patternIdxToType = (0, map_1.default)(onlyRelevantTypes, (currType) => currType.tokenTypeIdx);
patternIdxToGroup = (0, map_1.default)(onlyRelevantTypes, (clazz) => {
const groupName = clazz.GROUP;
var patternIdxToType;
var patternIdxToGroup;
var patternIdxToLongerAltIdxArr;
var patternIdxToPushMode;
var patternIdxToPopMode;
tracer("misc mapping", function () {
patternIdxToType = (0, map_1.default)(onlyRelevantTypes, function (currType) { return currType.tokenTypeIdx; });
patternIdxToGroup = (0, map_1.default)(onlyRelevantTypes, function (clazz) {
var groupName = clazz.GROUP;
/* istanbul ignore next */

@@ -168,7 +183,7 @@ if (groupName === lexer_public_1.Lexer.SKIPPED) {

});
patternIdxToLongerAltIdxArr = (0, map_1.default)(onlyRelevantTypes, (clazz) => {
const longerAltType = clazz.LONGER_ALT;
patternIdxToLongerAltIdxArr = (0, map_1.default)(onlyRelevantTypes, function (clazz) {
var longerAltType = clazz.LONGER_ALT;
if (longerAltType) {
const longerAltIdxArr = (0, isArray_1.default)(longerAltType)
? (0, map_1.default)(longerAltType, (type) => (0, indexOf_1.default)(onlyRelevantTypes, type))
var longerAltIdxArr = (0, isArray_1.default)(longerAltType)
? (0, map_1.default)(longerAltType, function (type) { return (0, indexOf_1.default)(onlyRelevantTypes, type); })
: [(0, indexOf_1.default)(onlyRelevantTypes, longerAltType)];

@@ -178,11 +193,13 @@ return longerAltIdxArr;

});
patternIdxToPushMode = (0, map_1.default)(onlyRelevantTypes, (clazz) => clazz.PUSH_MODE);
patternIdxToPopMode = (0, map_1.default)(onlyRelevantTypes, (clazz) => (0, has_1.default)(clazz, "POP_MODE"));
patternIdxToPushMode = (0, map_1.default)(onlyRelevantTypes, function (clazz) { return clazz.PUSH_MODE; });
patternIdxToPopMode = (0, map_1.default)(onlyRelevantTypes, function (clazz) {
return (0, has_1.default)(clazz, "POP_MODE");
});
});
let patternIdxToCanLineTerminator;
tracer("Line Terminator Handling", () => {
const lineTerminatorCharCodes = getCharCodes(options.lineTerminatorCharacters);
patternIdxToCanLineTerminator = (0, map_1.default)(onlyRelevantTypes, (tokType) => false);
var patternIdxToCanLineTerminator;
tracer("Line Terminator Handling", function () {
var lineTerminatorCharCodes = getCharCodes(options.lineTerminatorCharacters);
patternIdxToCanLineTerminator = (0, map_1.default)(onlyRelevantTypes, function (tokType) { return false; });
if (options.positionTracking !== "onlyOffset") {
patternIdxToCanLineTerminator = (0, map_1.default)(onlyRelevantTypes, (tokType) => {
patternIdxToCanLineTerminator = (0, map_1.default)(onlyRelevantTypes, function (tokType) {
if ((0, has_1.default)(tokType, "LINE_BREAKS")) {

@@ -198,11 +215,11 @@ return !!tokType.LINE_BREAKS;

});
let patternIdxToIsCustom;
let patternIdxToShort;
let emptyGroups;
let patternIdxToConfig;
tracer("Misc Mapping #2", () => {
var patternIdxToIsCustom;
var patternIdxToShort;
var emptyGroups;
var patternIdxToConfig;
tracer("Misc Mapping #2", function () {
patternIdxToIsCustom = (0, map_1.default)(onlyRelevantTypes, isCustomPattern);
patternIdxToShort = (0, map_1.default)(allTransformedPatterns, isShortPattern);
emptyGroups = (0, reduce_1.default)(onlyRelevantTypes, (acc, clazz) => {
const groupName = clazz.GROUP;
emptyGroups = (0, reduce_1.default)(onlyRelevantTypes, function (acc, clazz) {
var groupName = clazz.GROUP;
if ((0, isString_1.default)(groupName) && !(groupName === lexer_public_1.Lexer.SKIPPED)) {

@@ -213,3 +230,3 @@ acc[groupName] = [];

}, {});
patternIdxToConfig = (0, map_1.default)(allTransformedPatterns, (x, idx) => {
patternIdxToConfig = (0, map_1.default)(allTransformedPatterns, function (x, idx) {
return {

@@ -229,19 +246,19 @@ pattern: allTransformedPatterns[idx],

});
let canBeOptimized = true;
let charCodeToPatternIdxToConfig = [];
var canBeOptimized = true;
var charCodeToPatternIdxToConfig = [];
if (!options.safeMode) {
tracer("First Char Optimization", () => {
charCodeToPatternIdxToConfig = (0, reduce_1.default)(onlyRelevantTypes, (result, currTokType, idx) => {
tracer("First Char Optimization", function () {
charCodeToPatternIdxToConfig = (0, reduce_1.default)(onlyRelevantTypes, function (result, currTokType, idx) {
if (typeof currTokType.PATTERN === "string") {
const charCode = currTokType.PATTERN.charCodeAt(0);
const optimizedIdx = charCodeToOptimizedIndex(charCode);
var charCode = currTokType.PATTERN.charCodeAt(0);
var optimizedIdx = charCodeToOptimizedIndex(charCode);
addToMapOfArrays(result, optimizedIdx, patternIdxToConfig[idx]);
}
else if ((0, isArray_1.default)(currTokType.START_CHARS_HINT)) {
let lastOptimizedIdx;
(0, forEach_1.default)(currTokType.START_CHARS_HINT, (charOrInt) => {
const charCode = typeof charOrInt === "string"
var lastOptimizedIdx_1;
(0, forEach_1.default)(currTokType.START_CHARS_HINT, function (charOrInt) {
var charCode = typeof charOrInt === "string"
? charOrInt.charCodeAt(0)
: charOrInt;
const currOptimizedIdx = charCodeToOptimizedIndex(charCode);
var currOptimizedIdx = charCodeToOptimizedIndex(charCode);
// Avoid adding the config multiple times

@@ -251,4 +268,4 @@ /* istanbul ignore else */

// optimization that does not change correctness
if (lastOptimizedIdx !== currOptimizedIdx) {
lastOptimizedIdx = currOptimizedIdx;
if (lastOptimizedIdx_1 !== currOptimizedIdx) {
lastOptimizedIdx_1 = currOptimizedIdx;
addToMapOfArrays(result, currOptimizedIdx, patternIdxToConfig[idx]);

@@ -262,4 +279,4 @@ }

if (options.ensureOptimizations) {
(0, utils_1.PRINT_ERROR)(`${reg_exp_1.failedOptimizationPrefixMsg}` +
`\tUnable to analyze < ${currTokType.PATTERN.toString()} > pattern.\n` +
(0, utils_1.PRINT_ERROR)("".concat(reg_exp_1.failedOptimizationPrefixMsg) +
"\tUnable to analyze < ".concat(currTokType.PATTERN.toString(), " > pattern.\n") +
"\tThe regexp unicode flag is not currently supported by the regexp-to-ast library.\n" +

@@ -271,3 +288,3 @@ "\tThis will disable the lexer's first char optimizations.\n" +

else {
const optimizedCodes = (0, reg_exp_1.getOptimizedStartCodesIndices)(currTokType.PATTERN, options.ensureOptimizations);
var optimizedCodes = (0, reg_exp_1.getOptimizedStartCodesIndices)(currTokType.PATTERN, options.ensureOptimizations);
/* istanbul ignore if */

@@ -282,3 +299,3 @@ // start code will only be empty given an empty regExp or failure of regexp-to-ast library

}
(0, forEach_1.default)(optimizedCodes, (code) => {
(0, forEach_1.default)(optimizedCodes, function (code) {
addToMapOfArrays(result, code, patternIdxToConfig[idx]);

@@ -290,4 +307,4 @@ });

if (options.ensureOptimizations) {
(0, utils_1.PRINT_ERROR)(`${reg_exp_1.failedOptimizationPrefixMsg}` +
`\tTokenType: <${currTokType.name}> is using a custom token pattern without providing <start_chars_hint> parameter.\n` +
(0, utils_1.PRINT_ERROR)("".concat(reg_exp_1.failedOptimizationPrefixMsg) +
"\tTokenType: <".concat(currTokType.name, "> is using a custom token pattern without providing <start_chars_hint> parameter.\n") +
"\tThis will disable the lexer's first char optimizations.\n" +

@@ -312,7 +329,7 @@ "\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#CUSTOM_OPTIMIZE");

function validatePatterns(tokenTypes, validModesNames) {
let errors = [];
const missingResult = findMissingPatterns(tokenTypes);
var errors = [];
var missingResult = findMissingPatterns(tokenTypes);
errors = errors.concat(missingResult.errors);
const invalidResult = findInvalidPatterns(missingResult.valid);
const validTokenTypes = invalidResult.valid;
var invalidResult = findInvalidPatterns(missingResult.valid);
var validTokenTypes = invalidResult.valid;
errors = errors.concat(invalidResult.errors);

@@ -327,4 +344,6 @@ errors = errors.concat(validateRegExpPattern(validTokenTypes));

function validateRegExpPattern(tokenTypes) {
let errors = [];
const withRegExpPatterns = (0, filter_1.default)(tokenTypes, (currTokType) => (0, isRegExp_1.default)(currTokType[PATTERN]));
var errors = [];
var withRegExpPatterns = (0, filter_1.default)(tokenTypes, function (currTokType) {
return (0, isRegExp_1.default)(currTokType[PATTERN]);
});
errors = errors.concat(findEndOfInputAnchor(withRegExpPatterns));

@@ -338,6 +357,6 @@ errors = errors.concat(findStartOfInputAnchor(withRegExpPatterns));

function findMissingPatterns(tokenTypes) {
const tokenTypesWithMissingPattern = (0, filter_1.default)(tokenTypes, (currType) => {
var tokenTypesWithMissingPattern = (0, filter_1.default)(tokenTypes, function (currType) {
return !(0, has_1.default)(currType, PATTERN);
});
const errors = (0, map_1.default)(tokenTypesWithMissingPattern, (currType) => {
var errors = (0, map_1.default)(tokenTypesWithMissingPattern, function (currType) {
return {

@@ -351,9 +370,9 @@ message: "Token Type: ->" +

});
const valid = (0, difference_1.default)(tokenTypes, tokenTypesWithMissingPattern);
return { errors, valid };
var valid = (0, difference_1.default)(tokenTypes, tokenTypesWithMissingPattern);
return { errors: errors, valid: valid };
}
exports.findMissingPatterns = findMissingPatterns;
function findInvalidPatterns(tokenTypes) {
const tokenTypesWithInvalidPattern = (0, filter_1.default)(tokenTypes, (currType) => {
const pattern = currType[PATTERN];
var tokenTypesWithInvalidPattern = (0, filter_1.default)(tokenTypes, function (currType) {
var pattern = currType[PATTERN];
return (!(0, isRegExp_1.default)(pattern) &&

@@ -364,3 +383,3 @@ !(0, isFunction_1.default)(pattern) &&

});
const errors = (0, map_1.default)(tokenTypesWithInvalidPattern, (currType) => {
var errors = (0, map_1.default)(tokenTypesWithInvalidPattern, function (currType) {
return {

@@ -375,22 +394,25 @@ message: "Token Type: ->" +

});
const valid = (0, difference_1.default)(tokenTypes, tokenTypesWithInvalidPattern);
return { errors, valid };
var valid = (0, difference_1.default)(tokenTypes, tokenTypesWithInvalidPattern);
return { errors: errors, valid: valid };
}
exports.findInvalidPatterns = findInvalidPatterns;
const end_of_input = /[^\\][$]/;
var end_of_input = /[^\\][$]/;
function findEndOfInputAnchor(tokenTypes) {
class EndAnchorFinder extends regexp_to_ast_1.BaseRegExpVisitor {
constructor() {
super(...arguments);
this.found = false;
var EndAnchorFinder = /** @class */ (function (_super) {
__extends(EndAnchorFinder, _super);
function EndAnchorFinder() {
var _this = _super !== null && _super.apply(this, arguments) || this;
_this.found = false;
return _this;
}
visitEndAnchor(node) {
EndAnchorFinder.prototype.visitEndAnchor = function (node) {
this.found = true;
}
}
const invalidRegex = (0, filter_1.default)(tokenTypes, (currType) => {
const pattern = currType.PATTERN;
};
return EndAnchorFinder;
}(regexp_to_ast_1.BaseRegExpVisitor));
var invalidRegex = (0, filter_1.default)(tokenTypes, function (currType) {
var pattern = currType.PATTERN;
try {
const regexpAst = (0, reg_exp_parser_1.getRegExpAst)(pattern);
const endAnchorVisitor = new EndAnchorFinder();
var regexpAst = (0, reg_exp_parser_1.getRegExpAst)(pattern);
var endAnchorVisitor = new EndAnchorFinder();
endAnchorVisitor.visit(regexpAst);

@@ -405,3 +427,3 @@ return endAnchorVisitor.found;

});
const errors = (0, map_1.default)(invalidRegex, (currType) => {
var errors = (0, map_1.default)(invalidRegex, function (currType) {
return {

@@ -422,7 +444,7 @@ message: "Unexpected RegExp Anchor Error:\n" +

function findEmptyMatchRegExps(tokenTypes) {
const matchesEmptyString = (0, filter_1.default)(tokenTypes, (currType) => {
const pattern = currType.PATTERN;
var matchesEmptyString = (0, filter_1.default)(tokenTypes, function (currType) {
var pattern = currType.PATTERN;
return pattern.test("");
});
const errors = (0, map_1.default)(matchesEmptyString, (currType) => {
var errors = (0, map_1.default)(matchesEmptyString, function (currType) {
return {

@@ -439,18 +461,21 @@ message: "Token Type: ->" +

exports.findEmptyMatchRegExps = findEmptyMatchRegExps;
const start_of_input = /[^\\[][\^]|^\^/;
var start_of_input = /[^\\[][\^]|^\^/;
function findStartOfInputAnchor(tokenTypes) {
class StartAnchorFinder extends regexp_to_ast_1.BaseRegExpVisitor {
constructor() {
super(...arguments);
this.found = false;
var StartAnchorFinder = /** @class */ (function (_super) {
__extends(StartAnchorFinder, _super);
function StartAnchorFinder() {
var _this = _super !== null && _super.apply(this, arguments) || this;
_this.found = false;
return _this;
}
visitStartAnchor(node) {
StartAnchorFinder.prototype.visitStartAnchor = function (node) {
this.found = true;
}
}
const invalidRegex = (0, filter_1.default)(tokenTypes, (currType) => {
const pattern = currType.PATTERN;
};
return StartAnchorFinder;
}(regexp_to_ast_1.BaseRegExpVisitor));
var invalidRegex = (0, filter_1.default)(tokenTypes, function (currType) {
var pattern = currType.PATTERN;
try {
const regexpAst = (0, reg_exp_parser_1.getRegExpAst)(pattern);
const startAnchorVisitor = new StartAnchorFinder();
var regexpAst = (0, reg_exp_parser_1.getRegExpAst)(pattern);
var startAnchorVisitor = new StartAnchorFinder();
startAnchorVisitor.visit(regexpAst);

@@ -465,3 +490,3 @@ return startAnchorVisitor.found;

});
const errors = (0, map_1.default)(invalidRegex, (currType) => {
var errors = (0, map_1.default)(invalidRegex, function (currType) {
return {

@@ -482,7 +507,7 @@ message: "Unexpected RegExp Anchor Error:\n" +

function findUnsupportedFlags(tokenTypes) {
const invalidFlags = (0, filter_1.default)(tokenTypes, (currType) => {
const pattern = currType[PATTERN];
var invalidFlags = (0, filter_1.default)(tokenTypes, function (currType) {
var pattern = currType[PATTERN];
return pattern instanceof RegExp && (pattern.multiline || pattern.global);
});
const errors = (0, map_1.default)(invalidFlags, (currType) => {
var errors = (0, map_1.default)(invalidFlags, function (currType) {
return {

@@ -501,5 +526,5 @@ message: "Token Type: ->" +

function findDuplicatePatterns(tokenTypes) {
const found = [];
let identicalPatterns = (0, map_1.default)(tokenTypes, (outerType) => {
return (0, reduce_1.default)(tokenTypes, (result, innerType) => {
var found = [];
var identicalPatterns = (0, map_1.default)(tokenTypes, function (outerType) {
return (0, reduce_1.default)(tokenTypes, function (result, innerType) {
if (outerType.PATTERN.source === innerType.PATTERN.source &&

@@ -518,13 +543,13 @@ !(0, includes_1.default)(found, innerType) &&

identicalPatterns = (0, compact_1.default)(identicalPatterns);
const duplicatePatterns = (0, filter_1.default)(identicalPatterns, (currIdenticalSet) => {
var duplicatePatterns = (0, filter_1.default)(identicalPatterns, function (currIdenticalSet) {
return currIdenticalSet.length > 1;
});
const errors = (0, map_1.default)(duplicatePatterns, (setOfIdentical) => {
const tokenTypeNames = (0, map_1.default)(setOfIdentical, (currType) => {
var errors = (0, map_1.default)(duplicatePatterns, function (setOfIdentical) {
var tokenTypeNames = (0, map_1.default)(setOfIdentical, function (currType) {
return currType.name;
});
const dupPatternSrc = (0, first_1.default)(setOfIdentical).PATTERN;
var dupPatternSrc = (0, first_1.default)(setOfIdentical).PATTERN;
return {
message: `The same RegExp pattern ->${dupPatternSrc}<-` +
`has been used in all of the following Token Types: ${tokenTypeNames.join(", ")} <-`,
message: "The same RegExp pattern ->".concat(dupPatternSrc, "<-") +
"has been used in all of the following Token Types: ".concat(tokenTypeNames.join(", "), " <-"),
type: lexer_public_1.LexerDefinitionErrorType.DUPLICATE_PATTERNS_FOUND,

@@ -538,10 +563,10 @@ tokenTypes: setOfIdentical

function findInvalidGroupType(tokenTypes) {
const invalidTypes = (0, filter_1.default)(tokenTypes, (clazz) => {
var invalidTypes = (0, filter_1.default)(tokenTypes, function (clazz) {
if (!(0, has_1.default)(clazz, "GROUP")) {
return false;
}
const group = clazz.GROUP;
var group = clazz.GROUP;
return group !== lexer_public_1.Lexer.SKIPPED && group !== lexer_public_1.Lexer.NA && !(0, isString_1.default)(group);
});
const errors = (0, map_1.default)(invalidTypes, (currType) => {
var errors = (0, map_1.default)(invalidTypes, function (currType) {
return {

@@ -559,8 +584,8 @@ message: "Token Type: ->" +

function findModesThatDoNotExist(tokenTypes, validModes) {
const invalidModes = (0, filter_1.default)(tokenTypes, (clazz) => {
var invalidModes = (0, filter_1.default)(tokenTypes, function (clazz) {
return (clazz.PUSH_MODE !== undefined && !(0, includes_1.default)(validModes, clazz.PUSH_MODE));
});
const errors = (0, map_1.default)(invalidModes, (tokType) => {
const msg = `Token Type: ->${tokType.name}<- static 'PUSH_MODE' value cannot refer to a Lexer Mode ->${tokType.PUSH_MODE}<-` +
`which does not exist`;
var errors = (0, map_1.default)(invalidModes, function (tokType) {
var msg = "Token Type: ->".concat(tokType.name, "<- static 'PUSH_MODE' value cannot refer to a Lexer Mode ->").concat(tokType.PUSH_MODE, "<-") +
"which does not exist";
return {

@@ -576,5 +601,5 @@ message: msg,

function findUnreachablePatterns(tokenTypes) {
const errors = [];
const canBeTested = (0, reduce_1.default)(tokenTypes, (result, tokType, idx) => {
const pattern = tokType.PATTERN;
var errors = [];
var canBeTested = (0, reduce_1.default)(tokenTypes, function (result, tokType, idx) {
var pattern = tokType.PATTERN;
if (pattern === lexer_public_1.Lexer.NA) {

@@ -586,16 +611,17 @@ return result;

if ((0, isString_1.default)(pattern)) {
result.push({ str: pattern, idx, tokenType: tokType });
result.push({ str: pattern, idx: idx, tokenType: tokType });
}
else if ((0, isRegExp_1.default)(pattern) && noMetaChar(pattern)) {
result.push({ str: pattern.source, idx, tokenType: tokType });
result.push({ str: pattern.source, idx: idx, tokenType: tokType });
}
return result;
}, []);
(0, forEach_1.default)(tokenTypes, (tokType, testIdx) => {
(0, forEach_1.default)(canBeTested, ({ str, idx, tokenType }) => {
(0, forEach_1.default)(tokenTypes, function (tokType, testIdx) {
(0, forEach_1.default)(canBeTested, function (_a) {
var str = _a.str, idx = _a.idx, tokenType = _a.tokenType;
if (testIdx < idx && testTokenType(str, tokType.PATTERN)) {
const msg = `Token: ->${tokenType.name}<- can never be matched.\n` +
`Because it appears AFTER the Token Type ->${tokType.name}<-` +
`in the lexer's definition.\n` +
`See https://chevrotain.io/docs/guide/resolving_lexer_errors.html#UNREACHABLE`;
var msg = "Token: ->".concat(tokenType.name, "<- can never be matched.\n") +
"Because it appears AFTER the Token Type ->".concat(tokType.name, "<-") +
"in the lexer's definition.\n" +
"See https://chevrotain.io/docs/guide/resolving_lexer_errors.html#UNREACHABLE";
errors.push({

@@ -615,3 +641,3 @@ message: msg,

if ((0, isRegExp_1.default)(pattern)) {
const regExpArray = pattern.exec(str);
var regExpArray = pattern.exec(str);
return regExpArray !== null && regExpArray.index === 0;

@@ -636,3 +662,3 @@ }

//https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp
const metaChars = [
var metaChars = [
".",

@@ -652,20 +678,20 @@ "\\",

];
return ((0, find_1.default)(metaChars, (char) => regExp.source.indexOf(char) !== -1) === undefined);
return ((0, find_1.default)(metaChars, function (char) { return regExp.source.indexOf(char) !== -1; }) === undefined);
}
function addStartOfInput(pattern) {
const flags = pattern.ignoreCase ? "i" : "";
var flags = pattern.ignoreCase ? "i" : "";
// always wrapping in a none capturing group preceded by '^' to make sure matching can only work on start of input.
// duplicate/redundant start of input markers have no meaning (/^^^^A/ === /^A/)
return new RegExp(`^(?:${pattern.source})`, flags);
return new RegExp("^(?:".concat(pattern.source, ")"), flags);
}
exports.addStartOfInput = addStartOfInput;
function addStickyFlag(pattern) {
const flags = pattern.ignoreCase ? "iy" : "y";
var flags = pattern.ignoreCase ? "iy" : "y";
// always wrapping in a none capturing group preceded by '^' to make sure matching can only work on start of input.
// duplicate/redundant start of input markers have no meaning (/^^^^A/ === /^A/)
return new RegExp(`${pattern.source}`, flags);
return new RegExp("".concat(pattern.source), flags);
}
exports.addStickyFlag = addStickyFlag;
function performRuntimeChecks(lexerDefinition, trackLines, lineTerminatorCharacters) {
const errors = [];
var errors = [];
// some run time checks to help the end users.

@@ -692,4 +718,4 @@ if (!(0, has_1.default)(lexerDefinition, exports.DEFAULT_MODE)) {

errors.push({
message: `A MultiMode Lexer cannot be initialized with a ${exports.DEFAULT_MODE}: <${lexerDefinition.defaultMode}>` +
`which does not exist\n`,
message: "A MultiMode Lexer cannot be initialized with a ".concat(exports.DEFAULT_MODE, ": <").concat(lexerDefinition.defaultMode, ">") +
"which does not exist\n",
type: lexer_public_1.LexerDefinitionErrorType.MULTI_MODE_LEXER_DEFAULT_MODE_VALUE_DOES_NOT_EXIST

@@ -699,8 +725,8 @@ });

if ((0, has_1.default)(lexerDefinition, exports.MODES)) {
(0, forEach_1.default)(lexerDefinition.modes, (currModeValue, currModeName) => {
(0, forEach_1.default)(currModeValue, (currTokType, currIdx) => {
(0, forEach_1.default)(lexerDefinition.modes, function (currModeValue, currModeName) {
(0, forEach_1.default)(currModeValue, function (currTokType, currIdx) {
if ((0, isUndefined_1.default)(currTokType)) {
errors.push({
message: `A Lexer cannot be initialized using an undefined Token Type. Mode:` +
`<${currModeName}> at index: <${currIdx}>\n`,
message: "A Lexer cannot be initialized using an undefined Token Type. Mode:" +
"<".concat(currModeName, "> at index: <").concat(currIdx, ">\n"),
type: lexer_public_1.LexerDefinitionErrorType.LEXER_DEFINITION_CANNOT_CONTAIN_UNDEFINED

@@ -710,10 +736,10 @@ });

else if ((0, has_1.default)(currTokType, "LONGER_ALT")) {
const longerAlt = (0, isArray_1.default)(currTokType.LONGER_ALT)
var longerAlt = (0, isArray_1.default)(currTokType.LONGER_ALT)
? currTokType.LONGER_ALT
: [currTokType.LONGER_ALT];
(0, forEach_1.default)(longerAlt, (currLongerAlt) => {
(0, forEach_1.default)(longerAlt, function (currLongerAlt) {
if (!(0, isUndefined_1.default)(currLongerAlt) &&
!(0, includes_1.default)(currModeValue, currLongerAlt)) {
errors.push({
message: `A MultiMode Lexer cannot be initialized with a longer_alt <${currLongerAlt.name}> on token <${currTokType.name}> outside of mode <${currModeName}>\n`,
message: "A MultiMode Lexer cannot be initialized with a longer_alt <".concat(currLongerAlt.name, "> on token <").concat(currTokType.name, "> outside of mode <").concat(currModeName, ">\n"),
type: lexer_public_1.LexerDefinitionErrorType.MULTI_MODE_LEXER_LONGER_ALT_NOT_IN_CURRENT_MODE

@@ -731,14 +757,14 @@ });

function performWarningRuntimeChecks(lexerDefinition, trackLines, lineTerminatorCharacters) {
const warnings = [];
let hasAnyLineBreak = false;
const allTokenTypes = (0, compact_1.default)((0, flatten_1.default)((0, values_1.default)(lexerDefinition.modes)));
const concreteTokenTypes = (0, reject_1.default)(allTokenTypes, (currType) => currType[PATTERN] === lexer_public_1.Lexer.NA);
const terminatorCharCodes = getCharCodes(lineTerminatorCharacters);
var warnings = [];
var hasAnyLineBreak = false;
var allTokenTypes = (0, compact_1.default)((0, flatten_1.default)((0, values_1.default)(lexerDefinition.modes)));
var concreteTokenTypes = (0, reject_1.default)(allTokenTypes, function (currType) { return currType[PATTERN] === lexer_public_1.Lexer.NA; });
var terminatorCharCodes = getCharCodes(lineTerminatorCharacters);
if (trackLines) {
(0, forEach_1.default)(concreteTokenTypes, (tokType) => {
const currIssue = checkLineBreaksIssues(tokType, terminatorCharCodes);
(0, forEach_1.default)(concreteTokenTypes, function (tokType) {
var currIssue = checkLineBreaksIssues(tokType, terminatorCharCodes);
if (currIssue !== false) {
const message = buildLineBreakIssueMessage(tokType, currIssue);
const warningDescriptor = {
message,
var message = buildLineBreakIssueMessage(tokType, currIssue);
var warningDescriptor = {
message: message,
type: currIssue.issue,

@@ -778,6 +804,6 @@ tokenType: tokType

function cloneEmptyGroups(emptyGroups) {
const clonedResult = {};
const groupKeys = (0, keys_1.default)(emptyGroups);
(0, forEach_1.default)(groupKeys, (currKey) => {
const currGroupValue = emptyGroups[currKey];
var clonedResult = {};
var groupKeys = (0, keys_1.default)(emptyGroups);
(0, forEach_1.default)(groupKeys, function (currKey) {
var currGroupValue = emptyGroups[currKey];
/* istanbul ignore else */

@@ -796,3 +822,3 @@ if ((0, isArray_1.default)(currGroupValue)) {

function isCustomPattern(tokenType) {
const pattern = tokenType.PATTERN;
var pattern = tokenType.PATTERN;
/* istanbul ignore else */

@@ -833,5 +859,5 @@ if ((0, isRegExp_1.default)(pattern)) {

test: function (text) {
const len = text.length;
for (let i = this.lastIndex; i < len; i++) {
const c = text.charCodeAt(i);
var len = text.length;
for (var i = this.lastIndex; i < len; i++) {
var c = text.charCodeAt(i);
if (c === 10) {

@@ -894,4 +920,4 @@ this.lastIndex = i + 1;

return ("Warning: unable to identify line terminator usage in pattern.\n" +
`\tThe problem is in the <${tokType.name}> Token Type\n` +
`\t Root cause: ${details.errMsg}.\n` +
"\tThe problem is in the <".concat(tokType.name, "> Token Type\n") +
"\t Root cause: ".concat(details.errMsg, ".\n") +
"\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#IDENTIFY_TERMINATOR");

@@ -901,3 +927,3 @@ }

return ("Warning: A Custom Token Pattern should specify the <line_breaks> option.\n" +
`\tThe problem is in the <${tokType.name}> Token Type\n` +
"\tThe problem is in the <".concat(tokType.name, "> Token Type\n") +
"\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#CUSTOM_LINE_BREAK");

@@ -911,3 +937,3 @@ }

function getCharCodes(charsOrCodes) {
const charCodes = (0, map_1.default)(charsOrCodes, (numOrString) => {
var charCodes = (0, map_1.default)(charsOrCodes, function (numOrString) {
if ((0, isString_1.default)(numOrString)) {

@@ -946,3 +972,3 @@ return numOrString.charCodeAt(0);

*/
let charCodeToOptimizedIdxMap = [];
var charCodeToOptimizedIdxMap = [];
function charCodeToOptimizedIndex(charCode) {

@@ -965,3 +991,3 @@ return charCode < exports.minOptimizationVal

charCodeToOptimizedIdxMap = new Array(65536);
for (let i = 0; i < 65536; i++) {
for (var i = 0; i < 65536; i++) {
charCodeToOptimizedIdxMap[i] = i > 255 ? 255 + ~~(i / 255) : i;

@@ -968,0 +994,0 @@ }

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.clearRegExpParserCache = exports.getRegExpAst = void 0;
const regexp_to_ast_1 = require("regexp-to-ast");
let regExpAstCache = {};
const regExpParser = new regexp_to_ast_1.RegExpParser();
var regexp_to_ast_1 = require("regexp-to-ast");
var regExpAstCache = {};
var regExpParser = new regexp_to_ast_1.RegExpParser();
function getRegExpAst(regExp) {
const regExpStr = regExp.toString();
var regExpStr = regExp.toString();
if (regExpAstCache.hasOwnProperty(regExpStr)) {

@@ -13,3 +13,3 @@ return regExpAstCache[regExpStr];

else {
const regExpAst = regExpParser.pattern(regExpStr);
var regExpAst = regExpParser.pattern(regExpStr);
regExpAstCache[regExpStr] = regExpAst;

@@ -16,0 +16,0 @@ return regExpAst;

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -7,18 +22,19 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

exports.canMatchCharCode = exports.firstCharOptimizedIndices = exports.getOptimizedStartCodesIndices = exports.failedOptimizationPrefixMsg = void 0;
const regexp_to_ast_1 = require("regexp-to-ast");
const isArray_1 = __importDefault(require("lodash/isArray"));
const every_1 = __importDefault(require("lodash/every"));
const forEach_1 = __importDefault(require("lodash/forEach"));
const find_1 = __importDefault(require("lodash/find"));
const values_1 = __importDefault(require("lodash/values"));
const includes_1 = __importDefault(require("lodash/includes"));
const utils_1 = require("@chevrotain/utils");
const reg_exp_parser_1 = require("./reg_exp_parser");
const lexer_1 = require("./lexer");
const complementErrorMessage = "Complement Sets are not supported for first char optimization";
var regexp_to_ast_1 = require("regexp-to-ast");
var isArray_1 = __importDefault(require("lodash/isArray"));
var every_1 = __importDefault(require("lodash/every"));
var forEach_1 = __importDefault(require("lodash/forEach"));
var find_1 = __importDefault(require("lodash/find"));
var values_1 = __importDefault(require("lodash/values"));
var includes_1 = __importDefault(require("lodash/includes"));
var utils_1 = require("@chevrotain/utils");
var reg_exp_parser_1 = require("./reg_exp_parser");
var lexer_1 = require("./lexer");
var complementErrorMessage = "Complement Sets are not supported for first char optimization";
exports.failedOptimizationPrefixMsg = 'Unable to use "first char" lexer optimizations:\n';
function getOptimizedStartCodesIndices(regExp, ensureOptimizations = false) {
function getOptimizedStartCodesIndices(regExp, ensureOptimizations) {
if (ensureOptimizations === void 0) { ensureOptimizations = false; }
try {
const ast = (0, reg_exp_parser_1.getRegExpAst)(regExp);
const firstChars = firstCharOptimizedIndices(ast.value, {}, ast.flags.ignoreCase);
var ast = (0, reg_exp_parser_1.getRegExpAst)(regExp);
var firstChars = firstCharOptimizedIndices(ast.value, {}, ast.flags.ignoreCase);
return firstChars;

@@ -32,4 +48,4 @@ }

if (ensureOptimizations) {
(0, utils_1.PRINT_WARNING)(`${exports.failedOptimizationPrefixMsg}` +
`\tUnable to optimize: < ${regExp.toString()} >\n` +
(0, utils_1.PRINT_WARNING)("".concat(exports.failedOptimizationPrefixMsg) +
"\tUnable to optimize: < ".concat(regExp.toString(), " >\n") +
"\tComplement Sets cannot be automatically optimized.\n" +

@@ -41,3 +57,3 @@ "\tThis will disable the lexer's first char optimizations.\n" +

else {
let msgSuffix = "";
var msgSuffix = "";
if (ensureOptimizations) {

@@ -48,5 +64,5 @@ msgSuffix =

}
(0, utils_1.PRINT_ERROR)(`${exports.failedOptimizationPrefixMsg}\n` +
`\tFailed parsing: < ${regExp.toString()} >\n` +
`\tUsing the regexp-to-ast library version: ${regexp_to_ast_1.VERSION}\n` +
(0, utils_1.PRINT_ERROR)("".concat(exports.failedOptimizationPrefixMsg, "\n") +
"\tFailed parsing: < ".concat(regExp.toString(), " >\n") +
"\tUsing the regexp-to-ast library version: ".concat(regexp_to_ast_1.VERSION, "\n") +
"\tPlease open an issue at: https://github.com/bd82/regexp-to-ast/issues" +

@@ -62,3 +78,3 @@ msgSuffix);

case "Disjunction":
for (let i = 0; i < ast.value.length; i++) {
for (var i = 0; i < ast.value.length; i++) {
firstCharOptimizedIndices(ast.value[i], result, ignoreCase);

@@ -68,5 +84,5 @@ }

case "Alternative":
const terms = ast.value;
for (let i = 0; i < terms.length; i++) {
const term = terms[i];
var terms = ast.value;
for (var i = 0; i < terms.length; i++) {
var term = terms[i];
// skip terms that cannot effect the first char results

@@ -87,3 +103,3 @@ switch (term.type) {

}
const atom = term;
var atom = term;
switch (atom.type) {

@@ -97,3 +113,3 @@ case "Character":

}
(0, forEach_1.default)(atom.value, (code) => {
(0, forEach_1.default)(atom.value, function (code) {
if (typeof code === "number") {

@@ -104,6 +120,6 @@ addOptimizedIdxToResult(code, result, ignoreCase);

// range
const range = code;
var range = code;
// cannot optimize when ignoreCase is
if (ignoreCase === true) {
for (let rangeCode = range.from; rangeCode <= range.to; rangeCode++) {
for (var rangeCode = range.from; rangeCode <= range.to; rangeCode++) {
addOptimizedIdxToResult(rangeCode, result, ignoreCase);

@@ -115,3 +131,3 @@ }

// handle unoptimized values
for (let rangeCode = range.from; rangeCode <= range.to && rangeCode < lexer_1.minOptimizationVal; rangeCode++) {
for (var rangeCode = range.from; rangeCode <= range.to && rangeCode < lexer_1.minOptimizationVal; rangeCode++) {
addOptimizedIdxToResult(rangeCode, result, ignoreCase);

@@ -121,9 +137,9 @@ }

if (range.to >= lexer_1.minOptimizationVal) {
const minUnOptVal = range.from >= lexer_1.minOptimizationVal
var minUnOptVal = range.from >= lexer_1.minOptimizationVal
? range.from
: lexer_1.minOptimizationVal;
const maxUnOptVal = range.to;
const minOptIdx = (0, lexer_1.charCodeToOptimizedIndex)(minUnOptVal);
const maxOptIdx = (0, lexer_1.charCodeToOptimizedIndex)(maxUnOptVal);
for (let currOptIdx = minOptIdx; currOptIdx <= maxOptIdx; currOptIdx++) {
var maxUnOptVal = range.to;
var minOptIdx = (0, lexer_1.charCodeToOptimizedIndex)(minUnOptVal);
var maxOptIdx = (0, lexer_1.charCodeToOptimizedIndex)(maxUnOptVal);
for (var currOptIdx = minOptIdx; currOptIdx <= maxOptIdx; currOptIdx++) {
result[currOptIdx] = currOptIdx;

@@ -144,3 +160,3 @@ }

// reached a mandatory production, no more **start** codes can be found on this alternative
const isOptionalQuantifier = atom.quantifier !== undefined && atom.quantifier.atLeast === 0;
var isOptionalQuantifier = atom.quantifier !== undefined && atom.quantifier.atLeast === 0;
if (

@@ -165,3 +181,3 @@ // A group may be optional due to empty contents /(?:)/

function addOptimizedIdxToResult(code, result, ignoreCase) {
const optimizedCharIdx = (0, lexer_1.charCodeToOptimizedIndex)(code);
var optimizedCharIdx = (0, lexer_1.charCodeToOptimizedIndex)(code);
result[optimizedCharIdx] = optimizedCharIdx;

@@ -173,13 +189,13 @@ if (ignoreCase === true) {

function handleIgnoreCase(code, result) {
const char = String.fromCharCode(code);
const upperChar = char.toUpperCase();
var char = String.fromCharCode(code);
var upperChar = char.toUpperCase();
/* istanbul ignore else */
if (upperChar !== char) {
const optimizedCharIdx = (0, lexer_1.charCodeToOptimizedIndex)(upperChar.charCodeAt(0));
var optimizedCharIdx = (0, lexer_1.charCodeToOptimizedIndex)(upperChar.charCodeAt(0));
result[optimizedCharIdx] = optimizedCharIdx;
}
else {
const lowerChar = char.toLowerCase();
var lowerChar = char.toLowerCase();
if (lowerChar !== char) {
const optimizedCharIdx = (0, lexer_1.charCodeToOptimizedIndex)(lowerChar.charCodeAt(0));
var optimizedCharIdx = (0, lexer_1.charCodeToOptimizedIndex)(lowerChar.charCodeAt(0));
result[optimizedCharIdx] = optimizedCharIdx;

@@ -190,3 +206,3 @@ }

function findCode(setNode, targetCharCodes) {
return (0, find_1.default)(setNode.value, (codeOrRange) => {
return (0, find_1.default)(setNode.value, function (codeOrRange) {
if (typeof codeOrRange === "number") {

@@ -197,4 +213,4 @@ return (0, includes_1.default)(targetCharCodes, codeOrRange);

// range
const range = codeOrRange;
return ((0, find_1.default)(targetCharCodes, (targetCode) => range.from <= targetCode && targetCode <= range.to) !== undefined);
var range_1 = codeOrRange;
return ((0, find_1.default)(targetCharCodes, function (targetCode) { return range_1.from <= targetCode && targetCode <= range_1.to; }) !== undefined);
}

@@ -204,3 +220,3 @@ });

function isWholeOptional(ast) {
const quantifier = ast.quantifier;
var quantifier = ast.quantifier;
if (quantifier && quantifier.atLeast === 0) {

@@ -216,9 +232,11 @@ return true;

}
class CharCodeFinder extends regexp_to_ast_1.BaseRegExpVisitor {
constructor(targetCharCodes) {
super();
this.targetCharCodes = targetCharCodes;
this.found = false;
var CharCodeFinder = /** @class */ (function (_super) {
__extends(CharCodeFinder, _super);
function CharCodeFinder(targetCharCodes) {
var _this = _super.call(this) || this;
_this.targetCharCodes = targetCharCodes;
_this.found = false;
return _this;
}
visitChildren(node) {
CharCodeFinder.prototype.visitChildren = function (node) {
// No need to keep looking...

@@ -238,10 +256,10 @@ if (this.found === true) {

}
super.visitChildren(node);
}
visitCharacter(node) {
_super.prototype.visitChildren.call(this, node);
};
CharCodeFinder.prototype.visitCharacter = function (node) {
if ((0, includes_1.default)(this.targetCharCodes, node.value)) {
this.found = true;
}
}
visitSet(node) {
};
CharCodeFinder.prototype.visitSet = function (node) {
if (node.complement) {

@@ -257,8 +275,9 @@ if (findCode(node, this.targetCharCodes) === undefined) {

}
}
}
};
return CharCodeFinder;
}(regexp_to_ast_1.BaseRegExpVisitor));
function canMatchCharCode(charCodes, pattern) {
if (pattern instanceof RegExp) {
const ast = (0, reg_exp_parser_1.getRegExpAst)(pattern);
const charCodeFinder = new CharCodeFinder(charCodes);
var ast = (0, reg_exp_parser_1.getRegExpAst)(pattern);
var charCodeFinder = new CharCodeFinder(charCodes);
charCodeFinder.visit(ast);

@@ -268,3 +287,3 @@ return charCodeFinder.found;

else {
return ((0, find_1.default)(pattern, (char) => {
return ((0, find_1.default)(pattern, function (char) {
return (0, includes_1.default)(charCodes, char.charCodeAt(0));

@@ -271,0 +290,0 @@ }) !== undefined);

@@ -7,7 +7,7 @@ "use strict";

exports.tokenMatcher = exports.createTokenInstance = exports.EOF = exports.createToken = exports.hasTokenLabel = exports.tokenName = exports.tokenLabel = void 0;
const isString_1 = __importDefault(require("lodash/isString"));
const has_1 = __importDefault(require("lodash/has"));
const isUndefined_1 = __importDefault(require("lodash/isUndefined"));
const lexer_public_1 = require("./lexer_public");
const tokens_1 = require("./tokens");
var isString_1 = __importDefault(require("lodash/isString"));
var has_1 = __importDefault(require("lodash/has"));
var isUndefined_1 = __importDefault(require("lodash/isUndefined"));
var lexer_public_1 = require("./lexer_public");
var tokens_1 = require("./tokens");
function tokenLabel(tokType) {

@@ -30,11 +30,11 @@ if (hasTokenLabel(tokType)) {

exports.hasTokenLabel = hasTokenLabel;
const PARENT = "parent";
const CATEGORIES = "categories";
const LABEL = "label";
const GROUP = "group";
const PUSH_MODE = "push_mode";
const POP_MODE = "pop_mode";
const LONGER_ALT = "longer_alt";
const LINE_BREAKS = "line_breaks";
const START_CHARS_HINT = "start_chars_hint";
var PARENT = "parent";
var CATEGORIES = "categories";
var LABEL = "label";
var GROUP = "group";
var PUSH_MODE = "push_mode";
var POP_MODE = "pop_mode";
var LONGER_ALT = "longer_alt";
var LINE_BREAKS = "line_breaks";
var START_CHARS_HINT = "start_chars_hint";
function createToken(config) {

@@ -45,4 +45,4 @@ return createTokenInternal(config);

function createTokenInternal(config) {
const pattern = config.pattern;
const tokenType = {};
var pattern = config.pattern;
var tokenType = {};
tokenType.name = config.name;

@@ -88,9 +88,9 @@ if (!(0, isUndefined_1.default)(pattern)) {

return {
image,
startOffset,
endOffset,
startLine,
endLine,
startColumn,
endColumn,
image: image,
startOffset: startOffset,
endOffset: endOffset,
startLine: startLine,
endLine: endLine,
startColumn: startColumn,
endColumn: endColumn,
tokenTypeIdx: tokType.tokenTypeIdx,

@@ -97,0 +97,0 @@ tokenType: tokType

@@ -7,14 +7,14 @@ "use strict";

exports.isTokenType = exports.hasExtendingTokensTypesMapProperty = exports.hasExtendingTokensTypesProperty = exports.hasCategoriesProperty = exports.hasShortKeyProperty = exports.singleAssignCategoriesToksMap = exports.assignCategoriesMapProp = exports.assignCategoriesTokensProp = exports.assignTokenDefaultProps = exports.expandCategories = exports.augmentTokenTypes = exports.tokenIdxToClass = exports.tokenShortNameIdx = exports.tokenStructuredMatcherNoCategories = exports.tokenStructuredMatcher = void 0;
const isEmpty_1 = __importDefault(require("lodash/isEmpty"));
const compact_1 = __importDefault(require("lodash/compact"));
const isArray_1 = __importDefault(require("lodash/isArray"));
const flatten_1 = __importDefault(require("lodash/flatten"));
const difference_1 = __importDefault(require("lodash/difference"));
const map_1 = __importDefault(require("lodash/map"));
const forEach_1 = __importDefault(require("lodash/forEach"));
const has_1 = __importDefault(require("lodash/has"));
const includes_1 = __importDefault(require("lodash/includes"));
const clone_1 = __importDefault(require("lodash/clone"));
var isEmpty_1 = __importDefault(require("lodash/isEmpty"));
var compact_1 = __importDefault(require("lodash/compact"));
var isArray_1 = __importDefault(require("lodash/isArray"));
var flatten_1 = __importDefault(require("lodash/flatten"));
var difference_1 = __importDefault(require("lodash/difference"));
var map_1 = __importDefault(require("lodash/map"));
var forEach_1 = __importDefault(require("lodash/forEach"));
var has_1 = __importDefault(require("lodash/has"));
var includes_1 = __importDefault(require("lodash/includes"));
var clone_1 = __importDefault(require("lodash/clone"));
function tokenStructuredMatcher(tokInstance, tokConstructor) {
const instanceType = tokInstance.tokenTypeIdx;
var instanceType = tokInstance.tokenTypeIdx;
if (instanceType === tokConstructor.tokenTypeIdx) {

@@ -39,3 +39,3 @@ return true;

// collect the parent Token Types as well.
const tokenTypesAndParents = expandCategories(tokenTypes);
var tokenTypesAndParents = expandCategories(tokenTypes);
// add required tokenType and categoryMatches properties

@@ -46,3 +46,3 @@ assignTokenDefaultProps(tokenTypesAndParents);

assignCategoriesTokensProp(tokenTypesAndParents);
(0, forEach_1.default)(tokenTypesAndParents, (tokType) => {
(0, forEach_1.default)(tokenTypesAndParents, function (tokType) {
tokType.isParent = tokType.categoryMatches.length > 0;

@@ -53,8 +53,8 @@ });

function expandCategories(tokenTypes) {
let result = (0, clone_1.default)(tokenTypes);
let categories = tokenTypes;
let searching = true;
var result = (0, clone_1.default)(tokenTypes);
var categories = tokenTypes;
var searching = true;
while (searching) {
categories = (0, compact_1.default)((0, flatten_1.default)((0, map_1.default)(categories, (currTokType) => currTokType.CATEGORIES)));
const newCategories = (0, difference_1.default)(categories, result);
categories = (0, compact_1.default)((0, flatten_1.default)((0, map_1.default)(categories, function (currTokType) { return currTokType.CATEGORIES; })));
var newCategories = (0, difference_1.default)(categories, result);
result = result.concat(newCategories);

@@ -72,3 +72,3 @@ if ((0, isEmpty_1.default)(newCategories)) {

function assignTokenDefaultProps(tokenTypes) {
(0, forEach_1.default)(tokenTypes, (currTokType) => {
(0, forEach_1.default)(tokenTypes, function (currTokType) {
if (!hasShortKeyProperty(currTokType)) {

@@ -99,6 +99,6 @@ exports.tokenIdxToClass[exports.tokenShortNameIdx] = currTokType;

function assignCategoriesTokensProp(tokenTypes) {
(0, forEach_1.default)(tokenTypes, (currTokType) => {
(0, forEach_1.default)(tokenTypes, function (currTokType) {
// avoid duplications
currTokType.categoryMatches = [];
(0, forEach_1.default)(currTokType.categoryMatchesMap, (val, key) => {
(0, forEach_1.default)(currTokType.categoryMatchesMap, function (val, key) {
currTokType.categoryMatches.push(exports.tokenIdxToClass[key].tokenTypeIdx);

@@ -110,3 +110,3 @@ });

function assignCategoriesMapProp(tokenTypes) {
(0, forEach_1.default)(tokenTypes, (currTokType) => {
(0, forEach_1.default)(tokenTypes, function (currTokType) {
singleAssignCategoriesToksMap([], currTokType);

@@ -117,7 +117,7 @@ });

function singleAssignCategoriesToksMap(path, nextNode) {
(0, forEach_1.default)(path, (pathNode) => {
(0, forEach_1.default)(path, function (pathNode) {
nextNode.categoryMatchesMap[pathNode.tokenTypeIdx] = true;
});
(0, forEach_1.default)(nextNode.CATEGORIES, (nextCategory) => {
const newPath = path.concat(nextNode);
(0, forEach_1.default)(nextNode.CATEGORIES, function (nextCategory) {
var newPath = path.concat(nextNode);
// avoids infinite loops due to cyclic categories.

@@ -124,0 +124,0 @@ if (!(0, includes_1.default)(newPath, nextCategory)) {

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isValidRange = exports.Range = void 0;
class Range {
constructor(start, end) {
var Range = /** @class */ (function () {
function Range(start, end) {
this.start = start;

@@ -12,18 +12,19 @@ this.end = end;

}
contains(num) {
Range.prototype.contains = function (num) {
return this.start <= num && this.end >= num;
}
containsRange(other) {
};
Range.prototype.containsRange = function (other) {
return this.start <= other.start && this.end >= other.end;
}
isContainedInRange(other) {
};
Range.prototype.isContainedInRange = function (other) {
return other.containsRange(this);
}
strictlyContainsRange(other) {
};
Range.prototype.strictlyContainsRange = function (other) {
return this.start < other.start && this.end > other.end;
}
isStrictlyContainedInRange(other) {
};
Range.prototype.isStrictlyContainedInRange = function (other) {
return other.strictlyContainsRange(this);
}
}
};
return Range;
}());
exports.Range = Range;

@@ -30,0 +31,0 @@ function isValidRange(start, end) {

@@ -7,3 +7,3 @@ "use strict";

// A separate file avoids cyclic dependencies and webpack errors.
exports.VERSION = "10.2.0";
exports.VERSION = "10.3.0";
//# sourceMappingURL=version.js.map
{
"name": "chevrotain",
"version": "10.2.0",
"version": "10.3.0",
"description": "Chevrotain is a high performance fault tolerant javascript parsing DSL for building recursive decent parsers",

@@ -75,6 +75,6 @@ "keywords": [

"dependencies": {
"@chevrotain/cst-dts-gen": "^10.2.0",
"@chevrotain/gast": "^10.2.0",
"@chevrotain/types": "^10.2.0",
"@chevrotain/utils": "^10.2.0",
"@chevrotain/cst-dts-gen": "10.3.0",
"@chevrotain/gast": "10.3.0",
"@chevrotain/types": "10.3.0",
"@chevrotain/utils": "10.3.0",
"lodash": "4.17.21",

@@ -97,3 +97,3 @@ "regexp-to-ast": "0.5.0"

},
"gitHead": "f9c92d8ec45c9236abb53091380682d4b32f8207"
"gitHead": "5ca7d276f475839c815a2686f38e2814371935c9"
}

@@ -10,3 +10,2 @@ import isEmpty from "lodash/isEmpty"

import isUndefined from "lodash/isUndefined"
import includes from "lodash/includes"
import { defineNameProp } from "../../lang/lang_extensions"

@@ -127,5 +126,4 @@ import { CstNode, ICstVisitor } from "@chevrotain/types"

const missingErrors = validateMissingCstMethods(visitorInstance, ruleNames)
const redundantErrors = validateRedundantMethods(visitorInstance, ruleNames)
return missingErrors.concat(redundantErrors)
return missingErrors
}

@@ -156,31 +154,1 @@

}
const VALID_PROP_NAMES = ["constructor", "visit", "validateVisitor"]
export function validateRedundantMethods(
visitorInstance: ICstVisitor<unknown, unknown>,
ruleNames: string[]
): IVisitorDefinitionError[] {
const errors: IVisitorDefinitionError[] = []
const propNames = Object.getOwnPropertyNames(
visitorInstance.constructor.prototype
)
forEach(propNames, (prop) => {
if (
isFunction((visitorInstance as any)[prop]) &&
!includes(VALID_PROP_NAMES, prop) &&
!includes(ruleNames, prop)
) {
errors.push({
msg:
`Redundant visitor method: <${prop}> on ${<any>(
visitorInstance.constructor.name
)} CST Visitor\n` +
`There is no Grammar Rule corresponding to this method's name.\n`,
type: CstVisitorDefinitionError.REDUNDANT_METHOD,
methodName: prop
})
}
})
return errors
}
// needs a separate module as this is required inside chevrotain productive code
// and also in the entry point for webpack(api.ts).
// A separate file avoids cyclic dependencies and webpack errors.
export const VERSION = "10.2.0"
export const VERSION = "10.3.0"

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc