Socket
Socket
Sign inDemoInstall

js-parse

Package Overview
Dependencies
0
Maintainers
1
Versions
5
Alerts
File Explorer

Advanced tools

Install Socket

Detect and block malicious and high-risk dependencies

Install

Comparing version 0.1.1 to 0.1.2

_site/assets/css/bootstrap-theme.css

5

examples/php/lexical/keywords.json
{
"symbols":{
"this":{
"terminal":true,
"match":"(this)",
"matchCaseInsensitive":true
},
"enddeclare":{

@@ -4,0 +9,0 @@ "terminal":true,

2

examples/php/lexical/literals.js

@@ -47,3 +47,3 @@ /**

"terminal":true,
"match":"[0-9]+(\\.)?[0-9]*[e][\\-\\+]?[0-9]+",
"match":"[0-9]+(\\.)?[0-9]*[Ee][\\-\\+]?[0-9]+",
"matchCaseInsensitive":true

@@ -50,0 +50,0 @@ },

@@ -34,13 +34,10 @@ /**

// TODO: Potential bug here (S/R conflict)
"namespace-name-as-a-prefix":[
["namespace-name", "literals.\\"],
["literals.\\", "namespace-name", "literals.\\"],
["literals.\\"],
["kw.namespace", "literals.\\", "namespace-name", "literals.\\"],
["kw.namespace", "literals.\\"]
],
// Combined namespace-name-as-a-prefix with qualified name.
"qualified-name":[
["namespace-name-as-a-prefix", "literals.name"],
["literals.name"]
["literals.name"],
["literals.\\", "literals.name"],
["literals.\\", "namespace-name", "literals.\\", "literals.name"],
["kw.namespace", "literals.\\", "literals.name"],
["kw.namespace", "literals.\\", "namespace-name", "literals.\\", "literals.name"],
["namespace-name", "literals.\\", "literals.name"]
]

@@ -47,0 +44,0 @@ },

@@ -9,3 +9,20 @@ var Parser = require("../../../lib").Parser.LRParser;

var positive_literal_cases = {
"$test":"tokens.variable-name"
"$test":"tokens.variable-name",
"$test12345":"tokens.variable-name",
"$_test12345":"tokens.variable-name",
"$___test12345":"tokens.variable-name",
"___test12345":"tokens.qualified-name",
"\\___test12345":"tokens.qualified-name",
"\\a_namespace\\___test12345":"tokens.qualified-name",
"\\___test12345":"tokens.qualified-name",
"namespace\\___test12345":"tokens.qualified-name",
"123456":"literals.literal",
"TRUE":"literals.literal",
"false":"literals.literal",
"NULL":"literals.literal",
"1.35E10":"literals.literal",
"$this":"$this",
"($this)":"expression-in-parens",
"(1.35E123)":"expression-in-parens",
"($test)":"expression-in-parens",
};

@@ -26,9 +43,12 @@

return function(end){
terminals_pd.startSymbols = [expected];
var parser = Parser.CreateWithLexer(terminals_pd, {"path":"./syntax"});
parser.on("error", function(error){ throw error.message; });
// parser.getLexer().on("token", function(r){console.log(r); });
parser.getLexer().on("token", function(r){console.log(r); });
parser.on("accept", function(ast){
var type = typeof ast[0].type === "undefined" ? ast[0].head : ast[0].type;
type.should.eql("primary-expression");
var type = typeof ast[0].body[0].type === "undefined" ? ast[0].body[0].head : ast[0].body[0].type;
type.should.eql(expected);
end();

@@ -51,3 +71,2 @@ });

try {
terminals_pd.startSymbols = [expected];
var parser = Parser.CreateWithLexer(terminals_pd, {"path":"./lexical"});

@@ -54,0 +73,0 @@ // parser.getLexer().on("token", function(r){console.log(r); });

@@ -9,7 +9,18 @@ /**

"kw":"../lexical/keywords.json",
"tokens":"../lexical/tokens.js"
"tokens":"../lexical/tokens.js",
"literals":"../lexical/literals.js"
},
"productions":{
"$this":[
["literals.$", "kw.this"]
],
"expression-in-parens":[
["literals.(", "primary-expression", "literals.)"]
],
"primary-expression":[
["tokens.variable-name"]
["$this"],
["tokens.variable-name"],
["tokens.qualified-name"],
["literals.literal"],
["expression-in-parens"],
]

@@ -16,0 +27,0 @@ },

@@ -13,3 +13,3 @@ {

"modules":{
"REX":"examples/re/re.json"
"REX":"re/re.json"
},

@@ -16,0 +16,0 @@ "symbols": {

@@ -77,2 +77,10 @@ /*

}(symbol.matchOnly),
"getPriority":function(priority){
if(typeof priority === "undefined") {
priority = 0;
}
return function() {
return priority;
}
}(symbol.priority),
"removeValue":function(string) {

@@ -136,2 +144,4 @@ return string.replace(this.prefixMatch, "");

var longestRule = null;
var longestPriority = -1;
var tie = false;

@@ -144,6 +154,18 @@ for(var r in prefixMatches) {

longestRule = rule;
longestPriority = rule.getPriority();
tie = false;
} else if(length == longest) {
// see if the tie is broken by priority
if(longestPriority === rule.getPriority()) {
tie = true;
} else if(rule.getPriority() > longestPriority) {
longest = length;
longestRule = rule;
longestPriority = rule.getPriority();
tie = false;
}
}
}
if(longestRule !== null) {
if(longestRule !== null && tie !== false) {

@@ -180,3 +202,2 @@ if(longestRule.includeInStream()) {

var rule = this.rules[r];
if(rule.testPrefixWithLookAhead(this.string) || rule.testFull(this.string)) {

@@ -199,2 +220,3 @@ if(rule.testFull(this.string)) {

// If we found prefix matches and no full matches,

@@ -205,2 +227,4 @@ // pick the longest prefix match.

var longestRule = null;
var longestPriority = -1;
var tie = false;

@@ -213,2 +237,14 @@ for(var r in prefixMatches) {

longestRule = rule;
longestPriority = rule.getPriority();
tie = false;
} else if(length == longest) {
// see if the tie is broken by priority
if(longestPriority === rule.getPriority()) {
tie = true;
} else if(rule.getPriority() > longestPriority) {
longest = length;
longestRule = rule;
longestPriority = rule.getPriority();
tie = false;
}
}

@@ -215,0 +251,0 @@ }

@@ -49,2 +49,25 @@ /*

/**
 * Render the parser's grammar as a human-readable BNF-style string.
 *
 * For each production head, emits one "head -> " line followed by one
 * tab-indented line per alternative clause, with the clause's symbols
 * separated (and trailed) by a single space.
 *
 * The internal augmentation symbol (added during parse-table construction)
 * is skipped so only user-declared productions appear.
 *
 * @returns {string} the formatted grammar listing.
 */
LRParserPrototype.bnfString = function() {
	var str = "";
	var productions = this.getParserDescription().productions;
	for(var head in productions) {
		// Internal artifact of table construction, not part of the user grammar.
		if(head === constants.augmentation_symbol) {
			continue;
		}
		str += head + " -> \n";
		// Fixed: use index loops instead of for...in on arrays; for...in yields
		// string keys and any inherited enumerable properties, not just elements.
		var clauses = productions[head];
		for(var c = 0; c < clauses.length; c++) {
			var clause = clauses[c];
			str += "\t";
			for(var s = 0; s < clause.length; s++) {
				str += clause[s] + " ";
			}
			str += "\n";
		}
	}
	return str;
};
// Pretty print

@@ -247,3 +270,3 @@ // Optionally you can provide a tab character.

console.debug("[Parser] Importing module (" + modName + ")");
// Determine the prefix to use while building this module.

@@ -265,3 +288,3 @@ // In most cases it will be the modName + ".", the only exception

// deeper down the rabbit hole must be rewritten to reference the correct module.
// TODO: Names for modules should be standardized in the parser descriptions to avoid name mismatches allowing modules
// TODO: Names for modules should be standardized in the parser descriptions to avoid name mismatches allowing modules
// to be loaded multiple times.

@@ -282,3 +305,3 @@ var dontLoad = [];

// provide a function for converting symbol names in this context
var processSymbolName = function(currentLevelModules, prefix) {
var processSymbolName = function(currentLevelModules, prefix) {
return function(symbol) {

@@ -294,3 +317,3 @@ for(var key in currentLevelModules) {

return prefix + symbol;
};
};
}(currentLevelModules, prefix);

@@ -394,5 +417,11 @@

var type = table_entry[symbol].operation + "-reduce";
throw type + " Conflict for symbol " + symbol + " in production " + clause.head;
if(type === "shift-reduce") {
console.log("WARNING: " + type + " Conflict for symbol " + symbol + " in production " + clause.head);
} else {
throw type + " Conflict for symbol " + symbol + " in production " + clause.head;
}
} else {
table_entry[symbol] = {"operation":"reduce", "state":productionIndex};
}
table_entry[symbol] = {"operation":"reduce", "state":productionIndex};
}

@@ -848,2 +877,3 @@ }

var tok = this.token_stack.pop();
if(!tok) return this.shift(token);
var type = typeof tok.type === "undefined" ? tok.head : tok.type;

@@ -878,3 +908,7 @@

}
this.token_stack.push({"head":production.head, "body":params});
// the abandon option will drop this from the stack as we parse them.
if(this.getParserDescription().symbols[type].abandon !== true) {
this.token_stack.push({"head":production.head, "body":params});
}
this.emit(production.head, params);

@@ -930,1 +964,2 @@ this.emit("production", production.head, params);

module.exports.CreateWithLexer = CreateLRParserWithLexer;
module.exports.prettyPrint = prettyPrint;
{
"name": "js-parse",
"version": "0.1.1",
"version": "0.1.2",
"description": "A generic shift-reduce parser for nodejs",

@@ -5,0 +5,0 @@ "main": "index.js",

@@ -304,2 +304,16 @@

##### abandon
The `abandon` option will cause the matched production to be dropped from the parser
stack. This means that, for all intents and purposes, it vanishes from the parse
tree. I added this option to support really incremental processing.
For example, the `statement_list` defined in [NodePL](https://github.com/kkoch986/nodepl/blob/master/grammars/source.json#L28-L32).
In that case I had no use for the statements once they were already parsed since
they were handled by the EventEmitter. What would happen when parsing large files
with a few thousand statements is that it would parse pretty fast, but on the final step
it would have to traverse all the way back to the beginning of the file. This was
a waste, so instead I drop them from the parse tree and a 10s parse is transformed
into a 0.5s parse!
### Productions

@@ -306,0 +320,0 @@

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc