@salvoravida/dt-sql-parser

@salvoravida/dt-sql-parser - npm package version comparison

Comparing version 4.0.0-beta.2.9 to 4.0.0-beta.2.10


dist/index.d.ts

@@ -13,1 +13,3 @@ export * from './parser';

export * from './lib/spark/SparkSqlListener';
export * from './lib/pgsql/PostgreSQLParserListener';
export * from './lib/pgsql/PostgreSQLParserVisitor';

@@ -29,2 +29,4 @@ "use strict";

__exportStar(require("./lib/spark/SparkSqlListener"), exports);
__exportStar(require("./lib/pgsql/PostgreSQLParserListener"), exports);
__exportStar(require("./lib/pgsql/PostgreSQLParserVisitor"), exports);
//# sourceMappingURL=index.js.map
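The only change at the package entry point is the pair of new PostgreSQL re-exports. A minimal consumer-side sketch (assumed usage, not code shipped in the package) of picking them up from the root:

const {
    PostgreSQLParserListener,
    PostgreSQLParserVisitor,
} = require('@salvoravida/dt-sql-parser');

// Both generated classes are now reachable without deep imports into dist/lib/pgsql.
console.log(typeof PostgreSQLParserListener, typeof PostgreSQLParserVisitor);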


dist/lib/pgsql/base/PostgreSQLLexerBase.d.ts

@@ -1,4 +0,4 @@

export class PostgreSQLLexerBase extends antlr4.Lexer {
export function PostgreSQLLexerBase(input: any): any;
export class PostgreSQLLexerBase {
constructor(input: any);
tags: any[];
pushTag(): void;

@@ -15,2 +15,1 @@ isTag(): any;

}
import antlr4 = require("antlr4/index");

@@ -1,83 +0,89 @@

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
// https://github.com/antlr/grammars-v4/blob/master/sql/postgresql/Java/PostgreSQLLexerBase.java
// eslint-disable-next-line no-invalid-this
const __extends = (this && this.__extends) || (function () {
let extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
({ __proto__: [] } instanceof Array && function (d, b) {
d.__proto__ = b;
}) ||
function (d, b) {
for (const p in b)
if (Object.prototype.hasOwnProperty.call(b, p))
d[p] = b[p];
};
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
if (typeof b !== 'function' && b !== null) {
throw new TypeError('Class extends value ' + String(b) + ' is not a constructor or null');
}
extendStatics(d, b);
function __() { this.constructor = d; }
function __() {
this.constructor = d;
}
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.PostgreSQLLexerBase = void 0;
var antlr4 = require('antlr4/index');
var Lexer = antlr4.Lexer;
const Lexer = require('antlr4').Lexer;
function isLetter(str) {
return str.length === 1 && str.match(/[a-z]/i);
}
var PostgreSQLLexerBase = /** @class */ (function (_super) {
__extends(PostgreSQLLexerBase, _super);
function PostgreSQLLexerBase(input) {
var _this = _super.call(this, input) || this;
_this.tags = [];
return _this;
function PostgreSQLLexerBase(input) {
const _this = Lexer.call(this, input) || this;
_this.tags = [];
return _this;
}
__extends(PostgreSQLLexerBase, Lexer);
PostgreSQLLexerBase.prototype.pushTag = function () {
this.tags.push(getText());
};
PostgreSQLLexerBase.prototype.isTag = function () {
return this.getText().equals(this.tags.peek());
};
PostgreSQLLexerBase.prototype.popTag = function () {
this.tags.pop();
};
PostgreSQLLexerBase.prototype.getInputStream = function () {
return this._input;
};
PostgreSQLLexerBase.prototype.checkLA = function (c) {
// eslint-disable-next-line new-cap
return this.getInputStream().LA(1) !== c;
};
PostgreSQLLexerBase.prototype.charIsLetter = function () {
// eslint-disable-next-line new-cap
return isLetter(this.getInputStream().LA(-1));
};
PostgreSQLLexerBase.prototype.HandleNumericFail = function () {
this.getInputStream().seek(this.getInputStream().index() - 2);
const Integral = 535;
this.setType(Integral);
};
PostgreSQLLexerBase.prototype.HandleLessLessGreaterGreater = function () {
const LESS_LESS = 18;
const GREATER_GREATER = 19;
if (this.getText() === '<<') {
this.setType(LESS_LESS);
}
PostgreSQLLexerBase.prototype.pushTag = function () {
this.tags.push(getText());
};
PostgreSQLLexerBase.prototype.isTag = function () {
return this.getText().equals(this.tags.peek());
};
PostgreSQLLexerBase.prototype.popTag = function () {
tags.pop();
};
PostgreSQLLexerBase.prototype.getInputStream = function () {
return this._input;
};
PostgreSQLLexerBase.prototype.checkLA = function (c) {
// eslint-disable-next-line new-cap
return this.getInputStream().LA(1) !== c;
};
PostgreSQLLexerBase.prototype.charIsLetter = function () {
// eslint-disable-next-line new-cap
return isLetter(this.getInputStream().LA(-1));
};
PostgreSQLLexerBase.prototype.HandleNumericFail = function () {
this.getInputStream().seek(this.getInputStream().index() - 2);
var Integral = 535;
this.setType(Integral);
};
PostgreSQLLexerBase.prototype.HandleLessLessGreaterGreater = function () {
var LESS_LESS = 18;
var GREATER_GREATER = 19;
if (this.getText() === '<<')
this.setType(LESS_LESS);
if (this.getText() === '>>')
this.setType(GREATER_GREATER);
};
PostgreSQLLexerBase.prototype.UnterminatedBlockCommentDebugAssert = function () {
// Debug.Assert(InputStream.LA(1) == -1 /*EOF*/);
};
PostgreSQLLexerBase.prototype.CheckIfUtf32Letter = function () {
// eslint-disable-next-line new-cap
var codePoint = this.getInputStream().LA(-2) << 8 + this.getInputStream().LA(-1);
var c;
if (codePoint < 0x10000) {
c = String.fromCharCode(codePoint);
}
else {
codePoint -= 0x10000;
c = String.fromCharCode(codePoint / 0x400 + 0xd800, codePoint % 0x400 + 0xdc00);
}
return isLetter(c[0]);
};
return PostgreSQLLexerBase;
}(Lexer));
if (this.getText() === '>>') {
this.setType(GREATER_GREATER);
}
};
PostgreSQLLexerBase.prototype.UnterminatedBlockCommentDebugAssert = function () {
// Debug.Assert(InputStream.LA(1) == -1 /*EOF*/);
};
PostgreSQLLexerBase.prototype.CheckIfUtf32Letter = function () {
// eslint-disable-next-line new-cap
let codePoint = this.getInputStream().LA(-2) << 8 + this.getInputStream().LA(-1);
let c;
if (codePoint < 0x10000) {
c = String.fromCharCode(codePoint);
}
else {
codePoint -= 0x10000;
c = String.fromCharCode(codePoint / 0x400 + 0xd800, codePoint % 0x400 + 0xdc00);
}
return isLetter(c[0]);
};
exports.PostgreSQLLexerBase = PostgreSQLLexerBase;
//# sourceMappingURL=PostgreSQLLexerBase.js.map
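The rewritten lexer base keeps a tags stack (pushTag/popTag/isTag) plus look-ahead helpers used by the generated lexer's predicates. A hedged sketch of driving the generated PostgreSQLLexer directly, mirroring the getPostgreSQLParser factory further down; the deep require path into dist is an assumption about the published layout.

const antlr4 = require('antlr4/index');
const { PostgreSQLLexer } = require('@salvoravida/dt-sql-parser/dist/lib/pgsql/PostgreSQLLexer');

const chars = antlr4.CharStreams.fromString('SELECT 1;');
const lexer = new PostgreSQLLexer(chars); // inherits tags / pushTag / popTag / isTag from PostgreSQLLexerBase
console.log(lexer.getAllTokens().map((token) => token.text));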

@@ -1,3 +0,3 @@

export class PostgreSQLParserBase extends antlr4.Parser {
static getPostgreSQLParser(script: any): PostgreSQLParser;
export function PostgreSQLParserBase(input: any): any;
export class PostgreSQLParserBase {
constructor(input: any);

@@ -10,3 +10,5 @@ GetParsedSqlTree(script: any, line: any): any;

}
import antlr4 = require("antlr4/index");
import { PostgreSQLParser } from "../PostgreSQLParser";
export namespace PostgreSQLParserBase {
function getPostgreSQLParser(script: any): PostgreSQLParser_1.PostgreSQLParser;
}
import PostgreSQLParser_1 = require("../PostgreSQLParser");

@@ -1,50 +0,53 @@

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
/* eslint-disable new-cap,camelcase */
// https://github.com/antlr/grammars-v4/blob/master/sql/postgresql/Java/PostgreSQLParserBase.java
// eslint-disable-next-line no-invalid-this
const __extends = (this && this.__extends) || (function () {
let extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
({ __proto__: [] } instanceof Array && function (d, b) {
d.__proto__ = b;
}) ||
function (d, b) {
for (const p in b)
if (Object.prototype.hasOwnProperty.call(b, p))
d[p] = b[p];
};
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
if (typeof b !== 'function' && b !== null) {
throw new TypeError('Class extends value ' + String(b) + ' is not a constructor or null');
}
extendStatics(d, b);
function __() { this.constructor = d; }
function __() {
this.constructor = d;
}
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.PostgreSQLParserBase = void 0;
/* eslint-disable new-cap */
var PostgreSQLLexer_1 = require("../PostgreSQLLexer");
var PostgreSQLParser_1 = require("../PostgreSQLParser");
var antlr4 = require('antlr4/index');
var CharStreams = antlr4.CharStreams;
var CommonTokenStream = antlr4.CommonTokenStream;
// @ts-ignore
var PostgreSQLParserBase = /** @class */ (function (_super) {
__extends(PostgreSQLParserBase, _super);
function PostgreSQLParserBase(input) {
return _super.call(this, input) || this;
}
PostgreSQLParserBase.prototype.GetParsedSqlTree = function (script, line) {
var ph = this.getPostgreSQLParser(script);
return ph.root();
};
PostgreSQLParserBase.prototype.ParseRoutineBody = function (_localctx) {
var lang = null;
for (var _i = 0, _a = _localctx.createfunc_opt_item(); _i < _a.length; _i++) {
var coi = _a[_i];
// eslint-disable-next-line new-cap
if (!coi.LANGUAGE()) {
if (!coi.nonreservedword_or_sconst()) {
if (!coi.nonreservedword_or_sconst().nonreservedword()) {
if (!coi.nonreservedword_or_sconst().nonreservedword().identifier()) {
// eslint-disable-next-line new-cap
if (!coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier()) {
// eslint-disable-next-line new-cap
lang = coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier().getText();
break;
}
const PostgreSQLLexer_1 = require('../PostgreSQLLexer');
const PostgreSQLParser_1 = require('../PostgreSQLParser');
const antlr4 = require('antlr4/index');
const CharStreams = antlr4.CharStreams;
const CommonTokenStream = antlr4.CommonTokenStream;
const Parser = antlr4.Parser;
__extends(PostgreSQLParserBase, Parser);
function PostgreSQLParserBase(input) {
return Parser.call(this, input) || this;
}
PostgreSQLParserBase.prototype.GetParsedSqlTree = function (script, line) {
const ph = this.getPostgreSQLParser(script);
return ph.program();
};
PostgreSQLParserBase.prototype.ParseRoutineBody = function (_localctx) {
let lang = null;
for (let _i = 0, _a = _localctx.createfunc_opt_item(); _i < _a.length; _i++) {
const coi = _a[_i];
if (!!coi.LANGUAGE()) {
if (!!coi.nonreservedword_or_sconst()) {
if (!!coi.nonreservedword_or_sconst().nonreservedword()) {
if (!!coi.nonreservedword_or_sconst().nonreservedword().identifier()) {
if (!!coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier()) {
lang = coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier().getText();
break;
}

@@ -55,81 +58,84 @@ }

}
if (!lang)
return;
// eslint-disable-next-line camelcase
var func_as = null;
for (var _b = 0, _c = _localctx.createfunc_opt_item(); _b < _c.length; _b++) {
var a = _c[_b];
if (!a.func_as()) {
// eslint-disable-next-line camelcase
func_as = a;
}
if (!lang) {
return;
}
// eslint-disable-next-line camelcase
let func_as = null;
for (let _b = 0, _c = _localctx.createfunc_opt_item(); _b < _c.length; _b++) {
const a = _c[_b];
if (!a.func_as()) {
// eslint-disable-next-line camelcase
func_as = a;
break;
}
}
// eslint-disable-next-line camelcase
if (!!func_as) {
const txt = this.GetRoutineBodyString(func_as.func_as().sconst(0));
const line = func_as.func_as().sconst(0).start.getLine();
const ph = this.getPostgreSQLParser(txt);
switch (lang) {
case 'plpgsql':
func_as.func_as().Definition = ph.plsqlroot();
break;
}
case 'sql':
func_as.func_as().Definition = ph.program();
break;
}
// eslint-disable-next-line camelcase
if (!func_as) {
var txt = this.GetRoutineBodyString(func_as.func_as().sconst(0));
var line = func_as.func_as().sconst(0).start.getLine();
var ph = this.getPostgreSQLParser(txt);
switch (lang) {
case 'plpgsql':
func_as.func_as().Definition = ph.plsqlroot();
break;
case 'sql':
func_as.func_as().Definition = ph.root();
break;
}
}
};
PostgreSQLParserBase.prototype.TrimQuotes = function (s) {
return (!s) ? s : s.substring(1, s.length() - 1);
};
PostgreSQLParserBase.prototype.unquote = function (s) {
var slength = s.length();
var r = '';
var i = 0;
while (i < slength) {
var c = s.charAt(i);
r.append(c);
if (c === '\'' && i < slength - 1 && (s.charAt(i + 1) === '\''))
i++;
}
};
PostgreSQLParserBase.prototype.TrimQuotes = function (s) {
return (!s) ? s : s.substring(1, s.length() - 1);
};
PostgreSQLParserBase.prototype.unquote = function (s) {
const slength = s.length();
const r = '';
let i = 0;
while (i < slength) {
const c = s.charAt(i);
r.append(c);
if (c === '\'' && i < slength - 1 && (s.charAt(i + 1) === '\'')) {
i++;
}
return r.toString();
};
PostgreSQLParserBase.prototype.GetRoutineBodyString = function (rule) {
var anysconst = rule.anysconst();
// eslint-disable-next-line new-cap
var StringConstant = anysconst.StringConstant();
if (null !== StringConstant)
return this.unquote(this.TrimQuotes(StringConstant.getText()));
var UnicodeEscapeStringConstant = anysconst.UnicodeEscapeStringConstant();
if (null !== UnicodeEscapeStringConstant)
return this.TrimQuotes(UnicodeEscapeStringConstant.getText());
var EscapeStringConstant = anysconst.EscapeStringConstant();
if (null !== EscapeStringConstant)
return this.TrimQuotes(EscapeStringConstant.getText());
var result = '';
var dollartext = anysconst.DollarText();
for (var _i = 0, dollartext_1 = dollartext; _i < dollartext_1.length; _i++) {
var s = dollartext_1[_i];
result += s.getText();
}
return result;
};
PostgreSQLParserBase.getPostgreSQLParser = function (script) {
var charStream = CharStreams.fromString(script);
var lexer = new PostgreSQLLexer_1.PostgreSQLLexer(charStream);
var tokens = new CommonTokenStream(lexer);
var parser = new PostgreSQLParser_1.PostgreSQLParser(tokens);
lexer.removeErrorListeners();
parser.removeErrorListeners();
// LexerDispatchingErrorListener listener_lexer = new LexerDispatchingErrorListener((Lexer)(((CommonTokenStream)(this.getInputStream())).getTokenSource()));
// ParserDispatchingErrorListener listener_parser = new ParserDispatchingErrorListener(this);
// lexer.addErrorListener(listener_lexer);
// parser.addErrorListener(listener_parser);
return parser;
};
return PostgreSQLParserBase;
}(antlr4.Parser));
i++;
}
return r.toString();
};
PostgreSQLParserBase.prototype.GetRoutineBodyString = function (rule) {
const anysconst = rule.anysconst();
const StringConstant = anysconst.StringConstant();
if (!!StringConstant) {
return this.unquote(this.TrimQuotes(StringConstant.getText()));
}
const UnicodeEscapeStringConstant = anysconst.UnicodeEscapeStringConstant();
if (!!UnicodeEscapeStringConstant) {
return this.TrimQuotes(UnicodeEscapeStringConstant.getText());
}
const EscapeStringConstant = anysconst.EscapeStringConstant();
if (!!EscapeStringConstant) {
return this.TrimQuotes(EscapeStringConstant.getText());
}
let result = '';
const dollartext = anysconst.DollarText();
for (let _i = 0, dollartext_1 = dollartext; _i < dollartext_1.length; _i++) {
const s = dollartext_1[_i];
result += s.getText();
}
return result;
};
PostgreSQLParserBase.getPostgreSQLParser = function (script) {
const charStream = CharStreams.fromString(script);
const lexer = new PostgreSQLLexer_1.PostgreSQLLexer(charStream);
const tokens = new CommonTokenStream(lexer);
const parser = new PostgreSQLParser_1.PostgreSQLParser(tokens);
lexer.removeErrorListeners();
parser.removeErrorListeners();
// LexerDispatchingErrorListener listener_lexer = new LexerDispatchingErrorListener((Lexer)(((CommonTokenStream)(this.getInputStream())).getTokenSource()));
// ParserDispatchingErrorListener listener_parser = new ParserDispatchingErrorListener(this);
// lexer.addErrorListener(listener_lexer);
// parser.addErrorListener(listener_parser);
return parser;
};
exports.PostgreSQLParserBase = PostgreSQLParserBase;
//# sourceMappingURL=PostgreSQLParserBase.js.map
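The parser base exposes the static getPostgreSQLParser factory, and GetParsedSqlTree now drives the program() entry rule where the previous build called root(). A hedged sketch (the deep require path into dist is an assumption about the published layout):

const { PostgreSQLParserBase } = require('@salvoravida/dt-sql-parser/dist/lib/pgsql/base/PostgreSQLParserBase');

const parser = PostgreSQLParserBase.getPostgreSQLParser('SELECT 1;');
const tree = parser.program(); // 4.0.0-beta.2.9 reached the equivalent tree through parser.root()
console.log(tree.toStringTree(parser.ruleNames));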
export function PostgreSQLParserVisitor(): this;
export class PostgreSQLParserVisitor {
constructor: typeof PostgreSQLParserVisitor;
visitRoot(ctx: any): any;
visitProgram(ctx: any): any;
visitPlsqlroot(ctx: any): any;

@@ -6,0 +6,0 @@ visitStmtblock(ctx: any): any;

@@ -1,17 +0,4 @@

var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) {
if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
if (ar || !(i in from)) {
if (!ar) ar = Array.prototype.slice.call(from, 0, i);
ar[i] = from[i];
}
}
return to.concat(ar || Array.prototype.slice.call(from));
};
var Lexer = require('antlr4').Lexer;
function PlSqlBaseLexer() {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i] = arguments[_i];
}
Lexer.call.apply(Lexer, __spreadArray([this], args, false));
const Lexer = require('antlr4').Lexer;
function PlSqlBaseLexer(...args) {
Lexer.call(this, ...args);
return this;

@@ -22,3 +9,3 @@ }

PlSqlBaseLexer.prototype.IsNewlineAtPos = function (pos) {
var la = this._input.LA(pos);
const la = this._input.LA(pos);
return la == -1 || la == '\n';

@@ -25,0 +12,0 @@ };

@@ -1,17 +0,4 @@

var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) {
if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
if (ar || !(i in from)) {
if (!ar) ar = Array.prototype.slice.call(from, 0, i);
ar[i] = from[i];
}
}
return to.concat(ar || Array.prototype.slice.call(from));
};
var Parser = require('antlr4').Parser;
function PlSqlBaseParser() {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i] = arguments[_i];
}
Parser.call.apply(Parser, __spreadArray([this], args, false));
const Parser = require('antlr4').Parser;
function PlSqlBaseParser(...args) {
Parser.call(this, ...args);
this._isVersion10 = false;

@@ -18,0 +5,0 @@ this._isVersion12 = true;

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var tree_1 = require("antlr4/tree");
var parserErrorListener_1 = require("./parserErrorListener");
const tree_1 = require("antlr4/tree");
const parserErrorListener_1 = require("./parserErrorListener");
/**
* Custom Parser class, subclass needs extends it.
*/
var BasicParser = /** @class */ (function () {
function BasicParser() {
}
BasicParser.prototype.parse = function (input, errorListener) {
var parser = this.createParser(input);
class BasicParser {
parse(input, errorListener) {
const parser = this.createParser(input);
this._parser = parser;
parser.removeErrorListeners();
parser.addErrorListener(new parserErrorListener_1.default(errorListener));
// Note : needed by pgsql
var parserTree = parser.program ? parser.program() : parser.root();
const parserTree = parser.program();
return parserTree;
};
BasicParser.prototype.validate = function (input) {
var lexerError = [];
var syntaxErrors = [];
var parser = this.createParser(input);
}
validate(input) {
const lexerError = [];
const syntaxErrors = [];
const parser = this.createParser(input);
this._parser = parser;
parser.removeErrorListeners();
parser.addErrorListener(new parserErrorListener_1.ParserErrorCollector(syntaxErrors));
// Note : needed by pgsql
if (parser.program) {
parser.program();
}
else {
parser.root();
}
parser.program();
return lexerError.concat(syntaxErrors);
};
}
/**

@@ -45,5 +36,5 @@ * Visit parser tree

*/
BasicParser.prototype.getAllTokens = function (input) {
getAllTokens(input) {
return this.createLexer(input).getAllTokens();
};
}
;

@@ -54,9 +45,9 @@ /**

*/
BasicParser.prototype.createParser = function (input) {
var lexer = this.createLexer(input);
var parser = this.createParserFromLexer(lexer);
createParser(input) {
const lexer = this.createLexer(input);
const parser = this.createParserFromLexer(lexer);
parser.buildParseTrees = true;
this._parser = parser;
return parser;
};
}
/**

@@ -66,9 +57,8 @@ * It convert tree to string, it's convenient to use in unit test.

*/
BasicParser.prototype.parserTreeToString = function (input) {
var parser = this.createParser(input);
parserTreeToString(input) {
const parser = this.createParser(input);
this._parser = parser;
// Note : needed by pgsql
var tree = parser.program ? parser.program() : parser.root();
const tree = parser.program();
return tree.toStringTree(parser.ruleNames);
};
}
/**

@@ -78,5 +68,5 @@ * Get List-like style tree string

*/
BasicParser.prototype.toString = function (parserTree) {
toString(parserTree) {
return parserTree.toStringTree(this._parser.ruleNames);
};
}
/**

@@ -86,8 +76,7 @@ * @param listener Listener instance extends ParserListener

*/
BasicParser.prototype.listen = function (listener, parserTree) {
listen(listener, parserTree) {
tree_1.ParseTreeWalker.DEFAULT.walk(listener, parserTree);
};
return BasicParser;
}());
}
}
exports.default = BasicParser;
//# sourceMappingURL=basicParser.js.map
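BasicParser is the shared base each dialect extends; parse, validate, parserTreeToString and getAllTokens all build on the subclass-provided createLexer and createParserFromLexer. A hedged usage sketch with one of the shipped dialects (assumed consumer code):

const { SparkSQL } = require('@salvoravida/dt-sql-parser');

const parser = new SparkSQL();
console.log(parser.validate('SELECT * FROM tbl'));          // empty array when the input parses cleanly
console.log(parser.getAllTokens('SELECT * FROM tbl').length);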
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.ParserErrorCollector = void 0;
var error_1 = require("antlr4/error");
var ParserErrorCollector = /** @class */ (function (_super) {
__extends(ParserErrorCollector, _super);
function ParserErrorCollector(error) {
var _this = _super.call(this) || this;
_this._errors = error;
return _this;
const error_1 = require("antlr4/error");
class ParserErrorCollector extends error_1.ErrorListener {
constructor(error) {
super();
this._errors = error;
}
ParserErrorCollector.prototype.syntaxError = function (recognizer, offendingSymbol, line, charPositionInLine, msg, e) {
var endCol = charPositionInLine + 1;
syntaxError(recognizer, offendingSymbol, line, charPositionInLine, msg, e) {
let endCol = charPositionInLine + 1;
if (offendingSymbol && offendingSymbol.text !== null) {

@@ -39,15 +22,12 @@ endCol = charPositionInLine + offendingSymbol.text.length;

});
};
return ParserErrorCollector;
}(error_1.ErrorListener));
}
}
exports.ParserErrorCollector = ParserErrorCollector;
var ParserErrorListener = /** @class */ (function (_super) {
__extends(ParserErrorListener, _super);
function ParserErrorListener(errorListener) {
var _this = _super.call(this) || this;
_this._errorHandler = errorListener;
return _this;
class ParserErrorListener extends error_1.ErrorListener {
constructor(errorListener) {
super();
this._errorHandler = errorListener;
}
ParserErrorListener.prototype.syntaxError = function (recognizer, offendingSymbol, line, charPositionInLine, msg, e) {
var endCol = charPositionInLine + 1;
syntaxError(recognizer, offendingSymbol, line, charPositionInLine, msg, e) {
let endCol = charPositionInLine + 1;
if (offendingSymbol && offendingSymbol.text !== null) {

@@ -64,14 +44,13 @@ endCol = charPositionInLine + offendingSymbol.text.length;

}, {
e: e,
line: line,
msg: msg,
recognizer: recognizer,
offendingSymbol: offendingSymbol,
charPositionInLine: charPositionInLine,
e,
line,
msg,
recognizer,
offendingSymbol,
charPositionInLine,
});
}
};
return ParserErrorListener;
}(error_1.ErrorListener));
}
}
exports.default = ParserErrorListener;
//# sourceMappingURL=parserErrorListener.js.map
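ParserErrorListener wraps the callback handed to BasicParser.parse, while ParserErrorCollector accumulates errors for validate. A hedged sketch of the callback style; the exact error payload shape is only partially visible in this diff.

const { SparkSQL } = require('@salvoravida/dt-sql-parser');

new SparkSQL().parse('SELEC 1 FROM', (error, ctx) => {
    // called once per syntax error with position details (line, columns, message)
    console.log('syntax error:', error, ctx);
});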
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var antlr4_1 = require("antlr4");
var FlinkSqlLexer_1 = require("../lib/flinksql/FlinkSqlLexer");
var FlinkSqlParser_1 = require("../lib/flinksql/FlinkSqlParser");
var basicParser_1 = require("./common/basicParser");
var FlinkSQL = /** @class */ (function (_super) {
__extends(FlinkSQL, _super);
function FlinkSQL() {
return _super !== null && _super.apply(this, arguments) || this;
const antlr4_1 = require("antlr4");
const FlinkSqlLexer_1 = require("../lib/flinksql/FlinkSqlLexer");
const FlinkSqlParser_1 = require("../lib/flinksql/FlinkSqlParser");
const basicParser_1 = require("./common/basicParser");
class FlinkSQL extends basicParser_1.default {
createLexer(input) {
const chars = new antlr4_1.InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
const lexer = new FlinkSqlLexer_1.FlinkSqlLexer(chars);
return lexer;
}
FlinkSQL.prototype.createLexer = function (input) {
var chars = new antlr4_1.InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
var lexer = new FlinkSqlLexer_1.FlinkSqlLexer(chars);
return lexer;
};
FlinkSQL.prototype.createParserFromLexer = function (lexer) {
var tokenStream = new antlr4_1.CommonTokenStream(lexer);
createParserFromLexer(lexer) {
const tokenStream = new antlr4_1.CommonTokenStream(lexer);
return new FlinkSqlParser_1.FlinkSqlParser(tokenStream);
};
return FlinkSQL;
}(basicParser_1.default));
}
}
exports.default = FlinkSQL;
//# sourceMappingURL=flinksql.js.map
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var antlr4_1 = require("antlr4");
var SqlLexer_1 = require("../lib/generic/SqlLexer");
var SqlParser_1 = require("../lib/generic/SqlParser");
var basicParser_1 = require("./common/basicParser");
var GenericSQL = /** @class */ (function (_super) {
__extends(GenericSQL, _super);
function GenericSQL() {
return _super !== null && _super.apply(this, arguments) || this;
const antlr4_1 = require("antlr4");
const SqlLexer_1 = require("../lib/generic/SqlLexer");
const SqlParser_1 = require("../lib/generic/SqlParser");
const basicParser_1 = require("./common/basicParser");
class GenericSQL extends basicParser_1.default {
createLexer(input) {
const chars = new antlr4_1.InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
const lexer = new SqlLexer_1.SqlLexer(chars);
return lexer;
}
GenericSQL.prototype.createLexer = function (input) {
var chars = new antlr4_1.InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
var lexer = new SqlLexer_1.SqlLexer(chars);
return lexer;
};
GenericSQL.prototype.createParserFromLexer = function (lexer) {
var tokenStream = new antlr4_1.CommonTokenStream(lexer);
createParserFromLexer(lexer) {
const tokenStream = new antlr4_1.CommonTokenStream(lexer);
return new SqlParser_1.SqlParser(tokenStream);
};
return GenericSQL;
}(basicParser_1.default));
}
}
exports.default = GenericSQL;
//# sourceMappingURL=generic.js.map
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var antlr4_1 = require("antlr4");
var HiveSqlLexer_1 = require("../lib/hive/HiveSqlLexer");
var HiveSql_1 = require("../lib/hive/HiveSql");
var basicParser_1 = require("./common/basicParser");
var HiveSQL = /** @class */ (function (_super) {
__extends(HiveSQL, _super);
function HiveSQL() {
return _super !== null && _super.apply(this, arguments) || this;
const antlr4_1 = require("antlr4");
const HiveSqlLexer_1 = require("../lib/hive/HiveSqlLexer");
const HiveSql_1 = require("../lib/hive/HiveSql");
const basicParser_1 = require("./common/basicParser");
class HiveSQL extends basicParser_1.default {
createLexer(input) {
const chars = new antlr4_1.InputStream(input);
const lexer = new HiveSqlLexer_1.HiveSqlLexer(chars);
return lexer;
}
HiveSQL.prototype.createLexer = function (input) {
var chars = new antlr4_1.InputStream(input);
var lexer = new HiveSqlLexer_1.HiveSqlLexer(chars);
return lexer;
};
HiveSQL.prototype.createParserFromLexer = function (lexer) {
var tokenStream = new antlr4_1.CommonTokenStream(lexer);
createParserFromLexer(lexer) {
const tokenStream = new antlr4_1.CommonTokenStream(lexer);
return new HiveSql_1.HiveSql(tokenStream);
};
return HiveSQL;
}(basicParser_1.default));
}
}
exports.default = HiveSQL;
//# sourceMappingURL=hive.js.map

@@ -6,1 +6,2 @@ export { default as GenericSQL } from './generic';

export { default as SparkSQL } from './spark';
export { default as PostgresSQL } from './pgsql';
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SparkSQL = exports.FlinkSQL = exports.HiveSQL = exports.PLSQL = exports.GenericSQL = void 0;
exports.PostgresSQL = exports.SparkSQL = exports.FlinkSQL = exports.HiveSQL = exports.PLSQL = exports.GenericSQL = void 0;
var generic_1 = require("./generic");

@@ -14,2 +14,4 @@ Object.defineProperty(exports, "GenericSQL", { enumerable: true, get: function () { return generic_1.default; } });

Object.defineProperty(exports, "SparkSQL", { enumerable: true, get: function () { return spark_1.default; } });
var pgsql_1 = require("./pgsql");
Object.defineProperty(exports, "PostgresSQL", { enumerable: true, get: function () { return pgsql_1.default; } });
//# sourceMappingURL=index.js.map
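The headline addition of this release is the PostgresSQL dialect exported from the package root alongside the existing ones. A hedged consumer sketch (assumed usage):

const { PostgresSQL } = require('@salvoravida/dt-sql-parser');

const pg = new PostgresSQL();
console.log(pg.validate('SELECT id FROM users;'));   // expected: an empty array for valid SQL
console.log(pg.parserTreeToString('SELECT 1;'));     // string form of the program() parse tree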
import { Lexer } from 'antlr4';
import BasicParser from './common/basicParser';
export default class PLSQLParser extends BasicParser {
export default class PostgresSQL extends BasicParser {
createLexer(input: string): Lexer;
createParserFromLexer(lexer: Lexer): any;
}
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var antlr4_1 = require("antlr4");
var PostgreSQLLexer_1 = require("../lib/pgsql/PostgreSQLLexer");
var PostgreSQLParser_1 = require("../lib/pgsql/PostgreSQLParser");
var basicParser_1 = require("./common/basicParser");
var PLSQLParser = /** @class */ (function (_super) {
__extends(PLSQLParser, _super);
function PLSQLParser() {
return _super !== null && _super.apply(this, arguments) || this;
const antlr4_1 = require("antlr4");
const PostgreSQLLexer_1 = require("../lib/pgsql/PostgreSQLLexer");
// @ts-ignore
const PostgreSQLParser_1 = require("../lib/pgsql/PostgreSQLParser");
const basicParser_1 = require("./common/basicParser");
class PostgresSQL extends basicParser_1.default {
createLexer(input) {
const chars = new antlr4_1.InputStream(input.toUpperCase());
const lexer = new PostgreSQLLexer_1.PostgreSQLLexer(chars);
return lexer;
}
PLSQLParser.prototype.createLexer = function (input) {
var chars = new antlr4_1.InputStream(input.toUpperCase());
var lexer = new PostgreSQLLexer_1.PostgreSQLLexer(chars);
return lexer;
};
PLSQLParser.prototype.createParserFromLexer = function (lexer) {
var tokenStream = new antlr4_1.CommonTokenStream(lexer);
createParserFromLexer(lexer) {
const tokenStream = new antlr4_1.CommonTokenStream(lexer);
return new PostgreSQLParser_1.PostgreSQLParser(tokenStream);
};
return PLSQLParser;
}(basicParser_1.default));
exports.default = PLSQLParser;
}
}
exports.default = PostgresSQL;
//# sourceMappingURL=pgsql.js.map
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var antlr4_1 = require("antlr4");
var PlSqlLexer_1 = require("../lib/plsql/PlSqlLexer");
var PlSqlParser_1 = require("../lib/plsql/PlSqlParser");
var basicParser_1 = require("./common/basicParser");
var PLSQLParser = /** @class */ (function (_super) {
__extends(PLSQLParser, _super);
function PLSQLParser() {
return _super !== null && _super.apply(this, arguments) || this;
const antlr4_1 = require("antlr4");
const PlSqlLexer_1 = require("../lib/plsql/PlSqlLexer");
// @ts-ignore
const PlSqlParser_1 = require("../lib/plsql/PlSqlParser");
const basicParser_1 = require("./common/basicParser");
class PLSQLParser extends basicParser_1.default {
createLexer(input) {
const chars = new antlr4_1.InputStream(input.toUpperCase());
const lexer = new PlSqlLexer_1.PlSqlLexer(chars);
return lexer;
}
PLSQLParser.prototype.createLexer = function (input) {
var chars = new antlr4_1.InputStream(input.toUpperCase());
var lexer = new PlSqlLexer_1.PlSqlLexer(chars);
return lexer;
};
PLSQLParser.prototype.createParserFromLexer = function (lexer) {
var tokenStream = new antlr4_1.CommonTokenStream(lexer);
createParserFromLexer(lexer) {
const tokenStream = new antlr4_1.CommonTokenStream(lexer);
return new PlSqlParser_1.PlSqlParser(tokenStream);
};
return PLSQLParser;
}(basicParser_1.default));
}
}
exports.default = PLSQLParser;
//# sourceMappingURL=plsql.js.map
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var antlr4_1 = require("antlr4");
var SparkSqlLexer_1 = require("../lib/spark/SparkSqlLexer");
var SparkSqlParser_1 = require("../lib/spark/SparkSqlParser");
var basicParser_1 = require("./common/basicParser");
var SparkSQL = /** @class */ (function (_super) {
__extends(SparkSQL, _super);
function SparkSQL() {
return _super !== null && _super.apply(this, arguments) || this;
const antlr4_1 = require("antlr4");
const SparkSqlLexer_1 = require("../lib/spark/SparkSqlLexer");
const SparkSqlParser_1 = require("../lib/spark/SparkSqlParser");
const basicParser_1 = require("./common/basicParser");
class SparkSQL extends basicParser_1.default {
createLexer(input) {
const chars = new antlr4_1.InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
const lexer = new SparkSqlLexer_1.SparkSqlLexer(chars);
return lexer;
}
SparkSQL.prototype.createLexer = function (input) {
var chars = new antlr4_1.InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
var lexer = new SparkSqlLexer_1.SparkSqlLexer(chars);
return lexer;
};
SparkSQL.prototype.createParserFromLexer = function (lexer) {
var tokenStream = new antlr4_1.CommonTokenStream(lexer);
createParserFromLexer(lexer) {
const tokenStream = new antlr4_1.CommonTokenStream(lexer);
return new SparkSqlParser_1.SparkSqlParser(tokenStream);
};
return SparkSQL;
}(basicParser_1.default));
}
}
exports.default = SparkSQL;
//# sourceMappingURL=spark.js.map
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.lexer = exports.splitSql = exports.cleanSql = void 0;
var token_1 = require("./token");
const token_1 = require("./token");
/**

@@ -11,6 +11,6 @@ * Extract lexical information such as comments and delimiters

// Track the position of the current character
var current = 0;
var line = 1;
let current = 0;
let line = 1;
// The final TokenType results
var tokens = [];
const tokens = [];
/**

@@ -20,5 +20,5 @@ * Extract a TokenType

// eslint-disable-next-line
var extract = function (currentChar, validator, TokenType) {
var value = '';
var start = current;
const extract = (currentChar, validator, TokenType) => {
let value = '';
const start = current;
while (validator.test(currentChar)) {

@@ -39,5 +39,5 @@ value += currentChar;

*/
var matchFunction = function (currentChar, validator) {
var value = currentChar;
var start = current;
const matchFunction = (currentChar, validator) => {
let value = currentChar;
const start = current;
do {

@@ -52,3 +52,3 @@ if (currentChar === '\n') {

type: token_1.TokenType.FunctionArguments,
value: value,
value,
start: start,

@@ -63,3 +63,3 @@ lineNumber: line,

*/
var matchQuotation = function (currentChar, validator, TokenType) {
const matchQuotation = (currentChar, validator, TokenType) => {
do {

@@ -74,3 +74,3 @@ if (currentChar === '\n') {

while (current < input.length) {
var char = input[current];
let char = input[current];
// Handle, in order: bracketed functions, newlines, backquotes, single quotes, double quotes, comments, semicolons

@@ -104,4 +104,4 @@ // Quoted text may contain comment markers and semicolons, so process quoted content first to rule out that noise

if (char === '-' && input[current + 1] === '-') {
var value = '';
var start = current;
let value = '';
const start = current;
while (char !== '\n' && current < input.length) {

@@ -113,3 +113,3 @@ value += char;

type: token_1.TokenType.Comment,
value: value,
value,
start: start,

@@ -123,5 +123,5 @@ lineNumber: line,

if (char === '/' && input[current + 1] === '*') {
var value = '';
var start = current;
var startLine = line;
let value = '';
const start = current;
const startLine = line;
while (!(char === '/' && input[current - 1] === '*')) {

@@ -138,3 +138,3 @@ if (char === '\n') {

type: token_1.TokenType.Comment,
value: value,
value,
start: start,

@@ -148,3 +148,3 @@ lineNumber: startLine,

if (token_1.TokenReg.StatementTerminator.test(char)) {
var newToken = extract(char, token_1.TokenReg.StatementTerminator, token_1.TokenType.StatementTerminator);
const newToken = extract(char, token_1.TokenReg.StatementTerminator, token_1.TokenType.StatementTerminator);
tokens.push(newToken);

@@ -163,6 +163,6 @@ continue;

function splitSql(sql) {
var tokens = lexer(sql);
var sqlArr = [];
var startIndex = 0;
tokens.forEach(function (ele) {
const tokens = lexer(sql);
const sqlArr = [];
let startIndex = 0;
tokens.forEach((ele) => {
if (ele.type === token_1.TokenType.StatementTerminator) {

@@ -185,6 +185,6 @@ sqlArr.push(sql.slice(startIndex, ele.end));

sql = sql.trim(); // strip leading and trailing whitespace
var tokens = lexer(sql);
var resultSql = '';
var startIndex = 0;
tokens.forEach(function (ele) {
const tokens = lexer(sql);
let resultSql = '';
let startIndex = 0;
tokens.forEach((ele) => {
if (ele.type === token_1.TokenType.Comment) {

@@ -191,0 +191,0 @@ resultSql += sql.slice(startIndex, ele.start);

"use strict";
var _a;
Object.defineProperty(exports, "__esModule", { value: true });

@@ -40,11 +39,11 @@ exports.TokenReg = exports.TokenType = void 0;

*/
exports.TokenReg = (_a = {},
_a[TokenType.StatementTerminator] = /[;]/,
_a[TokenType.SingleQuotation] = /['|\']/,
_a[TokenType.DoubleQuotation] = /["]/,
_a[TokenType.BackQuotation] = /[`]/,
_a[TokenType.LeftSmallBracket] = /[(]/,
_a[TokenType.RightSmallBracket] = /[)]/,
_a[TokenType.Comma] = /[,]/,
_a);
exports.TokenReg = {
[TokenType.StatementTerminator]: /[;]/,
[TokenType.SingleQuotation]: /['|\']/,
[TokenType.DoubleQuotation]: /["]/,
[TokenType.BackQuotation]: /[`]/,
[TokenType.LeftSmallBracket]: /[(]/,
[TokenType.RightSmallBracket]: /[)]/,
[TokenType.Comma]: /[,]/,
};
//# sourceMappingURL=token.js.map
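The utility module above exposes lexer, splitSql and cleanSql. A hedged sketch, assuming the package root re-exports these helpers as in upstream dt-sql-parser:

const { cleanSql, splitSql } = require('@salvoravida/dt-sql-parser');

const sql = 'SELECT 1; -- first statement\nSELECT 2;';
console.log(cleanSql(sql));   // comments stripped
console.log(splitSql(sql));   // statements split on the ';' terminator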
{
"name": "@salvoravida/dt-sql-parser",
"version": "4.0.0-beta.2.9",
"version": "4.0.0-beta.2.10",
"description": "SQL Parsers for BigData, built with antlr4",

@@ -5,0 +5,0 @@ "keywords": [

@@ -6,3 +6,3 @@ {

"allowJs":true,
"target": "es5",
"target": "es6",
"module": "commonjs",

@@ -9,0 +9,0 @@ "declaration": true,

Diffs of the remaining changed files are not shown (file type not supported or diff too large to display).
