@salvoravida/dt-sql-parser - npm Package Compare versions

Comparing version 4.0.0-beta.2.4 to 4.0.0-beta.2.5


dist/lib/flinksql/FlinkSqlLexer.d.ts

@@ -1,2 +0,2 @@

- declare class FlinkSqlLexer extends antlr4.Lexer {
+ declare class FlinkSqlLexer {
static grammarFileName: string;

@@ -9,7 +9,7 @@ static channelNames: string[];

constructor(input: any);
- _interp: antlr4.atn.LexerATNSimulator;
- get atn(): antlr4.atn.ATN;
+ _interp: any;
+ get atn(): any;
}
declare namespace FlinkSqlLexer {
- const EOF: -1;
+ const EOF: any;
const SPACE: number;

@@ -308,2 +308,1 @@ const COMMENT_INPUT: number;

export default FlinkSqlLexer;
- import antlr4 from "antlr4";

@@ -1,2 +0,2 @@

- export default class FlinkSqlParserListener extends antlr4.tree.ParseTreeListener {
+ export default class FlinkSqlParserListener {
enterProgram(ctx: any): void;

@@ -283,2 +283,1 @@ exitProgram(ctx: any): void;

}
- import antlr4 from "antlr4";

@@ -1,2 +0,2 @@

- export default class FlinkSqlParserVisitor extends antlr4.tree.ParseTreeVisitor {
+ export default class FlinkSqlParserVisitor {
visitProgram(ctx: any): any;

@@ -143,2 +143,1 @@ visitStatement(ctx: any): any;

}
- import antlr4 from "antlr4";

@@ -1,2 +0,2 @@

- declare class SqlLexer extends antlr4.Lexer {
+ declare class SqlLexer {
static grammarFileName: string;

@@ -9,7 +9,7 @@ static channelNames: string[];

constructor(input: any);
- _interp: antlr4.atn.LexerATNSimulator;
- get atn(): antlr4.atn.ATN;
+ _interp: any;
+ get atn(): any;
}
declare namespace SqlLexer {
- const EOF: -1;
+ const EOF: any;
const SPACE: number;

@@ -1067,2 +1067,1 @@ const SPEC_MYSQL_COMMENT: number;

export default SqlLexer;
- import antlr4 from "antlr4";

@@ -1,2 +0,2 @@

- export default class SqlParserListener extends antlr4.tree.ParseTreeListener {
+ export default class SqlParserListener {
enterProgram(ctx: any): void;

@@ -1095,2 +1095,1 @@ exitProgram(ctx: any): void;

}
- import antlr4 from "antlr4";

@@ -1,2 +0,2 @@

- export default class SqlParserVisitor extends antlr4.tree.ParseTreeVisitor {
+ export default class SqlParserVisitor {
visitProgram(ctx: any): any;

@@ -549,2 +549,1 @@ visitStatement(ctx: any): any;

}
- import antlr4 from "antlr4";

@@ -1,2 +0,2 @@

- declare class HiveSqlLexer extends antlr4.Lexer {
+ declare class HiveSqlLexer {
static grammarFileName: string;

@@ -9,7 +9,7 @@ static channelNames: string[];

constructor(input: any);
- _interp: antlr4.atn.LexerATNSimulator;
- get atn(): antlr4.atn.ATN;
+ _interp: any;
+ get atn(): any;
}
declare namespace HiveSqlLexer {
- const EOF: -1;
+ const EOF: any;
const T_ACTION: number;

@@ -393,2 +393,1 @@ const T_ADD2: number;

export default HiveSqlLexer;
- import antlr4 from "antlr4";

@@ -1,2 +0,2 @@

- export default class HiveSqlListener extends antlr4.tree.ParseTreeListener {
+ export default class HiveSqlListener {
enterProgram(ctx: any): void;

@@ -457,2 +457,1 @@ exitProgram(ctx: any): void;

}
- import antlr4 from "antlr4";

@@ -1,2 +0,2 @@

- export default class HiveSqlVisitor extends antlr4.tree.ParseTreeVisitor {
+ export default class HiveSqlVisitor {
visitProgram(ctx: any): any;

@@ -230,2 +230,1 @@ visitBlock(ctx: any): any;

}
- import antlr4 from "antlr4";

@@ -9,4 +9,4 @@ declare class PostgreSQLLexer {

constructor(input: any);
- _interp: antlr4.atn.LexerATNSimulator;
- get atn(): antlr4.atn.ATN;
+ _interp: any;
+ get atn(): any;
action(localctx: any, ruleIndex: any, actionIndex: any): void;

@@ -27,3 +27,3 @@ Operator_action(localctx: any, actionIndex: any): void;

declare namespace PostgreSQLLexer {
- const EOF: -1;
+ const EOF: any;
const Dollar: number;

@@ -590,2 +590,1 @@ const OPEN_PAREN: number;

export default PostgreSQLLexer;
- import antlr4 from "antlr4";

@@ -1,2 +0,2 @@

- export default class PostgreSQLParserVisitor extends antlr4.tree.ParseTreeVisitor {
+ export default class PostgreSQLParserVisitor {
visitRoot(ctx: any): any;

@@ -813,2 +813,1 @@ visitPlsqlroot(ctx: any): any;

}
- import antlr4 from "antlr4";

@@ -1,2 +0,2 @@

- export default class PlSqlParserVisitor extends antlr4.tree.ParseTreeVisitor {
+ export default class PlSqlParserVisitor {
visitProgram(ctx: any): any;

@@ -756,2 +756,1 @@ visitSql_script(ctx: any): any;

}
- import antlr4 from "antlr4";

@@ -1,2 +0,2 @@

- declare class SparkSqlLexer extends antlr4.Lexer {
+ declare class SparkSqlLexer {
static grammarFileName: string;

@@ -9,4 +9,4 @@ static channelNames: string[];

constructor(input: any);
- _interp: antlr4.atn.LexerATNSimulator;
- get atn(): antlr4.atn.ATN;
+ _interp: any;
+ get atn(): any;
sempred(localctx: any, ruleIndex: any, predIndex: any): any;

@@ -21,3 +21,3 @@ EXPONENT_VALUE_sempred(localctx: any, predIndex: any): any;

declare namespace SparkSqlLexer {
- const EOF: -1;
+ const EOF: any;
const T__0: number;

@@ -320,2 +320,1 @@ const T__1: number;

export default SparkSqlLexer;
- import antlr4 from "antlr4";

@@ -1,2 +0,2 @@

- export default class SparkSqlListener extends antlr4.tree.ParseTreeListener {
+ export default class SparkSqlListener {
enterProgram(ctx: any): void;

@@ -545,2 +545,1 @@ exitProgram(ctx: any): void;

}
- import antlr4 from "antlr4";

@@ -1,2 +0,2 @@

- export default class SparkSqlVisitor extends antlr4.tree.ParseTreeVisitor {
+ export default class SparkSqlVisitor {
visitProgram(ctx: any): any;

@@ -274,2 +274,1 @@ visitSingleStatement(ctx: any): any;

}
- import antlr4 from "antlr4";

@@ -1,3 +0,3 @@

- import antlr4 from 'antlr4';
import { ParserError, ErrorHandler } from './parserErrorListener';
+ import { Lexer, Token } from 'antlr4';
/**

@@ -14,3 +14,3 @@ * Custom Parser class, subclass needs extends it.

*/
- abstract createLexer(input: string): antlr4.Lexer;
+ abstract createLexer(input: string): Lexer;
/**

@@ -20,3 +20,3 @@ * Create Parser by lexer

*/
- abstract createParserFromLexer(lexer: antlr4.Lexer): any;
+ abstract createParserFromLexer(lexer: Lexer): any;
/**

@@ -30,3 +30,3 @@ * Visit parser tree

*/
- getAllTokens(input: string): antlr4.Token[];
+ getAllTokens(input: string): Token[];
/**

@@ -33,0 +33,0 @@ * Get Parser instance by input string

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
+ const parserErrorListener_1 = require("./parserErrorListener");
const antlr4_1 = require("antlr4");
- const parserErrorListener_1 = require("./parserErrorListener");
/**

@@ -37,2 +37,3 @@ * Custom Parser class, subclass needs extends it.

getAllTokens(input) {
+ // @ts-ignore
return this.createLexer(input).getAllTokens();

@@ -74,3 +75,3 @@ }

listen(listener, parserTree) {
- antlr4_1.default.tree.ParseTreeWalker.DEFAULT.walk(listener, parserTree);
+ antlr4_1.ParseTreeWalker.DEFAULT.walk(listener, parserTree);
}

@@ -77,0 +78,0 @@ }
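
The change above is the core of this release: every `antlr4_1.default.X` access in the compiled output becomes a named-export access (`antlr4_1.X`), matching the named exports antlr4 4.12 provides. A rough sketch of what that style looks like in application code (the FlinkSqlParser module path and the program() entry rule are assumptions based on the declarations in this diff, not documented API):

// Sketch only: antlr4 >= 4.12 named imports, mirroring the compiled dist above.
import { InputStream, CommonTokenStream, ParseTreeWalker } from 'antlr4';
import FlinkSqlLexer from '@salvoravida/dt-sql-parser/dist/lib/flinksql/FlinkSqlLexer';
import FlinkSqlParser from '@salvoravida/dt-sql-parser/dist/lib/flinksql/FlinkSqlParser'; // assumed path

// This lexer only supports uppercase tokens, so the input is upper-cased first (see comments below).
const chars = new InputStream('SELECT 1'.toUpperCase());
const lexer = new FlinkSqlLexer(chars);
const tokens = new CommonTokenStream(lexer);
const parser = new FlinkSqlParser(tokens);
const tree = parser.program(); // 'program' is the start rule suggested by enterProgram/exitProgram above

// Walking the tree with a listener, as BasicParser.listen() does:
// ParseTreeWalker.DEFAULT.walk(myListener, tree);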

@@ -1,3 +0,3 @@

- import antlr4 from 'antlr4';
- import type Recognizer from 'antlr4/Recognizer';
+ import { ErrorListener, Token } from 'antlr4';
+ import { Recognizer } from 'antlr4/src/antlr4/Recognizer';
export interface ParserError {

@@ -11,4 +11,4 @@ startLine: number;

export interface SyntaxError {
- recognizer: Recognizer;
- offendingSymbol: antlr4.Token;
+ recognizer: Recognizer<any>;
+ offendingSymbol: Token;
line: number;

@@ -20,11 +20,11 @@ charPositionInLine: number;

export type ErrorHandler = (err: ParserError, errOption: SyntaxError) => void;
- export declare class ParserErrorCollector extends antlr4.error.ErrorListener {
+ export declare class ParserErrorCollector extends ErrorListener<any> {
private _errors;
constructor(error: ParserError[]);
- syntaxError(recognizer: Recognizer, offendingSymbol: antlr4.Token, line: number, charPositionInLine: number, msg: string, e: any): void;
+ syntaxError(recognizer: Recognizer<any>, offendingSymbol: Token, line: number, charPositionInLine: number, msg: string, e: any): void;
}
- export default class ParserErrorListener extends antlr4.error.ErrorListener {
+ export default class ParserErrorListener extends ErrorListener<any> {
private _errorHandler;
constructor(errorListener: ErrorHandler);
- syntaxError(recognizer: Recognizer, offendingSymbol: antlr4.Token, line: number, charPositionInLine: number, msg: string, e: any): void;
+ syntaxError(recognizer: Recognizer<any>, offendingSymbol: Token, line: number, charPositionInLine: number, msg: string, e: any): void;
}

@@ -5,3 +5,3 @@ "use strict";

const antlr4_1 = require("antlr4");
- class ParserErrorCollector extends antlr4_1.default.error.ErrorListener {
+ class ParserErrorCollector extends antlr4_1.ErrorListener {
constructor(error) {

@@ -26,3 +26,3 @@ super();

exports.ParserErrorCollector = ParserErrorCollector;
- class ParserErrorListener extends antlr4_1.default.error.ErrorListener {
+ class ParserErrorListener extends antlr4_1.ErrorListener {
constructor(errorListener) {

@@ -29,0 +29,0 @@ super();
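
The error listeners now extend the ErrorListener shipped with antlr4 itself rather than antlr4.error.ErrorListener. A minimal sketch of a collector written against the new typings, using only the shapes visible in the declarations above (the handler body is illustrative, not part of the package):

import { ErrorListener, Token } from 'antlr4';
import { Recognizer } from 'antlr4/src/antlr4/Recognizer';

// Sketch: gather syntax errors much like ParserErrorCollector in this diff.
class SimpleErrorCollector extends ErrorListener<any> {
    readonly errors: string[] = [];

    syntaxError(
        recognizer: Recognizer<any>,
        offendingSymbol: Token,
        line: number,
        charPositionInLine: number,
        msg: string,
        e: any,
    ): void {
        this.errors.push(`line ${line}:${charPositionInLine} ${msg}`);
    }
}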

@@ -1,6 +0,6 @@

- import antlr4 from 'antlr4';
+ import { Lexer } from 'antlr4';
import BasicParser from './common/basicParser';
export default class FlinkSQL extends BasicParser {
- createLexer(input: string): antlr4.Lexer;
- createParserFromLexer(lexer: antlr4.Lexer): any;
+ createLexer(input: string): Lexer;
+ createParserFromLexer(lexer: Lexer): any;
}

@@ -9,3 +9,3 @@ "use strict";

createLexer(input) {
- const chars = new antlr4_1.default.InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
+ const chars = new antlr4_1.InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
const lexer = new FlinkSqlLexer_1.default(chars);

@@ -15,3 +15,3 @@ return lexer;

createParserFromLexer(lexer) {
- const tokenStream = new antlr4_1.default.CommonTokenStream(lexer);
+ const tokenStream = new antlr4_1.CommonTokenStream(lexer);
return new FlinkSqlParser_1.default(tokenStream);

@@ -18,0 +18,0 @@ }

@@ -1,6 +0,6 @@

- import antlr4 from 'antlr4';
+ import { Lexer } from 'antlr4';
import BasicParser from './common/basicParser';
export default class GenericSQL extends BasicParser {
- createLexer(input: string): antlr4.Lexer;
- createParserFromLexer(lexer: antlr4.Lexer): any;
+ createLexer(input: string): Lexer;
+ createParserFromLexer(lexer: Lexer): any;
}

@@ -9,3 +9,3 @@ "use strict";

createLexer(input) {
- const chars = new antlr4_1.default.InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
+ const chars = new antlr4_1.InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
const lexer = new SqlLexer_1.default(chars);

@@ -15,3 +15,3 @@ return lexer;

createParserFromLexer(lexer) {
- const tokenStream = new antlr4_1.default.CommonTokenStream(lexer);
+ const tokenStream = new antlr4_1.CommonTokenStream(lexer);
return new SqlParser_1.default(tokenStream);

@@ -18,0 +18,0 @@ }

@@ -1,6 +0,6 @@

- import antlr4 from 'antlr4';
+ import { Lexer } from 'antlr4';
import BasicParser from './common/basicParser';
export default class HiveSQL extends BasicParser {
- createLexer(input: string): antlr4.Lexer;
- createParserFromLexer(lexer: antlr4.Lexer): any;
+ createLexer(input: string): Lexer;
+ createParserFromLexer(lexer: Lexer): any;
}

@@ -9,3 +9,3 @@ "use strict";

createLexer(input) {
- const chars = new antlr4_1.default.InputStream(input);
+ const chars = new antlr4_1.InputStream(input);
const lexer = new HiveSqlLexer_1.default(chars);

@@ -15,3 +15,3 @@ return lexer;

createParserFromLexer(lexer) {
- const tokenStream = new antlr4_1.default.CommonTokenStream(lexer);
+ const tokenStream = new antlr4_1.CommonTokenStream(lexer);
return new HiveSql_1.default(tokenStream);

@@ -18,0 +18,0 @@ }

@@ -1,6 +0,6 @@

- import antlr4 from 'antlr4';
+ import { Lexer } from 'antlr4';
import BasicParser from './common/basicParser';
export default class PLSQLParser extends BasicParser {
- createLexer(input: string): antlr4.Lexer;
- createParserFromLexer(lexer: antlr4.Lexer): any;
+ createLexer(input: string): Lexer;
+ createParserFromLexer(lexer: Lexer): any;
}

@@ -9,3 +9,3 @@ "use strict";

createLexer(input) {
- const chars = new antlr4_1.default.InputStream(input.toUpperCase());
+ const chars = new antlr4_1.InputStream(input.toUpperCase());
const lexer = new PostgreSQLLexer_1.default(chars);

@@ -15,3 +15,3 @@ return lexer;

createParserFromLexer(lexer) {
- const tokenStream = new antlr4_1.default.CommonTokenStream(lexer);
+ const tokenStream = new antlr4_1.CommonTokenStream(lexer);
return new PostgreSQLParser_1.default(tokenStream);

@@ -18,0 +18,0 @@ }

@@ -1,6 +0,6 @@

- import antlr4 from 'antlr4';
+ import { Lexer } from 'antlr4';
import BasicParser from './common/basicParser';
export default class PLSQLParser extends BasicParser {
- createLexer(input: string): antlr4.Lexer;
- createParserFromLexer(lexer: antlr4.Lexer): any;
+ createLexer(input: string): Lexer;
+ createParserFromLexer(lexer: Lexer): any;
}

@@ -10,3 +10,3 @@ "use strict";

createLexer(input) {
- const chars = new antlr4_1.default.InputStream(input.toUpperCase());
+ const chars = new antlr4_1.InputStream(input.toUpperCase());
const lexer = new PlSqlLexer_1.default(chars);

@@ -16,3 +16,3 @@ return lexer;

createParserFromLexer(lexer) {
- const tokenStream = new antlr4_1.default.CommonTokenStream(lexer);
+ const tokenStream = new antlr4_1.CommonTokenStream(lexer);
return new PlSqlParser_1.default(tokenStream);

@@ -19,0 +19,0 @@ }

@@ -1,6 +0,6 @@

- import antlr4 from 'antlr4';
import BasicParser from './common/basicParser';
+ import { Lexer } from 'antlr4';
export default class SparkSQL extends BasicParser {
- createLexer(input: string): antlr4.Lexer;
- createParserFromLexer(lexer: antlr4.Lexer): any;
+ createLexer(input: string): Lexer;
+ createParserFromLexer(lexer: Lexer): any;
}
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
- const antlr4_1 = require("antlr4");
const SparkSqlLexer_1 = require("../lib/spark/SparkSqlLexer");
const SparkSqlParser_1 = require("../lib/spark/SparkSqlParser");
const basicParser_1 = require("./common/basicParser");
+ const antlr4_1 = require("antlr4");
class SparkSQL extends basicParser_1.default {
createLexer(input) {
- const chars = new antlr4_1.default.InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
+ const chars = new antlr4_1.InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
const lexer = new SparkSqlLexer_1.default(chars);

@@ -14,3 +14,3 @@ return lexer;

createParserFromLexer(lexer) {
- const tokenStream = new antlr4_1.default.CommonTokenStream(lexer);
+ const tokenStream = new antlr4_1.CommonTokenStream(lexer);
return new SparkSqlParser_1.default(tokenStream);

@@ -17,0 +17,0 @@ }

{
"name": "@salvoravida/dt-sql-parser",
"version": "4.0.0-beta.2.4",
"version": "4.0.0-beta.2.5",
"description": "SQL Parsers for BigData, built with antlr4",

@@ -44,5 +44,4 @@ "keywords": [

"dependencies": {
"@types/antlr4": "^4.11.2",
"antlr4": "^4.11.0"
"antlr4": "^4.12.0-beta.0"
}
}
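
The dependency change drives everything above: antlr4 ^4.12 ships its own TypeScript declarations and named exports, so the separate @types/antlr4 package is dropped and the generated .d.ts files fall back to `any` where they previously referenced antlr4's namespace types. A rough usage sketch against the published build (the module path is an assumption; only getAllTokens comes from the BasicParser declaration shown earlier):

// Sketch only: not documented API of this fork.
import FlinkSQL from '@salvoravida/dt-sql-parser/dist/parser/flinksql'; // assumed path

const parser = new FlinkSQL();
// getAllTokens(input: string): Token[] is declared on BasicParser in this diff.
const tokens = parser.getAllTokens('SELECT id FROM tbl');
console.log(tokens.length);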

(Diffs of nine more files are too big to display, and diffs of eight further files are not supported yet.)
