@salvoravida/dt-sql-parser - npm Package Compare versions

Comparing version 4.0.0-beta.2.3 to 4.0.0-beta.2.4

dist/parser/common/basicParser.d.ts

@@ -1,3 +0,2 @@
-import Token from 'antlr4/Token';
-import Lexer from 'antlr4/Lexer';
+import antlr4 from 'antlr4';
 import { ParserError, ErrorHandler } from './parserErrorListener';
@@ -15,3 +14,3 @@ /**
  */
-abstract createLexer(input: string): Lexer;
+abstract createLexer(input: string): antlr4.Lexer;
 /**
@@ -21,3 +20,3 @@ * Create Parser by lexer
  */
-abstract createParserFromLexer(lexer: Lexer): any;
+abstract createParserFromLexer(lexer: antlr4.Lexer): any;
 /**
@@ -31,3 +30,3 @@ * Visit parser tree
  */
-getAllTokens(input: string): Token[];
+getAllTokens(input: string): antlr4.Token[];
 /**
@@ -34,0 +33,0 @@ * Get Parser instance by input string

 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-const ParseTreeWalker_1 = require("antlr4/tree/ParseTreeWalker");
+const antlr4_1 = require("antlr4");
 const parserErrorListener_1 = require("./parserErrorListener");
@@ -73,3 +73,3 @@ /**
 listen(listener, parserTree) {
-ParseTreeWalker_1.default.DEFAULT.walk(listener, parserTree);
+antlr4_1.default.tree.ParseTreeWalker.DEFAULT.walk(listener, parserTree);
 }
@@ -76,0 +76,0 @@ }
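The pattern in these hunks repeats throughout the release: the deep antlr4 module paths (antlr4/tree/ParseTreeWalker, antlr4/Token, antlr4/Lexer) are replaced by a single root import, and the same classes are reached through the antlr4 namespace. For callers of listen() nothing changes; the walk is still ParseTreeWalker.DEFAULT.walk. A minimal sketch of the equivalent call in consumer code, assuming a listener and a parse tree are already in hand (myListener and parserTree are illustrative placeholders, not exports of this package):

import antlr4 from 'antlr4';

// Placeholders for a ParseTreeListener implementation and a parse tree
// produced elsewhere; they are not part of this package's API.
declare const myListener: any;
declare const parserTree: any;

// The same call BasicParser.listen() now makes through the root export.
antlr4.tree.ParseTreeWalker.DEFAULT.walk(myListener, parserTree);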

@@ -1,4 +0,3 @@
-import Token from 'antlr4/Token';
-import Recognizer from 'antlr4/Recognizer';
-import ErrorListener from 'antlr4/error/ErrorListener';
+import antlr4 from 'antlr4';
+import type Recognizer from 'antlr4/Recognizer';
 export interface ParserError {
@@ -13,3 +12,3 @@ startLine: number;
 recognizer: Recognizer;
-offendingSymbol: Token;
+offendingSymbol: antlr4.Token;
 line: number;
@@ -21,11 +20,11 @@ charPositionInLine: number;
 export type ErrorHandler = (err: ParserError, errOption: SyntaxError) => void;
-export declare class ParserErrorCollector extends ErrorListener {
+export declare class ParserErrorCollector extends antlr4.error.ErrorListener {
 private _errors;
 constructor(error: ParserError[]);
-syntaxError(recognizer: Recognizer, offendingSymbol: Token, line: number, charPositionInLine: number, msg: string, e: any): void;
+syntaxError(recognizer: Recognizer, offendingSymbol: antlr4.Token, line: number, charPositionInLine: number, msg: string, e: any): void;
 }
-export default class ParserErrorListener extends ErrorListener {
+export default class ParserErrorListener extends antlr4.error.ErrorListener {
 private _errorHandler;
 constructor(errorListener: ErrorHandler);
-syntaxError(recognizer: Recognizer, offendingSymbol: Token, line: number, charPositionInLine: number, msg: string, e: any): void;
+syntaxError(recognizer: Recognizer, offendingSymbol: antlr4.Token, line: number, charPositionInLine: number, msg: string, e: any): void;
 }

 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.ParserErrorCollector = void 0;
-const ErrorListener_1 = require("antlr4/error/ErrorListener");
-class ParserErrorCollector extends ErrorListener_1.default {
+const antlr4_1 = require("antlr4");
+class ParserErrorCollector extends antlr4_1.default.error.ErrorListener {
 constructor(error) {
@@ -25,3 +25,3 @@ super();
 exports.ParserErrorCollector = ParserErrorCollector;
-class ParserErrorListener extends ErrorListener_1.default {
+class ParserErrorListener extends antlr4_1.default.error.ErrorListener {
 constructor(errorListener) {
@@ -28,0 +28,0 @@ super();
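The error-reporting contract is untouched: ParserErrorCollector and ParserErrorListener still extend antlr4's ErrorListener, only resolved through antlr4.error, and a consumer still supplies an ErrorHandler callback to ParserErrorListener. A minimal handler matching the ErrorHandler type declared above; the relative import path is the package-internal one shown in this diff, and logError is an illustrative name, not part of the package:

import ParserErrorListener, { ErrorHandler, ParserError } from './parserErrorListener';

// Only startLine is used here, since that is the only ParserError field
// visible in this diff; errOption is a standard SyntaxError.
const logError: ErrorHandler = (err: ParserError, errOption: SyntaxError) => {
    console.error(`syntax error at line ${err.startLine}:`, errOption.message);
};

// ParserErrorListener's constructor accepts an ErrorHandler, per the d.ts above.
const listener = new ParserErrorListener(logError);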

@@ -1,6 +0,6 @@
-import type Lexer from 'antlr4/Lexer';
+import antlr4 from 'antlr4';
 import BasicParser from './common/basicParser';
 export default class FlinkSQL extends BasicParser {
-createLexer(input: string): Lexer;
-createParserFromLexer(lexer: Lexer): any;
+createLexer(input: string): antlr4.Lexer;
+createParserFromLexer(lexer: antlr4.Lexer): any;
 }

 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-const InputStream_1 = require("antlr4/InputStream");
-const CommonTokenStream_1 = require("antlr4/CommonTokenStream");
+const antlr4_1 = require("antlr4");
 const FlinkSqlLexer_1 = require("../lib/flinksql/FlinkSqlLexer");
@@ -10,3 +9,3 @@ const FlinkSqlParser_1 = require("../lib/flinksql/FlinkSqlParser");
 createLexer(input) {
-const chars = new InputStream_1.default(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
+const chars = new antlr4_1.default.InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
 const lexer = new FlinkSqlLexer_1.default(chars);
@@ -16,3 +15,3 @@ return lexer;
 createParserFromLexer(lexer) {
-const tokenStream = new CommonTokenStream_1.default(lexer);
+const tokenStream = new antlr4_1.default.CommonTokenStream(lexer);
 return new FlinkSqlParser_1.default(tokenStream);
@@ -19,0 +18,0 @@ }
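Taken together, the FlinkSQL parser now builds its whole pipeline from the root export: antlr4.InputStream over the upper-cased text, the generated FlinkSqlLexer, antlr4.CommonTokenStream, then the generated FlinkSqlParser. A minimal usage sketch based on the methods declared above; the entry-point import is an assumption, since this diff only shows files under dist/parser:

// Assumed re-export from the package entry point (not shown in this diff).
import { FlinkSQL } from '@salvoravida/dt-sql-parser';

const parser = new FlinkSQL();

// getAllTokens() is declared on BasicParser above. Because createLexer()
// upper-cases the input, the returned token text is upper-cased as well.
const tokens = parser.getAllTokens('select id, name from tbl1;');
console.log(tokens.length);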

@@ -1,6 +0,6 @@
-import type Lexer from 'antlr4/Lexer';
+import antlr4 from 'antlr4';
 import BasicParser from './common/basicParser';
 export default class GenericSQL extends BasicParser {
-createLexer(input: string): Lexer;
-createParserFromLexer(lexer: Lexer): any;
+createLexer(input: string): antlr4.Lexer;
+createParserFromLexer(lexer: antlr4.Lexer): any;
 }

 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-const InputStream_1 = require("antlr4/InputStream");
-const CommonTokenStream_1 = require("antlr4/CommonTokenStream");
+const antlr4_1 = require("antlr4");
 const SqlLexer_1 = require("../lib/generic/SqlLexer");
@@ -10,3 +9,3 @@ const SqlParser_1 = require("../lib/generic/SqlParser");
 createLexer(input) {
-const chars = new InputStream_1.default(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
+const chars = new antlr4_1.default.InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
 const lexer = new SqlLexer_1.default(chars);
@@ -16,3 +15,3 @@ return lexer;
 createParserFromLexer(lexer) {
-const tokenStream = new CommonTokenStream_1.default(lexer);
+const tokenStream = new antlr4_1.default.CommonTokenStream(lexer);
 return new SqlParser_1.default(tokenStream);
@@ -19,0 +18,0 @@ }

@@ -1,6 +0,6 @@
-import type Lexer from 'antlr4/Lexer';
+import antlr4 from 'antlr4';
 import BasicParser from './common/basicParser';
 export default class HiveSQL extends BasicParser {
-createLexer(input: string): Lexer;
-createParserFromLexer(lexer: Lexer): any;
+createLexer(input: string): antlr4.Lexer;
+createParserFromLexer(lexer: antlr4.Lexer): any;
 }

 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-const InputStream_1 = require("antlr4/InputStream");
-const CommonTokenStream_1 = require("antlr4/CommonTokenStream");
+const antlr4_1 = require("antlr4");
 const HiveSqlLexer_1 = require("../lib/hive/HiveSqlLexer");
@@ -10,3 +9,3 @@ const HiveSql_1 = require("../lib/hive/HiveSql");
 createLexer(input) {
-const chars = new InputStream_1.default(input);
+const chars = new antlr4_1.default.InputStream(input);
 const lexer = new HiveSqlLexer_1.default(chars);
@@ -16,3 +15,3 @@ return lexer;
 createParserFromLexer(lexer) {
-const tokenStream = new CommonTokenStream_1.default(lexer);
+const tokenStream = new antlr4_1.default.CommonTokenStream(lexer);
 return new HiveSql_1.default(tokenStream);
@@ -19,0 +18,0 @@ }
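One behavioral detail visible in the hunks above: unlike the Flink and generic parsers, HiveSQL feeds the raw input to its lexer without toUpperCase(), so Hive tokenization preserves the original casing. A quick way to observe the difference with getAllTokens(); as before, the entry-point exports and the use of each token's text property are assumptions, not confirmed by this diff:

import { FlinkSQL, HiveSQL } from '@salvoravida/dt-sql-parser'; // assumed entry-point exports

// FlinkSQL upper-cases the statement before lexing; HiveSQL passes it through
// unchanged, so the same input produces tokens with different text casing.
const flinkTokens = new FlinkSQL().getAllTokens('select 1');
const hiveTokens = new HiveSQL().getAllTokens('select 1');
console.log(flinkTokens[0].text, hiveTokens[0].text);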

@@ -1,6 +0,6 @@
-import type Lexer from 'antlr4/Lexer';
+import antlr4 from 'antlr4';
 import BasicParser from './common/basicParser';
 export default class PLSQLParser extends BasicParser {
-createLexer(input: string): Lexer;
-createParserFromLexer(lexer: Lexer): any;
+createLexer(input: string): antlr4.Lexer;
+createParserFromLexer(lexer: antlr4.Lexer): any;
 }

 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-const InputStream_1 = require("antlr4/InputStream");
-const CommonTokenStream_1 = require("antlr4/CommonTokenStream");
+const antlr4_1 = require("antlr4");
 const PostgreSQLLexer_1 = require("../lib/pgsql/PostgreSQLLexer");
@@ -10,3 +9,3 @@ const PostgreSQLParser_1 = require("../lib/pgsql/PostgreSQLParser");
 createLexer(input) {
-const chars = new InputStream_1.default(input.toUpperCase());
+const chars = new antlr4_1.default.InputStream(input.toUpperCase());
 const lexer = new PostgreSQLLexer_1.default(chars);
@@ -16,3 +15,3 @@ return lexer;
 createParserFromLexer(lexer) {
-const tokenStream = new CommonTokenStream_1.default(lexer);
+const tokenStream = new antlr4_1.default.CommonTokenStream(lexer);
 return new PostgreSQLParser_1.default(tokenStream);
@@ -19,0 +18,0 @@ }

@@ -1,6 +0,6 @@
-import type Lexer from 'antlr4/Lexer';
+import antlr4 from 'antlr4';
 import BasicParser from './common/basicParser';
 export default class PLSQLParser extends BasicParser {
-createLexer(input: string): Lexer;
-createParserFromLexer(lexer: Lexer): any;
+createLexer(input: string): antlr4.Lexer;
+createParserFromLexer(lexer: antlr4.Lexer): any;
 }

 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-const InputStream_1 = require("antlr4/InputStream");
-const CommonTokenStream_1 = require("antlr4/CommonTokenStream");
+const antlr4_1 = require("antlr4");
 const PlSqlLexer_1 = require("../lib/plsql/PlSqlLexer");
@@ -11,3 +10,3 @@ // @ts-ignore
 createLexer(input) {
-const chars = new InputStream_1.default(input.toUpperCase());
+const chars = new antlr4_1.default.InputStream(input.toUpperCase());
 const lexer = new PlSqlLexer_1.default(chars);
@@ -17,3 +16,3 @@ return lexer;
 createParserFromLexer(lexer) {
-const tokenStream = new CommonTokenStream_1.default(lexer);
+const tokenStream = new antlr4_1.default.CommonTokenStream(lexer);
 return new PlSqlParser_1.default(tokenStream);
@@ -20,0 +19,0 @@ }

@@ -1,6 +0,6 @@
-import type Lexer from 'antlr4/Lexer';
+import antlr4 from 'antlr4';
 import BasicParser from './common/basicParser';
 export default class SparkSQL extends BasicParser {
-createLexer(input: string): Lexer;
-createParserFromLexer(lexer: Lexer): any;
+createLexer(input: string): antlr4.Lexer;
+createParserFromLexer(lexer: antlr4.Lexer): any;
 }

 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-const InputStream_1 = require("antlr4/InputStream");
-const CommonTokenStream_1 = require("antlr4/CommonTokenStream");
+const antlr4_1 = require("antlr4");
 const SparkSqlLexer_1 = require("../lib/spark/SparkSqlLexer");
@@ -10,3 +9,3 @@ const SparkSqlParser_1 = require("../lib/spark/SparkSqlParser");
 createLexer(input) {
-const chars = new InputStream_1.default(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
+const chars = new antlr4_1.default.InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
 const lexer = new SparkSqlLexer_1.default(chars);
@@ -16,3 +15,3 @@ return lexer;
 createParserFromLexer(lexer) {
-const tokenStream = new CommonTokenStream_1.default(lexer);
+const tokenStream = new antlr4_1.default.CommonTokenStream(lexer);
 return new SparkSqlParser_1.default(tokenStream);
@@ -19,0 +18,0 @@ }

 {
 "name": "@salvoravida/dt-sql-parser",
-"version": "4.0.0-beta.2.3",
+"version": "4.0.0-beta.2.4",
 "description": "SQL Parsers for BigData, built with antlr4",
@@ -5,0 +5,0 @@ "keywords": [

Sorry, the diffs of the remaining 8 files are not supported yet.
