oracle-plsql-parser
Comparing version 0.1.3 with 0.1.4
 declare function parsePlSql(code: string): {
     errors: import("chevrotain").IRecognitionException[];
-    cst: any;
+    cst: import("chevrotain").CstNode;
 };
 export default parsePlSql;
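
The change above tightens the public typing: the returned cst is a chevrotain CstNode instead of any. A minimal consumer sketch, assuming this declaration is the package's main entry point (the PL/SQL snippet is only illustrative):

import parsePlSql from 'oracle-plsql-parser';
import type { CstNode, IRecognitionException } from 'chevrotain';

// an illustrative PL/SQL package spec; any source accepted by the grammar works
const source = `
create or replace package my_pkg as
  procedure do_stuff;
end my_pkg;
`;

const result = parsePlSql(source);
const errors: IRecognitionException[] = result.errors;
const cst: CstNode = result.cst; // previously typed as any

if (errors.length === 0) {
  // CstNode exposes the rule name and a children dictionary
  console.log(`parsed OK, root rule: ${cst.name}`);
}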
@@ -0,5 +1,8 @@
+import { CstNode } from 'chevrotain';
+import PlSqlParser from './rules';
+export declare const parserInstance: PlSqlParser;
 declare function parse(input: string, log?: boolean): {
     errors: import("chevrotain").IRecognitionException[];
-    cst: any;
+    cst: CstNode;
 };
 export default parse;
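
Besides the CstNode typing, this file now exports parserInstance (typed as PlSqlParser). With a concrete parser instance exposed, callers can apply standard chevrotain utilities to the grammar, for example generating railroad diagrams. A sketch follows; the deep import path is an assumption about the published layout:

import { writeFileSync } from 'fs';
import { createSyntaxDiagramsCode } from 'chevrotain';
// assumed path; adjust to wherever the package actually exposes the instance
import { parserInstance } from 'oracle-plsql-parser/dist/components/mainParser/noRecoveryParser';

// serialize the grammar of the exported instance and render an HTML page
// with railroad diagrams for every rule
const serializedGrammar = parserInstance.getSerializedGastProductions();
const htmlText = createSyntaxDiagramsCode(serializedGrammar);
writeFileSync('./plsql_syntax_diagrams.html', htmlText);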
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parserInstance = void 0; | ||
const tokens_1 = require("../tokenDictionary/tokens"); | ||
const rules_1 = require("./rules"); | ||
const logParserErrors_1 = require("./util/logParserErrors"); | ||
const parserInstance = new rules_1.default({ recover: false }); | ||
exports.parserInstance = new rules_1.default({ recover: false }); | ||
function parse(input, log = false) { | ||
const lexResult = (0, tokens_1.lex)(input); | ||
// ".input" is a setter which will reset the parser's internal's state. | ||
parserInstance.input = lexResult.tokens; | ||
exports.parserInstance.input = lexResult.tokens; | ||
// No semantic actions so this won't return anything yet. | ||
const cst = parserInstance.global(); | ||
if (parserInstance.errors.length > 0 && log) { | ||
(0, logParserErrors_1.default)(parserInstance.errors); | ||
const cst = exports.parserInstance.global(); | ||
if (exports.parserInstance.errors.length > 0 && log) { | ||
(0, logParserErrors_1.default)(exports.parserInstance.errors); | ||
} | ||
return { errors: parserInstance.errors, cst }; | ||
return { errors: exports.parserInstance.errors, cst }; | ||
} | ||
exports.default = parse; | ||
//# sourceMappingURL=noRecoveryParser.js.map |
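
Because the module keeps a single shared parser whose state is reset on every call (the ".input" setter noted in the comment), callers should read errors from the returned object rather than holding on to parserInstance.errors across parses. A small error-reporting sketch against the package's default export; the broken snippet is illustrative:

import parsePlSql from 'oracle-plsql-parser';

// deliberately truncated source so the parser produces recognition errors
const { errors } = parsePlSql('create or replace package broken as');

for (const err of errors) {
  // IRecognitionException carries the offending token; line/column are optional
  const line = err.token.startLine ?? '?';
  const column = err.token.startColumn ?? '?';
  console.error(`${err.name} at ${line}:${column} - ${err.message}`);
}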
@@ -0,5 +1,6 @@
+import { CstNode } from 'chevrotain';
 declare function parse(input: string, log?: boolean): {
     errors: import("chevrotain").IRecognitionException[];
-    cst: any;
+    cst: CstNode;
 };
 export default parse;
@@ -7,3 +7,6 @@ "use strict";
     constructor({ recover }) {
-        super(tokens_1.tokenVocabulary, { recoveryEnabled: recover });
+        super(tokens_1.tokenVocabulary, {
+            recoveryEnabled: recover,
+            nodeLocationTracking: 'full',
+        });
         // eslint-disable-next-line @typescript-eslint/no-this-alias
@@ -381,3 +384,3 @@ const $ = this;
         $.SUBRULE($.createPackageStatement); // create (or replace) package
-        $.SUBRULE($.dottedIdentifier); // pkg_name | schema_name.pkg_name
+        $.SUBRULE($.dottedIdentifier, { LABEL: 'package_name' }); // pkg_name | schema_name.pkg_name
         $.CONSUME(tokens_1.tokenVocabulary.AsIs); // as
@@ -399,3 +402,3 @@ $.MANY(() => {
         $.CONSUME(tokens_1.tokenVocabulary.BodyKw); // body
-        $.SUBRULE($.dottedIdentifier); // pkg_name | schema_name.pkg_name
+        $.SUBRULE($.dottedIdentifier, { LABEL: 'package_name' }); // pkg_name | schema_name.pkg_name
         $.CONSUME(tokens_1.tokenVocabulary.AsIs);
@@ -479,3 +482,3 @@ $.MANY(() => {
         $.CONSUME(tokens_1.tokenVocabulary.CursorKw); // cursor
-        $.CONSUME(tokens_1.tokenVocabulary.Identifier); // my_cursor
+        $.CONSUME(tokens_1.tokenVocabulary.Identifier, { LABEL: 'cursor_name' }); // my_cursor
         $.CONSUME(tokens_1.tokenVocabulary.IsKw); // is
@@ -492,3 +495,3 @@ $.SUBRULE($.query);
     $.RULE('exceptionDeclaration', () => {
-        $.CONSUME(tokens_1.tokenVocabulary.Identifier);
+        $.CONSUME(tokens_1.tokenVocabulary.Identifier, { LABEL: 'exception_name' });
         $.CONSUME(tokens_1.tokenVocabulary.ExceptionKw);
@@ -514,3 +517,3 @@ $.CONSUME(tokens_1.tokenVocabulary.Semicolon);
         // follow pattern ident (constant) type...
-        $.CONSUME(tokens_1.tokenVocabulary.Identifier); // l_row
+        $.CONSUME(tokens_1.tokenVocabulary.Identifier, { LABEL: 'variable_name' }); // l_row
         $.OPTION(() => {
@@ -810,3 +813,3 @@ $.CONSUME(tokens_1.tokenVocabulary.ConstantKw);
         $.CONSUME(tokens_1.tokenVocabulary.FunctionKw); // function
-        $.CONSUME(tokens_1.tokenVocabulary.Identifier); // fnc_name
+        $.CONSUME(tokens_1.tokenVocabulary.Identifier, { LABEL: 'function_name' }); // fnc_name
         $.OPTION(() => {
@@ -832,3 +835,3 @@ $.SUBRULE($.argumentList); // (pi_vc in varchar2, pi_dat in date)
         $.CONSUME(tokens_1.tokenVocabulary.ProcedureKw); // procedure
-        $.CONSUME(tokens_1.tokenVocabulary.Identifier); // prc_name
+        $.CONSUME(tokens_1.tokenVocabulary.Identifier, { LABEL: 'procedure_name' }); // prc_name
         $.OPTION(() => {
@@ -835,0 +838,0 @@ $.SUBRULE($.argumentList); // (pi_vc in varchar2, pi_dat in date)
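
The LABEL options decide the keys under which these children appear in CstNode.children (package_name, cursor_name, exception_name, ...), and nodeLocationTracking: 'full' attaches a location object to every node. A sketch of how that surfaces on a parse result; the walker and sample source are illustrative, while exceptionDeclaration and exception_name come straight from the hunk above:

import parsePlSql from 'oracle-plsql-parser';
import type { CstNode, CstElement, IToken } from 'chevrotain';

const isCstNode = (el: CstElement): el is CstNode => 'children' in el;

// walk the CST and report every identifier labelled exception_name, using the
// node locations that nodeLocationTracking: 'full' now records
function printExceptions(node: CstNode): void {
  const names = (node.children.exception_name ?? []) as IToken[];
  for (const tok of names) {
    console.log(`exception ${tok.image} declared around line ${node.location?.startLine}`);
  }
  for (const childList of Object.values(node.children)) {
    for (const child of childList ?? []) {
      if (isCstNode(child)) printExceptions(child);
    }
  }
}

const source = `
create or replace package body my_pkg as
  e_custom exception;
end my_pkg;
`;
printExceptions(parsePlSql(source).cst);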
@@ -6,2 +6,3 @@ "use strict";
 const noRecoveryParser_1 = require("./components/mainParser/noRecoveryParser");
+const cstVisitor_1 = require("./components/cstVisitor");
 // const yellowLog = (text) => `\x1b[33m${text}\x1b[0m`;
@@ -45,3 +46,5 @@ function getText(token) {
     logLexer(file);
-    (0, noRecoveryParser_1.default)(file, true);
+    const res = (0, noRecoveryParser_1.default)(file, true);
+    const interpreted = cstVisitor_1.default.visit(res.cst);
+    console.log('interpreted', JSON.stringify(interpreted, null, 2));
 }
@@ -48,0 +51,0 @@ catch (err) {
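
The cstVisitor module itself is new in this release and its source is not part of this diff, but the call above implies the usual chevrotain visitor pattern built on the exported parserInstance. A rough sketch of that shape (not the package's actual implementation; exceptionDeclaration and exception_name are the rule and label shown earlier):

// sketch of a visitor module in the shape the dev script expects
import { parserInstance } from './components/mainParser/noRecoveryParser';

// unimplemented rules fall back to simply visiting their children
const BaseVisitor = parserInstance.getBaseCstVisitorConstructorWithDefaults();

class InterpretingVisitor extends BaseVisitor {
  constructor() {
    super();
    this.validateVisitor(); // verifies visitor methods match grammar rule names
  }

  // method name matches the grammar rule; ctx keys follow the new LABELs
  exceptionDeclaration(ctx: any) {
    return { type: 'exception', name: ctx.exception_name[0].image };
  }
}

export default new InterpretingVisitor();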
 {
   "name": "oracle-plsql-parser",
   "description": "Parser for Oracle PL/SQL",
-  "version": "0.1.3",
+  "version": "0.1.4",
   "repository": "https://github.com/phartenfeller/plsql-parser.git",
@@ -6,0 +6,0 @@ "website": "https://github.com/phartenfeller/plsql-parser",
The diffs of four additional files are not supported by the viewer yet and are not shown.
Major refactor
Supply chain risk: the package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package.