@solidity-parser/parser
Comparing version 0.13.0-rc.1 to 0.13.0-rc.2
@@ -1283,2 +1283,3 @@ import { ATN } from "antlr4ts/atn/ATN";
ReceiveKeyword(): TerminalNode | undefined;
ConstructorKeyword(): TerminalNode | undefined;
PayableKeyword(): TerminalNode | undefined;
@@ -1285,0 +1286,0 @@ LeaveKeyword(): TerminalNode | undefined;
@@ -66,4 +66,4 @@ import { ParserRuleContext } from 'antlr4ts';
visitTupleExpression(ctx: SP.TupleExpressionContext): AST.TupleExpression & WithMeta;
buildIdentifierList(ctx: SP.IdentifierListContext): (AST.VariableDeclaration | null)[];
buildVariableDeclarationList(ctx: SP.VariableDeclarationListContext): Array<AST.VariableDeclaration | null>;
buildIdentifierList(ctx: SP.IdentifierListContext): ((AST.VariableDeclaration & WithMeta) | null)[];
buildVariableDeclarationList(ctx: SP.VariableDeclarationListContext): Array<(AST.VariableDeclaration & WithMeta) | null>;
visitImportDirective(ctx: SP.ImportDirectiveContext): AST.ImportDirective & WithMeta;
@@ -70,0 +70,0 @@ buildEventParameterList(ctx: SP.EventParameterListContext): {
import { Token, AntlrToken, TokenizeOptions } from './types';
export declare function buildTokenList(tokens: AntlrToken[], options: TokenizeOptions): Token[];
export declare function buildTokenList(tokensArg: AntlrToken[], options: TokenizeOptions): Token[];
{
"name": "@solidity-parser/parser",
"version": "0.13.0-rc.1",
"version": "0.13.0-rc.2",
"description": "A Solidity parser built from a robust ANTLR 4 grammar",
@@ -5,0 +5,0 @@ "main": "dist/index.cjs.js",
@@ -9,2 +9,3 @@ import { ParserRuleContext } from 'antlr4ts'
import * as AST from './ast-types'
import { ErrorNode } from 'antlr4ts/tree/ErrorNode'
@@ -39,3 +40,3 @@ interface SourceLocation {
defaultResult(): AST.ASTNode & WithMeta {
throw new Error("Unknown node");
throw new Error('Unknown node')
}
@@ -48,3 +49,5 @@
public visitSourceUnit(ctx: SP.SourceUnitContext): AST.SourceUnit & WithMeta {
const children = ctx.children ?? []
const children = (ctx.children ?? []).filter(
(x) => !(x instanceof ErrorNode)
)
@@ -56,5 +59,5 @@ const node: AST.SourceUnit = {
const result = this._addMeta(node, ctx)
this.result = result;
this.result = result
return result;
return result
}
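The new ErrorNode filter in visitSourceUnit means the builder skips ANTLR error-recovery nodes instead of trying to visit them. A minimal sketch of how this surfaces through the public API, assuming parse() still accepts a tolerant option and collects recovered syntax errors on the result rather than throwing:

```ts
import { parse } from '@solidity-parser/parser'

// The stray trailing `contract` keyword is a syntax error; with tolerant
// parsing, the ANTLR error node it produces should be filtered out of the
// SourceUnit children rather than crashing the AST builder.
const source = 'contract C { uint x; } contract'

const ast: any = parse(source, { tolerant: true })
console.log(ast.type)                // 'SourceUnit'
console.log(ast.children.length)     // only the well-formed definitions remain
console.log(ast.errors?.length ?? 0) // recovered parser errors, if any
```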
@@ -698,3 +701,5 @@
public visitEmitStatement(ctx: SP.EmitStatementContext): AST.EmitStatement & WithMeta {
public visitEmitStatement(
ctx: SP.EmitStatementContext
): AST.EmitStatement & WithMeta {
const node: AST.EmitStatement = {
@@ -708,3 +713,5 @@ type: 'EmitStatement',
public visitFunctionCall(ctx: SP.FunctionCallContext): AST.FunctionCall & WithMeta {
public visitFunctionCall(
ctx: SP.FunctionCallContext
): AST.FunctionCall & WithMeta {
let args: AST.Expression[] = []
@@ -775,3 +782,5 @@ const names = []
public visitIfStatement(ctx: SP.IfStatementContext): AST.IfStatement & WithMeta {
public visitIfStatement(
ctx: SP.IfStatementContext
): AST.IfStatement & WithMeta {
const trueBody = this.visitStatement(ctx.statement(0))
@@ -794,3 +803,5 @@
public visitTryStatement(ctx: SP.TryStatementContext): AST.TryStatement & WithMeta {
public visitTryStatement(
ctx: SP.TryStatementContext
): AST.TryStatement & WithMeta {
let returnParameters = null
@@ -817,3 +828,5 @@ const ctxReturnParameters = ctx.returnParameters()
public visitCatchClause(ctx: SP.CatchClauseContext): AST.CatchClause & WithMeta {
public visitCatchClause(
ctx: SP.CatchClauseContext
): AST.CatchClause & WithMeta {
let parameters = null
@@ -864,3 +877,5 @@ if (ctx.parameterList()) {
public visitNumberLiteral(ctx: SP.NumberLiteralContext): AST.NumberLiteral & WithMeta {
public visitNumberLiteral(
ctx: SP.NumberLiteralContext
): AST.NumberLiteral & WithMeta {
const number = this._toText(ctx.getChild(0))
@@ -1187,3 +1202,5 @@ let subdenomination = null
public visitNameValueList(ctx: SP.NameValueListContext): AST.NameValueList & WithMeta {
public visitNameValueList(
ctx: SP.NameValueListContext
): AST.NameValueList & WithMeta {
const names: string[] = []
@@ -1387,8 +1404,13 @@ const args: AST.Expression[] = []
const children = ctx.children!.slice(1, -1)
return this._mapCommasToNulls(children).map((iden) => {
const identifiers = ctx.identifier()
let i = 0
return this._mapCommasToNulls(children).map((idenOrNull) => {
// add a null for each empty value
if (!iden) {
if (!idenOrNull) {
return null
}
const iden = identifiers[i]
i++
const node: AST.VariableDeclaration = {
@@ -1404,3 +1426,3 @@ type: 'VariableDeclaration',
return node
return this._addMeta(node, iden)
})
@@ -1411,10 +1433,16 @@ }
ctx: SP.VariableDeclarationListContext
): Array<AST.VariableDeclaration | null> {
): Array<(AST.VariableDeclaration & WithMeta) | null> {
// remove parentheses
return this._mapCommasToNulls(ctx.children!).map((decl: any) => {
const variableDeclarations = ctx.variableDeclaration()
let i = 0
return this._mapCommasToNulls(ctx.children!).map((declOrNull) => {
// add a null for each empty value
if (!decl) {
if (!declOrNull) {
return null
}
const decl = variableDeclarations[i]
i++
let storageLocation: string | null = null
@@ -1435,3 +1463,3 @@ if (decl.storageLocation()) {
return result
return this._addMeta(result, decl)
})
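Both buildIdentifierList and buildVariableDeclarationList now pass each non-empty element through _addMeta with its own parser context, so the individual VariableDeclaration nodes inside a destructuring assignment carry their own loc/range instead of only the enclosing statement. A rough usage sketch, assuming parse() and visit() are exported as in earlier releases and that the loc/range options behave as documented:

```ts
import { parse, visit } from '@solidity-parser/parser'

const source = `
contract C {
  function f() public pure returns (uint, uint) { return (1, 2); }
  function g() public pure {
    (uint a, , uint b) = f();
  }
}
`

const ast = parse(source, { loc: true, range: true })

visit(ast, {
  VariableDeclarationStatement: (node: any) => {
    for (const decl of node.variables) {
      // Empty tuple slots stay null; real declarations now have their own loc/range.
      if (decl !== null) console.log(decl.name, decl.range, decl.loc)
    }
  },
})
```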
@@ -1516,3 +1544,5 @@ }
public visitAssemblyBlock(ctx: SP.AssemblyBlockContext): AST.AssemblyBlock & WithMeta {
public visitAssemblyBlock(
ctx: SP.AssemblyBlockContext
): AST.AssemblyBlock & WithMeta {
const operations = ctx
@@ -1530,3 +1560,5 @@ .assemblyItem()
public visitAssemblyItem(ctx: SP.AssemblyItemContext): AST.AssemblyItem & WithMeta {
public visitAssemblyItem(
ctx: SP.AssemblyItemContext
): AST.AssemblyItem & WithMeta {
let text
@@ -1576,3 +1608,5 @@
const functionName = this._toText(ctx.getChild(0))
const args = ctx.assemblyExpression().map((assemblyExpr) => this.visitAssemblyExpression(assemblyExpr))
const args = ctx
.assemblyExpression()
.map((assemblyExpr) => this.visitAssemblyExpression(assemblyExpr))
@@ -1641,3 +1675,5 @@ const node: AST.AssemblyCall = {
public visitAssemblyCase(ctx: SP.AssemblyCaseContext): AST.AssemblyCase & WithMeta {
public visitAssemblyCase(
ctx: SP.AssemblyCaseContext
): AST.AssemblyCase & WithMeta {
let value = null
@@ -1684,3 +1720,3 @@ if (this._toText(ctx.getChild(0)) === 'case') {
names,
expression
expression,
}
@@ -1804,16 +1840,20 @@
public visitContinueStatement(ctx: SP.ContinueStatementContext): AST.ContinueStatement & WithMeta {
public visitContinueStatement(
ctx: SP.ContinueStatementContext
): AST.ContinueStatement & WithMeta {
const node: AST.ContinueStatement = {
type: 'ContinueStatement'
type: 'ContinueStatement',
}
return this._addMeta(node, ctx);
return this._addMeta(node, ctx)
}
public visitBreakStatement(ctx: SP.BreakStatementContext): AST.BreakStatement & WithMeta {
public visitBreakStatement(
ctx: SP.BreakStatementContext
): AST.BreakStatement & WithMeta {
const node: AST.BreakStatement = {
type: 'BreakStatement'
type: 'BreakStatement',
}
return this._addMeta(node, ctx);
return this._addMeta(node, ctx)
}
@@ -1820,0 +1860,0 @@
@@ -1,2 +0,2 @@
import { ANTLRInputStream, CommonTokenStream } from 'antlr4ts'
import { ANTLRInputStream, BufferedTokenStream, CommonTokenStream } from 'antlr4ts'
@@ -42,5 +42,4 @@ import { SolidityLexer } from './antlr/SolidityLexer'
const lexer = new SolidityLexer(inputStream)
const tokenStream = new CommonTokenStream(lexer)
return buildTokenList(tokenStream.getTokens(), options)
return buildTokenList(lexer.getAllTokens(), options)
}
@@ -47,0 +46,0 @@
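The tokenizer now drains the lexer directly with getAllTokens() rather than buffering tokens through a CommonTokenStream first. A sketch of the resulting flow, using a placeholder function name and assumed internal import paths (only ./antlr/SolidityLexer and ./types appear in this diff):

```ts
import { ANTLRInputStream } from 'antlr4ts'
import { SolidityLexer } from './antlr/SolidityLexer' // path shown in the diff context
import { buildTokenList } from './token-utils'        // hypothetical path for the helper above
import { Token, TokenizeOptions } from './types'

// Placeholder name; the real exported function is not named in this diff.
export function tokenizeSketch(input: string, options: TokenizeOptions): Token[] {
  const inputStream = new ANTLRInputStream(input)
  const lexer = new SolidityLexer(inputStream)
  // getAllTokens() pulls every token straight from the lexer, so no
  // CommonTokenStream needs to be constructed and filled beforehand.
  return buildTokenList(lexer.getAllTokens(), options)
}
```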
@@ -70,3 +70,3 @@ import { Token, AntlrToken, TokenizeOptions } from './types' | ||
export function buildTokenList( | ||
tokens: AntlrToken[], | ||
tokensArg: AntlrToken[], | ||
options: TokenizeOptions | ||
@@ -76,3 +76,3 @@ ): Token[] { | ||
const result = tokens.map((token) => { | ||
const result = tokensArg.map((token) => { | ||
const type = getTokenType(tokenTypes[token.type]) | ||
@@ -79,0 +79,0 @@ const node: Token = { type, value: token.text } |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package