conseiljs - npm Package Compare versions

Comparing version 0.4.0-alpha.2 to 0.4.0-alpha.3

dist/chain/tezos/contracts/MorleyTokenHelper.d.ts


CHANGELOG.md

@@ -11,2 +11,3 @@ <!-- markdownlint-disable MD024 -->

- `EntryPoint.generateInvocationPair()` now returns a tuple with `entrypoint` and `parameters` keys.
- `TezosConseilClient.awaitOperationConfirmation` now returns a single item, not an array.
- Removed `CryptoUtils.getPasswordStrength()` and the related zxcvbn dependency. This functionality should be added by the implementing application.

@@ -21,2 +22,3 @@ - nodejs 12 is now a base requirement.

- Michelson parser support for `D\[UI\]G n`, `D\[UI\]P n`, `DROP n`.
- Generally improved Michelson contract parser.
- Improved `TezosContractIntrospector` parser.

@@ -27,8 +29,5 @@

- added Tezos Commons Baker Registry interface `chain/tezos/contracts/TCFBakerRegistryHelper`.
- added TZIP 0007 (fa1.2) token contract interface
- added TZIP 0007 (FA1.2) token contract interface `chain/tezos/contracts/MorleyTokenHelper`.
- `TezosMessageUtil` can now `pack` `key_hash` value.
<!-- markdownlint-disable MD024 -->
# ConseilJS Change Log
## 0.3.7

@@ -35,0 +34,0 @@
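The changelog entries above change a few call-site contracts. The sketch below is a hedged illustration only: `entryPoint`, `conseilServer` and `opGroupId` are hypothetical placeholders, and the exact argument lists should be checked against the installed version. It shows the two return-shape changes described above, the keyed pair from `EntryPoint.generateInvocationPair()` and the single (non-array) result from `TezosConseilClient.awaitOperationConfirmation`.

```typescript
import { TezosConseilClient } from 'conseiljs';

// Hedged sketch of the call-site impact of the changelog entries above.
// entryPoint, conseilServer and opGroupId are hypothetical placeholders, not values from this diff.
async function invokeAndConfirm(entryPoint: any, conseilServer: any, network: string, opGroupId: string) {
    // generateInvocationPair() now returns a keyed pair instead of positional values.
    const { entrypoint, parameters } = entryPoint.generateInvocationPair(/* entry point arguments */);

    // awaitOperationConfirmation now resolves to a single record rather than a one-element array,
    // so the result is used directly instead of being indexed.
    const confirmation = await TezosConseilClient.awaitOperationConfirmation(conseilServer, network, opGroupId, 5);

    return { entrypoint, parameters, confirmation };
}
```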

import { KeyStore } from '../../../types/wallet/KeyStore';
import * as TezosTypes from '../../../types/tezos/TezosChainTypes';
export declare namespace TezosProtocolHelper {
export declare namespace BabylonDelegationHelper {
function verifyDestination(server: string, address: string): Promise<boolean>;

@@ -5,0 +5,0 @@ function setDelegate(server: string, keyStore: KeyStore, contract: string, delegate: string, fee: number, derivationPath?: string): Promise<TezosTypes.OperationResult>;

@@ -24,4 +24,4 @@ "use strict";

const TezosNodeReader_1 = require("../TezosNodeReader");
var TezosProtocolHelper;
(function (TezosProtocolHelper) {
var BabylonDelegationHelper;
(function (BabylonDelegationHelper) {
function verifyDestination(server, address) {

@@ -40,3 +40,3 @@ return __awaiter(this, void 0, void 0, function* () {

}
TezosProtocolHelper.verifyDestination = verifyDestination;
BabylonDelegationHelper.verifyDestination = verifyDestination;
function setDelegate(server, keyStore, contract, delegate, fee, derivationPath = '') {

@@ -51,3 +51,3 @@ if (contract.startsWith('KT1')) {

}
TezosProtocolHelper.setDelegate = setDelegate;
BabylonDelegationHelper.setDelegate = setDelegate;
function unSetDelegate(server, keyStore, contract, fee, derivationPath = '') {

@@ -62,7 +62,7 @@ if (contract.startsWith('KT1')) {

}
TezosProtocolHelper.unSetDelegate = unSetDelegate;
BabylonDelegationHelper.unSetDelegate = unSetDelegate;
function withdrawDelegatedFunds(server, keyStore, contract, fee, amount, derivationPath = '') {
return sendDelegatedFunds(server, keyStore, contract, fee, amount, derivationPath, keyStore.publicKeyHash);
}
TezosProtocolHelper.withdrawDelegatedFunds = withdrawDelegatedFunds;
BabylonDelegationHelper.withdrawDelegatedFunds = withdrawDelegatedFunds;
function sendDelegatedFunds(server, keyStore, contract, fee, amount, derivationPath = '', destination) {

@@ -79,7 +79,7 @@ let parameters = `[ { "prim": "DROP" },

}
TezosProtocolHelper.sendDelegatedFunds = sendDelegatedFunds;
BabylonDelegationHelper.sendDelegatedFunds = sendDelegatedFunds;
function depositDelegatedFunds(server, keyStore, contract, fee, amount, derivationPath = '') {
return TezosNodeWriter_1.TezosNodeWriter.sendContractInvocationOperation(server, keyStore, contract, amount, fee, derivationPath, 0, TezosConstants_1.TezosConstants.P005ManagerContractDepositGasLimit, undefined, undefined);
}
TezosProtocolHelper.depositDelegatedFunds = depositDelegatedFunds;
BabylonDelegationHelper.depositDelegatedFunds = depositDelegatedFunds;
function deployManagerContract(server, keyStore, delegate, fee, amount, derivationPath = '') {

@@ -120,4 +120,4 @@ const code = `[ { "prim": "parameter",

}
TezosProtocolHelper.deployManagerContract = deployManagerContract;
})(TezosProtocolHelper = exports.TezosProtocolHelper || (exports.TezosProtocolHelper = {}));
BabylonDelegationHelper.deployManagerContract = deployManagerContract;
})(BabylonDelegationHelper = exports.BabylonDelegationHelper || (exports.BabylonDelegationHelper = {}));
//# sourceMappingURL=BabylonDelegationHelper.js.map
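The diff above renames the `TezosProtocolHelper` namespace to `BabylonDelegationHelper` (the emitted file is now `BabylonDelegationHelper.js`) without changing any of the function signatures shown. A minimal before/after sketch of a call site, assuming the helper is re-exported from the package root as the compiled output suggests; the node URL, key store and addresses are placeholders:

```typescript
import { BabylonDelegationHelper } from 'conseiljs';
// 0.4.0-alpha.2 equivalent: import { TezosProtocolHelper } from 'conseiljs';

// Placeholders for illustration; only the namespace name changes in this release.
// The signature setDelegate(server, keyStore, contract, delegate, fee, derivationPath?) is unchanged.
declare const keyStore: any;
const tezosNode = 'https://tezos-node.example.com'; // placeholder node URL
const managerContract = 'KT1...'; // delegatable manager contract address (placeholder)
const baker = 'tz1...';           // delegate address (placeholder)

async function redelegate() {
    // Previously TezosProtocolHelper.setDelegate(...); now:
    return BabylonDelegationHelper.setDelegate(tezosNode, keyStore, managerContract, baker, 10000);
}
```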

@@ -1,13 +0,13 @@

export interface Token {
interface NearleyToken {
value: any;
[key: string]: any;
}
export interface Lexer {
interface NearleyLexer {
reset: (chunk: string, info: any) => void;
next: () => Token | undefined;
next: () => NearleyToken | undefined;
save: () => any;
formatError: (token: Token) => string;
formatError: (token: NearleyToken) => string;
has: (tokenType: string) => boolean;
}
export interface NearleyRule {
interface NearleyRule {
name: string;

@@ -17,3 +17,3 @@ symbols: NearleySymbol[];

}
export declare type NearleySymbol = string | {
declare type NearleySymbol = string | {
literal: any;

@@ -23,4 +23,8 @@ } | {

};
export declare var Lexer: Lexer | undefined;
export declare var ParserRules: NearleyRule[];
export declare var ParserStart: string;
interface Grammar {
Lexer: NearleyLexer | undefined;
ParserRules: NearleyRule[];
ParserStart: string;
}
declare const grammar: Grammar;
export default grammar;

@@ -346,27 +346,31 @@ "use strict";

;
exports.Lexer = lexer;
exports.ParserRules = [
{ "name": "entry", "symbols": [(lexer.has("parameter") ? { type: "parameter" } : parameter), "__", "parameters", "_", (lexer.has("semicolon") ? { type: "semicolon" } : semicolon)], "postprocess": breakParameter },
{ "name": "parameters", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", "parameters", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": stripParen },
{ "name": "parameters", "symbols": [(lexer.has("or") ? { type: "or" } : or), "_", (lexer.has("annot") ? { type: "annot" } : annot), "__", (lexer.has("annot") ? { type: "annot" } : annot), "__", "parameters", "__", "parameters"], "postprocess": branchOrWithTwoAnnot },
{ "name": "parameters", "symbols": [(lexer.has("or") ? { type: "or" } : or), "_", (lexer.has("annot") ? { type: "annot" } : annot), "__", "parameters", "__", "parameters"], "postprocess": branchOrWithAnnot },
{ "name": "parameters", "symbols": [(lexer.has("or") ? { type: "or" } : or), "_", "parameters", "__", "parameters"], "postprocess": branchOr },
{ "name": "parameters", "symbols": [(lexer.has("pair") ? { type: "pair" } : pair), "__", (lexer.has("annot") ? { type: "annot" } : annot), "__", (lexer.has("annot") ? { type: "annot" } : annot), "__", "parameters", "__", "parameters"], "postprocess": mergePairWithTwoAnnot },
{ "name": "parameters", "symbols": [(lexer.has("pair") ? { type: "pair" } : pair), "__", (lexer.has("annot") ? { type: "annot" } : annot), "__", "parameters", "__", "parameters"], "postprocess": mergePairWithAnnot },
{ "name": "parameters", "symbols": [(lexer.has("pair") ? { type: "pair" } : pair), "__", "parameters", "__", "parameters"], "postprocess": mergePair },
{ "name": "parameters", "symbols": [(lexer.has("singleArgData") ? { type: "singleArgData" } : singleArgData), "_", (lexer.has("annot") ? { type: "annot" } : annot), "__", (lexer.has("annot") ? { type: "annot" } : annot), "__", "parameters"], "postprocess": recordSingleArgDataWithTwoAnnot },
{ "name": "parameters", "symbols": [(lexer.has("singleArgData") ? { type: "singleArgData" } : singleArgData), "_", (lexer.has("annot") ? { type: "annot" } : annot), "__", "parameters"], "postprocess": recordSingleArgDataWithAnnot },
{ "name": "parameters", "symbols": [(lexer.has("singleArgData") ? { type: "singleArgData" } : singleArgData), "_", "parameters"], "postprocess": recordSingleArgData },
{ "name": "parameters", "symbols": [(lexer.has("doubleArgData") ? { type: "doubleArgData" } : doubleArgData), "_", (lexer.has("annot") ? { type: "annot" } : annot), "__", (lexer.has("annot") ? { type: "annot" } : annot), "__", "parameters", "__", "parameters"], "postprocess": recordDoubleArgDataWithTwoAnnot },
{ "name": "parameters", "symbols": [(lexer.has("doubleArgData") ? { type: "doubleArgData" } : doubleArgData), "_", (lexer.has("annot") ? { type: "annot" } : annot), "__", "parameters", "__", "parameters"], "postprocess": recordDoubleArgDataWithAnnot },
{ "name": "parameters", "symbols": [(lexer.has("doubleArgData") ? { type: "doubleArgData" } : doubleArgData), "_", "parameters", "__", "parameters"], "postprocess": recordDoubleArgData },
{ "name": "parameters", "symbols": [(lexer.has("data") ? { type: "data" } : data), "__", (lexer.has("annot") ? { type: "annot" } : annot)], "postprocess": recordData },
{ "name": "parameters", "symbols": [(lexer.has("data") ? { type: "data" } : data), "__", (lexer.has("annot") ? { type: "annot" } : annot), "__", (lexer.has("annot") ? { type: "annot" } : annot)], "postprocess": recordData },
{ "name": "parameters", "symbols": [(lexer.has("data") ? { type: "data" } : data)], "postprocess": recordData },
{ "name": "_$ebnf$1", "symbols": [] },
{ "name": "_$ebnf$1", "symbols": ["_$ebnf$1", /[\s]/], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "_", "symbols": ["_$ebnf$1"] },
{ "name": "__", "symbols": [/[\s]/] }
];
exports.ParserStart = "entry";
;
const grammar = {
Lexer: lexer,
ParserRules: [
{ "name": "entry", "symbols": [(lexer.has("parameter") ? { type: "parameter" } : parameter), "__", "parameters", "_", (lexer.has("semicolon") ? { type: "semicolon" } : semicolon)], "postprocess": breakParameter },
{ "name": "parameters", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", "parameters", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": stripParen },
{ "name": "parameters", "symbols": [(lexer.has("or") ? { type: "or" } : or), "_", (lexer.has("annot") ? { type: "annot" } : annot), "__", (lexer.has("annot") ? { type: "annot" } : annot), "__", "parameters", "__", "parameters"], "postprocess": branchOrWithTwoAnnot },
{ "name": "parameters", "symbols": [(lexer.has("or") ? { type: "or" } : or), "_", (lexer.has("annot") ? { type: "annot" } : annot), "__", "parameters", "__", "parameters"], "postprocess": branchOrWithAnnot },
{ "name": "parameters", "symbols": [(lexer.has("or") ? { type: "or" } : or), "_", "parameters", "__", "parameters"], "postprocess": branchOr },
{ "name": "parameters", "symbols": [(lexer.has("pair") ? { type: "pair" } : pair), "__", (lexer.has("annot") ? { type: "annot" } : annot), "__", (lexer.has("annot") ? { type: "annot" } : annot), "__", "parameters", "__", "parameters"], "postprocess": mergePairWithTwoAnnot },
{ "name": "parameters", "symbols": [(lexer.has("pair") ? { type: "pair" } : pair), "__", (lexer.has("annot") ? { type: "annot" } : annot), "__", "parameters", "__", "parameters"], "postprocess": mergePairWithAnnot },
{ "name": "parameters", "symbols": [(lexer.has("pair") ? { type: "pair" } : pair), "__", "parameters", "__", "parameters"], "postprocess": mergePair },
{ "name": "parameters", "symbols": [(lexer.has("singleArgData") ? { type: "singleArgData" } : singleArgData), "_", (lexer.has("annot") ? { type: "annot" } : annot), "__", (lexer.has("annot") ? { type: "annot" } : annot), "__", "parameters"], "postprocess": recordSingleArgDataWithTwoAnnot },
{ "name": "parameters", "symbols": [(lexer.has("singleArgData") ? { type: "singleArgData" } : singleArgData), "_", (lexer.has("annot") ? { type: "annot" } : annot), "__", "parameters"], "postprocess": recordSingleArgDataWithAnnot },
{ "name": "parameters", "symbols": [(lexer.has("singleArgData") ? { type: "singleArgData" } : singleArgData), "_", "parameters"], "postprocess": recordSingleArgData },
{ "name": "parameters", "symbols": [(lexer.has("doubleArgData") ? { type: "doubleArgData" } : doubleArgData), "_", (lexer.has("annot") ? { type: "annot" } : annot), "__", (lexer.has("annot") ? { type: "annot" } : annot), "__", "parameters", "__", "parameters"], "postprocess": recordDoubleArgDataWithTwoAnnot },
{ "name": "parameters", "symbols": [(lexer.has("doubleArgData") ? { type: "doubleArgData" } : doubleArgData), "_", (lexer.has("annot") ? { type: "annot" } : annot), "__", "parameters", "__", "parameters"], "postprocess": recordDoubleArgDataWithAnnot },
{ "name": "parameters", "symbols": [(lexer.has("doubleArgData") ? { type: "doubleArgData" } : doubleArgData), "_", "parameters", "__", "parameters"], "postprocess": recordDoubleArgData },
{ "name": "parameters", "symbols": [(lexer.has("data") ? { type: "data" } : data), "__", (lexer.has("annot") ? { type: "annot" } : annot)], "postprocess": recordData },
{ "name": "parameters", "symbols": [(lexer.has("data") ? { type: "data" } : data), "__", (lexer.has("annot") ? { type: "annot" } : annot), "__", (lexer.has("annot") ? { type: "annot" } : annot)], "postprocess": recordData },
{ "name": "parameters", "symbols": [(lexer.has("data") ? { type: "data" } : data)], "postprocess": recordData },
{ "name": "_$ebnf$1", "symbols": [] },
{ "name": "_$ebnf$1", "symbols": ["_$ebnf$1", /[\s]/], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "_", "symbols": ["_$ebnf$1"] },
{ "name": "__", "symbols": [/[\s]/] }
],
ParserStart: "entry",
};
exports.default = grammar;
//# sourceMappingURL=EntryPointTemplate.js.map
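Both the declaration and the generated implementation above switch from module-level `Lexer`/`ParserRules`/`ParserStart` exports to a single default-exported `grammar` object, matching newer nearleyc output. A hedged sketch of how a consumer constructs a parser against the new shape using the standard `nearley` runtime API; the import path and the input string are illustrative:

```typescript
import * as nearley from 'nearley';
// 0.4.0-alpha.3: the compiled grammar is the module's default export.
// 0.4.0-alpha.2 read Lexer/ParserRules/ParserStart straight off the module instead.
import grammar from './EntryPointTemplate'; // illustrative path to the generated module

const parser = new nearley.Parser(nearley.Grammar.fromCompiled(grammar));

// Illustrative Michelson parameter section; real input must be accepted by the module's lexer.
try {
    parser.feed('parameter unit;');
    console.log(parser.results[0]);
} catch (err) {
    console.error('input rejected by the entry point grammar', err);
}
```

Plain CommonJS consumers now see the grammar under the module's `default` property, which is the breaking part of this change for anything that previously read `exports.ParserRules` directly.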

@@ -1,13 +0,13 @@

export interface Token {
interface NearleyToken {
value: any;
[key: string]: any;
}
export interface Lexer {
interface NearleyLexer {
reset: (chunk: string, info: any) => void;
next: () => Token | undefined;
next: () => NearleyToken | undefined;
save: () => any;
formatError: (token: Token) => string;
formatError: (token: NearleyToken) => string;
has: (tokenType: string) => boolean;
}
export interface NearleyRule {
interface NearleyRule {
name: string;

@@ -17,3 +17,3 @@ symbols: NearleySymbol[];

}
export declare type NearleySymbol = string | {
declare type NearleySymbol = string | {
literal: any;

@@ -23,4 +23,8 @@ } | {

};
export declare var Lexer: Lexer | undefined;
export declare var ParserRules: NearleyRule[];
export declare var ParserStart: string;
interface Grammar {
Lexer: NearleyLexer | undefined;
ParserRules: NearleyRule[];
ParserStart: string;
}
declare const grammar: Grammar;
export default grammar;

@@ -142,113 +142,117 @@ "use strict";

;
exports.Lexer = lexer;
exports.ParserRules = [
{ "name": "main", "symbols": ["staticObject"], "postprocess": id },
{ "name": "main", "symbols": ["primBare"], "postprocess": id },
{ "name": "main", "symbols": ["primArg"], "postprocess": id },
{ "name": "main", "symbols": ["primAnn"], "postprocess": id },
{ "name": "main", "symbols": ["primArgAnn"], "postprocess": id },
{ "name": "main", "symbols": ["anyArray"], "postprocess": id },
{ "name": "staticInt$ebnf$1", "symbols": [] },
{ "name": "staticInt$ebnf$1", "symbols": ["staticInt$ebnf$1", (lexer.has("_") ? { type: "_" } : _)], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "staticInt", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"int\"" }, "staticInt$ebnf$1", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("quotedValue") ? { type: "quotedValue" } : quotedValue), (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": staticIntToHex },
{ "name": "staticString$ebnf$1", "symbols": [] },
{ "name": "staticString$ebnf$1", "symbols": ["staticString$ebnf$1", (lexer.has("_") ? { type: "_" } : _)], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "staticString", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"string\"" }, "staticString$ebnf$1", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("quotedValue") ? { type: "quotedValue" } : quotedValue), (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": staticStringToHex },
{ "name": "staticBytes$ebnf$1", "symbols": [] },
{ "name": "staticBytes$ebnf$1", "symbols": ["staticBytes$ebnf$1", (lexer.has("_") ? { type: "_" } : _)], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "staticBytes", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"bytes\"" }, "staticBytes$ebnf$1", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("quotedValue") ? { type: "quotedValue" } : quotedValue), (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": staticBytesToHex },
{ "name": "staticObject", "symbols": ["staticInt"], "postprocess": id },
{ "name": "staticObject", "symbols": ["staticString"], "postprocess": id },
{ "name": "staticObject", "symbols": ["staticBytes"], "postprocess": id },
{ "name": "primBare$ebnf$1", "symbols": [] },
{ "name": "primBare$ebnf$1", "symbols": ["primBare$ebnf$1", (lexer.has("_") ? { type: "_" } : _)], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "primBare", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"prim\"" }, "primBare$ebnf$1", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("keyword") ? { type: "keyword" } : keyword), (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": primBareToHex },
{ "name": "primArg$ebnf$1", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArg$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primArg$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArg$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primArg$ebnf$3$subexpression$1$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "primArg$ebnf$3$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primArg$ebnf$3$subexpression$1$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArg$ebnf$3$subexpression$1$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primArg$ebnf$3$subexpression$1", "symbols": ["any", "primArg$ebnf$3$subexpression$1$ebnf$1", "primArg$ebnf$3$subexpression$1$ebnf$2"] },
{ "name": "primArg$ebnf$3", "symbols": ["primArg$ebnf$3$subexpression$1"] },
{ "name": "primArg$ebnf$3$subexpression$2$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "primArg$ebnf$3$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primArg$ebnf$3$subexpression$2$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArg$ebnf$3$subexpression$2$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primArg$ebnf$3$subexpression$2", "symbols": ["any", "primArg$ebnf$3$subexpression$2$ebnf$1", "primArg$ebnf$3$subexpression$2$ebnf$2"] },
{ "name": "primArg$ebnf$3", "symbols": ["primArg$ebnf$3", "primArg$ebnf$3$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "primArg", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"prim\"" }, "primArg$ebnf$1", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("keyword") ? { type: "keyword" } : keyword), (lexer.has("comma") ? { type: "comma" } : comma), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"args\"" }, "primArg$ebnf$2", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("lbracket") ? { type: "lbracket" } : lbracket), (lexer.has("_") ? { type: "_" } : _), "primArg$ebnf$3", (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbracket") ? { type: "rbracket" } : rbracket), (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": primArgToHex },
{ "name": "primAnn$ebnf$1", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primAnn$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primAnn$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primAnn$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primAnn$ebnf$3$subexpression$1$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "primAnn$ebnf$3$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primAnn$ebnf$3$subexpression$1$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primAnn$ebnf$3$subexpression$1$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primAnn$ebnf$3$subexpression$1", "symbols": [(lexer.has("quotedValue") ? { type: "quotedValue" } : quotedValue), "primAnn$ebnf$3$subexpression$1$ebnf$1", "primAnn$ebnf$3$subexpression$1$ebnf$2"] },
{ "name": "primAnn$ebnf$3", "symbols": ["primAnn$ebnf$3$subexpression$1"] },
{ "name": "primAnn$ebnf$3$subexpression$2$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "primAnn$ebnf$3$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primAnn$ebnf$3$subexpression$2$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primAnn$ebnf$3$subexpression$2$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primAnn$ebnf$3$subexpression$2", "symbols": [(lexer.has("quotedValue") ? { type: "quotedValue" } : quotedValue), "primAnn$ebnf$3$subexpression$2$ebnf$1", "primAnn$ebnf$3$subexpression$2$ebnf$2"] },
{ "name": "primAnn$ebnf$3", "symbols": ["primAnn$ebnf$3", "primAnn$ebnf$3$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "primAnn", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"prim\"" }, "primAnn$ebnf$1", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("keyword") ? { type: "keyword" } : keyword), (lexer.has("comma") ? { type: "comma" } : comma), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"annots\"" }, "primAnn$ebnf$2", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("lbracket") ? { type: "lbracket" } : lbracket), (lexer.has("_") ? { type: "_" } : _), "primAnn$ebnf$3", (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbracket") ? { type: "rbracket" } : rbracket), (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": primAnnToHex },
{ "name": "primArgAnn$ebnf$1", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArgAnn$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArgAnn$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$3$subexpression$1$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "primArgAnn$ebnf$3$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$3$subexpression$1$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArgAnn$ebnf$3$subexpression$1$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$3$subexpression$1", "symbols": ["any", "primArgAnn$ebnf$3$subexpression$1$ebnf$1", "primArgAnn$ebnf$3$subexpression$1$ebnf$2"] },
{ "name": "primArgAnn$ebnf$3", "symbols": ["primArgAnn$ebnf$3$subexpression$1"] },
{ "name": "primArgAnn$ebnf$3$subexpression$2$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "primArgAnn$ebnf$3$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$3$subexpression$2$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArgAnn$ebnf$3$subexpression$2$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$3$subexpression$2", "symbols": ["any", "primArgAnn$ebnf$3$subexpression$2$ebnf$1", "primArgAnn$ebnf$3$subexpression$2$ebnf$2"] },
{ "name": "primArgAnn$ebnf$3", "symbols": ["primArgAnn$ebnf$3", "primArgAnn$ebnf$3$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "primArgAnn$ebnf$4", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArgAnn$ebnf$4", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$5$subexpression$1$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "primArgAnn$ebnf$5$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$5$subexpression$1$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArgAnn$ebnf$5$subexpression$1$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$5$subexpression$1", "symbols": [(lexer.has("quotedValue") ? { type: "quotedValue" } : quotedValue), "primArgAnn$ebnf$5$subexpression$1$ebnf$1", "primArgAnn$ebnf$5$subexpression$1$ebnf$2"] },
{ "name": "primArgAnn$ebnf$5", "symbols": ["primArgAnn$ebnf$5$subexpression$1"] },
{ "name": "primArgAnn$ebnf$5$subexpression$2$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "primArgAnn$ebnf$5$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$5$subexpression$2$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArgAnn$ebnf$5$subexpression$2$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$5$subexpression$2", "symbols": [(lexer.has("quotedValue") ? { type: "quotedValue" } : quotedValue), "primArgAnn$ebnf$5$subexpression$2$ebnf$1", "primArgAnn$ebnf$5$subexpression$2$ebnf$2"] },
{ "name": "primArgAnn$ebnf$5", "symbols": ["primArgAnn$ebnf$5", "primArgAnn$ebnf$5$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "primArgAnn", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"prim\"" }, "primArgAnn$ebnf$1", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("keyword") ? { type: "keyword" } : keyword), (lexer.has("comma") ? { type: "comma" } : comma), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"args\"" }, "primArgAnn$ebnf$2", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("lbracket") ? { type: "lbracket" } : lbracket), (lexer.has("_") ? { type: "_" } : _), "primArgAnn$ebnf$3", (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbracket") ? { type: "rbracket" } : rbracket), (lexer.has("comma") ? { type: "comma" } : comma), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"annots\"" }, "primArgAnn$ebnf$4", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("lbracket") ? { type: "lbracket" } : lbracket), (lexer.has("_") ? { type: "_" } : _), "primArgAnn$ebnf$5", (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbracket") ? { type: "rbracket" } : rbracket), (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": primArgAnnToHex },
{ "name": "primAny", "symbols": ["primBare"], "postprocess": id },
{ "name": "primAny", "symbols": ["primArg"], "postprocess": id },
{ "name": "primAny", "symbols": ["primAnn"], "postprocess": id },
{ "name": "primAny", "symbols": ["primArgAnn"], "postprocess": id },
{ "name": "any", "symbols": ["primAny"], "postprocess": id },
{ "name": "any", "symbols": ["staticObject"], "postprocess": id },
{ "name": "any", "symbols": ["anyArray"], "postprocess": id },
{ "name": "anyArray", "symbols": [(lexer.has("lbracket") ? { type: "lbracket" } : lbracket), (lexer.has("rbracket") ? { type: "rbracket" } : rbracket)], "postprocess": function (d) { return '0200000000'; } },
{ "name": "anyArray$ebnf$1$subexpression$1$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "anyArray$ebnf$1$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "anyArray$ebnf$1$subexpression$1$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "anyArray$ebnf$1$subexpression$1$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "anyArray$ebnf$1$subexpression$1", "symbols": ["any", "anyArray$ebnf$1$subexpression$1$ebnf$1", "anyArray$ebnf$1$subexpression$1$ebnf$2"] },
{ "name": "anyArray$ebnf$1", "symbols": ["anyArray$ebnf$1$subexpression$1"] },
{ "name": "anyArray$ebnf$1$subexpression$2$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "anyArray$ebnf$1$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "anyArray$ebnf$1$subexpression$2$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "anyArray$ebnf$1$subexpression$2$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "anyArray$ebnf$1$subexpression$2", "symbols": ["any", "anyArray$ebnf$1$subexpression$2$ebnf$1", "anyArray$ebnf$1$subexpression$2$ebnf$2"] },
{ "name": "anyArray$ebnf$1", "symbols": ["anyArray$ebnf$1", "anyArray$ebnf$1$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "anyArray", "symbols": [(lexer.has("lbracket") ? { type: "lbracket" } : lbracket), (lexer.has("_") ? { type: "_" } : _), "anyArray$ebnf$1", (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbracket") ? { type: "rbracket" } : rbracket)], "postprocess": staticArrayToHex }
];
exports.ParserStart = "main";
;
const grammar = {
Lexer: lexer,
ParserRules: [
{ "name": "main", "symbols": ["staticObject"], "postprocess": id },
{ "name": "main", "symbols": ["primBare"], "postprocess": id },
{ "name": "main", "symbols": ["primArg"], "postprocess": id },
{ "name": "main", "symbols": ["primAnn"], "postprocess": id },
{ "name": "main", "symbols": ["primArgAnn"], "postprocess": id },
{ "name": "main", "symbols": ["anyArray"], "postprocess": id },
{ "name": "staticInt$ebnf$1", "symbols": [] },
{ "name": "staticInt$ebnf$1", "symbols": ["staticInt$ebnf$1", (lexer.has("_") ? { type: "_" } : _)], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "staticInt", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"int\"" }, "staticInt$ebnf$1", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("quotedValue") ? { type: "quotedValue" } : quotedValue), (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": staticIntToHex },
{ "name": "staticString$ebnf$1", "symbols": [] },
{ "name": "staticString$ebnf$1", "symbols": ["staticString$ebnf$1", (lexer.has("_") ? { type: "_" } : _)], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "staticString", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"string\"" }, "staticString$ebnf$1", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("quotedValue") ? { type: "quotedValue" } : quotedValue), (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": staticStringToHex },
{ "name": "staticBytes$ebnf$1", "symbols": [] },
{ "name": "staticBytes$ebnf$1", "symbols": ["staticBytes$ebnf$1", (lexer.has("_") ? { type: "_" } : _)], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "staticBytes", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"bytes\"" }, "staticBytes$ebnf$1", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("quotedValue") ? { type: "quotedValue" } : quotedValue), (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": staticBytesToHex },
{ "name": "staticObject", "symbols": ["staticInt"], "postprocess": id },
{ "name": "staticObject", "symbols": ["staticString"], "postprocess": id },
{ "name": "staticObject", "symbols": ["staticBytes"], "postprocess": id },
{ "name": "primBare$ebnf$1", "symbols": [] },
{ "name": "primBare$ebnf$1", "symbols": ["primBare$ebnf$1", (lexer.has("_") ? { type: "_" } : _)], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "primBare", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"prim\"" }, "primBare$ebnf$1", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("keyword") ? { type: "keyword" } : keyword), (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": primBareToHex },
{ "name": "primArg$ebnf$1", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArg$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primArg$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArg$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primArg$ebnf$3$subexpression$1$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "primArg$ebnf$3$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primArg$ebnf$3$subexpression$1$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArg$ebnf$3$subexpression$1$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primArg$ebnf$3$subexpression$1", "symbols": ["any", "primArg$ebnf$3$subexpression$1$ebnf$1", "primArg$ebnf$3$subexpression$1$ebnf$2"] },
{ "name": "primArg$ebnf$3", "symbols": ["primArg$ebnf$3$subexpression$1"] },
{ "name": "primArg$ebnf$3$subexpression$2$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "primArg$ebnf$3$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primArg$ebnf$3$subexpression$2$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArg$ebnf$3$subexpression$2$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primArg$ebnf$3$subexpression$2", "symbols": ["any", "primArg$ebnf$3$subexpression$2$ebnf$1", "primArg$ebnf$3$subexpression$2$ebnf$2"] },
{ "name": "primArg$ebnf$3", "symbols": ["primArg$ebnf$3", "primArg$ebnf$3$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "primArg", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"prim\"" }, "primArg$ebnf$1", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("keyword") ? { type: "keyword" } : keyword), (lexer.has("comma") ? { type: "comma" } : comma), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"args\"" }, "primArg$ebnf$2", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("lbracket") ? { type: "lbracket" } : lbracket), (lexer.has("_") ? { type: "_" } : _), "primArg$ebnf$3", (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbracket") ? { type: "rbracket" } : rbracket), (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": primArgToHex },
{ "name": "primAnn$ebnf$1", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primAnn$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primAnn$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primAnn$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primAnn$ebnf$3$subexpression$1$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "primAnn$ebnf$3$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primAnn$ebnf$3$subexpression$1$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primAnn$ebnf$3$subexpression$1$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primAnn$ebnf$3$subexpression$1", "symbols": [(lexer.has("quotedValue") ? { type: "quotedValue" } : quotedValue), "primAnn$ebnf$3$subexpression$1$ebnf$1", "primAnn$ebnf$3$subexpression$1$ebnf$2"] },
{ "name": "primAnn$ebnf$3", "symbols": ["primAnn$ebnf$3$subexpression$1"] },
{ "name": "primAnn$ebnf$3$subexpression$2$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "primAnn$ebnf$3$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primAnn$ebnf$3$subexpression$2$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primAnn$ebnf$3$subexpression$2$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primAnn$ebnf$3$subexpression$2", "symbols": [(lexer.has("quotedValue") ? { type: "quotedValue" } : quotedValue), "primAnn$ebnf$3$subexpression$2$ebnf$1", "primAnn$ebnf$3$subexpression$2$ebnf$2"] },
{ "name": "primAnn$ebnf$3", "symbols": ["primAnn$ebnf$3", "primAnn$ebnf$3$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "primAnn", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"prim\"" }, "primAnn$ebnf$1", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("keyword") ? { type: "keyword" } : keyword), (lexer.has("comma") ? { type: "comma" } : comma), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"annots\"" }, "primAnn$ebnf$2", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("lbracket") ? { type: "lbracket" } : lbracket), (lexer.has("_") ? { type: "_" } : _), "primAnn$ebnf$3", (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbracket") ? { type: "rbracket" } : rbracket), (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": primAnnToHex },
{ "name": "primArgAnn$ebnf$1", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArgAnn$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArgAnn$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$3$subexpression$1$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "primArgAnn$ebnf$3$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$3$subexpression$1$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArgAnn$ebnf$3$subexpression$1$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$3$subexpression$1", "symbols": ["any", "primArgAnn$ebnf$3$subexpression$1$ebnf$1", "primArgAnn$ebnf$3$subexpression$1$ebnf$2"] },
{ "name": "primArgAnn$ebnf$3", "symbols": ["primArgAnn$ebnf$3$subexpression$1"] },
{ "name": "primArgAnn$ebnf$3$subexpression$2$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "primArgAnn$ebnf$3$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$3$subexpression$2$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArgAnn$ebnf$3$subexpression$2$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$3$subexpression$2", "symbols": ["any", "primArgAnn$ebnf$3$subexpression$2$ebnf$1", "primArgAnn$ebnf$3$subexpression$2$ebnf$2"] },
{ "name": "primArgAnn$ebnf$3", "symbols": ["primArgAnn$ebnf$3", "primArgAnn$ebnf$3$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "primArgAnn$ebnf$4", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArgAnn$ebnf$4", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$5$subexpression$1$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "primArgAnn$ebnf$5$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$5$subexpression$1$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArgAnn$ebnf$5$subexpression$1$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$5$subexpression$1", "symbols": [(lexer.has("quotedValue") ? { type: "quotedValue" } : quotedValue), "primArgAnn$ebnf$5$subexpression$1$ebnf$1", "primArgAnn$ebnf$5$subexpression$1$ebnf$2"] },
{ "name": "primArgAnn$ebnf$5", "symbols": ["primArgAnn$ebnf$5$subexpression$1"] },
{ "name": "primArgAnn$ebnf$5$subexpression$2$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "primArgAnn$ebnf$5$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$5$subexpression$2$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "primArgAnn$ebnf$5$subexpression$2$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "primArgAnn$ebnf$5$subexpression$2", "symbols": [(lexer.has("quotedValue") ? { type: "quotedValue" } : quotedValue), "primArgAnn$ebnf$5$subexpression$2$ebnf$1", "primArgAnn$ebnf$5$subexpression$2$ebnf$2"] },
{ "name": "primArgAnn$ebnf$5", "symbols": ["primArgAnn$ebnf$5", "primArgAnn$ebnf$5$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "primArgAnn", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"prim\"" }, "primArgAnn$ebnf$1", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("keyword") ? { type: "keyword" } : keyword), (lexer.has("comma") ? { type: "comma" } : comma), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"args\"" }, "primArgAnn$ebnf$2", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("lbracket") ? { type: "lbracket" } : lbracket), (lexer.has("_") ? { type: "_" } : _), "primArgAnn$ebnf$3", (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbracket") ? { type: "rbracket" } : rbracket), (lexer.has("comma") ? { type: "comma" } : comma), (lexer.has("_") ? { type: "_" } : _), { "literal": "\"annots\"" }, "primArgAnn$ebnf$4", (lexer.has("colon") ? { type: "colon" } : colon), (lexer.has("_") ? { type: "_" } : _), (lexer.has("lbracket") ? { type: "lbracket" } : lbracket), (lexer.has("_") ? { type: "_" } : _), "primArgAnn$ebnf$5", (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbracket") ? { type: "rbracket" } : rbracket), (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": primArgAnnToHex },
{ "name": "primAny", "symbols": ["primBare"], "postprocess": id },
{ "name": "primAny", "symbols": ["primArg"], "postprocess": id },
{ "name": "primAny", "symbols": ["primAnn"], "postprocess": id },
{ "name": "primAny", "symbols": ["primArgAnn"], "postprocess": id },
{ "name": "any", "symbols": ["primAny"], "postprocess": id },
{ "name": "any", "symbols": ["staticObject"], "postprocess": id },
{ "name": "any", "symbols": ["anyArray"], "postprocess": id },
{ "name": "anyArray", "symbols": [(lexer.has("lbracket") ? { type: "lbracket" } : lbracket), (lexer.has("rbracket") ? { type: "rbracket" } : rbracket)], "postprocess": function (d) { return '0200000000'; } },
{ "name": "anyArray$ebnf$1$subexpression$1$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "anyArray$ebnf$1$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "anyArray$ebnf$1$subexpression$1$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "anyArray$ebnf$1$subexpression$1$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "anyArray$ebnf$1$subexpression$1", "symbols": ["any", "anyArray$ebnf$1$subexpression$1$ebnf$1", "anyArray$ebnf$1$subexpression$1$ebnf$2"] },
{ "name": "anyArray$ebnf$1", "symbols": ["anyArray$ebnf$1$subexpression$1"] },
{ "name": "anyArray$ebnf$1$subexpression$2$ebnf$1", "symbols": [(lexer.has("comma") ? { type: "comma" } : comma)], "postprocess": id },
{ "name": "anyArray$ebnf$1$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "anyArray$ebnf$1$subexpression$2$ebnf$2", "symbols": [(lexer.has("_") ? { type: "_" } : _)], "postprocess": id },
{ "name": "anyArray$ebnf$1$subexpression$2$ebnf$2", "symbols": [], "postprocess": () => null },
{ "name": "anyArray$ebnf$1$subexpression$2", "symbols": ["any", "anyArray$ebnf$1$subexpression$2$ebnf$1", "anyArray$ebnf$1$subexpression$2$ebnf$2"] },
{ "name": "anyArray$ebnf$1", "symbols": ["anyArray$ebnf$1", "anyArray$ebnf$1$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "anyArray", "symbols": [(lexer.has("lbracket") ? { type: "lbracket" } : lbracket), (lexer.has("_") ? { type: "_" } : _), "anyArray$ebnf$1", (lexer.has("_") ? { type: "_" } : _), (lexer.has("rbracket") ? { type: "rbracket" } : rbracket)], "postprocess": staticArrayToHex }
],
ParserStart: "main",
};
exports.default = grammar;
//# sourceMappingURL=Micheline.js.map
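The Micheline grammar above follows the same default-export migration; it is the piece that translates Micheline JSON fragments into hex for operation forging, and the `'0200000000'` postprocess result is the encoding of an empty sequence. A hedged sketch of driving it through conseiljs's `TezosLanguageUtil` wrapper; the export name is an assumption to verify against the installed version, and the fragment is illustrative:

```typescript
import { TezosLanguageUtil } from 'conseiljs';

// Assumed entry point into the Micheline grammar above; confirm the export name against
// the installed conseiljs version. The fragment is illustrative Micheline JSON.
const fragment = '{ "prim": "Pair", "args": [ { "int": "0" }, [] ] }';
const hex = TezosLanguageUtil.translateMichelineToHex(fragment);

// The trailing empty sequence [] should appear in the output as 0200000000,
// matching the anyArray postprocess result in the grammar above.
console.log(hex);
```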

@@ -1,13 +0,13 @@

export interface Token {
interface NearleyToken {
value: any;
[key: string]: any;
}
export interface Lexer {
interface NearleyLexer {
reset: (chunk: string, info: any) => void;
next: () => Token | undefined;
next: () => NearleyToken | undefined;
save: () => any;
formatError: (token: Token) => string;
formatError: (token: NearleyToken) => string;
has: (tokenType: string) => boolean;
}
export interface NearleyRule {
interface NearleyRule {
name: string;

@@ -17,3 +17,3 @@ symbols: NearleySymbol[];

}
export declare type NearleySymbol = string | {
declare type NearleySymbol = string | {
literal: any;

@@ -23,4 +23,8 @@ } | {

};
export declare var Lexer: Lexer | undefined;
export declare var ParserRules: NearleyRule[];
export declare var ParserStart: string;
interface Grammar {
Lexer: NearleyLexer | undefined;
ParserRules: NearleyRule[];
ParserStart: string;
}
declare const grammar: Grammar;
export default grammar;

@@ -32,3 +32,3 @@ "use strict";

baseInstruction: ['ABS', 'ADD', 'ADDRESS', 'AMOUNT', 'AND', 'BALANCE', 'BLAKE2B', 'CAR', 'CAST', 'CDR', 'CHECK_SIGNATURE',
'COMPARE', 'CONCAT', 'CONS', 'CONTRACT', 'CREATE_CONTRACT', 'DIP', 'DUP', 'EDIV', 'EMPTY_MAP',
'COMPARE', 'CONCAT', 'CONS', 'CONTRACT', 'DIP', 'DUP', 'EDIV',
'EMPTY_SET', 'EQ', 'EXEC', 'FAIL', 'FAILWITH', 'GE', 'GET', 'GT', 'HASH_KEY', 'IF', 'IF_CONS', 'IF_LEFT', 'IF_NONE',

@@ -365,2 +365,3 @@ 'IF_RIGHT', 'IMPLICIT_ACCOUNT', 'INT', 'ISNAT', 'ITER', 'LAMBDA', 'LE', 'LEFT', 'LOOP', 'LOOP_LEFT', 'LSL', 'LSR', 'LT',

const doubleArgKeywordToJson = d => `{ "prim": "${d[0]}", "args": [ ${d[2]}, ${d[4]} ] }`;
const doubleArgParenKeywordToJson = d => `{ "prim": "${d[0]}", "args": [ ${d[4]}, ${d[8]} ] }`;
const doubleArgInstrKeywordToJson = d => {

@@ -420,283 +421,289 @@ const word = `${d[0].toString()}`;

;
exports.Lexer = lexer;
exports.ParserRules = [
{ "name": "main", "symbols": ["instruction"], "postprocess": id },
{ "name": "main", "symbols": ["data"], "postprocess": id },
{ "name": "main", "symbols": ["type"], "postprocess": id },
{ "name": "main", "symbols": ["parameter"], "postprocess": id },
{ "name": "main", "symbols": ["storage"], "postprocess": id },
{ "name": "main", "symbols": ["code"], "postprocess": id },
{ "name": "main", "symbols": ["script"], "postprocess": id },
{ "name": "main", "symbols": ["parameterValue"], "postprocess": id },
{ "name": "main", "symbols": ["storageValue"], "postprocess": id },
{ "name": "main", "symbols": ["typeData"], "postprocess": id },
{ "name": "script", "symbols": ["parameter", "_", "storage", "_", "code"], "postprocess": scriptToJson },
{ "name": "parameterValue", "symbols": [(lexer.has("parameter") ? { type: "parameter" } : parameter), "_", "typeData", "_", "semicolons"], "postprocess": singleArgKeywordToJson },
{ "name": "storageValue", "symbols": [(lexer.has("storage") ? { type: "storage" } : storage), "_", "typeData", "_", "semicolons"], "postprocess": singleArgKeywordToJson },
{ "name": "parameter", "symbols": [(lexer.has("parameter") ? { type: "parameter" } : parameter), "_", "type", "_", "semicolons"], "postprocess": singleArgKeywordToJson },
{ "name": "storage", "symbols": [(lexer.has("storage") ? { type: "storage" } : storage), "_", "type", "_", "semicolons"], "postprocess": singleArgKeywordToJson },
{ "name": "code", "symbols": [(lexer.has("code") ? { type: "code" } : code), "_", "subInstruction", "_", "semicolons", "_"], "postprocess": d => d[2] },
{ "name": "code", "symbols": [(lexer.has("code") ? { type: "code" } : code), "_", { "literal": "{};" }], "postprocess": d => "code {}" },
{ "name": "type", "symbols": [(lexer.has("comparableType") ? { type: "comparableType" } : comparableType)], "postprocess": keywordToJson },
{ "name": "type", "symbols": [(lexer.has("constantType") ? { type: "constantType" } : constantType)], "postprocess": keywordToJson },
{ "name": "type", "symbols": [(lexer.has("singleArgType") ? { type: "singleArgType" } : singleArgType), "_", "type"], "postprocess": singleArgKeywordToJson },
{ "name": "type", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("singleArgType") ? { type: "singleArgType" } : singleArgType), "_", "type", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": singleArgKeywordWithParenToJson },
{ "name": "type", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("singleArgType") ? { type: "singleArgType" } : singleArgType), "_", (lexer.has("lparen") ? { type: "lparen" } : lparen), "_", "type", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen), "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": singleArgKeywordWithParenToJson },
{ "name": "type", "symbols": [(lexer.has("doubleArgType") ? { type: "doubleArgType" } : doubleArgType), "_", "type", "_", "type"], "postprocess": doubleArgKeywordToJson },
{ "name": "type", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("doubleArgType") ? { type: "doubleArgType" } : doubleArgType), "_", "type", "_", "type", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": doubleArgKeywordWithParenToJson },
{ "name": "type$ebnf$1$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$1", "symbols": ["type$ebnf$1$subexpression$1"] },
{ "name": "type$ebnf$1$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$1", "symbols": ["type$ebnf$1", "type$ebnf$1$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "type", "symbols": [(lexer.has("comparableType") ? { type: "comparableType" } : comparableType), "type$ebnf$1"], "postprocess": keywordToJson },
{ "name": "type$ebnf$2$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$2", "symbols": ["type$ebnf$2$subexpression$1"] },
{ "name": "type$ebnf$2$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$2", "symbols": ["type$ebnf$2", "type$ebnf$2$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "type", "symbols": [(lexer.has("constantType") ? { type: "constantType" } : constantType), "type$ebnf$2"], "postprocess": keywordToJson },
{ "name": "type$ebnf$3$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$3", "symbols": ["type$ebnf$3$subexpression$1"] },
{ "name": "type$ebnf$3$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$3", "symbols": ["type$ebnf$3", "type$ebnf$3$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "type", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("comparableType") ? { type: "comparableType" } : comparableType), "type$ebnf$3", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": comparableTypeToJson },
{ "name": "type$ebnf$4$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$4", "symbols": ["type$ebnf$4$subexpression$1"] },
{ "name": "type$ebnf$4$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$4", "symbols": ["type$ebnf$4", "type$ebnf$4$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "type", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("constantType") ? { type: "constantType" } : constantType), "type$ebnf$4", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": comparableTypeToJson },
{ "name": "type$ebnf$5$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$5", "symbols": ["type$ebnf$5$subexpression$1"] },
{ "name": "type$ebnf$5$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$5", "symbols": ["type$ebnf$5", "type$ebnf$5$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "type", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("singleArgType") ? { type: "singleArgType" } : singleArgType), "type$ebnf$5", "_", "type", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": singleArgTypeKeywordWithParenToJson },
{ "name": "type$ebnf$6$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$6", "symbols": ["type$ebnf$6$subexpression$1"] },
{ "name": "type$ebnf$6$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$6", "symbols": ["type$ebnf$6", "type$ebnf$6$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "type", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("doubleArgType") ? { type: "doubleArgType" } : doubleArgType), "type$ebnf$6", "_", "type", "_", "type", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": doubleArgTypeKeywordWithParenToJson },
{ "name": "typeData", "symbols": [(lexer.has("singleArgType") ? { type: "singleArgType" } : singleArgType), "_", "typeData"], "postprocess": singleArgKeywordToJson },
{ "name": "typeData", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("singleArgType") ? { type: "singleArgType" } : singleArgType), "_", "typeData", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": singleArgKeywordWithParenToJson },
{ "name": "typeData", "symbols": [(lexer.has("doubleArgType") ? { type: "doubleArgType" } : doubleArgType), "_", "typeData", "_", "typeData"], "postprocess": doubleArgKeywordToJson },
{ "name": "typeData", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("doubleArgType") ? { type: "doubleArgType" } : doubleArgType), "_", "typeData", "_", "typeData", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": doubleArgKeywordWithParenToJson },
{ "name": "typeData", "symbols": ["subTypeData"], "postprocess": id },
{ "name": "typeData", "symbols": ["subTypeElt"], "postprocess": id },
{ "name": "typeData", "symbols": [(lexer.has("number") ? { type: "number" } : number)], "postprocess": intToJson },
{ "name": "typeData", "symbols": [(lexer.has("string") ? { type: "string" } : string)], "postprocess": stringToJson },
{ "name": "typeData", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => [] },
{ "name": "data", "symbols": [(lexer.has("constantData") ? { type: "constantData" } : constantData)], "postprocess": keywordToJson },
{ "name": "data", "symbols": [(lexer.has("singleArgData") ? { type: "singleArgData" } : singleArgData), "_", "data"], "postprocess": singleArgKeywordToJson },
{ "name": "data", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("singleArgData") ? { type: "singleArgData" } : singleArgData), "_", "data", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": singleArgKeywordWithParenToJson },
{ "name": "data", "symbols": [(lexer.has("doubleArgData") ? { type: "doubleArgData" } : doubleArgData), "_", "data", "_", "data"], "postprocess": doubleArgKeywordToJson },
{ "name": "data", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("doubleArgData") ? { type: "doubleArgData" } : doubleArgData), "_", "data", "_", "data", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": doubleArgKeywordWithParenToJson },
{ "name": "data", "symbols": ["subData"], "postprocess": id },
{ "name": "data", "symbols": ["subElt"], "postprocess": id },
{ "name": "data", "symbols": [(lexer.has("number") ? { type: "number" } : number)], "postprocess": intToJson },
{ "name": "data", "symbols": [(lexer.has("string") ? { type: "string" } : string)], "postprocess": stringToJson },
{ "name": "subTypeData", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => "[]" },
{ "name": "subTypeData$ebnf$1$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeData$ebnf$1$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeData$ebnf$1$subexpression$1", "symbols": ["data", "subTypeData$ebnf$1$subexpression$1$ebnf$1", "_"] },
{ "name": "subTypeData$ebnf$1", "symbols": ["subTypeData$ebnf$1$subexpression$1"] },
{ "name": "subTypeData$ebnf$1$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeData$ebnf$1$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeData$ebnf$1$subexpression$2", "symbols": ["data", "subTypeData$ebnf$1$subexpression$2$ebnf$1", "_"] },
{ "name": "subTypeData$ebnf$1", "symbols": ["subTypeData$ebnf$1", "subTypeData$ebnf$1$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subTypeData", "symbols": [{ "literal": "{" }, "_", "subTypeData$ebnf$1", { "literal": "}" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subTypeData$ebnf$2$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeData$ebnf$2$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeData$ebnf$2$subexpression$1", "symbols": ["data", "subTypeData$ebnf$2$subexpression$1$ebnf$1", "_"] },
{ "name": "subTypeData$ebnf$2", "symbols": ["subTypeData$ebnf$2$subexpression$1"] },
{ "name": "subTypeData$ebnf$2$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeData$ebnf$2$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeData$ebnf$2$subexpression$2", "symbols": ["data", "subTypeData$ebnf$2$subexpression$2$ebnf$1", "_"] },
{ "name": "subTypeData$ebnf$2", "symbols": ["subTypeData$ebnf$2", "subTypeData$ebnf$2$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subTypeData", "symbols": [{ "literal": "(" }, "_", "subTypeData$ebnf$2", { "literal": ")" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subTypeElt", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => "[]" },
{ "name": "subTypeElt$ebnf$1$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeElt$ebnf$1$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeElt$ebnf$1$subexpression$1", "symbols": ["typeElt", "subTypeElt$ebnf$1$subexpression$1$ebnf$1", "_"] },
{ "name": "subTypeElt$ebnf$1", "symbols": ["subTypeElt$ebnf$1$subexpression$1"] },
{ "name": "subTypeElt$ebnf$1$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeElt$ebnf$1$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeElt$ebnf$1$subexpression$2", "symbols": ["typeElt", "subTypeElt$ebnf$1$subexpression$2$ebnf$1", "_"] },
{ "name": "subTypeElt$ebnf$1", "symbols": ["subTypeElt$ebnf$1", "subTypeElt$ebnf$1$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subTypeElt", "symbols": [{ "literal": "{" }, "_", "subTypeElt$ebnf$1", { "literal": "}" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subTypeElt$ebnf$2$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeElt$ebnf$2$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeElt$ebnf$2$subexpression$1", "symbols": ["typeElt", "subTypeElt$ebnf$2$subexpression$1$ebnf$1", "_"] },
{ "name": "subTypeElt$ebnf$2", "symbols": ["subTypeElt$ebnf$2$subexpression$1"] },
{ "name": "subTypeElt$ebnf$2$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeElt$ebnf$2$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeElt$ebnf$2$subexpression$2", "symbols": ["typeElt", "subTypeElt$ebnf$2$subexpression$2$ebnf$1", "_"] },
{ "name": "subTypeElt$ebnf$2", "symbols": ["subTypeElt$ebnf$2", "subTypeElt$ebnf$2$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subTypeElt", "symbols": [{ "literal": "(" }, "_", "subTypeElt$ebnf$2", { "literal": ")" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subTypeElt$ebnf$3$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeElt$ebnf$3$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeElt$ebnf$3$subexpression$1", "symbols": ["typeElt", "_", "subTypeElt$ebnf$3$subexpression$1$ebnf$1", "_"] },
{ "name": "subTypeElt$ebnf$3", "symbols": ["subTypeElt$ebnf$3$subexpression$1"] },
{ "name": "subTypeElt$ebnf$3$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeElt$ebnf$3$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeElt$ebnf$3$subexpression$2", "symbols": ["typeElt", "_", "subTypeElt$ebnf$3$subexpression$2$ebnf$1", "_"] },
{ "name": "subTypeElt$ebnf$3", "symbols": ["subTypeElt$ebnf$3", "subTypeElt$ebnf$3$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subTypeElt", "symbols": [{ "literal": "{" }, "_", "subTypeElt$ebnf$3", { "literal": "}" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subTypeElt$ebnf$4$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeElt$ebnf$4$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeElt$ebnf$4$subexpression$1", "symbols": ["typeElt", "_", "subTypeElt$ebnf$4$subexpression$1$ebnf$1", "_"] },
{ "name": "subTypeElt$ebnf$4", "symbols": ["subTypeElt$ebnf$4$subexpression$1"] },
{ "name": "subTypeElt$ebnf$4$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeElt$ebnf$4$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeElt$ebnf$4$subexpression$2", "symbols": ["typeElt", "_", "subTypeElt$ebnf$4$subexpression$2$ebnf$1", "_"] },
{ "name": "subTypeElt$ebnf$4", "symbols": ["subTypeElt$ebnf$4", "subTypeElt$ebnf$4$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subTypeElt", "symbols": [{ "literal": "(" }, "_", "subTypeElt$ebnf$4", { "literal": ")" }], "postprocess": instructionSetToJsonSemi },
{ "name": "typeElt", "symbols": [(lexer.has("elt") ? { type: "elt" } : elt), "_", "typeData", "_", "typeData"], "postprocess": doubleArgKeywordToJson },
{ "name": "subInstruction", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => "" },
{ "name": "subInstruction", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", "instruction", "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => d[2] },
{ "name": "subInstruction$ebnf$1$subexpression$1", "symbols": ["instruction", "_", (lexer.has("semicolon") ? { type: "semicolon" } : semicolon), "_"] },
{ "name": "subInstruction$ebnf$1", "symbols": ["subInstruction$ebnf$1$subexpression$1"] },
{ "name": "subInstruction$ebnf$1$subexpression$2", "symbols": ["instruction", "_", (lexer.has("semicolon") ? { type: "semicolon" } : semicolon), "_"] },
{ "name": "subInstruction$ebnf$1", "symbols": ["subInstruction$ebnf$1", "subInstruction$ebnf$1$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subInstruction", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", "subInstruction$ebnf$1", "instruction", "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": instructionSetToJsonNoSemi },
{ "name": "subInstruction$ebnf$2$subexpression$1", "symbols": ["instruction", "_", (lexer.has("semicolon") ? { type: "semicolon" } : semicolon), "_"] },
{ "name": "subInstruction$ebnf$2", "symbols": ["subInstruction$ebnf$2$subexpression$1"] },
{ "name": "subInstruction$ebnf$2$subexpression$2", "symbols": ["instruction", "_", (lexer.has("semicolon") ? { type: "semicolon" } : semicolon), "_"] },
{ "name": "subInstruction$ebnf$2", "symbols": ["subInstruction$ebnf$2", "subInstruction$ebnf$2$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subInstruction", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", "subInstruction$ebnf$2", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": instructionSetToJsonSemi },
{ "name": "instructions", "symbols": [(lexer.has("baseInstruction") ? { type: "baseInstruction" } : baseInstruction)] },
{ "name": "instructions", "symbols": [(lexer.has("macroCADR") ? { type: "macroCADR" } : macroCADR)] },
{ "name": "instructions", "symbols": [(lexer.has("macroDIP") ? { type: "macroDIP" } : macroDIP)] },
{ "name": "instructions", "symbols": [(lexer.has("macroDUP") ? { type: "macroDUP" } : macroDUP)] },
{ "name": "instructions", "symbols": [(lexer.has("macroSETCADR") ? { type: "macroSETCADR" } : macroSETCADR)] },
{ "name": "instructions", "symbols": [(lexer.has("macroASSERTlist") ? { type: "macroASSERTlist" } : macroASSERTlist)] },
{ "name": "instruction", "symbols": ["instructions"], "postprocess": keywordToJson },
{ "name": "instruction$ebnf$1$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$1", "symbols": ["instruction$ebnf$1$subexpression$1"] },
{ "name": "instruction$ebnf$1$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$1", "symbols": ["instruction$ebnf$1", "instruction$ebnf$1$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": ["instructions", "instruction$ebnf$1", "_"], "postprocess": keywordToJson },
{ "name": "instruction", "symbols": ["instructions", "_", "subInstruction"], "postprocess": singleArgInstrKeywordToJson },
{ "name": "instruction$ebnf$2$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$2", "symbols": ["instruction$ebnf$2$subexpression$1"] },
{ "name": "instruction$ebnf$2$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$2", "symbols": ["instruction$ebnf$2", "instruction$ebnf$2$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": ["instructions", "instruction$ebnf$2", "_", "subInstruction"], "postprocess": singleArgTypeKeywordToJson },
{ "name": "instruction", "symbols": ["instructions", "_", "type"], "postprocess": singleArgKeywordToJson },
{ "name": "instruction$ebnf$3$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$3", "symbols": ["instruction$ebnf$3$subexpression$1"] },
{ "name": "instruction$ebnf$3$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$3", "symbols": ["instruction$ebnf$3", "instruction$ebnf$3$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": ["instructions", "instruction$ebnf$3", "_", "type"], "postprocess": singleArgTypeKeywordToJson },
{ "name": "instruction", "symbols": ["instructions", "_", "data"], "postprocess": singleArgKeywordToJson },
{ "name": "instruction$ebnf$4$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$4", "symbols": ["instruction$ebnf$4$subexpression$1"] },
{ "name": "instruction$ebnf$4$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$4", "symbols": ["instruction$ebnf$4", "instruction$ebnf$4$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": ["instructions", "instruction$ebnf$4", "_", "data"], "postprocess": singleArgTypeKeywordToJson },
{ "name": "instruction", "symbols": ["instructions", "_", "type", "_", "type", "_", "subInstruction"], "postprocess": tripleArgKeyWordToJson },
{ "name": "instruction$ebnf$5$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$5", "symbols": ["instruction$ebnf$5$subexpression$1"] },
{ "name": "instruction$ebnf$5$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$5", "symbols": ["instruction$ebnf$5", "instruction$ebnf$5$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": ["instructions", "instruction$ebnf$5", "_", "type", "_", "type", "_", "subInstruction"], "postprocess": tripleArgTypeKeyWordToJson },
{ "name": "instruction", "symbols": ["instructions", "_", "subInstruction", "_", "subInstruction"], "postprocess": doubleArgInstrKeywordToJson },
{ "name": "instruction$ebnf$6$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$6", "symbols": ["instruction$ebnf$6$subexpression$1"] },
{ "name": "instruction$ebnf$6$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$6", "symbols": ["instruction$ebnf$6", "instruction$ebnf$6$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": ["instructions", "instruction$ebnf$6", "_", "subInstruction", "_", "subInstruction"], "postprocess": doubleArgTypeKeywordToJson },
{ "name": "instruction", "symbols": ["instructions", "_", "type", "_", "type"], "postprocess": doubleArgKeywordToJson },
{ "name": "instruction$ebnf$7$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$7", "symbols": ["instruction$ebnf$7$subexpression$1"] },
{ "name": "instruction$ebnf$7$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$7", "symbols": ["instruction$ebnf$7", "instruction$ebnf$7$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": ["instructions", "instruction$ebnf$7", "_", "type", "_", "type"], "postprocess": doubleArgTypeKeywordToJson },
{ "name": "instruction", "symbols": [{ "literal": "PUSH" }, "_", "type", "_", "data"], "postprocess": doubleArgKeywordToJson },
{ "name": "instruction", "symbols": [{ "literal": "PUSH" }, "_", "type", "_", (lexer.has("lbrace") ? { type: "lbrace" } : lbrace), (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": pushToJson },
{ "name": "instruction$ebnf$8$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$8", "symbols": ["instruction$ebnf$8$subexpression$1"] },
{ "name": "instruction$ebnf$8$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$8", "symbols": ["instruction$ebnf$8", "instruction$ebnf$8$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": [{ "literal": "PUSH" }, "instruction$ebnf$8", "_", "type", "_", "data"], "postprocess": pushWithAnnotsToJson },
{ "name": "instruction$ebnf$9", "symbols": [/[0-9]/] },
{ "name": "instruction$ebnf$9", "symbols": ["instruction$ebnf$9", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": [{ "literal": "DIP" }, "_", "instruction$ebnf$9", "_", "subInstruction"], "postprocess": dipnToJson },
{ "name": "instruction$ebnf$10", "symbols": [/[0-9]/] },
{ "name": "instruction$ebnf$10", "symbols": ["instruction$ebnf$10", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": [{ "literal": "DUP" }, "_", "instruction$ebnf$10", "_", "subInstruction"], "postprocess": dipnToJson },
{ "name": "instruction$ebnf$11", "symbols": [/[0-9]/] },
{ "name": "instruction$ebnf$11", "symbols": ["instruction$ebnf$11", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": [{ "literal": "DIG" }, "_", "instruction$ebnf$11"], "postprocess": dignToJson },
{ "name": "instruction$ebnf$12", "symbols": [/[0-9]/] },
{ "name": "instruction$ebnf$12", "symbols": ["instruction$ebnf$12", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": [{ "literal": "DUG" }, "_", "instruction$ebnf$12"], "postprocess": dignToJson },
{ "name": "instruction$ebnf$13", "symbols": [/[0-9]/] },
{ "name": "instruction$ebnf$13", "symbols": ["instruction$ebnf$13", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": [{ "literal": "DROP" }, "_", "instruction$ebnf$13"], "postprocess": dropnToJson },
{ "name": "instruction", "symbols": [{ "literal": "DROP" }], "postprocess": keywordToJson },
{ "name": "instruction", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => "" },
{ "name": "instruction", "symbols": [{ "literal": "CREATE_CONTRACT" }, "_", (lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", "parameter", "_", "storage", "_", "code", "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": subContractToJson },
{ "name": "subData", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => "[]" },
{ "name": "subData$ebnf$1$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subData$ebnf$1$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subData$ebnf$1$subexpression$1", "symbols": ["data", "subData$ebnf$1$subexpression$1$ebnf$1", "_"] },
{ "name": "subData$ebnf$1", "symbols": ["subData$ebnf$1$subexpression$1"] },
{ "name": "subData$ebnf$1$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subData$ebnf$1$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subData$ebnf$1$subexpression$2", "symbols": ["data", "subData$ebnf$1$subexpression$2$ebnf$1", "_"] },
{ "name": "subData$ebnf$1", "symbols": ["subData$ebnf$1", "subData$ebnf$1$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subData", "symbols": [{ "literal": "{" }, "_", "subData$ebnf$1", { "literal": "}" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subData$ebnf$2$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subData$ebnf$2$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subData$ebnf$2$subexpression$1", "symbols": ["data", "subData$ebnf$2$subexpression$1$ebnf$1", "_"] },
{ "name": "subData$ebnf$2", "symbols": ["subData$ebnf$2$subexpression$1"] },
{ "name": "subData$ebnf$2$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subData$ebnf$2$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subData$ebnf$2$subexpression$2", "symbols": ["data", "subData$ebnf$2$subexpression$2$ebnf$1", "_"] },
{ "name": "subData$ebnf$2", "symbols": ["subData$ebnf$2", "subData$ebnf$2$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subData", "symbols": [{ "literal": "(" }, "_", "subData$ebnf$2", { "literal": ")" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subData$ebnf$3$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subData$ebnf$3$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subData$ebnf$3$subexpression$1", "symbols": ["data", "_", "subData$ebnf$3$subexpression$1$ebnf$1", "_"] },
{ "name": "subData$ebnf$3", "symbols": ["subData$ebnf$3$subexpression$1"] },
{ "name": "subData$ebnf$3$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subData$ebnf$3$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subData$ebnf$3$subexpression$2", "symbols": ["data", "_", "subData$ebnf$3$subexpression$2$ebnf$1", "_"] },
{ "name": "subData$ebnf$3", "symbols": ["subData$ebnf$3", "subData$ebnf$3$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subData", "symbols": [{ "literal": "{" }, "_", "subData$ebnf$3", { "literal": "}" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subData$ebnf$4$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subData$ebnf$4$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subData$ebnf$4$subexpression$1", "symbols": ["data", "_", "subData$ebnf$4$subexpression$1$ebnf$1", "_"] },
{ "name": "subData$ebnf$4", "symbols": ["subData$ebnf$4$subexpression$1"] },
{ "name": "subData$ebnf$4$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subData$ebnf$4$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subData$ebnf$4$subexpression$2", "symbols": ["data", "_", "subData$ebnf$4$subexpression$2$ebnf$1", "_"] },
{ "name": "subData$ebnf$4", "symbols": ["subData$ebnf$4", "subData$ebnf$4$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subData", "symbols": [{ "literal": "(" }, "_", "subData$ebnf$4", { "literal": ")" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subElt", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => "[]" },
{ "name": "subElt$ebnf$1$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subElt$ebnf$1$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subElt$ebnf$1$subexpression$1", "symbols": ["elt", "subElt$ebnf$1$subexpression$1$ebnf$1", "_"] },
{ "name": "subElt$ebnf$1", "symbols": ["subElt$ebnf$1$subexpression$1"] },
{ "name": "subElt$ebnf$1$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subElt$ebnf$1$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subElt$ebnf$1$subexpression$2", "symbols": ["elt", "subElt$ebnf$1$subexpression$2$ebnf$1", "_"] },
{ "name": "subElt$ebnf$1", "symbols": ["subElt$ebnf$1", "subElt$ebnf$1$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subElt", "symbols": [{ "literal": "{" }, "_", "subElt$ebnf$1", { "literal": "}" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subElt$ebnf$2$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subElt$ebnf$2$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subElt$ebnf$2$subexpression$1", "symbols": ["elt", "subElt$ebnf$2$subexpression$1$ebnf$1", "_"] },
{ "name": "subElt$ebnf$2", "symbols": ["subElt$ebnf$2$subexpression$1"] },
{ "name": "subElt$ebnf$2$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subElt$ebnf$2$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subElt$ebnf$2$subexpression$2", "symbols": ["elt", "subElt$ebnf$2$subexpression$2$ebnf$1", "_"] },
{ "name": "subElt$ebnf$2", "symbols": ["subElt$ebnf$2", "subElt$ebnf$2$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subElt", "symbols": [{ "literal": "(" }, "_", "subElt$ebnf$2", { "literal": ")" }], "postprocess": instructionSetToJsonSemi },
{ "name": "elt", "symbols": [(lexer.has("elt") ? { type: "elt" } : elt), "_", "data", "_", "data"], "postprocess": doubleArgKeywordToJson },
{ "name": "_$ebnf$1", "symbols": [] },
{ "name": "_$ebnf$1", "symbols": ["_$ebnf$1", /[\s]/], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "_", "symbols": ["_$ebnf$1"] },
{ "name": "semicolons$ebnf$1", "symbols": [/[;]/], "postprocess": id },
{ "name": "semicolons$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "semicolons", "symbols": ["semicolons$ebnf$1"] }
];
exports.ParserStart = "main";
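// Usage sketch (illustrative): a compiled nearley grammar module like the one
// above is normally consumed through the nearley runtime; the require path and
// the sample input below are assumptions, not taken from this package.
const nearley = require('nearley');
const michelsonGrammar = require('./Michelson'); // hypothetical path to the compiled grammar module
const parser = new nearley.Parser(nearley.Grammar.fromCompiled(michelsonGrammar));
parser.feed('PUSH int 1'); // assumes the lexer tokenizes this single instruction
console.log(parser.results[0]); // first parse result; the *ToJson postprocessors emit a JSON-style form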
const grammar = {
Lexer: lexer,
ParserRules: [
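// Top-level entry points: a parse can start from an instruction, data, type, a contract section (parameter, storage, code), a full script, a parameter/storage value, or typeData.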
{ "name": "main", "symbols": ["instruction"], "postprocess": id },
{ "name": "main", "symbols": ["data"], "postprocess": id },
{ "name": "main", "symbols": ["type"], "postprocess": id },
{ "name": "main", "symbols": ["parameter"], "postprocess": id },
{ "name": "main", "symbols": ["storage"], "postprocess": id },
{ "name": "main", "symbols": ["code"], "postprocess": id },
{ "name": "main", "symbols": ["script"], "postprocess": id },
{ "name": "main", "symbols": ["parameterValue"], "postprocess": id },
{ "name": "main", "symbols": ["storageValue"], "postprocess": id },
{ "name": "main", "symbols": ["typeData"], "postprocess": id },
{ "name": "script", "symbols": ["parameter", "_", "storage", "_", "code"], "postprocess": scriptToJson },
{ "name": "parameterValue", "symbols": [(lexer.has("parameter") ? { type: "parameter" } : parameter), "_", "typeData", "_", "semicolons"], "postprocess": singleArgKeywordToJson },
{ "name": "storageValue", "symbols": [(lexer.has("storage") ? { type: "storage" } : storage), "_", "typeData", "_", "semicolons"], "postprocess": singleArgKeywordToJson },
{ "name": "parameter", "symbols": [(lexer.has("parameter") ? { type: "parameter" } : parameter), "_", "type", "_", "semicolons"], "postprocess": singleArgKeywordToJson },
{ "name": "storage", "symbols": [(lexer.has("storage") ? { type: "storage" } : storage), "_", "type", "_", "semicolons"], "postprocess": singleArgKeywordToJson },
{ "name": "code", "symbols": [(lexer.has("code") ? { type: "code" } : code), "_", "subInstruction", "_", "semicolons", "_"], "postprocess": d => d[2] },
{ "name": "code", "symbols": [(lexer.has("code") ? { type: "code" } : code), "_", { "literal": "{};" }], "postprocess": d => "code {}" },
{ "name": "type", "symbols": [(lexer.has("comparableType") ? { type: "comparableType" } : comparableType)], "postprocess": keywordToJson },
{ "name": "type", "symbols": [(lexer.has("constantType") ? { type: "constantType" } : constantType)], "postprocess": keywordToJson },
{ "name": "type", "symbols": [(lexer.has("singleArgType") ? { type: "singleArgType" } : singleArgType), "_", "type"], "postprocess": singleArgKeywordToJson },
{ "name": "type", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("singleArgType") ? { type: "singleArgType" } : singleArgType), "_", "type", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": singleArgKeywordWithParenToJson },
{ "name": "type", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("singleArgType") ? { type: "singleArgType" } : singleArgType), "_", (lexer.has("lparen") ? { type: "lparen" } : lparen), "_", "type", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen), "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": singleArgKeywordWithParenToJson },
{ "name": "type", "symbols": [(lexer.has("doubleArgType") ? { type: "doubleArgType" } : doubleArgType), "_", "type", "_", "type"], "postprocess": doubleArgKeywordToJson },
{ "name": "type", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("doubleArgType") ? { type: "doubleArgType" } : doubleArgType), "_", "type", "_", "type", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": doubleArgKeywordWithParenToJson },
{ "name": "type$ebnf$1$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$1", "symbols": ["type$ebnf$1$subexpression$1"] },
{ "name": "type$ebnf$1$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$1", "symbols": ["type$ebnf$1", "type$ebnf$1$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "type", "symbols": [(lexer.has("comparableType") ? { type: "comparableType" } : comparableType), "type$ebnf$1"], "postprocess": keywordToJson },
{ "name": "type$ebnf$2$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$2", "symbols": ["type$ebnf$2$subexpression$1"] },
{ "name": "type$ebnf$2$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$2", "symbols": ["type$ebnf$2", "type$ebnf$2$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "type", "symbols": [(lexer.has("constantType") ? { type: "constantType" } : constantType), "type$ebnf$2"], "postprocess": keywordToJson },
{ "name": "type$ebnf$3$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$3", "symbols": ["type$ebnf$3$subexpression$1"] },
{ "name": "type$ebnf$3$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$3", "symbols": ["type$ebnf$3", "type$ebnf$3$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "type", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("comparableType") ? { type: "comparableType" } : comparableType), "type$ebnf$3", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": comparableTypeToJson },
{ "name": "type$ebnf$4$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$4", "symbols": ["type$ebnf$4$subexpression$1"] },
{ "name": "type$ebnf$4$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$4", "symbols": ["type$ebnf$4", "type$ebnf$4$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "type", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("constantType") ? { type: "constantType" } : constantType), "type$ebnf$4", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": comparableTypeToJson },
{ "name": "type$ebnf$5$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$5", "symbols": ["type$ebnf$5$subexpression$1"] },
{ "name": "type$ebnf$5$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$5", "symbols": ["type$ebnf$5", "type$ebnf$5$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "type", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("singleArgType") ? { type: "singleArgType" } : singleArgType), "type$ebnf$5", "_", "type", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": singleArgTypeKeywordWithParenToJson },
{ "name": "type$ebnf$6$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$6", "symbols": ["type$ebnf$6$subexpression$1"] },
{ "name": "type$ebnf$6$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "type$ebnf$6", "symbols": ["type$ebnf$6", "type$ebnf$6$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "type", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("doubleArgType") ? { type: "doubleArgType" } : doubleArgType), "type$ebnf$6", "_", "type", "_", "type", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": doubleArgTypeKeywordWithParenToJson },
{ "name": "typeData", "symbols": [(lexer.has("singleArgType") ? { type: "singleArgType" } : singleArgType), "_", "typeData"], "postprocess": singleArgKeywordToJson },
{ "name": "typeData", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("singleArgType") ? { type: "singleArgType" } : singleArgType), "_", "typeData", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": singleArgKeywordWithParenToJson },
{ "name": "typeData", "symbols": [(lexer.has("doubleArgType") ? { type: "doubleArgType" } : doubleArgType), "_", "typeData", "_", "typeData"], "postprocess": doubleArgKeywordToJson },
{ "name": "typeData", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("doubleArgType") ? { type: "doubleArgType" } : doubleArgType), "_", "typeData", "_", "typeData", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": doubleArgKeywordWithParenToJson },
{ "name": "typeData", "symbols": ["subTypeData"], "postprocess": id },
{ "name": "typeData", "symbols": ["subTypeElt"], "postprocess": id },
{ "name": "typeData", "symbols": [(lexer.has("number") ? { type: "number" } : number)], "postprocess": intToJson },
{ "name": "typeData", "symbols": [(lexer.has("string") ? { type: "string" } : string)], "postprocess": stringToJson },
{ "name": "typeData", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => [] },
{ "name": "data", "symbols": [(lexer.has("constantData") ? { type: "constantData" } : constantData)], "postprocess": keywordToJson },
{ "name": "data", "symbols": [(lexer.has("singleArgData") ? { type: "singleArgData" } : singleArgData), "_", "data"], "postprocess": singleArgKeywordToJson },
{ "name": "data", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("singleArgData") ? { type: "singleArgData" } : singleArgData), "_", "data", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": singleArgKeywordWithParenToJson },
{ "name": "data", "symbols": [(lexer.has("doubleArgData") ? { type: "doubleArgData" } : doubleArgData), "_", "data", "_", "data"], "postprocess": doubleArgKeywordToJson },
{ "name": "data", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("doubleArgData") ? { type: "doubleArgData" } : doubleArgData), "_", "data", "_", "data", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": doubleArgKeywordWithParenToJson },
{ "name": "data", "symbols": ["subData"], "postprocess": id },
{ "name": "data", "symbols": ["subElt"], "postprocess": id },
{ "name": "data", "symbols": [(lexer.has("number") ? { type: "number" } : number)], "postprocess": intToJson },
{ "name": "data", "symbols": [(lexer.has("string") ? { type: "string" } : string)], "postprocess": stringToJson },
{ "name": "subTypeData", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => "[]" },
{ "name": "subTypeData$ebnf$1$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeData$ebnf$1$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeData$ebnf$1$subexpression$1", "symbols": ["data", "subTypeData$ebnf$1$subexpression$1$ebnf$1", "_"] },
{ "name": "subTypeData$ebnf$1", "symbols": ["subTypeData$ebnf$1$subexpression$1"] },
{ "name": "subTypeData$ebnf$1$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeData$ebnf$1$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeData$ebnf$1$subexpression$2", "symbols": ["data", "subTypeData$ebnf$1$subexpression$2$ebnf$1", "_"] },
{ "name": "subTypeData$ebnf$1", "symbols": ["subTypeData$ebnf$1", "subTypeData$ebnf$1$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subTypeData", "symbols": [{ "literal": "{" }, "_", "subTypeData$ebnf$1", { "literal": "}" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subTypeData$ebnf$2$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeData$ebnf$2$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeData$ebnf$2$subexpression$1", "symbols": ["data", "subTypeData$ebnf$2$subexpression$1$ebnf$1", "_"] },
{ "name": "subTypeData$ebnf$2", "symbols": ["subTypeData$ebnf$2$subexpression$1"] },
{ "name": "subTypeData$ebnf$2$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeData$ebnf$2$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeData$ebnf$2$subexpression$2", "symbols": ["data", "subTypeData$ebnf$2$subexpression$2$ebnf$1", "_"] },
{ "name": "subTypeData$ebnf$2", "symbols": ["subTypeData$ebnf$2", "subTypeData$ebnf$2$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subTypeData", "symbols": [{ "literal": "(" }, "_", "subTypeData$ebnf$2", { "literal": ")" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subTypeElt", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => "[]" },
{ "name": "subTypeElt$ebnf$1$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeElt$ebnf$1$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeElt$ebnf$1$subexpression$1", "symbols": ["typeElt", "subTypeElt$ebnf$1$subexpression$1$ebnf$1", "_"] },
{ "name": "subTypeElt$ebnf$1", "symbols": ["subTypeElt$ebnf$1$subexpression$1"] },
{ "name": "subTypeElt$ebnf$1$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeElt$ebnf$1$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeElt$ebnf$1$subexpression$2", "symbols": ["typeElt", "subTypeElt$ebnf$1$subexpression$2$ebnf$1", "_"] },
{ "name": "subTypeElt$ebnf$1", "symbols": ["subTypeElt$ebnf$1", "subTypeElt$ebnf$1$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subTypeElt", "symbols": [{ "literal": "{" }, "_", "subTypeElt$ebnf$1", { "literal": "}" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subTypeElt$ebnf$2$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeElt$ebnf$2$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeElt$ebnf$2$subexpression$1", "symbols": ["typeElt", "subTypeElt$ebnf$2$subexpression$1$ebnf$1", "_"] },
{ "name": "subTypeElt$ebnf$2", "symbols": ["subTypeElt$ebnf$2$subexpression$1"] },
{ "name": "subTypeElt$ebnf$2$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeElt$ebnf$2$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeElt$ebnf$2$subexpression$2", "symbols": ["typeElt", "subTypeElt$ebnf$2$subexpression$2$ebnf$1", "_"] },
{ "name": "subTypeElt$ebnf$2", "symbols": ["subTypeElt$ebnf$2", "subTypeElt$ebnf$2$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subTypeElt", "symbols": [{ "literal": "(" }, "_", "subTypeElt$ebnf$2", { "literal": ")" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subTypeElt$ebnf$3$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeElt$ebnf$3$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeElt$ebnf$3$subexpression$1", "symbols": ["typeElt", "_", "subTypeElt$ebnf$3$subexpression$1$ebnf$1", "_"] },
{ "name": "subTypeElt$ebnf$3", "symbols": ["subTypeElt$ebnf$3$subexpression$1"] },
{ "name": "subTypeElt$ebnf$3$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeElt$ebnf$3$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeElt$ebnf$3$subexpression$2", "symbols": ["typeElt", "_", "subTypeElt$ebnf$3$subexpression$2$ebnf$1", "_"] },
{ "name": "subTypeElt$ebnf$3", "symbols": ["subTypeElt$ebnf$3", "subTypeElt$ebnf$3$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subTypeElt", "symbols": [{ "literal": "{" }, "_", "subTypeElt$ebnf$3", { "literal": "}" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subTypeElt$ebnf$4$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeElt$ebnf$4$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeElt$ebnf$4$subexpression$1", "symbols": ["typeElt", "_", "subTypeElt$ebnf$4$subexpression$1$ebnf$1", "_"] },
{ "name": "subTypeElt$ebnf$4", "symbols": ["subTypeElt$ebnf$4$subexpression$1"] },
{ "name": "subTypeElt$ebnf$4$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subTypeElt$ebnf$4$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subTypeElt$ebnf$4$subexpression$2", "symbols": ["typeElt", "_", "subTypeElt$ebnf$4$subexpression$2$ebnf$1", "_"] },
{ "name": "subTypeElt$ebnf$4", "symbols": ["subTypeElt$ebnf$4", "subTypeElt$ebnf$4$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subTypeElt", "symbols": [{ "literal": "(" }, "_", "subTypeElt$ebnf$4", { "literal": ")" }], "postprocess": instructionSetToJsonSemi },
{ "name": "typeElt", "symbols": [(lexer.has("elt") ? { type: "elt" } : elt), "_", "typeData", "_", "typeData"], "postprocess": doubleArgKeywordToJson },
{ "name": "subInstruction", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => "" },
{ "name": "subInstruction", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", "instruction", "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => d[2] },
{ "name": "subInstruction$ebnf$1$subexpression$1", "symbols": ["instruction", "_", (lexer.has("semicolon") ? { type: "semicolon" } : semicolon), "_"] },
{ "name": "subInstruction$ebnf$1", "symbols": ["subInstruction$ebnf$1$subexpression$1"] },
{ "name": "subInstruction$ebnf$1$subexpression$2", "symbols": ["instruction", "_", (lexer.has("semicolon") ? { type: "semicolon" } : semicolon), "_"] },
{ "name": "subInstruction$ebnf$1", "symbols": ["subInstruction$ebnf$1", "subInstruction$ebnf$1$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subInstruction", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", "subInstruction$ebnf$1", "instruction", "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": instructionSetToJsonNoSemi },
{ "name": "subInstruction$ebnf$2$subexpression$1", "symbols": ["instruction", "_", (lexer.has("semicolon") ? { type: "semicolon" } : semicolon), "_"] },
{ "name": "subInstruction$ebnf$2", "symbols": ["subInstruction$ebnf$2$subexpression$1"] },
{ "name": "subInstruction$ebnf$2$subexpression$2", "symbols": ["instruction", "_", (lexer.has("semicolon") ? { type: "semicolon" } : semicolon), "_"] },
{ "name": "subInstruction$ebnf$2", "symbols": ["subInstruction$ebnf$2", "subInstruction$ebnf$2$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subInstruction", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", "subInstruction$ebnf$2", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": instructionSetToJsonSemi },
{ "name": "instructions", "symbols": [(lexer.has("baseInstruction") ? { type: "baseInstruction" } : baseInstruction)] },
{ "name": "instructions", "symbols": [(lexer.has("macroCADR") ? { type: "macroCADR" } : macroCADR)] },
{ "name": "instructions", "symbols": [(lexer.has("macroDIP") ? { type: "macroDIP" } : macroDIP)] },
{ "name": "instructions", "symbols": [(lexer.has("macroDUP") ? { type: "macroDUP" } : macroDUP)] },
{ "name": "instructions", "symbols": [(lexer.has("macroSETCADR") ? { type: "macroSETCADR" } : macroSETCADR)] },
{ "name": "instructions", "symbols": [(lexer.has("macroASSERTlist") ? { type: "macroASSERTlist" } : macroASSERTlist)] },
{ "name": "instruction", "symbols": ["instructions"], "postprocess": keywordToJson },
{ "name": "instruction$ebnf$1$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$1", "symbols": ["instruction$ebnf$1$subexpression$1"] },
{ "name": "instruction$ebnf$1$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$1", "symbols": ["instruction$ebnf$1", "instruction$ebnf$1$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": ["instructions", "instruction$ebnf$1", "_"], "postprocess": keywordToJson },
{ "name": "instruction", "symbols": ["instructions", "_", "subInstruction"], "postprocess": singleArgInstrKeywordToJson },
{ "name": "instruction$ebnf$2$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$2", "symbols": ["instruction$ebnf$2$subexpression$1"] },
{ "name": "instruction$ebnf$2$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$2", "symbols": ["instruction$ebnf$2", "instruction$ebnf$2$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": ["instructions", "instruction$ebnf$2", "_", "subInstruction"], "postprocess": singleArgTypeKeywordToJson },
{ "name": "instruction", "symbols": ["instructions", "_", "type"], "postprocess": singleArgKeywordToJson },
{ "name": "instruction$ebnf$3$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$3", "symbols": ["instruction$ebnf$3$subexpression$1"] },
{ "name": "instruction$ebnf$3$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$3", "symbols": ["instruction$ebnf$3", "instruction$ebnf$3$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": ["instructions", "instruction$ebnf$3", "_", "type"], "postprocess": singleArgTypeKeywordToJson },
{ "name": "instruction", "symbols": ["instructions", "_", "data"], "postprocess": singleArgKeywordToJson },
{ "name": "instruction$ebnf$4$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$4", "symbols": ["instruction$ebnf$4$subexpression$1"] },
{ "name": "instruction$ebnf$4$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$4", "symbols": ["instruction$ebnf$4", "instruction$ebnf$4$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": ["instructions", "instruction$ebnf$4", "_", "data"], "postprocess": singleArgTypeKeywordToJson },
{ "name": "instruction", "symbols": ["instructions", "_", "type", "_", "type", "_", "subInstruction"], "postprocess": tripleArgKeyWordToJson },
{ "name": "instruction$ebnf$5$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$5", "symbols": ["instruction$ebnf$5$subexpression$1"] },
{ "name": "instruction$ebnf$5$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$5", "symbols": ["instruction$ebnf$5", "instruction$ebnf$5$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": ["instructions", "instruction$ebnf$5", "_", "type", "_", "type", "_", "subInstruction"], "postprocess": tripleArgTypeKeyWordToJson },
{ "name": "instruction", "symbols": ["instructions", "_", "subInstruction", "_", "subInstruction"], "postprocess": doubleArgInstrKeywordToJson },
{ "name": "instruction$ebnf$6$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$6", "symbols": ["instruction$ebnf$6$subexpression$1"] },
{ "name": "instruction$ebnf$6$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$6", "symbols": ["instruction$ebnf$6", "instruction$ebnf$6$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": ["instructions", "instruction$ebnf$6", "_", "subInstruction", "_", "subInstruction"], "postprocess": doubleArgTypeKeywordToJson },
{ "name": "instruction", "symbols": ["instructions", "_", "type", "_", "type"], "postprocess": doubleArgKeywordToJson },
{ "name": "instruction$ebnf$7$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$7", "symbols": ["instruction$ebnf$7$subexpression$1"] },
{ "name": "instruction$ebnf$7$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$7", "symbols": ["instruction$ebnf$7", "instruction$ebnf$7$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": ["instructions", "instruction$ebnf$7", "_", "type", "_", "type"], "postprocess": doubleArgTypeKeywordToJson },
{ "name": "instruction", "symbols": [{ "literal": "PUSH" }, "_", "type", "_", "data"], "postprocess": doubleArgKeywordToJson },
{ "name": "instruction", "symbols": [{ "literal": "PUSH" }, "_", "type", "_", (lexer.has("lbrace") ? { type: "lbrace" } : lbrace), (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": pushToJson },
{ "name": "instruction$ebnf$8$subexpression$1", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$8", "symbols": ["instruction$ebnf$8$subexpression$1"] },
{ "name": "instruction$ebnf$8$subexpression$2", "symbols": ["_", (lexer.has("annot") ? { type: "annot" } : annot)] },
{ "name": "instruction$ebnf$8", "symbols": ["instruction$ebnf$8", "instruction$ebnf$8$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": [{ "literal": "PUSH" }, "instruction$ebnf$8", "_", "type", "_", "data"], "postprocess": pushWithAnnotsToJson },
{ "name": "instruction$ebnf$9", "symbols": [/[0-9]/] },
{ "name": "instruction$ebnf$9", "symbols": ["instruction$ebnf$9", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": [{ "literal": "DIP" }, "_", "instruction$ebnf$9", "_", "subInstruction"], "postprocess": dipnToJson },
{ "name": "instruction$ebnf$10", "symbols": [/[0-9]/] },
{ "name": "instruction$ebnf$10", "symbols": ["instruction$ebnf$10", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": [{ "literal": "DUP" }, "_", "instruction$ebnf$10", "_", "subInstruction"], "postprocess": dipnToJson },
{ "name": "instruction$ebnf$11", "symbols": [/[0-9]/] },
{ "name": "instruction$ebnf$11", "symbols": ["instruction$ebnf$11", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": [{ "literal": "DIG" }, "_", "instruction$ebnf$11"], "postprocess": dignToJson },
{ "name": "instruction$ebnf$12", "symbols": [/[0-9]/] },
{ "name": "instruction$ebnf$12", "symbols": ["instruction$ebnf$12", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": [{ "literal": "DUG" }, "_", "instruction$ebnf$12"], "postprocess": dignToJson },
{ "name": "instruction$ebnf$13", "symbols": [/[0-9]/] },
{ "name": "instruction$ebnf$13", "symbols": ["instruction$ebnf$13", /[0-9]/], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "instruction", "symbols": [{ "literal": "DROP" }, "_", "instruction$ebnf$13"], "postprocess": dropnToJson },
{ "name": "instruction", "symbols": [{ "literal": "DROP" }], "postprocess": keywordToJson },
{ "name": "instruction", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => "" },
{ "name": "instruction", "symbols": [{ "literal": "CREATE_CONTRACT" }, "_", (lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", "parameter", "_", "storage", "_", "code", "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": subContractToJson },
{ "name": "instruction", "symbols": [{ "literal": "EMPTY_MAP" }, "_", "type", "_", "type"], "postprocess": doubleArgKeywordToJson },
{ "name": "instruction", "symbols": [{ "literal": "EMPTY_MAP" }, "_", (lexer.has("lparen") ? { type: "lparen" } : lparen), "_", "type", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen), "_", "type"], "postprocess": doubleArgParenKeywordToJson },
{ "name": "subData", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => "[]" },
{ "name": "subData$ebnf$1$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subData$ebnf$1$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subData$ebnf$1$subexpression$1", "symbols": ["data", "subData$ebnf$1$subexpression$1$ebnf$1", "_"] },
{ "name": "subData$ebnf$1", "symbols": ["subData$ebnf$1$subexpression$1"] },
{ "name": "subData$ebnf$1$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subData$ebnf$1$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subData$ebnf$1$subexpression$2", "symbols": ["data", "subData$ebnf$1$subexpression$2$ebnf$1", "_"] },
{ "name": "subData$ebnf$1", "symbols": ["subData$ebnf$1", "subData$ebnf$1$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subData", "symbols": [{ "literal": "{" }, "_", "subData$ebnf$1", { "literal": "}" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subData$ebnf$2$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subData$ebnf$2$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subData$ebnf$2$subexpression$1", "symbols": ["data", "subData$ebnf$2$subexpression$1$ebnf$1", "_"] },
{ "name": "subData$ebnf$2", "symbols": ["subData$ebnf$2$subexpression$1"] },
{ "name": "subData$ebnf$2$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subData$ebnf$2$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subData$ebnf$2$subexpression$2", "symbols": ["data", "subData$ebnf$2$subexpression$2$ebnf$1", "_"] },
{ "name": "subData$ebnf$2", "symbols": ["subData$ebnf$2", "subData$ebnf$2$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subData", "symbols": [{ "literal": "(" }, "_", "subData$ebnf$2", { "literal": ")" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subData$ebnf$3$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subData$ebnf$3$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subData$ebnf$3$subexpression$1", "symbols": ["data", "_", "subData$ebnf$3$subexpression$1$ebnf$1", "_"] },
{ "name": "subData$ebnf$3", "symbols": ["subData$ebnf$3$subexpression$1"] },
{ "name": "subData$ebnf$3$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subData$ebnf$3$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subData$ebnf$3$subexpression$2", "symbols": ["data", "_", "subData$ebnf$3$subexpression$2$ebnf$1", "_"] },
{ "name": "subData$ebnf$3", "symbols": ["subData$ebnf$3", "subData$ebnf$3$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subData", "symbols": [{ "literal": "{" }, "_", "subData$ebnf$3", { "literal": "}" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subData$ebnf$4$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subData$ebnf$4$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subData$ebnf$4$subexpression$1", "symbols": ["data", "_", "subData$ebnf$4$subexpression$1$ebnf$1", "_"] },
{ "name": "subData$ebnf$4", "symbols": ["subData$ebnf$4$subexpression$1"] },
{ "name": "subData$ebnf$4$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subData$ebnf$4$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subData$ebnf$4$subexpression$2", "symbols": ["data", "_", "subData$ebnf$4$subexpression$2$ebnf$1", "_"] },
{ "name": "subData$ebnf$4", "symbols": ["subData$ebnf$4", "subData$ebnf$4$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subData", "symbols": [{ "literal": "(" }, "_", "subData$ebnf$4", { "literal": ")" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subElt", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => "[]" },
{ "name": "subElt$ebnf$1$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subElt$ebnf$1$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subElt$ebnf$1$subexpression$1", "symbols": ["elt", "subElt$ebnf$1$subexpression$1$ebnf$1", "_"] },
{ "name": "subElt$ebnf$1", "symbols": ["subElt$ebnf$1$subexpression$1"] },
{ "name": "subElt$ebnf$1$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subElt$ebnf$1$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subElt$ebnf$1$subexpression$2", "symbols": ["elt", "subElt$ebnf$1$subexpression$2$ebnf$1", "_"] },
{ "name": "subElt$ebnf$1", "symbols": ["subElt$ebnf$1", "subElt$ebnf$1$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subElt", "symbols": [{ "literal": "{" }, "_", "subElt$ebnf$1", { "literal": "}" }], "postprocess": instructionSetToJsonSemi },
{ "name": "subElt$ebnf$2$subexpression$1$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subElt$ebnf$2$subexpression$1$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subElt$ebnf$2$subexpression$1", "symbols": ["elt", "subElt$ebnf$2$subexpression$1$ebnf$1", "_"] },
{ "name": "subElt$ebnf$2", "symbols": ["subElt$ebnf$2$subexpression$1"] },
{ "name": "subElt$ebnf$2$subexpression$2$ebnf$1", "symbols": [{ "literal": ";" }], "postprocess": id },
{ "name": "subElt$ebnf$2$subexpression$2$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "subElt$ebnf$2$subexpression$2", "symbols": ["elt", "subElt$ebnf$2$subexpression$2$ebnf$1", "_"] },
{ "name": "subElt$ebnf$2", "symbols": ["subElt$ebnf$2", "subElt$ebnf$2$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "subElt", "symbols": [{ "literal": "(" }, "_", "subElt$ebnf$2", { "literal": ")" }], "postprocess": instructionSetToJsonSemi },
{ "name": "elt", "symbols": [(lexer.has("elt") ? { type: "elt" } : elt), "_", "data", "_", "data"], "postprocess": doubleArgKeywordToJson },
{ "name": "_$ebnf$1", "symbols": [] },
{ "name": "_$ebnf$1", "symbols": ["_$ebnf$1", /[\s]/], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "_", "symbols": ["_$ebnf$1"] },
{ "name": "semicolons$ebnf$1", "symbols": [/[;]/], "postprocess": id },
{ "name": "semicolons$ebnf$1", "symbols": [], "postprocess": () => null },
{ "name": "semicolons", "symbols": ["semicolons$ebnf$1"] }
],
ParserStart: "main",
};
exports.default = grammar;
//# sourceMappingURL=Michelson.js.map

@@ -1,13 +0,13 @@

export interface Token {
interface NearleyToken {
value: any;
[key: string]: any;
}
export interface Lexer {
interface NearleyLexer {
reset: (chunk: string, info: any) => void;
next: () => Token | undefined;
next: () => NearleyToken | undefined;
save: () => any;
formatError: (token: Token) => string;
formatError: (token: NearleyToken) => string;
has: (tokenType: string) => boolean;
}
export interface NearleyRule {
interface NearleyRule {
name: string;

@@ -17,3 +17,3 @@ symbols: NearleySymbol[];

}
export declare type NearleySymbol = string | {
declare type NearleySymbol = string | {
literal: any;

@@ -23,4 +23,8 @@ } | {

};
export declare var Lexer: Lexer | undefined;
export declare var ParserRules: NearleyRule[];
export declare var ParserStart: string;
interface Grammar {
Lexer: NearleyLexer | undefined;
ParserRules: NearleyRule[];
ParserStart: string;
}
declare const grammar: Grammar;
export default grammar;

@@ -27,17 +27,21 @@ "use strict";

;
exports.Lexer = lexer;
exports.ParserRules = [
{ "name": "data", "symbols": [(lexer.has("keyword") ? { type: "keyword" } : keyword)], "postprocess": keywordToJson },
{ "name": "data", "symbols": [(lexer.has("string") ? { type: "string" } : string)], "postprocess": stringToJson },
{ "name": "data", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => "[]" },
{ "name": "data", "symbols": [(lexer.has("number") ? { type: "number" } : number)], "postprocess": intToJson },
{ "name": "data", "symbols": [(lexer.has("singleArgData") ? { type: "singleArgData" } : singleArgData), "_", "data"], "postprocess": singleArgDataToJson },
{ "name": "data", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("singleArgData") ? { type: "singleArgData" } : singleArgData), "_", "data", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": singleArgDataWithParenToJson },
{ "name": "data", "symbols": [(lexer.has("doubleArgData") ? { type: "doubleArgData" } : doubleArgData), "_", "data", "_", "data"], "postprocess": doubleArgDataToJson },
{ "name": "data", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("doubleArgData") ? { type: "doubleArgData" } : doubleArgData), "_", "data", "_", "data", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": doubleArgDataWithParenToJson },
{ "name": "_$ebnf$1", "symbols": [] },
{ "name": "_$ebnf$1", "symbols": ["_$ebnf$1", /[\s]/], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "_", "symbols": ["_$ebnf$1"] }
];
exports.ParserStart = "data";
;
const grammar = {
Lexer: lexer,
ParserRules: [
{ "name": "data", "symbols": [(lexer.has("keyword") ? { type: "keyword" } : keyword)], "postprocess": keywordToJson },
{ "name": "data", "symbols": [(lexer.has("string") ? { type: "string" } : string)], "postprocess": stringToJson },
{ "name": "data", "symbols": [(lexer.has("lbrace") ? { type: "lbrace" } : lbrace), "_", (lexer.has("rbrace") ? { type: "rbrace" } : rbrace)], "postprocess": d => "[]" },
{ "name": "data", "symbols": [(lexer.has("number") ? { type: "number" } : number)], "postprocess": intToJson },
{ "name": "data", "symbols": [(lexer.has("singleArgData") ? { type: "singleArgData" } : singleArgData), "_", "data"], "postprocess": singleArgDataToJson },
{ "name": "data", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("singleArgData") ? { type: "singleArgData" } : singleArgData), "_", "data", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": singleArgDataWithParenToJson },
{ "name": "data", "symbols": [(lexer.has("doubleArgData") ? { type: "doubleArgData" } : doubleArgData), "_", "data", "_", "data"], "postprocess": doubleArgDataToJson },
{ "name": "data", "symbols": [(lexer.has("lparen") ? { type: "lparen" } : lparen), "_", (lexer.has("doubleArgData") ? { type: "doubleArgData" } : doubleArgData), "_", "data", "_", "data", "_", (lexer.has("rparen") ? { type: "rparen" } : rparen)], "postprocess": doubleArgDataWithParenToJson },
{ "name": "_$ebnf$1", "symbols": [] },
{ "name": "_$ebnf$1", "symbols": ["_$ebnf$1", /[\s]/], "postprocess": (d) => d[0].concat([d[1]]) },
{ "name": "_", "symbols": ["_$ebnf$1"] }
],
ParserStart: "data",
};
exports.default = grammar;
//# sourceMappingURL=MichelsonParameters.js.map

@@ -26,3 +26,3 @@ "use strict";

function generateEntryPointsFromParams(params) {
const parser = new nearley.Parser(nearley.Grammar.fromCompiled(EntryPointTemplate));
const parser = new nearley.Parser(nearley.Grammar.fromCompiled(EntryPointTemplate.default));
parser.feed(TezosLanguageUtil_1.TezosLanguageUtil.normalizeMichelineWhiteSpace(TezosLanguageUtil_1.TezosLanguageUtil.stripComments(params)));

@@ -29,0 +29,0 @@ return parser.results[0];

@@ -150,3 +150,3 @@ "use strict";

function translateMichelsonToMicheline(code) {
const parser = new nearley.Parser(nearley.Grammar.fromCompiled(Michelson));
const parser = new nearley.Parser(nearley.Grammar.fromCompiled(Michelson.default));
preProcessMichelsonScript(code).forEach(p => { parser.feed(p); });

@@ -157,3 +157,3 @@ return parser.results[0];

function translateParameterMichelsonToMicheline(code) {
const parser = new nearley.Parser(nearley.Grammar.fromCompiled(MichelsonParameters));
const parser = new nearley.Parser(nearley.Grammar.fromCompiled(MichelsonParameters.default));
preProcessMichelsonScript(code).forEach(p => { parser.feed(p); });

@@ -190,3 +190,3 @@ return parser.results[0];

function translateMichelineToHex(code) {
const parser = new nearley.Parser(nearley.Grammar.fromCompiled(Micheline));
const parser = new nearley.Parser(nearley.Grammar.fromCompiled(Micheline.default));
parser.feed(normalizeMichelineWhiteSpace(code));

@@ -193,0 +193,0 @@ return parser.results.join('');
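
The call sites above reflect the move to a default-exported grammar object: each parser is now built from `X.default` rather than the module namespace. A minimal sketch of consuming such a compiled grammar, assuming a nearley-compiled module with a default export (the module path and the input script are illustrative, not taken from the diff):

```typescript
// Sketch only: load a nearley-compiled grammar through its default export,
// mirroring the updated fromCompiled() call sites above.
import * as nearley from 'nearley';
import * as Michelson from './lexer/Michelson'; // hypothetical path to the compiled grammar

// fromCompiled() receives the default-exported grammar object.
const parser = new nearley.Parser(nearley.Grammar.fromCompiled((Michelson as any).default));

// Feed a Michelson script; the library normally normalizes whitespace and
// strips comments first, which is elided here.
parser.feed('parameter unit; storage unit; code { CDR ; NIL operation ; PAIR };');
console.log(parser.results[0]); // Micheline output produced by the grammar's postprocessors
```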

@@ -7,4 +7,5 @@ export * from './chain/tezos/TezosContractIntrospector';

export * from './chain/tezos/contracts/BabylonDelegationHelper';
export * from './chain/tezos/contracts/DexterTokenHelper';
export * from './chain/tezos/contracts/MorleyTokenHelper';
export * from './chain/tezos/contracts/TCFBakerRegistryHelper';
export * from './chain/tezos/contracts/DexterTokenHelper';
export * from './identity/tezos/TezosWalletUtil';

@@ -11,0 +12,0 @@ export * from './identity/tezos/TezosFileWallet';

@@ -19,4 +19,5 @@ "use strict";

__export(require("./chain/tezos/contracts/BabylonDelegationHelper"));
__export(require("./chain/tezos/contracts/DexterTokenHelper"));
__export(require("./chain/tezos/contracts/MorleyTokenHelper"));
__export(require("./chain/tezos/contracts/TCFBakerRegistryHelper"));
__export(require("./chain/tezos/contracts/DexterTokenHelper"));
__export(require("./identity/tezos/TezosWalletUtil"));

@@ -23,0 +24,0 @@ __export(require("./identity/tezos/TezosFileWallet"));

@@ -8,4 +8,5 @@ export declare function setLogLevel(level: string): void;

export * from './chain/tezos/contracts/BabylonDelegationHelper';
export * from './chain/tezos/contracts/DexterTokenHelper';
export * from './chain/tezos/contracts/MorleyTokenHelper';
export * from './chain/tezos/contracts/TCFBakerRegistryHelper';
export * from './chain/tezos/contracts/DexterTokenHelper';
export * from "./identity/tezos/TezosFileWallet";

@@ -12,0 +13,0 @@ export * from "./identity/tezos/TezosLedgerWallet";

@@ -36,4 +36,5 @@ "use strict";

__export(require("./chain/tezos/contracts/BabylonDelegationHelper"));
__export(require("./chain/tezos/contracts/DexterTokenHelper"));
__export(require("./chain/tezos/contracts/MorleyTokenHelper"));
__export(require("./chain/tezos/contracts/TCFBakerRegistryHelper"));
__export(require("./chain/tezos/contracts/DexterTokenHelper"));
__export(require("./identity/tezos/TezosFileWallet"));

@@ -40,0 +41,0 @@ __export(require("./identity/tezos/TezosLedgerWallet"));
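
With the index changes above, the new contract helpers become top-level exports of the package. A minimal sketch of importing them (nothing beyond the re-exports shown in the hunks is assumed):

```typescript
// Sketch only: the helpers re-exported above are importable from the package index.
import { BabylonDelegationHelper, MorleyTokenHelper, TCFBakerRegistryHelper } from 'conseiljs';

console.log(typeof BabylonDelegationHelper, typeof MorleyTokenHelper, typeof TCFBakerRegistryHelper);
```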

@@ -18,3 +18,3 @@ import { ConseilQuery, ConseilServerInfo } from "../../types/conseil/QueryTypes";

function getBallots(serverInfo: ConseilServerInfo, network: string, query: ConseilQuery): Promise<any[]>;
function awaitOperationConfirmation(serverInfo: ConseilServerInfo, network: string, hash: string, duration: number, blocktime?: number): Promise<any[]>;
function awaitOperationConfirmation(serverInfo: ConseilServerInfo, network: string, hash: string, duration: number, blocktime?: number): Promise<any>;
function awaitOperationForkConfirmation(serverInfo: ConseilServerInfo, network: string, hash: string, duration: number, depth: number): Promise<boolean>;

@@ -21,0 +21,0 @@ function getEntityQueryForId(id: string | number): {

@@ -136,3 +136,3 @@ "use strict";

if (group.length > 0) {
return group;
return group[0];
}

@@ -139,0 +139,0 @@ currentLevel = (yield getBlockHead(serverInfo, network))['level'];
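
The hunks above change `awaitOperationConfirmation` to resolve with a single operation record (`group[0]`) instead of an array. A hedged sketch of a caller adapting to this; the server configuration and the field read at the end are illustrative assumptions:

```typescript
// Sketch only: awaitOperationConfirmation now yields one record, not an array.
import { TezosConseilClient } from 'conseiljs';

// Placeholder Conseil server configuration; cast to avoid asserting the exact
// ConseilServerInfo shape here.
const serverInfo = { url: 'https://conseil.example.com', apiKey: '<api key>' } as any;

async function waitForInclusion(operationGroupHash: string): Promise<void> {
    // Before this release: const [operation] = await TezosConseilClient.awaitOperationConfirmation(...);
    const operation = await TezosConseilClient.awaitOperationConfirmation(serverInfo, 'mainnet', operationGroupHash, 5);
    console.log('confirmed operation', operation); // a single record, e.g. operation['block_level'] (field name assumed)
}
```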

@@ -5,3 +5,3 @@ export interface Parameter {

optional?: boolean;
constituent?: string;
constituentType?: string;
}

@@ -8,0 +8,0 @@ export interface EntryPoint {
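
The `Parameter` rename above (`constituent` → `constituentType`) affects code that reads the contract introspection output. A sketch of consuming the renamed field; the `name` and `type` fields are assumptions, only `optional` and `constituentType` appear in the hunk:

```typescript
// Sketch only: switch reads from `constituent` to `constituentType`.
interface Parameter {
    name?: string;          // assumed field
    type: string;           // assumed field
    optional?: boolean;
    constituentType?: string; // was `constituent` before this release
}

function describeParameter(p: Parameter): string {
    const base = `${p.name ?? '(anonymous)'}: ${p.type}`;
    return p.constituentType ? `${base} of ${p.constituentType}` : base;
}
```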

{
"name": "conseiljs",
"version": "0.4.0-alpha.2",
"version": "0.4.0-alpha.3",
"description": "Client-side library for dApp development.",

@@ -81,3 +81,3 @@ "browser": "dist/index-web.js",

"moo": "0.5.0",
"nearley": "2.18.0",
"nearley": "2.19.1",
"node-fetch": "2.6.0"

@@ -84,0 +84,0 @@ },

@@ -35,3 +35,3 @@ # ConseilJS

<script src="https://cdn.jsdelivr.net/gh/cryptonomic/conseiljs/dist-web/conseiljs.min.js"
integrity="sha384-olp0B4bUxCBiUnHvWX7kRfeNpLL9+HrwhbonLdRboBbZqXhvZA0c15nlcDMjoUKT"
integrity="sha384-8JMZ3yQBhD78hNdpSVR9/XrWSIN6k8VLqiJuqQcg+xMzIDMbFQN1aF3tk9vxETea"
crossorigin="anonymous"></script>

@@ -38,0 +38,0 @@ ```

Sorry, the diff of this file is not supported yet
