
@webassemblyjs/wast-parser

Package Overview

Dependencies: 10
Maintainers: 1
Versions: 86

Comparing version 1.7.1-0 to 1.7.1


esm/grammar.js

@@ -6,12 +6,7 @@ function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }

import { codeFrameFromSource } from "@webassemblyjs/helper-code-frame";
import { define } from "mamacro";
import * as t from "@webassemblyjs/ast";
import { parse32I } from "./number-literals";
import { parseString } from "./string-literals";
import { tokens, keywords } from "./tokenizer";
var t = require("@webassemblyjs/ast");
var _require = require("./tokenizer"),
tokens = _require.tokens,
keywords = _require.keywords;
function hasPlugin(name) {

@@ -392,3 +387,2 @@ if (name !== "wast") throw new Error("unknow plugin");

var name = token.value;
var fnName = t.identifier("".concat(moduleName, ".").concat(name));
eatToken();

@@ -403,2 +397,3 @@ eatTokenOfType(tokens.openParen);

var fnResult = [];
var fnName = t.identifier(getUniqueName("func"));

@@ -405,0 +400,0 @@ if (token.type === tokens.identifier) {

esm/index.js

import * as parser from "./grammar";
var _require = require("./tokenizer"),
tokenize = _require.tokenize;
import { tokenize } from "./tokenizer";
export function parse(source) {

@@ -11,2 +8,3 @@ var tokens = tokenize(source); // We pass the source here to show code frames

return ast;
}
}
export * from "./number-literals";
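
For reference, a minimal consumer-side sketch of the ES module entry after this change, assuming a bundler resolves the esm build and that the re-exported number-literal helpers keep the names used above (e.g. parse32I):

import { parse, parse32I } from "@webassemblyjs/wast-parser";

// Tokenize and parse a WAST source string into an AST node.
const ast = parse("(module)");

// Number-literal helpers are re-exported from the entry via `export * from "./number-literals"`.
const n = parse32I("42");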

esm/number-literals.js

@@ -1,2 +0,2 @@

import Long from "long";
import Long from "@xtuc/long";
import parseHexFloat from "@webassemblyjs/floating-point-hex-parser";

@@ -3,0 +3,0 @@ import { CompileError } from "@webassemblyjs/helper-api-error";

esm/tokenizer.js

@@ -1,2 +0,1 @@

import { define } from "mamacro";
import { FSM, makeTransition } from "@webassemblyjs/helper-fsm";

@@ -60,3 +59,3 @@ import { codeFrameFromSource } from "@webassemblyjs/helper-code-frame";

};
var keywords = {
export var keywords = {
module: "module",

@@ -131,4 +130,3 @@ func: "func",

}, "START", "STOP");
function tokenize(input) {
export function tokenize(input) {
var current = 0;

@@ -438,7 +436,2 @@ var char = input[current]; // Used by SourceLocation

}
module.exports = {
tokenize: tokenize,
tokens: tokenTypes,
keywords: keywords
};
export var tokens = tokenTypes;
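
In other words, the ESM tokenizer now exposes named exports instead of a single module.exports object. A small sketch of the new surface, using the same relative path the grammar file imports above rely on (the shape of the returned tokens is assumed from the diff):

import { tokenize, tokens, keywords } from "./tokenizer";

const toks = tokenize("(module (func))");
// Each token carries a type that is compared against the exported token types.
console.log(toks[0].type === tokens.openParen); // true for the leading "("
console.log(keywords.func);                     // "func"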

lib/grammar.js

@@ -10,3 +10,3 @@ "use strict";

var _mamacro = require("mamacro");
var t = _interopRequireWildcard(require("@webassemblyjs/ast"));

@@ -17,2 +17,6 @@ var _numberLiterals = require("./number-literals");

var _tokenizer = require("./tokenizer");
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = Object.defineProperty && Object.getOwnPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : {}; if (desc.get || desc.set) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } } newObj.default = obj; return newObj; } }
function _typeof(obj) { if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }

@@ -22,8 +26,2 @@

var t = require("@webassemblyjs/ast");
var _require = require("./tokenizer"),
tokens = _require.tokens,
keywords = _require.keywords;
function hasPlugin(name) {

@@ -35,3 +33,3 @@ if (name !== "wast") throw new Error("unknow plugin");

function isKeyword(token, id) {
return token.type === tokens.keyword && token.value === id;
return token.type === _tokenizer.tokens.keyword && token.value === id;
}

@@ -93,7 +91,7 @@

function parseExportIndex(token) {
if (token.type === tokens.identifier) {
if (token.type === _tokenizer.tokens.identifier) {
var index = identifierFromToken(token);
eatToken();
return index;
} else if (token.type === tokens.number) {
} else if (token.type === _tokenizer.tokens.number) {
var _index = t.numberLiteralFromRaw(token.value);

@@ -137,3 +135,3 @@

while (token.type === tokens.comment) {
while (token.type === _tokenizer.tokens.comment) {
eatToken();

@@ -165,3 +163,3 @@

if (token.type === tokens.string || token.type === tokens.identifier) {
if (token.type === _tokenizer.tokens.string || token.type === _tokenizer.tokens.identifier) {
id = t.identifier(token.value);

@@ -177,3 +175,3 @@ eatToken();

if (lookaheadAndCheck(tokens.openParen, keywords.data)) {
if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.data)) {
eatToken(); // (

@@ -185,6 +183,6 @@

var stringInitializer = token.value;
eatTokenOfType(tokens.string); // Update limits accordingly
eatTokenOfType(_tokenizer.tokens.string); // Update limits accordingly
limits = t.limit(stringInitializer.length);
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}

@@ -196,3 +194,3 @@ /**

if (lookaheadAndCheck(tokens.openParen, keywords.export)) {
if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.export)) {
eatToken(); // (

@@ -202,3 +200,3 @@

if (token.type !== tokens.string) {
if (token.type !== _tokenizer.tokens.string) {
throw function () {

@@ -216,3 +214,3 @@ return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Expected string in export" + ", given " + tokenToString(token));

});
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}

@@ -224,7 +222,7 @@ /**

if (token.type === tokens.number) {
if (token.type === _tokenizer.tokens.number) {
limits = t.limit((0, _numberLiterals.parse32I)(token.value));
eatToken();
if (token.type === tokens.number) {
if (token.type === _tokenizer.tokens.number) {
limits.max = (0, _numberLiterals.parse32I)(token.value);

@@ -251,14 +249,14 @@ eatToken();

if (token.type === tokens.number) {
if (token.type === _tokenizer.tokens.number) {
memidx = token.value;
eatTokenOfType(tokens.number); // .
eatTokenOfType(_tokenizer.tokens.number); // .
}
eatTokenOfType(tokens.openParen);
eatTokenOfType(_tokenizer.tokens.openParen);
var offset;
if (token.type === tokens.valtype) {
eatTokenOfType(tokens.valtype); // i32
if (token.type === _tokenizer.tokens.valtype) {
eatTokenOfType(_tokenizer.tokens.valtype); // i32
eatTokenOfType(tokens.dot); // .
eatTokenOfType(_tokenizer.tokens.dot); // .

@@ -269,3 +267,3 @@ if (token.value !== "const") {

eatTokenOfType(tokens.name); // const
eatTokenOfType(_tokenizer.tokens.name); // const

@@ -275,5 +273,5 @@ var numberLiteral = t.numberLiteralFromRaw(token.value, "i32");

eatToken();
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
} else {
eatTokenOfType(tokens.name); // get_global
eatTokenOfType(_tokenizer.tokens.name); // get_global

@@ -284,3 +282,3 @@ var _numberLiteral = t.numberLiteralFromRaw(token.value, "i32");

eatToken();
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}

@@ -317,3 +315,3 @@

if (token.type === tokens.string || token.type === tokens.identifier) {
if (token.type === _tokenizer.tokens.string || token.type === _tokenizer.tokens.identifier) {
name = identifierFromToken(token);

@@ -325,7 +323,7 @@ eatToken();

while (token.type !== tokens.closeParen) {
while (token.type !== _tokenizer.tokens.closeParen) {
/**
* Maybe export
*/
if (lookaheadAndCheck(tokens.openParen, keywords.elem)) {
if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.elem)) {
eatToken(); // (

@@ -335,3 +333,3 @@

while (token.type === tokens.identifier) {
while (token.type === _tokenizer.tokens.identifier) {
elemIndices.push(t.identifier(token.value));

@@ -341,4 +339,4 @@ eatToken();

eatTokenOfType(tokens.closeParen);
} else if (lookaheadAndCheck(tokens.openParen, keywords.export)) {
eatTokenOfType(_tokenizer.tokens.closeParen);
} else if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.export)) {
eatToken(); // (

@@ -348,3 +346,3 @@

if (token.type !== tokens.string) {
if (token.type !== _tokenizer.tokens.string) {
throw function () {

@@ -362,7 +360,7 @@ return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Expected string in export" + ", given " + tokenToString(token));

});
eatTokenOfType(tokens.closeParen);
} else if (isKeyword(token, keywords.anyfunc)) {
eatTokenOfType(_tokenizer.tokens.closeParen);
} else if (isKeyword(token, _tokenizer.keywords.anyfunc)) {
// It's the default value, we can ignore it
eatToken(); // anyfunc
} else if (token.type === tokens.number) {
} else if (token.type === _tokenizer.tokens.number) {
/**

@@ -374,3 +372,3 @@ * Table type

if (token.type === tokens.number) {
if (token.type === _tokenizer.tokens.number) {
var max = parseInt(token.value);

@@ -413,3 +411,3 @@ eatToken();

function parseImport() {
if (token.type !== tokens.string) {
if (token.type !== _tokenizer.tokens.string) {
throw new Error("Expected a string, " + token.type + " given.");

@@ -421,3 +419,3 @@ }

if (token.type !== tokens.string) {
if (token.type !== _tokenizer.tokens.string) {
throw new Error("Expected a string, " + token.type + " given.");

@@ -427,8 +425,7 @@ }

var name = token.value;
var fnName = t.identifier("".concat(moduleName, ".").concat(name));
eatToken();
eatTokenOfType(tokens.openParen);
eatTokenOfType(_tokenizer.tokens.openParen);
var descr;
if (isKeyword(token, keywords.func)) {
if (isKeyword(token, _tokenizer.keywords.func)) {
eatToken(); // keyword

@@ -438,4 +435,5 @@

var fnResult = [];
var fnName = t.identifier(getUniqueName("func"));
if (token.type === tokens.identifier) {
if (token.type === _tokenizer.tokens.identifier) {
fnName = identifierFromToken(token);

@@ -445,9 +443,9 @@ eatToken();

while (token.type === tokens.openParen) {
while (token.type === _tokenizer.tokens.openParen) {
eatToken();
if (lookaheadAndCheck(keywords.param) === true) {
if (lookaheadAndCheck(_tokenizer.keywords.param) === true) {
eatToken();
fnParams.push.apply(fnParams, _toConsumableArray(parseFuncParam()));
} else if (lookaheadAndCheck(keywords.result) === true) {
} else if (lookaheadAndCheck(_tokenizer.keywords.result) === true) {
eatToken();

@@ -461,3 +459,3 @@ fnResult.push.apply(fnResult, _toConsumableArray(parseFuncResult()));

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}

@@ -470,9 +468,9 @@

descr = t.funcImportDescr(fnName, t.signature(fnParams, fnResult));
} else if (isKeyword(token, keywords.global)) {
} else if (isKeyword(token, _tokenizer.keywords.global)) {
eatToken(); // keyword
if (token.type === tokens.openParen) {
if (token.type === _tokenizer.tokens.openParen) {
eatToken(); // (
eatTokenOfType(tokens.keyword); // mut keyword
eatTokenOfType(_tokenizer.tokens.keyword); // mut keyword

@@ -482,13 +480,13 @@ var valtype = token.value;

descr = t.globalType(valtype, "var");
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
} else {
var _valtype = token.value;
eatTokenOfType(tokens.valtype);
eatTokenOfType(_tokenizer.tokens.valtype);
descr = t.globalType(_valtype, "const");
}
} else if (isKeyword(token, keywords.memory) === true) {
} else if (isKeyword(token, _tokenizer.keywords.memory) === true) {
eatToken(); // Keyword
descr = parseMemory();
} else if (isKeyword(token, keywords.table) === true) {
} else if (isKeyword(token, _tokenizer.keywords.table) === true) {
eatToken(); // Keyword

@@ -501,3 +499,3 @@

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
return t.moduleImport(moduleName, name, descr);

@@ -522,3 +520,3 @@ }

if (token.type === tokens.identifier) {
if (token.type === _tokenizer.tokens.identifier) {
label = identifierFromToken(token);

@@ -530,10 +528,10 @@ eatToken();

while (token.type === tokens.openParen) {
while (token.type === _tokenizer.tokens.openParen) {
eatToken();
if (lookaheadAndCheck(keywords.result) === true) {
if (lookaheadAndCheck(_tokenizer.keywords.result) === true) {
eatToken();
blockResult = token.value;
eatToken();
} else if (lookaheadAndCheck(tokens.name) === true || lookaheadAndCheck(tokens.valtype) === true || token.type === "keyword" // is any keyword
} else if (lookaheadAndCheck(_tokenizer.tokens.name) === true || lookaheadAndCheck(_tokenizer.tokens.valtype) === true || token.type === "keyword" // is any keyword
) {

@@ -549,3 +547,3 @@ // Instruction

maybeIgnoreComment();
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}

@@ -580,3 +578,3 @@

if (token.type === tokens.identifier) {
if (token.type === _tokenizer.tokens.identifier) {
label = identifierFromToken(token);

@@ -588,3 +586,3 @@ eatToken();

while (token.type === tokens.openParen) {
while (token.type === _tokenizer.tokens.openParen) {
eatToken(); // (

@@ -596,7 +594,7 @@

if (isKeyword(token, keywords.result) === true) {
if (isKeyword(token, _tokenizer.keywords.result) === true) {
eatToken();
blockResult = token.value;
eatTokenOfType(tokens.valtype);
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.valtype);
eatTokenOfType(_tokenizer.tokens.closeParen);
continue;

@@ -609,9 +607,9 @@ }

if (isKeyword(token, keywords.then) === true) {
if (isKeyword(token, _tokenizer.keywords.then) === true) {
eatToken(); // then
while (token.type === tokens.openParen) {
while (token.type === _tokenizer.tokens.openParen) {
eatToken(); // Instruction
if (lookaheadAndCheck(tokens.name) === true || lookaheadAndCheck(tokens.valtype) === true || token.type === "keyword" // is any keyword
if (lookaheadAndCheck(_tokenizer.tokens.name) === true || lookaheadAndCheck(_tokenizer.tokens.valtype) === true || token.type === "keyword" // is any keyword
) {

@@ -625,6 +623,6 @@ consequent.push(parseFuncInstr());

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
continue;

@@ -637,9 +635,9 @@ }

if (isKeyword(token, keywords.else)) {
if (isKeyword(token, _tokenizer.keywords.else)) {
eatToken(); // else
while (token.type === tokens.openParen) {
while (token.type === _tokenizer.tokens.openParen) {
eatToken(); // Instruction
if (lookaheadAndCheck(tokens.name) === true || lookaheadAndCheck(tokens.valtype) === true || token.type === "keyword" // is any keyword
if (lookaheadAndCheck(_tokenizer.tokens.name) === true || lookaheadAndCheck(_tokenizer.tokens.valtype) === true || token.type === "keyword" // is any keyword
) {

@@ -653,6 +651,6 @@ alternate.push(parseFuncInstr());

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
continue;

@@ -665,6 +663,6 @@ }

if (lookaheadAndCheck(tokens.name) === true || lookaheadAndCheck(tokens.valtype) === true || token.type === "keyword" // is any keyword
if (lookaheadAndCheck(_tokenizer.tokens.name) === true || lookaheadAndCheck(_tokenizer.tokens.valtype) === true || token.type === "keyword" // is any keyword
) {
testInstrs.push(parseFuncInstr());
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
continue;

@@ -701,3 +699,3 @@ }

if (token.type === tokens.identifier) {
if (token.type === _tokenizer.tokens.identifier) {
label = identifierFromToken(token);

@@ -709,10 +707,10 @@ eatToken();

while (token.type === tokens.openParen) {
while (token.type === _tokenizer.tokens.openParen) {
eatToken();
if (lookaheadAndCheck(keywords.result) === true) {
if (lookaheadAndCheck(_tokenizer.keywords.result) === true) {
eatToken();
blockResult = token.value;
eatToken();
} else if (lookaheadAndCheck(tokens.name) === true || lookaheadAndCheck(tokens.valtype) === true || token.type === "keyword" // is any keyword
} else if (lookaheadAndCheck(_tokenizer.tokens.name) === true || lookaheadAndCheck(_tokenizer.tokens.valtype) === true || token.type === "keyword" // is any keyword
) {

@@ -727,3 +725,3 @@ // Instruction

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}

@@ -740,4 +738,4 @@

while (token.type !== tokens.closeParen) {
if (lookaheadAndCheck(tokens.openParen, keywords.type)) {
while (token.type !== _tokenizer.tokens.closeParen) {
if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.type)) {
eatToken(); // (

@@ -748,3 +746,3 @@

typeRef = parseTypeReference();
} else if (lookaheadAndCheck(tokens.openParen, keywords.param)) {
} else if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.param)) {
eatToken(); // (

@@ -759,6 +757,6 @@

if (token.type !== tokens.closeParen) {
if (token.type !== _tokenizer.tokens.closeParen) {
params.push.apply(params, _toConsumableArray(parseFuncParam()));
}
} else if (lookaheadAndCheck(tokens.openParen, keywords.result)) {
} else if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.result)) {
eatToken(); // (

@@ -773,11 +771,11 @@

if (token.type !== tokens.closeParen) {
if (token.type !== _tokenizer.tokens.closeParen) {
results.push.apply(results, _toConsumableArray(parseFuncResult()));
}
} else {
eatTokenOfType(tokens.openParen);
eatTokenOfType(_tokenizer.tokens.openParen);
instrs.push(parseFuncInstr());
}
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}

@@ -803,3 +801,3 @@

function parseExport() {
if (token.type !== tokens.string) {
if (token.type !== _tokenizer.tokens.string) {
throw new Error("Expected string after export, got: " + token.type);

@@ -818,18 +816,18 @@ }

var index;
eatTokenOfType(tokens.openParen);
eatTokenOfType(_tokenizer.tokens.openParen);
while (token.type !== tokens.closeParen) {
if (isKeyword(token, keywords.func)) {
while (token.type !== _tokenizer.tokens.closeParen) {
if (isKeyword(token, _tokenizer.keywords.func)) {
type = "Func";
eatToken();
index = parseExportIndex(token);
} else if (isKeyword(token, keywords.table)) {
} else if (isKeyword(token, _tokenizer.keywords.table)) {
type = "Table";
eatToken();
index = parseExportIndex(token);
} else if (isKeyword(token, keywords.global)) {
} else if (isKeyword(token, _tokenizer.keywords.global)) {
type = "Global";
eatToken();
index = parseExportIndex(token);
} else if (isKeyword(token, keywords.memory)) {
} else if (isKeyword(token, _tokenizer.keywords.memory)) {
type = "Memory";

@@ -853,3 +851,3 @@ eatToken();

var endLoc = getEndLoc();
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
return t.withLoc(node, endLoc, startLoc);

@@ -864,3 +862,3 @@ }

if (token.type === tokens.identifier) {
if (token.type === _tokenizer.tokens.identifier) {
name = token.value;

@@ -870,3 +868,3 @@ eatToken();

if (hasPlugin("wast") && token.type === tokens.name && token.value === "binary") {
if (hasPlugin("wast") && token.type === _tokenizer.tokens.name && token.value === "binary") {
eatToken();

@@ -876,3 +874,3 @@ isBinary = true;

if (hasPlugin("wast") && token.type === tokens.name && token.value === "quote") {
if (hasPlugin("wast") && token.type === _tokenizer.tokens.name && token.value === "quote") {
eatToken();

@@ -885,3 +883,3 @@ isQuote = true;

while (token.type === tokens.string) {
while (token.type === _tokenizer.tokens.string) {
blob.push(token.value);

@@ -892,3 +890,3 @@ eatToken();

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
return t.binaryModule(name, blob);

@@ -900,3 +898,3 @@ }

while (token.type === tokens.string) {
while (token.type === _tokenizer.tokens.string) {
string.push(token.value);

@@ -906,7 +904,7 @@ eatToken();

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
return t.quoteModule(name, string);
}
while (token.type !== tokens.closeParen) {
while (token.type !== _tokenizer.tokens.closeParen) {
moduleFields.push(walk());

@@ -924,3 +922,3 @@

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
return t.module(name, moduleFields);

@@ -938,9 +936,9 @@ }

while (token.type === tokens.name || isKeyword(token, keywords.offset)) {
while (token.type === _tokenizer.tokens.name || isKeyword(token, _tokenizer.keywords.offset)) {
var key = token.value;
eatToken();
eatTokenOfType(tokens.equal);
eatTokenOfType(_tokenizer.tokens.equal);
var value = void 0;
if (token.type === tokens.number) {
if (token.type === _tokenizer.tokens.number) {
value = t.numberLiteralFromRaw(token.value);

@@ -958,15 +956,15 @@ } else {

while (token.type !== tokens.closeParen && ( // $FlowIgnore
token.type === tokens.openParen || signaturePtr < signatureLength)) {
if (token.type === tokens.identifier) {
while (token.type !== _tokenizer.tokens.closeParen && ( // $FlowIgnore
token.type === _tokenizer.tokens.openParen || signaturePtr < signatureLength)) {
if (token.type === _tokenizer.tokens.identifier) {
args.push(t.identifier(token.value));
eatToken();
} else if (token.type === tokens.valtype) {
} else if (token.type === _tokenizer.tokens.valtype) {
// Handle locals
args.push(t.valtypeLiteral(token.value));
eatToken();
} else if (token.type === tokens.string) {
} else if (token.type === _tokenizer.tokens.string) {
args.push(t.stringLiteral(token.value));
eatToken();
} else if (token.type === tokens.number) {
} else if (token.type === _tokenizer.tokens.number) {
args.push( // TODO(sven): refactor the type signature handling

@@ -982,3 +980,3 @@ // https://github.com/xtuc/webassemblyjs/pull/129 is a good start

eatToken();
} else if (token.type === tokens.openParen) {
} else if (token.type === _tokenizer.tokens.openParen) {
/**

@@ -989,3 +987,3 @@ * Maybe some nested instructions

if (lookaheadAndCheck(tokens.name) === true || lookaheadAndCheck(tokens.valtype) === true || token.type === "keyword" // is any keyword
if (lookaheadAndCheck(_tokenizer.tokens.name) === true || lookaheadAndCheck(_tokenizer.tokens.valtype) === true || token.type === "keyword" // is any keyword
) {

@@ -1000,3 +998,3 @@ // $FlowIgnore

if (token.type === tokens.closeParen) {
if (token.type === _tokenizer.tokens.closeParen) {
eatToken();

@@ -1093,3 +1091,3 @@ }

if (token.type === tokens.name || token.type === tokens.valtype) {
if (token.type === _tokenizer.tokens.name || token.type === _tokenizer.tokens.valtype) {
var _name2 = token.value;

@@ -1099,7 +1097,7 @@ var object;

if (token.type === tokens.dot) {
if (token.type === _tokenizer.tokens.dot) {
object = _name2;
eatToken();
if (token.type !== tokens.name) {
if (token.type !== _tokenizer.tokens.name) {
throw new TypeError("Unknown token: " + token.type + ", name expected");

@@ -1112,3 +1110,3 @@ }

if (token.type === tokens.closeParen) {
if (token.type === _tokenizer.tokens.closeParen) {
var _endLoc = token.loc.end;

@@ -1136,3 +1134,3 @@

}
} else if (isKeyword(token, keywords.loop)) {
} else if (isKeyword(token, _tokenizer.keywords.loop)) {
/**

@@ -1144,11 +1142,11 @@ * Else a instruction with a keyword (loop or block)

return parseLoop();
} else if (isKeyword(token, keywords.block)) {
} else if (isKeyword(token, _tokenizer.keywords.block)) {
eatToken(); // keyword
return parseBlock();
} else if (isKeyword(token, keywords.call_indirect)) {
} else if (isKeyword(token, _tokenizer.keywords.call_indirect)) {
eatToken(); // keyword
return parseCallIndirect();
} else if (isKeyword(token, keywords.call)) {
} else if (isKeyword(token, _tokenizer.keywords.call)) {
eatToken(); // keyword

@@ -1158,6 +1156,6 @@

if (token.type === tokens.identifier) {
if (token.type === _tokenizer.tokens.identifier) {
index = identifierFromToken(token);
eatToken();
} else if (token.type === tokens.number) {
} else if (token.type === _tokenizer.tokens.number) {
index = t.indexLiteral(token.value);

@@ -1169,6 +1167,6 @@ eatToken();

while (token.type === tokens.openParen) {
while (token.type === _tokenizer.tokens.openParen) {
eatToken();
instrArgs.push(parseFuncInstr());
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}

@@ -1185,7 +1183,7 @@

}
} else if (isKeyword(token, keywords.if)) {
} else if (isKeyword(token, _tokenizer.keywords.if)) {
eatToken(); // Keyword
return parseIf();
} else if (isKeyword(token, keywords.module) && hasPlugin("wast")) {
} else if (isKeyword(token, _tokenizer.keywords.module) && hasPlugin("wast")) {
eatToken(); // In WAST you can have a module as an instruction's argument

@@ -1232,3 +1230,3 @@ // we will cast it into a instruction to not break the flow

if (token.type === tokens.identifier) {
if (token.type === _tokenizer.tokens.identifier) {
fnName = identifierFromToken(token);

@@ -1242,5 +1240,5 @@ eatToken();

while (token.type === tokens.openParen || token.type === tokens.name || token.type === tokens.valtype) {
while (token.type === _tokenizer.tokens.openParen || token.type === _tokenizer.tokens.name || token.type === _tokenizer.tokens.valtype) {
// Instructions without parens
if (token.type === tokens.name || token.type === tokens.valtype) {
if (token.type === _tokenizer.tokens.name || token.type === _tokenizer.tokens.valtype) {
fnBody.push(parseFuncInstr());

@@ -1252,15 +1250,15 @@ continue;

if (lookaheadAndCheck(keywords.param) === true) {
if (lookaheadAndCheck(_tokenizer.keywords.param) === true) {
eatToken();
fnParams.push.apply(fnParams, _toConsumableArray(parseFuncParam()));
} else if (lookaheadAndCheck(keywords.result) === true) {
} else if (lookaheadAndCheck(_tokenizer.keywords.result) === true) {
eatToken();
fnResult.push.apply(fnResult, _toConsumableArray(parseFuncResult()));
} else if (lookaheadAndCheck(keywords.export) === true) {
} else if (lookaheadAndCheck(_tokenizer.keywords.export) === true) {
eatToken();
parseFuncExport(fnName);
} else if (lookaheadAndCheck(keywords.type) === true) {
} else if (lookaheadAndCheck(_tokenizer.keywords.type) === true) {
eatToken();
typeRef = parseTypeReference();
} else if (lookaheadAndCheck(tokens.name) === true || lookaheadAndCheck(tokens.valtype) === true || token.type === "keyword" // is any keyword
} else if (lookaheadAndCheck(_tokenizer.tokens.name) === true || lookaheadAndCheck(_tokenizer.tokens.valtype) === true || token.type === "keyword" // is any keyword
) {

@@ -1275,3 +1273,3 @@ // Instruction

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}

@@ -1289,3 +1287,3 @@

function parseFuncExport(funcId) {
if (token.type !== tokens.string) {
if (token.type !== _tokenizer.tokens.string) {
throw function () {

@@ -1326,3 +1324,3 @@ return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Function export expected a string" + ", given " + tokenToString(token));

if (token.type === tokens.identifier) {
if (token.type === _tokenizer.tokens.identifier) {
id = identifierFromToken(token);

@@ -1332,3 +1330,3 @@ eatToken();

if (lookaheadAndCheck(tokens.openParen, keywords.func)) {
if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.func)) {
eatToken(); // (

@@ -1338,3 +1336,3 @@

if (token.type === tokens.closeParen) {
if (token.type === _tokenizer.tokens.closeParen) {
eatToken(); // function with an empty signature, we can abort here

@@ -1345,3 +1343,3 @@

if (lookaheadAndCheck(tokens.openParen, keywords.param)) {
if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.param)) {
eatToken(); // (

@@ -1352,6 +1350,6 @@

params = parseFuncParam();
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}
if (lookaheadAndCheck(tokens.openParen, keywords.result)) {
if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.result)) {
eatToken(); // (

@@ -1362,6 +1360,6 @@

result = parseFuncResult();
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}

@@ -1383,4 +1381,4 @@

while (token.type !== tokens.closeParen) {
if (token.type !== tokens.valtype) {
while (token.type !== _tokenizer.tokens.closeParen) {
if (token.type !== _tokenizer.tokens.valtype) {
throw function () {

@@ -1407,6 +1405,6 @@ return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Unexpected token in func result" + ", given " + tokenToString(token));

if (token.type === tokens.identifier) {
if (token.type === _tokenizer.tokens.identifier) {
ref = identifierFromToken(token);
eatToken();
} else if (token.type === tokens.number) {
} else if (token.type === _tokenizer.tokens.number) {
ref = t.numberLiteralFromRaw(token.value);

@@ -1439,3 +1437,3 @@ eatToken();

if (token.type === tokens.identifier) {
if (token.type === _tokenizer.tokens.identifier) {
name = identifierFromToken(token);

@@ -1451,3 +1449,3 @@ eatToken();

if (lookaheadAndCheck(tokens.openParen, keywords.export)) {
if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.export)) {
eatToken(); // (

@@ -1458,3 +1456,3 @@

var exportName = token.value;
eatTokenOfType(tokens.string);
eatTokenOfType(_tokenizer.tokens.string);
state.registredExportedElements.push({

@@ -1465,3 +1463,3 @@ exportType: "Global",

});
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}

@@ -1473,3 +1471,3 @@ /**

if (lookaheadAndCheck(tokens.openParen, keywords.import)) {
if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.import)) {
eatToken(); // (

@@ -1480,5 +1478,5 @@

var moduleName = token.value;
eatTokenOfType(tokens.string);
eatTokenOfType(_tokenizer.tokens.string);
var _name3 = token.value;
eatTokenOfType(tokens.string);
eatTokenOfType(_tokenizer.tokens.string);
importing = {

@@ -1489,3 +1487,3 @@ module: moduleName,

};
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}

@@ -1497,9 +1495,9 @@ /**

if (token.type === tokens.valtype) {
if (token.type === _tokenizer.tokens.valtype) {
type = t.globalType(token.value, "const");
eatToken();
} else if (token.type === tokens.openParen) {
} else if (token.type === _tokenizer.tokens.openParen) {
eatToken(); // (
if (isKeyword(token, keywords.mut) === false) {
if (isKeyword(token, _tokenizer.keywords.mut) === false) {
throw function () {

@@ -1514,3 +1512,3 @@ return new Error("\n" + (0, _helperCodeFrame.codeFrameFromSource)(source, token.loc) + "\n" + "Unsupported global type, expected mut" + ", given " + tokenToString(token));

eatToken();
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}

@@ -1536,6 +1534,6 @@

while (token.type === tokens.openParen) {
while (token.type === _tokenizer.tokens.openParen) {
eatToken();
init.push(parseFuncInstr());
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}

@@ -1559,3 +1557,3 @@

if (token.type === tokens.identifier) {
if (token.type === _tokenizer.tokens.identifier) {
id = token.value;

@@ -1565,3 +1563,3 @@ eatToken();

if (token.type === tokens.valtype) {
if (token.type === _tokenizer.tokens.valtype) {
valtype = token.value;

@@ -1580,3 +1578,3 @@ eatToken();

if (id === undefined) {
while (token.type === tokens.valtype) {
while (token.type === _tokenizer.tokens.valtype) {
valtype = token.value;

@@ -1612,3 +1610,3 @@ eatToken();

if (token.type === tokens.identifier) {
if (token.type === _tokenizer.tokens.identifier) {
tableIndex = identifierFromToken(token);

@@ -1618,3 +1616,3 @@ eatToken();

if (token.type === tokens.number) {
if (token.type === _tokenizer.tokens.number) {
tableIndex = t.indexLiteral(token.value);

@@ -1624,4 +1622,4 @@ eatToken();

while (token.type !== tokens.closeParen) {
if (lookaheadAndCheck(tokens.openParen, keywords.offset)) {
while (token.type !== _tokenizer.tokens.closeParen) {
if (lookaheadAndCheck(_tokenizer.tokens.openParen, _tokenizer.keywords.offset)) {
eatToken(); // (

@@ -1631,20 +1629,20 @@

while (token.type !== tokens.closeParen) {
eatTokenOfType(tokens.openParen);
while (token.type !== _tokenizer.tokens.closeParen) {
eatTokenOfType(_tokenizer.tokens.openParen);
offset.push(parseFuncInstr());
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}
eatTokenOfType(tokens.closeParen);
} else if (token.type === tokens.identifier) {
eatTokenOfType(_tokenizer.tokens.closeParen);
} else if (token.type === _tokenizer.tokens.identifier) {
funcs.push(t.identifier(token.value));
eatToken();
} else if (token.type === tokens.number) {
} else if (token.type === _tokenizer.tokens.number) {
funcs.push(t.indexLiteral(token.value));
eatToken();
} else if (token.type === tokens.openParen) {
} else if (token.type === _tokenizer.tokens.openParen) {
eatToken(); // (
offset.push(parseFuncInstr());
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
} else {

@@ -1673,3 +1671,3 @@ throw function () {

function parseStart() {
if (token.type === tokens.identifier) {
if (token.type === _tokenizer.tokens.identifier) {
var index = identifierFromToken(token);

@@ -1680,3 +1678,3 @@ eatToken();

if (token.type === tokens.number) {
if (token.type === _tokenizer.tokens.number) {
var _index2 = t.indexLiteral(token.value);

@@ -1691,7 +1689,7 @@

if (token.type === tokens.openParen) {
if (token.type === _tokenizer.tokens.openParen) {
eatToken();
var startLoc = getStartLoc();
if (isKeyword(token, keywords.export)) {
if (isKeyword(token, _tokenizer.keywords.export)) {
eatToken();

@@ -1705,3 +1703,3 @@ var node = parseExport();

if (isKeyword(token, keywords.loop)) {
if (isKeyword(token, _tokenizer.keywords.loop)) {
eatToken();

@@ -1716,3 +1714,3 @@

if (isKeyword(token, keywords.func)) {
if (isKeyword(token, _tokenizer.keywords.func)) {
eatToken();

@@ -1725,7 +1723,7 @@

maybeIgnoreComment();
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
return t.withLoc(_node2, _endLoc4, startLoc);
}
if (isKeyword(token, keywords.module)) {
if (isKeyword(token, _tokenizer.keywords.module)) {
eatToken();

@@ -1740,3 +1738,3 @@

if (isKeyword(token, keywords.import)) {
if (isKeyword(token, _tokenizer.keywords.import)) {
eatToken();

@@ -1748,7 +1746,7 @@

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
return t.withLoc(_node4, _endLoc6, startLoc);
}
if (isKeyword(token, keywords.block)) {
if (isKeyword(token, _tokenizer.keywords.block)) {
eatToken();

@@ -1760,7 +1758,7 @@

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
return t.withLoc(_node5, _endLoc7, startLoc);
}
if (isKeyword(token, keywords.memory)) {
if (isKeyword(token, _tokenizer.keywords.memory)) {
eatToken();

@@ -1772,7 +1770,7 @@

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
return t.withLoc(_node6, _endLoc8, startLoc);
}
if (isKeyword(token, keywords.data)) {
if (isKeyword(token, _tokenizer.keywords.data)) {
eatToken();

@@ -1784,7 +1782,7 @@

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
return t.withLoc(_node7, _endLoc9, startLoc);
}
if (isKeyword(token, keywords.table)) {
if (isKeyword(token, _tokenizer.keywords.table)) {
eatToken();

@@ -1796,7 +1794,7 @@

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
return t.withLoc(_node8, _endLoc10, startLoc);
}
if (isKeyword(token, keywords.global)) {
if (isKeyword(token, _tokenizer.keywords.global)) {
eatToken();

@@ -1808,7 +1806,7 @@

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
return t.withLoc(_node9, _endLoc11, startLoc);
}
if (isKeyword(token, keywords.type)) {
if (isKeyword(token, _tokenizer.keywords.type)) {
eatToken();

@@ -1820,7 +1818,7 @@

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
return t.withLoc(_node10, _endLoc12, startLoc);
}
if (isKeyword(token, keywords.start)) {
if (isKeyword(token, _tokenizer.keywords.start)) {
eatToken();

@@ -1832,7 +1830,7 @@

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
return t.withLoc(_node11, _endLoc13, startLoc);
}
if (isKeyword(token, keywords.elem)) {
if (isKeyword(token, _tokenizer.keywords.elem)) {
eatToken();

@@ -1844,3 +1842,3 @@

eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
return t.withLoc(_node12, _endLoc14, startLoc);

@@ -1855,3 +1853,3 @@ }

if (typeof token !== "undefined") {
eatTokenOfType(tokens.closeParen);
eatTokenOfType(_tokenizer.tokens.closeParen);
}

@@ -1863,3 +1861,3 @@

if (token.type === tokens.comment) {
if (token.type === _tokenizer.tokens.comment) {
var _startLoc = getStartLoc();

@@ -1866,0 +1864,0 @@

lib/index.js

@@ -6,2 +6,5 @@ "use strict";

});
var _exportNames = {
parse: true
};
exports.parse = parse;

@@ -11,9 +14,21 @@

var _tokenizer = require("./tokenizer");
var _numberLiterals = require("./number-literals");
Object.keys(_numberLiterals).forEach(function (key) {
if (key === "default" || key === "__esModule") return;
if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return;
Object.defineProperty(exports, key, {
enumerable: true,
get: function get() {
return _numberLiterals[key];
}
});
});
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = Object.defineProperty && Object.getOwnPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : {}; if (desc.get || desc.set) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } } newObj.default = obj; return newObj; } }
var _require = require("./tokenizer"),
tokenize = _require.tokenize;
function parse(source) {
var tokens = tokenize(source); // We pass the source here to show code frames
var tokens = (0, _tokenizer.tokenize)(source); // We pass the source here to show code frames

@@ -20,0 +35,0 @@ var ast = parser.parse(tokens, source);
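
The CommonJS entry mirrors the ESM change above: parse is exported directly and the number-literal exports are copied onto exports one key at a time. A hedged consumer-side sketch (helper names assumed from the esm diff):

const { parse, parse32I } = require("@webassemblyjs/wast-parser");

const ast = parse("(module)");   // unchanged public behavior; tokenize is now reached via _tokenizer
const n = parse32I("42");        // re-exported number-literal helper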

lib/number-literals.js

@@ -14,3 +14,3 @@ "use strict";

var _long = _interopRequireDefault(require("long"));
var _long = _interopRequireDefault(require("@xtuc/long"));

@@ -17,0 +17,0 @@ var _floatingPointHexParser = _interopRequireDefault(require("@webassemblyjs/floating-point-hex-parser"));

"use strict";
var _mamacro = require("mamacro");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.tokenize = tokenize;
exports.tokens = exports.keywords = void 0;

@@ -89,2 +93,3 @@ var _helperFsm = require("@webassemblyjs/helper-fsm");

};
exports.keywords = keywords;
var NUMERIC_SEPARATOR = "_";

@@ -442,6 +447,3 @@ /**

module.exports = {
tokenize: tokenize,
tokens: tokenTypes,
keywords: keywords
};
var tokens = tokenTypes;
exports.tokens = tokens;
package.json

{
"name": "@webassemblyjs/wast-parser",
"version": "1.7.1-0",
"version": "1.7.1",
"description": "WebAssembly text format parser",

@@ -21,12 +21,12 @@ "keywords": [

"dependencies": {
"@webassemblyjs/ast": "1.7.1-0",
"@webassemblyjs/floating-point-hex-parser": "1.7.1-0",
"@webassemblyjs/helper-api-error": "1.7.1-0",
"@webassemblyjs/helper-code-frame": "1.7.1-0",
"@webassemblyjs/helper-fsm": "1.7.1-0",
"long": "^3.2.0",
"@webassemblyjs/ast": "1.7.1",
"@webassemblyjs/floating-point-hex-parser": "1.7.1",
"@webassemblyjs/helper-api-error": "1.7.1",
"@webassemblyjs/helper-code-frame": "1.7.1",
"@webassemblyjs/helper-fsm": "1.7.1",
"@xtuc/long": "4.2.1",
"mamacro": "^0.0.3"
},
"devDependencies": {
"@webassemblyjs/helper-test-framework": "1.7.1-0"
"@webassemblyjs/helper-test-framework": "1.7.1"
},

@@ -33,0 +33,0 @@ "repository": {
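
Beyond the lockstep bump of the sibling @webassemblyjs packages to 1.7.1, the one dependency swap is from the unscoped long package to the @xtuc/long fork, matching the import changes in number-literals.js above. A hedged sketch for code that referenced the Long dependency directly (the fork is assumed to keep the long.js API):

// Before this release: const Long = require("long");
const Long = require("@xtuc/long");

// Same long.js surface, now resolved from the scoped fork.
const big = Long.fromString("9007199254740993");
console.log(big.toString());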
