babylon

Comparing version 5.8.5 to 5.8.6

lib/parser/comments.js

lib/index.js

@@ -9,28 +9,26 @@ "use strict";

var _state = require("./state");
var _parser = require("./parser");
var _options = require("./options");
var _parser2 = _interopRequireDefault(_parser);
require("./parseutil");
require("./parser/util");
require("./statement");
require("./parser/statement");
require("./lval");
require("./parser/lval");
require("./expression");
require("./parser/expression");
require("./node");
require("./parser/node");
require("./location");
require("./parser/location");
require("./lookahead");
require("./parser/comments");
var _tokentype = require("./tokentype");
var _tokenizerTypes = require("./tokenizer/types");
require("./tokenize");
require("./tokenizer");
require("./tokencontext");
require("./tokenizer/context");
require("./comments");
var _pluginsFlow = require("./plugins/flow");

@@ -44,9 +42,9 @@

_state.plugins.flow = _pluginsFlow2["default"];
_state.plugins.jsx = _pluginsJsx2["default"];
_parser.plugins.flow = _pluginsFlow2["default"];
_parser.plugins.jsx = _pluginsJsx2["default"];
function parse(input, options) {
return new _state.Parser(_options.getOptions(options), input).parse();
return new _parser2["default"](options, input).parse();
}
exports.tokTypes = _tokentype.types;
exports.tokTypes = _tokenizerTypes.types;
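The lib/index.js hunk above captures the 5.8.6 reorganization: the internal requires move under parser/ and tokenizer/, the flow and jsx plugins are registered on the default-exported Parser class instead of on the old _state module, and parse() now passes the caller's options object straight to the Parser constructor (presumably the constructor runs getOptions itself) instead of normalizing it up front. The public API is unchanged; a minimal usage sketch, with the plugins option shape assumed from defaultOptions and the registration lines above:

// hypothetical usage sketch; behaves the same against 5.8.5 and 5.8.6
var babylon = require("babylon");

var ast = babylon.parse("var el = <div>{1 + 1}</div>;", {
  sourceType: "module",    // "script" or "module", per the options below
  plugins: { jsx: true }   // plugin flag assumed from the registration above
});
// `ast` comes out the same either way; only the internal wiring changed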

@@ -0,1 +1,4 @@

// A second optional argument can be given to further configure
// the parser process. These options are recognized:
"use strict";

@@ -5,8 +8,2 @@

exports.getOptions = getOptions;
var _util = require("./util");
// A second optional argument can be given to further configure
// the parser process. These options are recognized:
var defaultOptions = {

@@ -26,16 +23,2 @@ // Source type ("script" or "module") for different semantics

allowImportExportEverywhere: false,
// When `locations` is on, `loc` properties holding objects with
// `start` and `end` properties in `{line, column}` form (with
// line being 1-based and column 0-based) will be attached to the
// nodes.
locations: false,
// Nodes have their start and end characters offsets recorded in
// `start` and `end` properties (directly on the node, rather than
// the `loc` object, which holds line/column data. To also add a
// [semi-standardized][range] `range` property holding a `[start,
// end]` array with the same numbers, set the `ranges` option to
// `true`.
//
// [range]: https://bugzilla.mozilla.org/show_bug.cgi?id=745678
ranges: false,
plugins: {},

@@ -52,6 +35,6 @@ // Babel-specific options

var options = {};
for (var opt in defaultOptions) {
options[opt] = opts && _util.has(opts, opt) ? opts[opt] : defaultOptions[opt];
for (var key in defaultOptions) {
options[key] = opts && key in opts ? opts[key] : defaultOptions[key];
}
return options;
}
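The hunks above come from the options module: the introductory comments move to the top of the file, the locations and ranges entries drop out of defaultOptions, and getOptions swaps _util.has(opts, opt) for a plain `key in opts` test. Assuming _util.has was a hasOwnProperty wrapper (its body is not shown in this diff), the only observable difference is that inherited properties on the caller's options object are now picked up:

// illustration of `in` vs an own-property check; _util.has is assumed to have
// wrapped Object.prototype.hasOwnProperty, which this diff does not show
var base = { allowImportExportEverywhere: true };
var opts = Object.create(base);

"allowImportExportEverywhere" in opts;
// true  -> 5.8.6 honors the inherited value
Object.prototype.hasOwnProperty.call(opts, "allowImportExportEverywhere");
// false -> 5.8.5 would have fallen back to the default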
"use strict";
exports.__esModule = true;
// istanbul ignore next
var _tokentype = require("../tokentype");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
var _state = require("../state");
var _tokenizerTypes = require("../tokenizer/types");
var pp = _state.Parser.prototype;
var _parser = require("../parser");
var _parser2 = _interopRequireDefault(_parser);
var pp = _parser2["default"].prototype;
pp.flowParseTypeInitialiser = function (tok) {
var oldInType = this.inType;
this.inType = true;
this.expect(tok || _tokentype.types.colon);
this.expect(tok || _tokenizerTypes.types.colon);
var type = this.flowParseType();

@@ -40,7 +45,7 @@ this.inType = oldInType;

this.expect(_tokentype.types.parenL);
this.expect(_tokenizerTypes.types.parenL);
var tmp = this.flowParseFunctionTypeParams();
typeNode.params = tmp.params;
typeNode.rest = tmp.rest;
this.expect(_tokentype.types.parenR);
this.expect(_tokenizerTypes.types.parenR);
typeNode.returnType = this.flowParseTypeInitialiser();

@@ -59,7 +64,7 @@

pp.flowParseDeclare = function (node) {
if (this.type === _tokentype.types._class) {
if (this.match(_tokenizerTypes.types._class)) {
return this.flowParseDeclareClass(node);
} else if (this.type === _tokentype.types._function) {
} else if (this.match(_tokenizerTypes.types._function)) {
return this.flowParseDeclareFunction(node);
} else if (this.type === _tokentype.types._var) {
} else if (this.match(_tokenizerTypes.types._var)) {
return this.flowParseDeclareVariable(node);
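The flowParseDeclare hunk above shows the pattern repeated through the rest of this plugin: token-type comparisons written as this.type === _tokentype.types.X become this.match(_tokenizerTypes.types.X). The match helper itself belongs to the 5.8.6 tokenizer and is not part of this diff; it presumably amounts to something like:

// hypothetical sketch of the helper the new calls rely on; its real
// definition is not shown in this diff
pp.match = function (type) {
  return this.state.type === type;
};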

@@ -83,3 +88,3 @@ } else if (this.isContextual("module")) {

if (this.type === _tokentype.types.string) {
if (this.match(_tokenizerTypes.types.string)) {
node.id = this.parseExprAtom();

@@ -92,4 +97,4 @@ } else {

var body = bodyNode.body = [];
this.expect(_tokentype.types.braceL);
while (this.type !== _tokentype.types.braceR) {
this.expect(_tokenizerTypes.types.braceL);
while (!this.match(_tokenizerTypes.types.braceR)) {
var node2 = this.startNode();

@@ -102,3 +107,3 @@

}
this.expect(_tokentype.types.braceR);
this.expect(_tokenizerTypes.types.braceR);

@@ -122,6 +127,6 @@ this.finishNode(bodyNode, "BlockStatement");

if (this.eat(_tokentype.types._extends)) {
if (this.eat(_tokenizerTypes.types._extends)) {
do {
node["extends"].push(this.flowParseInterfaceExtends());
} while (this.eat(_tokentype.types.comma));
} while (this.eat(_tokenizerTypes.types.comma));
}

@@ -161,3 +166,3 @@

node.right = this.flowParseTypeInitialiser(_tokentype.types.eq);
node.right = this.flowParseTypeInitialiser(_tokenizerTypes.types.eq);
this.semicolon();

@@ -178,3 +183,3 @@

if (!this.isRelational(">")) {
this.expect(_tokentype.types.comma);
this.expect(_tokenizerTypes.types.comma);
}

@@ -198,3 +203,3 @@ }

if (!this.isRelational(">")) {
this.expect(_tokentype.types.comma);
this.expect(_tokenizerTypes.types.comma);
}

@@ -210,3 +215,3 @@ }

pp.flowParseObjectPropertyKey = function () {
return this.type === _tokentype.types.num || this.type === _tokentype.types.string ? this.parseExprAtom() : this.parseIdent(true);
return this.match(_tokenizerTypes.types.num) || this.match(_tokenizerTypes.types.string) ? this.parseExprAtom() : this.parseIdent(true);
};

@@ -217,6 +222,6 @@

this.expect(_tokentype.types.bracketL);
this.expect(_tokenizerTypes.types.bracketL);
node.id = this.flowParseObjectPropertyKey();
node.key = this.flowParseTypeInitialiser();
this.expect(_tokentype.types.bracketR);
this.expect(_tokenizerTypes.types.bracketR);
node.value = this.flowParseTypeInitialiser();

@@ -237,14 +242,14 @@

this.expect(_tokentype.types.parenL);
while (this.type === _tokentype.types.name) {
this.expect(_tokenizerTypes.types.parenL);
while (this.match(_tokenizerTypes.types.name)) {
node.params.push(this.flowParseFunctionTypeParam());
if (this.type !== _tokentype.types.parenR) {
this.expect(_tokentype.types.comma);
if (!this.match(_tokenizerTypes.types.parenR)) {
this.expect(_tokenizerTypes.types.comma);
}
}
if (this.eat(_tokentype.types.ellipsis)) {
if (this.eat(_tokenizerTypes.types.ellipsis)) {
node.rest = this.flowParseFunctionTypeParam();
}
this.expect(_tokentype.types.parenR);
this.expect(_tokenizerTypes.types.parenR);
node.returnType = this.flowParseTypeInitialiser();

@@ -284,7 +289,7 @@

this.expect(_tokentype.types.braceL);
this.expect(_tokenizerTypes.types.braceL);
while (this.type !== _tokentype.types.braceR) {
var startPos = this.start,
startLoc = this.startLoc;
while (!this.match(_tokenizerTypes.types.braceR)) {
var startPos = this.state.start,
startLoc = this.state.startLoc;
node = this.startNode();

@@ -296,8 +301,8 @@ if (allowStatic && this.isContextual("static")) {

if (this.type === _tokentype.types.bracketL) {
if (this.match(_tokenizerTypes.types.bracketL)) {
nodeStart.indexers.push(this.flowParseObjectTypeIndexer(node, isStatic));
} else if (this.type === _tokentype.types.parenL || this.isRelational("<")) {
} else if (this.match(_tokenizerTypes.types.parenL) || this.isRelational("<")) {
nodeStart.callProperties.push(this.flowParseObjectTypeCallProperty(node, allowStatic));
} else {
if (isStatic && this.type === _tokentype.types.colon) {
if (isStatic && this.match(_tokenizerTypes.types.colon)) {
propertyKey = this.parseIdent();

@@ -307,7 +312,7 @@ } else {

}
if (this.isRelational("<") || this.type === _tokentype.types.parenL) {
if (this.isRelational("<") || this.match(_tokenizerTypes.types.parenL)) {
// This is a method property
nodeStart.properties.push(this.flowParseObjectTypeMethod(startPos, startLoc, isStatic, propertyKey));
} else {
if (this.eat(_tokentype.types.question)) {
if (this.eat(_tokenizerTypes.types.question)) {
optional = true;

@@ -325,3 +330,3 @@ }

this.expect(_tokentype.types.braceR);
this.expect(_tokenizerTypes.types.braceR);

@@ -332,3 +337,3 @@ return this.finishNode(nodeStart, "ObjectTypeAnnotation");

pp.flowObjectTypeSemicolon = function () {
if (!this.eat(_tokentype.types.semi) && !this.eat(_tokentype.types.comma) && this.type !== _tokentype.types.braceR) {
if (!this.eat(_tokenizerTypes.types.semi) && !this.eat(_tokenizerTypes.types.comma) && !this.match(_tokenizerTypes.types.braceR)) {
this.unexpected();

@@ -344,3 +349,3 @@ }

while (this.eat(_tokentype.types.dot)) {
while (this.eat(_tokenizerTypes.types.dot)) {
var node2 = this.startNodeAt(startPos, startLoc);

@@ -361,3 +366,3 @@ node2.qualification = node.id;

var node = this.startNode();
this.expect(_tokentype.types._typeof);
this.expect(_tokenizerTypes.types._typeof);
node.argument = this.flowParsePrimaryType();

@@ -370,10 +375,10 @@ return this.finishNode(node, "TypeofTypeAnnotation");

node.types = [];
this.expect(_tokentype.types.bracketL);
this.expect(_tokenizerTypes.types.bracketL);
// We allow trailing commas
while (this.pos < this.input.length && this.type !== _tokentype.types.bracketR) {
while (this.state.pos < this.input.length && !this.match(_tokenizerTypes.types.bracketR)) {
node.types.push(this.flowParseType());
if (this.type === _tokentype.types.bracketR) break;
this.expect(_tokentype.types.comma);
if (this.match(_tokenizerTypes.types.bracketR)) break;
this.expect(_tokenizerTypes.types.comma);
}
this.expect(_tokentype.types.bracketR);
this.expect(_tokenizerTypes.types.bracketR);
return this.finishNode(node, "TupleTypeAnnotation");

@@ -386,3 +391,3 @@ };

node.name = this.parseIdent();
if (this.eat(_tokentype.types.question)) {
if (this.eat(_tokenizerTypes.types.question)) {
optional = true;

@@ -397,9 +402,9 @@ }

var ret = { params: [], rest: null };
while (this.type === _tokentype.types.name) {
while (this.match(_tokenizerTypes.types.name)) {
ret.params.push(this.flowParseFunctionTypeParam());
if (this.type !== _tokentype.types.parenR) {
this.expect(_tokentype.types.comma);
if (!this.match(_tokenizerTypes.types.parenR)) {
this.expect(_tokenizerTypes.types.comma);
}
}
if (this.eat(_tokentype.types.ellipsis)) {
if (this.eat(_tokenizerTypes.types.ellipsis)) {
ret.rest = this.flowParseFunctionTypeParam();

@@ -440,4 +445,4 @@ }

pp.flowParsePrimaryType = function () {
var startPos = this.start,
startLoc = this.startLoc;
var startPos = this.state.start,
startLoc = this.state.startLoc;
var node = this.startNode();

@@ -448,22 +453,22 @@ var tmp;

switch (this.type) {
case _tokentype.types.name:
switch (this.state.type) {
case _tokenizerTypes.types.name:
return this.flowIdentToTypeAnnotation(startPos, startLoc, node, this.parseIdent());
case _tokentype.types.braceL:
case _tokenizerTypes.types.braceL:
return this.flowParseObjectType();
case _tokentype.types.bracketL:
case _tokenizerTypes.types.bracketL:
return this.flowParseTupleType();
case _tokentype.types.relational:
if (this.value === "<") {
case _tokenizerTypes.types.relational:
if (this.state.value === "<") {
node.typeParameters = this.flowParseTypeParameterDeclaration();
this.expect(_tokentype.types.parenL);
this.expect(_tokenizerTypes.types.parenL);
tmp = this.flowParseFunctionTypeParams();
node.params = tmp.params;
node.rest = tmp.rest;
this.expect(_tokentype.types.parenR);
this.expect(_tokenizerTypes.types.parenR);
this.expect(_tokentype.types.arrow);
this.expect(_tokenizerTypes.types.arrow);

@@ -475,10 +480,10 @@ node.returnType = this.flowParseType();

case _tokentype.types.parenL:
case _tokenizerTypes.types.parenL:
this.next();
// Check to see if this is actually a grouped type
if (this.type !== _tokentype.types.parenR && this.type !== _tokentype.types.ellipsis) {
if (this.type === _tokentype.types.name) {
if (!this.match(_tokenizerTypes.types.parenR) && !this.match(_tokenizerTypes.types.ellipsis)) {
if (this.match(_tokenizerTypes.types.name)) {
var token = this.lookahead().type;
isGroupedType = token !== _tokentype.types.question && token !== _tokentype.types.colon;
isGroupedType = token !== _tokenizerTypes.types.question && token !== _tokenizerTypes.types.colon;
} else {

@@ -491,7 +496,7 @@ isGroupedType = true;

type = this.flowParseType();
this.expect(_tokentype.types.parenR);
this.expect(_tokenizerTypes.types.parenR);
// If we see a => next then someone was probably confused about
// function types, so we can provide a better error message
if (this.eat(_tokentype.types.arrow)) {
if (this.eat(_tokenizerTypes.types.arrow)) {
this.raise(node, "Unexpected token =>. It looks like " + "you are trying to write a function type, but you ended up " + "writing a grouped type followed by an =>, which is a syntax " + "error. Remember, function type parameters are named so function " + "types look like (name1: type1, name2: type2) => returnType. You " + "probably wrote (type1) => returnType");

@@ -507,5 +512,5 @@ }

this.expect(_tokentype.types.parenR);
this.expect(_tokenizerTypes.types.parenR);
this.expect(_tokentype.types.arrow);
this.expect(_tokenizerTypes.types.arrow);

@@ -517,11 +522,11 @@ node.returnType = this.flowParseType();

case _tokentype.types.string:
node.rawValue = node.value = this.value;
node.raw = this.input.slice(this.start, this.end);
case _tokenizerTypes.types.string:
node.rawValue = node.value = this.state.value;
node.raw = this.input.slice(this.state.start, this.state.end);
this.next();
return this.finishNode(node, "StringLiteralTypeAnnotation");
case _tokentype.types.num:
node.rawValue = node.value = this.value;
node.raw = this.input.slice(this.start, this.end);
case _tokenizerTypes.types.num:
node.rawValue = node.value = this.state.value;
node.raw = this.input.slice(this.state.start, this.state.end);
this.next();

@@ -531,3 +536,3 @@ return this.finishNode(node, "NumberLiteralTypeAnnotation");

default:
if (this.type.keyword === "typeof") {
if (this.state.type.keyword === "typeof") {
return this.flowParseTypeofType();

@@ -543,5 +548,5 @@ }

var type = node.elementType = this.flowParsePrimaryType();
if (this.type === _tokentype.types.bracketL) {
this.expect(_tokentype.types.bracketL);
this.expect(_tokentype.types.bracketR);
if (this.match(_tokenizerTypes.types.bracketL)) {
this.expect(_tokenizerTypes.types.bracketL);
this.expect(_tokenizerTypes.types.bracketR);
return this.finishNode(node, "ArrayTypeAnnotation");

@@ -555,3 +560,3 @@ } else {

var node = this.startNode();
if (this.eat(_tokentype.types.question)) {
if (this.eat(_tokenizerTypes.types.question)) {
node.typeAnnotation = this.flowParsePrefixType();

@@ -568,3 +573,3 @@ return this.finishNode(node, "NullableTypeAnnotation");

node.types = [type];
while (this.eat(_tokentype.types.bitwiseAND)) {
while (this.eat(_tokenizerTypes.types.bitwiseAND)) {
node.types.push(this.flowParsePrefixType());

@@ -579,3 +584,3 @@ }

node.types = [type];
while (this.eat(_tokentype.types.bitwiseOR)) {
while (this.eat(_tokenizerTypes.types.bitwiseOR)) {
node.types.push(this.flowParseIntersectionType());

@@ -604,8 +609,8 @@ }

if (canBeOptionalParam && this.eat(_tokentype.types.question)) {
this.expect(_tokentype.types.question);
if (canBeOptionalParam && this.eat(_tokenizerTypes.types.question)) {
this.expect(_tokenizerTypes.types.question);
isOptionalParam = true;
}
if (requireTypeAnnotation || this.type === _tokentype.types.colon) {
if (requireTypeAnnotation || this.match(_tokenizerTypes.types.colon)) {
ident.typeAnnotation = this.flowParseTypeAnnotation();

@@ -627,3 +632,3 @@ this.finishNode(ident, ident.type);

return function (node, allowExpression) {
if (this.type === _tokentype.types.colon && !allowExpression) {
if (this.match(_tokenizerTypes.types.colon) && !allowExpression) {
// if allowExpression is true then we're parsing an arrow function and if

@@ -641,3 +646,3 @@ // there's a return type then it's been handled elsewhere

// strict mode handling of `interface` since it's a reserved word
if (this.strict && this.type === _tokentype.types.name && this.value === "interface") {
if (this.strict && this.match(_tokenizerTypes.types.name) && this.state.value === "interface") {
var node = this.startNode();

@@ -656,6 +661,6 @@ this.next();

if (expr.name === "declare") {
if (this.type === _tokentype.types._class || this.type === _tokentype.types.name || this.type === _tokentype.types._function || this.type === _tokentype.types._var) {
if (this.match(_tokenizerTypes.types._class) || this.match(_tokenizerTypes.types.name) || this.match(_tokenizerTypes.types._function) || this.match(_tokenizerTypes.types._var)) {
return this.flowParseDeclare(node);
}
} else if (this.type === _tokentype.types.name) {
} else if (this.match(_tokenizerTypes.types.name)) {
if (expr.name === "interface") {

@@ -681,3 +686,3 @@ return this.flowParseInterface(node);

return function (node, startLoc, startPos, forceArrow) {
if (this.type === _tokentype.types.colon) {
if (this.match(_tokenizerTypes.types.colon)) {
var typeCastNode = this.startNodeAt(startLoc, startPos);

@@ -687,7 +692,7 @@ typeCastNode.expression = node;

if (forceArrow && this.type !== _tokentype.types.arrow) {
if (forceArrow && !this.match(_tokenizerTypes.types.arrow)) {
this.unexpected();
}
if (this.eat(_tokentype.types.arrow)) {
if (this.eat(_tokenizerTypes.types.arrow)) {
// ((lol): number => {});

@@ -730,3 +735,3 @@ var func = this.parseArrowExpression(this.startNodeAt(startLoc, startPos), [node]);

if (this.inType && (code === 62 || code === 60)) {
return this.finishOp(_tokentype.types.relational, 1);
return this.finishOp(_tokenizerTypes.types.relational, 1);
} else {

@@ -778,3 +783,3 @@ return inner.call(this, code);

var node = inner.call(this, allowEmpty, refShorthandDefaultPos);
if (this.type === _tokentype.types.colon) {
if (this.match(_tokenizerTypes.types.colon)) {
container._exprListItem = true;

@@ -792,3 +797,3 @@ container.expression = node;

return function (node) {
if (this.type === _tokentype.types.colon) {
if (this.match(_tokenizerTypes.types.colon)) {
node.typeAnnotation = this.flowParseTypeAnnotation();

@@ -802,3 +807,3 @@ }

return function () {
return this.type === _tokentype.types.colon || inner.call(this);
return this.match(_tokenizerTypes.types.colon) || inner.call(this);
};

@@ -837,3 +842,3 @@ });

implemented.push(this.finishNode(_node, "ClassImplements"));
} while (this.eat(_tokentype.types.comma));
} while (this.eat(_tokenizerTypes.types.comma));
}

@@ -848,3 +853,3 @@ };

typeParameters = this.flowParseTypeParameterDeclaration();
if (this.type !== _tokentype.types.parenL) this.unexpected();
if (!this.match(_tokenizerTypes.types.parenL)) this.unexpected();
}

@@ -858,6 +863,6 @@ inner.apply(this, arguments);

return function (param) {
if (this.eat(_tokentype.types.question)) {
if (this.eat(_tokenizerTypes.types.question)) {
param.optional = true;
}
if (this.type === _tokentype.types.colon) {
if (this.match(_tokenizerTypes.types.colon)) {
param.typeAnnotation = this.flowParseTypeAnnotation();

@@ -874,6 +879,6 @@ }

var kind = this.type === _tokentype.types._typeof ? "typeof" : this.isContextual("type") ? "type" : null;
var kind = this.match(_tokenizerTypes.types._typeof) ? "typeof" : this.isContextual("type") ? "type" : null;
if (kind) {
var lh = this.lookahead();
if (lh.type === _tokentype.types.name && lh.value !== "from" || lh.type === _tokentype.types.braceL || lh.type === _tokentype.types.star) {
if (lh.type === _tokenizerTypes.types.name && lh.value !== "from" || lh.type === _tokenizerTypes.types.braceL || lh.type === _tokenizerTypes.types.star) {
this.next();

@@ -902,3 +907,3 @@ node.importKind = kind;

inner.call(this, decl);
if (this.type === _tokentype.types.colon) {
if (this.match(_tokenizerTypes.types.colon)) {
decl.id.typeAnnotation = this.flowParseTypeAnnotation();

@@ -913,3 +918,3 @@ this.finishNode(decl.id, decl.id.type);

return function (node, call) {
if (this.type === _tokentype.types.colon) {
if (this.match(_tokenizerTypes.types.colon)) {
node.returnType = this.flowParseTypeAnnotation();

@@ -924,13 +929,13 @@ }

return function (startPos, startLoc, canBeArrow, isAsync) {
startPos = startPos || this.start;
startLoc = startLoc || this.startLoc;
startPos = startPos || this.state.start;
startLoc = startLoc || this.state.startLoc;
if (this.lookahead().type === _tokentype.types.parenR) {
if (this.lookahead().type === _tokenizerTypes.types.parenR) {
// var foo = (): number => {};
this.expect(_tokentype.types.parenL);
this.expect(_tokentype.types.parenR);
this.expect(_tokenizerTypes.types.parenL);
this.expect(_tokenizerTypes.types.parenR);
var node = this.startNodeAt(startPos, startLoc);
if (this.type === _tokentype.types.colon) node.returnType = this.flowParseTypeAnnotation();
this.expect(_tokentype.types.arrow);
if (this.match(_tokenizerTypes.types.colon)) node.returnType = this.flowParseTypeAnnotation();
this.expect(_tokenizerTypes.types.arrow);
return this.parseArrowExpression(node, [], isAsync);

@@ -941,5 +946,5 @@ } else {

var state = this.getState();
var state = this.state.clone();
if (this.type === _tokentype.types.colon) {
if (this.match(_tokenizerTypes.types.colon)) {
try {

@@ -949,3 +954,3 @@ return this.parseParenItem(node, startPos, startLoc, true);

if (err instanceof SyntaxError) {
this.setState(state);
this.state = state;
return node;
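Besides the match() rewrite, the other recurring change in this plugin is that tokenizer bookkeeping (pos, start, startLoc, value, type, and so on) moves off the parser instance onto a this.state object, and the speculative-parse backtracking above switches from getState()/setState(state) to cloning that state object and assigning the clone back when the attempt throws. A rough sketch of what such a clone method would need to do; the real State class is not part of this diff:

// hypothetical sketch only; babylon's actual State implementation is not shown here
function State() {}
State.prototype.clone = function () {
  var copy = new State();
  for (var key in this) {
    if (Object.prototype.hasOwnProperty.call(this, key)) {
      copy[key] = this[key]; // copy every own field so the parse position can be restored
    }
  }
  return copy;
};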

@@ -952,0 +957,0 @@ } else {

@@ -12,41 +12,43 @@ "use strict";

var _tokentype = require("../../tokentype");
var _tokenizerTypes = require("../../tokenizer/types");
var _tokencontext = require("../../tokencontext");
var _tokenizerContext = require("../../tokenizer/context");
var _state = require("../../state");
var _parser = require("../../parser");
var _identifier = require("../../identifier");
var _parser2 = _interopRequireDefault(_parser);
var _whitespace = require("../../whitespace");
var _utilIdentifier = require("../../util/identifier");
var _utilWhitespace = require("../../util/whitespace");
var HEX_NUMBER = /^[\da-fA-F]+$/;
var DECIMAL_NUMBER = /^\d+$/;
_tokencontext.types.j_oTag = new _tokencontext.TokContext("<tag", false);
_tokencontext.types.j_cTag = new _tokencontext.TokContext("</tag", false);
_tokencontext.types.j_expr = new _tokencontext.TokContext("<tag>...</tag>", true, true);
_tokenizerContext.types.j_oTag = new _tokenizerContext.TokContext("<tag", false);
_tokenizerContext.types.j_cTag = new _tokenizerContext.TokContext("</tag", false);
_tokenizerContext.types.j_expr = new _tokenizerContext.TokContext("<tag>...</tag>", true, true);
_tokentype.types.jsxName = new _tokentype.TokenType("jsxName");
_tokentype.types.jsxText = new _tokentype.TokenType("jsxText", { beforeExpr: true });
_tokentype.types.jsxTagStart = new _tokentype.TokenType("jsxTagStart");
_tokentype.types.jsxTagEnd = new _tokentype.TokenType("jsxTagEnd");
_tokenizerTypes.types.jsxName = new _tokenizerTypes.TokenType("jsxName");
_tokenizerTypes.types.jsxText = new _tokenizerTypes.TokenType("jsxText", { beforeExpr: true });
_tokenizerTypes.types.jsxTagStart = new _tokenizerTypes.TokenType("jsxTagStart");
_tokenizerTypes.types.jsxTagEnd = new _tokenizerTypes.TokenType("jsxTagEnd");
_tokentype.types.jsxTagStart.updateContext = function () {
this.context.push(_tokencontext.types.j_expr); // treat as beginning of JSX expression
this.context.push(_tokencontext.types.j_oTag); // start opening tag context
this.exprAllowed = false;
_tokenizerTypes.types.jsxTagStart.updateContext = function () {
this.state.context.push(_tokenizerContext.types.j_expr); // treat as beginning of JSX expression
this.state.context.push(_tokenizerContext.types.j_oTag); // start opening tag context
this.state.exprAllowed = false;
};
_tokentype.types.jsxTagEnd.updateContext = function (prevType) {
var out = this.context.pop();
if (out === _tokencontext.types.j_oTag && prevType === _tokentype.types.slash || out === _tokencontext.types.j_cTag) {
this.context.pop();
this.exprAllowed = this.curContext() === _tokencontext.types.j_expr;
_tokenizerTypes.types.jsxTagEnd.updateContext = function (prevType) {
var out = this.state.context.pop();
if (out === _tokenizerContext.types.j_oTag && prevType === _tokenizerTypes.types.slash || out === _tokenizerContext.types.j_cTag) {
this.state.context.pop();
this.state.exprAllowed = this.curContext() === _tokenizerContext.types.j_expr;
} else {
this.exprAllowed = true;
this.state.exprAllowed = true;
}
};
var pp = _state.Parser.prototype;
var pp = _parser2["default"].prototype;
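The jsx plugin gets the same treatment: its token types and tokenizer contexts now come from tokenizer/types and tokenizer/context, the updateContext hooks push and pop on this.state.context rather than this.context, and the plugin extends the default-exported Parser prototype. The curContext() call used above is untouched by this diff; it presumably just reads the top of that context stack, along the lines of:

// hypothetical sketch of curContext(), which this diff calls but does not define
pp.curContext = function () {
  return this.state.context[this.state.context.length - 1];
};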

@@ -57,9 +59,9 @@ // Reads inline JSX contents token.

var out = "",
chunkStart = this.pos;
chunkStart = this.state.pos;
for (;;) {
if (this.pos >= this.input.length) {
this.raise(this.start, "Unterminated JSX contents");
if (this.state.pos >= this.input.length) {
this.raise(this.state.start, "Unterminated JSX contents");
}
var ch = this.input.charCodeAt(this.pos);
var ch = this.input.charCodeAt(this.state.pos);

@@ -70,26 +72,26 @@ switch (ch) {

// "{"
if (this.pos === this.start) {
if (ch === 60 && this.exprAllowed) {
++this.pos;
return this.finishToken(_tokentype.types.jsxTagStart);
if (this.state.pos === this.state.start) {
if (ch === 60 && this.state.exprAllowed) {
++this.state.pos;
return this.finishToken(_tokenizerTypes.types.jsxTagStart);
}
return this.getTokenFromCode(ch);
}
out += this.input.slice(chunkStart, this.pos);
return this.finishToken(_tokentype.types.jsxText, out);
out += this.input.slice(chunkStart, this.state.pos);
return this.finishToken(_tokenizerTypes.types.jsxText, out);
case 38:
// "&"
out += this.input.slice(chunkStart, this.pos);
out += this.input.slice(chunkStart, this.state.pos);
out += this.jsxReadEntity();
chunkStart = this.pos;
chunkStart = this.state.pos;
break;
default:
if (_whitespace.isNewLine(ch)) {
out += this.input.slice(chunkStart, this.pos);
if (_utilWhitespace.isNewLine(ch)) {
out += this.input.slice(chunkStart, this.state.pos);
out += this.jsxReadNewLine(true);
chunkStart = this.pos;
chunkStart = this.state.pos;
} else {
++this.pos;
++this.state.pos;
}

@@ -101,7 +103,7 @@ }

pp.jsxReadNewLine = function (normalizeCRLF) {
var ch = this.input.charCodeAt(this.pos);
var ch = this.input.charCodeAt(this.state.pos);
var out;
++this.pos;
if (ch === 13 && this.input.charCodeAt(this.pos) === 10) {
++this.pos;
++this.state.pos;
if (ch === 13 && this.input.charCodeAt(this.state.pos) === 10) {
++this.state.pos;
out = normalizeCRLF ? "\n" : "\r\n";

@@ -111,6 +113,4 @@ } else {

}
if (this.options.locations) {
++this.curLine;
this.lineStart = this.pos;
}
++this.state.curLine;
this.state.lineStart = this.state.pos;

@@ -122,25 +122,25 @@ return out;

var out = "",
chunkStart = ++this.pos;
chunkStart = ++this.state.pos;
for (;;) {
if (this.pos >= this.input.length) {
this.raise(this.start, "Unterminated string constant");
if (this.state.pos >= this.input.length) {
this.raise(this.state.start, "Unterminated string constant");
}
var ch = this.input.charCodeAt(this.pos);
var ch = this.input.charCodeAt(this.state.pos);
if (ch === quote) break;
if (ch === 38) {
// "&"
out += this.input.slice(chunkStart, this.pos);
out += this.input.slice(chunkStart, this.state.pos);
out += this.jsxReadEntity();
chunkStart = this.pos;
} else if (_whitespace.isNewLine(ch)) {
out += this.input.slice(chunkStart, this.pos);
chunkStart = this.state.pos;
} else if (_utilWhitespace.isNewLine(ch)) {
out += this.input.slice(chunkStart, this.state.pos);
out += this.jsxReadNewLine(false);
chunkStart = this.pos;
chunkStart = this.state.pos;
} else {
++this.pos;
++this.state.pos;
}
}
out += this.input.slice(chunkStart, this.pos++);
return this.finishToken(_tokentype.types.string, out);
out += this.input.slice(chunkStart, this.state.pos++);
return this.finishToken(_tokenizerTypes.types.string, out);
};

@@ -152,8 +152,7 @@

entity;
var ch = this.input[this.pos];
if (ch !== "&") this.raise(this.pos, "Entity must start with an ampersand");
var ch = this.input[this.state.pos];
var startPos = ++this.pos;
while (this.pos < this.input.length && count++ < 10) {
ch = this.input[this.pos++];
var startPos = ++this.state.pos;
while (this.state.pos < this.input.length && count++ < 10) {
ch = this.input[this.state.pos++];
if (ch === ";") {

@@ -176,3 +175,3 @@ if (str[0] === "#") {

if (!entity) {
this.pos = startPos;
this.state.pos = startPos;
return "&";

@@ -192,7 +191,7 @@ }

var ch,
start = this.pos;
start = this.state.pos;
do {
ch = this.input.charCodeAt(++this.pos);
} while (_identifier.isIdentifierChar(ch) || ch === 45); // "-"
return this.finishToken(_tokentype.types.jsxName, this.input.slice(start, this.pos));
ch = this.input.charCodeAt(++this.state.pos);
} while (_utilIdentifier.isIdentifierChar(ch) || ch === 45); // "-"
return this.finishToken(_tokenizerTypes.types.jsxName, this.input.slice(start, this.state.pos));
};

@@ -220,6 +219,6 @@

var node = this.startNode();
if (this.type === _tokentype.types.jsxName) {
node.name = this.value;
} else if (this.type.keyword) {
node.name = this.type.keyword;
if (this.match(_tokenizerTypes.types.jsxName)) {
node.name = this.state.value;
} else if (this.state.type.keyword) {
node.name = this.state.type.keyword;
} else {

@@ -235,6 +234,6 @@ this.unexpected();

pp.jsxParseNamespacedName = function () {
var startPos = this.start,
startLoc = this.startLoc;
var startPos = this.state.start,
startLoc = this.state.startLoc;
var name = this.jsxParseIdentifier();
if (!this.eat(_tokentype.types.colon)) return name;
if (!this.eat(_tokenizerTypes.types.colon)) return name;

@@ -251,6 +250,6 @@ var node = this.startNodeAt(startPos, startLoc);

pp.jsxParseElementName = function () {
var startPos = this.start,
startLoc = this.startLoc;
var startPos = this.state.start,
startLoc = this.state.startLoc;
var node = this.jsxParseNamespacedName();
while (this.eat(_tokentype.types.dot)) {
while (this.eat(_tokenizerTypes.types.dot)) {
var newNode = this.startNodeAt(startPos, startLoc);

@@ -267,4 +266,4 @@ newNode.object = node;

pp.jsxParseAttributeValue = function () {
switch (this.type) {
case _tokentype.types.braceL:
switch (this.state.type) {
case _tokenizerTypes.types.braceL:
var node = this.jsxParseExpressionContainer();

@@ -277,8 +276,8 @@ if (node.expression.type === "JSXEmptyExpression") {

case _tokentype.types.jsxTagStart:
case _tokentype.types.string:
case _tokenizerTypes.types.jsxTagStart:
case _tokenizerTypes.types.string:
return this.parseExprAtom();
default:
this.raise(this.start, "JSX value should be either an expression or a quoted JSX text");
this.raise(this.state.start, "JSX value should be either an expression or a quoted JSX text");
}

@@ -292,9 +291,9 @@ };

pp.jsxParseEmptyExpression = function () {
var tmp = this.start;
this.start = this.lastTokEnd;
this.lastTokEnd = tmp;
var tmp = this.state.start;
this.state.start = this.state.lastTokEnd;
this.state.lastTokEnd = tmp;
tmp = this.startLoc;
this.startLoc = this.lastTokEndLoc;
this.lastTokEndLoc = tmp;
tmp = this.state.startLoc;
this.state.startLoc = this.state.lastTokEndLoc;
this.state.lastTokEndLoc = tmp;

@@ -309,3 +308,3 @@ return this.finishNode(this.startNode(), "JSXEmptyExpression");

this.next();
if (this.type === _tokentype.types.braceR) {
if (this.match(_tokenizerTypes.types.braceR)) {
node.expression = this.jsxParseEmptyExpression();

@@ -315,3 +314,3 @@ } else {

}
this.expect(_tokentype.types.braceR);
this.expect(_tokenizerTypes.types.braceR);
return this.finishNode(node, "JSXExpressionContainer");

@@ -324,10 +323,10 @@ };

var node = this.startNode();
if (this.eat(_tokentype.types.braceL)) {
this.expect(_tokentype.types.ellipsis);
if (this.eat(_tokenizerTypes.types.braceL)) {
this.expect(_tokenizerTypes.types.ellipsis);
node.argument = this.parseMaybeAssign();
this.expect(_tokentype.types.braceR);
this.expect(_tokenizerTypes.types.braceR);
return this.finishNode(node, "JSXSpreadAttribute");
}
node.name = this.jsxParseNamespacedName();
node.value = this.eat(_tokentype.types.eq) ? this.jsxParseAttributeValue() : null;
node.value = this.eat(_tokenizerTypes.types.eq) ? this.jsxParseAttributeValue() : null;
return this.finishNode(node, "JSXAttribute");

@@ -342,7 +341,7 @@ };

node.name = this.jsxParseElementName();
while (this.type !== _tokentype.types.slash && this.type !== _tokentype.types.jsxTagEnd) {
while (!this.match(_tokenizerTypes.types.slash) && !this.match(_tokenizerTypes.types.jsxTagEnd)) {
node.attributes.push(this.jsxParseAttribute());
}
node.selfClosing = this.eat(_tokentype.types.slash);
this.expect(_tokentype.types.jsxTagEnd);
node.selfClosing = this.eat(_tokenizerTypes.types.slash);
this.expect(_tokenizerTypes.types.jsxTagEnd);
return this.finishNode(node, "JSXOpeningElement");

@@ -356,3 +355,3 @@ };

node.name = this.jsxParseElementName();
this.expect(_tokentype.types.jsxTagEnd);
this.expect(_tokenizerTypes.types.jsxTagEnd);
return this.finishNode(node, "JSXClosingElement");

@@ -372,7 +371,7 @@ };

contents: for (;;) {
switch (this.type) {
case _tokentype.types.jsxTagStart:
startPos = this.start;startLoc = this.startLoc;
switch (this.state.type) {
case _tokenizerTypes.types.jsxTagStart:
startPos = this.state.start;startLoc = this.state.startLoc;
this.next();
if (this.eat(_tokentype.types.slash)) {
if (this.eat(_tokenizerTypes.types.slash)) {
closingElement = this.jsxParseClosingElementAt(startPos, startLoc);

@@ -384,7 +383,7 @@ break contents;

case _tokentype.types.jsxText:
case _tokenizerTypes.types.jsxText:
children.push(this.parseExprAtom());
break;
case _tokentype.types.braceL:
case _tokenizerTypes.types.braceL:
children.push(this.jsxParseExpressionContainer());

@@ -397,2 +396,3 @@ break;

}
if (getQualifiedJSXName(closingElement.name) !== getQualifiedJSXName(openingElement.name)) {

@@ -406,4 +406,4 @@ this.raise(closingElement.start, "Expected corresponding JSX closing tag for <" + getQualifiedJSXName(openingElement.name) + ">");

node.children = children;
if (this.type === _tokentype.types.relational && this.value === "<") {
this.raise(this.start, "Adjacent JSX elements must be wrapped in an enclosing tag");
if (this.match(_tokenizerTypes.types.relational) && this.state.value === "<") {
this.raise(this.state.start, "Adjacent JSX elements must be wrapped in an enclosing tag");
}

@@ -416,4 +416,4 @@ return this.finishNode(node, "JSXElement");

pp.jsxParseElement = function () {
var startPos = this.start,
startLoc = this.startLoc;
var startPos = this.state.start,
startLoc = this.state.startLoc;
this.next();

@@ -426,3 +426,12 @@ return this.jsxParseElementAt(startPos, startLoc);

return function (refShortHandDefaultPos) {
if (this.type === _tokentype.types.jsxText) return this.parseLiteral(this.value);else if (this.type === _tokentype.types.jsxTagStart) return this.jsxParseElement();else return inner.call(this, refShortHandDefaultPos);
if (this.match(_tokenizerTypes.types.jsxText)) {
var node = this.parseLiteral(this.state.value);
// https://github.com/babel/babel/issues/2078
node.rawValue = null;
return node;
} else if (this.match(_tokenizerTypes.types.jsxTagStart)) {
return this.jsxParseElement();
} else {
return inner.call(this, refShortHandDefaultPos);
}
};
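The parseExprAtom override above is the one behavioral change in this plugin beyond the mechanical renames: JSXText literals produced by parseLiteral now have their rawValue nulled out, per the linked babel issue. A hedged illustration of where that shows up, with the plugins option shape assumed as in the earlier sketch:

// hypothetical illustration; option shape assumed, not taken from this diff
var ast = require("babylon").parse("<div>hello</div>", { plugins: { jsx: true } });
// the JSXText node produced for "hello" now carries rawValue === null,
// which is the workaround referenced in the issue link above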

@@ -435,19 +444,26 @@ });

if (context === _tokencontext.types.j_expr) return this.jsxReadToken();
if (context === _tokenizerContext.types.j_expr) {
return this.jsxReadToken();
}
if (context === _tokencontext.types.j_oTag || context === _tokencontext.types.j_cTag) {
if (_identifier.isIdentifierStart(code)) return this.jsxReadWord();
if (context === _tokenizerContext.types.j_oTag || context === _tokenizerContext.types.j_cTag) {
if (_utilIdentifier.isIdentifierStart(code)) {
return this.jsxReadWord();
}
if (code === 62) {
++this.pos;
return this.finishToken(_tokentype.types.jsxTagEnd);
++this.state.pos;
return this.finishToken(_tokenizerTypes.types.jsxTagEnd);
}
if ((code === 34 || code === 39) && context === _tokencontext.types.j_oTag) return this.jsxReadString(code);
if ((code === 34 || code === 39) && context === _tokenizerContext.types.j_oTag) {
return this.jsxReadString(code);
}
}
if (code === 60 && this.exprAllowed) {
++this.pos;
return this.finishToken(_tokentype.types.jsxTagStart);
if (code === 60 && this.state.exprAllowed) {
++this.state.pos;
return this.finishToken(_tokenizerTypes.types.jsxTagStart);
}
return inner.call(this, code);

@@ -459,10 +475,16 @@ };

return function (prevType) {
if (this.type === _tokentype.types.braceL) {
if (this.match(_tokenizerTypes.types.braceL)) {
var curContext = this.curContext();
if (curContext === _tokencontext.types.j_oTag) this.context.push(_tokencontext.types.b_expr);else if (curContext === _tokencontext.types.j_expr) this.context.push(_tokencontext.types.b_tmpl);else inner.call(this, prevType);
this.exprAllowed = true;
} else if (this.type === _tokentype.types.slash && prevType === _tokentype.types.jsxTagStart) {
this.context.length -= 2; // do not consider JSX expr -> JSX open tag -> ... anymore
this.context.push(_tokencontext.types.j_cTag); // reconsider as closing tag context
this.exprAllowed = false;
if (curContext === _tokenizerContext.types.j_oTag) {
this.state.context.push(_tokenizerContext.types.b_expr);
} else if (curContext === _tokenizerContext.types.j_expr) {
this.state.context.push(_tokenizerContext.types.b_tmpl);
} else {
inner.call(this, prevType);
}
this.state.exprAllowed = true;
} else if (this.match(_tokenizerTypes.types.slash) && prevType === _tokenizerTypes.types.jsxTagStart) {
this.state.context.length -= 2; // do not consider JSX expr -> JSX open tag -> ... anymore
this.state.context.push(_tokenizerContext.types.j_cTag); // reconsider as closing tag context
this.state.exprAllowed = false;
} else {

@@ -469,0 +491,0 @@ return inner.call(this, prevType);

{
"name": "babylon",
"version": "5.8.5",
"version": "5.8.6",
"description": "",

@@ -5,0 +5,0 @@ "author": "Sebastian McKenzie <sebmck@gmail.com>",
