Use prettier (#600)
parent a95f55c468
commit 5180ecdca4
@@ -3,4 +3,5 @@ build
coverage
lib
node_modules
test
test/expressions
test/fixtures
@@ -1,6 +1,12 @@
{
"root": true,
"extends": "babel",
"plugins": [
"prettier"
],
"rules": {
"prettier/prettier": ["error", { "trailingComma": "all" }]
},
"env": {
"node": true
}
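With eslint-plugin-prettier wired in as above, any formatting drift is reported as a regular `prettier/prettier` lint error. A minimal sketch of the same option through prettier's Node API (illustrative only; assumes the prettier ~1.x pinned in package.json, whose default parser is babylon):

const prettier = require("prettier");

// Reprint a snippet with the same option the lint rule passes to prettier.
const formatted = prettier.format("const x=parse( input ,options )", {
  trailingComma: "all",
});
console.log(formatted); // prints the prettier-normalized source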
@@ -3,20 +3,23 @@
// Which Unicode version should be used?
const version = "9.0.0";

const start = require("unicode-" + version + "/Binary_Property/ID_Start/code-points.js")
.filter(function(ch) { return ch > 0x7f; });
const start = require("unicode-" +
version +
"/Binary_Property/ID_Start/code-points.js").filter(function(ch) {
return ch > 0x7f;
});
let last = -1;
const cont = [0x200c, 0x200d].concat(
require("unicode-" + version + "/Binary_Property/ID_Continue/code-points.js")
.filter(function(ch) {
require("unicode-" +
version +
"/Binary_Property/ID_Continue/code-points.js").filter(function(ch) {
return ch > 0x7f && search(start, ch, last + 1) == -1;
})
);
}),
);

function search(arr, ch, starting) {
for (let i = starting; arr[i] <= ch && i < arr.length; last = i++)
if (arr[i] === ch)
return i;
if (arr[i] === ch) return i;
return -1;
}
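For reference, the reformatted search() keeps its original single-pass behaviour: it walks the sorted code-point array from `starting`, remembers the last index it visited in the shared `last` variable, and returns the index of a match or -1. A standalone copy with hypothetical inputs, just to illustrate that contract:

let last = -1;
function search(arr, ch, starting) {
  for (let i = starting; arr[i] <= ch && i < arr.length; last = i++)
    if (arr[i] === ch) return i;
  return -1;
}

console.assert(search([0x80, 0x9f, 0xa1], 0x9f, 0) === 1);  // found at index 1
console.assert(search([0x80, 0x9f, 0xa1], 0xa0, 0) === -1); // not present
console.assert(last === 1); // `last` records the last index whose code point was <= 0xa0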
@@ -56,7 +59,13 @@ function generate(chars) {
const startData = generate(start);
const contData = generate(cont);

console.log("let nonASCIIidentifierStartChars = \"" + startData.nonASCII + "\";");
console.log("let nonASCIIidentifierChars = \"" + contData.nonASCII + "\";");
console.log("const astralIdentifierStartCodes = " + JSON.stringify(startData.astral) + ";");
console.log("const astralIdentifierCodes = " + JSON.stringify(contData.astral) + ";");
console.log('let nonASCIIidentifierStartChars = "' + startData.nonASCII + '";');
console.log('let nonASCIIidentifierChars = "' + contData.nonASCII + '";');
console.log(
"const astralIdentifierStartCodes = " +
JSON.stringify(startData.astral) +
";",
);
console.log(
"const astralIdentifierCodes = " + JSON.stringify(contData.astral) + ";",
);
package.json (14 lines changed)
@@ -32,10 +32,14 @@
"chalk": "^1.1.3",
"cross-env": "^5.0.0",
"eslint": "^4.0.0",
"eslint-config-babel": "^7.0.0",
"eslint-config-babel": "^7.0.1",
"eslint-plugin-flowtype": "^2.34.0",
"eslint-plugin-prettier": "^2.1.2",
"flow-bin": "^0.47.0",
"husky": "^0.14.1",
"lint-staged": "^4.0.0",
"nyc": "^11.0.2",
"prettier": "^1.5.2",
"rimraf": "^2.5.4",
"rollup": "^0.42.0",
"rollup-plugin-babel": "3.0.0-alpha.12",
@@ -51,7 +55,7 @@
"changelog": "git log `git describe --tags --abbrev=0`..HEAD --pretty=format:' * %s (%an)' | grep -v 'Merge pull request'",
"clean": "rimraf lib",
"flow": "flow",
"lint": "eslint src bin",
"lint": "eslint src bin test",
"prepublish": "cross-env BABEL_ENV=production yarn run build",
"preversion": "yarn run test && npm run changelog",
"test": "yarn run lint && yarn run flow && yarn run build -- -m && yarn run test-only",
@@ -86,5 +90,11 @@
"Tag: New Feature": ":rocket: New Feature",
"Tag: Polish": ":nail_care: Polish"
}
},
"lint-staged": {
"*.js": [
"eslint --format=codeframe --fix",
"git add"
]
}
}
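The new lint-staged block runs `eslint --fix` (and, through eslint-plugin-prettier, prettier itself) on staged *.js files, then re-stages them. Roughly the same thing through the Node API of the eslint ^4.0.0 pinned above, sketched here for illustration:

const { CLIEngine } = require("eslint");

// Lint and auto-fix the same directories the "lint" script covers.
const cli = new CLIEngine({ fix: true });
const report = cli.executeOnFiles(["src", "bin", "test"]);
CLIEngine.outputFixes(report); // writes the fixed files back to disk
console.log(`${report.errorCount} remaining error(s)`);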
scripts/yarn-install.js (new file, 13 lines)
@@ -0,0 +1,13 @@
"use strict";

const exec = require("child_process").exec;

const runIfYarn = fn => {
exec("yarn -V", error => {
if (error === null) fn();
});
};
runIfYarn(() => {
console.log("`package.json` was changed. Running yarn...🐈");
exec("yarn");
});
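scripts/yarn-install.js only runs `yarn` when the yarn binary is actually available: `exec("yarn -V")` calls back with `error === null` only if the command succeeded. The same guard works for any tool; a small hypothetical variant for illustration (not part of the repo):

const exec = require("child_process").exec;

// Run `fn` only if `cmd` exits successfully.
const runIfAvailable = (cmd, fn) => {
  exec(cmd, error => {
    if (error === null) fn();
  });
};

runIfAvailable("git --version", () => console.log("git is installed"));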
src/index.js (30 lines changed)
@@ -37,46 +37,54 @@ export function parseExpression(input: string, options?: Options): Expression {
return parser.getExpression();
}

export { tokTypes };

function getParser(options: ?Options, input: string): Parser {
const cls = options && options.plugins ? getParserClass(options.plugins) : Parser;
const cls =
options && options.plugins ? getParserClass(options.plugins) : Parser;
return new cls(options, input);
}

const parserClassCache: { [key: string]: Class<Parser> } = {};

/** Get a Parser class with plugins applied. */
function getParserClass(pluginsFromOptions: $ReadOnlyArray<string>): Class<Parser> {

if (pluginsFromOptions.indexOf("decorators") >= 0 && pluginsFromOptions.indexOf("decorators2") >= 0) {
function getParserClass(
pluginsFromOptions: $ReadOnlyArray<string>,
): Class<Parser> {
if (
pluginsFromOptions.indexOf("decorators") >= 0 &&
pluginsFromOptions.indexOf("decorators2") >= 0
) {
throw new Error("Cannot use decorators and decorators2 plugin together");
}

// Filter out just the plugins that have an actual mixin associated with them.
let pluginList = pluginsFromOptions.filter((p) =>
p === "estree" || p === "flow" || p === "jsx" || p === "typescript");
let pluginList = pluginsFromOptions.filter(
p => p === "estree" || p === "flow" || p === "jsx" || p === "typescript",
);

if (pluginList.indexOf("flow") >= 0) {
// ensure flow plugin loads last
pluginList = pluginList.filter((plugin) => plugin !== "flow");
pluginList = pluginList.filter(plugin => plugin !== "flow");
pluginList.push("flow");
}

if (pluginList.indexOf("flow") >= 0 && pluginList.indexOf("typescript") >= 0) {
if (
pluginList.indexOf("flow") >= 0 &&
pluginList.indexOf("typescript") >= 0
) {
throw new Error("Cannot combine flow and typescript plugins.");
}

if (pluginList.indexOf("typescript") >= 0) {
// ensure typescript plugin loads last
pluginList = pluginList.filter((plugin) => plugin !== "typescript");
pluginList = pluginList.filter(plugin => plugin !== "typescript");
pluginList.push("typescript");
}

if (pluginList.indexOf("estree") >= 0) {
// ensure estree plugin loads first
pluginList = pluginList.filter((plugin) => plugin !== "estree");
pluginList = pluginList.filter(plugin => plugin !== "estree");
pluginList.unshift("estree");
}
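getParserClass() backs the public parse()/parseExpression() entry points: callers just list plugin names, and the ordering rules above (estree first, flow/typescript last, flow+typescript and decorators+decorators2 rejected) are applied internally. A quick usage sketch:

const babylon = require("babylon");

// Plugin order in the array does not matter; getParserClass() reorders estree/flow/typescript.
const ast = babylon.parse("const f = (x: number) => <div>{x}</div>;", {
  plugins: ["jsx", "flow"],
});
console.log(ast.program.body[0].type); // "VariableDeclaration"

// Combining "flow" and "typescript" throws, as does "decorators" with "decorators2":
// babylon.parse("x;", { plugins: ["flow", "typescript"] }); // -> Error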
@@ -4,16 +4,16 @@
// the parser process. These options are recognized:

export type Options = {
sourceType: "script" | "module";
sourceFilename?: string;
startLine: number;
allowReturnOutsideFunction: boolean;
allowImportExportEverywhere: boolean;
allowSuperOutsideMethod: boolean;
plugins: $ReadOnlyArray<string>;
strictMode: ?boolean;
ranges: boolean;
tokens: boolean;
sourceType: "script" | "module",
sourceFilename?: string,
startLine: number,
allowReturnOutsideFunction: boolean,
allowImportExportEverywhere: boolean,
allowSuperOutsideMethod: boolean,
plugins: $ReadOnlyArray<string>,
strictMode: ?boolean,
ranges: boolean,
tokens: boolean,
};

export const defaultOptions: Options = {
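Only the Flow separators change here (`;` becomes `,` inside the object type, which is how prettier prints it); the option names themselves are untouched and are still passed straight to parse(). For example:

const babylon = require("babylon");

// sourceType and plugins are the most commonly set Options fields.
const ast = babylon.parse("import x from 'y'; export default x;", {
  sourceType: "module",
  plugins: ["flow"],
});
console.log(ast.program.sourceType); // "module"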
@@ -66,7 +66,11 @@ export default class CommentsParser extends BaseParser {
}
} else {
const lastInStack = last(stack);
if (stack.length > 0 && lastInStack.trailingComments && lastInStack.trailingComments[0].start >= node.end) {
if (
stack.length > 0 &&
lastInStack.trailingComments &&
lastInStack.trailingComments[0].start >= node.end
) {
trailingComments = lastInStack.trailingComments;
lastInStack.trailingComments = null;
}
@@ -93,7 +97,10 @@ export default class CommentsParser extends BaseParser {
if (lastComment.start >= node.start) {
if (this.state.commentPreviousNode) {
for (j = 0; j < this.state.leadingComments.length; j++) {
if (this.state.leadingComments[j].end < this.state.commentPreviousNode.end) {
if (
this.state.leadingComments[j].end <
this.state.commentPreviousNode.end
) {
this.state.leadingComments.splice(j, 1);
j--;
}
@@ -105,10 +112,18 @@ export default class CommentsParser extends BaseParser {
}
}
}
} else if (node.type === "CallExpression" && node.arguments && node.arguments.length) {
} else if (
node.type === "CallExpression" &&
node.arguments &&
node.arguments.length
) {
const lastArg = last(node.arguments);

if (lastArg && lastComment.start >= lastArg.start && lastComment.end <= node.end) {
if (
lastArg &&
lastComment.start >= lastArg.start &&
lastComment.end <= node.end
) {
if (this.state.commentPreviousNode) {
if (this.state.leadingComments.length > 0) {
lastArg.trailingComments = this.state.leadingComments;
@@ -121,7 +136,10 @@ export default class CommentsParser extends BaseParser {

if (lastChild) {
if (lastChild.leadingComments) {
if (lastChild !== node && last(lastChild.leadingComments).end <= node.start) {
if (
lastChild !== node &&
last(lastChild.leadingComments).end <= node.start
) {
node.leadingComments = lastChild.leadingComments;
lastChild.leadingComments = null;
} else {
@@ -140,7 +158,10 @@ export default class CommentsParser extends BaseParser {
if (last(this.state.leadingComments).end <= node.start) {
if (this.state.commentPreviousNode) {
for (j = 0; j < this.state.leadingComments.length; j++) {
if (this.state.leadingComments[j].end < this.state.commentPreviousNode.end) {
if (
this.state.leadingComments[j].end <
this.state.commentPreviousNode.end
) {
this.state.leadingComments.splice(j, 1);
j--;
}
@@ -173,7 +194,8 @@ export default class CommentsParser extends BaseParser {
// result in an empty array, and if so, the array must be
// deleted.
const leadingComments = this.state.leadingComments.slice(0, i);
node.leadingComments = leadingComments.length === 0 ? null : leadingComments;
node.leadingComments =
leadingComments.length === 0 ? null : leadingComments;

// Similarly, trailing comments are attached later. The variable
// must be reset to null if there are no trailing comments.
@@ -187,7 +209,11 @@ export default class CommentsParser extends BaseParser {
this.state.commentPreviousNode = node;

if (trailingComments) {
if (trailingComments.length && trailingComments[0].start >= node.start && last(trailingComments).end <= node.end) {
if (
trailingComments.length &&
trailingComments[0].start >= node.start &&
last(trailingComments).end <= node.end
) {
node.innerComments = trailingComments;
} else {
node.trailingComments = trailingComments;
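The logic above decides whether a comment ends up as leadingComments, trailingComments, or innerComments on a node; the reformatting does not change those decisions. A small sketch of the observable result (minimal example; exact attachment can vary with node shape):

const babylon = require("babylon");

const ast = babylon.parse("// license\nfoo();");
// The comment before the statement is attached by the routine above.
console.log(ast.program.body[0].leadingComments[0].value); // " license"
// trailingComments and innerComments are attached by the same processComment logic.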
File diff suppressed because it is too large
@@ -5,7 +5,9 @@ import type { File } from "../types";
import { getOptions } from "../options";
import StatementParser from "./statement";

export const plugins: { [name: string]: (superClass: Class<Parser>) => Class<Parser> } = {};
export const plugins: {
[name: string]: (superClass: Class<Parser>) => Class<Parser>,
} = {};

export default class Parser extends StatementParser {
constructor(options: ?Options, input: string) {
@@ -19,7 +21,11 @@ export default class Parser extends StatementParser {
this.filename = options.sourceFilename;

// If enabled, skip leading hashbang line.
if (this.state.pos === 0 && this.input[0] === "#" && this.input[1] === "!") {
if (
this.state.pos === 0 &&
this.input[0] === "#" &&
this.input[1] === "!"
) {
this.skipLineComment(2);
}
}
@@ -32,7 +38,9 @@ export default class Parser extends StatementParser {
}
}

function pluginsMap(pluginList: $ReadOnlyArray<string>): { [key: string]: boolean } {
function pluginsMap(
pluginList: $ReadOnlyArray<string>,
): { [key: string]: boolean } {
const pluginMap = {};
for (const name of pluginList) {
pluginMap[name] = true;

@@ -14,7 +14,9 @@ export default class LocationParser extends CommentsParser {
const loc = getLineInfo(this.input, pos);
message += ` (${loc.line}:${loc.column})`;
// $FlowIgnore
const err: SyntaxError & { pos: number, loc: Position } = new SyntaxError(message);
const err: SyntaxError & { pos: number, loc: Position } = new SyntaxError(
message,
);
err.pos = pos;
err.loc = loc;
throw err;
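The raise() change is formatting-only: syntax errors still carry the extra pos and loc fields attached right after the SyntaxError is constructed. For example:

const babylon = require("babylon");

try {
  babylon.parse("const = 1;");
} catch (err) {
  // err is a SyntaxError with parser-specific position info attached by raise().
  console.log(err.message);                         // e.g. "Unexpected token (1:6)"
  console.log(err.pos, err.loc.line, err.loc.column); // offset plus line/column
}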
@ -1,28 +1,51 @@
|
||||
// @flow
|
||||
|
||||
import { types as tt, type TokenType } from "../tokenizer/types";
|
||||
import type { TSParameterProperty, Decorator, Expression, Identifier, Node, ObjectExpression,
|
||||
ObjectPattern, Pattern, RestElement, SpreadElement } from "../types";
|
||||
import type {
|
||||
TSParameterProperty,
|
||||
Decorator,
|
||||
Expression,
|
||||
Identifier,
|
||||
Node,
|
||||
ObjectExpression,
|
||||
ObjectPattern,
|
||||
Pattern,
|
||||
RestElement,
|
||||
SpreadElement,
|
||||
} from "../types";
|
||||
import type { Pos, Position } from "../util/location";
|
||||
import { NodeUtils } from "./node";
|
||||
|
||||
export default class LValParser extends NodeUtils {
|
||||
// Forward-declaration: defined in expression.js
|
||||
+checkReservedWord: (word: string, startLoc: number, checkKeywords: boolean, isBinding: boolean) => void;
|
||||
+checkReservedWord: (
|
||||
word: string,
|
||||
startLoc: number,
|
||||
checkKeywords: boolean,
|
||||
isBinding: boolean,
|
||||
) => void;
|
||||
+parseIdentifier: (liberal?: boolean) => Identifier;
|
||||
+parseMaybeAssign: (
|
||||
noIn?: ?boolean,
|
||||
refShorthandDefaultPos?: ?Pos,
|
||||
afterLeftParse?: Function,
|
||||
refNeedsArrowPos?: ?Pos) => Expression;
|
||||
+parseObj: <T : ObjectPattern | ObjectExpression>(isPattern: boolean, refShorthandDefaultPos?: ?Pos) => T;
|
||||
refNeedsArrowPos?: ?Pos,
|
||||
) => Expression;
|
||||
+parseObj: <T: ObjectPattern | ObjectExpression>(
|
||||
isPattern: boolean,
|
||||
refShorthandDefaultPos?: ?Pos,
|
||||
) => T;
|
||||
// Forward-declaration: defined in statement.js
|
||||
+parseDecorator: () => Decorator;
|
||||
|
||||
// Convert existing expression atom to assignable pattern
|
||||
// if possible.
|
||||
|
||||
toAssignable(node: Node, isBinding: ?boolean, contextDescription: string): Node {
|
||||
toAssignable(
|
||||
node: Node,
|
||||
isBinding: ?boolean,
|
||||
contextDescription: string,
|
||||
): Node {
|
||||
if (node) {
|
||||
switch (node.type) {
|
||||
case "Identifier":
|
||||
@ -37,12 +60,22 @@ export default class LValParser extends NodeUtils {
|
||||
for (const prop of node.properties) {
|
||||
if (prop.type === "ObjectMethod") {
|
||||
if (prop.kind === "get" || prop.kind === "set") {
|
||||
this.raise(prop.key.start, "Object pattern can't contain getter or setter");
|
||||
this.raise(
|
||||
prop.key.start,
|
||||
"Object pattern can't contain getter or setter",
|
||||
);
|
||||
} else {
|
||||
this.raise(prop.key.start, "Object pattern can't contain methods");
|
||||
this.raise(
|
||||
prop.key.start,
|
||||
"Object pattern can't contain methods",
|
||||
);
|
||||
}
|
||||
} else {
|
||||
this.toAssignable(prop, isBinding, "object destructuring pattern");
|
||||
this.toAssignable(
|
||||
prop,
|
||||
isBinding,
|
||||
"object destructuring pattern",
|
||||
);
|
||||
}
|
||||
}
|
||||
break;
|
||||
@ -67,7 +100,10 @@ export default class LValParser extends NodeUtils {
|
||||
node.type = "AssignmentPattern";
|
||||
delete node.operator;
|
||||
} else {
|
||||
this.raise(node.left.end, "Only '=' operator can be used for specifying default value.");
|
||||
this.raise(
|
||||
node.left.end,
|
||||
"Only '=' operator can be used for specifying default value.",
|
||||
);
|
||||
}
|
||||
break;
|
||||
|
||||
@ -75,8 +111,11 @@ export default class LValParser extends NodeUtils {
|
||||
if (!isBinding) break;
|
||||
|
||||
default: {
|
||||
const message = "Invalid left-hand side" +
|
||||
(contextDescription ? " in " + contextDescription : /* istanbul ignore next */ "expression");
|
||||
const message =
|
||||
"Invalid left-hand side" +
|
||||
(contextDescription
|
||||
? " in " + contextDescription
|
||||
: /* istanbul ignore next */ "expression");
|
||||
this.raise(node.start, message);
|
||||
}
|
||||
}
|
||||
@ -87,7 +126,10 @@ export default class LValParser extends NodeUtils {
|
||||
// Convert list of expression atoms to binding list.
|
||||
|
||||
toAssignableList(
|
||||
exprList: Expression[], isBinding: ?boolean, contextDescription: string): $ReadOnlyArray<Pattern> {
|
||||
exprList: Expression[],
|
||||
isBinding: ?boolean,
|
||||
contextDescription: string,
|
||||
): $ReadOnlyArray<Pattern> {
|
||||
let end = exprList.length;
|
||||
if (end) {
|
||||
const last = exprList[end - 1];
|
||||
@ -110,7 +152,10 @@ export default class LValParser extends NodeUtils {
|
||||
for (let i = 0; i < end; i++) {
|
||||
const elt = exprList[i];
|
||||
if (elt && elt.type === "SpreadElement")
|
||||
this.raise(elt.start, "The rest element has to be the last element when destructuring");
|
||||
this.raise(
|
||||
elt.start,
|
||||
"The rest element has to be the last element when destructuring",
|
||||
);
|
||||
if (elt) this.toAssignable(elt, isBinding, contextDescription);
|
||||
}
|
||||
return exprList;
|
||||
@ -118,13 +163,15 @@ export default class LValParser extends NodeUtils {
|
||||
|
||||
// Convert list of expression atoms to a list of
|
||||
|
||||
toReferencedList(exprList: $ReadOnlyArray<?Expression>): $ReadOnlyArray<?Expression> {
|
||||
toReferencedList(
|
||||
exprList: $ReadOnlyArray<?Expression>,
|
||||
): $ReadOnlyArray<?Expression> {
|
||||
return exprList;
|
||||
}
|
||||
|
||||
// Parses spread element.
|
||||
|
||||
parseSpread<T : RestElement | SpreadElement>(refShorthandDefaultPos: ?Pos): T {
|
||||
parseSpread<T: RestElement | SpreadElement>(refShorthandDefaultPos: ?Pos): T {
|
||||
const node = this.startNode();
|
||||
this.next();
|
||||
node.argument = this.parseMaybeAssign(false, refShorthandDefaultPos);
|
||||
@ -139,7 +186,9 @@ export default class LValParser extends NodeUtils {
|
||||
}
|
||||
|
||||
shouldAllowYieldIdentifier(): boolean {
|
||||
return this.match(tt._yield) && !this.state.strict && !this.state.inGenerator;
|
||||
return (
|
||||
this.match(tt._yield) && !this.state.strict && !this.state.inGenerator
|
||||
);
|
||||
}
|
||||
|
||||
parseBindingIdentifier(): Identifier {
|
||||
@ -170,7 +219,7 @@ export default class LValParser extends NodeUtils {
|
||||
parseBindingList(
|
||||
close: TokenType,
|
||||
allowEmpty?: boolean,
|
||||
allowModifiers?: boolean
|
||||
allowModifiers?: boolean,
|
||||
): $ReadOnlyArray<Pattern | TSParameterProperty> {
|
||||
const elts: Array<Pattern | TSParameterProperty> = [];
|
||||
let first = true;
|
||||
@ -192,7 +241,10 @@ export default class LValParser extends NodeUtils {
|
||||
} else {
|
||||
const decorators = [];
|
||||
if (this.match(tt.at) && this.hasPlugin("decorators2")) {
|
||||
this.raise(this.state.start, "Stage 2 decorators cannot be used to decorate parameters");
|
||||
this.raise(
|
||||
this.state.start,
|
||||
"Stage 2 decorators cannot be used to decorate parameters",
|
||||
);
|
||||
}
|
||||
while (this.match(tt.at)) {
|
||||
decorators.push(this.parseDecorator());
|
||||
@ -203,7 +255,10 @@ export default class LValParser extends NodeUtils {
|
||||
return elts;
|
||||
}
|
||||
|
||||
parseAssignableListItem(allowModifiers: ?boolean, decorators: Decorator[]): Pattern | TSParameterProperty {
|
||||
parseAssignableListItem(
|
||||
allowModifiers: ?boolean,
|
||||
decorators: Decorator[],
|
||||
): Pattern | TSParameterProperty {
|
||||
const left = this.parseMaybeDefault();
|
||||
this.parseAssignableListItemTypes(left);
|
||||
const elt = this.parseMaybeDefault(left.start, left.loc.start, left);
|
||||
@ -219,7 +274,11 @@ export default class LValParser extends NodeUtils {
|
||||
|
||||
// Parses assignment pattern around given atom if possible.
|
||||
|
||||
parseMaybeDefault(startPos?: ?number, startLoc?: ?Position, left?: ?Pattern): Pattern {
|
||||
parseMaybeDefault(
|
||||
startPos?: ?number,
|
||||
startLoc?: ?Position,
|
||||
left?: ?Pattern,
|
||||
): Pattern {
|
||||
startLoc = startLoc || this.state.startLoc;
|
||||
startPos = startPos || this.state.start;
|
||||
left = left || this.parseBindingAtom();
|
||||
@ -238,7 +297,8 @@ export default class LValParser extends NodeUtils {
|
||||
expr: Expression,
|
||||
isBinding: ?boolean,
|
||||
checkClashes: ?{ [key: string]: boolean },
|
||||
contextDescription: string): void {
|
||||
contextDescription: string,
|
||||
): void {
|
||||
switch (expr.type) {
|
||||
case "PrivateName":
|
||||
case "Identifier":
|
||||
@ -273,18 +333,34 @@ export default class LValParser extends NodeUtils {
|
||||
case "ObjectPattern":
|
||||
for (let prop of expr.properties) {
|
||||
if (prop.type === "ObjectProperty") prop = prop.value;
|
||||
this.checkLVal(prop, isBinding, checkClashes, "object destructuring pattern");
|
||||
this.checkLVal(
|
||||
prop,
|
||||
isBinding,
|
||||
checkClashes,
|
||||
"object destructuring pattern",
|
||||
);
|
||||
}
|
||||
break;
|
||||
|
||||
case "ArrayPattern":
|
||||
for (const elem of expr.elements) {
|
||||
if (elem) this.checkLVal(elem, isBinding, checkClashes, "array destructuring pattern");
|
||||
if (elem)
|
||||
this.checkLVal(
|
||||
elem,
|
||||
isBinding,
|
||||
checkClashes,
|
||||
"array destructuring pattern",
|
||||
);
|
||||
}
|
||||
break;
|
||||
|
||||
case "AssignmentPattern":
|
||||
this.checkLVal(expr.left, isBinding, checkClashes, "assignment pattern");
|
||||
this.checkLVal(
|
||||
expr.left,
|
||||
isBinding,
|
||||
checkClashes,
|
||||
"assignment pattern",
|
||||
);
|
||||
break;
|
||||
|
||||
case "RestElement":
|
||||
@ -292,9 +368,14 @@ export default class LValParser extends NodeUtils {
|
||||
break;
|
||||
|
||||
default: {
|
||||
const message = (isBinding ? /* istanbul ignore next */ "Binding invalid" : "Invalid") +
|
||||
const message =
|
||||
(isBinding
|
||||
? /* istanbul ignore next */ "Binding invalid"
|
||||
: "Invalid") +
|
||||
" left-hand side" +
|
||||
(contextDescription ? " in " + contextDescription : /* istanbul ignore next */ "expression");
|
||||
(contextDescription
|
||||
? " in " + contextDescription
|
||||
: /* istanbul ignore next */ "expression");
|
||||
this.raise(expr.start, message);
|
||||
}
|
||||
}
|
||||
|
||||
@ -31,7 +31,7 @@ class Node implements NodeBase {
|
||||
|
||||
__clone(): this {
|
||||
// $FlowIgnore
|
||||
const node2: any = new Node;
|
||||
const node2: any = new Node();
|
||||
for (const key in this) {
|
||||
// Do not clone comments that are already attached to the node
|
||||
if (commentKeys.indexOf(key) < 0) {
|
||||
@ -45,30 +45,40 @@ class Node implements NodeBase {
|
||||
}
|
||||
|
||||
export class NodeUtils extends UtilParser {
|
||||
startNode<T : NodeType>(): T {
|
||||
startNode<T: NodeType>(): T {
|
||||
// $FlowIgnore
|
||||
return new Node(this, this.state.start, this.state.startLoc);
|
||||
}
|
||||
|
||||
startNodeAt<T : NodeType>(pos: number, loc: Position): T {
|
||||
startNodeAt<T: NodeType>(pos: number, loc: Position): T {
|
||||
// $FlowIgnore
|
||||
return new Node(this, pos, loc);
|
||||
}
|
||||
|
||||
/** Start a new node with a previous node's location. */
|
||||
startNodeAtNode<T : NodeType>(type: NodeType): T {
|
||||
startNodeAtNode<T: NodeType>(type: NodeType): T {
|
||||
return this.startNodeAt(type.start, type.loc.start);
|
||||
}
|
||||
|
||||
// Finish an AST node, adding `type` and `end` properties.
|
||||
|
||||
finishNode<T : NodeType>(node: T, type: string): T {
|
||||
return this.finishNodeAt(node, type, this.state.lastTokEnd, this.state.lastTokEndLoc);
|
||||
finishNode<T: NodeType>(node: T, type: string): T {
|
||||
return this.finishNodeAt(
|
||||
node,
|
||||
type,
|
||||
this.state.lastTokEnd,
|
||||
this.state.lastTokEndLoc,
|
||||
);
|
||||
}
|
||||
|
||||
// Finish node at given position
|
||||
|
||||
finishNodeAt<T : NodeType>(node: T, type: string, pos: number, loc: Position): T {
|
||||
finishNodeAt<T: NodeType>(
|
||||
node: T,
|
||||
type: string,
|
||||
pos: number,
|
||||
loc: Position,
|
||||
): T {
|
||||
node.type = type;
|
||||
node.end = pos;
|
||||
node.loc.end = loc;
|
||||
|
||||
File diff suppressed because it is too large
@ -13,7 +13,7 @@ export default class UtilParser extends Tokenizer {
|
||||
addExtra(node: Node, key: string, val: any): void {
|
||||
if (!node) return;
|
||||
|
||||
const extra = node.extra = node.extra || {};
|
||||
const extra = (node.extra = node.extra || {});
|
||||
extra[key] = val;
|
||||
}
|
||||
|
||||
@ -64,13 +64,17 @@ export default class UtilParser extends Tokenizer {
|
||||
// Test whether a semicolon can be inserted at the current position.
|
||||
|
||||
canInsertSemicolon(): boolean {
|
||||
return this.match(tt.eof) ||
|
||||
return (
|
||||
this.match(tt.eof) ||
|
||||
this.match(tt.braceR) ||
|
||||
this.hasPrecedingLineBreak();
|
||||
this.hasPrecedingLineBreak()
|
||||
);
|
||||
}
|
||||
|
||||
hasPrecedingLineBreak(): boolean {
|
||||
return lineBreak.test(this.input.slice(this.state.lastTokEnd, this.state.start));
|
||||
return lineBreak.test(
|
||||
this.input.slice(this.state.lastTokEnd, this.state.start),
|
||||
);
|
||||
}
|
||||
|
||||
// TODO
|
||||
@ -96,7 +100,10 @@ export default class UtilParser extends Tokenizer {
|
||||
// Raise an unexpected token error. Can take the expected token type
|
||||
// instead of a message string.
|
||||
|
||||
unexpected(pos: ?number, messageOrType: string | TokenType = "Unexpected token"): empty {
|
||||
unexpected(
|
||||
pos: ?number,
|
||||
messageOrType: string | TokenType = "Unexpected token",
|
||||
): empty {
|
||||
if (typeof messageOrType !== "string") {
|
||||
messageOrType = `Unexpected token, expected ${messageOrType.label}`;
|
||||
}
|
||||
|
||||
@ -6,13 +6,16 @@ import * as N from "../types";
|
||||
import type { Pos, Position } from "../util/location";
|
||||
|
||||
function isSimpleProperty(node: N.Node): boolean {
|
||||
return node != null &&
|
||||
return (
|
||||
node != null &&
|
||||
node.type === "Property" &&
|
||||
node.kind === "init" &&
|
||||
node.method === false;
|
||||
node.method === false
|
||||
);
|
||||
}
|
||||
|
||||
export default (superClass: Class<Parser>): Class<Parser> => class extends superClass {
|
||||
export default (superClass: Class<Parser>): Class<Parser> =>
|
||||
class extends superClass {
|
||||
estreeParseRegExpLiteral({ pattern, flags }: N.RegExpLiteral): N.Node {
|
||||
let regex = null;
|
||||
try {
|
||||
@ -35,16 +38,28 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
const directiveLiteral = directive.value;
|
||||
|
||||
const stmt = this.startNodeAt(directive.start, directive.loc.start);
|
||||
const expression = this.startNodeAt(directiveLiteral.start, directiveLiteral.loc.start);
|
||||
const expression = this.startNodeAt(
|
||||
directiveLiteral.start,
|
||||
directiveLiteral.loc.start,
|
||||
);
|
||||
|
||||
expression.value = directiveLiteral.value;
|
||||
expression.raw = directiveLiteral.extra.raw;
|
||||
|
||||
stmt.expression = this.finishNodeAt(
|
||||
expression, "Literal", directiveLiteral.end, directiveLiteral.loc.end);
|
||||
expression,
|
||||
"Literal",
|
||||
directiveLiteral.end,
|
||||
directiveLiteral.loc.end,
|
||||
);
|
||||
stmt.directive = directiveLiteral.extra.raw.slice(1, -1);
|
||||
|
||||
return this.finishNodeAt(stmt, "ExpressionStatement", directive.end, directive.loc.end);
|
||||
return this.finishNodeAt(
|
||||
stmt,
|
||||
"ExpressionStatement",
|
||||
directive.end,
|
||||
directive.loc.end,
|
||||
);
|
||||
}
|
||||
|
||||
// ==================================
|
||||
@ -77,16 +92,16 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
expr: N.Expression,
|
||||
isBinding: ?boolean,
|
||||
checkClashes: ?{ [key: string]: boolean },
|
||||
contextDescription: string
|
||||
contextDescription: string,
|
||||
): void {
|
||||
switch (expr.type) {
|
||||
case "ObjectPattern":
|
||||
expr.properties.forEach((prop) => {
|
||||
expr.properties.forEach(prop => {
|
||||
this.checkLVal(
|
||||
prop.type === "Property" ? prop.value : prop,
|
||||
isBinding,
|
||||
checkClashes,
|
||||
"object destructuring pattern"
|
||||
"object destructuring pattern",
|
||||
);
|
||||
});
|
||||
break;
|
||||
@ -95,7 +110,10 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
}
|
||||
}
|
||||
|
||||
checkPropClash(prop: N.ObjectMember, propHash: { [key: string]: boolean }): void {
|
||||
checkPropClash(
|
||||
prop: N.ObjectMember,
|
||||
propHash: { [key: string]: boolean },
|
||||
): void {
|
||||
if (prop.computed || !isSimpleProperty(prop)) return;
|
||||
|
||||
const key = prop.key;
|
||||
@ -103,15 +121,22 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
const name = key.type === "Identifier" ? key.name : String(key.value);
|
||||
|
||||
if (name === "__proto__") {
|
||||
if (propHash.proto) this.raise(key.start, "Redefinition of __proto__ property");
|
||||
if (propHash.proto)
|
||||
this.raise(key.start, "Redefinition of __proto__ property");
|
||||
propHash.proto = true;
|
||||
}
|
||||
}
|
||||
|
||||
isStrictBody(node: { body: N.BlockStatement }, isExpression: ?boolean): boolean {
|
||||
isStrictBody(
|
||||
node: { body: N.BlockStatement },
|
||||
isExpression: ?boolean,
|
||||
): boolean {
|
||||
if (!isExpression && node.body.body.length > 0) {
|
||||
for (const directive of node.body.body) {
|
||||
if (directive.type === "ExpressionStatement" && directive.expression.type === "Literal") {
|
||||
if (
|
||||
directive.type === "ExpressionStatement" &&
|
||||
directive.expression.type === "Literal"
|
||||
) {
|
||||
if (directive.expression.value === "use strict") return true;
|
||||
} else {
|
||||
// Break for the first non literal expression
|
||||
@ -124,10 +149,12 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
}
|
||||
|
||||
isValidDirective(stmt: N.Statement): boolean {
|
||||
return stmt.type === "ExpressionStatement" &&
|
||||
return (
|
||||
stmt.type === "ExpressionStatement" &&
|
||||
stmt.expression.type === "Literal" &&
|
||||
typeof stmt.expression.value === "string" &&
|
||||
(!stmt.expression.extra || !stmt.expression.extra.parenthesized);
|
||||
(!stmt.expression.extra || !stmt.expression.extra.parenthesized)
|
||||
);
|
||||
}
|
||||
|
||||
stmtToDirective(stmt: N.Statement): N.Directive {
|
||||
@ -145,11 +172,13 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
node: N.BlockStatementLike,
|
||||
allowDirectives: ?boolean,
|
||||
topLevel: boolean,
|
||||
end: TokenType
|
||||
end: TokenType,
|
||||
): void {
|
||||
super.parseBlockBody(node, allowDirectives, topLevel, end);
|
||||
|
||||
const directiveStatements = node.directives.map((d) => this.directiveToStmt(d));
|
||||
const directiveStatements = node.directives.map(d =>
|
||||
this.directiveToStmt(d),
|
||||
);
|
||||
node.body = directiveStatements.concat(node.body);
|
||||
delete node.directives;
|
||||
}
|
||||
@ -159,9 +188,15 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
method: N.ClassMethod,
|
||||
isGenerator: boolean,
|
||||
isAsync: boolean,
|
||||
isConstructor: boolean
|
||||
isConstructor: boolean,
|
||||
): void {
|
||||
this.parseMethod(method, isGenerator, isAsync, isConstructor, "MethodDefinition");
|
||||
this.parseMethod(
|
||||
method,
|
||||
isGenerator,
|
||||
isAsync,
|
||||
isConstructor,
|
||||
"MethodDefinition",
|
||||
);
|
||||
if (method.typeParameters) {
|
||||
// $FlowIgnore
|
||||
method.value.typeParameters = method.typeParameters;
|
||||
@ -193,11 +228,11 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
}
|
||||
}
|
||||
|
||||
parseLiteral<T : N.Literal>(
|
||||
parseLiteral<T: N.Literal>(
|
||||
value: any,
|
||||
type: /*T["kind"]*/string,
|
||||
type: /*T["kind"]*/ string,
|
||||
startPos?: number,
|
||||
startLoc?: Position
|
||||
startLoc?: Position,
|
||||
): T {
|
||||
const node = super.parseLiteral(value, type, startPos, startLoc);
|
||||
node.raw = node.extra.raw;
|
||||
@ -206,7 +241,7 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
return node;
|
||||
}
|
||||
|
||||
parseMethod<T : N.MethodLike>(
|
||||
parseMethod<T: N.MethodLike>(
|
||||
node: T,
|
||||
isGenerator: boolean,
|
||||
isAsync: boolean,
|
||||
@ -215,7 +250,13 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
): T {
|
||||
let funcNode = this.startNode();
|
||||
funcNode.kind = node.kind; // provide kind, so super method correctly sets state
|
||||
funcNode = super.parseMethod(funcNode, isGenerator, isAsync, isConstructor, "FunctionExpression");
|
||||
funcNode = super.parseMethod(
|
||||
funcNode,
|
||||
isGenerator,
|
||||
isAsync,
|
||||
isConstructor,
|
||||
"FunctionExpression",
|
||||
);
|
||||
delete funcNode.kind;
|
||||
// $FlowIgnore
|
||||
node.value = funcNode;
|
||||
@ -227,9 +268,14 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
prop: N.ObjectMethod,
|
||||
isGenerator: boolean,
|
||||
isAsync: boolean,
|
||||
isPattern: boolean
|
||||
isPattern: boolean,
|
||||
): ?N.ObjectMethod {
|
||||
const node: N.EstreeProperty = (super.parseObjectMethod(prop, isGenerator, isAsync, isPattern): any);
|
||||
const node: N.EstreeProperty = (super.parseObjectMethod(
|
||||
prop,
|
||||
isGenerator,
|
||||
isAsync,
|
||||
isPattern,
|
||||
): any);
|
||||
|
||||
if (node) {
|
||||
node.type = "Property";
|
||||
@ -245,11 +291,15 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
startPos: ?number,
|
||||
startLoc: ?Position,
|
||||
isPattern: boolean,
|
||||
refShorthandDefaultPos: ?Pos
|
||||
refShorthandDefaultPos: ?Pos,
|
||||
): ?N.ObjectProperty {
|
||||
const node: N.EstreeProperty = (
|
||||
super.parseObjectProperty(prop, startPos, startLoc, isPattern, refShorthandDefaultPos): any
|
||||
);
|
||||
const node: N.EstreeProperty = (super.parseObjectProperty(
|
||||
prop,
|
||||
startPos,
|
||||
startLoc,
|
||||
isPattern,
|
||||
refShorthandDefaultPos,
|
||||
): any);
|
||||
|
||||
if (node) {
|
||||
node.kind = "init";
|
||||
@ -262,7 +312,7 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
toAssignable(
|
||||
node: N.Node,
|
||||
isBinding: ?boolean,
|
||||
contextDescription: string
|
||||
contextDescription: string,
|
||||
): N.Node {
|
||||
if (isSimpleProperty(node)) {
|
||||
this.toAssignable(node.value, isBinding, contextDescription);
|
||||
@ -272,7 +322,10 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
node.type = "ObjectPattern";
|
||||
for (const prop of node.properties) {
|
||||
if (prop.kind === "get" || prop.kind === "set") {
|
||||
this.raise(prop.key.start, "Object pattern can't contain getter or setter");
|
||||
this.raise(
|
||||
prop.key.start,
|
||||
"Object pattern can't contain getter or setter",
|
||||
);
|
||||
} else if (prop.method) {
|
||||
this.raise(prop.key.start, "Object pattern can't contain methods");
|
||||
} else {
|
||||
@ -285,4 +338,4 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
|
||||
return super.toAssignable(node, isBinding, contextDescription);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
@ -16,17 +16,16 @@ const primitiveTypes = [
|
||||
"number",
|
||||
"string",
|
||||
"void",
|
||||
"null"
|
||||
"null",
|
||||
];
|
||||
|
||||
function isEsModuleType(bodyElement: N.Node): boolean {
|
||||
return bodyElement.type === "DeclareExportAllDeclaration" ||
|
||||
(
|
||||
bodyElement.type === "DeclareExportDeclaration" &&
|
||||
(
|
||||
!bodyElement.declaration ||
|
||||
(bodyElement.declaration.type !== "TypeAlias" && bodyElement.declaration.type !== "InterfaceDeclaration")
|
||||
)
|
||||
return (
|
||||
bodyElement.type === "DeclareExportAllDeclaration" ||
|
||||
(bodyElement.type === "DeclareExportDeclaration" &&
|
||||
(!bodyElement.declaration ||
|
||||
(bodyElement.declaration.type !== "TypeAlias" &&
|
||||
bodyElement.declaration.type !== "InterfaceDeclaration")))
|
||||
);
|
||||
}
|
||||
|
||||
@ -37,7 +36,8 @@ const exportSuggestions = {
|
||||
interface: "export interface",
|
||||
};
|
||||
|
||||
export default (superClass: Class<Parser>): Class<Parser> => class extends superClass {
|
||||
export default (superClass: Class<Parser>): Class<Parser> =>
|
||||
class extends superClass {
|
||||
flowParseTypeInitialiser(tok?: TokenType): N.FlowType {
|
||||
const oldInType = this.state.inType;
|
||||
this.state.inType = true;
|
||||
@ -48,7 +48,7 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
return type;
|
||||
}
|
||||
|
||||
flowParsePredicate() : N.FlowType {
|
||||
flowParsePredicate(): N.FlowType {
|
||||
const node = this.startNode();
|
||||
const moduloLoc = this.state.startLoc;
|
||||
const moduloPos = this.state.start;
|
||||
@ -56,8 +56,14 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
const checksLoc = this.state.startLoc;
|
||||
this.expectContextual("checks");
|
||||
// Force '%' and 'checks' to be adjacent
|
||||
if (moduloLoc.line !== checksLoc.line || moduloLoc.column !== checksLoc.column - 1) {
|
||||
this.raise(moduloPos, "Spaces between ´%´ and ´checks´ are not allowed here.");
|
||||
if (
|
||||
moduloLoc.line !== checksLoc.line ||
|
||||
moduloLoc.column !== checksLoc.column - 1
|
||||
) {
|
||||
this.raise(
|
||||
moduloPos,
|
||||
"Spaces between ´%´ and ´checks´ are not allowed here.",
|
||||
);
|
||||
}
|
||||
if (this.eat(tt.parenL)) {
|
||||
node.value = this.parseExpression();
|
||||
@ -93,10 +99,12 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
return this.finishNode(node, "DeclareClass");
|
||||
}
|
||||
|
||||
flowParseDeclareFunction(node: N.FlowDeclareFunction): N.FlowDeclareFunction {
|
||||
flowParseDeclareFunction(
|
||||
node: N.FlowDeclareFunction,
|
||||
): N.FlowDeclareFunction {
|
||||
this.next();
|
||||
|
||||
const id = node.id = this.parseIdentifier();
|
||||
const id = (node.id = this.parseIdentifier());
|
||||
|
||||
const typeNode = this.startNode();
|
||||
const typeContainer = this.startNode();
|
||||
@ -113,9 +121,17 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
typeNode.rest = tmp.rest;
|
||||
this.expect(tt.parenR);
|
||||
|
||||
[
|
||||
// $FlowFixMe (destructuring not supported yet)
|
||||
[typeNode.returnType, node.predicate] = this.flowParseTypeAndPredicateInitialiser();
|
||||
typeContainer.typeAnnotation = this.finishNode(typeNode, "FunctionTypeAnnotation");
|
||||
typeNode.returnType,
|
||||
// $FlowFixMe (destructuring not supported yet)
|
||||
node.predicate,
|
||||
] = this.flowParseTypeAndPredicateInitialiser();
|
||||
|
||||
typeContainer.typeAnnotation = this.finishNode(
|
||||
typeNode,
|
||||
"FunctionTypeAnnotation",
|
||||
);
|
||||
|
||||
id.typeAnnotation = this.finishNode(typeContainer, "TypeAnnotation");
|
||||
|
||||
@ -126,7 +142,10 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
return this.finishNode(node, "DeclareFunction");
|
||||
}
|
||||
|
||||
flowParseDeclare(node: N.FlowDeclare, insideModule?: boolean): N.FlowDeclare {
|
||||
flowParseDeclare(
|
||||
node: N.FlowDeclare,
|
||||
insideModule?: boolean,
|
||||
): N.FlowDeclare {
|
||||
if (this.match(tt._class)) {
|
||||
return this.flowParseDeclareClass(node);
|
||||
} else if (this.match(tt._function)) {
|
||||
@ -137,7 +156,11 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
if (this.lookahead().type === tt.dot) {
|
||||
return this.flowParseDeclareModuleExports(node);
|
||||
} else {
|
||||
if (insideModule) this.unexpected(null, "`declare module` cannot be used inside another `declare module`");
|
||||
if (insideModule)
|
||||
this.unexpected(
|
||||
null,
|
||||
"`declare module` cannot be used inside another `declare module`",
|
||||
);
|
||||
return this.flowParseDeclareModule(node);
|
||||
}
|
||||
} else if (this.isContextual("type")) {
|
||||
@ -151,7 +174,9 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
}
|
||||
}
|
||||
|
||||
flowParseDeclareVariable(node: N.FlowDeclareVariable): N.FlowDeclareVariable {
|
||||
flowParseDeclareVariable(
|
||||
node: N.FlowDeclareVariable,
|
||||
): N.FlowDeclareVariable {
|
||||
this.next();
|
||||
node.id = this.flowParseTypeAnnotatableIdentifier();
|
||||
this.semicolon();
|
||||
@ -167,8 +192,8 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
node.id = this.parseIdentifier();
|
||||
}
|
||||
|
||||
const bodyNode = node.body = this.startNode();
|
||||
const body = bodyNode.body = [];
|
||||
const bodyNode = (node.body = this.startNode());
|
||||
const body = (bodyNode.body = []);
|
||||
this.expect(tt.braceL);
|
||||
while (!this.match(tt.braceR)) {
|
||||
let bodyNode = this.startNode();
|
||||
@ -176,12 +201,18 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
if (this.match(tt._import)) {
|
||||
const lookahead = this.lookahead();
|
||||
if (lookahead.value !== "type" && lookahead.value !== "typeof") {
|
||||
this.unexpected(null, "Imports within a `declare module` body must always be `import type` or `import typeof`");
|
||||
this.unexpected(
|
||||
null,
|
||||
"Imports within a `declare module` body must always be `import type` or `import typeof`",
|
||||
);
|
||||
}
|
||||
this.next();
|
||||
this.parseImport(bodyNode);
|
||||
} else {
|
||||
this.expectContextual("declare", "Only declares and type imports are allowed inside declare module");
|
||||
this.expectContextual(
|
||||
"declare",
|
||||
"Only declares and type imports are allowed inside declare module",
|
||||
);
|
||||
|
||||
bodyNode = this.flowParseDeclare(bodyNode, true);
|
||||
}
|
||||
@ -194,13 +225,19 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
|
||||
let kind = null;
|
||||
let hasModuleExport = false;
|
||||
const errorMessage = "Found both `declare module.exports` and `declare export` in the same module. Modules can only have 1 since they are either an ES module or they are a CommonJS module";
|
||||
body.forEach((bodyElement) => {
|
||||
const errorMessage =
|
||||
"Found both `declare module.exports` and `declare export` in the same module. Modules can only have 1 since they are either an ES module or they are a CommonJS module";
|
||||
body.forEach(bodyElement => {
|
||||
if (isEsModuleType(bodyElement)) {
|
||||
if (kind === "CommonJS") this.unexpected(bodyElement.start, errorMessage);
|
||||
if (kind === "CommonJS")
|
||||
this.unexpected(bodyElement.start, errorMessage);
|
||||
kind = "ES";
|
||||
} else if (bodyElement.type === "DeclareModuleExports") {
|
||||
if (hasModuleExport) this.unexpected(bodyElement.start, "Duplicate `declare module.exports` statement");
|
||||
if (hasModuleExport)
|
||||
this.unexpected(
|
||||
bodyElement.start,
|
||||
"Duplicate `declare module.exports` statement",
|
||||
);
|
||||
if (kind === "ES") this.unexpected(bodyElement.start, errorMessage);
|
||||
kind = "CommonJS";
|
||||
hasModuleExport = true;
|
||||
@ -211,7 +248,10 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
return this.finishNode(node, "DeclareModule");
|
||||
}
|
||||
|
||||
flowParseDeclareExportDeclaration(node: N.FlowDeclareExportDeclaration, insideModule: ?boolean): N.FlowDeclareExportDeclaration {
|
||||
flowParseDeclareExportDeclaration(
|
||||
node: N.FlowDeclareExportDeclaration,
|
||||
insideModule: ?boolean,
|
||||
): N.FlowDeclareExportDeclaration {
|
||||
this.expect(tt._export);
|
||||
|
||||
if (this.eat(tt._default)) {
|
||||
@ -229,15 +269,17 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
return this.finishNode(node, "DeclareExportDeclaration");
|
||||
} else {
|
||||
if (
|
||||
this.match(tt._const) || this.match(tt._let) ||
|
||||
(
|
||||
(this.isContextual("type") || this.isContextual("interface")) &&
|
||||
!insideModule
|
||||
)
|
||||
this.match(tt._const) ||
|
||||
this.match(tt._let) ||
|
||||
((this.isContextual("type") || this.isContextual("interface")) &&
|
||||
!insideModule)
|
||||
) {
|
||||
const label = this.state.value;
|
||||
const suggestion = exportSuggestions[label];
|
||||
this.unexpected(this.state.start, `\`declare export ${label}\` is not supported. Use \`${suggestion}\` instead`);
|
||||
this.unexpected(
|
||||
this.state.start,
|
||||
`\`declare export ${label}\` is not supported. Use \`${suggestion}\` instead`,
|
||||
);
|
||||
}
|
||||
|
||||
if (
|
||||
@ -275,7 +317,9 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
throw this.unexpected();
|
||||
}
|
||||
|
||||
flowParseDeclareModuleExports(node: N.FlowDeclareModuleExports): N.FlowDeclareModuleExports {
|
||||
flowParseDeclareModuleExports(
|
||||
node: N.FlowDeclareModuleExports,
|
||||
): N.FlowDeclareModuleExports {
|
||||
this.expectContextual("module");
|
||||
this.expect(tt.dot);
|
||||
this.expectContextual("exports");
|
||||
@ -285,13 +329,17 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
return this.finishNode(node, "DeclareModuleExports");
|
||||
}
|
||||
|
||||
flowParseDeclareTypeAlias(node: N.FlowDeclareTypeAlias): N.FlowDeclareTypeAlias {
|
||||
flowParseDeclareTypeAlias(
|
||||
node: N.FlowDeclareTypeAlias,
|
||||
): N.FlowDeclareTypeAlias {
|
||||
this.next();
|
||||
this.flowParseTypeAlias(node);
|
||||
return this.finishNode(node, "DeclareTypeAlias");
|
||||
}
|
||||
|
||||
flowParseDeclareInterface(node: N.FlowDeclareInterface): N.FlowDeclareInterface {
|
||||
flowParseDeclareInterface(
|
||||
node: N.FlowDeclareInterface,
|
||||
): N.FlowDeclareInterface {
|
||||
this.next();
|
||||
this.flowParseInterfaceish(node);
|
||||
return this.finishNode(node, "DeclareInterface");
|
||||
@ -347,7 +395,10 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
|
||||
flowParseRestrictedIdentifier(liberal?: boolean): N.Identifier {
|
||||
if (primitiveTypes.indexOf(this.state.value) > -1) {
|
||||
this.raise(this.state.start, `Cannot overwrite primitive type ${this.state.value}`);
|
||||
this.raise(
|
||||
this.state.start,
|
||||
`Cannot overwrite primitive type ${this.state.value}`,
|
||||
);
|
||||
}
|
||||
|
||||
return this.parseIdentifier(liberal);
|
||||
@ -439,10 +490,16 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
}
|
||||
|
||||
flowParseObjectPropertyKey(): N.Expression {
|
||||
return (this.match(tt.num) || this.match(tt.string)) ? this.parseExprAtom() : this.parseIdentifier(true);
|
||||
return this.match(tt.num) || this.match(tt.string)
|
||||
? this.parseExprAtom()
|
||||
: this.parseIdentifier(true);
|
||||
}
|
||||
|
||||
flowParseObjectTypeIndexer(node: N.FlowObjectTypeIndexer, isStatic: boolean, variance: ?N.FlowVariance): N.FlowObjectTypeIndexer {
|
||||
flowParseObjectTypeIndexer(
|
||||
node: N.FlowObjectTypeIndexer,
|
||||
isStatic: boolean,
|
||||
variance: ?N.FlowVariance,
|
||||
): N.FlowObjectTypeIndexer {
|
||||
node.static = isStatic;
|
||||
|
||||
this.expect(tt.bracketL);
|
||||
@ -460,7 +517,9 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
return this.finishNode(node, "ObjectTypeIndexer");
|
||||
}
|
||||
|
||||
flowParseObjectTypeMethodish(node: N.FlowFunctionTypeAnnotation): N.FlowFunctionTypeAnnotation {
|
||||
flowParseObjectTypeMethodish(
|
||||
node: N.FlowFunctionTypeAnnotation,
|
||||
): N.FlowFunctionTypeAnnotation {
|
||||
node.params = [];
|
||||
node.rest = null;
|
||||
node.typeParameters = null;
|
||||
@ -486,14 +545,21 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
return this.finishNode(node, "FunctionTypeAnnotation");
|
||||
}
|
||||
|
||||
flowParseObjectTypeCallProperty(node: N.FlowObjectTypeCallProperty, isStatic: boolean): N.FlowObjectTypeCallProperty {
|
||||
flowParseObjectTypeCallProperty(
|
||||
node: N.FlowObjectTypeCallProperty,
|
||||
isStatic: boolean,
|
||||
): N.FlowObjectTypeCallProperty {
|
||||
const valueNode = this.startNode();
|
||||
node.static = isStatic;
|
||||
node.value = this.flowParseObjectTypeMethodish(valueNode);
|
||||
return this.finishNode(node, "ObjectTypeCallProperty");
|
||||
}
|
||||
|
||||
flowParseObjectType(allowStatic: boolean, allowExact: boolean, allowSpread: boolean): N.FlowObjectTypeAnnotation {
|
||||
flowParseObjectType(
|
||||
allowStatic: boolean,
|
||||
allowExact: boolean,
|
||||
allowSpread: boolean,
|
||||
): N.FlowObjectTypeAnnotation {
|
||||
const oldInType = this.state.inType;
|
||||
this.state.inType = true;
|
||||
|
||||
@ -520,7 +586,11 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
while (!this.match(endDelim)) {
|
||||
let isStatic = false;
|
||||
const node = this.startNode();
|
||||
if (allowStatic && this.isContextual("static") && this.lookahead().type !== tt.colon) {
|
||||
if (
|
||||
allowStatic &&
|
||||
this.isContextual("static") &&
|
||||
this.lookahead().type !== tt.colon
|
||||
) {
|
||||
this.next();
|
||||
isStatic = true;
|
||||
}
|
||||
@ -528,12 +598,16 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
const variance = this.flowParseVariance();
|
||||
|
||||
if (this.match(tt.bracketL)) {
|
||||
nodeStart.indexers.push(this.flowParseObjectTypeIndexer(node, isStatic, variance));
|
||||
nodeStart.indexers.push(
|
||||
this.flowParseObjectTypeIndexer(node, isStatic, variance),
|
||||
);
|
||||
} else if (this.match(tt.parenL) || this.isRelational("<")) {
|
||||
if (variance) {
|
||||
this.unexpected(variance.start);
|
||||
}
|
||||
nodeStart.callProperties.push(this.flowParseObjectTypeCallProperty(node, isStatic));
|
||||
nodeStart.callProperties.push(
|
||||
this.flowParseObjectTypeCallProperty(node, isStatic),
|
||||
);
|
||||
} else {
|
||||
let kind = "init";
|
||||
|
||||
@ -549,7 +623,15 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
}
|
||||
}
|
||||
|
||||
nodeStart.properties.push(this.flowParseObjectTypeProperty(node, isStatic, variance, kind, allowSpread));
|
||||
nodeStart.properties.push(
|
||||
this.flowParseObjectTypeProperty(
|
||||
node,
|
||||
isStatic,
|
||||
variance,
|
||||
kind,
|
||||
allowSpread,
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
this.flowObjectTypeSemicolon();
|
||||
@ -575,11 +657,14 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
if (!allowSpread) {
|
||||
this.unexpected(
|
||||
null,
|
||||
"Spread operator cannot appear in class or interface definitions"
|
||||
"Spread operator cannot appear in class or interface definitions",
|
||||
);
|
||||
}
|
||||
if (variance) {
|
||||
this.unexpected(variance.start, "Spread properties cannot have variance");
|
||||
this.unexpected(
|
||||
variance.start,
|
||||
"Spread properties cannot have variance",
|
||||
);
|
||||
}
|
||||
this.expect(tt.ellipsis);
|
||||
node.argument = this.flowParseType();
|
||||
@ -597,8 +682,11 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
this.unexpected(variance.start);
|
||||
}
|
||||
|
||||
node.value = this.flowParseObjectTypeMethodish(this.startNodeAt(node.start, node.loc.start));
|
||||
if (kind === "get" || kind === "set") this.flowCheckGetterSetterParamCount(node);
|
||||
node.value = this.flowParseObjectTypeMethodish(
|
||||
this.startNodeAt(node.start, node.loc.start),
|
||||
);
|
||||
if (kind === "get" || kind === "set")
|
||||
this.flowCheckGetterSetterParamCount(node);
|
||||
} else {
|
||||
if (kind !== "init") this.unexpected();
|
||||
if (this.eat(tt.question)) {
|
||||
@ -606,7 +694,6 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
}
|
||||
node.value = this.flowParseTypeInitialiser();
|
||||
node.variance = variance;
|
||||
|
||||
}
|
||||
|
||||
node.optional = optional;
|
||||
@ -617,7 +704,9 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
|
||||
// This is similar to checkGetterSetterParamCount, but as
|
||||
// babylon uses non estree properties we cannot reuse it here
|
||||
flowCheckGetterSetterParamCount(property: N.FlowObjectTypeProperty | N.FlowObjectTypeSpreadProperty): void {
|
||||
flowCheckGetterSetterParamCount(
|
||||
property: N.FlowObjectTypeProperty | N.FlowObjectTypeSpreadProperty,
|
||||
): void {
|
||||
const paramCount = property.kind === "get" ? 0 : 1;
|
||||
if (property.value.params.length !== paramCount) {
|
||||
const start = property.start;
|
||||
@ -630,13 +719,21 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
}
|
||||
|
||||
flowObjectTypeSemicolon(): void {
|
||||
if (!this.eat(tt.semi) && !this.eat(tt.comma) &&
|
||||
!this.match(tt.braceR) && !this.match(tt.braceBarR)) {
|
||||
if (
|
||||
!this.eat(tt.semi) &&
|
||||
!this.eat(tt.comma) &&
|
||||
!this.match(tt.braceR) &&
|
||||
!this.match(tt.braceBarR)
|
||||
) {
|
||||
this.unexpected();
|
||||
}
|
||||
}
|
||||
|
||||
flowParseQualifiedTypeIdentifier(startPos?: number, startLoc?: Position, id?: N.Identifier): N.FlowQualifiedTypeIdentifier {
|
||||
flowParseQualifiedTypeIdentifier(
|
||||
startPos?: number,
|
||||
startLoc?: Position,
|
||||
id?: N.Identifier,
|
||||
): N.FlowQualifiedTypeIdentifier {
|
||||
startPos = startPos || this.state.start;
|
||||
startLoc = startLoc || this.state.startLoc;
|
||||
let node = id || this.parseIdentifier();
|
||||
@ -651,7 +748,11 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
return node;
|
||||
}
|
||||
|
||||
flowParseGenericType(startPos: number, startLoc: Position, id: N.Identifier): N.FlowGenericTypeAnnotation {
|
||||
flowParseGenericType(
|
||||
startPos: number,
|
||||
startLoc: Position,
|
||||
id: N.Identifier,
|
||||
): N.FlowGenericTypeAnnotation {
|
||||
const node = this.startNodeAt(startPos, startLoc);
|
||||
|
||||
node.typeParameters = null;
|
||||
@ -691,8 +792,7 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
let typeAnnotation = null;
|
||||
const node = this.startNode();
|
||||
const lh = this.lookahead();
|
||||
if (lh.type === tt.colon ||
|
||||
lh.type === tt.question) {
|
||||
if (lh.type === tt.colon || lh.type === tt.question) {
|
||||
name = this.parseIdentifier();
|
||||
if (this.eat(tt.question)) {
|
||||
optional = true;
|
||||
@ -707,7 +807,9 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
return this.finishNode(node, "FunctionTypeParam");
|
||||
}
|
||||
|
||||
reinterpretTypeAsFunctionTypeParam(type: N.FlowType): N.FlowFunctionTypeParam {
|
||||
reinterpretTypeAsFunctionTypeParam(
|
||||
type: N.FlowType,
|
||||
): N.FlowFunctionTypeParam {
|
||||
const node = this.startNodeAt(type.start, type.loc.start);
|
||||
node.name = null;
|
||||
node.optional = false;
|
||||
@ -715,7 +817,9 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
return this.finishNode(node, "FunctionTypeParam");
|
||||
}
|
||||
|
||||
flowParseFunctionTypeParams(params: N.FlowFunctionTypeParam[] = []): { params: N.FlowFunctionTypeParam[], rest: ?N.FlowFunctionTypeParam } {
|
||||
flowParseFunctionTypeParams(
|
||||
params: N.FlowFunctionTypeParam[] = [],
|
||||
): { params: N.FlowFunctionTypeParam[], rest: ?N.FlowFunctionTypeParam } {
|
||||
let rest: ?N.FlowFunctionTypeParam = null;
|
||||
while (!this.match(tt.parenR) && !this.match(tt.ellipsis)) {
|
||||
params.push(this.flowParseFunctionTypeParam());
|
||||
@ -729,7 +833,12 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
return { params, rest };
|
||||
}
|
||||
|
||||
flowIdentToTypeAnnotation(startPos: number, startLoc: Position, node: N.FlowTypeAnnotation, id: N.Identifier): N.FlowTypeAnnotation {
|
||||
flowIdentToTypeAnnotation(
|
||||
startPos: number,
|
||||
startLoc: Position,
|
||||
node: N.FlowTypeAnnotation,
|
||||
id: N.Identifier,
|
||||
): N.FlowTypeAnnotation {
|
||||
switch (id.name) {
|
||||
case "any":
|
||||
return this.finishNode(node, "AnyTypeAnnotation");
|
||||
@ -772,7 +881,12 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
|
||||
switch (this.state.type) {
|
||||
case tt.name:
|
||||
return this.flowIdentToTypeAnnotation(startPos, startLoc, node, this.parseIdentifier());
|
||||
return this.flowIdentToTypeAnnotation(
|
||||
startPos,
|
||||
startLoc,
|
||||
node,
|
||||
this.parseIdentifier(),
|
||||
);
|
||||
|
||||
case tt.braceL:
|
||||
return this.flowParseObjectType(false, false, true);
|
||||
@ -819,9 +933,13 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
this.state.noAnonFunctionType = oldNoAnonFunctionType;
|
||||
|
||||
// A `,` or a `) =>` means this is an anonymous function type
|
||||
if (this.state.noAnonFunctionType ||
|
||||
!(this.match(tt.comma) ||
|
||||
(this.match(tt.parenR) && this.lookahead().type === tt.arrow))) {
|
||||
if (
|
||||
this.state.noAnonFunctionType ||
|
||||
!(
|
||||
this.match(tt.comma) ||
|
||||
(this.match(tt.parenR) && this.lookahead().type === tt.arrow)
|
||||
)
|
||||
) {
|
||||
this.expect(tt.parenR);
|
||||
return type;
|
||||
} else {
|
||||
@ -831,9 +949,9 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
}
|
||||
|
||||
if (type) {
|
||||
tmp = this.flowParseFunctionTypeParams(
|
||||
[this.reinterpretTypeAsFunctionTypeParam(type)],
|
||||
);
|
||||
tmp = this.flowParseFunctionTypeParams([
|
||||
this.reinterpretTypeAsFunctionTypeParam(type),
|
||||
]);
|
||||
} else {
|
||||
tmp = this.flowParseFunctionTypeParams();
|
||||
}
|
||||
@ -852,9 +970,13 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
return this.finishNode(node, "FunctionTypeAnnotation");
|
||||
|
||||
case tt.string:
|
||||
return this.parseLiteral(this.state.value, "StringLiteralTypeAnnotation");
|
||||
return this.parseLiteral(
|
||||
this.state.value,
|
||||
"StringLiteralTypeAnnotation",
|
||||
);
|
||||
|
||||
case tt._true: case tt._false:
|
||||
case tt._true:
|
||||
case tt._false:
|
||||
node.value = this.match(tt._true);
|
||||
this.next();
|
||||
return this.finishNode(node, "BooleanLiteralTypeAnnotation");
|
||||
@ -862,14 +984,23 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
case tt.plusMin:
|
||||
if (this.state.value === "-") {
|
||||
this.next();
|
||||
if (!this.match(tt.num)) this.unexpected(null, "Unexpected token, expected number");
|
||||
if (!this.match(tt.num))
|
||||
this.unexpected(null, "Unexpected token, expected number");
|
||||
|
||||
return this.parseLiteral(-this.state.value, "NumberLiteralTypeAnnotation", node.start, node.loc.start);
|
||||
return this.parseLiteral(
|
||||
-this.state.value,
|
||||
"NumberLiteralTypeAnnotation",
|
||||
node.start,
|
||||
node.loc.start,
|
||||
);
|
||||
}
|
||||
|
||||
this.unexpected();
|
||||
case tt.num:
|
||||
return this.parseLiteral(this.state.value, "NumberLiteralTypeAnnotation");
|
||||
return this.parseLiteral(
|
||||
this.state.value,
|
||||
"NumberLiteralTypeAnnotation",
|
||||
);
|
||||
|
||||
case tt._null:
|
||||
node.value = this.match(tt._null);
|
||||
@ -895,7 +1026,8 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
}
|
||||
|
||||
flowParsePostfixType(): N.FlowTypeAnnotation {
|
||||
const startPos = this.state.start, startLoc = this.state.startLoc;
|
||||
const startPos = this.state.start,
|
||||
startLoc = this.state.startLoc;
|
||||
let type = this.flowParsePrimaryType();
|
||||
while (!this.canInsertSemicolon() && this.match(tt.bracketL)) {
|
||||
const node = this.startNodeAt(startPos, startLoc);
|
||||
@ -939,7 +1071,9 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
while (this.eat(tt.bitwiseAND)) {
|
||||
node.types.push(this.flowParseAnonFunctionWithoutParens());
|
||||
}
|
||||
return node.types.length === 1 ? type : this.finishNode(node, "IntersectionTypeAnnotation");
|
||||
return node.types.length === 1
|
||||
? type
|
||||
: this.finishNode(node, "IntersectionTypeAnnotation");
|
||||
}
|
||||
|
||||
flowParseUnionType(): N.FlowTypeAnnotation {
|
||||
@ -950,7 +1084,9 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
while (this.eat(tt.bitwiseOR)) {
|
||||
node.types.push(this.flowParseIntersectionType());
|
||||
}
|
||||
return node.types.length === 1 ? type : this.finishNode(node, "UnionTypeAnnotation");
|
||||
return node.types.length === 1
|
||||
? type
|
||||
: this.finishNode(node, "UnionTypeAnnotation");
|
||||
}
|
||||
|
||||
flowParseType(): N.FlowTypeAnnotation {
|
||||
@ -985,7 +1121,7 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
|
||||
node.expression,
|
||||
node.expression.type,
|
||||
node.typeAnnotation.end,
|
||||
node.typeAnnotation.loc.end
|
||||
node.typeAnnotation.loc.end,
|
||||
);
|
||||
}
|
||||
|
||||
@ -1008,12 +1144,22 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
// Overrides
// ==================================

parseFunctionBodyAndFinish(node: N.BodilessFunctionOrMethodBase, type: string, allowExpressionBody?: boolean): void {
parseFunctionBodyAndFinish(
node: N.BodilessFunctionOrMethodBase,
type: string,
allowExpressionBody?: boolean,
): void {
// For arrow functions, `parseArrow` handles the return type itself.
if (!allowExpressionBody && this.match(tt.colon)) {
const typeNode = this.startNode();
// $FlowFixMe
[typeNode.typeAnnotation, node.predicate] = this.flowParseTypeAndPredicateInitialiser();

[
// $FlowFixMe (destructuring not supported yet)
typeNode.typeAnnotation,
// $FlowFixMe (destructuring not supported yet)
node.predicate,
] = this.flowParseTypeAndPredicateInitialiser();

node.returnType = typeNode.typeAnnotation
? this.finishNode(typeNode, "TypeAnnotation")
: null;
@ -1025,7 +1171,11 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
// interfaces
parseStatement(declaration: boolean, topLevel?: boolean): N.Statement {
// strict mode handling of `interface` since it's a reserved word
if (this.state.strict && this.match(tt.name) && this.state.value === "interface") {
if (
this.state.strict &&
this.match(tt.name) &&
this.state.value === "interface"
) {
const node = this.startNode();
this.next();
return this.flowParseInterface(node);
@ -1035,10 +1185,19 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
}

// declares, interfaces and type aliases
parseExpressionStatement(node: N.ExpressionStatement, expr: N.Expression): N.ExpressionStatement {
parseExpressionStatement(
node: N.ExpressionStatement,
expr: N.Expression,
): N.ExpressionStatement {
if (expr.type === "Identifier") {
if (expr.name === "declare") {
if (this.match(tt._class) || this.match(tt.name) || this.match(tt._function) || this.match(tt._var) || this.match(tt._export)) {
if (
this.match(tt._class) ||
this.match(tt.name) ||
this.match(tt._function) ||
this.match(tt._var) ||
this.match(tt._export)
) {
return this.flowParseDeclare(node);
}
} else if (this.match(tt.name)) {
@ -1055,9 +1214,11 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super

// export type
shouldParseExportDeclaration(): boolean {
return this.isContextual("type")
|| this.isContextual("interface")
|| super.shouldParseExportDeclaration();
return (
this.isContextual("type") ||
this.isContextual("interface") ||
super.shouldParseExportDeclaration()
);
}

isExportDefaultSpecifier(): boolean {
@ -1071,7 +1232,13 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
return super.isExportDefaultSpecifier();
}

parseConditional(expr: N.Expression, noIn: ?boolean, startPos: number, startLoc: Position, refNeedsArrowPos?: ?Pos): N.Expression {
parseConditional(
expr: N.Expression,
noIn: ?boolean,
startPos: number,
startLoc: Position,
refNeedsArrowPos?: ?Pos,
): N.Expression {
// only do the expensive clone if there is a question mark
// and if we come from inside parens
if (refNeedsArrowPos && this.match(tt.question)) {
@ -1093,7 +1260,11 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
return super.parseConditional(expr, noIn, startPos, startLoc);
}

parseParenItem(node: N.Expression, startPos: number, startLoc: Position): N.Expression {
parseParenItem(
node: N.Expression,
startPos: number,
startLoc: Position,
): N.Expression {
node = super.parseParenItem(node, startPos, startLoc);
if (this.eat(tt.question)) {
node.optional = true;
@ -1170,16 +1341,28 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
}
}

toAssignable(node: N.Node, isBinding: ?boolean, contextDescription: string): N.Node {
toAssignable(
node: N.Node,
isBinding: ?boolean,
contextDescription: string,
): N.Node {
if (node.type === "TypeCastExpression") {
return super.toAssignable(this.typeCastToParameter(node), isBinding, contextDescription);
return super.toAssignable(
this.typeCastToParameter(node),
isBinding,
contextDescription,
);
} else {
return super.toAssignable(node, isBinding, contextDescription);
}
}

// turn type casts that we found in function parameter head into type annotated params
toAssignableList(exprList: N.Expression[], isBinding: ?boolean, contextDescription: string): $ReadOnlyArray<N.Pattern> {
toAssignableList(
exprList: N.Expression[],
isBinding: ?boolean,
contextDescription: string,
): $ReadOnlyArray<N.Pattern> {
for (let i = 0; i < exprList.length; i++) {
const expr = exprList[i];
if (expr && expr.type === "TypeCastExpression") {
@ -1191,7 +1374,9 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super

// this is a list of nodes, from something like a call expression, we need to filter the
// type casts that we've found that are illegal in this context
toReferencedList(exprList: $ReadOnlyArray<?N.Expression>): $ReadOnlyArray<?N.Expression> {
toReferencedList(
exprList: $ReadOnlyArray<?N.Expression>,
): $ReadOnlyArray<?N.Expression> {
for (let i = 0; i < exprList.length; i++) {
const expr = exprList[i];
if (expr && expr._exprListItem && expr.type === "TypeCastExpression") {
@ -1204,9 +1389,17 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super

// parse an item inside a expression list eg. `(NODE, NODE)` where NODE represents
// the position where this function is called
parseExprListItem(allowEmpty: ?boolean, refShorthandDefaultPos: ?Pos, refNeedsArrowPos: ?Pos): ?N.Expression {
parseExprListItem(
allowEmpty: ?boolean,
refShorthandDefaultPos: ?Pos,
refNeedsArrowPos: ?Pos,
): ?N.Expression {
const container = this.startNode();
const node = super.parseExprListItem(allowEmpty, refShorthandDefaultPos, refNeedsArrowPos);
const node = super.parseExprListItem(
allowEmpty,
refShorthandDefaultPos,
refNeedsArrowPos,
);
if (this.match(tt.colon)) {
container._exprListItem = true;
container.expression = node;
@ -1221,10 +1414,15 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
expr: N.Expression,
isBinding: ?boolean,
checkClashes: ?{ [key: string]: boolean },
contextDescription: string
contextDescription: string,
): void {
if (expr.type !== "TypeCastExpression") {
return super.checkLVal(expr, isBinding, checkClashes, contextDescription);
return super.checkLVal(
expr,
isBinding,
checkClashes,
contextDescription,
);
}
}

@ -1256,7 +1454,7 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
method: N.ClassMethod,
isGenerator: boolean,
isAsync: boolean,
isConstructor: boolean
isConstructor: boolean,
): void {
if (method.variance) {
this.unexpected(method.variance.start);
@ -1266,7 +1464,13 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
method.typeParameters = this.flowParseTypeParameterDeclaration();
}

super.parseClassMethod(classBody, method, isGenerator, isAsync, isConstructor);
super.parseClassMethod(
classBody,
method,
isGenerator,
isAsync,
isConstructor,
);
}

// parse a the super class type parameters and implements
@ -1277,7 +1481,7 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
}
if (this.isContextual("implements")) {
this.next();
const implemented: N.FlowClassImplements[] = node.implements = [];
const implemented: N.FlowClassImplements[] = (node.implements = []);
do {
const node = this.startNode();
node.id = this.parseIdentifier();
@ -1291,7 +1495,9 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
}
}

parsePropertyName(node: N.ObjectOrClassMember | N.TsNamedTypeElementBase): N.Identifier {
parsePropertyName(
node: N.ObjectOrClassMember | N.TsNamedTypeElementBase,
): N.Identifier {
const variance = this.flowParseVariance();
const key = super.parsePropertyName(node);
// $FlowIgnore ("variance" not defined on TsNamedTypeElementBase)
@ -1307,7 +1513,7 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
isGenerator: boolean,
isAsync: boolean,
isPattern: boolean,
refShorthandDefaultPos: ?Pos
refShorthandDefaultPos: ?Pos,
): void {
if (prop.variance) {
this.unexpected(prop.variance.start);
@ -1329,7 +1535,7 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
isGenerator,
isAsync,
isPattern,
refShorthandDefaultPos
refShorthandDefaultPos,
);

// add typeParameters if we found them
@ -1344,7 +1550,8 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
if (param.type !== "Identifier") {
throw this.raise(
param.start,
"A binding pattern parameter cannot be optional in an implementation signature.");
"A binding pattern parameter cannot be optional in an implementation signature.",
);
}

param.optional = true;
@ -1356,11 +1563,22 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
return param;
}

parseMaybeDefault(startPos?: ?number, startLoc?: ?Position, left?: ?N.Pattern): N.Pattern {
parseMaybeDefault(
startPos?: ?number,
startLoc?: ?Position,
left?: ?N.Pattern,
): N.Pattern {
const node = super.parseMaybeDefault(startPos, startLoc, left);

if (node.type === "AssignmentPattern" && node.typeAnnotation && node.right.start < node.typeAnnotation.start) {
this.raise(node.typeAnnotation.start, "Type annotations must come before default assignments, e.g. instead of `age = 25: number` use `age: number = 25`");
if (
node.type === "AssignmentPattern" &&
node.typeAnnotation &&
node.right.start < node.typeAnnotation.start
) {
this.raise(
node.typeAnnotation.start,
"Type annotations must come before default assignments, e.g. instead of `age = 25: number` use `age: number = 25`",
);
}

return node;
@ -1378,7 +1596,11 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
}
if (kind) {
const lh = this.lookahead();
if ((lh.type === tt.name && lh.value !== "from") || lh.type === tt.braceL || lh.type === tt.star) {
if (
(lh.type === tt.name && lh.value !== "from") ||
lh.type === tt.braceL ||
lh.type === tt.star
) {
this.next();
node.importKind = kind;
}
@ -1403,7 +1625,11 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
let isBinding = false;
if (this.isContextual("as")) {
const as_ident = this.parseIdentifier(true);
if (specifierTypeKind !== null && !this.match(tt.name) && !this.state.type.keyword) {
if (
specifierTypeKind !== null &&
!this.match(tt.name) &&
!this.state.type.keyword
) {
// `import {type as ,` or `import {type as }`
specifier.imported = as_ident;
specifier.importKind = specifierTypeKind;
@ -1414,7 +1640,10 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
specifier.importKind = null;
specifier.local = this.parseIdentifier();
}
} else if (specifierTypeKind !== null && (this.match(tt.name) || this.state.type.keyword)) {
} else if (
specifierTypeKind !== null &&
(this.match(tt.name) || this.state.type.keyword)
) {
// `import {type foo`
specifier.imported = this.parseIdentifier(true);
specifier.importKind = specifierTypeKind;
@ -1435,10 +1664,19 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
(node.importKind === "type" || node.importKind === "typeof") &&
(specifier.importKind === "type" || specifier.importKind === "typeof")
) {
this.raise(firstIdentLoc, "`The `type` and `typeof` keywords on named imports can only be used on regular `import` statements. It cannot be used with `import type` or `import typeof` statements`");
this.raise(
firstIdentLoc,
"`The `type` and `typeof` keywords on named imports can only be used on regular `import` statements. It cannot be used with `import type` or `import typeof` statements`",
);
}

if (isBinding) this.checkReservedWord(specifier.local.name, specifier.start, true, true);
if (isBinding)
this.checkReservedWord(
specifier.local.name,
specifier.start,
true,
true,
);

this.checkLVal(specifier.local, true, undefined, "import specifier");
node.specifiers.push(this.finishNode(specifier, "ImportSpecifier"));
@ -1462,7 +1700,10 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
}

// parse the return type of an async arrow function - let foo = (async (): number => {});
parseAsyncArrowFromCallExpression(node: N.ArrowFunctionExpression, call: N.CallExpression): N.ArrowFunctionExpression {
parseAsyncArrowFromCallExpression(
node: N.ArrowFunctionExpression,
call: N.CallExpression,
): N.ArrowFunctionExpression {
if (this.match(tt.colon)) {
const oldNoAnonFunctionType = this.state.noAnonFunctionType;
this.state.noAnonFunctionType = true;
@ -1488,12 +1729,22 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
// parse the rest, make sure the rest is an arrow function, and go from
// there
// 3. This is neither. Just call the super method
parseMaybeAssign(noIn?: ?boolean, refShorthandDefaultPos?: ?Pos, afterLeftParse?: Function, refNeedsArrowPos?: ?Pos): N.Expression {
parseMaybeAssign(
noIn?: ?boolean,
refShorthandDefaultPos?: ?Pos,
afterLeftParse?: Function,
refNeedsArrowPos?: ?Pos,
): N.Expression {
let jsxError = null;
if (tt.jsxTagStart && this.match(tt.jsxTagStart)) {
const state = this.state.clone();
try {
return super.parseMaybeAssign(noIn, refShorthandDefaultPos, afterLeftParse, refNeedsArrowPos);
return super.parseMaybeAssign(
noIn,
refShorthandDefaultPos,
afterLeftParse,
refNeedsArrowPos,
);
} catch (err) {
if (err instanceof SyntaxError) {
this.state = state;
@ -1517,7 +1768,12 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
try {
typeParameters = this.flowParseTypeParameterDeclaration();

arrowExpression = super.parseMaybeAssign(noIn, refShorthandDefaultPos, afterLeftParse, refNeedsArrowPos);
arrowExpression = super.parseMaybeAssign(
noIn,
refShorthandDefaultPos,
afterLeftParse,
refNeedsArrowPos,
);
arrowExpression.typeParameters = typeParameters;
this.resetStartLocationFromNode(arrowExpression, typeParameters);
} catch (err) {
@ -1536,7 +1792,12 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
}
}

return super.parseMaybeAssign(noIn, refShorthandDefaultPos, afterLeftParse, refNeedsArrowPos);
return super.parseMaybeAssign(
noIn,
refShorthandDefaultPos,
afterLeftParse,
refNeedsArrowPos,
);
}

// handle return types for arrow functions
@ -1548,8 +1809,13 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
this.state.noAnonFunctionType = true;

const typeNode = this.startNode();

[
// $FlowFixMe (destructuring not supported yet)
[typeNode.typeAnnotation, node.predicate] = this.flowParseTypeAndPredicateInitialiser();
typeNode.typeAnnotation,
// $FlowFixMe (destructuring not supported yet)
node.predicate,
] = this.flowParseTypeAndPredicateInitialiser();

this.state.noAnonFunctionType = oldNoAnonFunctionType;

@ -1576,4 +1842,4 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
shouldParseArrow(): boolean {
return this.match(tt.colon) || super.shouldParseArrow();
}
};
};

@ -29,7 +29,7 @@ tt.jsxTagStart.updateContext = function() {

tt.jsxTagEnd.updateContext = function(prevType) {
const out = this.state.context.pop();
if (out === tc.j_oTag && prevType === tt.slash || out === tc.j_cTag) {
if ((out === tc.j_oTag && prevType === tt.slash) || out === tc.j_cTag) {
this.state.context.pop();
this.state.exprAllowed = this.curContext() === tc.j_expr;
} else {
@ -39,7 +39,9 @@ tt.jsxTagEnd.updateContext = function(prevType) {

// Transforms JSX element name to string.

function getQualifiedJSXName(object: N.JSXIdentifier | N.JSXNamespacedName | N.JSXMemberExpression): string {
function getQualifiedJSXName(
object: N.JSXIdentifier | N.JSXNamespacedName | N.JSXMemberExpression,
): string {
if (object.type === "JSXIdentifier") {
return object.name;
}
@ -49,14 +51,19 @@ function getQualifiedJSXName(object: N.JSXIdentifier | N.JSXNamespacedName | N.J
}

if (object.type === "JSXMemberExpression") {
return getQualifiedJSXName(object.object) + "." + getQualifiedJSXName(object.property);
return (
getQualifiedJSXName(object.object) +
"." +
getQualifiedJSXName(object.property)
);
}

// istanbul ignore next
throw new Error("Node had unexpected type: " + object.type);
}

export default (superClass: Class<Parser>): Class<Parser> => class extends superClass {
export default (superClass: Class<Parser>): Class<Parser> =>
class extends superClass {
// Reads inline JSX contents token.

jsxReadToken(): void {
@ -126,7 +133,8 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super

const ch = this.input.charCodeAt(this.state.pos);
if (ch === quote) break;
if (ch === 38) { // "&"
if (ch === 38) {
// "&"
out += this.input.slice(chunkStart, this.state.pos);
out += this.jsxReadEntity();
chunkStart = this.state.pos;
@ -176,7 +184,6 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
return entity;
}


// Read a JSX identifier (valid tag or attribute name).
//
// Optimized version since JSX identifiers can"t contain
@ -190,7 +197,10 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
do {
ch = this.input.charCodeAt(++this.state.pos);
} while (isIdentifierChar(ch) || ch === 45); // "-"
return this.finishToken(tt.jsxName, this.input.slice(start, this.state.pos));
return this.finishToken(
tt.jsxName,
this.input.slice(start, this.state.pos),
);
}

// Parse next token as JSX identifier
@ -246,7 +256,10 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
case tt.braceL:
node = this.jsxParseExpressionContainer();
if (node.expression.type === "JSXEmptyExpression") {
throw this.raise(node.start, "JSX attributes must only be assigned a non-empty expression");
throw this.raise(
node.start,
"JSX attributes must only be assigned a non-empty expression",
);
} else {
return node;
}
@ -256,7 +269,10 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
return this.parseExprAtom();

default:
throw this.raise(this.state.start, "JSX value should be either an expression or a quoted JSX text");
throw this.raise(
this.state.start,
"JSX value should be either an expression or a quoted JSX text",
);
}
}

@ -265,8 +281,16 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
// at the beginning of the next one (right brace).

jsxParseEmptyExpression(): N.JSXEmptyExpression {
const node = this.startNodeAt(this.state.lastTokEnd, this.state.lastTokEndLoc);
return this.finishNodeAt(node, "JSXEmptyExpression", this.state.start, this.state.startLoc);
const node = this.startNodeAt(
this.state.lastTokEnd,
this.state.lastTokEndLoc,
);
return this.finishNodeAt(
node,
"JSXEmptyExpression",
this.state.start,
this.state.startLoc,
);
}

// Parse JSX spread child
@ -283,7 +307,6 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super

// Parses JSX expression enclosed into curly brackets.


jsxParseExpressionContainer(): N.JSXExpressionContainer {
const node = this.startNode();
this.next();
@ -313,7 +336,10 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super

// Parses JSX opening tag starting after "<".

jsxParseOpeningElementAt(startPos: number, startLoc: Position): N.JSXOpeningElement {
jsxParseOpeningElementAt(
startPos: number,
startLoc: Position,
): N.JSXOpeningElement {
const node = this.startNodeAt(startPos, startLoc);
node.attributes = [];
node.name = this.jsxParseElementName();
@ -327,7 +353,10 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super

// Parses JSX closing tag starting after "</".

jsxParseClosingElementAt(startPos: number, startLoc: Position): N.JSXClosingElement {
jsxParseClosingElementAt(
startPos: number,
startLoc: Position,
): N.JSXClosingElement {
const node = this.startNodeAt(startPos, startLoc);
node.name = this.jsxParseElementName();
this.expect(tt.jsxTagEnd);
@ -347,10 +376,14 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
contents: for (;;) {
switch (this.state.type) {
case tt.jsxTagStart:
startPos = this.state.start; startLoc = this.state.startLoc;
startPos = this.state.start;
startLoc = this.state.startLoc;
this.next();
if (this.eat(tt.slash)) {
closingElement = this.jsxParseClosingElementAt(startPos, startLoc);
closingElement = this.jsxParseClosingElementAt(
startPos,
startLoc,
);
break contents;
}
children.push(this.jsxParseElementAt(startPos, startLoc));
@ -375,12 +408,17 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
}
}

if (
// $FlowIgnore
if (getQualifiedJSXName(closingElement.name) !== getQualifiedJSXName(openingElement.name)) {
getQualifiedJSXName(closingElement.name) !==
getQualifiedJSXName(openingElement.name)
) {
this.raise(
// $FlowIgnore
closingElement.start,
"Expected corresponding JSX closing tag for <" + getQualifiedJSXName(openingElement.name) + ">"
"Expected corresponding JSX closing tag for <" +
getQualifiedJSXName(openingElement.name) +
">",
);
}
}
@ -389,7 +427,10 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
node.closingElement = closingElement;
node.children = children;
if (this.match(tt.relational) && this.state.value === "<") {
this.raise(this.state.start, "Adjacent JSX elements must be wrapped in an enclosing tag");
this.raise(
this.state.start,
"Adjacent JSX elements must be wrapped in an enclosing tag",
);
}
return this.finishNode(node, "JSXElement");
}
@ -468,4 +509,4 @@ export default (superClass: Class<Parser>): Class<Parser> => class extends super
return super.updateContext(prevType);
}
}
};
};

@ -225,7 +225,7 @@ const entities: { [name: string]: string } = {
or: "\u2228",
cap: "\u2229",
cup: "\u222A",
"int": "\u222B",
int: "\u222B",
there4: "\u2234",
sim: "\u223C",
cong: "\u2245",
@ -253,6 +253,6 @@ const entities: { [name: string]: string } = {
spades: "\u2660",
clubs: "\u2663",
hearts: "\u2665",
diams: "\u2666"
diams: "\u2666",
};
export default entities;

File diff suppressed because it is too large
@ -27,27 +27,30 @@ export class TokContext {
}

export const types: {
[key: string]: TokContext;
[key: string]: TokContext,
} = {
braceStatement: new TokContext("{", false),
braceExpression: new TokContext("{", true),
templateQuasi: new TokContext("${", true),
parenStatement: new TokContext("(", false),
parenExpression: new TokContext("(", true),
template: new TokContext("`", true, true, (p) => p.readTmplToken()),
functionExpression: new TokContext("function", true)
template: new TokContext("`", true, true, p => p.readTmplToken()),
functionExpression: new TokContext("function", true),
};

// Token-specific context update code

tt.parenR.updateContext = tt.braceR.updateContext = function () {
tt.parenR.updateContext = tt.braceR.updateContext = function() {
if (this.state.context.length === 1) {
this.state.exprAllowed = true;
return;
}

const out = this.state.context.pop();
if (out === types.braceStatement && this.curContext() === types.functionExpression) {
if (
out === types.braceStatement &&
this.curContext() === types.functionExpression
) {
this.state.context.pop();
this.state.exprAllowed = false;
} else if (out === types.templateQuasi) {
@ -57,7 +60,7 @@ tt.parenR.updateContext = tt.braceR.updateContext = function () {
}
};

tt.name.updateContext = function (prevType) {
tt.name.updateContext = function(prevType) {
this.state.exprAllowed = false;

if (prevType === tt._let || prevType === tt._const || prevType === tt._var) {
@ -67,28 +70,35 @@ tt.name.updateContext = function (prevType) {
}
};

tt.braceL.updateContext = function (prevType) {
this.state.context.push(this.braceIsBlock(prevType) ? types.braceStatement : types.braceExpression);
tt.braceL.updateContext = function(prevType) {
this.state.context.push(
this.braceIsBlock(prevType) ? types.braceStatement : types.braceExpression,
);
this.state.exprAllowed = true;
};

tt.dollarBraceL.updateContext = function () {
tt.dollarBraceL.updateContext = function() {
this.state.context.push(types.templateQuasi);
this.state.exprAllowed = true;
};

tt.parenL.updateContext = function (prevType) {
const statementParens = prevType === tt._if || prevType === tt._for ||
prevType === tt._with || prevType === tt._while;
this.state.context.push(statementParens ? types.parenStatement : types.parenExpression);
tt.parenL.updateContext = function(prevType) {
const statementParens =
prevType === tt._if ||
prevType === tt._for ||
prevType === tt._with ||
prevType === tt._while;
this.state.context.push(
statementParens ? types.parenStatement : types.parenExpression,
);
this.state.exprAllowed = true;
};

tt.incDec.updateContext = function () {
tt.incDec.updateContext = function() {
// tokExprAllowed stays unchanged
};

tt._function.updateContext = function () {
tt._function.updateContext = function() {
if (this.curContext() !== types.braceStatement) {
this.state.context.push(types.functionExpression);
}
@ -96,7 +106,7 @@ tt._function.updateContext = function () {
this.state.exprAllowed = false;
};

tt.backQuote.updateContext = function () {
tt.backQuote.updateContext = function() {
if (this.curContext() === types.template) {
this.state.context.pop();
} else {

@ -4,15 +4,23 @@

import type { Options } from "../options";
import type { Position } from "../util/location";
import { isIdentifierStart, isIdentifierChar, isKeyword } from "../util/identifier";
import {
isIdentifierStart,
isIdentifierChar,
isKeyword,
} from "../util/identifier";
import { types as tt, keywords as keywordTypes, type TokenType } from "./types";
import { type TokContext, types as ct } from "./context";
import LocationParser from "../parser/location";
import { SourceLocation } from "../util/location";
import { lineBreak, lineBreakG, isNewLine, nonASCIIwhitespace } from "../util/whitespace";
import {
lineBreak,
lineBreakG,
isNewLine,
nonASCIIwhitespace,
} from "../util/whitespace";
import State from "./state";


// The following character codes are forbidden from being
// an immediate sibling of NumericLiteralSeparator _

@ -59,10 +67,13 @@ export class Token {

function codePointToString(code: number): string {
// UTF-16 Decoding
if (code <= 0xFFFF) {
if (code <= 0xffff) {
return String.fromCharCode(code);
} else {
return String.fromCharCode(((code - 0x10000) >> 10) + 0xD800, ((code - 0x10000) & 1023) + 0xDC00);
return String.fromCharCode(
((code - 0x10000) >> 10) + 0xd800,
((code - 0x10000) & 1023) + 0xdc00,
);
}
}

@ -75,7 +86,7 @@ export default class Tokenizer extends LocationParser {

constructor(options: Options, input: string) {
super();
this.state = new State;
this.state = new State();
this.state.init(options, input);
this.isLookahead = false;
}
@ -140,7 +151,8 @@ export default class Tokenizer extends LocationParser {
if (!this.match(tt.num) && !this.match(tt.string)) return;
this.state.pos = this.state.start;
while (this.state.pos < this.state.lineStart) {
this.state.lineStart = this.input.lastIndexOf("\n", this.state.lineStart - 2) + 1;
this.state.lineStart =
this.input.lastIndexOf("\n", this.state.lineStart - 2) + 1;
--this.state.curLine;
}
this.nextToken();
@ -188,13 +200,20 @@ export default class Tokenizer extends LocationParser {
return (code << 10) + next - 0x35fdc00;
}

pushComment(block: boolean, text: string, start: number, end: number, startLoc: Position, endLoc: Position): void {
pushComment(
block: boolean,
text: string,
start: number,
end: number,
startLoc: Position,
endLoc: Position,
): void {
const comment = {
type: block ? "CommentBlock" : "CommentLine",
value: text,
start: start,
end: end,
loc: new SourceLocation(startLoc, endLoc)
loc: new SourceLocation(startLoc, endLoc),
};

if (!this.isLookahead) {
@ -207,31 +226,54 @@ export default class Tokenizer extends LocationParser {
skipBlockComment(): void {
const startLoc = this.state.curPosition();
const start = this.state.pos;
const end = this.input.indexOf("*/", this.state.pos += 2);
const end = this.input.indexOf("*/", (this.state.pos += 2));
if (end === -1) this.raise(this.state.pos - 2, "Unterminated comment");

this.state.pos = end + 2;
lineBreakG.lastIndex = start;
let match;
while ((match = lineBreakG.exec(this.input)) && match.index < this.state.pos) {
while (
(match = lineBreakG.exec(this.input)) &&
match.index < this.state.pos
) {
++this.state.curLine;
this.state.lineStart = match.index + match[0].length;
}

this.pushComment(true, this.input.slice(start + 2, end), start, this.state.pos, startLoc, this.state.curPosition());
this.pushComment(
true,
this.input.slice(start + 2, end),
start,
this.state.pos,
startLoc,
this.state.curPosition(),
);
}

skipLineComment(startSkip: number): void {
const start = this.state.pos;
const startLoc = this.state.curPosition();
let ch = this.input.charCodeAt(this.state.pos += startSkip);
let ch = this.input.charCodeAt((this.state.pos += startSkip));
if (this.state.pos < this.input.length) {
while (ch !== 10 && ch !== 13 && ch !== 8232 && ch !== 8233 && ++this.state.pos < this.input.length) {
while (
ch !== 10 &&
ch !== 13 &&
ch !== 8232 &&
ch !== 8233 &&
++this.state.pos < this.input.length
) {
ch = this.input.charCodeAt(this.state.pos);
}
}

this.pushComment(false, this.input.slice(start + startSkip, this.state.pos), start, this.state.pos, startLoc, this.state.curPosition());
this.pushComment(
false,
this.input.slice(start + startSkip, this.state.pos),
start,
this.state.pos,
startLoc,
this.state.curPosition(),
);
}

// Called at the start of the parse and after every token. Skips
@ -241,7 +283,8 @@ export default class Tokenizer extends LocationParser {
loop: while (this.state.pos < this.input.length) {
const ch = this.input.charCodeAt(this.state.pos);
switch (ch) {
case 32: case 160: // ' '
case 32:
case 160: // ' '
++this.state.pos;
break;

@ -250,7 +293,9 @@ export default class Tokenizer extends LocationParser {
++this.state.pos;
}

case 10: case 8232: case 8233:
case 10:
case 8232:
case 8233:
++this.state.pos;
++this.state.curLine;
this.state.lineStart = this.state.pos;
@ -272,7 +317,10 @@ export default class Tokenizer extends LocationParser {
break;

default:
if (ch > 8 && ch < 14 || ch >= 5760 && nonASCIIwhitespace.test(String.fromCharCode(ch))) {
if (
(ch > 8 && ch < 14) ||
(ch >= 5760 && nonASCIIwhitespace.test(String.fromCharCode(ch)))
) {
++this.state.pos;
} else {
break loop;
@ -312,7 +360,8 @@ export default class Tokenizer extends LocationParser {
}

const next2 = this.input.charCodeAt(this.state.pos + 2);
if (next === 46 && next2 === 46) { // 46 = dot '.'
if (next === 46 && next2 === 46) {
// 46 = dot '.'
this.state.pos += 3;
return this.finishToken(tt.ellipsis);
} else {
@ -321,7 +370,8 @@ export default class Tokenizer extends LocationParser {
}
}

readToken_slash(): void { // '/'
readToken_slash(): void {
// '/'
if (this.state.exprAllowed) {
++this.state.pos;
return this.readRegexp();
@ -335,12 +385,14 @@ export default class Tokenizer extends LocationParser {
}
}

readToken_mult_modulo(code: number): void { // '%*'
readToken_mult_modulo(code: number): void {
// '%*'
let type = code === 42 ? tt.star : tt.modulo;
let width = 1;
let next = this.input.charCodeAt(this.state.pos + 1);

if (next === 42) { // '*'
if (next === 42) {
// '*'
width++;
next = this.input.charCodeAt(this.state.pos + 2);
type = tt.exponent;
@ -354,15 +406,19 @@ export default class Tokenizer extends LocationParser {
return this.finishOp(type, width);
}

readToken_pipe_amp(code: number): void { // '|&'
readToken_pipe_amp(code: number): void {
// '|&'
const next = this.input.charCodeAt(this.state.pos + 1);
if (next === code) return this.finishOp(code === 124 ? tt.logicalOR : tt.logicalAND, 2);
if (next === code)
return this.finishOp(code === 124 ? tt.logicalOR : tt.logicalAND, 2);
if (next === 61) return this.finishOp(tt.assign, 2);
if (code === 124 && next === 125 && this.hasPlugin("flow")) return this.finishOp(tt.braceBarR, 2);
if (code === 124 && next === 125 && this.hasPlugin("flow"))
return this.finishOp(tt.braceBarR, 2);
return this.finishOp(code === 124 ? tt.bitwiseOR : tt.bitwiseAND, 1);
}

readToken_caret(): void { // '^'
readToken_caret(): void {
// '^'
const next = this.input.charCodeAt(this.state.pos + 1);
if (next === 61) {
return this.finishOp(tt.assign, 2);
@ -371,11 +427,16 @@ export default class Tokenizer extends LocationParser {
}
}

readToken_plus_min(code: number): void { // '+-'
readToken_plus_min(code: number): void {
// '+-'
const next = this.input.charCodeAt(this.state.pos + 1);

if (next === code) {
if (next === 45 && this.input.charCodeAt(this.state.pos + 2) === 62 && lineBreak.test(this.input.slice(this.state.lastTokEnd, this.state.pos))) {
if (
next === 45 &&
this.input.charCodeAt(this.state.pos + 2) === 62 &&
lineBreak.test(this.input.slice(this.state.lastTokEnd, this.state.pos))
) {
// A `-->` line comment
this.skipLineComment(3);
this.skipSpace();
@ -391,17 +452,25 @@ export default class Tokenizer extends LocationParser {
}
}

readToken_lt_gt(code: number): void { // '<>'
readToken_lt_gt(code: number): void {
// '<>'
const next = this.input.charCodeAt(this.state.pos + 1);
let size = 1;

if (next === code) {
size = code === 62 && this.input.charCodeAt(this.state.pos + 2) === 62 ? 3 : 2;
if (this.input.charCodeAt(this.state.pos + size) === 61) return this.finishOp(tt.assign, size + 1);
size =
code === 62 && this.input.charCodeAt(this.state.pos + 2) === 62 ? 3 : 2;
if (this.input.charCodeAt(this.state.pos + size) === 61)
return this.finishOp(tt.assign, size + 1);
return this.finishOp(tt.bitShift, size);
}

if (next === 33 && code === 60 && this.input.charCodeAt(this.state.pos + 2) === 45 && this.input.charCodeAt(this.state.pos + 3) === 45) {
if (
next === 33 &&
code === 60 &&
this.input.charCodeAt(this.state.pos + 2) === 45 &&
this.input.charCodeAt(this.state.pos + 3) === 45
) {
if (this.inModule) this.unexpected();
// `<!--`, an XML-style comment that should be interpreted as a line comment
this.skipLineComment(4);
@ -417,20 +486,28 @@ export default class Tokenizer extends LocationParser {
return this.finishOp(tt.relational, size);
}

readToken_eq_excl(code: number): void { // '=!'
readToken_eq_excl(code: number): void {
// '=!'
const next = this.input.charCodeAt(this.state.pos + 1);
if (next === 61) return this.finishOp(tt.equality, this.input.charCodeAt(this.state.pos + 2) === 61 ? 3 : 2);
if (code === 61 && next === 62) { // '=>'
if (next === 61)
return this.finishOp(
tt.equality,
this.input.charCodeAt(this.state.pos + 2) === 61 ? 3 : 2,
);
if (code === 61 && next === 62) {
// '=>'
this.state.pos += 2;
return this.finishToken(tt.arrow);
}
return this.finishOp(code === 61 ? tt.eq : tt.bang, 1);
}

readToken_question() { // '?'
readToken_question() {
// '?'
const next = this.input.charCodeAt(this.state.pos + 1);
const next2 = this.input.charCodeAt(this.state.pos + 2);
if (next === 46 && !(next2 >= 48 && next2 <= 57)) { // '.' not followed by a number
if (next === 46 && !(next2 >= 48 && next2 <= 57)) {
// '.' not followed by a number
this.state.pos += 2;
return this.finishToken(tt.questionDot);
} else {
@ -441,12 +518,18 @@ export default class Tokenizer extends LocationParser {

getTokenFromCode(code: number): void {
switch (code) {

case 35: // '#'
if (this.hasPlugin("classPrivateProperties") && this.state.classLevel > 0) {
++this.state.pos; return this.finishToken(tt.hash);
if (
this.hasPlugin("classPrivateProperties") &&
this.state.classLevel > 0
) {
++this.state.pos;
return this.finishToken(tt.hash);
} else {
this.raise(this.state.pos, `Unexpected character '${codePointToString(code)}'`);
this.raise(
this.state.pos,
`Unexpected character '${codePointToString(code)}'`,
);
}

// The interpretation of a dot depends on whether it is followed
@ -456,15 +539,30 @@ export default class Tokenizer extends LocationParser {
return this.readToken_dot();

// Punctuation tokens.
case 40: ++this.state.pos; return this.finishToken(tt.parenL);
case 41: ++this.state.pos; return this.finishToken(tt.parenR);
case 59: ++this.state.pos; return this.finishToken(tt.semi);
case 44: ++this.state.pos; return this.finishToken(tt.comma);
case 91: ++this.state.pos; return this.finishToken(tt.bracketL);
case 93: ++this.state.pos; return this.finishToken(tt.bracketR);
case 40:
++this.state.pos;
return this.finishToken(tt.parenL);
case 41:
++this.state.pos;
return this.finishToken(tt.parenR);
case 59:
++this.state.pos;
return this.finishToken(tt.semi);
case 44:
++this.state.pos;
return this.finishToken(tt.comma);
case 91:
++this.state.pos;
return this.finishToken(tt.bracketL);
case 93:
++this.state.pos;
return this.finishToken(tt.bracketR);

case 123:
if (this.hasPlugin("flow") && this.input.charCodeAt(this.state.pos + 1) === 124) {
if (
this.hasPlugin("flow") &&
this.input.charCodeAt(this.state.pos + 1) === 124
) {
return this.finishOp(tt.braceBarL, 2);
} else {
++this.state.pos;
@ -472,18 +570,25 @@ export default class Tokenizer extends LocationParser {
}

case 125:
++this.state.pos; return this.finishToken(tt.braceR);
++this.state.pos;
return this.finishToken(tt.braceR);

case 58:
if (this.hasPlugin("functionBind") && this.input.charCodeAt(this.state.pos + 1) === 58) {
if (
this.hasPlugin("functionBind") &&
this.input.charCodeAt(this.state.pos + 1) === 58
) {
return this.finishOp(tt.doubleColon, 2);
} else {
++this.state.pos;
return this.finishToken(tt.colon);
}

case 63: return this.readToken_question();
case 64: ++this.state.pos; return this.finishToken(tt.at);
case 63:
return this.readToken_question();
case 64:
++this.state.pos;
return this.finishToken(tt.at);

case 96: // '`'
++this.state.pos;
@ -496,11 +601,20 @@ export default class Tokenizer extends LocationParser {
if (next === 98 || next === 66) return this.readRadixNumber(2); // '0b', '0B' - binary number
// Anything else beginning with a digit is an integer, octal
// number, or float.
case 49: case 50: case 51: case 52: case 53: case 54: case 55: case 56: case 57: // 1-9
case 49:
case 50:
case 51:
case 52:
case 53:
case 54:
case 55:
case 56:
case 57: // 1-9
return this.readNumber(false);

// Quotes produce strings.
case 34: case 39: // '"', "'"
case 34:
case 39: // '"', "'"
return this.readString(code);

// Operators are parsed inline in tiny state machines. '=' (61) is
@ -511,29 +625,37 @@ export default class Tokenizer extends LocationParser {
case 47: // '/'
return this.readToken_slash();

case 37: case 42: // '%*'
case 37:
case 42: // '%*'
return this.readToken_mult_modulo(code);

case 124: case 38: // '|&'
case 124:
case 38: // '|&'
return this.readToken_pipe_amp(code);

case 94: // '^'
return this.readToken_caret();

case 43: case 45: // '+-'
case 43:
case 45: // '+-'
return this.readToken_plus_min(code);

case 60: case 62: // '<>'
case 60:
case 62: // '<>'
return this.readToken_lt_gt(code);

case 61: case 33: // '=!'
case 61:
case 33: // '=!'
return this.readToken_eq_excl(code);

case 126: // '~'
return this.finishOp(tt.tilde, 1);
}

this.raise(this.state.pos, `Unexpected character '${codePointToString(code)}'`);
this.raise(
this.state.pos,
`Unexpected character '${codePointToString(code)}'`,
);
}

finishOp(type: TokenType, size: number): void {
|
||||
@ -546,7 +668,8 @@ export default class Tokenizer extends LocationParser {
|
||||
const start = this.state.pos;
|
||||
let escaped, inClass;
|
||||
for (;;) {
|
||||
if (this.state.pos >= this.input.length) this.raise(start, "Unterminated regular expression");
|
||||
if (this.state.pos >= this.input.length)
|
||||
this.raise(start, "Unterminated regular expression");
|
||||
const ch = this.input.charAt(this.state.pos);
|
||||
if (lineBreak.test(ch)) {
|
||||
this.raise(start, "Unterminated regular expression");
|
||||
@ -572,11 +695,12 @@ export default class Tokenizer extends LocationParser {
|
||||
const mods = this.readWord1();
|
||||
if (mods) {
|
||||
const validFlags = /^[gmsiyu]*$/;
|
||||
if (!validFlags.test(mods)) this.raise(start, "Invalid regular expression flag");
|
||||
if (!validFlags.test(mods))
|
||||
this.raise(start, "Invalid regular expression flag");
|
||||
}
|
||||
return this.finishToken(tt.regexp, {
|
||||
pattern: content,
|
||||
flags: mods
|
||||
flags: mods,
|
||||
});
|
||||
}
|
||||
|
||||
@ -586,9 +710,10 @@ export default class Tokenizer extends LocationParser {
|
||||
|
||||
readInt(radix: number, len?: number): number | null {
|
||||
const start = this.state.pos;
|
||||
const forbiddenSiblings = radix === 16 ?
|
||||
forbiddenNumericSeparatorSiblings.hex :
|
||||
forbiddenNumericSeparatorSiblings.decBinOct;
|
||||
const forbiddenSiblings =
|
||||
radix === 16
|
||||
? forbiddenNumericSeparatorSiblings.hex
|
||||
: forbiddenNumericSeparatorSiblings.decBinOct;
|
||||
let total = 0;
|
||||
|
||||
for (let i = 0, e = len == null ? Infinity : len; i < e; ++i) {
|
||||
@ -599,9 +724,11 @@ export default class Tokenizer extends LocationParser {
|
||||
const prev = this.input.charCodeAt(this.state.pos - 1);
|
||||
const next = this.input.charCodeAt(this.state.pos + 1);
|
||||
if (code === 95) {
|
||||
if ((forbiddenSiblings.indexOf(prev) > -1) ||
|
||||
(forbiddenSiblings.indexOf(next) > -1) ||
|
||||
Number.isNaN(next)) {
|
||||
if (
|
||||
forbiddenSiblings.indexOf(prev) > -1 ||
|
||||
forbiddenSiblings.indexOf(next) > -1 ||
|
||||
Number.isNaN(next)
|
||||
) {
|
||||
this.raise(this.state.pos, "Invalid NumericLiteralSeparator");
|
||||
}
|
||||
|
||||
@ -624,7 +751,11 @@ export default class Tokenizer extends LocationParser {
|
||||
++this.state.pos;
|
||||
total = total * radix + val;
|
||||
}
|
||||
if (this.state.pos === start || len != null && this.state.pos - start !== len) return null;
|
||||
if (
|
||||
this.state.pos === start ||
|
||||
(len != null && this.state.pos - start !== len)
|
||||
)
|
||||
return null;
|
||||
|
||||
return total;
|
||||
}
|
||||
@ -635,16 +766,19 @@ export default class Tokenizer extends LocationParser {

this.state.pos += 2; // 0x
const val = this.readInt(radix);
if (val == null) this.raise(this.state.start + 2, "Expected number in radix " + radix);
if (val == null)
this.raise(this.state.start + 2, "Expected number in radix " + radix);

if (this.hasPlugin("bigInt")) {
if (this.input.charCodeAt(this.state.pos) === 0x6E) { // 'n'
if (this.input.charCodeAt(this.state.pos) === 0x6e) {
// 'n'
++this.state.pos;
isBigInt = true;
}
}

if (isIdentifierStart(this.fullCharCodeAtPos())) this.raise(this.state.pos, "Identifier directly after number");
if (isIdentifierStart(this.fullCharCodeAtPos()))
this.raise(this.state.pos, "Identifier directly after number");

if (isBigInt) {
const str = this.input.slice(start, this.state.pos).replace(/[_n]/g, "");
@ -662,27 +796,31 @@ export default class Tokenizer extends LocationParser {
let isFloat = false;
let isBigInt = false;

if (!startsWithDot && this.readInt(10) === null) this.raise(start, "Invalid number");
if (!startsWithDot && this.readInt(10) === null)
this.raise(start, "Invalid number");
if (octal && this.state.pos == start + 1) octal = false; // number === 0

let next = this.input.charCodeAt(this.state.pos);
if (next === 0x2E && !octal) { // '.'
if (next === 0x2e && !octal) {
// '.'
++this.state.pos;
this.readInt(10);
isFloat = true;
next = this.input.charCodeAt(this.state.pos);
}

if ((next === 0x45 || next === 0x65) && !octal) { // 'Ee'
if ((next === 0x45 || next === 0x65) && !octal) {
// 'Ee'
next = this.input.charCodeAt(++this.state.pos);
if (next === 0x2B || next === 0x2D) ++this.state.pos; // '+-'
if (next === 0x2b || next === 0x2d) ++this.state.pos; // '+-'
if (this.readInt(10) === null) this.raise(start, "Invalid number");
isFloat = true;
next = this.input.charCodeAt(this.state.pos);
}

if (this.hasPlugin("bigInt")) {
if (next === 0x6E) { // 'n'
if (next === 0x6e) {
// 'n'
// disallow floats and legacy octal syntax, new style octal ("0o") is handled in this.readRadixNumber
if (isFloat || octal) this.raise(start, "Invalid BigIntLiteral");
++this.state.pos;
@ -690,7 +828,8 @@ export default class Tokenizer extends LocationParser {
}
}

if (isIdentifierStart(this.fullCharCodeAtPos())) this.raise(this.state.pos, "Identifier directly after number");
if (isIdentifierStart(this.fullCharCodeAtPos()))
this.raise(this.state.pos, "Identifier directly after number");

// remove "_" for numeric literal separator, and "n" for BigInts
const str = this.input.slice(start, this.state.pos).replace(/[_n]/g, "");
@ -720,14 +859,18 @@ export default class Tokenizer extends LocationParser {
const ch = this.input.charCodeAt(this.state.pos);
let code;

if (ch === 123) { // '{'
if (ch === 123) {
// '{'
const codePos = ++this.state.pos;
code = this.readHexChar(this.input.indexOf("}", this.state.pos) - this.state.pos, throwOnInvalid);
code = this.readHexChar(
this.input.indexOf("}", this.state.pos) - this.state.pos,
throwOnInvalid,
);
++this.state.pos;
if (code === null) {
// $FlowFixMe (is this always non-null?)
--this.state.invalidTemplateEscapePosition; // to point to the '\'' instead of the 'u'
} else if (code > 0x10FFFF) {
} else if (code > 0x10ffff) {
if (throwOnInvalid) {
this.raise(codePos, "Code point out of bounds");
} else {
@ -742,18 +885,22 @@ export default class Tokenizer extends LocationParser {
}

readString(quote: number): void {
let out = "", chunkStart = ++this.state.pos;
let out = "",
chunkStart = ++this.state.pos;
for (;;) {
if (this.state.pos >= this.input.length) this.raise(this.state.start, "Unterminated string constant");
if (this.state.pos >= this.input.length)
this.raise(this.state.start, "Unterminated string constant");
const ch = this.input.charCodeAt(this.state.pos);
if (ch === quote) break;
if (ch === 92) { // '\'
if (ch === 92) {
// '\'
out += this.input.slice(chunkStart, this.state.pos);
// $FlowFixMe
out += this.readEscapedChar(false);
chunkStart = this.state.pos;
} else {
if (isNewLine(ch)) this.raise(this.state.start, "Unterminated string constant");
if (isNewLine(ch))
this.raise(this.state.start, "Unterminated string constant");
++this.state.pos;
}
}
@ -764,11 +911,18 @@ export default class Tokenizer extends LocationParser {
// Reads template string tokens.

readTmplToken(): void {
let out = "", chunkStart = this.state.pos, containsInvalid = false;
let out = "",
chunkStart = this.state.pos,
containsInvalid = false;
for (;;) {
if (this.state.pos >= this.input.length) this.raise(this.state.start, "Unterminated template");
if (this.state.pos >= this.input.length)
this.raise(this.state.start, "Unterminated template");
const ch = this.input.charCodeAt(this.state.pos);
if (ch === 96 || ch === 36 && this.input.charCodeAt(this.state.pos + 1) === 123) { // '`', '${'
if (
ch === 96 ||
(ch === 36 && this.input.charCodeAt(this.state.pos + 1) === 123)
) {
// '`', '${'
if (this.state.pos === this.state.start && this.match(tt.template)) {
if (ch === 36) {
this.state.pos += 2;
@ -781,7 +935,8 @@ export default class Tokenizer extends LocationParser {
out += this.input.slice(chunkStart, this.state.pos);
return this.finishToken(tt.template, containsInvalid ? null : out);
}
if (ch === 92) { // '\'
if (ch === 92) {
// '\'
out += this.input.slice(chunkStart, this.state.pos);
const escaped = this.readEscapedChar(true);
if (escaped === null) {
@ -819,21 +974,30 @@ export default class Tokenizer extends LocationParser {
const ch = this.input.charCodeAt(++this.state.pos);
++this.state.pos;
switch (ch) {
case 110: return "\n"; // 'n' -> '\n'
case 114: return "\r"; // 'r' -> '\r'
case 120: { // 'x'
case 110:
return "\n"; // 'n' -> '\n'
case 114:
return "\r"; // 'r' -> '\r'
case 120: {
// 'x'
const code = this.readHexChar(2, throwOnInvalid);
return code === null ? null : String.fromCharCode(code);
}
case 117: { // 'u'
case 117: {
// 'u'
const code = this.readCodePoint(throwOnInvalid);
return code === null ? null : codePointToString(code);
}
case 116: return "\t"; // 't' -> '\t'
case 98: return "\b"; // 'b' -> '\b'
case 118: return "\u000b"; // 'v' -> '\u000b'
case 102: return "\f"; // 'f' -> '\f'
case 13: if (this.input.charCodeAt(this.state.pos) === 10) ++this.state.pos; // '\r\n'
case 116:
return "\t"; // 't' -> '\t'
case 98:
return "\b"; // 'b' -> '\b'
case 118:
return "\u000b"; // 'v' -> '\u000b'
case 102:
return "\f"; // 'f' -> '\f'
case 13:
if (this.input.charCodeAt(this.state.pos) === 10) ++this.state.pos; // '\r\n'
case 10: // ' \n'
this.state.lineStart = this.state.pos;
++this.state.curLine;
@ -842,7 +1006,9 @@ export default class Tokenizer extends LocationParser {
if (ch >= 48 && ch <= 55) {
const codePos = this.state.pos - 1;
// $FlowFixMe
let octalStr = this.input.substr(this.state.pos - 1, 3).match(/^[0-7]+/)[0];
let octalStr = this.input
.substr(this.state.pos - 1, 3)
.match(/^[0-7]+/)[0];
let octal = parseInt(octalStr, 8);
if (octal > 255) {
octalStr = octalStr.slice(0, -1);
@ -892,19 +1058,26 @@ export default class Tokenizer extends LocationParser {

readWord1(): string {
this.state.containsEsc = false;
let word = "", first = true, chunkStart = this.state.pos;
let word = "",
first = true,
chunkStart = this.state.pos;
while (this.state.pos < this.input.length) {
const ch = this.fullCharCodeAtPos();
if (isIdentifierChar(ch)) {
this.state.pos += ch <= 0xffff ? 1 : 2;
} else if (ch === 92) { // "\"
} else if (ch === 92) {
// "\"
this.state.containsEsc = true;

word += this.input.slice(chunkStart, this.state.pos);
const escStart = this.state.pos;

if (this.input.charCodeAt(++this.state.pos) !== 117) { // "u"
this.raise(this.state.pos, "Expecting Unicode escape sequence \\uXXXX");
if (this.input.charCodeAt(++this.state.pos) !== 117) {
// "u"
this.raise(
this.state.pos,
"Expecting Unicode escape sequence \\uXXXX",
);
}

++this.state.pos;
@ -946,10 +1119,17 @@ export default class Tokenizer extends LocationParser {
}

if (prevType === tt._return) {
return lineBreak.test(this.input.slice(this.state.lastTokEnd, this.state.start));
return lineBreak.test(
this.input.slice(this.state.lastTokEnd, this.state.start),
);
}

if (prevType === tt._else || prevType === tt.semi || prevType === tt.eof || prevType === tt.parenR) {
if (
prevType === tt._else ||
prevType === tt.semi ||
prevType === tt.eof ||
prevType === tt.parenR
) {
return true;
}

@ -971,7 +1151,7 @@ export default class Tokenizer extends LocationParser {

if (type.keyword && (prevType === tt.dot || prevType === tt.questionDot)) {
this.state.exprAllowed = false;
} else if (update = type.updateContext) {
} else if ((update = type.updateContext)) {
update.call(this, prevType);
} else {
this.state.exprAllowed = type.beforeExpr;

@ -10,21 +10,15 @@ import { types as tt, type TokenType } from "./types";

export default class State {
init(options: Options, input: string): void {
this.strict = options.strictMode === false ? false : options.sourceType === "module";
this.strict =
options.strictMode === false ? false : options.sourceType === "module";

this.input = input;

this.potentialArrowAt = -1;

this.inMethod =
this.inFunction =
this.inGenerator =
this.inAsync =
this.inPropertyName =
this.inType =
this.inClassProperty =
this.noAnonFunctionType =
false;
// eslint-disable-next-line max-len
this.inMethod = this.inFunction = this.inGenerator = this.inAsync = this.inPropertyName = this.inType = this.inClassProperty = this.noAnonFunctionType = false;

this.classLevel = 0;

@ -105,9 +99,9 @@ export default class State {
trailingComments: Array<N.Comment>;
leadingComments: Array<N.Comment>;
commentStack: Array<{
start: number;
leadingComments: ?Array<N.Comment>;
trailingComments: ?Array<N.Comment>;
start: number,
leadingComments: ?Array<N.Comment>,
trailingComments: ?Array<N.Comment>,
}>;
commentPreviousNode: N.Node;

@ -164,7 +158,7 @@ export default class State {
}

clone(skipArrays?: boolean): State {
const state = new State;
const state = new State();
for (const key in this) {
// $FlowIgnore
let val = this[key];

@ -26,16 +26,16 @@ const prefix = true;
const postfix = true;

type TokenOptions = {
keyword?: string;
keyword?: string,

beforeExpr?: boolean;
startsExpr?: boolean;
rightAssociative?: boolean;
isLoop?: boolean;
isAssign?: boolean;
prefix?: boolean;
postfix?: boolean;
binop?: ?number;
beforeExpr?: boolean,
startsExpr?: boolean,
rightAssociative?: boolean,
isLoop?: boolean,
isAssign?: boolean,
prefix?: boolean,
postfix?: boolean,
binop?: ?number,
};

export class TokenType {
@ -49,7 +49,7 @@ export class TokenType {
prefix: boolean;
postfix: boolean;
binop: ?number;
updateContext: ?((prevType: TokenType) => void);
updateContext: ?(prevType: TokenType) => void;

constructor(label: string, conf: TokenOptions = {}) {
this.label = label;
@ -143,50 +143,54 @@ export const types: { [name: string]: TokenType } = {
modulo: new BinopTokenType("%", 10),
star: new BinopTokenType("*", 10),
slash: new BinopTokenType("/", 10),
exponent: new TokenType("**", { beforeExpr, binop: 11, rightAssociative: true })
exponent: new TokenType("**", {
beforeExpr,
binop: 11,
rightAssociative: true,
}),
};

export const keywords = {
"break": new KeywordTokenType("break"),
"case": new KeywordTokenType("case", { beforeExpr }),
"catch": new KeywordTokenType("catch"),
"continue": new KeywordTokenType("continue"),
"debugger": new KeywordTokenType("debugger"),
"default": new KeywordTokenType("default", { beforeExpr }),
"do": new KeywordTokenType("do", { isLoop, beforeExpr }),
"else": new KeywordTokenType("else", { beforeExpr }),
"finally": new KeywordTokenType("finally"),
"for": new KeywordTokenType("for", { isLoop }),
"function": new KeywordTokenType("function", { startsExpr }),
"if": new KeywordTokenType("if"),
"return": new KeywordTokenType("return", { beforeExpr }),
"switch": new KeywordTokenType("switch"),
"throw": new KeywordTokenType("throw", { beforeExpr }),
"try": new KeywordTokenType("try"),
"var": new KeywordTokenType("var"),
"let": new KeywordTokenType("let"),
"const": new KeywordTokenType("const"),
"while": new KeywordTokenType("while", { isLoop }),
"with": new KeywordTokenType("with"),
"new": new KeywordTokenType("new", { beforeExpr, startsExpr }),
"this": new KeywordTokenType("this", { startsExpr }),
"super": new KeywordTokenType("super", { startsExpr }),
"class": new KeywordTokenType("class"),
"extends": new KeywordTokenType("extends", { beforeExpr }),
"export": new KeywordTokenType("export"),
"import": new KeywordTokenType("import", { startsExpr }),
"yield": new KeywordTokenType("yield", { beforeExpr, startsExpr }),
"null": new KeywordTokenType("null", { startsExpr }),
"true": new KeywordTokenType("true", { startsExpr }),
"false": new KeywordTokenType("false", { startsExpr }),
"in": new KeywordTokenType("in", { beforeExpr, binop: 7 }),
"instanceof": new KeywordTokenType("instanceof", { beforeExpr, binop: 7 }),
"typeof": new KeywordTokenType("typeof", { beforeExpr, prefix, startsExpr }),
"void": new KeywordTokenType("void", { beforeExpr, prefix, startsExpr }),
"delete": new KeywordTokenType("delete", { beforeExpr, prefix, startsExpr })
break: new KeywordTokenType("break"),
case: new KeywordTokenType("case", { beforeExpr }),
catch: new KeywordTokenType("catch"),
continue: new KeywordTokenType("continue"),
debugger: new KeywordTokenType("debugger"),
default: new KeywordTokenType("default", { beforeExpr }),
do: new KeywordTokenType("do", { isLoop, beforeExpr }),
else: new KeywordTokenType("else", { beforeExpr }),
finally: new KeywordTokenType("finally"),
for: new KeywordTokenType("for", { isLoop }),
function: new KeywordTokenType("function", { startsExpr }),
if: new KeywordTokenType("if"),
return: new KeywordTokenType("return", { beforeExpr }),
switch: new KeywordTokenType("switch"),
throw: new KeywordTokenType("throw", { beforeExpr }),
try: new KeywordTokenType("try"),
var: new KeywordTokenType("var"),
let: new KeywordTokenType("let"),
const: new KeywordTokenType("const"),
while: new KeywordTokenType("while", { isLoop }),
with: new KeywordTokenType("with"),
new: new KeywordTokenType("new", { beforeExpr, startsExpr }),
this: new KeywordTokenType("this", { startsExpr }),
super: new KeywordTokenType("super", { startsExpr }),
class: new KeywordTokenType("class"),
extends: new KeywordTokenType("extends", { beforeExpr }),
export: new KeywordTokenType("export"),
import: new KeywordTokenType("import", { startsExpr }),
yield: new KeywordTokenType("yield", { beforeExpr, startsExpr }),
null: new KeywordTokenType("null", { startsExpr }),
true: new KeywordTokenType("true", { startsExpr }),
false: new KeywordTokenType("false", { startsExpr }),
in: new KeywordTokenType("in", { beforeExpr, binop: 7 }),
instanceof: new KeywordTokenType("instanceof", { beforeExpr, binop: 7 }),
typeof: new KeywordTokenType("typeof", { beforeExpr, prefix, startsExpr }),
void: new KeywordTokenType("void", { beforeExpr, prefix, startsExpr }),
delete: new KeywordTokenType("delete", { beforeExpr, prefix, startsExpr }),
};

// Map keyword names to token types.
Object.keys(keywords).forEach((name) => {
Object.keys(keywords).forEach(name => {
types["_" + name] = keywords[name];
});

src/types.js (970 lines changed): file diff suppressed because it is too large.
@ -13,7 +13,7 @@
function makePredicate(words: string): (str: string) => boolean {
const wordsArr = words.split(" ");
return function (str) {
return function(str) {
return wordsArr.indexOf(str) >= 0;
};
}
@ -22,13 +22,17 @@ function makePredicate(words: string): (str: string) => boolean {

export const reservedWords = {
"6": makePredicate("enum await"),
strict: makePredicate("implements interface let package private protected public static yield"),
strictBind: makePredicate("eval arguments")
strict: makePredicate(
"implements interface let package private protected public static yield",
),
strictBind: makePredicate("eval arguments"),
};

// And the keywords

export const isKeyword = makePredicate("break case catch continue debugger default do else finally for function if return switch throw try var while with null true false instanceof typeof void delete new in this let const class extends export import yield super");
export const isKeyword = makePredicate(
"break case catch continue debugger default do else finally for function if return switch throw try var while with null true false instanceof typeof void delete new in this let const class extends export import yield super",
);

// ## Character categories

@ -38,11 +42,17 @@ export const isKeyword = makePredicate("break case catch continue debugger defau
// code point above 128.
// Generated by `bin/generate-identifier-regex.js`.

let nonASCIIidentifierStartChars = "\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0561-\u0587\u05d0-\u05ea\u05f0-\u05f2\u0620-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0af9\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0\u0ce1\u0cf1\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u0e46\u0e81\u0e82\u0e84\u0e87\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa\u0eab\u0ead-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1877\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309b-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312d\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fd5\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7ae\ua7b0-\ua7b7\ua7f7-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua
822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc";
let nonASCIIidentifierChars = "\u200c\u200d\xb7\u0300-\u036f\u0387\u0483-\u0487\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u0669\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7\u06e8\u06ea-\u06ed\u06f0-\u06f9\u0711\u0730-\u074a\u07a6-\u07b0\u07c0-\u07c9\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08d4-\u08e1\u08e3-\u0903\u093a-\u093c\u093e-\u094f\u0951-\u0957\u0962\u0963\u0966-\u096f\u0981-\u0983\u09bc\u09be-\u09c4\u09c7\u09c8\u09cb-\u09cd\u09d7\u09e2\u09e3\u09e6-\u09ef\u0a01-\u0a03\u0a3c\u0a3e-\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a66-\u0a71\u0a75\u0a81-\u0a83\u0abc\u0abe-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ae2\u0ae3\u0ae6-\u0aef\u0b01-\u0b03\u0b3c\u0b3e-\u0b44\u0b47\u0b48\u0b4b-\u0b4d\u0b56\u0b57\u0b62\u0b63\u0b66-\u0b6f\u0b82\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd7\u0be6-\u0bef\u0c00-\u0c03\u0c3e-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0c66-\u0c6f\u0c81-\u0c83\u0cbc\u0cbe-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0ce6-\u0cef\u0d01-\u0d03\u0d3e-\u0d44\u0d46-\u0d48\u0d4a-\u0d4d\u0d57\u0d62\u0d63\u0d66-\u0d6f\u0d82\u0d83\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2\u0df3\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0e50-\u0e59\u0eb1\u0eb4-\u0eb9\u0ebb\u0ebc\u0ec8-\u0ecd\u0ed0-\u0ed9\u0f18\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e\u0f3f\u0f71-\u0f84\u0f86\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102b-\u103e\u1040-\u1049\u1056-\u1059\u105e-\u1060\u1062-\u1064\u1067-\u106d\u1071-\u1074\u1082-\u108d\u108f-\u109d\u135d-\u135f\u1369-\u1371\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17b4-\u17d3\u17dd\u17e0-\u17e9\u180b-\u180d\u1810-\u1819\u18a9\u1920-\u192b\u1930-\u193b\u1946-\u194f\u19d0-\u19da\u1a17-\u1a1b\u1a55-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1ab0-\u1abd\u1b00-\u1b04\u1b34-\u1b44\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1b82\u1ba1-\u1bad\u1bb0-\u1bb9\u1be6-\u1bf3\u1c24-\u1c37\u1c40-\u1c49\u1c50-\u1c59\u1cd0-\u1cd2\u1cd4-\u1ce8\u1ced\u1cf2-\u1cf4\u1cf8\u1cf9\u1dc0-\u1df5\u1dfb-\u1dff\u203f\u2040\u2054\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua620-\ua629\ua66f\ua674-\ua67d\ua69e\ua69f\ua6f0\ua6f1\ua802\ua806\ua80b\ua823-\ua827\ua880\ua881\ua8b4-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f1\ua900-\ua909\ua926-\ua92d\ua947-\ua953\ua980-\ua983\ua9b3-\ua9c0\ua9d0-\ua9d9\ua9e5\ua9f0-\ua9f9\uaa29-\uaa36\uaa43\uaa4c\uaa4d\uaa50-\uaa59\uaa7b-\uaa7d\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uaaeb-\uaaef\uaaf5\uaaf6\uabe3-\uabea\uabec\uabed\uabf0-\uabf9\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\ufe33\ufe34\ufe4d-\ufe4f\uff10-\uff19\uff3f";
let nonASCIIidentifierStartChars =
"\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0561-\u0587\u05d0-\u05ea\u05f0-\u05f2\u0620-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u08a0-\u08b4\u08b6-\u08bd\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0af9\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c60\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0\u0ce1\u0cf1\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u0e46\u0e81\u0e82\u0e84\u0e87\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa\u0eab\u0ead-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1877\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309b-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312d\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fd5\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7ae\ua7b0-\ua7b7\ua7f7-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2
-\ua8f7\ua8fb\ua8fd\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab65\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc";
let nonASCIIidentifierChars =
"\u200c\u200d\xb7\u0300-\u036f\u0387\u0483-\u0487\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u0669\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7\u06e8\u06ea-\u06ed\u06f0-\u06f9\u0711\u0730-\u074a\u07a6-\u07b0\u07c0-\u07c9\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08d4-\u08e1\u08e3-\u0903\u093a-\u093c\u093e-\u094f\u0951-\u0957\u0962\u0963\u0966-\u096f\u0981-\u0983\u09bc\u09be-\u09c4\u09c7\u09c8\u09cb-\u09cd\u09d7\u09e2\u09e3\u09e6-\u09ef\u0a01-\u0a03\u0a3c\u0a3e-\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a66-\u0a71\u0a75\u0a81-\u0a83\u0abc\u0abe-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ae2\u0ae3\u0ae6-\u0aef\u0b01-\u0b03\u0b3c\u0b3e-\u0b44\u0b47\u0b48\u0b4b-\u0b4d\u0b56\u0b57\u0b62\u0b63\u0b66-\u0b6f\u0b82\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd7\u0be6-\u0bef\u0c00-\u0c03\u0c3e-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0c66-\u0c6f\u0c81-\u0c83\u0cbc\u0cbe-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0ce6-\u0cef\u0d01-\u0d03\u0d3e-\u0d44\u0d46-\u0d48\u0d4a-\u0d4d\u0d57\u0d62\u0d63\u0d66-\u0d6f\u0d82\u0d83\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2\u0df3\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0e50-\u0e59\u0eb1\u0eb4-\u0eb9\u0ebb\u0ebc\u0ec8-\u0ecd\u0ed0-\u0ed9\u0f18\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e\u0f3f\u0f71-\u0f84\u0f86\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102b-\u103e\u1040-\u1049\u1056-\u1059\u105e-\u1060\u1062-\u1064\u1067-\u106d\u1071-\u1074\u1082-\u108d\u108f-\u109d\u135d-\u135f\u1369-\u1371\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17b4-\u17d3\u17dd\u17e0-\u17e9\u180b-\u180d\u1810-\u1819\u18a9\u1920-\u192b\u1930-\u193b\u1946-\u194f\u19d0-\u19da\u1a17-\u1a1b\u1a55-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1ab0-\u1abd\u1b00-\u1b04\u1b34-\u1b44\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1b82\u1ba1-\u1bad\u1bb0-\u1bb9\u1be6-\u1bf3\u1c24-\u1c37\u1c40-\u1c49\u1c50-\u1c59\u1cd0-\u1cd2\u1cd4-\u1ce8\u1ced\u1cf2-\u1cf4\u1cf8\u1cf9\u1dc0-\u1df5\u1dfb-\u1dff\u203f\u2040\u2054\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua620-\ua629\ua66f\ua674-\ua67d\ua69e\ua69f\ua6f0\ua6f1\ua802\ua806\ua80b\ua823-\ua827\ua880\ua881\ua8b4-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f1\ua900-\ua909\ua926-\ua92d\ua947-\ua953\ua980-\ua983\ua9b3-\ua9c0\ua9d0-\ua9d9\ua9e5\ua9f0-\ua9f9\uaa29-\uaa36\uaa43\uaa4c\uaa4d\uaa50-\uaa59\uaa7b-\uaa7d\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uaaeb-\uaaef\uaaf5\uaaf6\uabe3-\uabea\uabec\uabed\uabf0-\uabf9\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\ufe33\ufe34\ufe4d-\ufe4f\uff10-\uff19\uff3f";

const nonASCIIidentifierStart = new RegExp("[" + nonASCIIidentifierStartChars + "]");
const nonASCIIidentifier = new RegExp("[" + nonASCIIidentifierStartChars + nonASCIIidentifierChars + "]");
const nonASCIIidentifierStart = new RegExp(
"[" + nonASCIIidentifierStartChars + "]",
);
const nonASCIIidentifier = new RegExp(
"[" + nonASCIIidentifierStartChars + nonASCIIidentifierChars + "]",
);

nonASCIIidentifierStartChars = nonASCIIidentifierChars = null;

@ -52,9 +62,9 @@ nonASCIIidentifierStartChars = nonASCIIidentifierChars = null;
// offset to the next range, and then a size of the range. They were
// generated by `bin/generate-identifier-regex.js`.
// eslint-disable-next-line comma-spacing
const astralIdentifierStartCodes = [0,11,2,25,2,18,2,1,2,14,3,13,35,122,70,52,268,28,4,48,48,31,17,26,6,37,11,29,3,35,5,7,2,4,43,157,19,35,5,35,5,39,9,51,157,310,10,21,11,7,153,5,3,0,2,43,2,1,4,0,3,22,11,22,10,30,66,18,2,1,11,21,11,25,71,55,7,1,65,0,16,3,2,2,2,26,45,28,4,28,36,7,2,27,28,53,11,21,11,18,14,17,111,72,56,50,14,50,785,52,76,44,33,24,27,35,42,34,4,0,13,47,15,3,22,0,2,0,36,17,2,24,85,6,2,0,2,3,2,14,2,9,8,46,39,7,3,1,3,21,2,6,2,1,2,4,4,0,19,0,13,4,159,52,19,3,54,47,21,1,2,0,185,46,42,3,37,47,21,0,60,42,86,25,391,63,32,0,449,56,264,8,2,36,18,0,50,29,881,921,103,110,18,195,2749,1070,4050,582,8634,568,8,30,114,29,19,47,17,3,32,20,6,18,881,68,12,0,67,12,65,0,32,6124,20,754,9486,1,3071,106,6,12,4,8,8,9,5991,84,2,70,2,1,3,0,3,1,3,3,2,11,2,0,2,6,2,64,2,3,3,7,2,6,2,27,2,3,2,4,2,0,4,6,2,339,3,24,2,24,2,30,2,24,2,30,2,24,2,30,2,24,2,30,2,24,2,7,4149,196,60,67,1213,3,2,26,2,1,2,0,3,0,2,9,2,3,2,0,2,0,7,0,5,0,2,0,2,0,2,2,2,1,2,0,3,0,2,0,2,0,2,0,2,0,2,1,2,0,3,3,2,6,2,3,2,3,2,0,2,9,2,16,6,2,2,4,2,16,4421,42710,42,4148,12,221,3,5761,10591,541];
/* prettier-ignore */ const astralIdentifierStartCodes = [0,11,2,25,2,18,2,1,2,14,3,13,35,122,70,52,268,28,4,48,48,31,17,26,6,37,11,29,3,35,5,7,2,4,43,157,19,35,5,35,5,39,9,51,157,310,10,21,11,7,153,5,3,0,2,43,2,1,4,0,3,22,11,22,10,30,66,18,2,1,11,21,11,25,71,55,7,1,65,0,16,3,2,2,2,26,45,28,4,28,36,7,2,27,28,53,11,21,11,18,14,17,111,72,56,50,14,50,785,52,76,44,33,24,27,35,42,34,4,0,13,47,15,3,22,0,2,0,36,17,2,24,85,6,2,0,2,3,2,14,2,9,8,46,39,7,3,1,3,21,2,6,2,1,2,4,4,0,19,0,13,4,159,52,19,3,54,47,21,1,2,0,185,46,42,3,37,47,21,0,60,42,86,25,391,63,32,0,449,56,264,8,2,36,18,0,50,29,881,921,103,110,18,195,2749,1070,4050,582,8634,568,8,30,114,29,19,47,17,3,32,20,6,18,881,68,12,0,67,12,65,0,32,6124,20,754,9486,1,3071,106,6,12,4,8,8,9,5991,84,2,70,2,1,3,0,3,1,3,3,2,11,2,0,2,6,2,64,2,3,3,7,2,6,2,27,2,3,2,4,2,0,4,6,2,339,3,24,2,24,2,30,2,24,2,30,2,24,2,30,2,24,2,30,2,24,2,7,4149,196,60,67,1213,3,2,26,2,1,2,0,3,0,2,9,2,3,2,0,2,0,7,0,5,0,2,0,2,0,2,2,2,1,2,0,3,0,2,0,2,0,2,0,2,0,2,1,2,0,3,3,2,6,2,3,2,3,2,0,2,9,2,16,6,2,2,4,2,16,4421,42710,42,4148,12,221,3,5761,10591,541];
// eslint-disable-next-line comma-spacing
const astralIdentifierCodes = [509,0,227,0,150,4,294,9,1368,2,2,1,6,3,41,2,5,0,166,1,1306,2,54,14,32,9,16,3,46,10,54,9,7,2,37,13,2,9,52,0,13,2,49,13,10,2,4,9,83,11,7,0,161,11,6,9,7,3,57,0,2,6,3,1,3,2,10,0,11,1,3,6,4,4,193,17,10,9,87,19,13,9,214,6,3,8,28,1,83,16,16,9,82,12,9,9,84,14,5,9,423,9,838,7,2,7,17,9,57,21,2,13,19882,9,135,4,60,6,26,9,1016,45,17,3,19723,1,5319,4,4,5,9,7,3,6,31,3,149,2,1418,49,513,54,5,49,9,0,15,0,23,4,2,14,1361,6,2,16,3,6,2,1,2,4,2214,6,110,6,6,9,792487,239];
/* prettier-ignore */ const astralIdentifierCodes = [509,0,227,0,150,4,294,9,1368,2,2,1,6,3,41,2,5,0,166,1,1306,2,54,14,32,9,16,3,46,10,54,9,7,2,37,13,2,9,52,0,13,2,49,13,10,2,4,9,83,11,7,0,161,11,6,9,7,3,57,0,2,6,3,1,3,2,10,0,11,1,3,6,4,4,193,17,10,9,87,19,13,9,214,6,3,8,28,1,83,16,16,9,82,12,9,9,84,14,5,9,423,9,838,7,2,7,17,9,57,21,2,13,19882,9,135,4,60,6,26,9,1016,45,17,3,19723,1,5319,4,4,5,9,7,3,6,31,3,149,2,1418,49,513,54,5,49,9,0,15,0,23,4,2,14,1361,6,2,16,3,6,2,1,2,4,2214,6,110,6,6,9,792487,239];

// This has a complexity linear to the value of the code. The
// assumption is that looking up astral identifier characters is
@ -78,7 +88,10 @@ export function isIdentifierStart(code: number): boolean {
if (code < 91) return true;
if (code < 97) return code === 95;
if (code < 123) return true;
if (code <= 0xffff) return code >= 0xaa && nonASCIIidentifierStart.test(String.fromCharCode(code));
if (code <= 0xffff)
return (
code >= 0xaa && nonASCIIidentifierStart.test(String.fromCharCode(code))
);
return isInAstralSet(code, astralIdentifierStartCodes);
}

@ -91,6 +104,10 @@ export function isIdentifierChar(code: number): boolean {
if (code < 91) return true;
if (code < 97) return code === 95;
if (code < 123) return true;
if (code <= 0xffff) return code >= 0xaa && nonASCIIidentifier.test(String.fromCharCode(code));
return isInAstralSet(code, astralIdentifierStartCodes) || isInAstralSet(code, astralIdentifierCodes);
if (code <= 0xffff)
return code >= 0xaa && nonASCIIidentifier.test(String.fromCharCode(code));
return (
isInAstralSet(code, astralIdentifierStartCodes) ||
isInAstralSet(code, astralIdentifierCodes)
);
}

@ -3,8 +3,8 @@
import { lineBreakG } from "./whitespace";

export type Pos = {
start: number;
}
start: number,
};

// These are used when `options.locations` is on, for the
// `startLoc` and `endLoc` properties.
@ -39,7 +39,7 @@ export class SourceLocation {
// into.

export function getLineInfo(input: string, offset: number): Position {
for (let line = 1, cur = 0; ;) {
for (let line = 1, cur = 0; ; ) {
lineBreakG.lastIndex = cur;
const match = lineBreakG.exec(input);
if (match && match.index < offset) {

@ -2,53 +2,59 @@ import test from "ava";
import { multiple as getFixtures } from "babel-helper-fixtures";

export function runFixtureTests(fixturesPath, parseFunction) {
var fixtures = getFixtures(fixturesPath);
const fixtures = getFixtures(fixturesPath);

Object.keys(fixtures).forEach(function (name) {
fixtures[name].forEach(function (testSuite) {
testSuite.tests.forEach(function (task) {
var testFn = task.disabled ? test.skip : task.options.only ? test.only : test;
Object.keys(fixtures).forEach(function(name) {
fixtures[name].forEach(function(testSuite) {
testSuite.tests.forEach(function(task) {
const testFn = task.disabled
? test.skip
: task.options.only ? test.only : test;

testFn(name + "/" + testSuite.title + "/" + task.title, function (t) {
testFn(name + "/" + testSuite.title + "/" + task.title, function(t) {
try {
runTest(task, parseFunction);
t.pass();
} catch (err) {
const message = name + "/" + task.actual.filename + ": " + err.message;
const message =
name + "/" + task.actual.filename + ": " + err.message;
t.fail(message);
}
});
});
});
});
};
}

export function runThrowTestsWithEstree(fixturesPath, parseFunction) {
var fixtures = getFixtures(fixturesPath);
const fixtures = getFixtures(fixturesPath);

Object.keys(fixtures).forEach(function (name) {
fixtures[name].forEach(function (testSuite) {
testSuite.tests.forEach(function (task) {
Object.keys(fixtures).forEach(function(name) {
fixtures[name].forEach(function(testSuite) {
testSuite.tests.forEach(function(task) {
if (!task.options.throws) return;

task.options.plugins = task.options.plugins || [];
task.options.plugins.push("estree");

var testFn = task.disabled ? test.skip : task.options.only ? test.only : test;
const testFn = task.disabled
? test.skip
: task.options.only ? test.only : test;

testFn(name + "/" + testSuite.title + "/" + task.title, function (t) {
testFn(name + "/" + testSuite.title + "/" + task.title, function(t) {
try {
runTest(task, parseFunction);
t.pass();
} catch (err) {
const message = name + "/" + task.actual.filename + ": " + err.message;
const message =
name + "/" + task.actual.filename + ": " + err.message;
t.fail(message);
}
});
});
});
});
};
}

function save(test, ast) {
// Ensure that RegExp are serialized as strings
@ -59,20 +65,28 @@ function save(test, ast) {
}

function runTest(test, parseFunction) {
var opts = test.options;
const opts = test.options;

if (opts.throws && test.expect.code) {
throw new Error("File expected.json exists although options specify throws. Remove expected.json.");
throw new Error(
"File expected.json exists although options specify throws. Remove expected.json.",
);
}

let ast;

try {
var ast = parseFunction(test.actual.code, opts);
ast = parseFunction(test.actual.code, opts);
} catch (err) {
if (opts.throws) {
if (err.message === opts.throws) {
return;
} else {
err.message = "Expected error message: " + opts.throws + ". Got error message: " + err.message;
err.message =
"Expected error message: " +
opts.throws +
". Got error message: " +
err.message;
throw err;
}
}
@ -88,9 +102,11 @@ function runTest(test, parseFunction) {
}

if (opts.throws) {
throw new Error("Expected error message: " + opts.throws + ". But parsing succeeded.");
throw new Error(
"Expected error message: " + opts.throws + ". But parsing succeeded.",
);
} else {
var mis = misMatch(JSON.parse(test.expect.code), ast);
const mis = misMatch(JSON.parse(test.expect.code), ast);

if (mis) {
throw new Error(mis);
@ -113,25 +129,27 @@ function addPath(str, pt) {

function misMatch(exp, act) {
if (exp instanceof RegExp || act instanceof RegExp) {
var left = ppJSON(exp), right = ppJSON(act);
const left = ppJSON(exp);
const right = ppJSON(act);
if (left !== right) return left + " !== " + right;
} else if (Array.isArray(exp)) {
if (!Array.isArray(act)) return ppJSON(exp) + " != " + ppJSON(act);
if (act.length != exp.length) return "array length mismatch " + exp.length + " != " + act.length;
for (var i = 0; i < act.length; ++i) {
var mis = misMatch(exp[i], act[i]);
if (act.length != exp.length)
return "array length mismatch " + exp.length + " != " + act.length;
for (let i = 0; i < act.length; ++i) {
const mis = misMatch(exp[i], act[i]);
if (mis) return addPath(mis, i);
}
} else if (!exp || !act || (typeof exp != "object") || (typeof act != "object")) {
} else if (!exp || !act || typeof exp != "object" || typeof act != "object") {
if (exp !== act && typeof exp != "function")
return ppJSON(exp) + " !== " + ppJSON(act);
} else {
for (var prop in exp) {
var mis = misMatch(exp[prop], act[prop]);
for (const prop in exp) {
const mis = misMatch(exp[prop], act[prop]);
if (mis) return addPath(mis, prop);
}

for (var prop in act) {
for (const prop in act) {
if (typeof act[prop] === "function") {
continue;
}