fix(babel-parser): avoid state.clone() to clone the whole token store (#11029)

* fix(babel-parser): avoid state.clone() to clone the whole token store

Fixed a performance issue on large inputs when the { tokens: true } option is enabled together with the typescript plugin, which performs quite a few state.clone() calls (a sketch of the cost follows the changed-files summary below).

* test(babel-parser): turn on 2 typescript tests with tokens:true

The output.json is generated from the old master branch to make sure there is no regression.

* fix(babel-parser): avoid duplicated tokens trapped mainly by the typescript/flow plugins

* test(babel-parser): update output.json to latest master result

* chore(babel-parser): improve performance by storing tokensLength in state
3cp authored and JLHwung committed Jan 20, 2020
1 parent 740064c commit 9bc04ba
Showing 11 changed files with 1,455 additions and 514 deletions.
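
To make the motivation concrete, here is a minimal, hypothetical sketch (SlowState and FastState are made-up names, not the real parser classes) of the cost this commit removes: while the token store lives on State, every state.clone() used for speculative parsing has to copy an array that grows with the input, so frequent cloning becomes roughly quadratic in the number of tokens, whereas storing only tokensLength keeps each clone O(1).

// Hypothetical sketch of the cost being fixed; names are made up.
// Before: the token store lived on State, so every clone copied it.
class SlowState {
  pos = 0;
  tokens = []; // grows with the input
  clone() {
    const next = new SlowState();
    next.pos = this.pos;
    next.tokens = this.tokens.slice(); // O(tokens seen so far) per clone
    return next;
  }
}

// After: State only remembers how many tokens are currently valid; the
// array itself lives on the tokenizer and is never copied.
class FastState {
  pos = 0;
  tokensLength = 0; // a single number, O(1) to clone
  clone() {
    const next = new FastState();
    next.pos = this.pos;
    next.tokensLength = this.tokensLength;
    return next;
  }
}

// Cloning once per "token" (a rough stand-in for how often the typescript
// plugin snapshots state) is quadratic with SlowState, linear with FastState.
let slow = new SlowState();
let fast = new FastState();
for (let i = 0; i < 5000; i++) {
  slow.tokens.push(i);
  slow = slow.clone();
  fast.tokensLength++;
  fast = fast.clone();
}
console.log(slow.tokens.length, fast.tokensLength); // 5000 5000
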
packages/babel-parser/src/parser/statement.js (2 changes: 1 addition & 1 deletion)
@@ -67,7 +67,7 @@ export default class StatementParser extends ExpressionParser {
     file.program = this.finishNode(program, "Program");
     file.comments = this.state.comments;

-    if (this.options.tokens) file.tokens = this.state.tokens;
+    if (this.options.tokens) file.tokens = this.tokens;

     return this.finishNode(file, "File");
   }

packages/babel-parser/src/tokenizer/index.js (16 changes: 14 additions & 2 deletions)
@@ -1,6 +1,7 @@
 // @flow

 import type { Options } from "../options";
+import * as N from "../types";
 import type { Position } from "../util/location";
 import * as charCodes from "charcodes";
 import { isIdentifierStart, isIdentifierChar } from "../util/identifier";
@@ -114,6 +115,9 @@ export default class Tokenizer extends LocationParser {

   isLookahead: boolean;

+  // Token store.
+  tokens: Array<Token | N.Comment> = [];
+
   constructor(options: Options, input: string) {
     super();
     this.state = new State();
@@ -123,13 +127,21 @@
     this.isLookahead = false;
   }

+  pushToken(token: Token | N.Comment) {
+    // Pop out invalid tokens trapped by try-catch parsing.
+    // Those parsing branches are mainly created by typescript and flow plugins.
+    this.tokens.length = this.state.tokensLength;
+    this.tokens.push(token);
+    ++this.state.tokensLength;
+  }
+
   // Move to the next token

   next(): void {
     if (!this.isLookahead) {
       this.checkKeywordEscapes();
       if (this.options.tokens) {
-        this.state.tokens.push(new Token(this.state));
+        this.pushToken(new Token(this.state));
       }
     }

@@ -242,7 +254,7 @@
       loc: new SourceLocation(startLoc, endLoc),
     };

-    if (this.options.tokens) this.state.tokens.push(comment);
+    if (this.options.tokens) this.pushToken(comment);
     this.state.comments.push(comment);
     this.addComment(comment);
   }
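
The truncation in pushToken above is what the "duplicated tokens" bullet in the commit message refers to. Below is a simplified, self-contained sketch (MiniState, MiniTokenizer, and the tryBranch helper are made up, standing in for the try-catch parsing done by the typescript/flow plugins): when a speculative branch fails and the saved state is restored, state.tokensLength rolls back with it, so the next pushToken call drops the tokens pushed by the abandoned branch instead of leaving stale entries in the store.

// Simplified sketch; not the real babel-parser classes. It only illustrates
// how `this.tokens.length = this.state.tokensLength` discards tokens that
// were pushed by a failed speculative branch.
class MiniState {
  tokensLength = 0;
  clone() {
    const next = new MiniState();
    next.tokensLength = this.tokensLength;
    return next;
  }
}

class MiniTokenizer {
  state = new MiniState();
  tokens = [];
  pushToken(token) {
    // Same idea as the pushToken added in this commit: truncate first,
    // then append and advance the valid-token count on the state.
    this.tokens.length = this.state.tokensLength;
    this.tokens.push(token);
    ++this.state.tokensLength;
  }
  // Made-up stand-in for speculative parsing: snapshot the state, try a
  // branch, and restore the snapshot if the branch throws.
  tryBranch(fn) {
    const saved = this.state.clone();
    try {
      fn();
    } catch (_) {
      this.state = saved; // tokensLength rolls back with the state
    }
  }
}

const tokenizer = new MiniTokenizer();
tokenizer.pushToken("a");
tokenizer.tryBranch(() => {
  tokenizer.pushToken("b-from-failed-branch");
  throw new Error("reparse with another interpretation");
});
tokenizer.pushToken("b");
console.log(tokenizer.tokens); // [ 'a', 'b' ] with no stale token left behind
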
packages/babel-parser/src/tokenizer/state.js (7 changes: 3 additions & 4 deletions)
@@ -5,7 +5,6 @@ import * as N from "../types";
 import { Position } from "../util/location";

 import { types as ct, type TokContext } from "./context";
-import type { Token } from "./index";
 import { types as tt, type TokenType } from "./types";

 type TopicContextState = {
@@ -93,9 +92,6 @@ export default class State {
   yieldPos: number = -1;
   awaitPos: number = -1;

-  // Token store.
-  tokens: Array<Token | N.Comment> = [];
-
   // Comment store.
   comments: Array<N.Comment> = [];

@@ -153,6 +149,9 @@
   // `export default foo;` and `export { foo as default };`.
   exportedIdentifiers: Array<string> = [];

+  // Tokens length in token store
+  tokensLength: number = 0;
+
   curPosition(): Position {
     return new Position(this.curLine, this.pos - this.lineStart);
   }

@@ -0,0 +1,5 @@
+{
+  "sourceType": "module",
+  "plugins": ["typescript"],
+  "tokens": true
+}
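
The new file above appears to be one of the test option fixtures that turns on tokens together with the typescript plugin. For reference, the same options can be passed to the public @babel/parser API; a minimal usage sketch with an arbitrary input string:

// Usage sketch: parse TypeScript with the token store enabled.
// After this commit, file.tokens is filled from the tokenizer-level store
// instead of a per-State array; the public output shape is unchanged.
const { parse } = require("@babel/parser");

const file = parse("const n: number = 1;", {
  sourceType: "module",
  plugins: ["typescript"],
  tokens: true,
});

console.log(file.tokens.length);        // token (and comment) entries for the input
console.log(file.program.body[0].type); // "VariableDeclaration"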
