Improve source map generation (#759)
Fixes #541

Co-authored-by: Alan Pierce <alangpierce@gmail.com>
forivall and alangpierce committed Apr 9, 2023
1 parent 7e5b06a commit 2fafe71
Showing 13 changed files with 224 additions and 37 deletions.
2 changes: 1 addition & 1 deletion .eslintrc.js
@@ -90,7 +90,7 @@ module.exports = {
"@typescript-eslint/no-unsafe-call": "off",
"@typescript-eslint/no-unsafe-member-access": "off",
"@typescript-eslint/no-unsafe-return": "off",
"@typescript-eslint/no-unused-vars": ["error", {args: "none"}],
"@typescript-eslint/no-unused-vars": ["error", {args: "none", ignoreRestSiblings: true}],
"@typescript-eslint/restrict-template-expressions": [
"error",
{
17 changes: 16 additions & 1 deletion integration-test/integration-tests.ts
@@ -1,10 +1,11 @@
import assert from "assert";
import {exec} from "child_process";
import {readdirSync, statSync} from "fs";
import {writeFile} from "fs/promises";
import {join, dirname} from "path";
import {promisify} from "util";

import {readJSONFileContentsIfExists} from "../script/util/readFileContents";
import {readFileContents, readJSONFileContentsIfExists} from "../script/util/readFileContents";

const execPromise = promisify(exec);

@@ -68,6 +69,20 @@ describe("integration tests", () => {
});
}

it("allows Jest inline snapshots", async () => {
process.chdir("./test-cases/other-cases/allows-inline-snapshots");
const originalContents = await readFileContents("./main.test.ts");
assert(originalContents.includes("toMatchInlineSnapshot()"));
try {
await execPromise(`npx jest --no-cache --updateSnapshot`);
// Running the test should have worked and updated the inline snapshot.
const newContents = await readFileContents("./main.test.ts");
assert(newContents.includes("toMatchInlineSnapshot(`3`)"));
} finally {
await writeFile("./main.test.ts", originalContents);
}
});

/**
* Find ts-node integration tests.
*
5 changes: 5 additions & 0 deletions integration-test/test-cases/other-cases/README.md
@@ -0,0 +1,5 @@
# Other integration test cases

Tests in this folder help power test cases that don't fit nicely into the usual
framework of discovering and running each project as a test case. These projects
are referenced directly by tests.
@@ -0,0 +1,3 @@
module.exports = {
transform: {"\\.(js|jsx|ts|tsx)$": "@sucrase/jest-plugin"},
};
@@ -0,0 +1,3 @@
test("fills inline snapshot", () => {
expect(3 as number).toMatchInlineSnapshot();
});
2 changes: 2 additions & 0 deletions package.json
@@ -48,6 +48,7 @@
},
"devDependencies": {
"@babel/core": "^7.18.6",
"@jridgewell/trace-mapping": "^0.3.18",
"@types/glob": "^7",
"@types/mocha": "^9.1.1",
"@types/mz": "^2.7.4",
@@ -70,6 +71,7 @@
"typescript": "~4.7"
},
"dependencies": {
"@jridgewell/gen-mapping": "^0.3.2",
"commander": "^4.0.0",
"glob": "7.1.6",
"lines-and-columns": "^1.1.6",
4 changes: 1 addition & 3 deletions src/Options.ts
@@ -81,9 +81,7 @@ export interface Options {
enableLegacyBabel5ModuleInterop?: boolean;
/**
* If specified, we also return a RawSourceMap object alongside the code.
* Currently, source maps simply map each line to the original line without
* any mappings within lines, since Sucrase preserves line numbers. filePath
* must be specified if this option is enabled.
* filePath must be specified if this option is enabled.
*/
sourceMapOptions?: SourceMapOptions;
/**
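For context, here is a minimal usage sketch of this option through Sucrase's public transform API; the input code and file names are illustrative assumptions, not part of this commit.

import {transform} from "sucrase";

const result = transform("const n: number = 1;\nconsole.log(n);", {
  transforms: ["typescript"],
  // filePath is required whenever sourceMapOptions is provided.
  filePath: "example.ts",
  sourceMapOptions: {compiledFilename: "example.js"},
});

// result.code is the compiled output; result.sourceMap is the RawSourceMap
// produced by computeSourceMap below, now with column-level mappings.
console.log(result.sourceMap);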
28 changes: 24 additions & 4 deletions src/TokenProcessor.ts
@@ -9,8 +9,16 @@ export interface TokenProcessorSnapshot {
tokenIndex: number;
}

export interface TokenProcessorResult {
code: string;
mappings: Array<number | undefined>;
}

export default class TokenProcessor {
private resultCode: string = "";
// Array mapping input token index to optional string index position in the
// output code.
private resultMappings: Array<number | undefined> = new Array(this.tokens.length);
private tokenIndex = 0;

constructor(
@@ -22,10 +30,17 @@
) {}

/**
* Make a new TokenProcessor for things like lookahead.
* Snapshot the token state in a way that can be restored later, useful for
* things like lookahead.
*
* resultMappings do not need to be copied since in all use cases, they will
* be overwritten anyway after restore.
*/
snapshot(): TokenProcessorSnapshot {
return {resultCode: this.resultCode, tokenIndex: this.tokenIndex};
return {
resultCode: this.resultCode,
tokenIndex: this.tokenIndex,
};
}

restoreToSnapshot(snapshot: TokenProcessorSnapshot): void {
@@ -48,6 +63,7 @@

reset(): void {
this.resultCode = "";
this.resultMappings = new Array(this.tokens.length);
this.tokenIndex = 0;
}

@@ -168,6 +184,7 @@
replaceToken(newCode: string): void {
this.resultCode += this.previousWhitespaceAndComments();
this.appendTokenPrefix();
this.resultMappings[this.tokenIndex] = this.resultCode.length;
this.resultCode += newCode;
this.appendTokenSuffix();
this.tokenIndex++;
@@ -176,6 +193,7 @@
replaceTokenTrimmingLeftWhitespace(newCode: string): void {
this.resultCode += this.previousWhitespaceAndComments().replace(/[^\r\n]/g, "");
this.appendTokenPrefix();
this.resultMappings[this.tokenIndex] = this.resultCode.length;
this.resultCode += newCode;
this.appendTokenSuffix();
this.tokenIndex++;
@@ -217,6 +235,7 @@
copyToken(): void {
this.resultCode += this.previousWhitespaceAndComments();
this.appendTokenPrefix();
this.resultMappings[this.tokenIndex] = this.resultCode.length;
this.resultCode += this.code.slice(
this.tokens[this.tokenIndex].start,
this.tokens[this.tokenIndex].end,
@@ -229,6 +248,7 @@
this.resultCode += this.previousWhitespaceAndComments();
this.appendTokenPrefix();
this.resultCode += prefix;
this.resultMappings[this.tokenIndex] = this.resultCode.length;
this.resultCode += this.code.slice(
this.tokens[this.tokenIndex].start,
this.tokens[this.tokenIndex].end,
@@ -323,12 +343,12 @@ export default class TokenProcessor {
this.tokenIndex--;
}

finish(): string {
finish(): TokenProcessorResult {
if (this.tokenIndex !== this.tokens.length) {
throw new Error("Tried to finish processing tokens before reaching the end.");
}
this.resultCode += this.previousWhitespaceAndComments();
return this.resultCode;
return {code: this.resultCode, mappings: this.resultMappings};
}

isAtEnd(): boolean {
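To make the resultMappings contract concrete, here is a small hypothetical illustration; the token breakdown and indices are hand-written for illustration, not produced by running Sucrase.

// Input:  const x: number = 1;   ->   Output: const x = 1;
// resultMappings is parallel to the input token list: each entry is the string
// index in the output where that token was emitted, or undefined for tokens
// that produced no output (here, the erased type annotation).
const outputCode: string = "const x = 1;";
const resultMappings: Array<number | undefined> = [
  0, // `const`
  6, // `x`
  undefined, // `:` (erased by the TypeScript transform)
  undefined, // `number` (erased)
  8, // `=`
  10, // `1`
  11, // `;`
];
console.log(outputCode, resultMappings);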
78 changes: 66 additions & 12 deletions src/computeSourceMap.ts
@@ -1,5 +1,9 @@
import {GenMapping, maybeAddSegment, toEncodedMap} from "@jridgewell/gen-mapping";

import type {SourceMapOptions} from "./index";
import type {Token} from "./parser/tokenizer";
import {charCodes} from "./parser/util/charcodes";
import type {RootTransformerResult} from "./transformers/RootTransformer";

export interface RawSourceMap {
version: number;
@@ -12,24 +16,74 @@
}

/**
* Generate a simple source map indicating that each line maps directly to the original line.
* Generate a source map indicating that each line maps directly to the original line,
* with the tokens in their new positions.
*/
export default function computeSourceMap(
code: string,
{code: generatedCode, mappings: rawMappings}: RootTransformerResult,
filePath: string,
{compiledFilename}: SourceMapOptions,
options: SourceMapOptions,
source: string,
tokens: Array<Token>,
): RawSourceMap {
let mappings = "AAAA";
const sourceColumns = computeSourceColumns(source, tokens);
const map = new GenMapping({file: options.compiledFilename});
let tokenIndex = 0;
// currentMapping is the output source index for the current input token being
// considered.
let currentMapping = rawMappings[0];
while (currentMapping === undefined && tokenIndex < rawMappings.length - 1) {
tokenIndex++;
currentMapping = rawMappings[tokenIndex];
}
let line = 0;
let lineStart = 0;
if (currentMapping !== lineStart) {
maybeAddSegment(map, line, 0, filePath, line, 0);
}
for (let i = 0; i < generatedCode.length; i++) {
if (i === currentMapping) {
const genColumn = currentMapping - lineStart;
const sourceColumn = sourceColumns[tokenIndex];
maybeAddSegment(map, line, genColumn, filePath, line, sourceColumn);
while (
(currentMapping === i || currentMapping === undefined) &&
tokenIndex < rawMappings.length - 1
) {
tokenIndex++;
currentMapping = rawMappings[tokenIndex];
}
}
if (generatedCode.charCodeAt(i) === charCodes.lineFeed) {
line++;
lineStart = i + 1;
if (currentMapping !== lineStart) {
maybeAddSegment(map, line, 0, filePath, line, 0);
}
}
}
const {sourceRoot, sourcesContent, ...sourceMap} = toEncodedMap(map);
return sourceMap as RawSourceMap;
}

/**
* Create an array mapping each token index to the 0-based column of the start
* position of the token.
*/
function computeSourceColumns(code: string, tokens: Array<Token>): Array<number> {
const sourceColumns: Array<number> = new Array(tokens.length);
let tokenIndex = 0;
let currentMapping = tokens[tokenIndex].start;
let lineStart = 0;
for (let i = 0; i < code.length; i++) {
if (i === currentMapping) {
sourceColumns[tokenIndex] = currentMapping - lineStart;
tokenIndex++;
currentMapping = tokens[tokenIndex].start;
}
if (code.charCodeAt(i) === charCodes.lineFeed) {
mappings += ";AACA";
lineStart = i + 1;
}
}
return {
version: 3,
file: compiledFilename || "",
sources: [filePath],
mappings,
names: [],
};
return sourceColumns;
}
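As a rough sanity check, the generated map can be decoded with @jridgewell/trace-mapping (added above as a devDependency); the sample below is a sketch under assumed inputs, not test code from this commit.

import {TraceMap, originalPositionFor} from "@jridgewell/trace-mapping";
import {transform} from "sucrase";

const {code, sourceMap} = transform("const n: number = 1;", {
  transforms: ["typescript"],
  filePath: "example.ts",
  sourceMapOptions: {compiledFilename: "example.js"},
});

// With column-level mappings, a generated position after an erased type
// annotation should trace back to its original column rather than column 0.
const tracer = new TraceMap(JSON.stringify(sourceMap));
console.log(originalPositionFor(tracer, {line: 1, column: code.indexOf("=")}));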
11 changes: 9 additions & 2 deletions src/index.ts
@@ -44,14 +44,21 @@ export function transform(code: string, options: Options): TransformResult {
Boolean(options.enableLegacyBabel5ModuleInterop),
options,
);
let result: TransformResult = {code: transformer.transform()};
const transformerResult = transformer.transform();
let result: TransformResult = {code: transformerResult.code};
if (options.sourceMapOptions) {
if (!options.filePath) {
throw new Error("filePath must be specified when generating a source map.");
}
result = {
...result,
sourceMap: computeSourceMap(result.code, options.filePath, options.sourceMapOptions),
sourceMap: computeSourceMap(
transformerResult,
options.filePath,
options.sourceMapOptions,
code,
sucraseContext.tokenProcessor.tokens,
),
};
}
return result;
37 changes: 33 additions & 4 deletions src/transformers/RootTransformer.ts
@@ -18,6 +18,13 @@ import ReactHotLoaderTransformer from "./ReactHotLoaderTransformer";
import type Transformer from "./Transformer";
import TypeScriptTransformer from "./TypeScriptTransformer";

export interface RootTransformerResult {
code: string;
// Array mapping input token index to optional string index position in the
// output code.
mappings: Array<number | undefined>;
}

export default class RootTransformer {
private transformers: Array<Transformer> = [];
private nameManager: NameManager;
@@ -121,7 +128,7 @@ export default class RootTransformer {
}
}

transform(): string {
transform(): RootTransformerResult {
this.tokens.reset();
this.processBalancedCode();
const shouldAddUseStrict = this.isImportsTransformEnabled;
@@ -139,16 +146,25 @@
for (const transformer of this.transformers) {
suffix += transformer.getSuffixCode();
}
let code = this.tokens.finish();
const result = this.tokens.finish();
let {code} = result;
if (code.startsWith("#!")) {
let newlineIndex = code.indexOf("\n");
if (newlineIndex === -1) {
newlineIndex = code.length;
code += "\n";
}
return code.slice(0, newlineIndex + 1) + prefix + code.slice(newlineIndex + 1) + suffix;
return {
code: code.slice(0, newlineIndex + 1) + prefix + code.slice(newlineIndex + 1) + suffix,
// The hashbang line has no tokens, so shifting the tokens to account
// for prefix can happen normally.
mappings: this.shiftMappings(result.mappings, prefix.length),
};
} else {
return prefix + this.tokens.finish() + suffix;
return {
code: prefix + code + suffix,
mappings: this.shiftMappings(result.mappings, prefix.length),
};
}
}

@@ -426,4 +442,17 @@
}
return false;
}

shiftMappings(
mappings: Array<number | undefined>,
prefixLength: number,
): Array<number | undefined> {
for (let i = 0; i < mappings.length; i++) {
const mapping = mappings[i];
if (mapping !== undefined) {
mappings[i] = mapping + prefixLength;
}
}
return mappings;
}
}
