From edad9b19a6d9bc7750970ec6b9ad16e2e5a1932d Mon Sep 17 00:00:00 2001
From: Vladimir
Date: Tue, 20 Jun 2023 10:53:20 +0200
Subject: [PATCH] fix(benchmark): don't fail when running correct benchmarks (#3629)

---
 .../vitest/src/runtime/runners/benchmark.ts | 14 ++++---
 packages/vitest/src/types/benchmark.ts      |  1 -
 test/benchmark/package.json                 |  2 +-
 test/benchmark/specs/runner.test.mjs        | 40 +++++++++++++++++++
 test/benchmark/test.mjs                     | 29 --------------
 5 files changed, 50 insertions(+), 36 deletions(-)
 create mode 100644 test/benchmark/specs/runner.test.mjs
 delete mode 100644 test/benchmark/test.mjs

diff --git a/packages/vitest/src/runtime/runners/benchmark.ts b/packages/vitest/src/runtime/runners/benchmark.ts
index 81a8d013253d..ecd8a4167022 100644
--- a/packages/vitest/src/runtime/runners/benchmark.ts
+++ b/packages/vitest/src/runtime/runners/benchmark.ts
@@ -24,6 +24,8 @@ function createBenchmarkResult(name: string): BenchmarkResult {
   } as BenchmarkResult
 }
 
+const benchmarkTasks = new WeakMap()
+
 async function runBenchmarkSuite(suite: Suite, runner: VitestRunner) {
   const { Task, Bench } = await importTinybench()
   const start = performance.now()
@@ -67,12 +69,13 @@ async function runBenchmarkSuite(suite: Suite, runner: VitestRunner) {
         benchmarkMap[id] = benchmark
 
         const task = new Task(benchmarkInstance, id, benchmarkFn)
-        benchmark.meta.task = task
+        benchmarkTasks.set(benchmark, task)
         updateTask(benchmark)
       })
 
      benchmarkGroup.forEach((benchmark) => {
-        benchmark.meta.task!.addEventListener('complete', (e) => {
+        const task = benchmarkTasks.get(benchmark)!
+        task.addEventListener('complete', (e) => {
           const task = e.task
           const _benchmark = benchmarkMap[task.name || '']
           if (_benchmark) {
@@ -82,7 +85,7 @@ async function runBenchmarkSuite(suite: Suite, runner: VitestRunner) {
             updateTask(_benchmark)
           }
         })
-        benchmark.meta.task!.addEventListener('error', (e) => {
+        task.addEventListener('error', (e) => {
           const task = e.task
           const _benchmark = benchmarkMap[task.name || '']
           defer.reject(_benchmark ? task.result!.error : e)
@@ -91,10 +94,11 @@ async function runBenchmarkSuite(suite: Suite, runner: VitestRunner) {
 
       const tasks: BenchTask[] = []
       for (const benchmark of benchmarkGroup) {
-        await benchmark.meta.task!.warmup()
+        const task = benchmarkTasks.get(benchmark)!
+        await task.warmup()
         const { setTimeout } = getSafeTimers()
         tasks.push(await new Promise(resolve => setTimeout(async () => {
-          resolve(await benchmark.meta.task!.run())
+          resolve(await task.run())
         })))
       }
 
diff --git a/packages/vitest/src/types/benchmark.ts b/packages/vitest/src/types/benchmark.ts
index 125eab92cfc9..e0b3368dd95f 100644
--- a/packages/vitest/src/types/benchmark.ts
+++ b/packages/vitest/src/types/benchmark.ts
@@ -42,7 +42,6 @@ export interface BenchmarkUserOptions {
 export interface Benchmark extends TaskCustom {
   meta: {
     benchmark: true
-    task?: BenchTask
     result?: BenchTaskResult
   }
 }
diff --git a/test/benchmark/package.json b/test/benchmark/package.json
index a266eee9bf37..577cc07f8c9b 100644
--- a/test/benchmark/package.json
+++ b/test/benchmark/package.json
@@ -2,7 +2,7 @@
   "name": "@vitest/benchmark",
   "private": true,
   "scripts": {
-    "test": "node test.mjs",
+    "test": "node --test specs/ && echo '1'",
     "bench:json": "vitest bench --reporter=json",
     "bench": "vitest bench"
   },
diff --git a/test/benchmark/specs/runner.test.mjs b/test/benchmark/specs/runner.test.mjs
new file mode 100644
index 000000000000..3fd04b135e20
--- /dev/null
+++ b/test/benchmark/specs/runner.test.mjs
@@ -0,0 +1,40 @@
+import { existsSync, rmSync } from 'node:fs'
+import test from 'node:test'
+import * as assert from 'node:assert'
+import { readFile } from 'node:fs/promises'
+import { startVitest } from 'vitest/node'
+
+if (existsSync('./bench.json'))
+  rmSync('./bench.json')
+
+try {
+  await startVitest('benchmark', ['base.bench', 'mode.bench', 'only.bench'], {
+    watch: false,
+  })
+}
+catch (error) {
+  console.error(error)
+  process.exit(1)
+}
+
+const benchResult = await readFile('./bench.json', 'utf-8')
+const resultJson = JSON.parse(benchResult)
+
+await test('benchmarks are actually running', async () => {
+  assert.ok(resultJson.testResults.sort, 'sort is in results')
+  assert.ok(resultJson.testResults.timeout, 'timeout is in results')
+  assert.ok(resultJson.testResults.a0, 'a0 is in results')
+  assert.ok(resultJson.testResults.c1, 'c1 is in results')
+  assert.ok(resultJson.testResults.a2, 'a2 is in results')
+  assert.ok(resultJson.testResults.b3, 'b3 is in results')
+  assert.ok(resultJson.testResults.b4, 'b4 is in results')
+})
+
+await test('doesn\'t have skipped tests', () => {
+  assert.doesNotMatch(benchResult, /skip/, 'contains skipped benchmarks')
+
+  const skippedBenches = ['s0', 's1', 's2', 's3', 'sb4', 's4']
+  const todoBenches = ['unimplemented suite', 'unimplemented test']
+
+  assert.ok(skippedBenches.concat(todoBenches).every(b => !benchResult.includes(b)), 'contains skipped benchmarks')
+})
diff --git a/test/benchmark/test.mjs b/test/benchmark/test.mjs
deleted file mode 100644
index 470c43426e8e..000000000000
--- a/test/benchmark/test.mjs
+++ /dev/null
@@ -1,29 +0,0 @@
-import { existsSync, rmSync } from 'node:fs'
-import { readFile } from 'node:fs/promises'
-import { startVitest } from 'vitest/node'
-
-if (existsSync('./bench.json'))
-  rmSync('./bench.json')
-
-try {
-  await startVitest('benchmark', ['base.bench', 'mode.bench', 'only.bench'])
-}
-catch (error) {
-  console.error(error)
-  process.exit(1)
-}
-
-const benchResult = await readFile('./bench.json', 'utf-8')
-
-if (benchResult.includes('skip'))
-  process.exit(1)
-
-const skippedBenches = ['s0', 's1', 's2', 's3', 'sb4', 's4']
-if (skippedBenches.some(b => benchResult.includes(b)))
-  process.exit(1)
-
-const todoBenches = ['unimplemented suite', 'unimplemented test']
-if (todoBenches.some(b => benchResult.includes(b)))
-  process.exit(1)
-
-process.exit(0)
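
A note on the pattern in the runner change above: the diff drops the `task?: BenchTask` field from `Benchmark['meta']` and instead keys the live tinybench `Task` off the benchmark object in a module-level `WeakMap` (`benchmarkTasks`), so the non-serializable handle no longer rides along with the task's reported metadata. The following is a minimal, self-contained TypeScript sketch of that side-table pattern; `BenchmarkLikeTask`, `RunnerHandle`, `runnerHandles`, `register`, and `runAll` are illustrative names, not Vitest APIs.

// Illustrative sketch (not Vitest source): keep a non-serializable runtime
// handle in a module-level WeakMap instead of on the reported task object.

interface BenchmarkLikeTask {
  id: string
  name: string
  meta: { benchmark: true; result?: unknown }
}

interface RunnerHandle {
  warmup: () => Promise<void>
  run: () => Promise<void>
}

// Side table: an entry is released automatically when its key (the benchmark
// task object) is garbage-collected, and it is never serialized with the task.
const runnerHandles = new WeakMap<BenchmarkLikeTask, RunnerHandle>()

function register(task: BenchmarkLikeTask, handle: RunnerHandle): void {
  runnerHandles.set(task, handle)
}

async function runAll(tasks: BenchmarkLikeTask[]): Promise<void> {
  for (const task of tasks) {
    const handle = runnerHandles.get(task)
    if (!handle)
      throw new Error(`no runner registered for benchmark "${task.name}"`)
    await handle.warmup()
    await handle.run()
  }
}

// A task object can now be passed through JSON.stringify or structuredClone
// without dragging the handle (and its closures) along with it.
export { register, runAll }

A `WeakMap` is used rather than a plain `Map` so that entries are released together with their benchmark tasks and never need explicit cleanup.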