feat: benchmark table report #2179

Merged: 2 commits, Nov 7, 2022
5 changes: 4 additions & 1 deletion packages/vitest/src/node/reporters/benchmark/index.ts
@@ -1,7 +1,10 @@
 import { VerboseReporter } from '../verbose'
 import { JsonReporter } from './json'
+import { TableReporter } from './table'

 export const BenchmarkReportsMap = {
-  default: VerboseReporter,
+  default: TableReporter,
+  verbose: VerboseReporter,
   json: JsonReporter,
 }
+export type BenchmarkBuiltinReporters = keyof typeof BenchmarkReportsMap
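
Usage sketch (not part of the diff): with 'default' now mapped to TableReporter, a plain vitest bench run renders the table report out of the box, and the previous list-style output stays available under the 'verbose' key. Assuming the standard test.benchmark.reporters config field, picking a reporter explicitly would look roughly like this; the file below is illustrative only:

// vitest.config.ts (hypothetical example, not part of this PR)
import { defineConfig } from 'vitest/config'

export default defineConfig({
  test: {
    benchmark: {
      // keys of BenchmarkReportsMap above: 'default' (table), 'verbose', 'json'
      reporters: ['default'],
    },
  },
})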
60 changes: 60 additions & 0 deletions packages/vitest/src/node/reporters/benchmark/table/index.ts
@@ -0,0 +1,60 @@
import c from 'picocolors'
import type { UserConsoleLog } from '../../../../types'
import { BaseReporter } from '../../base'
import type { ListRendererOptions } from '../../renderers/listRenderer'
import { createTableRenderer } from './tableRender'

export class TableReporter extends BaseReporter {
  renderer?: ReturnType<typeof createTableRenderer>
  rendererOptions: ListRendererOptions = {} as any

  async onTestRemoved(trigger?: string) {
    await this.stopListRender()
    this.ctx.logger.clearScreen(c.yellow('Test removed...') + (trigger ? c.dim(` [ ${this.relative(trigger)} ]\n`) : ''), true)
    const files = this.ctx.state.getFiles(this.watchFilters)
    createTableRenderer(files, this.rendererOptions).stop()
    this.ctx.logger.log()
    await super.reportSummary(files)
    super.onWatcherStart()
  }

  onCollected() {
    if (this.isTTY) {
      this.rendererOptions.logger = this.ctx.logger
      this.rendererOptions.showHeap = this.ctx.config.logHeapUsage
      const files = this.ctx.state.getFiles(this.watchFilters)
      if (!this.renderer)
        this.renderer = createTableRenderer(files, this.rendererOptions).start()
      else
        this.renderer.update(files)
    }
  }

  async onFinished(files = this.ctx.state.getFiles(), errors = this.ctx.state.getUnhandledErrors()) {
    await this.stopListRender()
    this.ctx.logger.log()
    await super.onFinished(files, errors)
  }

  async onWatcherStart() {
    await this.stopListRender()
    await super.onWatcherStart()
  }

  async stopListRender() {
    await this.renderer?.stop()
    this.renderer = undefined
  }

  async onWatcherRerun(files: string[], trigger?: string) {
    await this.stopListRender()
    await super.onWatcherRerun(files, trigger)
  }

  onUserConsoleLog(log: UserConsoleLog) {
    if (!this.shouldLog(log))
      return
    this.renderer?.clear()
    super.onUserConsoleLog(log)
  }
}
200 changes: 200 additions & 0 deletions packages/vitest/src/node/reporters/benchmark/table/tableRender.ts
@@ -0,0 +1,200 @@
import c from 'picocolors'
import cliTruncate from 'cli-truncate'
import stripAnsi from 'strip-ansi'
import type { Benchmark, BenchmarkResult, Task } from '../../../../types'
import { clearInterval, getTests, notNullish, setInterval } from '../../../../utils'
import { F_RIGHT } from '../../../../utils/figures'
import type { Logger } from '../../../logger'
import { getCols, getStateSymbol } from '../../renderers/utils'

export interface ListRendererOptions {
  renderSucceed?: boolean
  logger: Logger
  showHeap: boolean
}

const DURATION_LONG = 300

const outputMap = new WeakMap<Task, string>()

function formatFilepath(path: string) {
  const lastSlash = Math.max(path.lastIndexOf('/') + 1, 0)
  const basename = path.slice(lastSlash)
  let firstDot = basename.indexOf('.')
  if (firstDot < 0)
    firstDot = basename.length
  firstDot += lastSlash

  return c.dim(path.slice(0, lastSlash)) + path.slice(lastSlash, firstDot) + c.dim(path.slice(firstDot))
}

function formatNumber(number: number) {
  const res = String(number.toFixed(number < 100 ? 4 : 2)).split('.')
  return res[0].replace(/(?=(?:\d{3})+$)(?!\b)/g, ',')
    + (res[1] ? `.${res[1]}` : '')
}

const tableHead = ['name', 'hz', 'min', 'max', 'mean', 'p75', 'p99', 'p995', 'p999', 'rme', 'samples']

function renderTableHead(tasks: Task[]) {
  const benchs = tasks
    .map(i => i.type === 'benchmark' ? i.result?.benchmark : undefined)
    .filter(notNullish)
  const allItems = benchs.map(renderBenchmarkItems).concat([tableHead])
  return `${' '.repeat(3)}${tableHead.map((i, idx) => {
    const width = Math.max(...allItems.map(i => i[idx].length))
    return idx
      ? i.padStart(width, ' ')
      : i.padEnd(width, ' ') // name
  }).map(c.bold).join(' ')}`
}

function renderBenchmarkItems(result: BenchmarkResult) {
  return [
    result.name,
    formatNumber(result.hz || 0),
    formatNumber(result.min || 0),
    formatNumber(result.max || 0),
    formatNumber(result.mean || 0),
    formatNumber(result.p75 || 0),
    formatNumber(result.p99 || 0),
    formatNumber(result.p995 || 0),
    formatNumber(result.p999 || 0),
    `±${(result.rme || 0).toFixed(2)}%`,
    result.samples.length.toString(),
  ]
}
function renderBenchmark(task: Benchmark, tasks: Task[]): string {
  const result = task.result?.benchmark
  if (!result)
    return task.name

  const benchs = tasks
    .map(i => i.type === 'benchmark' ? i.result?.benchmark : undefined)
    .filter(notNullish)
  const allItems = benchs.map(renderBenchmarkItems).concat([tableHead])
  const items = renderBenchmarkItems(result)
  const padded = items.map((i, idx) => {
    const width = Math.max(...allItems.map(i => i[idx].length))
    return idx
      ? i.padStart(width, ' ')
      : i.padEnd(width, ' ') // name
  })

  return [
    padded[0], // name
    c.blue(padded[1]), // hz
    c.cyan(padded[2]), // min
    c.cyan(padded[3]), // max
    c.cyan(padded[4]), // mean
    c.cyan(padded[5]), // p75
    c.cyan(padded[6]), // p99
    c.cyan(padded[7]), // p995
    c.cyan(padded[8]), // p999
    c.dim(padded[9]), // rme
    c.dim(padded[10]), // samples
    result.rank === 1
      ? c.bold(c.green(' fastest'))
      : result.rank === benchs.length && benchs.length > 2
        ? c.bold(c.gray(' slowest'))
        : '',
  ].join(' ')
}

export function renderTree(tasks: Task[], options: ListRendererOptions, level = 0) {
  let output: string[] = []

  let idx = 0
  for (const task of tasks) {
    const padding = ' '.repeat(level ? 1 : 0)
    let prefix = ''
    if (idx === 0 && task.type === 'benchmark')
      prefix += `${renderTableHead(tasks)}\n${padding}`

    prefix += ` ${getStateSymbol(task)} `

    let suffix = ''
    if (task.type === 'suite')
      suffix += c.dim(` (${getTests(task).length})`)

    if (task.mode === 'skip' || task.mode === 'todo')
      suffix += ` ${c.dim(c.gray('[skipped]'))}`

    if (task.result?.duration != null) {
      if (task.result.duration > DURATION_LONG)
        suffix += c.yellow(` ${Math.round(task.result.duration)}${c.dim('ms')}`)
    }

    if (options.showHeap && task.result?.heap != null)
      suffix += c.magenta(` ${Math.floor(task.result.heap / 1024 / 1024)} MB heap used`)

    let name = task.name
    if (level === 0)
      name = formatFilepath(name)

    const body = task.type === 'benchmark'
      ? renderBenchmark(task, tasks)
      : name

    output.push(padding + prefix + body + suffix)

    if ((task.result?.state !== 'pass') && outputMap.get(task) != null) {
      let data: string | undefined = outputMap.get(task)
      if (typeof data === 'string') {
        data = stripAnsi(data.trim().split('\n').filter(Boolean).pop()!)
        if (data === '')
          data = undefined
      }

      if (data != null) {
        const out = `${' '.repeat(level)}${F_RIGHT} ${data}`
        output.push(` ${c.gray(cliTruncate(out, getCols(-3)))}`)
      }
    }

    if (task.type === 'suite' && task.tasks.length > 0) {
      if (task.result?.state)
        output = output.concat(renderTree(task.tasks, options, level + 1))
    }
    idx++
  }

  return output.filter(Boolean).join('\n')
}

export const createTableRenderer = (_tasks: Task[], options: ListRendererOptions) => {
  let tasks = _tasks
  let timer: any

  const log = options.logger.logUpdate

  function update() {
    log(renderTree(tasks, options))
  }

  return {
    start() {
      if (timer)
        return this
      timer = setInterval(update, 200)
      return this
    },
    update(_tasks: Task[]) {
      tasks = _tasks
      update()
      return this
    },
    async stop() {
      if (timer) {
        clearInterval(timer)
        timer = undefined
      }
      log.clear()
      options.logger.log(renderTree(tasks, options))
      return this
    },
    clear() {
      log.clear()
    },
  }
}
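
For orientation, a minimal sketch of how the object returned by createTableRenderer is driven, mirroring what TableReporter does in the file above; the logger and tasks values here are placeholder assumptions, not code from this PR:

// Hypothetical driver, for illustration only.
import type { Task } from '../../../../types'
import type { Logger } from '../../../logger'
import { createTableRenderer } from './tableRender'

declare const logger: Logger // in the reporter this is ctx.logger
declare const tasks: Task[] // in the reporter this is ctx.state.getFiles()

const renderer = createTableRenderer(tasks, { logger, showHeap: false })
renderer.start() // repaints the table every 200ms via logger.logUpdate
renderer.update(tasks) // swap in a fresh task snapshot after a rerun
await renderer.stop() // clears the live region and prints the final table once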
3 changes: 2 additions & 1 deletion test/benchmark/package.json
@@ -3,7 +3,8 @@
   "private": true,
   "scripts": {
     "test": "node test.mjs",
-    "bench": "vitest bench --reporter=json",
+    "bench:json": "vitest bench --reporter=json",
+    "bench": "vitest bench",
     "coverage": "vitest run --coverage"
   },
   "devDependencies": {
8 changes: 8 additions & 0 deletions test/benchmark/test/only.bench.ts
@@ -66,3 +66,11 @@ bench.only(
   },
   { iterations: 1, time: 0 },
 )
+
+bench.only(
+  'visited2',
+  () => {
+    assert.deepEqual(run, [true, true, true, true, true])
+  },
+  { iterations: 1, time: 0 },
+)
2 changes: 1 addition & 1 deletion test/benchmark/vitest.config.ts
@@ -19,7 +19,7 @@ export default defineConfig({
         onWatcherRerun: noop,
         onServerRestart: noop,
         onUserConsoleLog: noop,
-      }],
+      }, 'default'],
     },
   },
 })