feat(benchmark): support comparing benchmark result (#5398)
Co-authored-by: Vladimir <[email protected]>
hi-ogawa and sheremet-va authored May 3, 2024
1 parent 21e58bd commit f8d3d22
Showing 19 changed files with 414 additions and 167 deletions.
26 changes: 26 additions & 0 deletions docs/config/index.md
@@ -312,6 +312,32 @@ By providing an object instead of a string you can define individual outputs whe

To provide an object via the CLI, use the following syntax: `--outputFile.json=./path --outputFile.junit=./other-path`.

#### benchmark.outputJson <Version>1.6.0</Version> {#benchmark-outputJson}

- **Type:** `string | undefined`
- **Default:** `undefined`

A file path to store the benchmark result, which can later be passed to the `--compare` option.

For example:

```sh
# save main branch's result
git checkout main
vitest bench --outputJson main.json

# switch to the feature branch and compare against main
git checkout feature
vitest bench --compare main.json
```

#### benchmark.compare <Version>1.6.0</Version> {#benchmark-compare}

- **Type:** `string | undefined`
- **Default:** `undefined`

A file path to a previous benchmark result to compare the current run against.
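
For reference, a minimal sketch of setting both options in a config file (assuming the same `benchmark` options accepted on the CLI, as the `resolveConfig` change below suggests; paths resolve against the project root):

```ts
// vitest.config.ts — illustrative only
import { defineConfig } from 'vitest/config'

export default defineConfig({
  test: {
    benchmark: {
      // where `vitest bench` writes the machine-readable result
      outputJson: './bench/main.json',
      // a previously saved result to compare the current run against
      compare: './bench/main.json',
    },
  },
})
```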

### alias

- **Type:** `Record<string, string> | Array<{ find: string | RegExp, replacement: string, customResolver?: ResolverFunction | ResolverObject }>`
3 changes: 3 additions & 0 deletions docs/guide/features.md
@@ -210,6 +210,9 @@ describe('sort', () => {
})
```

<img alt="Benchmark report" img-dark src="https://github.com/vitest-dev/vitest/assets/4232207/6f0383ea-38ba-4f14-8a05-ab243afea01d">
<img alt="Benchmark report" img-light src="https://github.com/vitest-dev/vitest/assets/4232207/efbcb427-ecf1-4882-88de-210cd73415f6">

## Type Testing <Badge type="warning">Experimental</Badge> {#type-testing}

Since Vitest 0.25.0 you can [write tests](/guide/testing-types) to catch type regressions. Vitest comes with the [`expect-type`](https://github.com/mmkal/expect-type) package to provide a similar and easy-to-understand API.
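
A short illustrative test (a sketch — `mount` here is a hypothetical function under test, declared inline to keep the snippet self-contained):

```ts
import { assertType, expectTypeOf, test } from 'vitest'

// hypothetical API under test
declare function mount(options: { name: string }): void

test('my types work properly', () => {
  expectTypeOf(mount).toBeFunction()
  expectTypeOf(mount).parameter(0).toMatchTypeOf<{ name: string }>()

  // @ts-expect-error name is a string
  assertType(mount({ name: 42 }))
})
```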
2 changes: 1 addition & 1 deletion packages/vitest/src/defaults.ts
@@ -4,7 +4,7 @@ import { isCI } from './utils/env'

export const defaultInclude = ['**/*.{test,spec}.?(c|m)[jt]s?(x)']
export const defaultExclude = ['**/node_modules/**', '**/dist/**', '**/cypress/**', '**/.{idea,git,cache,output,temp}/**', '**/{karma,rollup,webpack,vite,vitest,jest,ava,babel,nyc,cypress,tsup,build,eslint,prettier}.config.*']
- export const benchmarkConfigDefaults: Required<Omit<BenchmarkUserOptions, 'outputFile'>> = {
+ export const benchmarkConfigDefaults: Required<Omit<BenchmarkUserOptions, 'outputFile' | 'compare' | 'outputJson'>> = {
include: ['**/*.{bench,benchmark}.?(c|m)[jt]s?(x)'],
exclude: defaultExclude,
includeSource: [],
30 changes: 18 additions & 12 deletions packages/vitest/src/node/cli/cac.ts
@@ -1,14 +1,14 @@
import { normalize } from 'pathe'
- import cac, { type CAC } from 'cac'
+ import cac, { type CAC, type Command } from 'cac'
import c from 'picocolors'
import { version } from '../../../package.json'
import { toArray } from '../../utils/base'
import type { Vitest, VitestRunMode } from '../../types'
import type { CliOptions } from './cli-api'
- import type { CLIOption } from './cli-config'
- import { cliOptionsConfig } from './cli-config'
+ import type { CLIOption, CLIOptions as CLIOptionsConfig } from './cli-config'
+ import { benchCliOptionsConfig, cliOptionsConfig } from './cli-config'

- function addCommand(cli: CAC, name: string, option: CLIOption<any>) {
+ function addCommand(cli: CAC | Command, name: string, option: CLIOption<any>) {
const commandName = option.alias || name
let command = option.shorthand ? `-${option.shorthand}, --${commandName}` : `--${commandName}`
if ('argument' in option)
@@ -56,17 +56,20 @@ interface CLIOptions {
allowUnknownOptions?: boolean
}

function addCliOptions(cli: CAC | Command, options: CLIOptionsConfig<any>) {
for (const [optionName, option] of Object.entries(options)) {
if (option)
addCommand(cli, optionName, option)
}
}

export function createCLI(options: CLIOptions = {}) {
const cli = cac('vitest')

cli
.version(version)

- for (const optionName in cliOptionsConfig) {
-   const option = (cliOptionsConfig as any)[optionName] as CLIOption<any> | null
-   if (option)
-     addCommand(cli, optionName, option)
- }
+ addCliOptions(cli, cliOptionsConfig)

cli.help((info) => {
const helpSection = info.find(current => current.title?.startsWith('For more info, run any command'))
@@ -158,9 +161,12 @@ export function createCLI(options: CLIOptions = {}) {
.command('dev [...filters]', undefined, options)
.action(watch)

- cli
-   .command('bench [...filters]', undefined, options)
-   .action(benchmark)
+ addCliOptions(
+   cli
+     .command('bench [...filters]', undefined, options)
+     .action(benchmark),
+   benchCliOptionsConfig,
+ )

// TODO: remove in Vitest 2.0
cli
13 changes: 13 additions & 0 deletions packages/vitest/src/node/cli/cli-config.ts
@@ -640,4 +640,17 @@ export const cliOptionsConfig: VitestCLIOptions = {
name: null,
includeTaskLocation: null,
snapshotEnvironment: null,
compare: null,
outputJson: null,
}

export const benchCliOptionsConfig: Pick<VitestCLIOptions, 'compare' | 'outputJson'> = {
compare: {
description: 'benchmark output file to compare against',
argument: '<filename>',
},
outputJson: {
description: 'benchmark output file',
argument: '<filename>',
},
}
6 changes: 6 additions & 0 deletions packages/vitest/src/node/config.ts
@@ -381,6 +381,12 @@ export function resolveConfig(

if (options.outputFile)
resolved.benchmark.outputFile = options.outputFile

// --compare from cli
if (options.compare)
resolved.benchmark.compare = options.compare
if (options.outputJson)
resolved.benchmark.outputJson = options.outputJson
}

resolved.setupFiles = toArray(resolved.setupFiles || []).map(file =>
2 changes: 0 additions & 2 deletions packages/vitest/src/node/reporters/benchmark/index.ts
@@ -1,10 +1,8 @@
import { VerboseReporter } from '../verbose'
- import { JsonReporter } from './json'
import { TableReporter } from './table'

export const BenchmarkReportsMap = {
default: TableReporter,
verbose: VerboseReporter,
- json: JsonReporter,
}
export type BenchmarkBuiltinReporters = keyof typeof BenchmarkReportsMap
82 changes: 0 additions & 82 deletions packages/vitest/src/node/reporters/benchmark/json.ts

This file was deleted.

105 changes: 100 additions & 5 deletions packages/vitest/src/node/reporters/benchmark/table/index.ts
@@ -1,8 +1,11 @@
+ import fs from 'node:fs'
import c from 'picocolors'
+ import * as pathe from 'pathe'
import type { TaskResultPack } from '@vitest/runner'
import type { UserConsoleLog } from '../../../../types/general'
import { BaseReporter } from '../../base'
- import { getFullName } from '../../../../utils'
+ import type { BenchmarkResult, File } from '../../../../types'
+ import { getFullName, getTasks } from '../../../../utils'
import { getStateSymbol } from '../../renderers/utils'
import { type TableRendererOptions, createTableRenderer, renderTree } from './tableRender'

@@ -20,11 +23,24 @@ export class TableReporter extends BaseReporter {
super.onWatcherStart()
}

- onCollected() {
+ async onCollected() {
+   this.rendererOptions.logger = this.ctx.logger
+   this.rendererOptions.showHeap = this.ctx.config.logHeapUsage
+   this.rendererOptions.slowTestThreshold = this.ctx.config.slowTestThreshold
+   if (this.ctx.config.benchmark?.compare) {
+     const compareFile = pathe.resolve(this.ctx.config.root, this.ctx.config.benchmark?.compare)
+     try {
+       this.rendererOptions.compare = flattenFormattedBenchmarkReport(
+         JSON.parse(
+           await fs.promises.readFile(compareFile, 'utf-8'),
+         ),
+       )
+     }
+     catch (e) {
+       this.ctx.logger.error(`Failed to read '${compareFile}'`, e)
+     }
+   }
if (this.isTTY) {
-   this.rendererOptions.logger = this.ctx.logger
-   this.rendererOptions.showHeap = this.ctx.config.logHeapUsage
-   this.rendererOptions.slowTestThreshold = this.ctx.config.slowTestThreshold
const files = this.ctx.state.getFiles(this.watchFilters)
if (!this.renderer)
this.renderer = createTableRenderer(files, this.rendererOptions).start()
@@ -56,6 +72,18 @@ export class TableReporter extends BaseReporter {
await this.stopListRender()
this.ctx.logger.log()
await super.onFinished(files, errors)

// write output for future comparison
let outputFile = this.ctx.config.benchmark?.outputJson
if (outputFile) {
outputFile = pathe.resolve(this.ctx.config.root, outputFile)
const outputDirectory = pathe.dirname(outputFile)
if (!fs.existsSync(outputDirectory))
await fs.promises.mkdir(outputDirectory, { recursive: true })
const output = createFormattedBenchmarkReport(files)
await fs.promises.writeFile(outputFile, JSON.stringify(output, null, 2))
this.ctx.logger.log(`Benchmark report written to ${outputFile}`)
}
}

async onWatcherStart() {
@@ -80,3 +108,70 @@
super.onUserConsoleLog(log)
}
}

export interface FormattedBenchmarkReport {
files: {
filepath: string
groups: FormattedBenchmarkGroup[]
}[]
}

// flat results with TaskId as a key
export interface FlatBenchmarkReport {
[id: string]: FormattedBenchmarkResult
}

interface FormattedBenchmarkGroup {
fullName: string
benchmarks: FormattedBenchmarkResult[]
}

export type FormattedBenchmarkResult = Omit<BenchmarkResult, 'samples'> & {
id: string
sampleCount: number
}

function createFormattedBenchmarkReport(files: File[]) {
const report: FormattedBenchmarkReport = { files: [] }
for (const file of files) {
const groups: FormattedBenchmarkGroup[] = []
for (const task of getTasks(file)) {
if (task && task.type === 'suite') {
const benchmarks: FormattedBenchmarkResult[] = []
for (const t of task.tasks) {
const benchmark = t.meta.benchmark && t.result?.benchmark
if (benchmark) {
const { samples, ...rest } = benchmark
benchmarks.push({
id: t.id,
sampleCount: samples.length,
...rest,
})
}
}
if (benchmarks.length) {
groups.push({
fullName: getFullName(task, ' > '),
benchmarks,
})
}
}
}
report.files.push({
filepath: file.filepath,
groups,
})
}
return report
}

function flattenFormattedBenchmarkReport(report: FormattedBenchmarkReport): FlatBenchmarkReport {
const flat: FlatBenchmarkReport = {}
for (const file of report.files) {
for (const group of file.groups) {
for (const t of group.benchmarks)
flat[t.id] = t
}
}
return flat
}
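
For orientation, a hedged sketch of how a renderer might consume the flattened baseline — the real rendering lives in `tableRender.ts`, which this diff doesn't show, and the `hz` (ops/sec) field is assumed to survive the `Omit<..., 'samples'>` above, as it does in tinybench results:

```ts
// Illustrative only — not part of this commit.
function formatAgainstBaseline(
  current: FormattedBenchmarkResult,
  baseline?: FlatBenchmarkReport,
): string {
  const prev = baseline?.[current.id]
  if (!prev)
    return `${current.hz.toFixed(2)} hz`
  // ratio > 1 means the current run is faster than the stored baseline
  const ratio = current.hz / prev.hz
  return `${current.hz.toFixed(2)} hz (${ratio.toFixed(2)}x vs baseline)`
}
```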