Merge pull request #105 from bhperry/benchmark
Benchmark Pixel
bhperry authored Aug 27, 2024
2 parents 4c07641 + 3127be3 commit 9070c3c
Showing 10 changed files with 1,112 additions and 0 deletions.
81 changes: 81 additions & 0 deletions tools/README.md
@@ -0,0 +1,81 @@
# Benchmarking

The `bench` command provides a set of tools for benchmarking the performance of pixel under various scenarios.
It is intended as a development tool for comparing new implementations in pixel against previous iterations.

## Usage

List available benchmarks
```
go run main.go bench ls
```

Run a benchmark
```
go run main.go bench run [names...]
```
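
For example, to run two of the scenarios listed in the [Stats](#stats) table back to back:
```
go run main.go bench run sprite-moving imdraw-moving-batched
```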

Write benchmark stats to a file
```
go run main.go bench run [names...] -o my-stats.json
```

## Profiling

Run a benchmark with CPU/memory profiling enabled
```
go run main.go bench run [names...] -c cpu.prof -m mem.prof
```

View a profile on the command line
```
go tool pprof cpu.prof
```

View a profile in the browser (requires [graphviz](https://graphviz.org/download/))
```
go tool pprof -http :9000 cpu.prof
```
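
For example, to profile a single scenario and then inspect the CPU profile interactively in the browser:
```
go run main.go bench run sprite-static-batched -c cpu.prof -m mem.prof
go tool pprof -http :9000 cpu.prof
```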

## Results

To add your own results to this file, create an entry in the [Machine Info](#machine-info) table with
a unique identifier and basic info about the computer where you are running the benchmarks.
On Linux you can get most of this info from `lshw -short`. If the `MACHINE_NAME` environment variable is not set,
the benchmark stats fall back to the local username from environment variables or the os package.

Then run all benchmarks:
```
# Optional
export MACHINE_NAME=<machine-identifier>
export PIXEL_VERSION=<local-pixel-version>
go run main.go bench run --all
```
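
As noted above, the machine identifier falls back to the local username when `MACHINE_NAME` is not set. The sketch below only illustrates that fallback order; the `machineName` helper is hypothetical and not part of the bench tool:
```go
package main

import (
	"os"
	"os/user"
)

// machineName resolves the identifier used to label benchmark stats.
// Hypothetical sketch: prefer MACHINE_NAME, then the username from the
// environment, then the username reported by the standard library.
func machineName() string {
	if name := os.Getenv("MACHINE_NAME"); name != "" {
		return name
	}
	if name := os.Getenv("USER"); name != "" { // USERNAME on Windows
		return name
	}
	if u, err := user.Current(); err == nil {
		return u.Username
	}
	return "unknown"
}
```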

### Machine Info

| Machine | OS/Distro | CPU | Memory | GPU |
|--------------------|---------------------|-------------------------------|--------------------|----------------|
| bhperry-wsl | Linux Ubuntu 20.04 | Intel i7-8086K @ 4.00GHz | 8GiB | RTX 2080 |
| bhperry-win10 | Windows 10 | Intel i7-8086K @ 4.00GHz | 16GiB | RTX 2080 |

### Stats

| Machine | Pixel | Benchmark | Duration | Frames | FPS Avg | FPS Min | FPS Max | FPS Stdev |
|--------------------|--------|------------------------------|----------|--------|---------|---------|---------|-----------|
| bhperry-wsl | v2.2.1 | imdraw-moving | 30s | 2214 | 73.79 | 68 | 76 | 1.77 |
| bhperry-wsl | v2.2.1 | imdraw-moving-batched | 30s | 5658 | 188.57 | 166 | 195 | 5.86 |
| bhperry-wsl | v2.2.1 | imdraw-static | 30s | 2355 | 78.5 | 72 | 81 | 1.89 |
| bhperry-wsl | v2.2.1 | imdraw-static-batched | 30.01s | 6171 | 205.64 | 168 | 212 | 9.62 |
| bhperry-wsl | v2.2.1 | sprite-moving | 30.03s | 1451 | 48.32 | 45 | 50 | 1.25 |
| bhperry-wsl | v2.2.1 | sprite-moving-batched | 30.01s | 4085 | 136.12 | 127 | 142 | 3.17 |
| bhperry-wsl | v2.2.1 | sprite-static | 30.01s | 1518 | 50.59 | 47 | 52 | 1.45 |
| bhperry-wsl | v2.2.1 | sprite-static-batched | 30.01s | 5318 | 177.2 | 159 | 182 | 6.01 |
| bhperry-win10 | v2.2.1 | imdraw-moving | 30.03s | 1430 | 47.61 | 22 | 50 | 5.85 |
| bhperry-win10 | v2.2.1 | imdraw-moving-batched | 30s | 52017 | 1733.9 | 1635 | 1915 | 43.92 |
| bhperry-win10 | v2.2.1 | imdraw-static | 30.02s | 1569 | 52.27 | 51 | 53 | 0.64 |
| bhperry-win10 | v2.2.1 | imdraw-static-batched | 30.01s | 1517 | 50.55 | 21 | 53 | 6.62 |
| bhperry-win10 | v2.2.1 | sprite-moving | 30.03s | 1148 | 38.23 | 35 | 39 | 0.9 |
| bhperry-win10 | v2.2.1 | sprite-moving-batched | 30s | 39085 | 1302.79 | 1205 | 1329 | 23.93 |
| bhperry-win10 | v2.2.1 | sprite-static | 30.04s | 1218 | 40.54 | 38 | 42 | 0.88 |
| bhperry-win10 | v2.2.1 | sprite-static-batched | 30s | 40570 | 1352.29 | 1245 | 1380 | 26.04 |
141 changes: 141 additions & 0 deletions tools/benchmark/benchmark.go
@@ -0,0 +1,141 @@
package benchmark

import (
"fmt"
"slices"
"time"

"github.com/gopxl/pixel/v2"
"github.com/gopxl/pixel/v2/backends/opengl"
)

var Benchmarks = &Registry{benchmarks: map[string]Config{}}

// Config defines how to run a given benchmark, along with metadata describing it
type Config struct {
	Name        string
	Description string

	// New returns the benchmark to be executed
	New func(win *opengl.Window) (Benchmark, error)
	// Duration sets the maximum duration to run the benchmark
	Duration time.Duration
	// WindowConfig defines the input parameters to the benchmark's window
	WindowConfig opengl.WindowConfig
}

// Run executes the benchmark and calculates statistics about its performance
func (c Config) Run() (*Stats, error) {
fmt.Printf("Running benchmark %s\n", c.Name)

windowConfig := c.WindowConfig
title := windowConfig.Title
if title == "" {
title = c.Name
}
windowConfig.Title = fmt.Sprintf("%s | FPS -", title)

if windowConfig.Bounds.Empty() {
windowConfig.Bounds = pixel.R(0, 0, 1024, 1024)
}
if windowConfig.Position.Eq(pixel.ZV) {
windowConfig.Position = pixel.V(50, 50)
}

duration := c.Duration
if duration == 0 {
duration = 10 * time.Second
}

win, err := opengl.NewWindow(windowConfig)
if err != nil {
return nil, err
}
defer win.Destroy()

benchmark, err := c.New(win)
if err != nil {
return nil, err
}

frame := 0
frameSeconds := make([]int, 0)
prevFrameCount := 0
second := time.NewTicker(time.Second)
done := time.NewTicker(duration)
start := time.Now()
last := start
loop:
for frame = 0; !win.Closed(); frame++ {
now := time.Now()
benchmark.Step(win, now.Sub(last).Seconds())
last = now
win.Update()

select {
case <-second.C:
frameSeconds = append(frameSeconds, frame)
win.SetTitle(fmt.Sprintf("%s | FPS %v", title, frame-prevFrameCount))
prevFrameCount = frame
case <-done.C:
break loop
default:
}
}
stats := NewStats(c.Name, time.Since(start), frame, frameSeconds)

if win.Closed() {
return nil, fmt.Errorf("window closed early")
}

return stats, err
}

// Benchmark provides hooks into the stages of a window's lifecycle
type Benchmark interface {
	Step(win *opengl.Window, delta float64)
}

// Registry is a collection of benchmark configs
type Registry struct {
	benchmarks map[string]Config
}

// List returns a copy of all registered benchmark configs
func (r *Registry) List() []Config {
	configs := make([]Config, len(r.benchmarks))
	for i, name := range r.ListNames() {
		configs[i] = r.benchmarks[name]
	}
	return configs
}

// ListNames returns a sorted list of all registered benchmark names
func (r *Registry) ListNames() []string {
	names := make([]string, len(r.benchmarks))
	i := 0
	for name := range r.benchmarks {
		names[i] = name
		i++
	}
	slices.Sort(names)
	return names
}

// Add a benchmark config to the registry
func (r *Registry) Add(configs ...Config) {
	for _, config := range configs {
		r.benchmarks[config.Name] = config
	}
}

// Get a benchmark config by name
func (r *Registry) Get(name string) (Config, error) {
	config, ok := r.benchmarks[name]
	if !ok {
		return config, fmt.Errorf("unknown benchmark %s", name)
	}

	return config, nil
}
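
For reference, a new scenario would be registered with the `Benchmarks` registry through `Registry.Add`, for instance from an `init` function in the same package. The sketch below is illustrative only; the `noop` name and `noopBenchmark` type are hypothetical and not part of this commit:
```go
package benchmark

import (
	"time"

	"github.com/gopxl/pixel/v2/backends/opengl"
)

// noopBenchmark is a hypothetical scenario that draws nothing, so the
// measured FPS reflects raw window-update overhead.
type noopBenchmark struct{}

// Step is called once per frame by Config.Run; a real scenario would issue
// its draw calls here using the provided window and frame delta.
func (noopBenchmark) Step(win *opengl.Window, delta float64) {}

func init() {
	Benchmarks.Add(Config{
		Name:        "noop",
		Description: "Hypothetical scenario that renders nothing",
		Duration:    30 * time.Second,
		New: func(win *opengl.Window) (Benchmark, error) {
			return noopBenchmark{}, nil
		},
	})
}
```
Once registered this way, the scenario should appear in `go run main.go bench ls` and be runnable by name.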