Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
182 changes: 92 additions & 90 deletions test/benchmark/suite/render.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,59 +11,14 @@ import (
"bytes"
"fmt"
"io"
"math"
"os"
"strconv"
"path"
"sort"
"strings"
"text/tabwriter"
)

const (
omitEmptyValue = "-"
benchmarkEnvPrefix = "BENCHMARK_"

querySum = "Sum"
queryAvg = "Avg"
queryMin = "Min"
queryMax = "Max"
)

type tableHeader struct {
name string
unit string
promQL string // only valid for metrics table
queryType string
}

var metricsTableHeader = []tableHeader{
{
name: "Test Name",
},
{
name: "Envoy Gateway Memory",
unit: "MiB",
promQL: `process_resident_memory_bytes{namespace="envoy-gateway-system", control_plane="envoy-gateway"}/1024/1024`,
queryType: querySum,
},
{
name: "Envoy Gateway CPU",
unit: "s",
promQL: `process_cpu_seconds_total{namespace="envoy-gateway-system", control_plane="envoy-gateway"}`,
queryType: querySum,
},
{
name: "Envoy Proxy Memory (Avg)",
unit: "MiB",
promQL: `container_memory_working_set_bytes{namespace="envoy-gateway-system",container="envoy"}/1024/1024`,
queryType: queryAvg,
},
{
name: "Envoy Proxy CPU (Avg)",
unit: "s",
promQL: `container_cpu_usage_seconds_total{namespace="envoy-gateway-system",container="envoy"}`,
queryType: queryAvg,
},
}

// RenderReport renders a report out of given list of benchmark report in Markdown format.
func RenderReport(writer io.Writer, name, description string, titleLevel int, reports []*BenchmarkReport) error {
writeSection(writer, "Test: "+name, titleLevel, description)
Expand All @@ -73,11 +28,12 @@ func RenderReport(writer io.Writer, name, description string, titleLevel int, re
return err
}

writeSection(writer, "Metrics", titleLevel+1, "")
writeSection(writer, "Metrics", titleLevel+1,
"The CPU usage statistics of both control-plane and data-plane are the CPU usage per second over the past 30 seconds.")
renderMetricsTable(writer, reports)

writeSection(writer, "Profiles", titleLevel+1, renderProfilesNote())
renderProfilesTable(writer, "Memory", "heap", titleLevel+2, reports)
renderProfilesTable(writer, "Heap", "heap", titleLevel+2, reports)

return nil
}
Expand All @@ -90,22 +46,23 @@ func newMarkdownStyleTableWriter(writer io.Writer) *tabwriter.Writer {
func renderEnvSettingsTable(writer io.Writer) {
table := newMarkdownStyleTableWriter(writer)

headers := []tableHeader{
{name: "RPS"},
{name: "Connections"},
{name: "Duration", unit: "s"},
{name: "CPU Limits", unit: "m"},
{name: "Memory Limits", unit: "MiB"},
headers := []string{
"RPS",
"Connections",
"Duration (Seconds)",
"CPU Limits (m)",
"Memory Limits (MiB)",
}
writeTableHeader(table, headers)

writeTableRow(table, headers, func(_ int, h tableHeader) string {
env := strings.ReplaceAll(strings.ToUpper(h.name), " ", "_")
if v, ok := os.LookupEnv(benchmarkEnvPrefix + env); ok {
return v
}
return omitEmptyValue
})
data := []string{
os.Getenv("BENCHMARK_RPS"),
os.Getenv("BENCHMARK_CONNECTIONS"),
os.Getenv("BENCHMARK_DURATION"),
os.Getenv("BENCHMARK_CPU_LIMITS"),
os.Getenv("BENCHMARK_MEMORY_LIMITS"),
}
writeTableRow(table, data)

_ = table.Flush()
}
Expand All @@ -129,20 +86,20 @@ func renderResultsTable(writer io.Writer, reports []*BenchmarkReport) error {
func renderMetricsTable(writer io.Writer, reports []*BenchmarkReport) {
table := newMarkdownStyleTableWriter(writer)

writeTableHeader(table, metricsTableHeader)
// write headers
headers := []string{
"Test Name",
"Envoy Gateway Memory (MiB) <br> min/max/means",
"Envoy Gateway CPU (%) <br> min/max/means",
"Averaged Envoy Proxy Memory (MiB) <br> min/max/means",
"Averaged Envoy Proxy CPU (%) <br> min/max/means",
}
writeTableHeader(table, headers)

for _, report := range reports {
writeTableRow(table, metricsTableHeader, func(_ int, h tableHeader) string {
if len(h.promQL) == 0 {
return report.Name
}

if v, ok := report.Metrics[h.name]; ok {
return strconv.FormatFloat(v, 'f', -1, 64)
}

return omitEmptyValue
})
data := []string{report.Name}
data = append(data, getSamplesMinMaxMeans(report.Samples)...)
writeTableRow(table, data)
}

_ = table.Flush()
Expand All @@ -156,19 +113,32 @@ You can visualize them in a web page by running:
%s

Currently, the supported profile types are:
- heap
- heap (memory)
`, "`/profiles`", "`{ProfileType}.{TestCase}.pprof`", "```shell\ngo tool pprof -http=: path/to/your.pprof\n```")
}

func renderProfilesTable(writer io.Writer, target, key string, titleLevel int, reports []*BenchmarkReport) {
writeSection(writer, target, titleLevel, "")
writeSection(writer, target, titleLevel,
"The profiles were sampled when Envoy Gateway Memory is at its maximum.")

for _, report := range reports {
// Get the heap profile when control plane memory is at its maximum.
sortedSamples := make([]BenchmarkMetricSample, len(report.Samples))
copy(sortedSamples, report.Samples)
sort.Slice(sortedSamples, func(i, j int) bool {
return sortedSamples[i].ControlPlaneMem > sortedSamples[j].ControlPlaneMem
})

heapPprof := sortedSamples[0].HeapProfile
heapPprofPath := path.Join(report.ProfilesOutputDir, fmt.Sprintf("heap.%s.pprof", report.Name))
_ = os.WriteFile(heapPprofPath, heapPprof, 0o600)

// The image is not rendered yet, so this path is a placeholder.
// The image will be rendered after the test has finished.
rootDir := strings.SplitN(heapPprofPath, "/", 2)[0]
heapPprofPath = strings.TrimPrefix(heapPprofPath, rootDir+"/")
writeSection(writer, report.Name, titleLevel+1,
fmt.Sprintf("![%s-%s](%s.png)", key, report.Name,
strings.TrimSuffix(report.ProfilesPath[key], ".pprof")))
fmt.Sprintf("![%s-%s](%s.png)", key, report.Name, strings.TrimSuffix(heapPprofPath, ".pprof")))
}
}

Expand All @@ -194,21 +164,16 @@ func writeCollapsibleSection(writer io.Writer, title string, content []byte) {
`, title, summary)
}

func writeTableHeader(table *tabwriter.Writer, headers []tableHeader) {
writeTableRow(table, headers, func(_ int, h tableHeader) string {
if len(h.unit) > 0 {
return fmt.Sprintf("%s (%s)", h.name, h.unit)
}
return h.name
})
// writeTableHeader writes the Markdown table header row, then the delimiter
// row ("|---|---|...") that Markdown requires directly beneath the header.
func writeTableHeader(table *tabwriter.Writer, headers []string) {
	writeTableRow(table, headers)
	writeTableDelimiter(table, len(headers))
}

// writeTableRow writes one row in Markdown table style according to headers.
func writeTableRow(table *tabwriter.Writer, headers []tableHeader, on func(int, tableHeader) string) {
// writeTableRow writes one row in Markdown table style.
func writeTableRow(table *tabwriter.Writer, data []string) {
row := "|"
for i, v := range headers {
row += on(i, v) + "\t"
for _, v := range data {
row += v + "\t"
}

_, _ = fmt.Fprintln(table, row)
Expand All @@ -223,3 +188,40 @@ func writeTableDelimiter(table *tabwriter.Writer, n int) {

_, _ = fmt.Fprintln(table, sep)
}

// getSamplesMinMaxMeans summarizes the sampled benchmark metrics as
// "min / max / mean" strings, one per table column, in this fixed order:
// control-plane memory, control-plane CPU, data-plane memory, data-plane CPU.
func getSamplesMinMaxMeans(samples []BenchmarkMetricSample) []string {
	n := len(samples)
	// One series per column, pre-sized to the sample count.
	series := [4][]float64{
		make([]float64, 0, n),
		make([]float64, 0, n),
		make([]float64, 0, n),
		make([]float64, 0, n),
	}
	for _, s := range samples {
		series[0] = append(series[0], s.ControlPlaneMem)
		series[1] = append(series[1], s.ControlPlaneCPU)
		series[2] = append(series[2], s.DataPlaneMem)
		series[3] = append(series[3], s.DataPlaneCPU)
	}

	out := make([]string, 0, len(series))
	for _, vals := range series {
		out = append(out, getMetricsMinMaxMeans(vals))
	}
	return out
}

// getMetricsMinMaxMeans formats a metric series as "min / max / mean" with
// two decimal places. An empty (or nil) series renders as "0.00 / 0.00 / 0.00".
func getMetricsMinMaxMeans(metrics []float64) string {
	// Early return for the empty case avoids the MaxFloat64 sentinel and the
	// post-loop reset the previous version needed, and keeps the loop below
	// free of length checks.
	if len(metrics) == 0 {
		return "0.00 / 0.00 / 0.00"
	}

	// lo/hi deliberately avoid shadowing the predeclared min/max builtins.
	lo, hi := metrics[0], metrics[0]
	var sum float64
	for _, v := range metrics {
		lo = math.Min(lo, v)
		hi = math.Max(hi, v)
		sum += v
	}

	return fmt.Sprintf("%.2f / %.2f / %.2f", lo, hi, sum/float64(len(metrics)))
}
Loading