From 9b95d4bb6201cbfdd65868566fce0019c6446ad4 Mon Sep 17 00:00:00 2001
From: Tom Longhurst <30480171+thomhurst@users.noreply.github.com>
Date: Sun, 22 Mar 2026 19:45:11 +0000
Subject: [PATCH 1/2] perf: add startup performance measurement scripts
Add PowerShell scripts for measuring and comparing TUnit startup
performance, useful for tracking JIT overhead improvements:
- measure-startup.ps1: wall-clock timing with statistical analysis
- count-generated-methods.ps1: static analysis of generated method counts
- compare-branches.ps1: side-by-side branch comparison
- measure-jit.ps1: JIT trace collection and generated method analysis
---
.../compare-branches.ps1 | 144 +++++++++++++
.../count-generated-methods.ps1 | 136 ++++++++++++
TUnit.PerformanceBenchmarks/measure-jit.ps1 | 200 ++++++++++++++++++
.../measure-startup.ps1 | 178 ++++++++++++++++
4 files changed, 658 insertions(+)
create mode 100644 TUnit.PerformanceBenchmarks/compare-branches.ps1
create mode 100644 TUnit.PerformanceBenchmarks/count-generated-methods.ps1
create mode 100644 TUnit.PerformanceBenchmarks/measure-jit.ps1
create mode 100644 TUnit.PerformanceBenchmarks/measure-startup.ps1
diff --git a/TUnit.PerformanceBenchmarks/compare-branches.ps1 b/TUnit.PerformanceBenchmarks/compare-branches.ps1
new file mode 100644
index 0000000000..15e34d39be
--- /dev/null
+++ b/TUnit.PerformanceBenchmarks/compare-branches.ps1
@@ -0,0 +1,144 @@
+#!/usr/bin/env pwsh
+# Compare TUnit startup performance between two git refs (branches/commits/tags)
+# Usage:
+# ./compare-branches.ps1 -Baseline main -Current HEAD
+# ./compare-branches.ps1 -Baseline main -Current 001-sourcegen-startup-perf -Iterations 5
+# ./compare-branches.ps1 -BaselineFile benchmark-results/startup-baseline.json # Compare against saved baseline
+
+param(
+ [string]$Baseline = "main",
+ [string]$Current = "HEAD",
+ [string]$BaselineFile = "",
+ [int]$Iterations = 3,
+ [string]$Filter = "/*/*/*/*",
+ [string]$Configuration = "Release"
+)
+
+$ErrorActionPreference = "Stop"
+$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
+$repoRoot = Split-Path -Parent $scriptDir
+$resultsDir = Join-Path $scriptDir "benchmark-results"
+
+New-Item -ItemType Directory -Force -Path $resultsDir | Out-Null
+
+Write-Host "============================================" -ForegroundColor Cyan
+Write-Host " TUnit Branch Performance Comparison" -ForegroundColor Cyan
+Write-Host "============================================" -ForegroundColor Cyan
+Write-Host ""
+
+function Measure-Branch {
+    param(
+        [string]$Ref,
+        [string]$Label
+    )
+
+    Write-Host "--- Measuring: $Label ($Ref) ---" -ForegroundColor Yellow
+
+    # Save current state (branch name AND commit hash: the hash is needed to
+    $originalBranch = git rev-parse --abbrev-ref HEAD 2>$null
+    $originalCommit = git rev-parse HEAD 2>$null
+    $hasStash = $false
+
+    # Stash any uncommitted changes
+    $status = git status --porcelain 2>$null
+    if ($status) {
+        Write-Host "  Stashing uncommitted changes..." -ForegroundColor DarkGray
+        git stash push -m "compare-branches temp stash" --quiet 2>$null
+        $hasStash = $true
+    }
+
+    try {
+        # Checkout the target ref
+        Write-Host "  Checking out $Ref..." -ForegroundColor DarkGray
+        git checkout $Ref --quiet 2>$null
+        if ($LASTEXITCODE -ne 0) {
+            Write-Host "  Failed to checkout $Ref" -ForegroundColor Red
+            return $null
+        }
+
+        # Run measurement (NOTE: runs the checked-out ref's copy of measure-startup.ps1)
+        $measureScript = Join-Path $scriptDir "measure-startup.ps1"
+        & $measureScript -Iterations $Iterations -Filter $Filter -Configuration $Configuration
+
+        # Find the latest results file
+        $latestResult = Get-ChildItem -Path $resultsDir -Filter "startup-*.json" |
+            Sort-Object LastWriteTime -Descending |
+            Select-Object -First 1
+
+        if ($latestResult) {
+            $data = Get-Content $latestResult.FullName | ConvertFrom-Json
+            return $data
+        }
+        return $null
+    }
+    finally {
+        $restoreRef = if ($originalBranch -eq "HEAD") { $originalCommit } else { $originalBranch }  # detached HEAD: restore the saved commit, not the literal "HEAD"
+        Write-Host "  Restoring to $restoreRef..." -ForegroundColor DarkGray
+        git checkout $restoreRef --quiet 2>$null
+        if ($hasStash) {
+            git stash pop --quiet 2>$null
+        }
+    }
+}
+
+# Run measurements (saved-baseline file takes precedence over a live baseline run)
+if ($BaselineFile -and (Test-Path $BaselineFile)) {
+    Write-Host "Using saved baseline: $BaselineFile" -ForegroundColor Green
+    $baselineData = Get-Content $BaselineFile | ConvertFrom-Json
+} else {
+    $baselineData = Measure-Branch -Ref $Baseline -Label "Baseline"
+}
+
+Write-Host ""
+$currentData = Measure-Branch -Ref $Current -Label "Current"
+
+if (-not $baselineData -or -not $currentData) {
+    Write-Host "Failed to collect measurements for one or both branches." -ForegroundColor Red
+    exit 1
+}
+
+# Display comparison
+Write-Host ""
+Write-Host "============================================" -ForegroundColor Cyan
+Write-Host "  Comparison Results" -ForegroundColor Cyan
+Write-Host "============================================" -ForegroundColor Cyan
+Write-Host ""
+
+$bMedian = $baselineData.Summary.MedianMs
+$cMedian = $currentData.Summary.MedianMs
+$diff = $cMedian - $bMedian
+$pct = if ($bMedian -gt 0) { ($diff / $bMedian) * 100 } else { 0 }
+
+$changeSymbol = if ($diff -lt 0) { "faster" } elseif ($diff -gt 0) { "slower" } else { "same" }
+$changeColor = if ($diff -lt 0) { "Green" } elseif ($diff -gt 0) { "Red" } else { "Yellow" }
+
+Write-Host "  Metric            Baseline          Current           Change" -ForegroundColor White
+Write-Host "  ───────────────   ───────────────   ───────────────   ───────────────"
+Write-Host ("  Median        {0,10}ms      {1,10}ms      " -f $bMedian.ToString('F0'), $cMedian.ToString('F0')) -NoNewline
+Write-Host ("{0}{1}ms ({2}%)" -f $(if($diff -ge 0){"+"}else{""}), $diff.ToString('F0'), $pct.ToString('F1')) -ForegroundColor $changeColor
+
+Write-Host ("  Average       {0,10}ms      {1,10}ms" -f $baselineData.Summary.AverageMs.ToString('F0'), $currentData.Summary.AverageMs.ToString('F0'))
+Write-Host ("  Min           {0,10}ms      {1,10}ms" -f $baselineData.Summary.MinMs.ToString('F0'), $currentData.Summary.MinMs.ToString('F0'))
+Write-Host ("  Max           {0,10}ms      {1,10}ms" -f $baselineData.Summary.MaxMs.ToString('F0'), $currentData.Summary.MaxMs.ToString('F0'))
+Write-Host ("  Tests         {0,10}        {1,10}" -f $baselineData.TestCount, $currentData.TestCount)
+
+Write-Host ""
+Write-Host "  Verdict: " -NoNewline
+Write-Host "$([math]::Abs($pct).ToString('F1'))% $changeSymbol" -ForegroundColor $changeColor
+Write-Host ""
+
+# Save comparison
+$comparison = @{
+    Timestamp = Get-Date -Format "yyyy-MM-dd_HH-mm-ss"
+    Baseline = @{ Ref = $Baseline; Summary = $baselineData.Summary; TestCount = $baselineData.TestCount }
+    Current = @{ Ref = $Current; Summary = $currentData.Summary; TestCount = $currentData.TestCount }
+    Change = @{
+        MedianDiffMs = [math]::Round($diff, 1)
+        MedianDiffPercent = [math]::Round($pct, 1)
+        Direction = $changeSymbol
+    }
+}
+
+$compFile = Join-Path $resultsDir "comparison-$($comparison.Timestamp).json"
+$comparison | ConvertTo-Json -Depth 5 | Out-File -FilePath $compFile -Encoding utf8
+Write-Host "  Saved to: $compFile" -ForegroundColor DarkGray
diff --git a/TUnit.PerformanceBenchmarks/count-generated-methods.ps1 b/TUnit.PerformanceBenchmarks/count-generated-methods.ps1
new file mode 100644
index 0000000000..462e4f46f8
--- /dev/null
+++ b/TUnit.PerformanceBenchmarks/count-generated-methods.ps1
@@ -0,0 +1,136 @@
+#!/usr/bin/env pwsh
+# Count generated source-generator methods in a compiled TUnit assembly
+# Uses binary string scanning — no assembly loading needed
+#
+# Usage:
+# ./count-generated-methods.ps1 # Auto-detect from benchmark build
+# ./count-generated-methods.ps1 -AssemblyPath path/to/assembly.dll
+
+param(
+ [string]$AssemblyPath = "",
+ [string]$Configuration = "Release"
+)
+
+$ErrorActionPreference = "Stop"
+$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
+
+if (-not $AssemblyPath) {
+ $AssemblyPath = Join-Path $scriptDir "bin" $Configuration "net9.0" "TUnit.PerformanceBenchmarks.dll"
+}
+
+if (-not (Test-Path $AssemblyPath)) {
+ Write-Host "Assembly not found: $AssemblyPath" -ForegroundColor Red
+ Write-Host "Build first: dotnet build -c $Configuration" -ForegroundColor Yellow
+ exit 1
+}
+
+Write-Host "============================================" -ForegroundColor Cyan
+Write-Host " Generated Method Count Analysis" -ForegroundColor Cyan
+Write-Host "============================================" -ForegroundColor Cyan
+Write-Host ""
+Write-Host "Assembly: $(Split-Path -Leaf $AssemblyPath)" -ForegroundColor White
+Write-Host "Size: $([math]::Round((Get-Item $AssemblyPath).Length / 1MB, 2)) MB"
+Write-Host ""
+
+# Read the binary and extract ASCII strings (method names are in the metadata string heap)
+$content = [System.IO.File]::ReadAllText($AssemblyPath, [System.Text.Encoding]::ASCII)
+
+# Count method patterns in the string table
+# These patterns are unique to source-generated code
+
+# __InvokeTest_ appears once per test method
+$invokeTestMatches = [regex]::Matches($content, "__InvokeTest_[A-Za-z0-9_]+")
+$invokeTestMethods = ($invokeTestMatches | Select-Object -ExpandProperty Value -Unique).Count
+
+# __Materialize_ appears once per test method (deferred)
+$materializeMatches = [regex]::Matches($content, "__Materialize_[A-Za-z0-9_]+")
+$materializeMethods = ($materializeMatches | Select-Object -ExpandProperty Value -Unique).Count
+
+# __CreateAttributes_ appears for each unique attribute group
+$createAttrMatches = [regex]::Matches($content, "__CreateAttributes_\d+")
+$createAttributeMethods = ($createAttrMatches | Select-Object -ExpandProperty Value -Unique).Count
+
+# Count TestSource class names (they appear as type names in metadata)
+$testSourceMatches = [regex]::Matches($content, "[A-Za-z0-9_]+__TestSource")
+$testSourceTypes = ($testSourceMatches | Select-Object -ExpandProperty Value -Unique).Count
+
+# Count _r_ registration fields
+$regFieldMatches = [regex]::Matches($content, "_r_[A-Za-z0-9_]+__TestSource")
+$registrationFields = ($regFieldMatches | Select-Object -ExpandProperty Value -Unique).Count
+
+# GetTests and EnumerateTestDescriptors are standard names — count by TestSource types
+# (each TestSource class has exactly 1 GetTests and 1 EnumerateTestDescriptors)
+$getTestsMethods = $testSourceTypes
+$enumerateDescriptorMethods = $testSourceTypes
+
+# CreateInstance — one per TestSource class
+$createInstanceMethods = $testSourceTypes
+
+$totalGenerated = $invokeTestMethods + $materializeMethods + $getTestsMethods +
+ $enumerateDescriptorMethods + $createAttributeMethods + $createInstanceMethods
+
+$startupJit = $invokeTestMethods + $getTestsMethods + $enumerateDescriptorMethods +
+ $createAttributeMethods + $createInstanceMethods
+
+Write-Host " Type Counts:" -ForegroundColor Yellow
+Write-Host " TestSource classes: $testSourceTypes"
+Write-Host " Registration fields: $registrationFields"
+Write-Host ""
+Write-Host " Method Counts:" -ForegroundColor Yellow
+Write-Host " __InvokeTest_* methods: $invokeTestMethods " -NoNewline
+Write-Host "(JIT'd at startup)" -ForegroundColor Red
+Write-Host " __Materialize_* methods: $materializeMethods " -NoNewline
+Write-Host "(deferred)" -ForegroundColor Green
+Write-Host " GetTests methods: $getTestsMethods " -NoNewline
+Write-Host "(JIT'd at startup)" -ForegroundColor Red
+Write-Host " EnumerateTestDescriptors: $enumerateDescriptorMethods " -NoNewline
+Write-Host "(JIT'd at startup)" -ForegroundColor Red
+Write-Host " __CreateAttributes_*: $createAttributeMethods " -NoNewline
+Write-Host "(JIT'd at startup)" -ForegroundColor Red
+Write-Host " CreateInstance methods: $createInstanceMethods " -NoNewline
+Write-Host "(JIT'd at startup)" -ForegroundColor Red
+Write-Host ""
+Write-Host " Summary:" -ForegroundColor Cyan
+Write-Host " Total generated methods: $totalGenerated" -ForegroundColor White
+Write-Host " Est. JIT'd at startup: $startupJit" -ForegroundColor Red
+Write-Host " Deferred (materializers): $materializeMethods" -ForegroundColor Green
+Write-Host ""
+
+# Target comparison
+$targetStartupJit = 3 # After optimization: ~3 methods at startup
+$reduction = if ($startupJit -gt 0) { [math]::Round((1 - $targetStartupJit / $startupJit) * 100, 1) } else { 0 }
+Write-Host " Target (after optimization):" -ForegroundColor Yellow
+Write-Host " Current startup JIT: $startupJit methods"
+Write-Host " Target startup JIT: ~$targetStartupJit methods"
+Write-Host " Targeted reduction: $reduction%"
+Write-Host ""
+
+# Save as JSON
+$resultsDir = Join-Path $scriptDir "benchmark-results"
+New-Item -ItemType Directory -Force -Path $resultsDir | Out-Null
+
+$timestamp = Get-Date -Format "yyyy-MM-dd_HH-mm-ss"
+$report = @{
+ Timestamp = $timestamp
+ Assembly = Split-Path -Leaf $AssemblyPath
+ AssemblySizeMB = [math]::Round((Get-Item $AssemblyPath).Length / 1MB, 2)
+ Configuration = $Configuration
+ TypeCounts = @{
+ TestSourceTypes = $testSourceTypes
+ RegistrationFields = $registrationFields
+ }
+ MethodCounts = @{
+ InvokeTest = $invokeTestMethods
+ Materialize = $materializeMethods
+ GetTests = $getTestsMethods
+ EnumerateTestDescriptors = $enumerateDescriptorMethods
+ CreateAttributes = $createAttributeMethods
+ CreateInstance = $createInstanceMethods
+ TotalGenerated = $totalGenerated
+ EstimatedStartupJit = $startupJit
+ }
+}
+
+$reportFile = Join-Path $resultsDir "method-count-$timestamp.json"
+$report | ConvertTo-Json -Depth 5 | Out-File -FilePath $reportFile -Encoding utf8
+Write-Host " Results saved to: $reportFile" -ForegroundColor DarkGray
diff --git a/TUnit.PerformanceBenchmarks/measure-jit.ps1 b/TUnit.PerformanceBenchmarks/measure-jit.ps1
new file mode 100644
index 0000000000..3c5da96497
--- /dev/null
+++ b/TUnit.PerformanceBenchmarks/measure-jit.ps1
@@ -0,0 +1,200 @@
+#!/usr/bin/env pwsh
+# Measure JIT compilation count during TUnit startup using dotnet-counters
+# This captures the number of methods JIT-compiled, which is our primary optimization target.
+#
+# Usage:
+# ./measure-jit.ps1 # Default: full run
+# ./measure-jit.ps1 -Filter "/*/*/SimpleTests_01/*" # Filtered run
+#
+# Prerequisites:
+# dotnet tool install --global dotnet-counters
+# dotnet tool install --global dotnet-trace (for detailed JIT trace)
+
+param(
+ [string]$Filter = "/*/*/*/*",
+ [string]$Configuration = "Release",
+ [switch]$DetailedTrace,
+ [switch]$SkipBuild
+)
+
+$ErrorActionPreference = "Stop"
+$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
+$projectFile = Join-Path $scriptDir "TUnit.PerformanceBenchmarks.csproj"
+$resultsDir = Join-Path $scriptDir "benchmark-results"
+
+New-Item -ItemType Directory -Force -Path $resultsDir | Out-Null
+
+$timestamp = Get-Date -Format "yyyy-MM-dd_HH-mm-ss"
+
+Write-Host "============================================" -ForegroundColor Cyan
+Write-Host " TUnit JIT Compilation Measurement" -ForegroundColor Cyan
+Write-Host "============================================" -ForegroundColor Cyan
+Write-Host ""
+
+# Build
+if (-not $SkipBuild) {
+ Write-Host "[1/3] Building $Configuration..." -ForegroundColor Yellow
+ dotnet build $projectFile -c $Configuration --no-incremental -v q 2>&1 | Out-Null
+ if ($LASTEXITCODE -ne 0) {
+ Write-Host "Build failed!" -ForegroundColor Red
+ exit 1
+ }
+ Write-Host " Done" -ForegroundColor Green
+}
+
+# Find executable
+$tfm = "net9.0"
+$exeDir = Join-Path $scriptDir "bin" $Configuration $tfm
+$exeName = "TUnit.PerformanceBenchmarks"
+if ($IsWindows -or $env:OS -match "Windows") {
+ $exePath = Join-Path $exeDir "$exeName.exe"
+} else {
+ $exePath = Join-Path $exeDir $exeName
+}
+
+Write-Host ""
+Write-Host "[2/3] Measuring JIT compilations..." -ForegroundColor Yellow
+Write-Host " Filter: $Filter"
+Write-Host ""
+
+if ($DetailedTrace) {
+ # Use dotnet-trace for detailed JIT method list
+ $traceFile = Join-Path $resultsDir "jit-trace-$timestamp.nettrace"
+
+ Write-Host " Collecting detailed JIT trace..." -ForegroundColor DarkGray
+ Write-Host " (This captures every JIT'd method name)" -ForegroundColor DarkGray
+
+ # Start the process and collect JIT events
+ $env:DOTNET_JitCollect64BitCounts = "1"
+ $env:COMPlus_JitCollect64BitCounts = "1"
+
+ if (Get-Command dotnet-trace -ErrorAction SilentlyContinue) {
+ dotnet-trace collect `
+ --providers "Microsoft-Windows-DotNETRuntime:0x10:5" `
+ --output $traceFile `
+ -- $exePath --treenode-filter "$Filter" --exit-on-process-exit 2>&1 | Out-Null
+
+ Write-Host " Trace saved to: $traceFile" -ForegroundColor Green
+ Write-Host " View with: dotnet-trace convert $traceFile --format Speedscope" -ForegroundColor DarkGray
+ } else {
+ Write-Host " dotnet-trace not found. Install with: dotnet tool install --global dotnet-trace" -ForegroundColor Red
+ }
+} else {
+ # Use environment variable to enable JIT compilation logging
+ # This is lighter weight than dotnet-trace
+
+ # Method 1: Use DOTNET_JitDisasm environment variable won't work (too verbose)
+ # Method 2: Use ETW events via dotnet-counters for aggregate count
+ # Method 3: Simple approach - use runtime events
+
+ # The simplest reliable approach: run with JIT ETW events and count MethodJittingStarted events
+ $env:DOTNET_EnableDiagnostics = "1"
+
+ # Run the test and capture output + timing
+ $sw = [System.Diagnostics.Stopwatch]::StartNew()
+ $output = & $exePath --treenode-filter "$Filter" --diagnostic-verbosity high --exit-on-process-exit 2>&1
+ $sw.Stop()
+
+ $exitCode = $LASTEXITCODE
+ $wallTimeMs = $sw.Elapsed.TotalMilliseconds
+
+ # Parse test count
+ $testCount = 0
+ foreach ($line in $output) {
+ if ($line -match "Total:\s*(\d+)") { $testCount = [int]$Matches[1] }
+ }
+
+ Write-Host ""
+ Write-Host "[3/3] Results" -ForegroundColor Yellow
+ Write-Host ""
+ Write-Host " Wall-Clock Time: $($wallTimeMs.ToString('F0'))ms" -ForegroundColor Cyan
+ Write-Host " Tests Discovered: $testCount" -ForegroundColor Cyan
+ Write-Host ""
+}
+
+# Count generated methods in source-generator output (static analysis)
+Write-Host " Static Analysis (generated method count):" -ForegroundColor Cyan
+
+$generatedDir = Join-Path $scriptDir "obj" $Configuration $tfm "generated" "TUnit.Core.SourceGenerator" "TUnit.Core.SourceGenerator.Generators.TestMetadataGenerator"
+
+if (Test-Path $generatedDir) {
+ $generatedFiles = Get-ChildItem -Path $generatedDir -Filter "*.g.cs" -Recurse
+
+ $totalMethods = 0
+ $invokeMethods = 0
+ $materializeMethods = 0
+ $getTestsMethods = 0
+ $enumerateDescriptorMethods = 0
+ $attributeMethods = 0
+ $createInstanceMethods = 0
+ $testSourceClasses = 0
+
+ foreach ($file in $generatedFiles) {
+ $content = Get-Content $file.FullName -Raw
+
+ # Count different method types
+ $m = [regex]::Matches($content, "__InvokeTest_")
+ $invokeMethods += $m.Count
+
+ $m = [regex]::Matches($content, "__Materialize_")
+ $materializeMethods += $m.Count
+
+ $m = [regex]::Matches($content, "GetTests\(")
+ $getTestsMethods += $m.Count
+
+ $m = [regex]::Matches($content, "EnumerateTestDescriptors\(")
+ $enumerateDescriptorMethods += $m.Count
+
+ $m = [regex]::Matches($content, "__CreateAttributes_")
+ $attributeMethods += $m.Count
+
+ $m = [regex]::Matches($content, "CreateInstance\(")
+ $createInstanceMethods += $m.Count
+
+ if ($content -match "TestSource") {
+ $testSourceClasses++
+ }
+ }
+
+ $totalMethods = $invokeMethods + $materializeMethods + $getTestsMethods + $enumerateDescriptorMethods + $attributeMethods + $createInstanceMethods
+
+ Write-Host " Generated files: $($generatedFiles.Count)"
+ Write-Host " TestSource classes: $testSourceClasses"
+ Write-Host " __InvokeTest_* methods: $invokeMethods (JIT'd at startup)"
+ Write-Host " __Materialize_* methods: $materializeMethods (deferred, only with filter)"
+ Write-Host " GetTests methods: $getTestsMethods"
+ Write-Host " EnumerateTestDescriptors: $enumerateDescriptorMethods"
+ Write-Host " __CreateAttributes_*: $attributeMethods"
+ Write-Host " CreateInstance methods: $createInstanceMethods"
+ Write-Host ""
+ Write-Host " Total generated methods: $totalMethods" -ForegroundColor White
+ Write-Host " JIT'd at startup (est.): $($invokeMethods + $getTestsMethods + $enumerateDescriptorMethods + $attributeMethods + $createInstanceMethods)" -ForegroundColor Yellow
+ Write-Host " Deferred (materializers): $materializeMethods" -ForegroundColor Green
+} else {
+ Write-Host " Generated source directory not found. Build first with -Configuration $Configuration" -ForegroundColor DarkYellow
+ Write-Host " Expected: $generatedDir" -ForegroundColor DarkGray
+}
+
+# Save report
+$report = @{
+ Timestamp = $timestamp
+ Configuration = $Configuration
+ Filter = $Filter
+ WallTimeMs = if ($sw) { [math]::Round($wallTimeMs, 1) } else { $null }
+ TestCount = $testCount
+ GeneratedMethodCounts = @{
+ InvokeTest = $invokeMethods
+ Materialize = $materializeMethods
+ GetTests = $getTestsMethods
+ EnumerateTestDescriptors = $enumerateDescriptorMethods
+ CreateAttributes = $attributeMethods
+ CreateInstance = $createInstanceMethods
+ Total = $totalMethods
+ EstimatedStartupJit = $invokeMethods + $getTestsMethods + $enumerateDescriptorMethods + $attributeMethods + $createInstanceMethods
+ }
+}
+
+$reportFile = Join-Path $resultsDir "jit-$timestamp.json"
+$report | ConvertTo-Json -Depth 5 | Out-File -FilePath $reportFile -Encoding utf8
+Write-Host ""
+Write-Host " Results saved to: $reportFile" -ForegroundColor DarkGray
diff --git a/TUnit.PerformanceBenchmarks/measure-startup.ps1 b/TUnit.PerformanceBenchmarks/measure-startup.ps1
new file mode 100644
index 0000000000..779728b121
--- /dev/null
+++ b/TUnit.PerformanceBenchmarks/measure-startup.ps1
@@ -0,0 +1,178 @@
+#!/usr/bin/env pwsh
+# Measure TUnit source-gen vs reflection startup performance
+# Usage:
+# ./measure-startup.ps1 # Default: 3 iterations, Release build
+# ./measure-startup.ps1 -Iterations 5 # More iterations for precision
+# ./measure-startup.ps1 -SkipBuild # Skip rebuild if already built
+# ./measure-startup.ps1 -Filter "/*/*/SimpleTests_01/*" # Filtered run
+
+param(
+ [int]$Iterations = 3,
+ [switch]$SkipBuild,
+ [string]$Filter = "/*/*/*/*",
+ [string]$Configuration = "Release",
+ [switch]$IncludeJitStats
+)
+
+$ErrorActionPreference = "Stop"
+$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
+$projectDir = $scriptDir
+$projectFile = Join-Path $projectDir "TUnit.PerformanceBenchmarks.csproj"
+$resultsDir = Join-Path $projectDir "benchmark-results"
+
+# Ensure results directory exists
+New-Item -ItemType Directory -Force -Path $resultsDir | Out-Null
+
+$timestamp = Get-Date -Format "yyyy-MM-dd_HH-mm-ss"
+$resultsFile = Join-Path $resultsDir "startup-$timestamp.json"
+
+Write-Host "============================================" -ForegroundColor Cyan
+Write-Host " TUnit Startup Performance Measurement" -ForegroundColor Cyan
+Write-Host "============================================" -ForegroundColor Cyan
+Write-Host ""
+Write-Host "Config: $Configuration"
+Write-Host "Iterations: $Iterations"
+Write-Host "Filter: $Filter"
+Write-Host "JIT Stats: $IncludeJitStats"
+Write-Host ""
+
+# Step 1: Build
+if (-not $SkipBuild) {
+ Write-Host "[1/3] Building $Configuration..." -ForegroundColor Yellow
+ $buildSw = [System.Diagnostics.Stopwatch]::StartNew()
+ dotnet build $projectFile -c $Configuration --no-incremental -v q 2>&1 | Out-Null
+ if ($LASTEXITCODE -ne 0) {
+ Write-Host "Build failed!" -ForegroundColor Red
+ exit 1
+ }
+ $buildSw.Stop()
+ Write-Host " Build completed in $($buildSw.Elapsed.TotalSeconds.ToString('F1'))s" -ForegroundColor Green
+} else {
+ Write-Host "[1/3] Skipping build (--SkipBuild)" -ForegroundColor DarkGray
+}
+
+# Step 2: Find the built executable
+$tfm = "net9.0"
+$exeDir = Join-Path $projectDir "bin" $Configuration $tfm
+$exeName = "TUnit.PerformanceBenchmarks"
+if ($IsWindows -or $env:OS -match "Windows") {
+ $exePath = Join-Path $exeDir "$exeName.exe"
+} else {
+ $exePath = Join-Path $exeDir $exeName
+}
+
+if (-not (Test-Path $exePath)) {
+ # Fall back to dotnet run
+ Write-Host " Executable not found at $exePath, falling back to dotnet run" -ForegroundColor DarkYellow
+ $useDotnetRun = $true
+} else {
+ $useDotnetRun = $false
+ Write-Host " Using pre-built exe: $exePath" -ForegroundColor Green
+}
+
+# Step 3: Run measurements
+Write-Host ""
+Write-Host "[2/3] Running $Iterations iterations..." -ForegroundColor Yellow
+
+$results = @()
+
+for ($i = 1; $i -le $Iterations; $i++) {
+ Write-Host " Iteration $i/$Iterations... " -NoNewline
+
+ # Force GC before each run to reduce noise
+ [GC]::Collect()
+ [GC]::WaitForPendingFinalizers()
+ [GC]::Collect()
+
+ $sw = [System.Diagnostics.Stopwatch]::StartNew()
+
+ if ($useDotnetRun) {
+ $output = dotnet run --project $projectFile -c $Configuration --no-build -- --treenode-filter "$Filter" --report-trx --timeout 120 2>&1
+ } else {
+ $output = & $exePath --treenode-filter "$Filter" --report-trx --timeout 120 2>&1
+ }
+
+ $sw.Stop()
+ $exitCode = $LASTEXITCODE
+ $wallTimeMs = $sw.Elapsed.TotalMilliseconds
+
+ # Parse test count from output
+ $testCount = 0
+ $passedCount = 0
+ $failedCount = 0
+ foreach ($line in $output) {
+ if ($line -match "Passed:\s*(\d+)") { $passedCount = [int]$Matches[1] }
+ if ($line -match "Failed:\s*(\d+)") { $failedCount = [int]$Matches[1] }
+ if ($line -match "Total:\s*(\d+)") { $testCount = [int]$Matches[1] }
+ }
+
+ $result = @{
+ Iteration = $i
+ WallTimeMs = [math]::Round($wallTimeMs, 1)
+ TestCount = $testCount
+ Passed = $passedCount
+ Failed = $failedCount
+ ExitCode = $exitCode
+ }
+ $results += $result
+
+ $statusColor = if ($exitCode -eq 0) { "Green" } else { "Yellow" }
+ Write-Host "$($wallTimeMs.ToString('F0'))ms ($testCount tests)" -ForegroundColor $statusColor
+}
+
+# Step 4: Calculate statistics
+Write-Host ""
+Write-Host "[3/3] Results" -ForegroundColor Yellow
+Write-Host ""
+
+$times = $results | ForEach-Object { $_.WallTimeMs }
+$avg = ($times | Measure-Object -Average).Average
+$min = ($times | Measure-Object -Minimum).Minimum
+$max = ($times | Measure-Object -Maximum).Maximum
+$sorted = $times | Sort-Object
+$median = if ($sorted.Count % 2 -eq 0) {
+ ($sorted[$sorted.Count/2 - 1] + $sorted[$sorted.Count/2]) / 2
+} else {
+ $sorted[[math]::Floor($sorted.Count/2)]
+}
+
+# Standard deviation
+$sumSquares = 0
+foreach ($t in $times) { $sumSquares += ($t - $avg) * ($t - $avg) }
+$stddev = [math]::Sqrt($sumSquares / $times.Count)
+
+Write-Host " Wall-Clock Time:" -ForegroundColor Cyan
+Write-Host " Median: $($median.ToString('F0'))ms"
+Write-Host " Average: $($avg.ToString('F0'))ms"
+Write-Host " Min: $($min.ToString('F0'))ms"
+Write-Host " Max: $($max.ToString('F0'))ms"
+Write-Host " StdDev: $($stddev.ToString('F0'))ms"
+Write-Host ""
+Write-Host " Tests: $($results[0].TestCount) discovered" -ForegroundColor Cyan
+Write-Host ""
+
+# Save results
+$report = @{
+ Timestamp = $timestamp
+ Configuration = $Configuration
+ Filter = $Filter
+ Iterations = $Iterations
+ TestCount = $results[0].TestCount
+ Summary = @{
+ MedianMs = [math]::Round($median, 1)
+ AverageMs = [math]::Round($avg, 1)
+ MinMs = [math]::Round($min, 1)
+ MaxMs = [math]::Round($max, 1)
+ StdDevMs = [math]::Round($stddev, 1)
+ }
+ Runs = $results
+ Environment = @{
+ DotnetVersion = (dotnet --version 2>$null) ?? "unknown"
+ OS = [System.Runtime.InteropServices.RuntimeInformation]::OSDescription
+ ProcessorCount = [Environment]::ProcessorCount
+ MachineName = [Environment]::MachineName
+ }
+}
+
+$report | ConvertTo-Json -Depth 5 | Out-File -FilePath $resultsFile -Encoding utf8
+Write-Host " Results saved to: $resultsFile" -ForegroundColor DarkGray
From e4e547390f5fbe212c200efa2f48cf76c79cdb56 Mon Sep 17 00:00:00 2001
From: Tom Longhurst <30480171+thomhurst@users.noreply.github.com>
Date: Mon, 23 Mar 2026 01:28:47 +0000
Subject: [PATCH 2/2] fix: respect TUnitImplicitUsings set in
Directory.Build.props (#5208)
Add conditions to avoid overwriting user-set property values, matching
the pattern already used in TUnit.Mocks.props.
---
TUnit.Assertions/TUnit.Assertions.props | 2 +-
TUnit/TUnit.props | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/TUnit.Assertions/TUnit.Assertions.props b/TUnit.Assertions/TUnit.Assertions.props
index e61a9c8714..5b82565449 100644
--- a/TUnit.Assertions/TUnit.Assertions.props
+++ b/TUnit.Assertions/TUnit.Assertions.props
@@ -16,7 +16,7 @@
-
+
true
diff --git a/TUnit/TUnit.props b/TUnit/TUnit.props
index ead7fbda24..62e88b551b 100644
--- a/TUnit/TUnit.props
+++ b/TUnit/TUnit.props
@@ -13,8 +13,8 @@
- true
- true
+ true
+ true