Skip to content

Commit

Permalink
Rename line plot field, and update strong/weak scaling expression
Browse files Browse the repository at this point in the history
  • Loading branch information
EdmundGoodman committed Feb 26, 2024
1 parent 442fb2e commit 4c8d157
Show file tree
Hide file tree
Showing 4 changed files with 23 additions and 41 deletions.
2 changes: 1 addition & 1 deletion src/hpc_multibench/test_bench.py
Original file line number Diff line number Diff line change
Expand Up @@ -263,7 +263,7 @@ def report(self) -> None:

split_data: list[str] = [
f"{split_metric}={metrics[split_metric]}"
for split_metric in plot.split_by
for split_metric in plot.split_metrics
]
series_name = (run_configuration.name, *split_data)

Expand Down
3 changes: 2 additions & 1 deletion src/hpc_multibench/yaml_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,8 @@ class LinePlotModel(BaseModel):
title: str
x: str
y: str
split_by: list[str] = []
split_metrics: list[str] = []
# fix_metrics: dict[str, Any] = {}


# class RooflinePlotModel(BaseModel):
Expand Down
4 changes: 2 additions & 2 deletions yaml_examples/kudu/simple_rust.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -195,7 +195,7 @@ benches:
- title: "MPI Configuration Sweep"
x: "Mesh x size"
y: "Wall time (s)"
split_by:
split_metrics:
- "Nodes"
- "Tasks per Node"

Expand Down Expand Up @@ -229,6 +229,6 @@ benches:
- title: "Parallel Implementation Comparison"
x: "Mesh x size"
y: "Wall time (s)"
split_by:
split_metrics:
- "Tasks per Node"
- "Threads"
55 changes: 18 additions & 37 deletions yaml_examples/kudu/strong_weak_scaling.yaml
Original file line number Diff line number Diff line change
@@ -1,26 +1,10 @@
run_configurations:
"cpp-mpi":
sbatch_config:
"nodes": 1
"ntasks-per-node": 16
"cpus-per-task": 2
"exclusive": "mcs"
"mem-per-cpu": 1875
module_loads:
- "cs402-mpi"
environment_variables: {}
directory: "../0_cpp_versions/2_mpi"
build_commands:
- "make -j 8"
run_command: "mpirun ./test_HPCCG"

"cpp-hybrid":
sbatch_config:
"nodes": 1
"ntasks-per-node": 16
"cpus-per-task": 2
"exclusive": "mcs"
"mem-per-cpu": 1875
"mem": 60000
module_loads:
- "cs402-mpi"
environment_variables:
Expand All @@ -33,17 +17,16 @@ run_configurations:
benches:
"strong-scaling":
run_configurations:
# - "cpp-mpi"
- "cpp-hybrid"
matrix:
[args, run_command]:
- ["64 64 1024", "mpirun -np 1 ./test_HPCCG"]
- ["64 64 512", "mpirun -np 2 ./test_HPCCG"]
- ["64 64 256", "mpirun -np 4 ./test_HPCCG"]
- ["64 64 128", "mpirun -np 8 ./test_HPCCG"]
- ["64 64 64", "mpirun -np 16 ./test_HPCCG"]
# - ["64 64 32", "mpirun -np 32 ./test_HPCCG"] # Exceeds Kudu's resource capacity...
# - ["64 64 16", "mpirun -np 64 ./test_HPCCG"]
[args, sbatch_config]:
- ["64 64 1024", {"ntasks-per-node": 1}]
- ["64 64 512", {"ntasks-per-node": 2}]
- ["64 64 256", {"ntasks-per-node": 4}]
- ["64 64 128", {"ntasks-per-node": 8}]
- ["64 64 64", {"ntasks-per-node": 16}]
- ["64 64 32", {"ntasks-per-node": 32}]
- ["64 64 16", {"ntasks-per-node": 64}]
analysis:
metrics:
"Mesh z size": "nz: (\\d+)"
Expand All @@ -57,24 +40,22 @@ benches:

"weak-scaling":
run_configurations:
# - "cpp-mpi"
- "cpp-hybrid"
matrix:
args:
- "64 64 64"
run_command:
- "mpirun -np 1 ./test_HPCCG"
- "mpirun -np 2 ./test_HPCCG"
- "mpirun -np 4 ./test_HPCCG"
- "mpirun -np 8 ./test_HPCCG"
- "mpirun -np 16 ./test_HPCCG"
# - "mpirun -np 32 ./test_HPCCG" # Exceeds Kudu's resource capacity...
# - "mpirun -np 64 ./test_HPCCG"
sbatch_config:
- {"ntasks-per-node": 1}
- {"ntasks-per-node": 2}
- {"ntasks-per-node": 4}
- {"ntasks-per-node": 8}
- {"ntasks-per-node": 16}
- {"ntasks-per-node": 32}
- {"ntasks-per-node": 64}
analysis:
metrics:
"MPI Ranks": "=== RUN INSTANTIATION ===\n\\{.*run_command: mpirun -np (\\d+).*\\}"
"Total time (s)": "Time Summary:[\\s\\S]*Total\\s*: ([\\d\\.]+)[\\s\\S]*\nFLOPS Summary"
"Wall time (s)": "real\\s([\\d\\.]+)\nuser"
"MPI Ranks": "=== RUN INSTANTIATION ===\n\\{.*sbatch_config: \\{.*ntasks-per-node: (\\d+).*\\}"
line_plots:
- title: "Weak Scaling Plot"
x: "MPI Ranks"
Expand Down

0 comments on commit 4c8d157

Please sign in to comment.