Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions velox/exec/Driver.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -801,6 +801,19 @@ void Driver::closeOperators() {
for (auto& op : operators_) {
auto stats = op->stats(true);
stats.numDrivers = 1;

// Calculate this driver's CPU time for this specific operator and add it as
// a runtime stat. This will be aggregated across all drivers, with the max
// field containing the CPU time from the longest running driver.
uint64_t operatorCpuNanos = stats.addInputTiming.cpuNanos +
stats.getOutputTiming.cpuNanos + stats.finishTiming.cpuNanos +
stats.isBlockedTiming.cpuNanos;

if (operatorCpuNanos > 0) {
stats.runtimeStats[OperatorStats::kDriverCpuTime] =
RuntimeMetric(operatorCpuNanos, RuntimeCounter::Unit::kNanos);
}

task()->addOperatorStats(stats);
}
}
Expand Down
5 changes: 5 additions & 0 deletions velox/exec/OperatorStats.h
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,11 @@ struct DynamicFilterStats {
};

struct OperatorStats {
/// Runtime stat name for per-driver CPU time (actual work time, not including
/// blocked time) for this operator. The max field will contain the CPU time
/// from the longest-running single driver.
static constexpr const char* kDriverCpuTime = "driverCpuTimeNanos";

/// Initial ordinal position in the operator's pipeline.
int32_t operatorId = 0;
int32_t pipelineId = 0;
Expand Down
4 changes: 3 additions & 1 deletion velox/exec/OperatorUtils.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,9 @@ bool shouldAggregateRuntimeMetric(const std::string& name) {
"dataSourceAddSplitWallNanos",
"dataSourceLazyWallNanos",
"queuedWallNanos",
"flushTimes"};
"flushTimes",
"driverCpuTimeNanos",
[Reviewer comment — Contributor]: Shall we use the defined constant (`OperatorStats::kDriverCpuTime`) here instead of the string literal? If possible, we could also update the other entries to reuse their corresponding defined constants.
"ioWaitWallNanos"};
if (metricNames.contains(name)) {
return true;
}
Expand Down
11 changes: 11 additions & 0 deletions velox/exec/tests/PrintPlanWithStatsTest.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -170,13 +170,15 @@ TEST_F(PrintPlanWithStatsTest, innerJoinWithTableScan) {
{" dataSourceLazyCpuNanos[ ]* sum: .+, count: .+, min: .+, max: .+"},
{" dataSourceLazyInputBytes[ ]* sum: .+, count: .+, min: .+, max: .+"},
{" dataSourceLazyWallNanos[ ]* sum: .+, count: 1, min: .+, max: .+"},
{" driverCpuTimeNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" runningAddInputWallNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" runningFinishWallNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" runningGetOutputWallNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" -- HashJoin\\[3\\]\\[INNER c0=u_c0\\] -> c0:INTEGER, c1:BIGINT, u_c1:BIGINT"},
{" Output: 2000 rows \\(.+\\), Cpu time: .+, Blocked wall time: .+, Peak memory: .+, Memory allocations: .+, CPU breakdown: B/I/O/F (.+/.+/.+/.+)"},
{" HashBuild: Input: 100 rows \\(.+\\), Output: 0 rows \\(.+\\), Cpu time: .+, Blocked wall time: .+, Peak memory: .+, Memory allocations: .+, Threads: 1, CPU breakdown: B/I/O/F (.+/.+/.+/.+)"},
{" distinctKey0\\s+sum: 101, count: 1, min: 101, max: 101, avg: 101"},
{" driverCpuTimeNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" hashtable.buildWallNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" hashtable.capacity\\s+sum: 200, count: 1, min: 200, max: 200, avg: 200"},
{" hashtable.numDistinct\\s+sum: 100, count: 1, min: 100, max: 100, avg: 100"},
Expand All @@ -193,6 +195,8 @@ TEST_F(PrintPlanWithStatsTest, innerJoinWithTableScan) {
true},
{" blockedWaitForJoinBuildWallNanos\\s+sum: .+, count: 1, min: .+, max: .+",
true},
{" driverCpuTimeNanos\\s+sum: .+, count: 1, min: .+, max: .+",
true},
{" dynamicFiltersProduced\\s+sum: 1, count: 1, min: 1, max: 1, avg: 1",
true},
{" queuedWallNanos\\s+sum: .+, count: 1, min: .+, max: .+", true},
Expand All @@ -207,6 +211,7 @@ TEST_F(PrintPlanWithStatsTest, innerJoinWithTableScan) {
{" connectorSplitSize[ ]* sum: .+, count: .+, min: .+, max: .+"},
{" dataSourceAddSplitWallNanos[ ]* sum: .+, count: 1, min: .+, max: .+"},
{" dataSourceReadWallNanos[ ]* sum: .+, count: .+, min: .+, max: .+"},
{" driverCpuTimeNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" dynamicFiltersAccepted[ ]* sum: 1, count: 1, min: 1, max: 1, avg: 1"},
{" footerBufferOverread[ ]* sum: .+, count: 1, min: .+, max: .+"},
{" ioWaitWallNanos [ ]* sum: .+, count: .+ min: .+, max: .+"},
Expand Down Expand Up @@ -234,11 +239,13 @@ TEST_F(PrintPlanWithStatsTest, innerJoinWithTableScan) {
{" waitForPreloadSplitNanos[ ]* sum: .+, count: .+, min: .+, max: .+, avg: .+"},
{" -- Project\\[1\\]\\[expressions: \\(u_c0:INTEGER, ROW\\[\"c0\"\\]\\), \\(u_c1:BIGINT, ROW\\[\"c1\"\\]\\)\\] -> u_c0:INTEGER, u_c1:BIGINT"},
{" Output: 100 rows \\(.+\\), Cpu time: .+, Blocked wall time: .+, Peak memory: 0B, Memory allocations: .+, Threads: 1, CPU breakdown: B/I/O/F (.+/.+/.+/.+)"},
{" driverCpuTimeNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" runningAddInputWallNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" runningFinishWallNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" runningGetOutputWallNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" -- Values\\[0\\]\\[100 rows in 1 vectors\\] -> c0:INTEGER, c1:BIGINT"},
{" Input: 0 rows \\(.+\\), Output: 100 rows \\(.+\\), Cpu time: .+, Blocked wall time: .+, Peak memory: 0B, Memory allocations: .+, Threads: 1, CPU breakdown: B/I/O/F (.+/.+/.+/.+)"},
{" driverCpuTimeNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" runningAddInputWallNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" runningFinishWallNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" runningGetOutputWallNanos\\s+sum: .+, count: 1, min: .+, max: .+"}});
Expand Down Expand Up @@ -291,6 +298,7 @@ TEST_F(PrintPlanWithStatsTest, partialAggregateWithTableScan) {
{" dataSourceLazyInputBytes\\s+sum: .+, count: .+, min: .+, max: .+"},
{" dataSourceLazyWallNanos\\s+sum: .+, count: .+, min: .+, max: .+"},
{" distinctKey0\\s+sum: .+, count: 1, min: .+, max: .+"},
{" driverCpuTimeNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" hashtable.capacity\\s+sum: (?:1273|1252), count: 1, min: (?:1273|1252), max: (?:1273|1252), avg: (?:1273|1252)"},
{" hashtable.numDistinct\\s+sum: (?:849|835), count: 1, min: (?:849|835), max: (?:849|835), avg: (?:849|835)"},
{" hashtable.numRehashes\\s+sum: 1, count: 1, min: 1, max: 1, avg: 1"},
Expand All @@ -304,6 +312,7 @@ TEST_F(PrintPlanWithStatsTest, partialAggregateWithTableScan) {
{" connectorSplitSize[ ]* sum: .+, count: .+, min: .+, max: .+"},
{" dataSourceAddSplitWallNanos[ ]* sum: .+, count: 1, min: .+, max: .+"},
{" dataSourceReadWallNanos[ ]* sum: .+, count: .+, min: .+, max: .+"},
{" driverCpuTimeNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" footerBufferOverread[ ]* sum: .+, count: 1, min: .+, max: .+"},
{" ioWaitWallNanos [ ]* sum: .+, count: .+ min: .+, max: .+"},
{" numPrefetch [ ]* sum: .+, count: .+, min: .+, max: .+"},
Expand Down Expand Up @@ -366,6 +375,7 @@ TEST_F(PrintPlanWithStatsTest, tableWriterWithTableScan) {
{" dataSourceLazyCpuNanos\\s+sum: .+, count: .+, min: .+, max: .+"},
{" dataSourceLazyInputBytes\\s+sum: .+, count: .+, min: .+, max: .+"},
{" dataSourceLazyWallNanos\\s+sum: .+, count: .+, min: .+, max: .+"},
{" driverCpuTimeNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" dwrfWriterCount\\s+sum: .+, count: 1, min: .+, max: .+"},
{" numWrittenFiles\\s+sum: .+, count: 1, min: .+, max: .+"},
{" runningAddInputWallNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
Expand All @@ -379,6 +389,7 @@ TEST_F(PrintPlanWithStatsTest, tableWriterWithTableScan) {
{" connectorSplitSize[ ]* sum: .+, count: .+, min: .+, max: .+"},
{" dataSourceAddSplitWallNanos[ ]* sum: .+, count: 1, min: .+, max: .+"},
{" dataSourceReadWallNanos[ ]* sum: .+, count: .+, min: .+, max: .+"},
{" driverCpuTimeNanos\\s+sum: .+, count: 1, min: .+, max: .+"},
{" footerBufferOverread[ ]* sum: .+, count: 1, min: .+, max: .+"},
{" ioWaitWallNanos [ ]* sum: .+, count: .+ min: .+, max: .+"},
{" numPrefetch [ ]* sum: .+, count: .+, min: .+, max: .+"},
Expand Down
Loading