Skip to content

Commit 088556b

Browse files
committed
Docs.
1 parent 10a53a7 commit 088556b

File tree

1 file changed

+10
-0
lines changed

1 file changed

+10
-0
lines changed

sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -352,6 +352,15 @@ object InputOutputMetricsHelper {
352352
stageIdToMetricsResult = HashMap.empty[Int, MetricsResult]
353353
}
354354

355+
/**
356+
* Return a list of recorded metrics aggregated per stage.
357+
*
358+
 * The list is sorted in ascending order by stageId.
359+
 * For each recorded stage, a tuple with the following values is returned:
360+
* - sum of inputMetrics.recordsRead for all the tasks in the stage
361+
* - sum of shuffleReadMetrics.recordsRead for all the tasks in the stage
362+
* - sum of the highest values of "number of output rows" metric for all the tasks in the stage
363+
*/
355364
def getResults(): List[(Long, Long, Long)] = {
356365
stageIdToMetricsResult.keySet.toList.sorted.map({ stageId =>
357366
val res = stageIdToMetricsResult(stageId)
@@ -381,6 +390,7 @@ object InputOutputMetricsHelper {
381390
}
382391
}
383392

393+
// Run df.collect() and return aggregated metrics for each stage.
384394
def run(df: DataFrame): List[(Long, Long, Long)] = {
385395
val spark = df.sparkSession
386396
val sparkContext = spark.sparkContext

0 commit comments

Comments (0)