File tree Expand file tree Collapse file tree 1 file changed +10
-0
lines changed
sql/core/src/test/scala/org/apache/spark/sql/execution/metric Expand file tree Collapse file tree 1 file changed +10
-0
lines changed Original file line number Diff line number Diff line change @@ -352,6 +352,15 @@ object InputOutputMetricsHelper {
352352 stageIdToMetricsResult = HashMap .empty[Int , MetricsResult ]
353353 }
354354
355+ /**
356+ * Return a list of recorded metrics aggregated per stage.
357+ *
 358+ * The list is sorted in ascending order by stageId.
 359+ * For each recorded stage, the following tuple is returned:
360+ * - sum of inputMetrics.recordsRead for all the tasks in the stage
361+ * - sum of shuffleReadMetrics.recordsRead for all the tasks in the stage
362+ * - sum of the highest values of "number of output rows" metric for all the tasks in the stage
363+ */
355364 def getResults (): List [(Long , Long , Long )] = {
356365 stageIdToMetricsResult.keySet.toList.sorted.map({ stageId =>
357366 val res = stageIdToMetricsResult(stageId)
@@ -381,6 +390,7 @@ object InputOutputMetricsHelper {
381390 }
382391 }
383392
393+ // Run df.collect() and return aggregated metrics for each stage.
384394 def run (df : DataFrame ): List [(Long , Long , Long )] = {
385395 val spark = df.sparkSession
386396 val sparkContext = spark.sparkContext
You can’t perform that action at this time.
0 commit comments