diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/metric/CustomMetric.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/metric/CustomMetric.java
index 4c4151ad9697..0b24cc601326 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/metric/CustomMetric.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/metric/CustomMetric.java
@@ -25,6 +25,10 @@
  * and combine the metrics at the driver side. How to combine task metrics is defined by the
  * metric class with the same metric name.
  *
+ * When Spark needs to aggregate task metrics, it will internally construct an instance of the
+ * custom metric class defined in the data source using reflection. Spark requires the class
+ * implementing this interface to have a 0-arg constructor.
+ *
  * @since 3.2.0
  */
 @Evolving
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListener.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListener.scala
index d892dbdc2316..9988df025b6a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListener.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListener.scala
@@ -220,7 +220,10 @@ class SQLAppStatusListener(
         metricAggregationMap.put(className, method)
         method
       } catch {
-        case NonFatal(_) =>
+        case NonFatal(e) =>
+          logWarning(s"Unable to load custom metric object for class `$className`. " +
+            "Please make sure that the custom metric class is in the classpath and " +
+            "it has a 0-arg constructor.", e)
           // Cannot initialize custom metric object, we might be in history server that does
           // not have the custom metric class.
           val defaultMethod = (_: Array[Long], _: Array[Long]) => "N/A"
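
For reference, a data source metric class that satisfies the reflection contract documented above might look like the following sketch. The class name BytesReadMetric, the metric name, and the description string are hypothetical; the three overridden methods are the ones declared by the CustomMetric interface.

import org.apache.spark.sql.connector.metric.CustomMetric;

public class BytesReadMetric implements CustomMetric {

  // Required: Spark instantiates this class reflectively on the driver
  // when aggregating task metrics, so it must have a public 0-arg
  // constructor (the implicit default constructor would also suffice).
  public BytesReadMetric() {}

  @Override
  public String name() {
    return "bytesRead";
  }

  @Override
  public String description() {
    return "total bytes read by the data source";
  }

  // Combines the per-task metric values collected on the executors into
  // a single string shown in the SQL UI. If this class cannot be loaded
  // and constructed (e.g. in the history server without the data source
  // jar on the classpath), the listener falls back to "N/A" as shown in
  // the second hunk above.
  @Override
  public String aggregateTaskMetrics(long[] taskMetrics) {
    long sum = 0L;
    for (long v : taskMetrics) {
      sum += v;
    }
    return sum + " bytes";
  }
}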