[SPARK] Add SQL metrics for ConvertToDelta #3841

Merged · 1 commit · Nov 15, 2024
@@ -34,13 +34,15 @@ import org.apache.spark.sql.delta.sources.{DeltaSourceUtils, DeltaSQLConf}
 import org.apache.spark.sql.delta.util._
 import org.apache.hadoop.fs.{FileSystem, Path}
 
+import org.apache.spark.SparkContext
 import org.apache.spark.internal.MDC
 import org.apache.spark.sql.{AnalysisException, Row, SparkSession}
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.analysis.{Analyzer, NoSuchTableException}
 import org.apache.spark.sql.catalyst.catalog.{CatalogTable, CatalogTableType, SessionCatalog}
 import org.apache.spark.sql.connector.catalog.{Identifier, TableCatalog, V1Table}
 import org.apache.spark.sql.execution.command.LeafRunnableCommand
+import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics}
 import org.apache.spark.sql.types.StructType
 
@@ -76,6 +78,11 @@ abstract class ConvertToDeltaCommandBase(
   protected lazy val icebergEnabled: Boolean =
     conf.getConf(DeltaSQLConf.DELTA_CONVERT_ICEBERG_ENABLED)
 
+  override lazy val metrics: Map[String, SQLMetric] = Map(
+    "numConvertedFiles" ->
+      SQLMetrics.createMetric(SparkContext.getOrCreate(), "number of files converted")
+  )
+
   protected def isParquetPathProvider(provider: String): Boolean =
     provider.equalsIgnoreCase("parquet")
 
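For context, SQLMetrics.createMetric registers a driver-side accumulator on the SparkContext; its second argument is the display name that later appears under the command's node in the Spark UI's SQL tab. A minimal self-contained sketch of the mechanism (the object name and the value 7 are illustrative, not from this PR):

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.metric.SQLMetrics

// Illustrative sketch, not part of this PR: create a driver-side SQL metric
// and bump it. SQLMetric is an AccumulatorV2, so += adds to its running Long
// value; the display name is what the Spark UI's SQL tab renders.
object MetricSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("metric-sketch").getOrCreate()
    val numConvertedFiles = SQLMetrics.createMetric(spark.sparkContext, "number of files converted")
    numConvertedFiles += 7
    assert(numConvertedFiles.value == 7)
    spark.stop()
  }
}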
@@ -380,16 +387,18 @@ abstract class ConvertToDeltaCommandBase(
 
       val numFiles = targetTable.numFiles
       val addFilesIter = createDeltaActions(spark, manifest, partitionFields, txn, fs)
-      val metrics = Map[String, String](
+      val transactionMetrics = Map[String, String](
         "numConvertedFiles" -> numFiles.toString
       )
+      metrics("numConvertedFiles") += numFiles
+      sendDriverMetrics(spark, metrics)
       val (committedVersion, postCommitSnapshot) = txn.commitLarge(
         spark,
         addFilesIter,
         Some(txn.protocol),
         getOperation(numFiles, convertProperties, targetTable.format),
         getContext,
-        metrics)
+        transactionMetrics)
     } finally {
       manifest.close()
     }
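Two things happen in the added lines above: the command's own SQLMetric is incremented, and sendDriverMetrics posts the updated value to the active SQL execution so the UI can pick it up (driver-side metrics are not updated via tasks, so they must be posted explicitly). A hedged sketch of what such a helper boils down to, using standard Spark APIs; this is an approximation of the idea, not necessarily Delta's exact implementation:

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.SQLExecution
import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics}

object DriverMetricsSketch {
  // Post driver-side metric updates to the current SQL execution. The
  // execution id travels as a local property on the SparkContext while a
  // query runs, which is how the SQL tab associates the values with a plan.
  def sendDriverMetricsSketch(spark: SparkSession, metrics: Map[String, SQLMetric]): Unit = {
    val executionId = spark.sparkContext.getLocalProperty(SQLExecution.EXECUTION_ID_KEY)
    SQLMetrics.postDriverMetricUpdates(spark.sparkContext, executionId, metrics.values.toSeq)
  }
}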
@@ -19,6 +19,7 @@ package org.apache.spark.sql.delta
 import org.apache.spark.sql.delta.sources.DeltaSQLConf
 import org.apache.spark.sql.delta.test.DeltaSQLCommandTest
 
+import org.apache.spark.sql.execution.command.ExecutedCommandExec
 import org.apache.spark.sql.functions.{col, from_json}
 
 trait ConvertToDeltaSQLSuiteBase extends ConvertToDeltaSuiteBaseCommons
@@ -58,6 +59,23 @@ trait ConvertToDeltaSQLSuiteBase extends ConvertToDeltaSuiteBaseCommons
     }
   }
 
+  for (numFiles <- Seq(1, 7)) {
+    test(s"numConvertedFiles metric ($numFiles files)") {
+      val testTableName = "test_table"
+      withTable(testTableName) {
+        spark.range(end = numFiles).toDF("part").withColumn("data", col("part"))
+          .write.partitionBy("part").mode("overwrite").format("parquet").saveAsTable(testTableName)
+
+        val plans = DeltaTestUtils.withPhysicalPlansCaptured(spark) {
+          convertToDelta(testTableName, Some("part long"))
+        }
+
+        // Validate that the command node has the correct metrics.
+        val commandNode = plans.collect { case exe: ExecutedCommandExec => exe.cmd }.head
+        assert(commandNode.metrics("numConvertedFiles").value === numFiles)
+      }
+    }
+  }
 }
 
 class ConvertToDeltaSQLSuite extends ConvertToDeltaSQLSuiteBase
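For completeness, the new metric can also be exercised end to end from plain SQL; a usage sketch in a spark-shell session (the table name events is illustrative):

import org.apache.spark.sql.functions.col

// Write a small partitioned parquet table, then convert it in place.
spark.range(10).toDF("part").withColumn("data", col("part"))
  .write.partitionBy("part").format("parquet").saveAsTable("events")
spark.sql("CONVERT TO DELTA events PARTITIONED BY (part long)")
// "number of files converted" should now report the number of converted
// parquet files (here one per partition) on the command's node in the
// Spark UI's SQL tab.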