diff --git a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/TestHoodieClientMultiWriter.java b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/TestHoodieClientMultiWriter.java
index 93f35ff799676..92d57341c4cb2 100644
--- a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/TestHoodieClientMultiWriter.java
+++ b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/TestHoodieClientMultiWriter.java
@@ -40,7 +40,6 @@
 import org.apache.hudi.exception.HoodieWriteConflictException;
 import org.apache.hudi.testutils.HoodieClientTestBase;
 
-import org.apache.hadoop.fs.Path;
 import org.apache.spark.api.java.JavaRDD;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.Disabled;
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/CreateHoodieTableAsSelectCommand.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/CreateHoodieTableAsSelectCommand.scala
index ce6237ec99344..88803ba470ab8 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/CreateHoodieTableAsSelectCommand.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/CreateHoodieTableAsSelectCommand.scala
@@ -28,7 +28,7 @@ import org.apache.spark.sql.{Row, SaveMode, SparkSession}
 import org.apache.spark.sql.catalyst.catalog.{CatalogTable, CatalogTableType, HoodieCatalogTable}
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project}
 import org.apache.spark.sql.execution.SparkPlan
-import org.apache.spark.sql.execution.command.DataWritingCommand
+import org.apache.spark.sql.execution.command.RunnableCommand
 import org.apache.spark.sql.hudi.HoodieSqlUtils
 
 import scala.collection.JavaConverters._
@@ -39,9 +39,9 @@ import scala.collection.JavaConverters._
 case class CreateHoodieTableAsSelectCommand(
     table: CatalogTable,
     mode: SaveMode,
-    query: LogicalPlan) extends DataWritingCommand {
+    query: LogicalPlan) extends RunnableCommand {
 
-  override def run(sparkSession: SparkSession, child: SparkPlan): Seq[Row] = {
+  override def run(sparkSession: SparkSession): Seq[Row] = {
     assert(table.tableType != CatalogTableType.VIEW)
     assert(table.provider.isDefined)
 
@@ -114,8 +114,6 @@ case class CreateHoodieTableAsSelectCommand(
     fs.delete(path, true)
   }
 
-  override def outputColumnNames: Seq[String] = query.output.map(_.name)
-
   private def reOrderPartitionColumn(query: LogicalPlan,
       partitionColumns: Seq[String]): LogicalPlan = {
     if (partitionColumns.isEmpty) {