diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/CreateHoodieTableAsSelectCommand.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/CreateHoodieTableAsSelectCommand.scala
index f1a344fb832f5..2790ea97c87c6 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/CreateHoodieTableAsSelectCommand.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/CreateHoodieTableAsSelectCommand.scala
@@ -28,7 +28,6 @@ import org.apache.spark.sql.{Row, SaveMode, SparkSession}
 import org.apache.spark.sql.catalyst.catalog.{CatalogTable, CatalogTableType, HoodieCatalogTable}
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project}
 import org.apache.spark.sql.execution.SparkPlan
-import org.apache.spark.sql.execution.command.DataWritingCommand
 import org.apache.spark.sql.hudi.HoodieSqlUtils
 
 import scala.collection.JavaConverters._
@@ -39,13 +38,9 @@ import scala.collection.JavaConverters._
 case class CreateHoodieTableAsSelectCommand(
     table: CatalogTable,
     mode: SaveMode,
-    query: LogicalPlan) extends DataWritingCommand {
+    query: LogicalPlan) extends HoodieLeafRunnableCommand {
 
-  def withNewChildInternal(newChild: LogicalPlan): CreateHoodieTableAsSelectCommand = {
-    this
-  }
-
-  override def run(sparkSession: SparkSession, child: SparkPlan): Seq[Row] = {
+  override def run(sparkSession: SparkSession): Seq[Row] = {
     assert(table.tableType != CatalogTableType.VIEW)
     assert(table.provider.isDefined)
 
@@ -118,8 +113,6 @@ case class CreateHoodieTableAsSelectCommand(
     fs.delete(path, true)
   }
 
-  override def outputColumnNames: Seq[String] = query.output.map(_.name)
-
   private def reOrderPartitionColumn(query: LogicalPlan, partitionColumns: Seq[String]): LogicalPlan = {
     if (partitionColumns.isEmpty) {
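
Note on the change above: Spark's DataWritingCommand keeps the query as a plan child, so it obliges the command to implement run(sparkSession, child) (where child is the already-executed SparkPlan of the query), to override outputColumnNames, and, on Spark 3.2+, to override withNewChildInternal for the new TreeNode API. Switching the superclass to HoodieLeafRunnableCommand turns the command into a leaf node whose query is an ordinary constructor field rather than a child, which removes all three obligations at once. Below is a minimal sketch of that shape, not the Hudi implementation: it assumes Spark 3.2+'s LeafRunnableCommand, uses a hypothetical class name CtasSketch, and is placed under the org.apache.spark.sql package so the private[sql] helper Dataset.ofRows is accessible, as it is for the Hudi command in this diff.

// Minimal sketch, assuming Spark 3.2+ (LeafRunnableCommand); CtasSketch is
// a hypothetical name, not a Hudi class.
package org.apache.spark.sql.hudi.command

import org.apache.spark.sql.{Dataset, Row, SparkSession}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.command.LeafRunnableCommand

case class CtasSketch(query: LogicalPlan) extends LeafRunnableCommand {

  override def run(sparkSession: SparkSession): Seq[Row] = {
    // As a leaf command, `query` is a plain field, not a plan child: there is
    // nothing for withNewChildInternal to rewrite, and Spark no longer plans
    // and executes the query for us, so we materialize it here ourselves.
    val df = Dataset.ofRows(sparkSession, query)

    // ... hand `df` to the datasource writer (elided) ...
    Seq.empty[Row]
  }
}

The trade-off in this design is that the query no longer participates in the command's plan tree, so Catalyst's tree transformations skip over it; the command is responsible for resolving and executing the inner query explicitly inside run().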