diff --git a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieCLIUtils.scala b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieCLIUtils.scala
index 552e3cfc9b9c3..0d3edd592d192 100644
--- a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieCLIUtils.scala
+++ b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieCLIUtils.scala
@@ -22,8 +22,11 @@ package org.apache.hudi
 import org.apache.hudi.avro.model.HoodieClusteringGroup
 import org.apache.hudi.client.SparkRDDWriteClient
 import org.apache.hudi.common.table.{HoodieTableMetaClient, TableSchemaResolver}
+import org.apache.spark.SparkException
 import org.apache.spark.api.java.JavaSparkContext
 import org.apache.spark.sql.SparkSession
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable
 import org.apache.spark.sql.hudi.HoodieSqlCommonUtils.withSparkConf
 
 import scala.collection.JavaConverters.{collectionAsScalaIterableConverter, mapAsJavaMapConverter}
@@ -57,4 +60,16 @@ object HoodieCLIUtils {
 
     partitionPaths.sorted.mkString(",")
   }
+
+  def getHoodieCatalogTable(sparkSession: SparkSession, table: String): HoodieCatalogTable = {
+    val seq: Seq[String] = table.split('.')
+    seq match {
+      case Seq(tableName) =>
+        HoodieCatalogTable(sparkSession, TableIdentifier(tableName))
+      case Seq(database, tableName) =>
+        HoodieCatalogTable(sparkSession, TableIdentifier(tableName, Some(database)))
+      case _ =>
+        throw new SparkException(s"Unsupported identifier $table")
+    }
+  }
 }
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/BaseProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/BaseProcedure.scala
index 99e488784cc2c..70799d3dc1e61 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/BaseProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/BaseProcedure.scala
@@ -17,12 +17,14 @@
 
 package org.apache.spark.sql.hudi.command.procedures
 
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.client.SparkRDDWriteClient
 import org.apache.hudi.client.common.HoodieSparkEngineContext
 import org.apache.hudi.common.model.HoodieRecordPayload
 import org.apache.hudi.config.{HoodieIndexConfig, HoodieWriteConfig}
 import org.apache.hudi.exception.HoodieClusteringException
 import org.apache.hudi.index.HoodieIndex.IndexType
+import org.apache.spark.SparkException
 import org.apache.spark.api.java.JavaSparkContext
 import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable
@@ -112,7 +114,7 @@ abstract class BaseProcedure extends Procedure {
 
   protected def getBasePath(tableName: Option[Any], tablePath: Option[Any] = Option.empty): String = {
     tableName.map(
-      t => HoodieCatalogTable(sparkSession, new TableIdentifier(t.asInstanceOf[String])).tableLocation)
+      t => HoodieCLIUtils.getHoodieCatalogTable(sparkSession, t.asInstanceOf[String]).tableLocation)
       .getOrElse(
         tablePath.map(p => p.asInstanceOf[String]).getOrElse(
           throw new HoodieClusteringException("Table name or table path must be given one"))
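For reviewers, a minimal sketch of how the new helper resolves identifiers; the `spark` session and table names below are hypothetical, not part of the patch:

```scala
import org.apache.hudi.HoodieCLIUtils
import org.apache.spark.sql.SparkSession

// Hypothetical session; any active SparkSession backed by the catalog works.
val spark: SparkSession = SparkSession.builder().getOrCreate()

// Unqualified name: resolved against the session's current database.
val t1 = HoodieCLIUtils.getHoodieCatalogTable(spark, "h0")

// "db.table": the string is split on '.' and the first part becomes the
// database of the TableIdentifier.
val t2 = HoodieCLIUtils.getHoodieCatalogTable(spark, "default.h0")

// Three or more parts fall through to the catch-all case and throw
// org.apache.spark.SparkException: Unsupported identifier a.b.c
// HoodieCLIUtils.getHoodieCatalogTable(spark, "a.b.c")
```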
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/CommitsCompareProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/CommitsCompareProcedure.scala
index 86262dc302c1b..9cb03bffc9ed7 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/CommitsCompareProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/CommitsCompareProcedure.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.hudi.command.procedures
 
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.common.table.HoodieTableMetaClient
 import org.apache.hudi.common.table.timeline.HoodieTimeline
 import org.apache.spark.sql.Row
@@ -47,7 +48,7 @@ class CommitsCompareProcedure() extends BaseProcedure with ProcedureBuilder {
     val table = getArgValueOrDefault(args, PARAMETERS(0)).get.asInstanceOf[String]
     val path = getArgValueOrDefault(args, PARAMETERS(1)).get.asInstanceOf[String]
 
-    val hoodieCatalogTable = HoodieCatalogTable(sparkSession, new TableIdentifier(table))
+    val hoodieCatalogTable = HoodieCLIUtils.getHoodieCatalogTable(sparkSession, table)
     val basePath = hoodieCatalogTable.tableLocation
     val source = HoodieTableMetaClient.builder.setConf(jsc.hadoopConfiguration()).setBasePath(basePath).build
     val target = HoodieTableMetaClient.builder.setConf(jsc.hadoopConfiguration()).setBasePath(path).build
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ExportInstantsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ExportInstantsProcedure.scala
index cf400dd6d5c11..114f4c4ee130c 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ExportInstantsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ExportInstantsProcedure.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql.hudi.command.procedures
 import org.apache.avro.generic.GenericRecord
 import org.apache.avro.specific.SpecificData
 import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.avro.HoodieAvroUtils
 import org.apache.hudi.avro.model.HoodieArchivedMetaEntry
 import org.apache.hudi.common.fs.FSUtils
@@ -72,7 +73,7 @@ class ExportInstantsProcedure extends BaseProcedure with ProcedureBuilder with L
     val actions: String = getArgValueOrDefault(args, PARAMETERS(3)).get.asInstanceOf[String]
     val desc = getArgValueOrDefault(args, PARAMETERS(4)).get.asInstanceOf[Boolean]
 
-    val hoodieCatalogTable = HoodieCatalogTable(sparkSession, new TableIdentifier(table))
+    val hoodieCatalogTable = HoodieCLIUtils.getHoodieCatalogTable(sparkSession, table)
     val basePath = hoodieCatalogTable.tableLocation
     val metaClient = HoodieTableMetaClient.builder.setConf(jsc.hadoopConfiguration()).setBasePath(basePath).build
     val archivePath = new Path(basePath + "/.hoodie/.commits_.archive*")
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RollbackToInstantTimeProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RollbackToInstantTimeProcedure.scala
index f802e1e78b80d..1fcc665d61123 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RollbackToInstantTimeProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/RollbackToInstantTimeProcedure.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.hudi.command.procedures
 
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.common.table.HoodieTableMetaClient
 import org.apache.hudi.common.table.timeline.HoodieTimeline
 import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion
@@ -49,7 +50,7 @@ class RollbackToInstantTimeProcedure extends BaseProcedure with ProcedureBuilder
     val table = getArgValueOrDefault(args, PARAMETERS(0)).get.asInstanceOf[String]
     val instantTime = getArgValueOrDefault(args, PARAMETERS(1)).get.asInstanceOf[String]
 
-    val hoodieCatalogTable = HoodieCatalogTable(sparkSession, new TableIdentifier(table))
+    val hoodieCatalogTable = HoodieCLIUtils.getHoodieCatalogTable(sparkSession, table)
     val basePath = hoodieCatalogTable.tableLocation
     val client = createHoodieClient(jsc, basePath)
     client.getConfig.setValue(ROLLBACK_USING_MARKERS_ENABLE, "false")
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowArchivedCommitsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowArchivedCommitsProcedure.scala
index 957dfbe8bf7be..a3c3ece2932b8 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowArchivedCommitsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowArchivedCommitsProcedure.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.hudi.command.procedures
 
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.common.model.HoodieCommitMetadata
 import org.apache.hudi.common.table.HoodieTableMetaClient
 import org.apache.hudi.common.table.timeline.{HoodieActiveTimeline, HoodieDefaultTimeline, HoodieInstant}
@@ -82,7 +83,7 @@ class ShowArchivedCommitsProcedure(includeExtraMetadata: Boolean) extends BasePr
     var startTs = getArgValueOrDefault(args, PARAMETERS(2)).get.asInstanceOf[String]
     var endTs = getArgValueOrDefault(args, PARAMETERS(3)).get.asInstanceOf[String]
 
-    val hoodieCatalogTable = HoodieCatalogTable(sparkSession, new TableIdentifier(table))
+    val hoodieCatalogTable = HoodieCLIUtils.getHoodieCatalogTable(sparkSession, table)
     val basePath = hoodieCatalogTable.tableLocation
 
     val metaClient = HoodieTableMetaClient.builder.setConf(jsc.hadoopConfiguration()).setBasePath(basePath).build
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitFilesProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitFilesProcedure.scala
index d4581be7f46bb..53fcd072c3b7e 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitFilesProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitFilesProcedure.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.hudi.command.procedures
 
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.common.model.{HoodieCommitMetadata, HoodieReplaceCommitMetadata, HoodieWriteStat}
 import org.apache.hudi.common.table.HoodieTableMetaClient
 import org.apache.hudi.common.table.timeline.{HoodieInstant, HoodieTimeline}
@@ -61,7 +62,7 @@ class ShowCommitFilesProcedure() extends BaseProcedure with ProcedureBuilder {
     val limit = getArgValueOrDefault(args, PARAMETERS(1)).get.asInstanceOf[Int]
     val instantTime = getArgValueOrDefault(args, PARAMETERS(2)).get.asInstanceOf[String]
 
-    val hoodieCatalogTable = HoodieCatalogTable(sparkSession, new TableIdentifier(table))
+    val hoodieCatalogTable = HoodieCLIUtils.getHoodieCatalogTable(sparkSession, table)
     val basePath = hoodieCatalogTable.tableLocation
     val metaClient = HoodieTableMetaClient.builder.setConf(jsc.hadoopConfiguration()).setBasePath(basePath).build
     val activeTimeline = metaClient.getActiveTimeline
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitPartitionsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitPartitionsProcedure.scala
index d358f996f3e87..0a3945aee8a64 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitPartitionsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitPartitionsProcedure.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.hudi.command.procedures
 
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.common.model.{HoodieCommitMetadata, HoodieReplaceCommitMetadata, HoodieWriteStat}
 import org.apache.hudi.common.table.HoodieTableMetaClient
 import org.apache.hudi.common.table.timeline.{HoodieInstant, HoodieTimeline}
@@ -60,7 +61,7 @@ class ShowCommitPartitionsProcedure() extends BaseProcedure with ProcedureBuilde
     val limit = getArgValueOrDefault(args, PARAMETERS(1)).get.asInstanceOf[Int]
     val instantTime = getArgValueOrDefault(args, PARAMETERS(2)).get.asInstanceOf[String]
 
-    val hoodieCatalogTable = HoodieCatalogTable(sparkSession, new TableIdentifier(table))
+    val hoodieCatalogTable = HoodieCLIUtils.getHoodieCatalogTable(sparkSession, table)
     val basePath = hoodieCatalogTable.tableLocation
     val metaClient = HoodieTableMetaClient.builder.setConf(jsc.hadoopConfiguration()).setBasePath(basePath).build
     val activeTimeline = metaClient.getActiveTimeline
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitWriteStatsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitWriteStatsProcedure.scala
index 594d187699283..4e3609b533465 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitWriteStatsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitWriteStatsProcedure.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.hudi.command.procedures
 
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.common.model.{HoodieCommitMetadata, HoodieReplaceCommitMetadata}
 import org.apache.hudi.common.table.HoodieTableMetaClient
 import org.apache.hudi.common.table.timeline.{HoodieInstant, HoodieTimeline}
@@ -55,7 +56,7 @@ class ShowCommitWriteStatsProcedure() extends BaseProcedure with ProcedureBuilde
     val limit = getArgValueOrDefault(args, PARAMETERS(1)).get.asInstanceOf[Int]
     val instantTime = getArgValueOrDefault(args, PARAMETERS(2)).get.asInstanceOf[String]
 
-    val hoodieCatalogTable = HoodieCatalogTable(sparkSession, new TableIdentifier(table))
+    val hoodieCatalogTable = HoodieCLIUtils.getHoodieCatalogTable(sparkSession, table)
     val basePath = hoodieCatalogTable.tableLocation
     val metaClient = HoodieTableMetaClient.builder.setConf(jsc.hadoopConfiguration()).setBasePath(basePath).build
     val activeTimeline = metaClient.getActiveTimeline
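Every hunk above is the same mechanical swap of `HoodieCatalogTable(sparkSession, new TableIdentifier(table))` for `HoodieCLIUtils.getHoodieCatalogTable(sparkSession, table)`, so each of these procedures now accepts a database-qualified table argument. A hypothetical end-to-end call (the table name is illustrative):

```scala
// Before this patch, only the unqualified form resolved:
spark.sql("call show_commits(table => 'h0', limit => 10)").show()

// With the patch, the qualified form works for every procedure touched here:
spark.sql("call show_commits(table => 'default.h0', limit => 10)").show()
```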
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitsProcedure.scala
index 1dc395ad2713c..169acce887dd1 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowCommitsProcedure.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.hudi.command.procedures
 
+import org.apache.hudi.HoodieCLIUtils
 import org.apache.hudi.common.model.HoodieCommitMetadata
 import org.apache.hudi.common.table.HoodieTableMetaClient
 import org.apache.hudi.common.table.timeline.{HoodieDefaultTimeline, HoodieInstant}
@@ -78,7 +79,7 @@ class ShowCommitsProcedure(includeExtraMetadata: Boolean) extends BaseProcedure
     val table = getArgValueOrDefault(args, PARAMETERS(0)).get.asInstanceOf[String]
     val limit = getArgValueOrDefault(args, PARAMETERS(1)).get.asInstanceOf[Int]
 
-    val hoodieCatalogTable = HoodieCatalogTable(sparkSession, new TableIdentifier(table))
+    val hoodieCatalogTable = HoodieCLIUtils.getHoodieCatalogTable(sparkSession, table)
     val basePath = hoodieCatalogTable.tableLocation
 
     val metaClient = HoodieTableMetaClient.builder.setConf(jsc.hadoopConfiguration()).setBasePath(basePath).build
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestNestedSchemaPruningOptimization.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestNestedSchemaPruningOptimization.scala
index f47ff6be1b900..87d19d31d96e7 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestNestedSchemaPruningOptimization.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestNestedSchemaPruningOptimization.scala
@@ -18,7 +18,6 @@
 package org.apache.spark.sql.hudi
 
 import org.apache.hudi.{HoodieSparkUtils, SparkAdapterSupport}
-import org.apache.spark.sql.DataFrame
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.execution.{FileSourceScanExec, ProjectExec, RowDataSourceScanExec, SparkPlan}
 import org.apache.spark.sql.internal.SQLConf
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/HoodieSparkProcedureTestBase.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/HoodieSparkProcedureTestBase.scala
new file mode 100644
index 0000000000000..eb1390e81c6ac
--- /dev/null
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/HoodieSparkProcedureTestBase.scala
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.hudi.procedure
+
+import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
+
+class HoodieSparkProcedureTestBase extends HoodieSparkSqlTestBase {
+  override def generateTableName: String = {
+    s"default.${super.generateTableName}"
+  }
+}
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestBootstrapProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestBootstrapProcedure.scala
index f1e15a88c27f4..bc02c0402bbed 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestBootstrapProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestBootstrapProcedure.scala
@@ -21,13 +21,12 @@ import org.apache.hadoop.fs.Path
 import org.apache.hudi.common.model.HoodieTableType
 import org.apache.hudi.functional.TestBootstrap
 import org.apache.spark.api.java.JavaSparkContext
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 import org.apache.spark.sql.{Dataset, Row}
 
 import java.time.Instant
 import java.util
 
-class TestBootstrapProcedure extends HoodieSparkSqlTestBase {
+class TestBootstrapProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call run_bootstrap Procedure") {
     withTempDir { tmp =>
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCallProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCallProcedure.scala
index 3bd7b016738c7..094b8b1aca3a0 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCallProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCallProcedure.scala
@@ -19,9 +19,8 @@ package org.apache.spark.sql.hudi.procedure
 
 import org.apache.hudi.common.model.IOType
 import org.apache.hudi.common.testutils.FileCreateUtils
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 
-class TestCallProcedure extends HoodieSparkSqlTestBase {
+class TestCallProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call show_commits Procedure") {
     withTempDir { tmp =>
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCleanProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCleanProcedure.scala
index 316dccca52012..f93be88fe0632 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCleanProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCleanProcedure.scala
@@ -19,9 +19,7 @@
 
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestCleanProcedure extends HoodieSparkSqlTestBase {
+class TestCleanProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call run_clean Procedure by Table") {
     withTempDir { tmp =>
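The new `HoodieSparkProcedureTestBase` prepends `default.` in `generateTableName`, so every procedure test migrated onto it (this and the following files) exercises the qualified-name path end to end. A minimal sketch of a test written against the base class, modeled on the existing tests in these files (the schema, properties, and assertion are illustrative):

```scala
class TestMyProcedure extends HoodieSparkProcedureTestBase {
  test("Test Call show_commits with a db-qualified table name") {
    withTempDir { tmp =>
      val tableName = generateTableName // e.g. "default.h4"
      spark.sql(
        s"""create table $tableName (id int, name string, ts long)
           |using hudi
           |location '${tmp.getCanonicalPath}'
           |tblproperties (primaryKey = 'id', preCombineField = 'ts')
           |""".stripMargin)
      spark.sql(s"insert into $tableName values (1, 'a1', 1000)")
      // The procedure receives "default.h4" and resolves it through
      // HoodieCLIUtils.getHoodieCatalogTable.
      assert(spark.sql(s"call show_commits(table => '$tableName')").count() > 0)
    }
  }
}
```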
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestClusteringProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestClusteringProcedure.scala
index df4d8c90e2e6f..456f9c5066afe 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestClusteringProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestClusteringProcedure.scala
@@ -23,11 +23,10 @@ import org.apache.hadoop.fs.Path
 import org.apache.hudi.common.table.timeline.{HoodieActiveTimeline, HoodieInstant, HoodieTimeline}
 import org.apache.hudi.common.util.{Option => HOption}
 import org.apache.hudi.{HoodieCLIUtils, HoodieDataSourceHelpers}
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 
 import scala.collection.JavaConverters.asScalaIteratorConverter
 
-class TestClusteringProcedure extends HoodieSparkSqlTestBase {
+class TestClusteringProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call run_clustering Procedure By Table") {
     withTempDir { tmp =>
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCommitsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCommitsProcedure.scala
index 2539ff7c365b5..2840b2243430d 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCommitsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCommitsProcedure.scala
@@ -17,9 +17,7 @@
 
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestCommitsProcedure extends HoodieSparkSqlTestBase {
+class TestCommitsProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call show_archived_commits Procedure") {
     withTempDir { tmp =>
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCompactionProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCompactionProcedure.scala
index 39332d859171d..e9d9d550d3f76 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCompactionProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCompactionProcedure.scala
@@ -20,9 +20,8 @@ package org.apache.spark.sql.hudi.procedure
 
 import org.apache.hudi.common.table.timeline.HoodieInstant
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 
-class TestCompactionProcedure extends HoodieSparkSqlTestBase {
+class TestCompactionProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call run_compaction Procedure by Table") {
     withTempDir { tmp =>
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCopyToTableProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCopyToTableProcedure.scala
index 57025ab0b6bcd..6866b62f37cdd 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCopyToTableProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestCopyToTableProcedure.scala
@@ -18,11 +18,10 @@
 package org.apache.spark.sql.hudi.procedure
 
 import org.apache.spark.sql.Row
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 
 import java.util
 
-class TestCopyToTableProcedure extends HoodieSparkSqlTestBase {
+class TestCopyToTableProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call copy_to_table Procedure with default params") {
     withTempDir { tmp =>
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestExportInstantsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestExportInstantsProcedure.scala
index cd4e3a7ac69ca..b6a83e64fa064 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestExportInstantsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestExportInstantsProcedure.scala
@@ -17,9 +17,7 @@
 
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestExportInstantsProcedure extends HoodieSparkSqlTestBase {
+class TestExportInstantsProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call export_instants Procedure") {
     withTempDir { tmp =>
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestFsViewProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestFsViewProcedure.scala
index 69d08e37dfc00..64da833b9dcd0 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestFsViewProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestFsViewProcedure.scala
@@ -17,9 +17,7 @@
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestFsViewProcedure extends HoodieSparkSqlTestBase {
+class TestFsViewProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call show_fsview_all Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHdfsParquetImportProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHdfsParquetImportProcedure.scala
index 1a4d3e2e913d3..ea83c828c553b 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHdfsParquetImportProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHdfsParquetImportProcedure.scala
@@ -29,7 +29,6 @@ import org.apache.parquet.hadoop.ParquetWriter
 import org.apache.spark.api.java.JavaSparkContext
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.catalyst.parser.ParseException
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 import org.junit.jupiter.api.Assertions.assertTrue
 
 import java.io.IOException
@@ -37,7 +36,7 @@ import java.util
 import java.util.Objects
 import java.util.concurrent.TimeUnit
 
-class TestHdfsParquetImportProcedure extends HoodieSparkSqlTestBase {
+class TestHdfsParquetImportProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call hdfs_parquet_import Procedure with insert operation") {
     withTempDir { tmp =>
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHoodieLogFileProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHoodieLogFileProcedure.scala
index 41954c80025e3..be6a2bb76244f 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHoodieLogFileProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestHoodieLogFileProcedure.scala
@@ -19,9 +19,7 @@
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestHoodieLogFileProcedure extends HoodieSparkSqlTestBase {
+class TestHoodieLogFileProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call show_logfile_metadata Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestMetadataProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestMetadataProcedure.scala
index 5a26aaa0cf147..ba90fe3cb9d24 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestMetadataProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestMetadataProcedure.scala
@@ -17,9 +17,7 @@
 
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestMetadataProcedure extends HoodieSparkSqlTestBase {
+class TestMetadataProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call delete_metadata_table Procedure") {
     withTempDir { tmp =>
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestRepairsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestRepairsProcedure.scala
index 587f7a4bdda64..f6ce92b41586a 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestRepairsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestRepairsProcedure.scala
@@ -29,14 +29,13 @@ import org.apache.hudi.common.table.view.HoodieTableFileSystemView
 import org.apache.hudi.common.testutils.{HoodieTestDataGenerator, SchemaTestUtil}
 import org.apache.hudi.testutils.HoodieSparkWriteableTestTable
 import org.apache.spark.api.java.JavaSparkContext
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 
 import java.io.IOException
 import java.net.URL
 import java.nio.file.{Files, Paths}
 import scala.collection.JavaConverters.asScalaIteratorConverter
 
-class TestRepairsProcedure extends HoodieSparkSqlTestBase {
+class TestRepairsProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call repair_add_partition_meta Procedure") {
     withTempDir { tmp =>
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestSavepointsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestSavepointsProcedure.scala
index 24036519cd93c..c0ca6735a350f 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestSavepointsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestSavepointsProcedure.scala
@@ -17,9 +17,7 @@
 
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestSavepointsProcedure extends HoodieSparkSqlTestBase {
+class TestSavepointsProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call create_savepoint Procedure") {
     withTempDir { tmp =>
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestShowFsPathDetailProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestShowFsPathDetailProcedure.scala
index 8940f7c4ee62b..cc6164f071790 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestShowFsPathDetailProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestShowFsPathDetailProcedure.scala
@@ -17,9 +17,7 @@
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestShowFsPathDetailProcedure extends HoodieSparkSqlTestBase {
+class TestShowFsPathDetailProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call show_fs_path_detail Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestStatsProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestStatsProcedure.scala
index ad0179b58b202..f4dd8ad63b4a8 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestStatsProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestStatsProcedure.scala
@@ -19,9 +19,7 @@
 package org.apache.spark.sql.hudi.procedure
 
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
-
-class TestStatsProcedure extends HoodieSparkSqlTestBase {
+class TestStatsProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call stats_wa Procedure") {
     withTempDir { tmp =>
       val tableName = generateTableName
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestUpgradeOrDowngradeProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestUpgradeOrDowngradeProcedure.scala
index a9a763c8fdd23..962f28fc42687 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestUpgradeOrDowngradeProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestUpgradeOrDowngradeProcedure.scala
@@ -21,11 +21,10 @@ import org.apache.hadoop.fs.Path
 import org.apache.hudi.common.config.HoodieConfig
 import org.apache.hudi.common.table.{HoodieTableConfig, HoodieTableMetaClient, HoodieTableVersion}
 import org.apache.spark.api.java.JavaSparkContext
-import org.apache.spark.sql.hudi.HoodieSparkSqlTestBase
 
 import java.io.IOException
 
-class TestUpgradeOrDowngradeProcedure extends HoodieSparkSqlTestBase {
+class TestUpgradeOrDowngradeProcedure extends HoodieSparkProcedureTestBase {
 
   test("Test Call downgrade_table and upgrade_table Procedure") {
     withTempDir { tmp =>