From bee42f7ae0a2b147d36e38c457758d178e5a79ba Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sat, 5 Dec 2020 14:59:06 +0300
Subject: [PATCH 1/4] Add a test

---
 .../execution/command/v1/ShowTablesSuite.scala | 18 ++++++++++++++++--
 1 file changed, 16 insertions(+), 2 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala
index 8f29f9f276138..04edfb32c56ff 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.execution.command.v1
 
-import org.apache.spark.sql.{AnalysisException, Row}
+import org.apache.spark.sql.{AnalysisException, Row, SaveMode}
 import org.apache.spark.sql.connector.catalog.CatalogManager
 import org.apache.spark.sql.execution.command
 import org.apache.spark.sql.internal.SQLConf
@@ -111,4 +111,18 @@ trait ShowTablesSuiteBase extends command.ShowTablesSuiteBase {
   }
 }
 
-class ShowTablesSuite extends ShowTablesSuiteBase with SharedSparkSession
+class ShowTablesSuite extends ShowTablesSuiteBase with SharedSparkSession {
+  test("show partitions from a datasource table") {
+    import testImplicits._
+    withNamespace(s"$catalog.ns") {
+      sql(s"CREATE NAMESPACE $catalog.ns")
+      sql(s"USE $catalog.ns")
+      val t = "part_datasrc"
+      withTable(t) {
+        val df = (1 to 3).map(i => (i, s"val_$i", i * 2)).toDF("a", "b", "c")
+        df.write.partitionBy("a").format("parquet").mode(SaveMode.Overwrite).saveAsTable(t)
+        assert(sql(s"SHOW TABLE EXTENDED LIKE '$t' PARTITION(a = 1)").count() === 1)
+      }
+    }
+  }
+}

From 1accc1fc1ef4f884e3b9b6ca2a62a9feb6d407d6 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sat, 5 Dec 2020 14:59:29 +0300
Subject: [PATCH 2/4] Invoke verifyPartitionProviderIsHive

---
 .../scala/org/apache/spark/sql/execution/command/tables.scala | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 9e3ca3c321a54..20ec371a845d6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -879,6 +879,9 @@ case class ShowTablesCommand(
       // Note: tableIdentifierPattern should be non-empty, otherwise a [[ParseException]]
       // should have been thrown by the sql parser.
       val table = catalog.getTableMetadata(TableIdentifier(tableIdentifierPattern.get, Some(db)))
+
+      DDLUtils.verifyPartitionProviderIsHive(sparkSession, table, "SHOW TABLE EXTENDED")
+
       val tableIdent = table.identifier
       val normalizedSpec = PartitioningUtils.normalizePartitionSpec(
         partitionSpec.get,

From c82c61a27ff4ec5a9874095e5339c2a41be48d7f Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sat, 5 Dec 2020 15:43:38 +0300
Subject: [PATCH 3/4] Update PartitionProviderCompatibilitySuite

---
 .../hive/PartitionProviderCompatibilitySuite.scala | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/PartitionProviderCompatibilitySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/PartitionProviderCompatibilitySuite.scala
index 80afc9d8f44bc..e1b0637963b75 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/PartitionProviderCompatibilitySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/PartitionProviderCompatibilitySuite.scala
@@ -53,7 +53,8 @@ class PartitionProviderCompatibilitySuite
       s"ALTER TABLE $tableName PARTITION (partCol=1) SET LOCATION '/foo'",
       s"ALTER TABLE $tableName DROP PARTITION (partCol=1)",
       s"DESCRIBE $tableName PARTITION (partCol=1)",
-      s"SHOW PARTITIONS $tableName")
+      s"SHOW PARTITIONS $tableName",
+      s"SHOW TABLE EXTENDED LIKE '$tableName' PARTITION (partCol=1)")
 
     withSQLConf(SQLConf.HIVE_MANAGE_FILESOURCE_PARTITIONS.key -> "true") {
       for (cmd <- unsupportedCommands) {
@@ -124,10 +125,15 @@ class PartitionProviderCompatibilitySuite
         }
         // disabled
         withSQLConf(SQLConf.HIVE_MANAGE_FILESOURCE_PARTITIONS.key -> "false") {
-          val e = intercept[AnalysisException] {
-            spark.sql(s"show partitions test")
+          Seq(
+            "SHOW PARTITIONS test",
+            "SHOW TABLE EXTENDED LIKE 'test' PARTITION (partCol=1)"
+          ).foreach { showPartitions =>
+            val e = intercept[AnalysisException] {
+              spark.sql(showPartitions)
+            }
+            assert(e.getMessage.contains("filesource partition management is disabled"))
           }
-          assert(e.getMessage.contains("filesource partition management is disabled"))
           spark.sql("refresh table test")
           assert(spark.sql("select * from test").count() == 5)
         }

From 5058fe582161a33d840e33bbea9eed54bfdb3558 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sun, 6 Dec 2020 09:27:15 +0300
Subject: [PATCH 4/4] Add JIRA to test's title

---
 .../apache/spark/sql/execution/command/v1/ShowTablesSuite.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala
index 04edfb32c56ff..3db880c776365 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala
@@ -112,7 +112,7 @@ trait ShowTablesSuiteBase extends command.ShowTablesSuiteBase {
 }
 
 class ShowTablesSuite extends ShowTablesSuiteBase with SharedSparkSession {
-  test("show partitions from a datasource table") {
+  test("SPARK-33670: show partitions from a datasource table") {
     import testImplicits._
     withNamespace(s"$catalog.ns") {
       sql(s"CREATE NAMESPACE $catalog.ns")
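A minimal sketch of the behaviour this series targets, assuming a Hive-enabled
SparkSession named spark (for example in spark-shell) and a scratch table name t;
both names are illustrative and are not taken from the patches. With filesource
partition management disabled, SHOW TABLE EXTENDED ... PARTITION on a datasource
table should now fail with the same AnalysisException that SHOW PARTITIONS already
raises, because ShowTablesCommand calls DDLUtils.verifyPartitionProviderIsHive
before normalizing the partition spec.

  import org.apache.spark.sql.AnalysisException

  // A partitioned datasource (parquet) table, similar to the one created in the
  // new ShowTablesSuite test.
  spark.range(3)
    .selectExpr("CAST(id AS INT) AS partCol", "CAST(id AS STRING) AS value")
    .write
    .partitionBy("partCol")
    .format("parquet")
    .saveAsTable("t")

  // Works while filesource partition management is enabled (the default):
  // one row describing partition partCol=1 is returned.
  spark.sql("SHOW TABLE EXTENDED LIKE 't' PARTITION (partCol = 1)").show(truncate = false)

  // After disabling the conf, the new guard is expected to reject the command
  // in the same way SHOW PARTITIONS is rejected.
  spark.conf.set("spark.sql.hive.manageFilesourcePartitions", "false")
  try {
    spark.sql("SHOW TABLE EXTENDED LIKE 't' PARTITION (partCol = 1)").show()
  } catch {
    case e: AnalysisException =>
      // Expected after the patch: the message mentions that
      // "filesource partition management is disabled".
      println(e.getMessage)
  }

The foreach rewrite in PartitionProviderCompatibilitySuite asserts exactly this
pairing: both SHOW PARTITIONS and SHOW TABLE EXTENDED ... PARTITION must report
"filesource partition management is disabled" once the conf is turned off.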