diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index b1cbb797b464..a0032756dfc8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -3773,17 +3773,6 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
       }
     }
   }
-
-  test("SPARK-33591: null as a partition value") {
-    val t = "part_table"
-    withTable(t) {
-      sql(s"CREATE TABLE $t (col1 INT, p1 STRING) USING PARQUET PARTITIONED BY (p1)")
-      sql(s"INSERT INTO TABLE $t PARTITION (p1 = null) SELECT 0")
-      checkAnswer(sql(s"SELECT * FROM $t"), Row(0, null))
-      sql(s"ALTER TABLE $t DROP PARTITION (p1 = null)")
-      checkAnswer(sql(s"SELECT * FROM $t"), Nil)
-    }
-  }
 }
 
 case class Foo(bar: Option[String])
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index cb25777530ff..64c25663bb57 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -33,6 +33,7 @@ import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, NoSuchDatabaseE
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.connector.catalog.SupportsNamespaces.PROP_OWNER
+import org.apache.spark.sql.execution.datasources.PartitioningUtils
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION
 import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
@@ -1733,9 +1734,8 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
 
     // null partition values
     createTablePartition(catalog, Map("a" -> null, "b" -> null), tableIdent)
-    val nullPartValue = if (isUsingHiveMetastore) "__HIVE_DEFAULT_PARTITION__" else null
     assert(catalog.listPartitions(tableIdent).map(_.spec).toSet ==
-      Set(Map("a" -> nullPartValue, "b" -> nullPartValue)))
+      Set(Map("a" -> "__HIVE_DEFAULT_PARTITION__", "b" -> "__HIVE_DEFAULT_PARTITION__")))
     sql("ALTER TABLE tab1 DROP PARTITION (a = null, b = null)")
     assert(catalog.listPartitions(tableIdent).isEmpty)
   }
@@ -3140,6 +3140,35 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
       assert(errMsg.contains(expectedError))
     }
   }
+
+  test("SPARK-33591, SPARK-34203: insert and drop partitions with null values") {
+    def checkPartitions(t: String, expected: Map[String, String]*): Unit = {
+      val partitions = sql(s"SHOW PARTITIONS $t")
+        .collect()
+        .toSet
+        .map((row: Row) => row.getString(0))
+        .map(PartitioningUtils.parsePathFragment)
+      assert(partitions === expected.toSet)
+    }
+    val defaultUsing = "USING " + (if (isUsingHiveMetastore) "hive" else "parquet")
+    def insertAndDropNullPart(t: String, insertCmd: String): Unit = {
+      sql(s"CREATE TABLE $t (col1 INT, p1 STRING) $defaultUsing PARTITIONED BY (p1)")
+      sql(insertCmd)
+      checkPartitions(t, Map("p1" -> ExternalCatalogUtils.DEFAULT_PARTITION_NAME))
+      sql(s"ALTER TABLE $t DROP PARTITION (p1 = null)")
+      checkPartitions(t)
+    }
+
+    withTable("tbl") {
+      insertAndDropNullPart("tbl", "INSERT INTO TABLE tbl PARTITION (p1 = null) SELECT 0")
+    }
+
+    withSQLConf("hive.exec.dynamic.partition.mode" -> "nonstrict") {
+      withTable("tbl") {
+        insertAndDropNullPart("tbl", "INSERT OVERWRITE TABLE tbl VALUES (0, null)")
+      }
+    }
+  }
 }
 
 object FakeLocalFsFileSystem {