@@ -19,6 +19,8 @@ package org.apache.spark.sql.hive.execution
1919
import scala.util.Try

import org.apache.hadoop.hive.conf.HiveConf.ConfVars

import org.apache.spark.SparkException
import org.apache.spark.sql.hive._
import org.apache.spark.sql.hive.test.TestHive
@@ -591,6 +593,45 @@ class HiveQuerySuite extends HiveComparisonTest {
591593 |DROP TABLE IF EXISTS dynamic_part_table;
592594 """ .stripMargin)
593595
596+ test(" Dynamic partition folder layout" ) {
597+ sql(" DROP TABLE IF EXISTS dynamic_part_table" )
598+ sql(" CREATE TABLE dynamic_part_table(intcol INT) PARTITIONED BY (partcol1 INT, partcol2 INT)" )
599+ sql(" SET hive.exec.dynamic.partition.mode=nonstrict" )
600+
601+ val data = Map (
602+ Seq (" 1" , " 1" ) -> 1 ,
603+ Seq (" 1" , " NULL" ) -> 2 ,
604+ Seq (" NULL" , " 1" ) -> 3 ,
605+ Seq (" NULL" , " NULL" ) -> 4 )
606+
607+ data.foreach { case (parts, value) =>
608+ sql(
609+ s """ INSERT INTO TABLE dynamic_part_table PARTITION(partcol1, partcol2)
610+ |SELECT $value, ${parts.mkString(" , " )} FROM src WHERE key=150
611+ """ .stripMargin)
612+
613+ val partFolder = Seq (" partcol1" , " partcol2" )
614+ .zip(parts)
615+ .map { case (k, v) =>
616+ if (v == " NULL" ) {
617+ s " $k= ${ConfVars .DEFAULTPARTITIONNAME .defaultVal}"
618+ } else {
619+ s " $k= $v"
620+ }
621+ }
622+ .mkString(" /" )
623+
624+ // Loads partition data to a temporary table to verify contents
625+ val path = s " $warehousePath/dynamic_part_table/ $partFolder/part-00000 "
626+
627+ sql(" DROP TABLE IF EXISTS dp_verify" )
628+ sql(" CREATE TABLE dp_verify(intcol INT)" )
629+ sql(s " LOAD DATA LOCAL INPATH ' $path' INTO TABLE dp_verify " )
630+
631+ assert(sql(" SELECT * FROM dp_verify" ).collect() === Array (Row (value)))
632+ }
633+ }
634+
594635 test(" Partition spec validation" ) {
595636 sql(" DROP TABLE IF EXISTS dp_test" )
596637 sql(" CREATE TABLE dp_test(key INT, value STRING) PARTITIONED BY (dp INT, sp INT)" )
0 commit comments