@@ -19,6 +19,7 @@ package org.apache.spark.sql.hive.execution
 
 import scala.util.Try
 
+import org.apache.spark.SparkException
 import org.apache.spark.sql.hive._
 import org.apache.spark.sql.hive.test.TestHive
 import org.apache.spark.sql.hive.test.TestHive._
@@ -380,7 +381,7 @@ class HiveQuerySuite extends HiveComparisonTest {
 
   def isExplanation(result: SchemaRDD) = {
     val explanation = result.select('plan).collect().map { case Row(plan: String) => plan }
-    explanation.exists(_ == "== Physical Plan ==")
+    explanation.contains("== Physical Plan ==")
   }
 
   test("SPARK-1704: Explain commands as a SchemaRDD") {
@@ -590,6 +591,30 @@ class HiveQuerySuite extends HiveComparisonTest {
       |DROP TABLE IF EXISTS dynamic_part_table;
     """.stripMargin)
 
+  test("Partition spec validation") {
+    sql("DROP TABLE IF EXISTS dp_test")
+    sql("CREATE TABLE dp_test(key INT, value STRING) PARTITIONED BY (dp INT, sp INT)")
+    sql("SET hive.exec.dynamic.partition.mode=strict")
+
+    // Should throw when using strict dynamic partition mode without any static partition
+    intercept[SparkException] {
+      sql(
+        """INSERT INTO TABLE dp_test PARTITION(dp)
+          |SELECT key, value, key % 5 FROM src
+        """.stripMargin)
+    }
+
+    sql("SET hive.exec.dynamic.partition.mode=nonstrict")
+
+    // Should throw when a static partition appears after a dynamic partition
+    intercept[SparkException] {
+      sql(
+        """INSERT INTO TABLE dp_test PARTITION(dp, sp = 1)
+          |SELECT key, value, key % 5 FROM src
+        """.stripMargin)
+    }
+  }
+
   test("SPARK-3414 regression: should store analyzed logical plan when registering a temp table") {
     sparkContext.makeRDD(Seq.empty[LogEntry]).registerTempTable("rawLogs")
     sparkContext.makeRDD(Seq.empty[LogFile]).registerTempTable("logFiles")
@@ -647,27 +672,27 @@ class HiveQuerySuite extends HiveComparisonTest {
     assert(sql("SET").collect().size == 0)
 
     assertResult(Set(testKey -> testVal)) {
-      collectResults(hql(s"SET $testKey=$testVal"))
+      collectResults(sql(s"SET $testKey=$testVal"))
     }
 
     assert(hiveconf.get(testKey, "") == testVal)
     assertResult(Set(testKey -> testVal)) {
-      collectResults(hql("SET"))
+      collectResults(sql("SET"))
     }
 
     sql(s"SET ${testKey + testKey}=${testVal + testVal}")
     assert(hiveconf.get(testKey + testKey, "") == testVal + testVal)
     assertResult(Set(testKey -> testVal, (testKey + testKey) -> (testVal + testVal))) {
-      collectResults(hql("SET"))
+      collectResults(sql("SET"))
     }
 
     // "set key"
     assertResult(Set(testKey -> testVal)) {
-      collectResults(hql(s"SET $testKey"))
+      collectResults(sql(s"SET $testKey"))
     }
 
     assertResult(Set(nonexistentKey -> "<undefined>")) {
-      collectResults(hql(s"SET $nonexistentKey"))
+      collectResults(sql(s"SET $nonexistentKey"))
     }
 
     // Assert that sql() should have the same effects as sql() by repeating the above using sql().
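For context, the new "Partition spec validation" test pins down Hive's dynamic-partition rules: strict mode requires at least one static partition column, and a static column may never follow a dynamic one. Below is a minimal illustrative sketch, not part of the patch, of a spec that should pass the validation, assuming the same dp_test table and src data created in the test above and the TestHive implicits in scope.

// Illustrative sketch only: the static column (dp = 1) precedes the dynamic
// one (sp), so this insert is expected to be accepted in nonstrict mode.
import org.apache.spark.sql.hive.test.TestHive._

sql("SET hive.exec.dynamic.partition.mode=nonstrict")
sql(
  """INSERT INTO TABLE dp_test PARTITION(dp = 1, sp)
    |SELECT key, value, key % 5 FROM src
  """.stripMargin)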