@@ -17,9 +17,11 @@
 
 package org.apache.spark.sql.hive.execution
 
+import java.io.File
+
 import scala.util.Try
 
-import org.apache.spark.sql.{SchemaRDD, Row}
+import org.apache.spark.SparkException
 import org.apache.spark.sql.hive._
 import org.apache.spark.sql.hive.test.TestHive
 import org.apache.spark.sql.hive.test.TestHive._
@@ -313,7 +315,7 @@ class HiveQuerySuite extends HiveComparisonTest {
     "SELECT srcalias.KEY, SRCALIAS.value FROM sRc SrCAlias WHERE SrCAlias.kEy < 15")
 
   test("case sensitivity: registered table") {
-    val testData: SchemaRDD =
+    val testData =
       TestHive.sparkContext.parallelize(
         TestData(1, "str1") ::
         TestData(2, "str2") :: Nil)
@@ -467,7 +469,7 @@ class HiveQuerySuite extends HiveComparisonTest {
     }
 
     // Describe a registered temporary table.
-    val testData: SchemaRDD =
+    val testData =
       TestHive.sparkContext.parallelize(
        TestData(1, "str1") ::
        TestData(1, "str2") :: Nil)
@@ -495,6 +497,23 @@ class HiveQuerySuite extends HiveComparisonTest {
     }
   }
 
+  test("ADD JAR command") {
+    val testJar = TestHive.getHiveFile("data/files/TestSerDe.jar").getCanonicalPath
+    sql("CREATE TABLE alter1(a INT, b INT)")
+    intercept[Exception] {
+      sql(
+        """ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.TestSerDe'
+          |WITH serdeproperties('s1'='9')
+        """.stripMargin)
+    }
+    sql(s"ADD JAR $testJar")
+    sql(
+      """ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.TestSerDe'
+        |WITH serdeproperties('s1'='9')
+      """.stripMargin)
+    sql("DROP TABLE alter1")
+  }
+
   test("parse HQL set commands") {
     // Adapted from its SQL counterpart.
     val testKey = "spark.sql.key.usedfortestonly"
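Note (not part of the patch above): a minimal sketch of the behaviour the new "ADD JAR command" test exercises, assuming a TestHive-backed session and a locally available TestSerDe.jar. The `serDeJar` path and the `add_jar_demo` table name are illustrative only. Before ADD JAR, Hive cannot load the custom SerDe class, so the ALTER TABLE fails; once ADD JAR puts the jar on the session classpath, the same statement succeeds.

import org.apache.spark.sql.hive.test.TestHive._

// Illustrative path only; the suite resolves the real jar via TestHive.getHiveFile(...).
val serDeJar = "/path/to/TestSerDe.jar"

sql("CREATE TABLE add_jar_demo(a INT, b INT)")

// Without the jar on the classpath, Hive cannot instantiate
// org.apache.hadoop.hive.serde2.TestSerDe, so this ALTER TABLE is expected to throw:
// sql("ALTER TABLE add_jar_demo SET SERDE 'org.apache.hadoop.hive.serde2.TestSerDe'")

// ADD JAR makes the jar available to the Hive session's classloader.
sql(s"ADD JAR $serDeJar")

// The same ALTER TABLE now succeeds because the SerDe class is loadable.
sql(
  """ALTER TABLE add_jar_demo SET SERDE 'org.apache.hadoop.hive.serde2.TestSerDe'
    |WITH serdeproperties('s1'='9')
  """.stripMargin)

sql("DROP TABLE add_jar_demo")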