@@ -22,9 +22,13 @@ import org.apache.spark.sql.catalyst.util.quietly
 import org.apache.spark.util.Utils
 import org.scalatest.FunSuite
 
+/**
+ * A simple set of tests that call the methods of a Hive ClientInterface, loading different
+ * versions of Hive from Maven Central. These tests are simple in that they mostly just check
+ * that reflective calls are not throwing NoSuchMethodError, but the actual functionality is
+ * not fully tested.
+ */
 class VersionsSuite extends FunSuite with Logging {
-  val testType = "derby"
-
   private def buildConf() = {
     lazy val warehousePath = Utils.createTempDir()
     lazy val metastorePath = Utils.createTempDir()
@@ -50,6 +54,14 @@ class VersionsSuite extends FunSuite with Logging {
     causes
   }
 
+  private val emptyDir = Utils.createTempDir().getCanonicalPath
+
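+  // A LinkedHashMap is used because Hive relies on the insertion order of the partition
+  // spec when constructing partition paths; a plain HashMap would not be safe here.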
+  private def partSpec = {
+    val hashMap = new java.util.LinkedHashMap[String, String]
+    hashMap.put("key", "1")
+    hashMap
+  }
+
   // It's actually pretty easy to mess things up and have all of your tests "pass" by accidentally
   // connecting to an auto-populated, in-process metastore. Let's make sure we are getting the
   // versions right by forcing a known compatibility failure.
@@ -66,10 +78,9 @@ class VersionsSuite extends FunSuite with Logging {
   private var client: ClientInterface = null
 
   versions.foreach { version =>
-    test(s"$version: listTables") {
+    test(s"$version: create client") {
       client = null
       client = IsolatedClientLoader.forVersion(version, buildConf()).client
-      client.listTables("default")
     }
 
     test(s"$version: createDatabase") {
@@ -102,8 +113,63 @@ class VersionsSuite extends FunSuite with Logging {
       client.getTable("default", "src")
     }
 
+    test(s"$version: listTables") {
+      assert(client.listTables("default") === Seq("src"))
+    }
+
+    test(s"$version: currentDatabase") {
+      assert(client.currentDatabase === "default")
+    }
+
+    test(s"$version: getDatabase") {
+      client.getDatabase("default")
+    }
+
+    test(s"$version: alterTable") {
+      client.alterTable(client.getTable("default", "src"))
+    }
+
     test(s"$version: set command") {
       client.runSqlHive("SET spark.sql.test.key=1")
     }
+
+    test(s"$version: create partitioned table DDL") {
+      client.runSqlHive("CREATE TABLE src_part (value INT) PARTITIONED BY (key INT)")
+      client.runSqlHive("ALTER TABLE src_part ADD PARTITION (key = '1')")
+    }
+
+    test(s"$version: getPartitions") {
+      client.getAllPartitions(client.getTable("default", "src_part"))
+    }
+
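+    // The four boolean arguments correspond, in order, to the flags of Hive's
+    // Hive.loadPartition: replace, holdDDLTime, inheritTableSpecs, and
+    // isSkewedStoreAsSubdir.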
+    test(s"$version: loadPartition") {
+      client.loadPartition(
+        emptyDir,
+        "default.src_part",
+        partSpec,
+        false,
+        false,
+        false,
+        false)
+    }
+
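+    // The two boolean arguments correspond to Hive's loadTable flags:
+    // replace and holdDDLTime.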
+    test(s"$version: loadTable") {
+      client.loadTable(
+        emptyDir,
+        "src",
+        false,
+        false)
+    }
+
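+    // Arguments follow Hive's loadDynamicPartitions: load path, table name, partition
+    // spec, replace, number of dynamic partitions (numDP), holdDDLTime, and
+    // listBucketingEnabled (assumed order, mirroring the Hive 0.13-era API).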
+    test(s"$version: loadDynamicPartitions") {
+      client.loadDynamicPartitions(
+        emptyDir,
+        "default.src_part",
+        partSpec,
+        false,
+        1,
+        false,
+        false)
+    }
   }
 }