Skip to content

Commit f5de7de

Browse files
committed
cleanup
1 parent 11e9c72 commit f5de7de

File tree

8 files changed

+27
-31
lines changed

8 files changed

+27
-31
lines changed

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -151,9 +151,6 @@ private[hive] object SparkSQLCLIDriver {
151151
case e: UnsupportedEncodingException => System.exit(3)
152152
}
153153

154-
// TODO: SET commands seem to be using the wrong session?
155-
sessionState.getConf.set("spark.sql.hive.version", HiveShim.version)
156-
157154
if (sessionState.database != null) {
158155
SparkSQLEnv.hiveContext.runSqlHive(s"USE ${sessionState.database}")
159156
}

sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -539,7 +539,7 @@ abstract class HiveThriftServer2Test extends FunSuite with BeforeAndAfterAll wit
539539
diagnosisBuffer.clear()
540540

541541
// Retries up to 3 times with different port numbers if the server fails to start
542-
Seq.empty.foldLeft(Try(startThriftServer(listeningPort, 0))) { case (started, attempt) =>
542+
(1 to 3).foldLeft(Try(startThriftServer(listeningPort, 0))) { case (started, attempt) =>
543543
started.orElse {
544544
listeningPort += 1
545545
stopThriftServer()

sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -242,14 +242,15 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
242242
// https://issues.apache.org/jira/browse/HIVE-7673 (in Hive 0.14 and trunk).
243243
"input46",
244244

245-
"combine1", // BROKEN
246-
247-
"part_inherit_tbl_props", // BROKEN
248-
"part_inherit_tbl_props_with_star", // BROKEN
245+
// These tests were broken by the hive client isolation PR.
246+
"part_inherit_tbl_props",
247+
"part_inherit_tbl_props_with_star",
249248

250-
"nullformatCTAS", // NEED TO FINISH CTAS parser
249+
"nullformatCTAS", // SPARK-7411: need to finish CTAS parser
251250

252-
"load_dyn_part14.*" // These work along but fail when run with other tests...
251+
// The isolated classloader seemed to make some of our test reset mechanisms less robust.
252+
"combine1", // This test changes compression settings in a way that breaks all subsequent tests.
253+
"load_dyn_part14.*" // These work alone but fail when run with other tests...
253254
) ++ HiveShim.compatibilityBlackList
254255

255256
/**

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -109,7 +109,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
109109

110110
/**
111111
* The location of the jars that should be used to instantiate the HiveMetastoreClient. This
112-
* property can be one of three option:
112+
* property can be one of three options:
113113
* - a colon-separated list of jar files or directories for hive and hadoop.
114114
* - builtin - attempt to discover the jars that were used to load Spark SQL and use those. This
115115
* option is only valid when using the execution version of Hive.
@@ -362,8 +362,6 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
362362
override def dialect: String = getConf(SQLConf.DIALECT, "hiveql")
363363
}
364364

365-
protected[hive] def localSession = executionHive.state
366-
367365
/**
368366
* SQLConf and HiveConf contracts:
369367
*

sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -22,30 +22,30 @@ import java.util.{Map => JMap}
2222

2323
import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchTableException}
2424

25-
case class HiveDatabase(
25+
private[hive] case class HiveDatabase(
2626
name: String,
2727
location: String)
2828

29-
abstract class TableType { val name: String }
30-
case object ExternalTable extends TableType { override val name = "EXTERNAL_TABLE" }
31-
case object IndexTable extends TableType { override val name = "INDEX_TABLE" }
32-
case object ManagedTable extends TableType { override val name = "MANAGED_TABLE" }
33-
case object VirtualView extends TableType { override val name = "VIRTUAL_VIEW" }
29+
private[hive] abstract class TableType { val name: String }
30+
private[hive] case object ExternalTable extends TableType { override val name = "EXTERNAL_TABLE" }
31+
private[hive] case object IndexTable extends TableType { override val name = "INDEX_TABLE" }
32+
private[hive] case object ManagedTable extends TableType { override val name = "MANAGED_TABLE" }
33+
private[hive] case object VirtualView extends TableType { override val name = "VIRTUAL_VIEW" }
3434

3535
// TODO: Use this for Tables and Partitions
36-
case class HiveStorageDescriptor(
36+
private[hive] case class HiveStorageDescriptor(
3737
location: String,
3838
inputFormat: String,
3939
outputFormat: String,
4040
serde: String,
4141
serdeProperties: Map[String, String])
4242

43-
case class HivePartition(
43+
private[hive] case class HivePartition(
4444
values: Seq[String],
4545
storage: HiveStorageDescriptor)
4646

47-
case class HiveColumn(name: String, hiveType: String, comment: String)
48-
case class HiveTable(
47+
private[hive] case class HiveColumn(name: String, hiveType: String, comment: String)
48+
private[hive] case class HiveTable(
4949
specifiedDatabase: Option[String],
5050
name: String,
5151
schema: Seq[HiveColumn],
@@ -82,7 +82,7 @@ case class HiveTable(
8282
* internal and external classloaders for a given version of Hive and thus must expose only
8383
* shared classes.
8484
*/
85-
trait ClientInterface {
85+
private[hive] trait ClientInterface {
8686
/**
8787
* Runs a HiveQL command using Hive, returning the results as a list of strings. Each row will
8888
* result in one string.

sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ import org.apache.spark.sql.execution.QueryExecutionException
5555
* @param config a collection of configuration options that will be added to the hive conf before
5656
* opening the hive client.
5757
*/
58-
class ClientWrapper(
58+
private[hive] class ClientWrapper(
5959
version: HiveVersion,
6060
config: Map[String, String])
6161
extends ClientInterface

sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ import org.apache.spark.sql.catalyst.util.quietly
3333
import org.apache.spark.sql.hive.HiveContext
3434

3535
/** Factory for `IsolatedClientLoader` with specific versions of hive. */
36-
object IsolatedClientLoader {
36+
private[hive] object IsolatedClientLoader {
3737
/**
3838
* Creates isolated Hive client loaders by downloading the requested version from maven.
3939
*/
@@ -100,7 +100,7 @@ object IsolatedClientLoader {
100100
* @param baseClassLoader The spark classloader that is used to load shared classes.
101101
*
102102
*/
103-
class IsolatedClientLoader(
103+
private[hive] class IsolatedClientLoader(
104104
val version: HiveVersion,
105105
val execJars: Seq[URL] = Seq.empty,
106106
val config: Map[String, String] = Map.empty,

sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -23,10 +23,10 @@ import org.apache.spark.util.Utils
2323
import org.scalatest.FunSuite
2424

2525
/**
26-
* A simple set of tests that call the methods of a hive ClientInterface, loading different version of hive
27-
* from maven central. These tests are simple in that they are mostly just testing to make sure that
28-
* reflective calls are not throwing NoSuchMethod error, but the actually functionallity is not fully
29-
* tested.
26+
* A simple set of tests that call the methods of a hive ClientInterface, loading different version
27+
* of hive from maven central. These tests are simple in that they are mostly just testing to make
28+
sure that reflective calls are not throwing NoSuchMethodError, but the actual functionality
29+
* is not fully tested.
3030
*/
3131
class VersionsSuite extends FunSuite with Logging {
3232
private def buildConf() = {

0 commit comments

Comments (0)