Skip to content

Commit 1d8ae44

Browse files
committed
fix final tests?
1 parent 1c50813 commit 1d8ae44

File tree

4 files changed

+33
-9
lines changed

4 files changed

+33
-9
lines changed

project/SparkBuild.scala

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -205,6 +205,9 @@ object SparkBuild extends PomBuild {
205205
sparkShell := {
206206
(runMain in Compile).toTask(" org.apache.spark.repl.Main -usejavacp").value
207207
},
208+
209+
javaOptions in Compile += "-Dspark.master=local",
210+
208211
sparkSql := {
209212
(runMain in Compile).toTask(" org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver").value
210213
}

sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -215,9 +215,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
215215
}
216216

217217
sparkContext.getConf.getAll.foreach {
218-
case (key, value) if key.startsWith("spark.sql") =>
219-
println(s"$key=$value")
220-
setConf(key, value)
218+
case (key, value) if key.startsWith("spark.sql") => setConf(key, value)
221219
case _ =>
222220
}
223221

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -100,10 +100,14 @@ private[hive] object SparkSQLCLIDriver {
100100

101101
// Set all properties specified via command line.
102102
val conf: HiveConf = sessionState.getConf
103-
sessionState.cmdProperties.entrySet().foreach { item: java.util.Map.Entry[Object, Object] =>
104-
//conf.set(item.getKey.asInstanceOf[String], item.getValue.asInstanceOf[String])
105-
//sessionState.getOverriddenConfigurations.put(
106-
// item.getKey.asInstanceOf[String], item.getValue.asInstanceOf[String])
103+
sessionState.cmdProperties.entrySet().foreach { item =>
104+
val key = item.getKey.asInstanceOf[String]
105+
val value = item.getValue.asInstanceOf[String]
106+
// We do not propagate metastore options to the execution copy of hive.
107+
if (key != "javax.jdo.option.ConnectionURL") {
108+
conf.set(key, value)
109+
sessionState.getOverriddenConfigurations.put(key, value)
110+
}
107111
}
108112

109113
SessionState.start(sessionState)

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala

Lines changed: 21 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -290,15 +290,34 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
290290
/** Overridden by child classes that need to set configuration before the client init. */
291291
protected def configure(): Map[String, String] = Map.empty
292292

293-
294293
protected[hive] class SQLSession extends super.SQLSession {
295294
protected[sql] override lazy val conf: SQLConf = new SQLConf {
296295
override def dialect: String = getConf(SQLConf.DIALECT, "hiveql")
297296
}
298297

299298
protected[hive] def localSession = executionHive.state
300299

301-
protected[hive] def hiveconf = executionConf
300+
/**
301+
* SQLConf and HiveConf contracts:
302+
*
303+
* 1. reuse existing started SessionState if any
304+
* 2. when the Hive session is first initialized, params in HiveConf will get picked up by the
305+
* SQLConf. Additionally, any properties set by set() or a SET command inside sql() will be
306+
* set in the SQLConf *as well as* in the HiveConf.
307+
*/
308+
protected[hive] lazy val sessionState: SessionState = {
309+
var state = SessionState.get()
310+
if (state == null) {
311+
state = new SessionState(new HiveConf(classOf[SessionState]))
312+
SessionState.start(state)
313+
}
314+
state
315+
}
316+
317+
protected[hive] lazy val hiveconf: HiveConf = {
318+
setConf(sessionState.getConf.getAllProperties)
319+
sessionState.getConf
320+
}
302321
}
303322

304323
override protected[sql] def dialectClassName = if (conf.dialect == "hiveql") {

0 commit comments

Comments
 (0)