Skip to content

Commit ee47742

Browse files
committed
util method
1 parent 5043eb6 commit ee47742

File tree

3 files changed

+23
-13
lines changed

3 files changed

+23
-13
lines changed

core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala

Lines changed: 20 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ import java.util.{Arrays, Comparator, Date, Locale}
2424

2525
import scala.collection.JavaConverters._
2626
import scala.collection.mutable
27+
import scala.collection.mutable.HashMap
2728
import scala.util.control.NonFatal
2829

2930
import com.google.common.primitives.Longs
@@ -99,17 +100,30 @@ class SparkHadoopUtil extends Logging {
99100
hadoopConf.set("fs.s3a.session.token", sessionToken)
100101
}
101102
}
102-
// Copy any "spark.hadoop.foo=bar" system properties into conf as "foo=bar"
103-
conf.getAll.foreach { case (key, value) =>
104-
if (key.startsWith("spark.hadoop.")) {
105-
hadoopConf.set(key.substring("spark.hadoop.".length), value)
106-
}
107-
}
103+
appendSparkHadoopConfigs(conf, hadoopConf)
108104
val bufferSize = conf.get("spark.buffer.size", "65536")
109105
hadoopConf.set("io.file.buffer.size", bufferSize)
110106
}
111107
}
112108

109+
/**
 * Copies any "spark.hadoop.foo=bar" entries of the given [[SparkConf]] into the Hadoop
 * configuration as "foo=bar", stripping the "spark.hadoop." prefix.
 *
 * @param conf Spark configuration whose entries are scanned for the prefix.
 * @param hadoopConf Hadoop configuration that receives the stripped key/value pairs.
 */
def appendSparkHadoopConfigs(conf: SparkConf, hadoopConf: Configuration): Unit = {
  // Copy any "spark.hadoop.foo=bar" spark properties into conf as "foo=bar"
  // (NOTE: fixed comment — the source here is the SparkConf, not JVM system properties;
  // the sys.props-based overload handles system properties.)
  conf.getAll.foreach { case (key, value) =>
    if (key.startsWith("spark.hadoop.")) {
      hadoopConf.set(key.substring("spark.hadoop.".length), value)
    }
  }
}
117+
118+
/**
 * Copies any "spark.hadoop.foo=bar" JVM system properties into the given map as
 * "foo=bar", stripping the "spark.hadoop." prefix.
 *
 * @param propMap mutable map that receives the stripped key/value pairs.
 */
def appendSparkHadoopConfigs(propMap: HashMap[String, String]): Unit = {
  // Copy any "spark.hadoop.foo=bar" system properties into conf as "foo=bar"
  val prefix = "spark.hadoop."
  for ((key, value) <- sys.props if key.startsWith(prefix)) {
    propMap.put(key.stripPrefix(prefix), value)
  }
}
126+
113127
/**
114128
* Return an appropriate (subclass) of Configuration. Creating config can initializes some Hadoop
115129
* subsystems.

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -168,7 +168,7 @@ private[hive] object SparkSQLCLIDriver extends Logging {
168168
// Execute -i init files (always in silent mode)
169169
cli.processInitFiles(sessionState)
170170

171-
newHiveConf.foreach{ kv =>
171+
newHiveConf.foreach { kv =>
172172
SparkSQLEnv.sqlContext.setConf(kv._1, kv._2)
173173
}
174174

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars
3535
import org.apache.hadoop.hive.serde2.io.{DateWritable, TimestampWritable}
3636
import org.apache.hadoop.util.VersionInfo
3737

38+
import org.apache.spark.deploy.SparkHadoopUtil
3839
import org.apache.spark.{SparkConf, SparkContext}
3940
import org.apache.spark.internal.Logging
4041
import org.apache.spark.sql._
@@ -404,12 +405,7 @@ private[spark] object HiveUtils extends Logging {
404405
propMap.put(ConfVars.METASTORE_EVENT_LISTENERS.varname, "")
405406
propMap.put(ConfVars.METASTORE_END_FUNCTION_LISTENERS.varname, "")
406407

407-
// Copy any "spark.hadoop.foo=bar" system properties into conf as "foo=bar"
408-
sys.props.foreach { case (key, value) =>
409-
if (key.startsWith("spark.hadoop.")) {
410-
propMap.put(key.substring("spark.hadoop.".length), value)
411-
}
412-
}
408+
SparkHadoopUtil.get.appendSparkHadoopConfigs(propMap)
413409

414410
propMap.toMap
415411
}

0 commit comments

Comments
 (0)