
Commit 55729fa

review, comments
1 parent a2b23f3 commit 55729fa

2 files changed (+23, -13 lines)

core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala

Lines changed: 17 additions & 12 deletions
@@ -22,6 +22,7 @@ import java.security.PrivilegedExceptionAction
 import java.text.DateFormat
 import java.util.{Arrays, Comparator, Date, Locale}
 
+import scala.collection.immutable.Map
 import scala.collection.JavaConverters._
 import scala.collection.mutable
 import scala.collection.mutable.HashMap
@@ -75,7 +76,6 @@ class SparkHadoopUtil extends Logging {
     }
   }
 
-
   /**
    * Appends S3-specific, spark.hadoop.*, and spark.buffer.size configurations to a Hadoop
    * configuration.
@@ -106,21 +106,26 @@ class SparkHadoopUtil extends Logging {
     }
   }
 
+  /**
+   * Appends spark.hadoop.* configurations from a [[SparkConf]] to a Hadoop
+   * configuration without the spark.hadoop. prefix.
+   */
   def appendSparkHadoopConfigs(conf: SparkConf, hadoopConf: Configuration): Unit = {
-    // Copy any "spark.hadoop.foo=bar" system properties into conf as "foo=bar"
-    conf.getAll.foreach { case (key, value) =>
-      if (key.startsWith("spark.hadoop.")) {
-        hadoopConf.set(key.substring("spark.hadoop.".length), value)
-      }
+    // Copy any "spark.hadoop.foo=bar" spark properties into conf as "foo=bar"
+    for ((key, value) <- conf.getAll if key.startsWith("spark.hadoop.")) {
+      hadoopConf.set(key.substring("spark.hadoop.".length), value)
     }
   }
 
-  def appendSparkHadoopConfigs(propMap: HashMap[String, String]): Unit = {
-    // Copy any "spark.hadoop.foo=bar" system properties into conf as "foo=bar"
-    sys.props.foreach { case (key, value) =>
-      if (key.startsWith("spark.hadoop.")) {
-        propMap.put(key.substring("spark.hadoop.".length), value)
-      }
+  /**
+   * Appends spark.hadoop.* configurations from a Map to another without the spark.hadoop. prefix.
+   */
+  def appendSparkHadoopConfigs(
+      srcMap: Map[String, String],
+      destMap: HashMap[String, String]): Unit = {
+    // Copy any "spark.hadoop.foo=bar" entries of srcMap into destMap as "foo=bar"
+    for ((key, value) <- srcMap if key.startsWith("spark.hadoop.")) {
+      destMap.put(key.substring("spark.hadoop.".length), value)
     }
   }
 
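Illustration (not part of the commit): a minimal sketch of what the reworked `SparkConf` overload does, assuming the method is reachable through `SparkHadoopUtil.get`; the S3A key below is an arbitrary example.

import org.apache.hadoop.conf.Configuration
import org.apache.spark.SparkConf
import org.apache.spark.deploy.SparkHadoopUtil

// Any "spark.hadoop.foo=bar" entry on the SparkConf lands in the
// Hadoop Configuration as "foo=bar", with the prefix stripped.
val sparkConf = new SparkConf()
  .set("spark.hadoop.fs.s3a.connection.maximum", "100")
val hadoopConf = new Configuration()

SparkHadoopUtil.get.appendSparkHadoopConfigs(sparkConf, hadoopConf)
assert(hadoopConf.get("fs.s3a.connection.maximum") == "100")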
sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala

Lines changed: 6 additions & 1 deletion
@@ -405,7 +405,12 @@ private[spark] object HiveUtils extends Logging {
     propMap.put(ConfVars.METASTORE_EVENT_LISTENERS.varname, "")
     propMap.put(ConfVars.METASTORE_END_FUNCTION_LISTENERS.varname, "")
 
-    SparkHadoopUtil.get.appendSparkHadoopConfigs(propMap)
+    // SPARK-21451: Spark will gather all `spark.hadoop.*` properties from a `SparkConf` into a
+    // Hadoop Configuration internally, as long as that happens after the SparkContext is
+    // initialized. Some instances, such as the `CliSessionState` used in `SparkSQLCliDriver`,
+    // also rely on these configurations but are created before the SparkContext is initialized,
+    // so we need to take them from system properties, as regular Hadoop configurations.
+    SparkHadoopUtil.get.appendSparkHadoopConfigs(sys.props.toMap, propMap)
 
     propMap.toMap
   }
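A hypothetical usage sketch of the new two-argument overload this commit introduces, mirroring the pre-SparkContext path described in the comment above; the warehouse key is just an example.

import scala.collection.mutable.HashMap
import org.apache.spark.deploy.SparkHadoopUtil

// Simulate a spark.hadoop.* setting supplied as a JVM system property,
// i.e. before any SparkContext has been initialized (as in SparkSQLCliDriver).
sys.props("spark.hadoop.hive.metastore.warehouse.dir") = "/tmp/warehouse"

val propMap = new HashMap[String, String]()
SparkHadoopUtil.get.appendSparkHadoopConfigs(sys.props.toMap, propMap)

// The entry arrives without the "spark.hadoop." prefix.
assert(propMap.get("hive.metastore.warehouse.dir").contains("/tmp/warehouse"))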
