 
 package org.apache.spark
 
+import java.util.concurrent.ConcurrentHashMap
+
 import scala.collection.JavaConverters._
-import scala.collection.concurrent.TrieMap
-import scala.collection.mutable.{HashMap, LinkedHashSet}
+import scala.collection.mutable.LinkedHashSet
+
 import org.apache.spark.serializer.KryoSerializer
 
 /**
@@ -47,12 +49,12 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
   /** Create a SparkConf that loads defaults from system properties and the classpath */
   def this() = this(true)
 
-  private[spark] val settings = new TrieMap[String, String]()
+  private val settings = new ConcurrentHashMap[String, String]()
 
   if (loadDefaults) {
     // Load any spark.* system properties
     for ((k, v) <- System.getProperties.asScala if k.startsWith("spark.")) {
-      settings(k) = v
+      set(k, v)
     }
   }
 
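The heart of the change: `settings` moves from Scala's `TrieMap` to `java.util.concurrent.ConcurrentHashMap`, and the system-property loop now goes through `set(k, v)`, so those entries hit the same null checks as user-supplied settings. `ConcurrentHashMap` is not a Scala collection, so the `settings(k) = v` update syntax no longer applies, and it rejects null keys and values outright. A minimal standalone sketch of those semantics (demo values are ours, not part of this patch):

```scala
import java.util.concurrent.ConcurrentHashMap

// Illustrative demo, not Spark code: basic ConcurrentHashMap semantics
val m = new ConcurrentHashMap[String, String]()

m.put("spark.app.name", "demo")                  // returns null (no previous value)
val previous = m.put("spark.app.name", "demo2")  // returns "demo"

// Unlike a Scala TrieMap, ConcurrentHashMap throws on null keys or values,
// which is why SparkConf.set's explicit null checks still matter
try m.put("spark.master", null)
catch { case _: NullPointerException => println("nulls rejected") }
```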
@@ -64,7 +66,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
     if (value == null) {
       throw new NullPointerException("null value for " + key)
     }
-    settings(key) = value
+    settings.put(key, value)
     this
   }
 
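`put` returns the previous value (or null), which `set` deliberately ignores; the method still returns `this`, so builder-style chaining is unchanged. For illustration:

```scala
// Fluent usage keeps working exactly as before
val conf = new SparkConf(false)
  .set("spark.app.name", "demo")
  .set("spark.master", "local[2]")
```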
@@ -130,15 +132,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
 
   /** Set multiple parameters together */
   def setAll(settings: Traversable[(String, String)]) = {
-    this.settings ++= settings
+    this.settings.putAll(settings.toMap.asJava)
     this
   }
 
   /** Set a parameter if it isn't already configured */
   def setIfMissing(key: String, value: String): SparkConf = {
-    if (!settings.contains(key)) {
-      settings(key) = value
-    }
+    settings.putIfAbsent(key, value)
     this
   }
 
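The `setIfMissing` rewrite is a semantic fix, not just a simplification: the old contains-then-update pair is two separate operations and could race with a concurrent writer, while `putIfAbsent` checks and inserts in one atomic step. A small sketch of the contract (standalone, illustrative key and values):

```scala
import java.util.concurrent.ConcurrentHashMap

val m = new ConcurrentHashMap[String, String]()

// First call inserts and returns null
assert(m.putIfAbsent("spark.shuffle.compress", "true") == null)
// Later calls are atomic no-ops that return the value already present
assert(m.putIfAbsent("spark.shuffle.compress", "false") == "true")
assert(m.get("spark.shuffle.compress") == "true")
```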
@@ -164,21 +164,23 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
 
   /** Get a parameter; throws a NoSuchElementException if it's not set */
   def get(key: String): String = {
-    settings.getOrElse(key, throw new NoSuchElementException(key))
+    getOption(key).getOrElse(throw new NoSuchElementException(key))
   }
 
   /** Get a parameter, falling back to a default if not set */
   def get(key: String, defaultValue: String): String = {
-    settings.getOrElse(key, defaultValue)
+    getOption(key).getOrElse(defaultValue)
   }
 
   /** Get a parameter as an Option */
   def getOption(key: String): Option[String] = {
-    settings.get(key)
+    Option(settings.get(key))
   }
 
   /** Get all parameters as a list of pairs */
-  def getAll: Array[(String, String)] = settings.toArray
+  def getAll: Array[(String, String)] = {
+    settings.entrySet().asScala.map(x => (x.getKey, x.getValue)).toArray
+  }
 
   /** Get a parameter as an integer, falling back to a default if not set */
   def getInt(key: String, defaultValue: Int): Int = {
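The read path is rebuilt around `getOption`: `ConcurrentHashMap.get` returns null for a missing key instead of an `Option`, and wrapping it in `Option(...)` normalizes that null to `None`, so the public contract of all three getters is preserved. For illustration:

```scala
val m = new java.util.concurrent.ConcurrentHashMap[String, String]()
m.put("spark.app.name", "demo")

assert(Option(m.get("spark.app.name")) == Some("demo"))
assert(Option(m.get("spark.no.such.key")) == None)  // Option(null) == None
```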
@@ -225,11 +227,11 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
   def getAppId: String = get("spark.app.id")
 
   /** Does the configuration contain a given parameter? */
-  def contains(key: String): Boolean = settings.contains(key)
+  def contains(key: String): Boolean = settings.containsKey(key)
 
   /** Copy this object */
   override def clone: SparkConf = {
-    new SparkConf(false).setAll(settings)
+    new SparkConf(false).setAll(getAll)
   }
 
   /**
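`clone` can no longer hand the Java map straight to `setAll`, which takes a `Traversable[(String, String)]`, so it copies through `getAll`; the entry-set traversal behind `getAll` is weakly consistent, giving the clone a point-in-time snapshot even under concurrent writes. The expected copy semantics, sketched:

```scala
val original = new SparkConf(false).set("spark.app.name", "demo")
val copy = original.clone

// The two confs are backed by independent maps
copy.set("spark.app.name", "changed")
assert(original.get("spark.app.name") == "demo")
```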
@@ -241,7 +243,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
   /** Checks for illegal or deprecated config settings. Throws an exception for the former. Not
    * idempotent - may mutate this conf object to convert deprecated settings to supported ones. */
   private[spark] def validateSettings() {
-    if (settings.contains("spark.local.dir")) {
+    if (contains("spark.local.dir")) {
       val msg = "In Spark 1.0 and later spark.local.dir will be overridden by the value set by " +
         "the cluster manager (via SPARK_LOCAL_DIRS in mesos/standalone and LOCAL_DIRS in YARN)."
       logWarning(msg)
@@ -266,7 +268,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
     }
 
     // Validate spark.executor.extraJavaOptions
-    settings.get(executorOptsKey).map { javaOpts =>
+    getOption(executorOptsKey).map { javaOpts =>
       if (javaOpts.contains("-Dspark")) {
         val msg = s"$executorOptsKey is not allowed to set Spark options (was '$javaOpts'). " +
           "Set them directly on a SparkConf or in a properties file when using ./bin/spark-submit."
@@ -346,7 +348,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
    * configuration out for debugging.
    */
   def toDebugString: String = {
-    settings.toArray.sorted.map{case (k, v) => k + "=" + v}.mkString("\n")
+    getAll.sorted.map{case (k, v) => k + "=" + v}.mkString("\n")
   }
 }
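Since `getAll` now returns an `Array[(String, String)]`, the `.sorted` in `toDebugString` uses the implicit tuple ordering (by key, then value), so the debug output stays deterministic regardless of hash-map iteration order. Expected usage:

```scala
val conf = new SparkConf(false)
  .set("spark.master", "local[2]")
  .set("spark.app.name", "demo")

println(conf.toDebugString)
// spark.app.name=demo
// spark.master=local[2]
```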