2 files changed (+4, -7 lines):
  core/src/main/scala/org/apache/spark
  yarn/src/main/scala/org/apache/spark/deploy/yarn
@@ -176,10 +176,8 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationClient
     logInfo(s"Running Spark version $SPARK_VERSION")
 
     private[spark] val conf = config.clone()
-    val portRetriesConf = conf.getOption("spark.port.maxRetries")
-    if (portRetriesConf.isDefined) {
-      System.setProperty("spark.port.maxRetries", portRetriesConf.get)
-    }
+    conf.getOption("spark.port.maxRetries")
+      .foreach(portRetriesConf => System.setProperty("spark.port.maxRetries", portRetriesConf))
     conf.validateSettings()
 
     /**
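For reference, the new code replaces the explicit isDefined/get pair with Option.foreach, so the system property is only set when the key is present. A minimal standalone sketch of that idiom, using a plain Scala Map in place of SparkConf (object and variable names here are illustrative, not from the patch):

    // Sketch only: a Map stands in for SparkConf; the value "16" is made up.
    object PortRetriesSketch {
      def main(args: Array[String]): Unit = {
        val conf = Map("spark.port.maxRetries" -> "16")
        // Set the property only if the key exists -- no isDefined/get needed.
        conf.get("spark.port.maxRetries")
          .foreach(value => System.setProperty("spark.port.maxRetries", value))
        println(System.getProperty("spark.port.maxRetries"))  // prints 16
      }
    }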
@@ -75,10 +75,9 @@ trait ExecutorRunnableUtil extends Logging {
     // registers with the Scheduler and transfers the spark configs. Since the Executor backend
     // uses Akka to connect to the scheduler, the akka settings are needed as well as the
     // authentication settings.
-    sparkConf.getAll.
-      filter { case (k, v) =>
+    sparkConf.getAll.filter { case (k, v) =>
       k.startsWith("spark.auth") || k.startsWith("spark.akka") || k.equals("spark.port.maxRetries")
-      }.
+    }.
       foreach { case (k, v) => javaOpts += YarnSparkHadoopUtil.escapeForShell(s"-D$k=$v") }
 
     sparkConf.getAkkaConf.
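The filter/foreach chain above can be seen in isolation with the following hedged sketch: ordinary key/value pairs stand in for SparkConf, and plain string interpolation replaces YarnSparkHadoopUtil.escapeForShell (all keys and values below are illustrative, not from the patch):

    // Sketch only: a Seq stands in for sparkConf.getAll; no shell escaping here.
    object JavaOptsSketch {
      def main(args: Array[String]): Unit = {
        val settings = Seq(
          "spark.akka.frameSize"  -> "10",
          "spark.authenticate"    -> "true",
          "spark.port.maxRetries" -> "16",
          "spark.executor.memory" -> "2g")   // dropped by the filter below
        val javaOpts = scala.collection.mutable.ListBuffer[String]()
        settings.filter { case (k, _) =>
          k.startsWith("spark.auth") || k.startsWith("spark.akka") || k.equals("spark.port.maxRetries")
        }.foreach { case (k, v) => javaOpts += s"-D$k=$v" }
        // Prints -Dspark.akka.frameSize=10, -Dspark.authenticate=true, -Dspark.port.maxRetries=16
        javaOpts.foreach(println)
      }
    }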