Commit 396c226

make the grammar more like scala
1 parent: 191face

2 files changed (+4, -7 lines)

core/src/main/scala/org/apache/spark/SparkContext.scala
Lines changed: 2 additions & 4 deletions

@@ -176,10 +176,8 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
   logInfo(s"Running Spark version $SPARK_VERSION")
 
   private[spark] val conf = config.clone()
-  val portRetriesConf = conf.getOption("spark.port.maxRetries")
-  if (portRetriesConf.isDefined) {
-    System.setProperty("spark.port.maxRetries", portRetriesConf.get)
-  }
+  conf.getOption("spark.port.maxRetries")
+    .foreach(portRetriesConf => System.setProperty("spark.port.maxRetries", portRetriesConf))
   conf.validateSettings()
 
   /**

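The SparkContext change above replaces an explicit isDefined check followed by .get with Option.foreach, which runs the side effect only when a value is present. A minimal, self-contained sketch of the same idiom, using a plain Map lookup as a stand-in for conf.getOption (the object name and the value "16" below are illustrative, not Spark code):

// Sketch only: a Map lookup stands in for conf.getOption; key/value are illustrative.
object OptionForeachSketch {
  def main(args: Array[String]): Unit = {
    val settings = Map("spark.port.maxRetries" -> "16")

    // Style removed by the commit: test isDefined, then unwrap with .get.
    val portRetriesConf = settings.get("spark.port.maxRetries")
    if (portRetriesConf.isDefined) {
      System.setProperty("spark.port.maxRetries", portRetriesConf.get)
    }

    // Style introduced by the commit: foreach runs the side effect only for Some,
    // so there is no explicit .get and no separate presence check.
    settings.get("spark.port.maxRetries")
      .foreach(value => System.setProperty("spark.port.maxRetries", value))

    println(System.getProperty("spark.port.maxRetries")) // 16
  }
}

Both versions set the system property only when the key is defined; the foreach form simply expresses that in one chained expression.
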
yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
Lines changed: 2 additions & 3 deletions

@@ -75,10 +75,9 @@ trait ExecutorRunnableUtil extends Logging {
     // registers with the Scheduler and transfers the spark configs. Since the Executor backend
     // uses Akka to connect to the scheduler, the akka settings are needed as well as the
     // authentication settings.
-    sparkConf.getAll.
-      filter { case (k, v) =>
+    sparkConf.getAll.filter { case (k, v) =>
       k.startsWith("spark.auth") || k.startsWith("spark.akka") || k.equals("spark.port.maxRetries")
-      }.
+    }.
     foreach { case (k, v) => javaOpts += YarnSparkHadoopUtil.escapeForShell(s"-D$k=$v") }
 
     sparkConf.getAkkaConf.

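The YARN change is purely stylistic: filter is pulled onto the same line as sparkConf.getAll and the closing brace of the filter block is re-aligned with the chained foreach, with no change in behavior. A self-contained sketch of the same filter/foreach chain, where the config pairs and the quote helper are illustrative stand-ins for sparkConf.getAll and YarnSparkHadoopUtil.escapeForShell:

// Sketch only: allConf and quote are stand-ins, not the YARN code itself.
import scala.collection.mutable.ListBuffer

object JavaOptsSketch {
  def main(args: Array[String]): Unit = {
    // Stand-in for sparkConf.getAll: an array of (key, value) pairs.
    val allConf = Array(
      "spark.authenticate" -> "true",
      "spark.akka.askTimeout" -> "100",
      "spark.port.maxRetries" -> "16",
      "spark.ui.port" -> "4040")

    // Hypothetical simplification of YarnSparkHadoopUtil.escapeForShell.
    def quote(s: String): String = "'" + s + "'"

    val javaOpts = ListBuffer[String]()

    // Same shape as the commit: chain filter directly on the config list, keep only the
    // authentication/akka/port-retry settings, then append a -Dkey=value option per pair.
    allConf.filter { case (k, v) =>
      k.startsWith("spark.auth") || k.startsWith("spark.akka") || k.equals("spark.port.maxRetries")
    }.
    foreach { case (k, v) => javaOpts += quote(s"-D$k=$v") }

    javaOpts.foreach(println)
  }
}

Running the sketch prints quoted -D options for the spark.auth, spark.akka, and spark.port.maxRetries entries and skips spark.ui.port, mirroring what the YARN code builds into the executor's java options.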