Skip to content

Commit 61a370d

Browse files
some minor fixes
1 parent bc6e1ec commit 61a370d

File tree

6 files changed

+18
-19
lines changed

6 files changed

+18
-19
lines changed

core/src/main/scala/org/apache/spark/HttpFileServer.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ import com.google.common.io.Files
2424
import org.apache.spark.util.Utils
2525

2626
private[spark] class HttpFileServer(
27+
conf: SparkConf,
2728
securityManager: SecurityManager,
2829
requestedPort: Int = 0)
2930
extends Logging {
@@ -41,7 +42,7 @@ private[spark] class HttpFileServer(
4142
fileDir.mkdir()
4243
jarDir.mkdir()
4344
logInfo("HTTP File server directory is " + baseDir)
44-
httpServer = new HttpServer(baseDir, securityManager, requestedPort, "HTTP file server")
45+
httpServer = new HttpServer(conf, baseDir, securityManager, requestedPort, "HTTP file server")
4546
httpServer.start()
4647
serverUri = httpServer.uri
4748
logDebug("HTTP file server started at: " + serverUri)

core/src/main/scala/org/apache/spark/HttpServer.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,7 @@ private[spark] class ServerStateException(message: String) extends Exception(mes
4242
* around a Jetty server.
4343
*/
4444
private[spark] class HttpServer(
45+
conf: SparkConf,
4546
resourceBase: File,
4647
securityManager: SecurityManager,
4748
requestedPort: Int = 0,
@@ -57,7 +58,7 @@ private[spark] class HttpServer(
5758
} else {
5859
logInfo("Starting HTTP Server")
5960
val (actualServer, actualPort) =
60-
Utils.startServiceOnPort[Server](requestedPort, doStart, new SparkConf(), serverName)
61+
Utils.startServiceOnPort[Server](requestedPort, doStart, conf, serverName)
6162
server = actualServer
6263
port = actualPort
6364
}

core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -370,7 +370,9 @@ private[spark] object SparkConf {
370370
}
371371

372372
/**
373-
* Return whether the given config is a Spark port config.
373+
* Return true if the given config matches either `spark.*.port` or `spark.port.*`.
374374
*/
375-
def isSparkPortConf(name: String): Boolean = name.startsWith("spark.") && name.contains(".port")
375+
def isSparkPortConf(name: String): Boolean = {
376+
(name.startsWith("spark.") && name.endsWith(".port")) || name.startsWith("spark.port.")
377+
}
376378
}

core/src/main/scala/org/apache/spark/SparkEnv.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -312,7 +312,7 @@ object SparkEnv extends Logging {
312312
val httpFileServer =
313313
if (isDriver) {
314314
val fileServerPort = conf.getInt("spark.fileserver.port", 0)
315-
val server = new HttpFileServer(securityManager, fileServerPort)
315+
val server = new HttpFileServer(conf, securityManager, fileServerPort)
316316
server.initialize()
317317
conf.set("spark.fileserver.uri", server.serverUri)
318318
server

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1693,11 +1693,12 @@ private[spark] object Utils extends Logging {
16931693
* Default maximum number of retries when binding to a port before giving up.
16941694
*/
16951695
def portMaxRetries(conf: SparkConf): Int = {
1696-
if (sys.props.contains("spark.testing")) {
1696+
val maxRetries = conf.getOption("spark.port.maxRetries").map(_.toInt)
1697+
if (conf.contains("spark.testing")) {
16971698
// Set a higher number of retries for tests...
1698-
sys.props.get("spark.port.maxRetries").map(_.toInt).getOrElse(100)
1699+
maxRetries.getOrElse(100)
16991700
} else {
1700-
conf.getOption("spark.port.maxRetries").map(_.toInt).getOrElse(16)
1701+
maxRetries.getOrElse(16)
17011702
}
17021703
}
17031704

@@ -1708,18 +1709,16 @@ private[spark] object Utils extends Logging {
17081709
* @param startPort The initial port to start the service on.
17091710
* @param startService Function to start service on a given port.
17101711
* This is expected to throw java.net.BindException on port collision.
1711-
* @param conf Used to get maximum number of retries.
1712+
* @param conf A SparkConf used to get the maximum number of retries when binding to a port.
17121713
* @param serviceName Name of the service.
17131714
*/
17141715
def startServiceOnPort[T](
17151716
startPort: Int,
17161717
startService: Int => (T, Int),
17171718
conf: SparkConf,
1718-
serviceName: String = ""
1719-
): (T, Int) = {
1719+
serviceName: String = ""): (T, Int) = {
17201720
val serviceString = if (serviceName.isEmpty) "" else s" '$serviceName'"
17211721
val maxRetries = portMaxRetries(conf)
1722-
logInfo(s"Starting service$serviceString on port $startPort with maximum $maxRetries retries. ")
17231722
for (offset <- 0 to maxRetries) {
17241723
// Do not increment port if startPort is 0, which is treated as a special port
17251724
val tryPort = if (startPort == 0) {

yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala

Lines changed: 3 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -148,13 +148,9 @@ class ExecutorRunnable(
148148
// registers with the Scheduler and transfers the spark configs. Since the Executor backend
149149
// uses Akka to connect to the scheduler, the akka settings are needed as well as the
150150
// authentication settings.
151-
sparkConf.getAll.filter { case (k, v) =>
152-
k.startsWith("spark.auth") || k.startsWith("spark.akka") || k.equals("spark.port.maxRetries")
153-
}.
154-
foreach { case (k, v) => javaOpts += YarnSparkHadoopUtil.escapeForShell(s"-D$k=$v") }
155-
156-
sparkConf.getAkkaConf.
157-
foreach { case (k, v) => javaOpts += YarnSparkHadoopUtil.escapeForShell(s"-D$k=$v") }
151+
sparkConf.getAll
152+
.filter { case (k, v) => SparkConf.isExecutorStartupConf(k) }
153+
.foreach { case (k, v) => javaOpts += YarnSparkHadoopUtil.escapeForShell(s"-D$k=$v") }
158154

159155
// Commenting it out for now - so that people can refer to the properties if required. Remove
160156
// it once cpuset version is pushed out.

0 commit comments

Comments (0)