
Commit ef7069a

Clean up YarnClientSchedulerBackend more
1 parent 6de9072 commit ef7069a

File tree

2 files changed: +38 −32 lines changed

yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
Lines changed: 1 addition & 1 deletion

@@ -147,7 +147,7 @@ class ClientArguments(val args: Array[String], val sparkConf: SparkConf) {
       "Options:\n" +
       "  --jar JAR_PATH           Path to your application's JAR file (required in yarn-cluster mode)\n" +
       "  --class CLASS_NAME       Name of your application's main class (required)\n" +
-      "  --arg ARGS               Argument to be passed to your application's main class.\n" +
+      "  --arg ARG                Argument to be passed to your application's main class.\n" +
       "                           Multiple invocations are possible, each will be passed in order.\n" +
       "  --num-executors NUM      Number of executors to start (Default: 2)\n" +
       "  --executor-cores NUM     Number of cores for the executors (Default: 1).\n" +

yarn/common/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala
Lines changed: 37 additions & 31 deletions

@@ -39,45 +39,22 @@ private[spark] class YarnClientSchedulerBackend(
   var stopping: Boolean = false
   var totalExpectedExecutors = 0

-  private[spark] def addArg(optionName: String, envVar: String, sysProp: String,
-      arrayBuf: ArrayBuffer[String]) {
-    if (System.getenv(envVar) != null) {
-      arrayBuf += (optionName, System.getenv(envVar))
-    } else if (sc.getConf.contains(sysProp)) {
-      arrayBuf += (optionName, sc.getConf.get(sysProp))
-    }
-  }
-
+  /**
+   * Create a Yarn client to submit an application to the ResourceManager.
+   * This waits until the application is running.
+   */
   override def start() {
     super.start()
-
     val driverHost = conf.get("spark.driver.host")
     val driverPort = conf.get("spark.driver.port")
     val hostport = driverHost + ":" + driverPort
     conf.set("spark.driver.appUIAddress", sc.ui.appUIHostPort)

     val argsArrayBuf = new ArrayBuffer[String]()
-    argsArrayBuf += (
-      "--args", hostport
-    )
-
-    // process any optional arguments, given either as environment variables
-    // or system properties. use the defaults already defined in ClientArguments
-    // if things aren't specified. system properties override environment
-    // variables.
-    List(("--driver-memory", "SPARK_MASTER_MEMORY", "spark.master.memory"),
-      ("--driver-memory", "SPARK_DRIVER_MEMORY", "spark.driver.memory"),
-      ("--num-executors", "SPARK_WORKER_INSTANCES", "spark.executor.instances"),
-      ("--num-executors", "SPARK_EXECUTOR_INSTANCES", "spark.executor.instances"),
-      ("--executor-memory", "SPARK_WORKER_MEMORY", "spark.executor.memory"),
-      ("--executor-memory", "SPARK_EXECUTOR_MEMORY", "spark.executor.memory"),
-      ("--executor-cores", "SPARK_WORKER_CORES", "spark.executor.cores"),
-      ("--executor-cores", "SPARK_EXECUTOR_CORES", "spark.executor.cores"),
-      ("--queue", "SPARK_YARN_QUEUE", "spark.yarn.queue"),
-      ("--name", "SPARK_YARN_APP_NAME", "spark.app.name"))
-    .foreach { case (optName, envVar, sysProp) => addArg(optName, envVar, sysProp, argsArrayBuf) }
-
-    logDebug("ClientArguments called with: " + argsArrayBuf)
+    argsArrayBuf += ("--arg", hostport)
+    argsArrayBuf ++= getExtraClientArguments
+
+    logDebug("ClientArguments called with: " + argsArrayBuf.mkString(" "))
     val args = new ClientArguments(argsArrayBuf.toArray, conf)
     totalExpectedExecutors = args.numExecutors
     client = new Client(args, conf)

@@ -86,6 +63,35 @@ private[spark] class YarnClientSchedulerBackend(
     asyncMonitorApplication()
   }

+  /**
+   * Return any extra command line arguments to be passed to Client provided in the form of
+   * environment variables or Spark properties.
+   */
+  private def getExtraClientArguments: Seq[String] = {
+    val extraArgs = new ArrayBuffer[String]
+    val optionTuples = // List of (target Client argument, environment variable, Spark property)
+      List(
+        ("--driver-memory", "SPARK_MASTER_MEMORY", "spark.master.memory"),
+        ("--driver-memory", "SPARK_DRIVER_MEMORY", "spark.driver.memory"),
+        ("--num-executors", "SPARK_WORKER_INSTANCES", "spark.executor.instances"),
+        ("--num-executors", "SPARK_EXECUTOR_INSTANCES", "spark.executor.instances"),
+        ("--executor-memory", "SPARK_WORKER_MEMORY", "spark.executor.memory"),
+        ("--executor-memory", "SPARK_EXECUTOR_MEMORY", "spark.executor.memory"),
+        ("--executor-cores", "SPARK_WORKER_CORES", "spark.executor.cores"),
+        ("--executor-cores", "SPARK_EXECUTOR_CORES", "spark.executor.cores"),
+        ("--queue", "SPARK_YARN_QUEUE", "spark.yarn.queue"),
+        ("--name", "SPARK_YARN_APP_NAME", "spark.app.name")
+      )
+    optionTuples.foreach { case (optionName, envVar, sparkProp) =>
+      if (System.getenv(envVar) != null) {
+        extraArgs += (optionName, System.getenv(envVar))
+      } else if (sc.getConf.contains(sparkProp)) {
+        extraArgs += (optionName, sc.getConf.get(sparkProp))
+      }
+    }
+    extraArgs
+  }
+
   /**
    * Report the state of the application until it is running.
    * If the application has finished, failed or been killed in the process, throw an exception.
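
One behavioral detail of getExtraClientArguments worth noting: the environment variable is checked first, so when both are set, the environment variable wins over the corresponding Spark property. Below is a minimal, standalone sketch of that precedence; it uses a plain Map in place of sc.getConf, and the property values are hypothetical:

import scala.collection.mutable.ArrayBuffer

// Standalone sketch of the precedence in getExtraClientArguments.
// `sparkProps` stands in for sc.getConf; values here are hypothetical.
object PrecedenceSketch {
  def main(args: Array[String]): Unit = {
    val sparkProps = Map("spark.executor.memory" -> "2g")
    val optionTuples = List(
      ("--executor-memory", "SPARK_EXECUTOR_MEMORY", "spark.executor.memory"))

    val extraArgs = new ArrayBuffer[String]
    optionTuples.foreach { case (optionName, envVar, sparkProp) =>
      // An environment variable, if set, takes precedence over the Spark property.
      Option(System.getenv(envVar)) match {
        case Some(value) => extraArgs ++= Seq(optionName, value)
        case None if sparkProps.contains(sparkProp) =>
          extraArgs ++= Seq(optionName, sparkProps(sparkProp))
        case None =>
      }
    }
    // With SPARK_EXECUTOR_MEMORY unset, prints: --executor-memory 2g
    println(extraArgs.mkString(" "))
  }
}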
