
Commit 855256e

Fix standalone-cluster mode
The problem was that Spark properties were not propagated to the driver. The fix is simple: pass the properties as part of the driver description, so that the command that launches the driver sets them as Java system properties, which the driver's SparkConf then loads.
1 parent fd9da51 commit 855256e
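In effect, every spark.* entry in the submitting client's SparkConf now rides along inside the DriverDescription and is turned into a -Dspark.*=... flag on the worker that launches the driver; a SparkConf created inside that driver then reads the values back from the JVM's system properties. Below is a minimal sketch of that round trip (a standalone illustration only, not the actual Spark code path; property names and values are examples, and spark-core must be on the classpath):

  // Sketch: how Spark properties survive the hop into a separately launched driver JVM.
  object PropsRoundTripSketch {
    def main(args: Array[String]): Unit = {
      // 1. On the client, the submitted configuration is captured as a plain map.
      val props = Map("spark.executor.memory" -> "2g", "spark.app.name" -> "demo")

      // 2. The worker turns each entry into a JVM system-property flag when it
      //    builds the driver's launch command.
      val sparkOpts = props.map { case (k, v) => s"-D$k=$v" }
      println(sparkOpts.mkString(" "))  // -Dspark.executor.memory=2g -Dspark.app.name=demo

      // 3. Inside the launched driver those flags are system properties; a SparkConf
      //    with loadDefaults = true reads every spark.* system property back in.
      props.foreach { case (k, v) => sys.props(k) = v }  // simulate the freshly started JVM
      val conf = new org.apache.spark.SparkConf(true)
      assert(conf.get("spark.executor.memory") == "2g")
    }
  }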

File tree: 11 files changed (+18, −17 lines)


core/src/main/scala/org/apache/spark/deploy/Client.scala

Lines changed: 3 additions & 5 deletions
@@ -17,8 +17,6 @@
 
 package org.apache.spark.deploy
 
-import scala.collection.JavaConversions._
-import scala.collection.mutable.Map
 import scala.concurrent._
 
 import akka.actor._
@@ -50,8 +48,8 @@ private class ClientActor(driverArgs: ClientArguments, conf: SparkConf) extends
       // TODO: We could add an env variable here and intercept it in `sc.addJar` that would
       // truncate filesystem paths similar to what YARN does. For now, we just require
       // people call `addJar` assuming the jar is in the same directory.
-      val env = Map[String, String]()
-      System.getenv().foreach { case (k, v) => env(k) = v }
+      val env = sys.env
+      val props = conf.getAll.toMap
 
       val mainClass = "org.apache.spark.deploy.worker.DriverWrapper"
 
@@ -68,7 +66,7 @@ private class ClientActor(driverArgs: ClientArguments, conf: SparkConf) extends
       val javaOptionsConf = "spark.driver.extraJavaOptions"
      val javaOpts = sys.props.get(javaOptionsConf)
      val command = new Command(mainClass, Seq("{{WORKER_URL}}", driverArgs.mainClass) ++
-        driverArgs.driverOptions, env, classPathEntries, libraryPathEntries, javaOpts)
+        driverArgs.driverOptions, env, props, classPathEntries, libraryPathEntries, javaOpts)
 
       val driverDescription = new DriverDescription(
         driverArgs.jarUrl,

core/src/main/scala/org/apache/spark/deploy/Command.scala

Lines changed: 1 addition & 0 deletions
@@ -23,6 +23,7 @@ private[spark] case class Command(
     mainClass: String,
     arguments: Seq[String],
     environment: Map[String, String],
+    sparkProps: Map[String, String],
     classPathEntries: Seq[String],
     libraryPathEntries: Seq[String],
     extraJavaOptions: Option[String] = None) {
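Since Command is a case class, every construction site has to supply the new field. A hedged sketch of what a call looks like after this change (Command is private[spark], so this only compiles from inside Spark itself; all argument values below are made up):

  import org.apache.spark.deploy.Command

  val cmd = Command(
    mainClass = "org.apache.spark.deploy.worker.DriverWrapper",
    arguments = Seq("{{WORKER_URL}}", "com.example.MyApp"),   // hypothetical user class
    environment = sys.env,
    sparkProps = Map("spark.executor.memory" -> "2g"),        // example Spark property
    classPathEntries = Seq(),
    libraryPathEntries = Seq(),
    extraJavaOptions = Some("-verbose:gc"))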

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 0 additions & 2 deletions
@@ -132,8 +132,6 @@ object SparkSubmit {
     (clusterManager, deployMode) match {
       case (MESOS, CLUSTER) =>
         printErrorAndExit("Cluster deploy mode is currently not supported for Mesos clusters.")
-      case (STANDALONE, CLUSTER) =>
-        printErrorAndExit("Cluster deploy mode is currently not supported for Standalone clusters.")
       case (_, CLUSTER) if args.isPython =>
         printErrorAndExit("Cluster deploy mode is currently not supported for python applications.")
       case (_, CLUSTER) if isShell(args.primaryResource) =>

core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala

Lines changed: 2 additions & 2 deletions
@@ -49,8 +49,8 @@ private[spark] object TestClient {
     val (actorSystem, port) = AkkaUtils.createActorSystem("spark", Utils.localIpAddress, 0,
       conf = conf, securityManager = new SecurityManager(conf))
     val desc = new ApplicationDescription(
-      "TestClient", Some(1), 512, Command("spark.deploy.client.TestExecutor", Seq(), Map(), Seq(),
-      Seq()), Some("dummy-spark-home"), "ignored")
+      "TestClient", Some(1), 512, Command("spark.deploy.client.TestExecutor", Seq(), Map(), Map(),
+      Seq(), Seq()), Some("dummy-spark-home"), "ignored")
     val listener = new TestListener
     val client = new AppClient(actorSystem, Array(url), desc, listener, new SparkConf)
     client.start()

core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala

Lines changed: 4 additions & 1 deletion
@@ -67,6 +67,9 @@ object CommandUtils extends Logging {
 
     val permGenOpt = Seq("-XX:MaxPermSize=128m")
 
+    // Convert Spark properties to java system properties
+    val sparkOpts = command.sparkProps.map { case (k, v) => s"-D$k=$v" }
+
     // Figure out our classpath with the external compute-classpath script
     val ext = if (System.getProperty("os.name").startsWith("Windows")) ".cmd" else ".sh"
     val classPath = Utils.executeAndGetOutput(
@@ -75,7 +78,7 @@
     val userClassPath = command.classPathEntries ++ Seq(classPath)
 
     Seq("-cp", userClassPath.filterNot(_.isEmpty).mkString(File.pathSeparator)) ++
-      permGenOpt ++ libraryOpts ++ extraOpts ++ workerLocalOpts ++ memoryOpts
+      sparkOpts ++ permGenOpt ++ libraryOpts ++ extraOpts ++ workerLocalOpts ++ memoryOpts
   }
 
   /** Spawn a thread that will redirect a given stream to a file */
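The new sparkOpts entries are spliced in ahead of the other JVM options, so the launched process sees the Spark properties directly on its java command line. A rough sketch of the shape of the returned option list under that reading (all paths and values below are illustrative, not taken from the commit; the real method also folds in library paths, extraJavaOptions and worker-local options):

  // Approximate shape of what the options builder returns after this change.
  val userClassPath = Seq("/opt/spark/conf", "/opt/spark/lib/spark-assembly.jar") // illustrative
  val sparkOpts     = Seq("-Dspark.cores.max=4")                                  // from command.sparkProps
  val permGenOpt    = Seq("-XX:MaxPermSize=128m")
  val memoryOpts    = Seq("-Xms512M", "-Xmx512M")                                 // from the requested memory
  val javaOpts = Seq("-cp", userClassPath.mkString(java.io.File.pathSeparator)) ++
    sparkOpts ++ permGenOpt ++ memoryOpts
  // javaOpts: -cp /opt/spark/conf:/opt/spark/lib/spark-assembly.jar
  //           -Dspark.cores.max=4 -XX:MaxPermSize=128m -Xms512M -Xmx512M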

core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala

Lines changed: 1 addition & 0 deletions
@@ -79,6 +79,7 @@ private[spark] class DriverRunner(
       driverDesc.command.mainClass,
       driverDesc.command.arguments.map(substituteVariables),
       driverDesc.command.environment,
+      driverDesc.command.sparkProps,
       classPath,
       driverDesc.command.libraryPathEntries,
       driverDesc.command.extraJavaOptions)

core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala

Lines changed: 1 addition & 0 deletions
@@ -118,6 +118,7 @@ private[spark] class ExecutorRunner(
       appDesc.command.mainClass,
       appDesc.command.arguments.map(substituteVariables) ++ Seq(appId),
       appDesc.command.environment,
+      appDesc.command.sparkProps,
       appDesc.command.classPathEntries,
       appDesc.command.libraryPathEntries,
       appDesc.command.extraJavaOptions)

core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala

Lines changed: 2 additions & 3 deletions
@@ -54,9 +54,8 @@ private[spark] class SparkDeploySchedulerBackend(
       cp.split(java.io.File.pathSeparator)
     }
 
-    val command = Command(
-      "org.apache.spark.executor.CoarseGrainedExecutorBackend", args, sc.executorEnvs,
-      classPathEntries, libraryPathEntries, extraJavaOpts)
+    val command = Command("org.apache.spark.executor.CoarseGrainedExecutorBackend",
+      args, sc.executorEnvs, conf.getAll.toMap, classPathEntries, libraryPathEntries, extraJavaOpts)
     val sparkHome = sc.getSparkHome()
     val appDesc = new ApplicationDescription(sc.appName, maxCores, sc.executorMemory, command,
       sparkHome, sc.ui.appUIAddress, sc.eventLogger.map(_.logDir))

core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -88,7 +88,7 @@ class JsonProtocolSuite extends FunSuite {
   }
 
   def createAppDesc(): ApplicationDescription = {
-    val cmd = new Command("mainClass", List("arg1", "arg2"), Map(), Seq(), Seq())
+    val cmd = new Command("mainClass", List("arg1", "arg2"), Map(), Map(), Seq(), Seq())
     new ApplicationDescription("name", Some(4), 1234, cmd, Some("sparkHome"), "appUiUrl")
   }
 
@@ -101,7 +101,7 @@
 
   def createDriverCommand() = new Command(
     "org.apache.spark.FakeClass", Seq("some arg --and-some options -g foo"),
-    Map(("K1", "V1"), ("K2", "V2")), Seq("cp1", "cp2"), Seq("lp1", "lp2"), Some("-Dfoo")
+    Map(("K1", "V1"), ("K2", "V2")), Map(), Seq("cp1", "cp2"), Seq("lp1", "lp2"), Some("-Dfoo")
   )
 
   def createDriverDesc() = new DriverDescription("hdfs://some-dir/some.jar", 100, 3,

core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala

Lines changed: 1 addition & 1 deletion
@@ -29,7 +29,7 @@ import org.apache.spark.deploy.{Command, DriverDescription}
 
 class DriverRunnerTest extends FunSuite {
   private def createDriverRunner() = {
-    val command = new Command("mainClass", Seq(), Map(), Seq(), Seq())
+    val command = new Command("mainClass", Seq(), Map(), Map(), Seq(), Seq())
     val driverDescription = new DriverDescription("jarUrl", 512, 1, true, command)
     new DriverRunner("driverId", new File("workDir"), new File("sparkHome"), driverDescription,
       null, "akka://1.2.3.4/worker/")
