Skip to content

Commit ca7846d

Browse files
committed
Add more specific configuration for executor Spark home in Mesos
1 parent 7557c4c commit ca7846d

File tree

2 files changed

+12
-8
lines changed

2 files changed

+12
-8
lines changed

core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -71,9 +71,11 @@ private[spark] class CoarseMesosSchedulerBackend(
   val taskIdToSlaveId = new HashMap[Int, String]
   val failuresBySlaveId = new HashMap[String, Int] // How many times tasks on each slave failed

-  val sparkHome = sc.getSparkHome().getOrElse(throw new SparkException(
-    "Spark home is not set; set it through the spark.home system " +
-    "property, the SPARK_HOME environment variable or the SparkContext constructor"))
+  val executorSparkHome = conf.getOption("spark.mesos.executor.home")
+    .orElse(sc.getSparkHome())
+    .getOrElse {
+      throw new SparkException("Executor Spark home `spark.mesos.executor.home` is not set!")
+    }

   val extraCoresPerSlave = conf.getInt("spark.mesos.extra.cores", 0)

@@ -138,7 +140,7 @@ private[spark] class CoarseMesosSchedulerBackend(

     val uri = conf.get("spark.executor.uri", null)
     if (uri == null) {
-      val runScript = new File(sparkHome, "./bin/spark-class").getCanonicalPath
+      val runScript = new File(executorSparkHome, "./bin/spark-class").getCanonicalPath
       command.setValue(
         "\"%s\" org.apache.spark.executor.CoarseGrainedExecutorBackend %s %s %s %s %d".format(
           runScript, extraOpts, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))

core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -86,9 +86,11 @@ private[spark] class MesosSchedulerBackend(
   }

   def createExecutorInfo(execId: String): ExecutorInfo = {
-    val sparkHome = sc.getSparkHome().getOrElse(throw new SparkException(
-      "Spark home is not set; set it through the spark.home system " +
-      "property, the SPARK_HOME environment variable or the SparkContext constructor"))
+    val executorSparkHome = sc.conf.getOption("spark.mesos.executor.home")
+      .orElse(sc.getSparkHome()) // Fall back to driver Spark home for backward compatibility
+      .getOrElse {
+        throw new SparkException("Executor Spark home `spark.mesos.executor.home` is not set!")
+      }
     val environment = Environment.newBuilder()
     sc.executorEnvs.foreach { case (key, value) =>
       environment.addVariables(Environment.Variable.newBuilder()
@@ -100,7 +102,7 @@ private[spark] class MesosSchedulerBackend(
       .setEnvironment(environment)
     val uri = sc.conf.get("spark.executor.uri", null)
     if (uri == null) {
-      command.setValue(new File(sparkHome, "/sbin/spark-executor").getCanonicalPath)
+      command.setValue(new File(executorSparkHome, "/sbin/spark-executor").getCanonicalPath)
     } else {
       // Grab everything to the first '.'. We'll use that and '*' to
       // glob the directory "correctly".

0 commit comments

Comments (0)