Skip to content

Commit 4d49680

Browse files
Michael Gummelt authored and srowen committed
[SPARK-16952] don't lookup spark home directory when executor uri is set
## What changes were proposed in this pull request? remove requirement to set spark.mesos.executor.home when spark.executor.uri is used ## How was this patch tested? unit tests Author: Michael Gummelt <[email protected]> Closes #14552 from mgummelt/fix-spark-home.
1 parent 7186e8c commit 4d49680

File tree

2 files changed

+26
-7
lines changed

2 files changed

+26
-7
lines changed

core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -163,11 +163,6 @@ private[spark] class MesosCoarseGrainedSchedulerBackend(
163163
}
164164

165165
def createCommand(offer: Offer, numCores: Int, taskId: String): CommandInfo = {
166-
val executorSparkHome = conf.getOption("spark.mesos.executor.home")
167-
.orElse(sc.getSparkHome())
168-
.getOrElse {
169-
throw new SparkException("Executor Spark home `spark.mesos.executor.home` is not set!")
170-
}
171166
val environment = Environment.newBuilder()
172167
val extraClassPath = conf.getOption("spark.executor.extraClassPath")
173168
extraClassPath.foreach { cp =>
@@ -201,6 +196,11 @@ private[spark] class MesosCoarseGrainedSchedulerBackend(
201196
.orElse(Option(System.getenv("SPARK_EXECUTOR_URI")))
202197

203198
if (uri.isEmpty) {
199+
val executorSparkHome = conf.getOption("spark.mesos.executor.home")
200+
.orElse(sc.getSparkHome())
201+
.getOrElse {
202+
throw new SparkException("Executor Spark home `spark.mesos.executor.home` is not set!")
203+
}
204204
val runScript = new File(executorSparkHome, "./bin/spark-class").getPath
205205
command.setValue(
206206
"%s \"%s\" org.apache.spark.executor.CoarseGrainedExecutorBackend"

core/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackendSuite.scala

Lines changed: 21 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -370,6 +370,21 @@ class MesosCoarseGrainedSchedulerBackendSuite extends SparkFunSuite
370370
verify(driverEndpoint, never()).askWithRetry(isA(classOf[RemoveExecutor]))(any[ClassTag[_]])
371371
}
372372

373+
test("mesos supports spark.executor.uri") {
374+
val url = "spark.spark.spark.com"
375+
setBackend(Map(
376+
"spark.executor.uri" -> url
377+
), false)
378+
379+
val (mem, cpu) = (backend.executorMemory(sc), 4)
380+
381+
val offer1 = createOffer("o1", "s1", mem, cpu)
382+
backend.resourceOffers(driver, List(offer1).asJava)
383+
384+
val launchedTasks = verifyTaskLaunched(driver, "o1")
385+
assert(launchedTasks.head.getCommand.getUrisList.asScala(0).getValue == url)
386+
}
387+
373388
private def verifyDeclinedOffer(driver: SchedulerDriver,
374389
offerId: OfferID,
375390
filter: Boolean = false): Unit = {
@@ -435,13 +450,17 @@ class MesosCoarseGrainedSchedulerBackendSuite extends SparkFunSuite
435450
backend
436451
}
437452

438-
private def setBackend(sparkConfVars: Map[String, String] = null) {
453+
private def setBackend(sparkConfVars: Map[String, String] = null,
454+
setHome: Boolean = true) {
439455
sparkConf = (new SparkConf)
440456
.setMaster("local[*]")
441457
.setAppName("test-mesos-dynamic-alloc")
442-
.setSparkHome("/path")
443458
.set("spark.mesos.driver.webui.url", "http://webui")
444459

460+
if (setHome) {
461+
sparkConf.setSparkHome("/path")
462+
}
463+
445464
if (sparkConfVars != null) {
446465
sparkConf.setAll(sparkConfVars)
447466
}

0 commit comments

Comments (0)