
Commit 7473726
Spark shell exits if it cannot create SparkContext
Mainly, this occurs if you provide a malformed MASTER URL (one that doesn't match any of our regexes). Previously, we would default to Mesos, fail, and then start the shell anyway, except that any Spark command would fail.
1 parent fc26e5b
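For context, `SparkContext` picks a scheduler backend by pattern-matching the master URL against a set of regexes. Below is a minimal sketch of the before/after control flow, with simplified stand-in regexes and return values (not the actual Spark source):

```scala
// Simplified stand-in for the master-URL dispatch this commit tightens.
val SPARK_REGEX = """spark://(.*)""".r
val MESOS_REGEX = """mesos://(.*)""".r

def pickSchedulerOld(master: String): String = master match {
  case SPARK_REGEX(url) => s"standalone scheduler for $url"
  case _ =>
    // Old behavior: anything unrecognized fell through to the Mesos branch.
    // A typo'd URL only produced a warning, followed by a shell whose every
    // command would later fail.
    if (MESOS_REGEX.findFirstIn(master).isEmpty)
      println(s"WARN: Master $master does not match expected format, parsing as Mesos URL")
    s"mesos scheduler for ${master.replaceFirst("^mesos://", "")}"
}

def pickSchedulerNew(master: String): String = master match {
  case SPARK_REGEX(url) => s"standalone scheduler for $url"
  case MESOS_REGEX(url) => s"mesos scheduler for $url"
  case _ =>
    // New behavior: fail fast with a clear error instead of guessing Mesos.
    throw new IllegalArgumentException(s"Could not parse Master URL: '$master'")
}
```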

2 files changed: +14 -8 lines changed

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 6 additions & 7 deletions
```diff
@@ -217,21 +217,20 @@ class SparkContext(
         scheduler.initialize(backend)
         scheduler
 
-      case _ =>
-        if (MESOS_REGEX.findFirstIn(master).isEmpty) {
-          logWarning("Master %s does not match expected format, parsing as Mesos URL".format(master))
-        }
+      case MESOS_REGEX(mesosUrl) =>
         MesosNativeLibrary.load()
         val scheduler = new ClusterScheduler(this)
         val coarseGrained = System.getProperty("spark.mesos.coarse", "false").toBoolean
-        val masterWithoutProtocol = master.replaceFirst("^mesos://", "") // Strip initial mesos://
         val backend = if (coarseGrained) {
-          new CoarseMesosSchedulerBackend(scheduler, this, masterWithoutProtocol, appName)
+          new CoarseMesosSchedulerBackend(scheduler, this, mesosUrl, appName)
         } else {
-          new MesosSchedulerBackend(scheduler, this, masterWithoutProtocol, appName)
+          new MesosSchedulerBackend(scheduler, this, mesosUrl, appName)
         }
         scheduler.initialize(backend)
         scheduler
+
+      case _ =>
+        throw new SparkException("Could not parse Master URL: '" + master + "'")
     }
   }
   taskScheduler.start()
```
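The key idiom in this hunk: a Scala `Regex` with a capture group can be used directly as a pattern-match extractor, so `case MESOS_REGEX(mesosUrl)` both validates the URL and binds the text after `mesos://`. That is why the manual `replaceFirst` strip is deleted. A quick self-contained illustration (the regex shape is an assumption, not copied from the source):

```scala
// A Regex with a capture group doubles as an extractor in pattern matches.
val MESOS_REGEX = """mesos://(.*)""".r

"mesos://host:5050" match {
  case MESOS_REGEX(mesosUrl) =>
    // mesosUrl == "host:5050" -- the scheme is stripped by the capture group,
    // making an explicit master.replaceFirst("^mesos://", "") redundant.
    println(mesosUrl)
  case other =>
    println(s"not a mesos:// URL: $other")
}
```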

repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala

Lines changed: 8 additions & 1 deletion
```diff
@@ -845,7 +845,14 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
     val jars = Option(System.getenv("ADD_JARS")).map(_.split(','))
       .getOrElse(new Array[String](0))
       .map(new java.io.File(_).getAbsolutePath)
-    sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+    try {
+      sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+    } catch {
+      case e: Exception =>
+        e.printStackTrace()
+        echo("Failed to create SparkContext, exiting...")
+        sys.exit(1)
+    }
     sparkContext
   }
 
```
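On the REPL side, context creation is now wrapped so that the new `SparkException` (or any other construction failure) terminates the shell rather than leaving it running without a usable context. A standalone sketch of the same fail-fast pattern, with hypothetical names in place of the REPL plumbing:

```scala
// Fail-fast sketch: if the session's core object cannot be constructed,
// print the error and exit instead of starting a shell that cannot work.
object FailFastShell {
  // Hypothetical stand-in for new SparkContext(...): throws on a bad URL.
  def createContext(master: String): String =
    if (master.startsWith("spark://")) s"context for $master"
    else throw new IllegalArgumentException(s"Could not parse Master URL: '$master'")

  def main(args: Array[String]): Unit = {
    val context =
      try createContext(args.headOption.getOrElse("bogus-url"))
      catch {
        case e: Exception =>
          e.printStackTrace()
          Console.err.println("Failed to create context, exiting...")
          sys.exit(1)
      }
    println(s"Shell ready with $context")  // only reached on success
  }
}
```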
