Skip to content

Commit 85a424a

Browse files
committed
Incorporate more review feedback.
1 parent 372d0d3 commit 85a424a

File tree

1 file changed: +25 additions, -17 deletions

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 25 additions & 17 deletions
Original file line number | Diff line number | Diff line change
@@ -66,8 +66,15 @@ import org.apache.spark.util._
6666

6767
class SparkContext(config: SparkConf) extends SparkStatusAPI with Logging {
6868

69+
// In order to prevent multiple SparkContexts from being active at the same time, mark this
70+
// context as having started construction
6971
SparkContext.markPartiallyConstructed(this, config)
7072

73+
/**
74+
* The call site where this SparkContext was constructed.
75+
*/
76+
private val creationSite: CallSite = Utils.getCallSite()
77+
7178
// This is used only by YARN for now, but should be relevant to other cluster types (Mesos,
7279
// etc) too. This is typically generated from InputFormatInfo.computePreferredLocations. It
7380
// contains a map from hostname to a list of input format splits on the host.
@@ -1194,7 +1201,7 @@ class SparkContext(config: SparkConf) extends SparkStatusAPI with Logging {
11941201
if (dagScheduler == null) {
11951202
throw new SparkException("SparkContext has been shutdown")
11961203
}
1197-
val callSite = Utils.getCallSite()
1204+
val callSite = getCallSite
11981205
val cleanedFunc = clean(func)
11991206
logInfo("Starting job: " + callSite.shortForm)
12001207
dagScheduler.runJob(rdd, cleanedFunc, partitions, callSite, allowLocal,
@@ -1418,7 +1425,9 @@ class SparkContext(config: SparkConf) extends SparkStatusAPI with Logging {
14181425
persistentRdds.clearOldValues(cleanupTime)
14191426
}
14201427

1421-
SparkContext.markFullyConstructed(this, config)
1428+
// In order to prevent multiple SparkContexts from being active at the same time, mark this
1429+
// context as having finished construction
1430+
SparkContext.setActiveContext(this, config)
14221431
}
14231432

14241433
/**
@@ -1430,23 +1439,22 @@ object SparkContext extends Logging {
14301439
/**
14311440
* Lock that guards access to global variables that track SparkContext construction.
14321441
*/
1433-
private[spark] val SPARK_CONTEXT_CONSTRUCTOR_LOCK = new Object()
1442+
private val SPARK_CONTEXT_CONSTRUCTOR_LOCK = new Object()
14341443

14351444
/**
1436-
* Records the creation site of the active, fully-constructed SparkContext. If no SparkContext
1437-
* is active, then this is `None`.
1445+
* The active, fully-constructed SparkContext. If no SparkContext is active, then this is `None`.
14381446
*
14391447
* Access to this field is guarded by SPARK_CONTEXT_CONSTRUCTOR_LOCK
14401448
*/
1441-
private[spark] var activeContextCreationSite: Option[CallSite] = None
1449+
private var activeContext: Option[SparkContext] = None
14421450

14431451
/**
14441452
* Points to a partially-constructed SparkContext if some thread is in the SparkContext
14451453
* constructor, or `None` if no SparkContext is being constructed.
14461454
*
14471455
* Access to this field is guarded by SPARK_CONTEXT_CONSTRUCTOR_LOCK
14481456
*/
1449-
private[spark] var contextBeingConstructed: Option[SparkContext] = None
1457+
private var contextBeingConstructed: Option[SparkContext] = None
14501458

14511459
/**
14521460
* Called to ensure that no other SparkContext is running in this JVM.
@@ -1456,20 +1464,20 @@ object SparkContext extends Logging {
14561464
* prevents us from reliably distinguishing between cases where another context is being
14571465
* constructed and cases where another constructor threw an exception.
14581466
*/
1459-
private def assertNoOtherContextIsRunning(sc: SparkContext, conf: SparkConf) {
1467+
private def assertNoOtherContextIsRunning(sc: SparkContext, conf: SparkConf): Unit = {
14601468
SPARK_CONTEXT_CONSTRUCTOR_LOCK.synchronized {
14611469
contextBeingConstructed.foreach { otherContext =>
1462-
if (otherContext ne sc) {
1470+
if (otherContext ne sc) { // checks for reference equality
14631471
val warnMsg = "Another SparkContext is being constructed (or threw an exception in its" +
14641472
" constructor). This may indicate an error, since only one SparkContext may be" +
14651473
" running in this JVM (see SPARK-2243)."
14661474
logWarning(warnMsg)
14671475
}
14681476

1469-
activeContextCreationSite.foreach { creationSite =>
1477+
activeContext.foreach { ctx =>
14701478
val errMsg = "Only one SparkContext may be running in this JVM (see SPARK-2243)." +
14711479
" To ignore this error, set spark.driver.allowMultipleContexts = true. " +
1472-
s"The currently running SparkContext was created at:\n${creationSite.longForm}"
1480+
s"The currently running SparkContext was created at:\n${ctx.creationSite.longForm}"
14731481
val exception = new SparkException(errMsg)
14741482
if (conf.getBoolean("spark.driver.allowMultipleContexts", false)) {
14751483
logWarning("Multiple running SparkContexts detected in the same JVM!", exception)
@@ -1488,7 +1496,7 @@ object SparkContext extends Logging {
14881496
* scheme prevents us from reliably distinguishing between cases where another context is being
14891497
* constructed and cases where another constructor threw an exception.
14901498
*/
1491-
private[spark] def markPartiallyConstructed(sc: SparkContext, conf: SparkConf) {
1499+
private[spark] def markPartiallyConstructed(sc: SparkContext, conf: SparkConf): Unit = {
14921500
SPARK_CONTEXT_CONSTRUCTOR_LOCK.synchronized {
14931501
assertNoOtherContextIsRunning(sc, conf)
14941502
contextBeingConstructed = Some(sc)
@@ -1499,22 +1507,22 @@ object SparkContext extends Logging {
14991507
* Called at the end of the SparkContext constructor to ensure that no other SparkContext has
15001508
* raced with this constructor and started.
15011509
*/
1502-
private[spark] def markFullyConstructed(sc: SparkContext, conf: SparkConf) {
1510+
private[spark] def setActiveContext(sc: SparkContext, conf: SparkConf): Unit = {
15031511
SPARK_CONTEXT_CONSTRUCTOR_LOCK.synchronized {
15041512
assertNoOtherContextIsRunning(sc, conf)
15051513
contextBeingConstructed = None
1506-
activeContextCreationSite = Some(Utils.getCallSite())
1514+
activeContext = Some(sc)
15071515
}
15081516
}
15091517

15101518
/**
1511-
* Clears the active SparkContext metadata. This is called by `SparkContext.stop()`. It's
1519+
* Clears the active SparkContext metadata. This is called by `SparkContext#stop()`. It's
15121520
* also called in unit tests to prevent a flood of warnings from test suites that don't / can't
15131521
* properly clean up their SparkContexts.
15141522
*/
1515-
private[spark] def clearActiveContext() {
1523+
private[spark] def clearActiveContext(): Unit = {
15161524
SPARK_CONTEXT_CONSTRUCTOR_LOCK.synchronized {
1517-
activeContextCreationSite = None
1525+
activeContext = None
15181526
}
15191527
}
15201528

0 commit comments

Comments
 (0)