Skip to content

Commit d38251b

Browse files
committed
Address latest round of feedback.
1 parent c0987d3 commit d38251b

File tree

2 files changed

+12
-7
lines changed

2 files changed

+12
-7
lines changed

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -58,12 +58,11 @@ import org.apache.spark.util._
5858
* cluster, and can be used to create RDDs, accumulators and broadcast variables on that cluster.
5959
*
6060
* Only one SparkContext may be active per JVM. You must `stop()` the active SparkContext before
61-
* creating a new one. This limitation will eventually be removed; see SPARK-2243 for more details.
61+
* creating a new one. This limitation may eventually be removed; see SPARK-2243 for more details.
6262
*
6363
* @param config a Spark Config object describing the application configuration. Any settings in
6464
* this config overrides the default configs as well as system properties.
6565
*/
66-
6766
class SparkContext(config: SparkConf) extends SparkStatusAPI with Logging {
6867

6968
// The call site where this SparkContext was constructed.
@@ -73,9 +72,9 @@ class SparkContext(config: SparkConf) extends SparkStatusAPI with Logging {
7372
private val allowMultipleContexts: Boolean =
7473
config.getBoolean("spark.driver.allowMultipleContexts", false)
7574

76-
7775
// In order to prevent multiple SparkContexts from being active at the same time, mark this
78-
// context as having started construction
76+
// context as having started construction.
77+
// NOTE: this must be placed at the beginning of the SparkContext constructor.
7978
SparkContext.markPartiallyConstructed(this, allowMultipleContexts)
8079

8180
// This is used only by YARN for now, but should be relevant to other cluster types (Mesos,
@@ -1429,7 +1428,8 @@ class SparkContext(config: SparkConf) extends SparkStatusAPI with Logging {
14291428
}
14301429

14311430
// In order to prevent multiple SparkContexts from being active at the same time, mark this
1432-
// context as having finished construction
1431+
// context as having finished construction.
1432+
// NOTE: this must be placed at the end of the SparkContext constructor.
14331433
SparkContext.setActiveContext(this, allowMultipleContexts)
14341434
}
14351435

@@ -1473,9 +1473,14 @@ object SparkContext extends Logging {
14731473
SPARK_CONTEXT_CONSTRUCTOR_LOCK.synchronized {
14741474
contextBeingConstructed.foreach { otherContext =>
14751475
if (otherContext ne sc) { // checks for reference equality
1476+
// Since otherContext might point to a partially-constructed context, guard against
1477+
// its creationSite field being null:
1478+
val otherContextCreationSite =
1479+
Option(otherContext.creationSite).map(_.longForm).getOrElse("unknown location")
14761480
val warnMsg = "Another SparkContext is being constructed (or threw an exception in its" +
14771481
" constructor). This may indicate an error, since only one SparkContext may be" +
1478-
" running in this JVM (see SPARK-2243)."
1482+
" running in this JVM (see SPARK-2243)." +
1483+
s" The other SparkContext was created at:\n$otherContextCreationSite"
14791484
logWarning(warnMsg)
14801485
}
14811486

core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ import org.apache.spark.rdd.{EmptyRDD, HadoopRDD, NewHadoopRDD, RDD}
4444
* [[org.apache.spark.api.java.JavaRDD]]s and works with Java collections instead of Scala ones.
4545
*
4646
* Only one SparkContext may be active per JVM. You must `stop()` the active SparkContext before
47-
* creating a new one. This limitation will eventually be removed; see SPARK-2243 for more details.
47+
* creating a new one. This limitation may eventually be removed; see SPARK-2243 for more details.
4848
*/
4949
class JavaSparkContext(val sc: SparkContext)
5050
extends JavaSparkContextVarargsWorkaround with Closeable {

0 commit comments

Comments (0)