diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 9ecf316beeaa1..78f509c670839 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -83,7 +83,7 @@ class SparkContext(config: SparkConf) extends Logging {
   // The call site where this SparkContext was constructed.
   private val creationSite: CallSite = Utils.getCallSite()
 
-  if (!config.get(ALLOW_SPARK_CONTEXT_IN_EXECUTORS)) {
+  if (!config.get(EXECUTOR_ALLOW_SPARK_CONTEXT)) {
     // In order to prevent SparkContext from being created in executors.
     SparkContext.assertOnDriver()
   }
diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala b/core/src/main/scala/org/apache/spark/internal/config/package.scala
index fdc9253ce9b02..200cde0a2d3ed 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/package.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala
@@ -1909,8 +1909,8 @@ package object config {
       .booleanConf
       .createWithDefault(false)
 
-  private[spark] val ALLOW_SPARK_CONTEXT_IN_EXECUTORS =
-    ConfigBuilder("spark.driver.allowSparkContextInExecutors")
+  private[spark] val EXECUTOR_ALLOW_SPARK_CONTEXT =
+    ConfigBuilder("spark.executor.allowSparkContext")
       .doc("If set to true, SparkContext can be created in executors.")
       .version("3.0.1")
       .booleanConf
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index 1f7aa8eec8942..ebdf2f59a2770 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -952,7 +952,7 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
 
     sc.range(0, 1).foreach { _ =>
       new SparkContext(new SparkConf().setAppName("test").setMaster("local")
-        .set(ALLOW_SPARK_CONTEXT_IN_EXECUTORS, true)).stop()
+        .set(EXECUTOR_ALLOW_SPARK_CONTEXT, true)).stop()
     }
   }
 }
diff --git a/docs/core-migration-guide.md b/docs/core-migration-guide.md
index b2a08502d0d6f..11d3e0019617f 100644
--- a/docs/core-migration-guide.md
+++ b/docs/core-migration-guide.md
@@ -24,7 +24,7 @@ license: |
 
 ## Upgrading from Core 3.0 to 3.1
 
-- In Spark 3.0 and below, `SparkContext` can be created in executors. Since Spark 3.1, an exception will be thrown when creating `SparkContext` in executors. You can allow it by setting the configuration `spark.driver.allowSparkContextInExecutors` when creating `SparkContext` in executors.
+- In Spark 3.0 and below, `SparkContext` can be created in executors. Since Spark 3.1, an exception will be thrown when creating `SparkContext` in executors. You can allow it by setting the configuration `spark.executor.allowSparkContext` when creating `SparkContext` in executors.
 
 ## Upgrading from Core 2.4 to 3.0
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index 0816657692826..55a5657b64055 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -118,7 +118,7 @@ def __init__(self, master=None, appName=None, sparkHome=None, pyFiles=None,
         ValueError:...
         """
         if (conf is None or
-                conf.get("spark.driver.allowSparkContextInExecutors", "false").lower() != "true"):
+                conf.get("spark.executor.allowSparkContext", "false").lower() != "true"):
             # In order to prevent SparkContext from being created in executors.
             SparkContext._assert_on_driver()
diff --git a/python/pyspark/tests/test_context.py b/python/pyspark/tests/test_context.py
index 64fe3837e7697..f398cec344725 100644
--- a/python/pyspark/tests/test_context.py
+++ b/python/pyspark/tests/test_context.py
@@ -279,7 +279,7 @@ def test_allow_to_create_spark_context_in_executors(self):
         # SPARK-32160: SparkContext can be created in executors if the config is set.
 
         def create_spark_context():
-            conf = SparkConf().set("spark.driver.allowSparkContextInExecutors", "true")
+            conf = SparkConf().set("spark.executor.allowSparkContext", "true")
             with SparkContext(conf=conf):
                 pass
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 306c3235b0bc0..e5d53f5fd4c65 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -29,7 +29,7 @@ import org.apache.spark.{SPARK_VERSION, SparkConf, SparkContext, TaskContext}
 import org.apache.spark.annotation.{DeveloperApi, Experimental, Stable, Unstable}
 import org.apache.spark.api.java.JavaRDD
 import org.apache.spark.internal.Logging
-import org.apache.spark.internal.config.ALLOW_SPARK_CONTEXT_IN_EXECUTORS
+import org.apache.spark.internal.config.EXECUTOR_ALLOW_SPARK_CONTEXT
 import org.apache.spark.rdd.RDD
 import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd}
 import org.apache.spark.sql.catalog.Catalog
@@ -904,7 +904,7 @@ object SparkSession extends Logging {
       val sparkConf = new SparkConf()
       options.foreach { case (k, v) => sparkConf.set(k, v) }
 
-      if (!sparkConf.get(ALLOW_SPARK_CONTEXT_IN_EXECUTORS)) {
+      if (!sparkConf.get(EXECUTOR_ALLOW_SPARK_CONTEXT)) {
         assertOnDriver()
       }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
index cc261a9ed3598..9da32d02aa723 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql
 import org.scalatest.BeforeAndAfterEach
 
 import org.apache.spark.{SparkConf, SparkContext, SparkException, SparkFunSuite}
-import org.apache.spark.internal.config.ALLOW_SPARK_CONTEXT_IN_EXECUTORS
+import org.apache.spark.internal.config.EXECUTOR_ALLOW_SPARK_CONTEXT
import org.apache.spark.internal.config.UI.UI_ENABLED
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.StaticSQLConf._
@@ -277,7 +277,7 @@ class SparkSessionBuilderSuite extends SparkFunSuite with BeforeAndAfterEach {
 
     session.range(1).foreach { v =>
       SparkSession.builder.master("local")
-        .config(ALLOW_SPARK_CONTEXT_IN_EXECUTORS.key, true).getOrCreate().stop()
+        .config(EXECUTOR_ALLOW_SPARK_CONTEXT.key, true).getOrCreate().stop()
       ()
     }
   }
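
For reference, here is a minimal end-to-end sketch of the behavior this rename gates. It is not part of the patch: the object name `NestedContextExample` is hypothetical, and the pattern simply mirrors the updated `SparkContextSuite` test above. The outer `SparkContext` runs on the driver with a `local-cluster` master (Spark's test-oriented pseudo-cluster) so the executor gets its own JVM; the inner context is then created inside a task, which since Spark 3.1 throws unless the renamed flag is set.

```scala
import org.apache.spark.{SparkConf, SparkContext}

// Hypothetical example object; mirrors the SPARK-32160 test in SparkContextSuite.
object NestedContextExample {
  def main(args: Array[String]): Unit = {
    // Driver-side context. local-cluster[1, 1, 1024] gives the executor a
    // separate JVM, since only one active SparkContext is allowed per JVM.
    val sc = new SparkContext(
      new SparkConf().setAppName("outer").setMaster("local-cluster[1, 1, 1024]"))
    try {
      sc.range(0, 1).foreach { _ =>
        // Runs on an executor. Since Spark 3.1 this throws unless
        // spark.executor.allowSparkContext (the renamed flag) is true.
        new SparkContext(new SparkConf().setAppName("inner").setMaster("local")
          .set("spark.executor.allowSparkContext", "true")).stop()
      }
    } finally {
      sc.stop()
    }
  }
}
```

The rename itself is the whole substance of the change: the flag gates something that happens on executors, so the key moves from the `spark.driver.*` namespace to `spark.executor.*`. Note that `.version("3.0.1")` is deliberately left unchanged, since the config (under either name) first shipped in that release.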