diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index e1d616b9b83c..72ca6aef72d4 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -1029,7 +1029,8 @@ private[spark] class SparkSubmit extends Logging {
     } finally {
       if (args.master.startsWith("k8s") && !isShell(args.primaryResource) &&
           !isSqlShell(args.mainClass) && !isThriftServer(args.mainClass) &&
-          !isConnectServer(args.mainClass)) {
+          !isConnectServer(args.mainClass) &&
+          sparkConf.getBoolean("spark.kubernetes.submit.autoStopActiveSparkContexts", true)) {
         try {
           SparkContext.getActive.foreach(_.stop())
         } catch {
diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
index 042e96827304..ad1c8f95ef76 100644
--- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
+++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
@@ -317,6 +317,14 @@ private[spark] object Config extends Logging {
     .stringConf
     .createOptional
 
+  val AUTO_STOP_ACTIVE_SPARK_CONTEXTS =
+    ConfigBuilder("spark.kubernetes.submit.autoStopActiveSparkContexts")
+      .version("3.5.0")
+      .doc("When set to true, Spark on Kubernetes will stop all active Spark contexts once " +
+        "non-shell applications' main methods are finished.")
+      .booleanConf
+      .createWithDefault(true)
+
   val KUBERNETES_SCHEDULER_NAME =
     ConfigBuilder("spark.kubernetes.scheduler.name")
       .doc("Specify the scheduler name for driver and executor pods. If " +