diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CheckConnectJvmClientCompatibility.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CheckConnectJvmClientCompatibility.scala index 4edea00cd511..f990e28ca2c1 100644 --- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CheckConnectJvmClientCompatibility.scala +++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CheckConnectJvmClientCompatibility.scala @@ -199,6 +199,7 @@ object CheckConnectJvmClientCompatibility { ProblemFilters.exclude[Problem]("org.apache.spark.sql.SparkSession.executeCommand"), ProblemFilters.exclude[Problem]("org.apache.spark.sql.SparkSession.readStream"), ProblemFilters.exclude[Problem]("org.apache.spark.sql.SparkSession.this"), + ProblemFilters.exclude[Problem]("org.apache.spark.sql.SparkSession.stop"), // RuntimeConfig ProblemFilters.exclude[Problem]("org.apache.spark.sql.RuntimeConfig.this"), diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectServer.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectServer.scala index df28df59fa2a..d4119d87793d 100644 --- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectServer.scala +++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectServer.scala @@ -28,6 +28,7 @@ object SparkConnectServer extends Logging { // Set the active Spark Session, and starts SparkEnv instance (via Spark Context) logInfo("Starting Spark session.") val session = SparkSession.builder.getOrCreate() + var exitCode = 0 try { try { SparkConnectService.start() @@ -35,11 +36,12 @@ object SparkConnectServer extends Logging { } catch { case e: Exception => logError("Error starting Spark Connect server", e) - System.exit(-1) + exitCode = -1 + System.exit(exitCode) } 
SparkConnectService.server.awaitTermination() } finally { - session.stop() + session.stop(exitCode) } } } diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala index 66215d050332..6728c1f55960 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala @@ -753,7 +753,7 @@ class SparkSession private( // scalastyle:on /** - * Stop the underlying `SparkContext`. + * Stop the underlying `SparkContext` with default exit code 0. * * @since 2.0.0 */ @@ -761,6 +761,15 @@ class SparkSession private( sparkContext.stop() } + /** + * Stop the underlying `SparkContext` with exit code that will be passed to scheduler backend. + * + * @since 3.5.0 + */ + def stop(exitCode: Int): Unit = { + sparkContext.stop(exitCode) + } + /** * Synonym for `stop()`. *