diff --git a/core/src/main/scala/org/apache/spark/deploy/Client.scala b/core/src/main/scala/org/apache/spark/deploy/Client.scala
index 6beea5646f63b..7c5ab43a9e1b3 100644
--- a/core/src/main/scala/org/apache/spark/deploy/Client.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/Client.scala
@@ -64,8 +64,7 @@ private class ClientEndpoint(
   private val lostMasters = new HashSet[RpcAddress]
   private var activeMasterEndpoint: RpcEndpointRef = null
 
-  private val waitAppCompletion = conf.getBoolean("spark.standalone.submit.waitAppCompletion",
-    false)
+  private val waitAppCompletion = conf.get(config.STANDALONE_SUBMIT_WAIT_APP_COMPLETION)
   private val REPORT_DRIVER_STATUS_INTERVAL = 10000
   private var submittedDriverID = ""
   private var driverStatusReported = false
diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala b/core/src/main/scala/org/apache/spark/internal/config/package.scala
index 8ef0c37198568..ee437c696b47e 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/package.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala
@@ -1864,4 +1864,13 @@ package object config {
       .version("3.1.0")
       .booleanConf
       .createWithDefault(false)
+
+  private[spark] val STANDALONE_SUBMIT_WAIT_APP_COMPLETION =
+    ConfigBuilder("spark.standalone.submit.waitAppCompletion")
+      .doc("In standalone cluster mode, controls whether the client waits to exit until the " +
+        "application completes. If set to true, the client process will stay alive polling " +
+        "the driver's status. Otherwise, the client process will exit after submission.")
+      .version("3.1.0")
+      .booleanConf
+      .createWithDefault(false)
 }