From 056d4328832dc7f481f85d08e46d24e846ed1e44 Mon Sep 17 00:00:00 2001 From: Josh Soref Date: Tue, 8 Dec 2020 20:13:49 -0500 Subject: [PATCH] spelling: [API] deprecate spark.launcher.childConectionTimeout in favor of spark.launcher.childConnectionTimeout Signed-off-by: Josh Soref --- docs/core-migration-guide.md | 2 ++ .../org/apache/spark/launcher/LauncherServer.java | 14 +++++++++++++- .../org/apache/spark/launcher/SparkLauncher.java | 12 +++++++++++- 3 files changed, 26 insertions(+), 2 deletions(-) diff --git a/docs/core-migration-guide.md b/docs/core-migration-guide.md index e243b14526814..8cd7bca325ac3 100644 --- a/docs/core-migration-guide.md +++ b/docs/core-migration-guide.md @@ -30,6 +30,8 @@ license: | - Since Spark 3.2, `spark.storage.replication.proactive` is enabled by default which means Spark tries to replenish in case of the loss of cached RDD block replicas due to executor failures. To restore the behavior before Spark 3.2, you can set `spark.storage.replication.proactive` to `false`. +- In Spark 3.2, the misspelled configuration name `spark.launcher.childConectionTimeout` is deprecated, though it is still honored. Use `spark.launcher.childConnectionTimeout` instead. + ## Upgrading from Core 3.0 to 3.1 - In Spark 3.0 and below, `SparkContext` can be created in executors. Since Spark 3.1, an exception will be thrown when creating `SparkContext` in executors. You can allow it by setting the configuration `spark.executor.allowSparkContext` when creating `SparkContext` in executors. 
diff --git a/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java b/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java index d5a277ba581a0..125205f416d35 100644 --- a/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java +++ b/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java @@ -263,7 +263,19 @@ public void run() { private long getConnectionTimeout() { String value = SparkLauncher.launcherConfig.get(SparkLauncher.CHILD_CONNECTION_TIMEOUT); - return (value != null) ? Long.parseLong(value) : DEFAULT_CONNECT_TIMEOUT; + if (value != null) { + return Long.parseLong(value); + } + + value = SparkLauncher.launcherConfig.get(SparkLauncher.DEPRECATED_CHILD_CONNECTION_TIMEOUT); + if (value != null) { + LOG.log(Level.WARNING, + "Property '" + SparkLauncher.DEPRECATED_CHILD_CONNECTION_TIMEOUT + + "' is deprecated, please switch to '" + SparkLauncher.CHILD_CONNECTION_TIMEOUT + + "'."); + return Long.parseLong(value); + } + return DEFAULT_CONNECT_TIMEOUT; } private String createSecret() { diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java b/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java index 32844104c1deb..12febc5441bd6 100644 --- a/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java +++ b/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java @@ -89,11 +89,21 @@ public class SparkLauncher extends AbstractLauncher { */ public static final String NO_RESOURCE = "spark-internal"; + /** + * Maximum time (in ms) to wait for a child process to connect back to the launcher server + * when using @link{#start()}. + * + * @deprecated use `CHILD_CONNECTION_TIMEOUT` + * @since 1.6.0 + */ + public static final String DEPRECATED_CHILD_CONNECTION_TIMEOUT = + "spark.launcher.childConectionTimeout"; + /** * Maximum time (in ms) to wait for a child process to connect back to the launcher server * when using @link{#start()}. 
*/ - public static final String CHILD_CONNECTION_TIMEOUT = "spark.launcher.childConectionTimeout"; + public static final String CHILD_CONNECTION_TIMEOUT = "spark.launcher.childConnectionTimeout"; /** Used internally to create unique logger names. */ private static final AtomicInteger COUNTER = new AtomicInteger();