diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 7050396e84a8..21131edc6234 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -740,7 +740,6 @@ private[spark] object SparkConf extends Logging {
    */
   def isExecutorStartupConf(name: String): Boolean = {
     (name.startsWith("spark.auth") && name != SecurityManager.SPARK_AUTH_SECRET_CONF) ||
-    name.startsWith("spark.ssl") ||
     name.startsWith("spark.rpc") ||
     name.startsWith("spark.network") ||
     isSparkPortConf(name)
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index 1123191c43c9..02aaf4af6932 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -354,6 +354,17 @@ class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSyst
     }
   }
 
+  test("SPARK-26998: SSL configuration not needed on executors") {
+    val conf = new SparkConf(false)
+    conf.set("spark.ssl.enabled", "true")
+    conf.set("spark.ssl.keyPassword", "password")
+    conf.set("spark.ssl.keyStorePassword", "password")
+    conf.set("spark.ssl.trustStorePassword", "password")
+
+    val filtered = conf.getAll.filter { case (k, _) => SparkConf.isExecutorStartupConf(k) }
+    assert(filtered.isEmpty)
+  }
+
   test("SPARK-27244 toDebugString redacts sensitive information") {
     val conf = new SparkConf(loadDefaults = false)
       .set("dummy.password", "dummy-password")