Skip to content

Commit 575152b

Browse files
committed
Using Hadoop credential provider API to store password
Change-Id: Ie774eeb9376f8b5d7379f1976826e12e9c529be3
1 parent 3e5b4ae commit 575152b

File tree

3 files changed

+28
-10
lines changed

3 files changed

+28
-10
lines changed

core/src/main/scala/org/apache/spark/SSLOptions.scala

Lines changed: 10 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -21,6 +21,7 @@ import java.io.File
 import java.security.NoSuchAlgorithmException
 import javax.net.ssl.SSLContext
 
+import org.apache.hadoop.conf.Configuration
 import org.eclipse.jetty.util.ssl.SslContextFactory
 
 import org.apache.spark.internal.Logging
@@ -163,11 +164,16 @@ private[spark] object SSLOptions extends Logging {
    * missing in SparkConf, the corresponding setting is used from the default configuration.
    *
    * @param conf Spark configuration object where the settings are collected from
+   * @param hadoopConf Hadoop configuration to get settings
    * @param ns the namespace name
    * @param defaults the default configuration
    * @return [[org.apache.spark.SSLOptions]] object
    */
-  def parse(conf: SparkConf, ns: String, defaults: Option[SSLOptions] = None): SSLOptions = {
+  def parse(
+      conf: SparkConf,
+      hadoopConf: Configuration,
+      ns: String,
+      defaults: Option[SSLOptions] = None): SSLOptions = {
    val enabled = conf.getBoolean(s"$ns.enabled", defaultValue = defaults.exists(_.enabled))
 
    val port = conf.getWithSubstitution(s"$ns.port").map(_.toInt)
@@ -179,9 +185,11 @@ private[spark] object SSLOptions extends Logging {
      .orElse(defaults.flatMap(_.keyStore))
 
    val keyStorePassword = conf.getWithSubstitution(s"$ns.keyStorePassword")
+      .orElse(Option(hadoopConf.getPassword(s"$ns.keyStorePassword")).map(new String(_)))
      .orElse(defaults.flatMap(_.keyStorePassword))
 
    val keyPassword = conf.getWithSubstitution(s"$ns.keyPassword")
+      .orElse(Option(hadoopConf.getPassword(s"$ns.keyPassword")).map(new String(_)))
      .orElse(defaults.flatMap(_.keyPassword))
 
    val keyStoreType = conf.getWithSubstitution(s"$ns.keyStoreType")
@@ -194,6 +202,7 @@ private[spark] object SSLOptions extends Logging {
      .orElse(defaults.flatMap(_.trustStore))
 
    val trustStorePassword = conf.getWithSubstitution(s"$ns.trustStorePassword")
+      .orElse(Option(hadoopConf.getPassword(s"$ns.trustStorePassword")).map(new String(_)))
      .orElse(defaults.flatMap(_.trustStorePassword))
 
    val trustStoreType = conf.getWithSubstitution(s"$ns.trustStoreType")

core/src/main/scala/org/apache/spark/SecurityManager.scala

Lines changed: 7 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -19,11 +19,11 @@ package org.apache.spark
 
 import java.net.{Authenticator, PasswordAuthentication}
 import java.nio.charset.StandardCharsets.UTF_8
-import javax.net.ssl._
 
 import org.apache.hadoop.io.Text
 import org.apache.hadoop.security.{Credentials, UserGroupInformation}
 
+import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.internal.Logging
 import org.apache.spark.internal.config._
 import org.apache.spark.launcher.SparkLauncher
@@ -111,11 +111,14 @@ private[spark] class SecurityManager(
    )
  }
 
+  private val hadoopConf = SparkHadoopUtil.get.newConfiguration(sparkConf)
  // the default SSL configuration - it will be used by all communication layers unless overwritten
-  private val defaultSSLOptions = SSLOptions.parse(sparkConf, "spark.ssl", defaults = None)
+  private val defaultSSLOptions =
+    SSLOptions.parse(sparkConf, hadoopConf, "spark.ssl", defaults = None)
 
  def getSSLOptions(module: String): SSLOptions = {
-    val opts = SSLOptions.parse(sparkConf, s"spark.ssl.$module", Some(defaultSSLOptions))
+    val opts =
+      SSLOptions.parse(sparkConf, hadoopConf, s"spark.ssl.$module", Some(defaultSSLOptions))
    logDebug(s"Created SSL options for $module: $opts")
    opts
  }
@@ -325,6 +328,7 @@ private[spark] class SecurityManager(
      .orElse(Option(secretKey))
      .orElse(Option(sparkConf.getenv(ENV_AUTH_SECRET)))
      .orElse(sparkConf.getOption(SPARK_AUTH_SECRET_CONF))
+      .orElse(Option(hadoopConf.getPassword(SPARK_AUTH_SECRET_CONF)).map(new String(_)))
      .getOrElse {
        throw new IllegalArgumentException(
          s"A secret key must be specified via the $SPARK_AUTH_SECRET_CONF config")

core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala

Lines changed: 11 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -20,6 +20,7 @@ package org.apache.spark
 import java.io.File
 import javax.net.ssl.SSLContext
 
+import org.apache.hadoop.conf.Configuration
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.spark.util.SparkConfWithEnv
@@ -40,6 +41,7 @@ class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
      .toSet
 
    val conf = new SparkConf
+    val hadoopConf = new Configuration()
    conf.set("spark.ssl.enabled", "true")
    conf.set("spark.ssl.keyStore", keyStorePath)
    conf.set("spark.ssl.keyStorePassword", "password")
@@ -49,7 +51,7 @@ class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
    conf.set("spark.ssl.enabledAlgorithms", algorithms.mkString(","))
    conf.set("spark.ssl.protocol", "TLSv1.2")
 
-    val opts = SSLOptions.parse(conf, "spark.ssl")
+    val opts = SSLOptions.parse(conf, hadoopConf, "spark.ssl")
 
    assert(opts.enabled === true)
    assert(opts.trustStore.isDefined === true)
@@ -70,6 +72,7 @@ class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
    val trustStorePath = new File(this.getClass.getResource("/truststore").toURI).getAbsolutePath
 
    val conf = new SparkConf
+    val hadoopConf = new Configuration()
    conf.set("spark.ssl.enabled", "true")
    conf.set("spark.ssl.keyStore", keyStorePath)
    conf.set("spark.ssl.keyStorePassword", "password")
@@ -80,8 +83,8 @@ class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
      "TLS_RSA_WITH_AES_128_CBC_SHA, TLS_RSA_WITH_AES_256_CBC_SHA")
    conf.set("spark.ssl.protocol", "SSLv3")
 
-    val defaultOpts = SSLOptions.parse(conf, "spark.ssl", defaults = None)
-    val opts = SSLOptions.parse(conf, "spark.ssl.ui", defaults = Some(defaultOpts))
+    val defaultOpts = SSLOptions.parse(conf, hadoopConf, "spark.ssl", defaults = None)
+    val opts = SSLOptions.parse(conf, hadoopConf, "spark.ssl.ui", defaults = Some(defaultOpts))
 
    assert(opts.enabled === true)
    assert(opts.trustStore.isDefined === true)
@@ -103,6 +106,7 @@ class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
    val trustStorePath = new File(this.getClass.getResource("/truststore").toURI).getAbsolutePath
 
    val conf = new SparkConf
+    val hadoopConf = new Configuration()
    conf.set("spark.ssl.enabled", "true")
    conf.set("spark.ssl.ui.enabled", "false")
    conf.set("spark.ssl.ui.port", "4242")
@@ -117,8 +121,8 @@ class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
    conf.set("spark.ssl.ui.enabledAlgorithms", "ABC, DEF")
    conf.set("spark.ssl.protocol", "SSLv3")
 
-    val defaultOpts = SSLOptions.parse(conf, "spark.ssl", defaults = None)
-    val opts = SSLOptions.parse(conf, "spark.ssl.ui", defaults = Some(defaultOpts))
+    val defaultOpts = SSLOptions.parse(conf, hadoopConf, "spark.ssl", defaults = None)
+    val opts = SSLOptions.parse(conf, hadoopConf, "spark.ssl.ui", defaults = Some(defaultOpts))
 
    assert(opts.enabled === false)
    assert(opts.port === Some(4242))
@@ -139,12 +143,13 @@ class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
    val conf = new SparkConfWithEnv(Map(
      "ENV1" -> "val1",
      "ENV2" -> "val2"))
+    val hadoopConf = new Configuration()
 
    conf.set("spark.ssl.enabled", "true")
    conf.set("spark.ssl.keyStore", "${env:ENV1}")
    conf.set("spark.ssl.trustStore", "${env:ENV2}")
 
-    val opts = SSLOptions.parse(conf, "spark.ssl", defaults = None)
+    val opts = SSLOptions.parse(conf, hadoopConf, "spark.ssl", defaults = None)
    assert(opts.keyStore === Some(new File("val1")))
    assert(opts.trustStore === Some(new File("val2")))
  }

0 commit comments

Comments (0)