Commit c7ef15e

add unit test and docs
Change-Id: I38146ee45a4565295fa6fd297f591c368d6b250a
1 parent 575152b commit c7ef15e

3 files changed: +71 -2 lines
core/src/main/scala/org/apache/spark/SecurityManager.scala

Lines changed: 0 additions & 1 deletion
@@ -328,7 +328,6 @@ private[spark] class SecurityManager(
       .orElse(Option(secretKey))
       .orElse(Option(sparkConf.getenv(ENV_AUTH_SECRET)))
       .orElse(sparkConf.getOption(SPARK_AUTH_SECRET_CONF))
-      .orElse(Option(hadoopConf.getPassword(SPARK_AUTH_SECRET_CONF)).map(new String(_)))
       .getOrElse {
         throw new IllegalArgumentException(
           s"A secret key must be specified via the $SPARK_AUTH_SECRET_CONF config")

core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala

Lines changed: 58 additions & 0 deletions
@@ -18,9 +18,11 @@
 package org.apache.spark

 import java.io.File
+import java.util.UUID
 import javax.net.ssl.SSLContext

 import org.apache.hadoop.conf.Configuration
+import org.apache.hadoop.security.alias.{CredentialProvider, CredentialProviderFactory}
 import org.scalatest.BeforeAndAfterAll

 import org.apache.spark.util.SparkConfWithEnv
@@ -154,4 +156,60 @@ class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
     assert(opts.trustStore === Some(new File("val2")))
   }

+  test("get password from Hadoop credential provider") {
+    val keyStorePath = new File(this.getClass.getResource("/keystore").toURI).getAbsolutePath
+    val trustStorePath = new File(this.getClass.getResource("/truststore").toURI).getAbsolutePath
+
+    val conf = new SparkConf
+    val hadoopConf = new Configuration()
+    val tmpPath = s"localjceks://file${sys.props("java.io.tmpdir")}/test-" +
+      s"${UUID.randomUUID().toString}.jceks"
+    val provider = createCredentialProvider(tmpPath, hadoopConf)
+
+    conf.set("spark.ssl.enabled", "true")
+    conf.set("spark.ssl.keyStore", keyStorePath)
+    storePassword(provider, "spark.ssl.keyStorePassword", "password")
+    storePassword(provider, "spark.ssl.keyPassword", "password")
+    conf.set("spark.ssl.trustStore", trustStorePath)
+    storePassword(provider, "spark.ssl.trustStorePassword", "password")
+    conf.set("spark.ssl.enabledAlgorithms",
+      "TLS_RSA_WITH_AES_128_CBC_SHA, TLS_RSA_WITH_AES_256_CBC_SHA")
+    conf.set("spark.ssl.protocol", "SSLv3")
+
+    val defaultOpts = SSLOptions.parse(conf, hadoopConf, "spark.ssl", defaults = None)
+    val opts = SSLOptions.parse(conf, hadoopConf, "spark.ssl.ui", defaults = Some(defaultOpts))
+
+    assert(opts.enabled === true)
+    assert(opts.trustStore.isDefined === true)
+    assert(opts.trustStore.get.getName === "truststore")
+    assert(opts.trustStore.get.getAbsolutePath === trustStorePath)
+    assert(opts.keyStore.isDefined === true)
+    assert(opts.keyStore.get.getName === "keystore")
+    assert(opts.keyStore.get.getAbsolutePath === keyStorePath)
+    assert(opts.trustStorePassword === Some("password"))
+    assert(opts.keyStorePassword === Some("password"))
+    assert(opts.keyPassword === Some("password"))
+    assert(opts.protocol === Some("SSLv3"))
+    assert(opts.enabledAlgorithms ===
+      Set("TLS_RSA_WITH_AES_128_CBC_SHA", "TLS_RSA_WITH_AES_256_CBC_SHA"))
+  }
+
+  private def createCredentialProvider(tmpPath: String, conf: Configuration): CredentialProvider = {
+    conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, tmpPath)
+
+    val provider = CredentialProviderFactory.getProviders(conf).get(0)
+    if (provider == null) {
+      throw new IllegalStateException(s"Fail to get credential provider with path $tmpPath")
+    }
+
+    provider
+  }
+
+  private def storePassword(
+      provider: CredentialProvider,
+      passwordKey: String,
+      password: String): Unit = {
+    provider.createCredentialEntry(passwordKey, password.toCharArray)
+    provider.flush()
+  }
 }
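
The new test drives `SSLOptions.parse` through both sources. As an illustration of the fallback the test relies on, here is a sketch under the assumption that a password set directly in the Spark config wins over a provider-stored one; `resolvePassword` is an illustrative helper, not an API in this diff.

```scala
import org.apache.hadoop.conf.Configuration
import org.apache.spark.SparkConf

// Illustrative fallback: prefer a password set directly in SparkConf, then
// consult the Hadoop credential provider. Configuration.getPassword returns
// null when no provider or config entry holds the given alias.
def resolvePassword(
    conf: SparkConf,
    hadoopConf: Configuration,
    key: String): Option[String] = {
  conf.getOption(key)
    .orElse(Option(hadoopConf.getPassword(key)).map(new String(_)))
}
```

With the JCEKS provider the test registers, `resolvePassword(conf, hadoopConf, "spark.ssl.keyStorePassword")` would yield `Some("password")`.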

docs/security.md

Lines changed: 13 additions & 1 deletion
@@ -177,7 +177,7 @@ ACLs can be configured for either users or groups. Configuration entries accept
 lists as input, meaning multiple users or groups can be given the desired privileges. This can be
 used if you run on a shared cluster and have a set of administrators or developers who need to
 monitor applications they may not have started themselves. A wildcard (`*`) added to specific ACL
-means that all users will have the respective pivilege. By default, only the user submitting the
+means that all users will have the respective privilege. By default, only the user submitting the
 application is added to the ACLs.

 Group membership is established by using a configurable group mapping provider. The mapper is
@@ -446,6 +446,18 @@ replaced with one of the above namespaces.
 </tr>
 </table>

+Spark also supports retrieving `${ns}.keyPassword`, `${ns}.keyStorePassword` and `${ns}.trustStorePassword` from
+[Hadoop Credential Providers](https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/CredentialProviderAPI.html).
+A user can store passwords in a credential file and make them accessible to different components, for example:
+
+```
+hadoop credential create spark.ssl.keyPassword -value password \
+    -provider jceks://[email protected]:9001/user/backup/ssl.jceks
+```
+
+Then add `hadoop.security.credential.provider.path=jceks://[email protected]:9001/user/backup/ssl.jceks`
+to Spark's Hadoop configuration so that Spark knows where to find the credential provider.
+
 ## Preparing the key stores

 Key stores can be generated by `keytool` program. The reference documentation for this tool for
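
Referring back to the `hadoop.security.credential.provider.path` note in the hunk above: one way to get that setting into Spark's Hadoop configuration is via `SparkConf`, a sketch assuming the standard `spark.hadoop.*` property forwarding, which is not specific to this commit.

```scala
import org.apache.spark.SparkConf

// Properties prefixed with "spark.hadoop." are copied into the Hadoop
// Configuration that Spark builds, so the provider path set here becomes
// visible to hadoopConf.getPassword when SSL options are parsed.
val conf = new SparkConf()
  .set("spark.ssl.enabled", "true")
  .set("spark.hadoop.hadoop.security.credential.provider.path",
    "jceks://[email protected]:9001/user/backup/ssl.jceks")
```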
