45 changes: 0 additions & 45 deletions core/src/main/scala/org/apache/spark/SecurityManager.scala
@@ -256,51 +256,6 @@ private[spark] class SecurityManager(
// the default SSL configuration - it will be used by all communication layers unless overwritten
private val defaultSSLOptions = SSLOptions.parse(sparkConf, "spark.ssl", defaults = None)

// SSL configuration for the file server. This is used by Utils.setupSecureURLConnection().
val fileServerSSLOptions = getSSLOptions("fs")
val (sslSocketFactory, hostnameVerifier) = if (fileServerSSLOptions.enabled) {
val trustStoreManagers =
for (trustStore <- fileServerSSLOptions.trustStore) yield {
val input = Files.asByteSource(fileServerSSLOptions.trustStore.get).openStream()

try {
val ks = KeyStore.getInstance(KeyStore.getDefaultType)
ks.load(input, fileServerSSLOptions.trustStorePassword.get.toCharArray)

val tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
tmf.init(ks)
tmf.getTrustManagers
} finally {
input.close()
}
}

lazy val credulousTrustStoreManagers = Array({
logWarning("Using 'accept-all' trust manager for SSL connections.")
new X509TrustManager {
override def getAcceptedIssuers: Array[X509Certificate] = null

override def checkClientTrusted(x509Certificates: Array[X509Certificate], s: String) {}

override def checkServerTrusted(x509Certificates: Array[X509Certificate], s: String) {}
}: TrustManager
})

require(fileServerSSLOptions.protocol.isDefined,
"spark.ssl.protocol is required when enabling SSL connections.")

val sslContext = SSLContext.getInstance(fileServerSSLOptions.protocol.get)
sslContext.init(null, trustStoreManagers.getOrElse(credulousTrustStoreManagers), null)

val hostVerifier = new HostnameVerifier {
override def verify(s: String, sslSession: SSLSession): Boolean = true
}

(Some(sslContext.getSocketFactory), Some(hostVerifier))
} else {
(None, None)
}

def getSSLOptions(module: String): SSLOptions = {
val opts = SSLOptions.parse(sparkConf, s"spark.ssl.$module", Some(defaultSSLOptions))
logDebug(s"Created SSL options for $module: $opts")
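
For context, the deleted block above is standard JSSE wiring: load the truststore into a `KeyStore`, feed it to a `TrustManagerFactory`, and initialize an `SSLContext` from the resulting trust managers. A minimal standalone sketch of that pipeline, with placeholder path, password, and protocol values (the method name is hypothetical, not Spark API):

```scala
import java.io.FileInputStream
import java.security.KeyStore
import javax.net.ssl.{SSLContext, TrustManagerFactory}

// Truststore file -> KeyStore -> TrustManagerFactory -> SSLContext,
// mirroring the block removed from SecurityManager above.
def sslContextFromTrustStore(path: String, password: String, protocol: String): SSLContext = {
  val in = new FileInputStream(path)
  val trustManagers =
    try {
      val ks = KeyStore.getInstance(KeyStore.getDefaultType)
      ks.load(in, password.toCharArray)
      val tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
      tmf.init(ks)
      tmf.getTrustManagers
    } finally {
      in.close()
    }
  val ctx = SSLContext.getInstance(protocol) // e.g. "TLSv1.2"
  ctx.init(null, trustManagers, null)        // default key managers and SecureRandom
  ctx
}
```

The surviving `getSSLOptions` path keeps the per-module fallback: `SSLOptions.parse(sparkConf, s"spark.ssl.$module", Some(defaultSSLOptions))` resolves any key not set under `spark.ssl.<module>` from the global `spark.ssl` namespace.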
15 changes: 0 additions & 15 deletions core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -673,7 +673,6 @@ private[spark] object Utils extends Logging {
logDebug("fetchFile not using security")
uc = new URL(url).openConnection()
}
Utils.setupSecureURLConnection(uc, securityMgr)

val timeoutMs =
conf.getTimeAsSeconds("spark.files.fetchTimeout", "60s").toInt * 1000
@@ -2363,20 +2362,6 @@
PropertyConfigurator.configure(pro)
}

/**
* If the given URL connection is HttpsURLConnection, it sets the SSL socket factory and
* the host verifier from the given security manager.
*/
def setupSecureURLConnection(urlConnection: URLConnection, sm: SecurityManager): URLConnection = {
urlConnection match {
case https: HttpsURLConnection =>
sm.sslSocketFactory.foreach(https.setSSLSocketFactory)
sm.hostnameVerifier.foreach(https.setHostnameVerifier)
https
case connection => connection
}
}

def invoke(
clazz: Class[_],
obj: AnyRef,
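
For context, the removed `setupSecureURLConnection` helper only mattered for `HttpsURLConnection`: it installed the `SecurityManager`'s socket factory and hostname verifier and passed every other connection through. A standalone sketch of the same pattern (the permissive verifier reproduces the removed accept-all behavior and is insecure; shown only for illustration):

```scala
import java.net.URLConnection
import javax.net.ssl.{HostnameVerifier, HttpsURLConnection, SSLContext, SSLSession}

// Accepts every host name, like the removed hostVerifier. Do not use in production.
val allowAllVerifier = new HostnameVerifier {
  override def verify(host: String, session: SSLSession): Boolean = true
}

// Same shape as the deleted helper: customize HTTPS connections, pass others through.
def secureConnection(urlConnection: URLConnection, sslContext: SSLContext): URLConnection =
  urlConnection match {
    case https: HttpsURLConnection =>
      https.setSSLSocketFactory(sslContext.getSocketFactory)
      https.setHostnameVerifier(allowAllVerifier)
      https
    case other => other
  }
```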
68 changes: 0 additions & 68 deletions core/src/test/scala/org/apache/spark/SSLSampleConfigs.scala

This file was deleted.

45 changes: 0 additions & 45 deletions core/src/test/scala/org/apache/spark/SecurityManagerSuite.scala
@@ -370,51 +370,6 @@ class SecurityManagerSuite extends SparkFunSuite with ResetSystemProperties {
assert(securityManager.checkModifyPermissions("user1") === false)
}

test("ssl on setup") {
val conf = SSLSampleConfigs.sparkSSLConfig()
val expectedAlgorithms = Set(
"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384",
"TLS_RSA_WITH_AES_256_CBC_SHA256",
"TLS_DHE_RSA_WITH_AES_256_CBC_SHA256",
"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256",
"TLS_DHE_RSA_WITH_AES_128_CBC_SHA256",
"SSL_ECDHE_RSA_WITH_AES_256_CBC_SHA384",
"SSL_RSA_WITH_AES_256_CBC_SHA256",
"SSL_DHE_RSA_WITH_AES_256_CBC_SHA256",
"SSL_ECDHE_RSA_WITH_AES_128_CBC_SHA256",
"SSL_DHE_RSA_WITH_AES_128_CBC_SHA256")

val securityManager = new SecurityManager(conf)

assert(securityManager.fileServerSSLOptions.enabled === true)

assert(securityManager.sslSocketFactory.isDefined === true)
assert(securityManager.hostnameVerifier.isDefined === true)

assert(securityManager.fileServerSSLOptions.trustStore.isDefined === true)
assert(securityManager.fileServerSSLOptions.trustStore.get.getName === "truststore")
assert(securityManager.fileServerSSLOptions.keyStore.isDefined === true)
assert(securityManager.fileServerSSLOptions.keyStore.get.getName === "keystore")
assert(securityManager.fileServerSSLOptions.trustStorePassword === Some("password"))
assert(securityManager.fileServerSSLOptions.keyStorePassword === Some("password"))
assert(securityManager.fileServerSSLOptions.keyPassword === Some("password"))
assert(securityManager.fileServerSSLOptions.protocol === Some("TLSv1.2"))
assert(securityManager.fileServerSSLOptions.enabledAlgorithms === expectedAlgorithms)
}

test("ssl off setup") {
val file = File.createTempFile("SSLOptionsSuite", "conf", Utils.createTempDir())

System.setProperty("spark.ssl.configFile", file.getAbsolutePath)
val conf = new SparkConf()

val securityManager = new SecurityManager(conf)

assert(securityManager.fileServerSSLOptions.enabled === false)
assert(securityManager.sslSocketFactory.isDefined === false)
assert(securityManager.hostnameVerifier.isDefined === false)
}

test("missing secret authentication key") {
val conf = new SparkConf().set("spark.authenticate", "true")
val mgr = new SecurityManager(conf)
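
The two deleted tests covered the file-server options that no longer exist. The default-off property they exercised can still be checked at the `SSLOptions` level; a minimal sketch, assuming test code living under the `org.apache.spark` package since `SSLOptions` is `private[spark]`:

```scala
import org.apache.spark.{SparkConf, SSLOptions}

// With no spark.ssl.* keys set, parsing yields a disabled configuration --
// the same default-off behavior the deleted "ssl off setup" test asserted.
val opts = SSLOptions.parse(new SparkConf(), "spark.ssl", defaults = None)
assert(!opts.enabled)
```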
4 changes: 0 additions & 4 deletions docs/security.md
@@ -44,10 +44,6 @@ component-specific configuration namespaces used to override the default setting
<th>Config Namespace</th>
<th>Component</th>
</tr>
<tr>
<td><code>spark.ssl.fs</code></td>
<td>File download client (used to download jars and files from HTTPS-enabled servers).</td>
</tr>
<tr>
<td><code>spark.ssl.ui</code></td>
<td>Spark application Web UI</td>
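
The remaining table rows still describe the layering scheme: keys under `spark.ssl` apply everywhere, and a component namespace such as `spark.ssl.ui` overrides them for that component only. A hedged sketch of that layering on a `SparkConf` (paths and password are placeholders; key names are taken from the deleted test above):

```scala
import org.apache.spark.SparkConf

val conf = new SparkConf()
  // Defaults shared by all SSL-enabled components.
  .set("spark.ssl.enabled", "true")
  .set("spark.ssl.protocol", "TLSv1.2")
  .set("spark.ssl.keyStore", "/path/to/keystore") // placeholder
  .set("spark.ssl.keyStorePassword", "password")  // placeholder
  // Web UI-specific override; all other keys fall back to spark.ssl.*.
  .set("spark.ssl.ui.enabledAlgorithms", "TLS_RSA_WITH_AES_256_CBC_SHA256")
```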
repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
@@ -17,26 +17,24 @@

package org.apache.spark.repl

import java.io.{ByteArrayOutputStream, FileNotFoundException, FilterInputStream, InputStream, IOException}
import java.net.{HttpURLConnection, URI, URL, URLEncoder}
import java.io.{ByteArrayOutputStream, FileNotFoundException, FilterInputStream, InputStream}
import java.net.{URI, URL, URLEncoder}
import java.nio.channels.Channels

import scala.util.control.NonFatal

import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.xbean.asm5._
import org.apache.xbean.asm5.Opcodes._

import org.apache.spark.{SparkConf, SparkEnv}
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.internal.Logging
import org.apache.spark.util.{ParentClassLoader, Utils}
import org.apache.spark.util.ParentClassLoader

/**
* A ClassLoader that reads classes from a Hadoop FileSystem or HTTP URI, used to load classes
* defined by the interpreter when the REPL is used. Allows the user to specify if user class path
* should be first. This class loader delegates getting/finding resources to parent loader, which
* makes sense until REPL never provide resource dynamically.
 * A ClassLoader that reads classes from a Hadoop FileSystem or Spark RPC endpoint, used to load
 * classes defined by the interpreter when the REPL is used. Allows the user to specify whether
 * the user class path should be checked first. This class loader delegates getting/finding
 * resources to the parent loader, which makes sense because the REPL never provides resources
 * dynamically.
 *
 * Note: [[ClassLoader]] preferentially loads classes from the parent. Only when the parent is
 * null or the load fails does it call the overridden `findClass` function. To avoid the potential
@@ -60,7 +58,6 @@ class ExecutorClassLoader(

private val fetchFn: (String) => InputStream = uri.getScheme() match {
case "spark" => getClassFileInputStreamFromSparkRPC
case "http" | "https" | "ftp" => getClassFileInputStreamFromHttpServer
case _ =>
val fileSystem = FileSystem.get(uri, SparkHadoopUtil.get.newConfiguration(conf))
getClassFileInputStreamFromFileSystem(fileSystem)
@@ -113,42 +110,6 @@
}
}

private def getClassFileInputStreamFromHttpServer(pathInDirectory: String): InputStream = {
val url = if (SparkEnv.get.securityManager.isAuthenticationEnabled()) {
val uri = new URI(classUri + "/" + urlEncode(pathInDirectory))
val newuri = Utils.constructURIForAuthentication(uri, SparkEnv.get.securityManager)
newuri.toURL
} else {
new URL(classUri + "/" + urlEncode(pathInDirectory))
}
val connection: HttpURLConnection = Utils.setupSecureURLConnection(url.openConnection(),
SparkEnv.get.securityManager).asInstanceOf[HttpURLConnection]
// Set the connection timeouts (for testing purposes)
if (httpUrlConnectionTimeoutMillis != -1) {
connection.setConnectTimeout(httpUrlConnectionTimeoutMillis)
connection.setReadTimeout(httpUrlConnectionTimeoutMillis)
}
connection.connect()
try {
if (connection.getResponseCode != 200) {
// Close the error stream so that the connection is eligible for re-use
try {
connection.getErrorStream.close()
} catch {
case ioe: IOException =>
logError("Exception while closing error stream", ioe)
}
throw new ClassNotFoundException(s"Class file not found at URL $url")
} else {
connection.getInputStream
}
} catch {
case NonFatal(e) if !e.isInstanceOf[ClassNotFoundException] =>
connection.disconnect()
throw e
}
}

private def getClassFileInputStreamFromFileSystem(fileSystem: FileSystem)(
pathInDirectory: String): InputStream = {
val path = new Path(directory, pathInDirectory)
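
With the HTTP branch gone, the loader's `fetchFn` dispatch at the top of this file has only two cases: `spark` URIs are served over the driver's RPC endpoint, and everything else goes through a Hadoop `FileSystem`. A condensed sketch of that dispatch, with the two fetchers abstracted as parameters since the real ones are private methods:

```scala
import java.io.InputStream
import java.net.URI

// Scheme-based dispatch mirroring ExecutorClassLoader.fetchFn after this change.
def chooseFetcher(
    uri: URI,
    fromRpc: String => InputStream,        // stands in for getClassFileInputStreamFromSparkRPC
    fromFileSystem: String => InputStream  // stands in for getClassFileInputStreamFromFileSystem
  ): String => InputStream =
  uri.getScheme match {
    case "spark" => fromRpc
    case _ => fromFileSystem
  }
```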