|
17 | 17 |
|
18 | 18 | package org.apache.spark.repl |
19 | 19 |
|
20 | | -import java.io.{ByteArrayOutputStream, InputStream} |
| 20 | +import java.io.{ByteArrayOutputStream, IOException, InputStream} |
21 | 21 | import java.net.{HttpURLConnection, URI, URL, URLEncoder} |
22 | 22 |
|
| 23 | +import scala.util.control.NonFatal |
| 24 | + |
23 | 25 | import org.apache.hadoop.fs.{FileSystem, Path} |
24 | 26 |
|
25 | 27 | import org.apache.spark.{SparkConf, SparkEnv, Logging} |
@@ -89,11 +91,23 @@ class ExecutorClassLoader(conf: SparkConf, classUri: String, parent: ClassLoader |
89 | 91 | connection.setReadTimeout(httpUrlConnectionTimeoutMillis) |
90 | 92 | } |
91 | 93 | connection.connect() |
92 | | - if (connection.getResponseCode != 200) { |
93 | | - connection.disconnect() |
94 | | - throw new ClassNotFoundException(s"Class file not found at URL $url") |
95 | | - } else { |
96 | | - connection.getInputStream |
| 94 | + try { |
| 95 | + if (connection.getResponseCode != 200) { |
| 96 | + // Close the error stream so that the connection is eligible for re-use |
| 97 | + try { |
| 98 | +            Option(connection.getErrorStream).foreach(_.close()) |
| 99 | + } catch { |
| 100 | + case ioe: IOException => |
| 101 | + logError("Exception while closing error stream", ioe) |
| 102 | + } |
| 103 | + throw new ClassNotFoundException(s"Class file not found at URL $url") |
| 104 | + } else { |
| 105 | + connection.getInputStream |
| 106 | + } |
| 107 | + } catch { |
| 108 | + case NonFatal(e) if !e.isInstanceOf[ClassNotFoundException] => |
| 109 | + connection.disconnect() |
| 110 | + throw e |
97 | 111 | } |
98 | 112 | } |
99 | 113 |
|
|
0 commit comments