12 changes: 10 additions & 2 deletions core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1815,10 +1815,18 @@ class SparkContext(config: SparkConf) extends Logging {
           // A JAR file which exists only on the driver node
           case null | "file" =>
             try {
+              val file = new File(uri.getPath)
+              if (!file.exists()) {
+                throw new FileNotFoundException(s"Jar ${file.getAbsolutePath} not found")
+              }
+              if (file.isDirectory) {
+                throw new IllegalArgumentException(
+                  s"Directory ${file.getAbsoluteFile} is not allowed for addJar")
+              }
               env.rpcEnv.fileServer.addJar(new File(uri.getPath))
             } catch {
-              case exc: FileNotFoundException =>
-                logError(s"Jar not found at $path")
+              case NonFatal(e) =>
+                logError(s"Failed to add $path to Spark environment", e)
                 null
             }
           // A JAR file which exists locally on every worker node
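To make the new behavior concrete from a caller's perspective, here is a minimal sketch (the object name, temp-file setup, and paths are illustrative, not part of this change); it uses only the public SparkContext.addJar/listJars API exercised by the test further below:

import java.io.File

import org.apache.spark.{SparkConf, SparkContext}

// Illustrative only: shows how addJar is expected to react to missing files and directories.
object AddJarBehaviorSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("addJar-sketch").setMaster("local"))

    val realJar = File.createTempFile("sketch", ".jar")  // stands in for a real jar on the driver
    sc.addJar(realJar.getAbsolutePath)                    // existing file: registered with the RPC file server

    sc.addJar("/no/such/path.jar")   // missing file: an error is logged, nothing is added
    sc.addJar(realJar.getParent)     // directory: rejected and logged, nothing is added

    println(sc.listJars())           // only the jar that actually exists should be listed
    sc.stop()
  }
}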
2 changes: 1 addition & 1 deletion core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1989,7 +1989,7 @@ private[spark] object Utils extends Logging {
     if (paths == null || paths.trim.isEmpty) {
       ""
     } else {
-      paths.split(",").map { p => Utils.resolveURI(p) }.mkString(",")
+      paths.split(",").filter(_.trim.nonEmpty).map { p => Utils.resolveURI(p) }.mkString(",")
     }
   }

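For reference, a small sketch of what the added filter is meant to achieve; since Utils is private[spark], this would only compile inside Spark's own source tree, and the input string is illustrative:

package org.apache.spark.util

// Illustrative only: empty segments from leading, trailing, or doubled commas are now
// dropped instead of being resolved against the current working directory.
object ResolveURIsSketch {
  def main(args: Array[String]): Unit = {
    val resolved = Utils.resolveURIs(",jar1,,jar2,")
    // Prints something like "file:/<cwd>/jar1,file:/<cwd>/jar2" (the prefix depends on the working directory).
    println(resolved)
  }
}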
16 changes: 16 additions & 0 deletions core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -292,6 +292,22 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
     }
   }
 
+  test("add jar with invalid path") {
+    val tmpDir = Utils.createTempDir()
+    val tmpJar = File.createTempFile("test", ".jar", tmpDir)
+
+    sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
+    sc.addJar(tmpJar.getAbsolutePath)
+
+    // An invalid jar path only logs an error; nothing is added to the file server.
+    sc.addJar("dummy.jar")
+    sc.addJar("")
+    sc.addJar(tmpDir.getAbsolutePath)
+
+    sc.listJars().size should be (1)
+    sc.listJars().head should include (tmpJar.getName)
+  }
+
   test("Cancelling job group should not cause SparkContext to shutdown (SPARK-6414)") {
     try {
       sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
1 change: 1 addition & 0 deletions core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -507,6 +507,7 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
assertResolves("""hdfs:/jar1,file:/jar2,jar3,C:\pi.py#py.pi,C:\path to\jar4""",
s"hdfs:/jar1,file:/jar2,file:$cwd/jar3,file:/C:/pi.py%23py.pi,file:/C:/path%20to/jar4")
}
assertResolves(",jar1,jar2", s"file:$cwd/jar1,file:$cwd/jar2")
}

test("nonLocalPaths") {