@@ -21,8 +21,7 @@ import java.net.{InetAddress, UnknownHostException, URI}
2121import java .nio .ByteBuffer
2222
2323import scala .collection .JavaConversions ._
24- import scala .collection .mutable .HashMap
25- import scala .collection .mutable .Map
24+ import scala .collection .mutable .{ListBuffer , HashMap , Map }
2625
2726import org .apache .hadoop .conf .Configuration
2827import org .apache .hadoop .fs .{FileContext , FileStatus , FileSystem , Path , FileUtil }
@@ -281,18 +280,19 @@ class Client(args: ClientArguments, conf: Configuration, sparkConf: SparkConf)
281280 }
282281
283282 // Handle jars local to the ApplicationMaster.
283+ var cachedSecondaryJarLinks = ListBuffer .empty[String ]
284284 if ((args.addJars != null ) && (! args.addJars.isEmpty())){
285285 args.addJars.split(',' ).foreach { case file : String =>
286286 val localURI = new URI (file.trim())
287287 val localPath = new Path (localURI)
288288 val linkname = Option (localURI.getFragment()).getOrElse(localPath.getName())
289289 val destPath = copyRemoteFile(dst, localPath, replication)
290- // Only add the resource to the Spark ApplicationMaster.
291- val appMasterOnly = true
292290 distCacheMgr.addResource(fs, conf, destPath, localResources, LocalResourceType .FILE ,
293- linkname, statCache, appMasterOnly)
291+ linkname, statCache)
292+ cachedSecondaryJarLinks += linkname
294293 }
295294 }
295+ sparkConf.set(Client.CONF_SPARK_YARN_SECONDARY_JARS, cachedSecondaryJarLinks.mkString(","))
296296
297297 // Handle any distributed cache files
298298 if ((args.files != null ) && (! args.files.isEmpty())){
@@ -478,9 +478,10 @@ class Client(args: ClientArguments, conf: Configuration, sparkConf: SparkConf)
478478}
479479
480480object Client {
481- val SPARK_JAR: String = "spark.jar"
482- val APP_JAR: String = "app.jar"
481+ val SPARK_JAR: String = "__spark__.jar"
482+ val APP_JAR: String = "__app__.jar"
483483 val LOG4J_PROP: String = "log4j.properties"
484+ val CONF_SPARK_YARN_SECONDARY_JARS = "spark.yarn.secondary.jars"
484485
485486 def main (argStrings : Array [String ]) {
486487 // Set an env variable indicating we are running in YARN mode.
@@ -507,12 +508,19 @@ object Client {
507508 Apps .addToEnvironment(env, Environment .CLASSPATH .name, Environment .PWD .$() +
508509 Path .SEPARATOR + LOG4J_PROP )
509510 }
511+
512+ val cachedSecondaryJarLinks =
513+ sparkConf.getOption(CONF_SPARK_YARN_SECONDARY_JARS).getOrElse("").split(",")
514+
510515 // Normally the users app.jar is last in case conflicts with spark jars
511516 val userClasspathFirst = sparkConf.get("spark.yarn.user.classpath.first", "false")
512517 .toBoolean
513518 if (userClasspathFirst) {
514519 Apps .addToEnvironment(env, Environment .CLASSPATH .name, Environment .PWD .$() +
515520 Path .SEPARATOR + APP_JAR )
521+ cachedSecondaryJarLinks.foreach(jarLink =>
522+ Apps .addToEnvironment(env, Environment .CLASSPATH .name, Environment .PWD .$() +
523+ Path .SEPARATOR + jarLink))
516524 }
517525 Apps .addToEnvironment(env, Environment .CLASSPATH .name, Environment .PWD .$() +
518526 Path .SEPARATOR + SPARK_JAR )
@@ -521,6 +529,9 @@ object Client {
521529 if (! userClasspathFirst) {
522530 Apps .addToEnvironment(env, Environment .CLASSPATH .name, Environment .PWD .$() +
523531 Path .SEPARATOR + APP_JAR )
532+ cachedSecondaryJarLinks.foreach(jarLink =>
533+ Apps .addToEnvironment(env, Environment .CLASSPATH .name, Environment .PWD .$() +
534+ Path .SEPARATOR + jarLink))
524535 }
525536 Apps .addToEnvironment(env, Environment .CLASSPATH .name, Environment .PWD .$() +
526537 Path .SEPARATOR + " *" )
0 commit comments