@@ -20,6 +20,7 @@ package org.apache.spark.deploy
2020import java .io .{File , PrintStream }
2121import java .lang .reflect .{InvocationTargetException , Modifier , UndeclaredThrowableException }
2222import java .net .URL
23+ import java .nio .file .{Path => JavaPath }
2324import java .security .PrivilegedExceptionAction
2425
2526import scala .collection .mutable .{ArrayBuffer , HashMap , Map }
@@ -708,7 +709,9 @@ private[deploy] object SparkSubmitUtils {
708709 * @param artifactId the artifactId of the coordinate
709710 * @param version the version of the coordinate
710711 */
/**
 * Represents a Maven coordinate of the form `groupId:artifactId:version`.
 *
 * @param groupId the group identifier of the coordinate, e.g. `org.apache.spark`
 * @param artifactId the artifact identifier of the coordinate, e.g. `spark-core_2.10`
 * @param version the version of the coordinate
 */
private[deploy] case class MavenCoordinate(groupId: String, artifactId: String, version: String) {
  // Render back to the canonical colon-separated form used on the spark-submit
  // command line. Note: no spaces around the colons — the interpolated fields
  // must abut the separators so the string round-trips through the coordinate parser.
  override def toString: String = s"$groupId:$artifactId:$version"
}
712715
713716/**
714717 * Extracts maven coordinates from a comma-delimited string. Coordinates should be provided
@@ -731,6 +734,10 @@ private[deploy] object SparkSubmitUtils {
731734 }
732735 }
733736
/**
 * Path of the local Maven cache, i.e. `~/.m2/repository` under the current
 * user's home directory (as reported by the `user.home` system property).
 */
private[spark] def m2Path: JavaPath = {
  val homeDir = System.getProperty("user.home")
  // java.io.File normalizes the child path, so the result is the same Path
  // with or without a trailing separator.
  new File(homeDir, ".m2" + File.separator + "repository").toPath
}
734741 /**
735742 * Extracts maven coordinates from a comma-delimited string
736743 * @param remoteRepos Comma-delimited string of remote repositories
@@ -744,8 +751,7 @@ private[deploy] object SparkSubmitUtils {
744751
745752 val localM2 = new IBiblioResolver
746753 localM2.setM2compatible(true )
747- val m2Path = " .m2" + File .separator + " repository" + File .separator
748- localM2.setRoot(new File (System .getProperty(" user.home" ), m2Path).toURI.toString)
754+ localM2.setRoot(m2Path.toUri.toString)
749755 localM2.setUsepoms(true )
750756 localM2.setName(" local-m2-cache" )
751757 cr.add(localM2)
@@ -870,69 +876,72 @@ private[deploy] object SparkSubmitUtils {
870876 " "
871877 } else {
872878 val sysOut = System .out
873- // To prevent ivy from logging to system out
874- System .setOut(printStream)
875- val artifacts = extractMavenCoordinates(coordinates)
876- // Default configuration name for ivy
877- val ivyConfName = " default"
878- // set ivy settings for location of cache
879- val ivySettings : IvySettings = new IvySettings
880- // Directories for caching downloads through ivy and storing the jars when maven coordinates
881- // are supplied to spark-submit
882- val alternateIvyCache = ivyPath.getOrElse(" " )
883- val packagesDirectory : File =
884- if (alternateIvyCache.trim.isEmpty) {
885- new File (ivySettings.getDefaultIvyUserDir, " jars" )
879+ try {
880+ // To prevent ivy from logging to system out
881+ System .setOut(printStream)
882+ val artifacts = extractMavenCoordinates(coordinates)
883+ // Default configuration name for ivy
884+ val ivyConfName = " default"
885+ // set ivy settings for location of cache
886+ val ivySettings : IvySettings = new IvySettings
887+ // Directories for caching downloads through ivy and storing the jars when maven coordinates
888+ // are supplied to spark-submit
889+ val alternateIvyCache = ivyPath.getOrElse(" " )
890+ val packagesDirectory : File =
891+ if (alternateIvyCache.trim.isEmpty) {
892+ new File (ivySettings.getDefaultIvyUserDir, " jars" )
893+ } else {
894+ ivySettings.setDefaultIvyUserDir(new File (alternateIvyCache))
895+ ivySettings.setDefaultCache(new File (alternateIvyCache, " cache" ))
896+ new File (alternateIvyCache, " jars" )
897+ }
898+ printStream.println(
899+ s " Ivy Default Cache set to: ${ivySettings.getDefaultCache.getAbsolutePath}" )
900+ printStream.println(s " The jars for the packages stored in: $packagesDirectory" )
901+ // create a pattern matcher
902+ ivySettings.addMatcher(new GlobPatternMatcher )
903+ // create the dependency resolvers
904+ val repoResolver = createRepoResolvers(remoteRepos, ivySettings)
905+ ivySettings.addResolver(repoResolver)
906+ ivySettings.setDefaultResolver(repoResolver.getName)
907+
908+ val ivy = Ivy .newInstance(ivySettings)
909+ // Set resolve options to download transitive dependencies as well
910+ val resolveOptions = new ResolveOptions
911+ resolveOptions.setTransitive(true )
912+ val retrieveOptions = new RetrieveOptions
913+ // Turn downloading and logging off for testing
914+ if (isTest) {
915+ resolveOptions.setDownload(false )
916+ resolveOptions.setLog(LogOptions .LOG_QUIET )
917+ retrieveOptions.setLog(LogOptions .LOG_QUIET )
886918 } else {
887- ivySettings.setDefaultIvyUserDir(new File (alternateIvyCache))
888- ivySettings.setDefaultCache(new File (alternateIvyCache, " cache" ))
889- new File (alternateIvyCache, " jars" )
919+ resolveOptions.setDownload(true )
890920 }
891- printStream.println(
892- s " Ivy Default Cache set to: ${ivySettings.getDefaultCache.getAbsolutePath}" )
893- printStream.println(s " The jars for the packages stored in: $packagesDirectory" )
894- // create a pattern matcher
895- ivySettings.addMatcher(new GlobPatternMatcher )
896- // create the dependency resolvers
897- val repoResolver = createRepoResolvers(remoteRepos, ivySettings)
898- ivySettings.addResolver(repoResolver)
899- ivySettings.setDefaultResolver(repoResolver.getName)
900-
901- val ivy = Ivy .newInstance(ivySettings)
902- // Set resolve options to download transitive dependencies as well
903- val resolveOptions = new ResolveOptions
904- resolveOptions.setTransitive(true )
905- val retrieveOptions = new RetrieveOptions
906- // Turn downloading and logging off for testing
907- if (isTest) {
908- resolveOptions.setDownload(false )
909- resolveOptions.setLog(LogOptions .LOG_QUIET )
910- retrieveOptions.setLog(LogOptions .LOG_QUIET )
911- } else {
912- resolveOptions.setDownload(true )
913- }
914921
915- // A Module descriptor must be specified. Entries are dummy strings
916- val md = getModuleDescriptor
917- md.setDefaultConf(ivyConfName)
922+ // A Module descriptor must be specified. Entries are dummy strings
923+ val md = getModuleDescriptor
924+ md.setDefaultConf(ivyConfName)
918925
919- // Add exclusion rules for Spark and Scala Library
920- addExclusionRules(ivySettings, ivyConfName, md)
921- // add all supplied maven artifacts as dependencies
922- addDependenciesToIvy(md, artifacts, ivyConfName)
926+ // Add exclusion rules for Spark and Scala Library
927+ addExclusionRules(ivySettings, ivyConfName, md)
928+ // add all supplied maven artifacts as dependencies
929+ addDependenciesToIvy(md, artifacts, ivyConfName)
923930
924- // resolve dependencies
925- val rr : ResolveReport = ivy.resolve(md, resolveOptions)
926- if (rr.hasError) {
927- throw new RuntimeException (rr.getAllProblemMessages.toString)
931+ // resolve dependencies
932+ val rr : ResolveReport = ivy.resolve(md, resolveOptions)
933+ if (rr.hasError) {
934+ throw new RuntimeException (rr.getAllProblemMessages.toString)
935+ }
936+ // retrieve all resolved dependencies
937+ ivy.retrieve(rr.getModuleDescriptor.getModuleRevisionId,
938+ packagesDirectory.getAbsolutePath + File .separator +
939+ " [organization]_[artifact]-[revision].[ext]" ,
940+ retrieveOptions.setConfs(Array (ivyConfName)))
941+ resolveDependencyPaths(rr.getArtifacts.toArray, packagesDirectory)
942+ } finally {
943+ System .setOut(sysOut)
928944 }
929- // retrieve all resolved dependencies
930- ivy.retrieve(rr.getModuleDescriptor.getModuleRevisionId,
931- packagesDirectory.getAbsolutePath + File .separator +
932- " [organization]_[artifact]-[revision].[ext]" ,
933- retrieveOptions.setConfs(Array (ivyConfName)))
934- System .setOut(sysOut)
935- resolveDependencyPaths(rr.getArtifacts.toArray, packagesDirectory)
936945 }
937946 }
938947}
0 commit comments