@@ -880,6 +880,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
 
   def process(settings: Settings): Boolean = savingContextLoader {
     this.settings = settings
+    if (getMaster() == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true")
     createInterpreter()
 
     // sets in to some kind of reader depending on environmental cues
@@ -937,16 +938,9 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
 
   def createSparkContext(): SparkContext = {
     val execUri = System.getenv("SPARK_EXECUTOR_URI")
-    val master = this.master match {
-      case Some(m) => m
-      case None => {
-        val prop = System.getenv("MASTER")
-        if (prop != null) prop else "local"
-      }
-    }
     val jars = SparkILoop.getAddedJars.map(new java.io.File(_).getAbsolutePath)
     val conf = new SparkConf()
-      .setMaster(master)
+      .setMaster(getMaster())
       .setAppName("Spark shell")
       .setJars(jars)
       .set("spark.repl.class.uri", intp.classServer.uri)
@@ -961,6 +955,17 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
     sparkContext
   }
 
+  private def getMaster(): String = {
+    val master = this.master match {
+      case Some(m) => m
+      case None => {
+        val prop = System.getenv("MASTER")
+        if (prop != null) prop else "local"
+      }
+    }
+    master
+  }
+
   /** process command-line arguments and do as they request */
   def process(args: Array[String]): Boolean = {
     val command = new SparkCommandLine(args.toList, msg => echo(msg))
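
For reference, a minimal standalone sketch of the resolution order the new getMaster() helper follows: an explicitly supplied master wins, then the MASTER environment variable, and finally the "local" default, with the yarn-client special case from process(settings) applied afterwards. The object and method names below (MasterResolution, resolveMaster, explicitMaster) are illustrative stand-ins, not part of the patch.

// Sketch only: mirrors the fallback chain in getMaster() above.
object MasterResolution {
  // Explicit master -> MASTER env var -> "local" default.
  def resolveMaster(explicitMaster: Option[String]): String =
    explicitMaster.getOrElse {
      Option(System.getenv("MASTER")).getOrElse("local")
    }

  def main(args: Array[String]): Unit = {
    val master = resolveMaster(explicitMaster = None)
    // Mirrors the yarn-client check added in process(settings).
    if (master == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true")
    println(s"master = $master, SPARK_YARN_MODE = ${System.getProperty("SPARK_YARN_MODE")}")
  }
}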