File tree Expand file tree Collapse file tree 2 files changed +10
-2
lines changed
yarn/src/main/scala/org/apache/spark/deploy/yarn Expand file tree Collapse file tree 2 files changed +10
-2
lines changed Original file line number Diff line number Diff line change @@ -73,6 +73,9 @@ private[spark] class Client(
7373
7474 private val isClusterMode = sparkConf.get("spark.submit.deployMode", "client") == "cluster"
7575
76+ // Default java opts for driver
77+ private val DEFAULT_DRIVER_JAVA_OPTS = "-XX:+PrintGCDetails -XX:+PrintGCDateStamps"
78+
7679 // AM related configurations
7780 private val amMemory = if (isClusterMode) {
7881 sparkConf.get(DRIVER_MEMORY ).toInt
@@ -959,7 +962,9 @@ private[spark] class Client(
959962 " -Djava.net.preferIPv4Stack=true" )
960963 javaOpts ++= Utils .splitCommandString(adminOpts).map(YarnSparkHadoopUtil .escapeForShell)
961964
962- val driverOpts = sparkConf.get(DRIVER_JAVA_OPTIONS).orElse(sys.env.get("SPARK_JAVA_OPTS"))
965+ val driverOpts = sparkConf.get(DRIVER_JAVA_OPTIONS)
966+ .orElse(sys.env.get("SPARK_JAVA_OPTS"))
967+ .orElse(Some(DEFAULT_DRIVER_JAVA_OPTS))
963968 driverOpts.foreach { opts =>
964969 javaOpts ++= Utils .splitCommandString(opts).map(YarnSparkHadoopUtil .escapeForShell)
965970 }
Original file line number Diff line number Diff line change @@ -59,6 +59,9 @@ private[yarn] class ExecutorRunnable(
5959 var rpc : YarnRPC = YarnRPC .create(conf)
6060 var nmClient : NMClient = _
6161
62+ // Default java opts for executor
63+ private val DEFAULT_EXECUTOR_JAVA_OPTS = "-XX:+PrintGCDetails -XX:+PrintGCDateStamps"
64+
6265 def run (): Unit = {
6366 logDebug(" Starting Executor Container" )
6467 nmClient = NMClient .createNMClient()
@@ -146,7 +149,7 @@ private[yarn] class ExecutorRunnable(
146149 javaOpts ++= Utils .splitCommandString(adminOpts).map(YarnSparkHadoopUtil .escapeForShell)
147150
148151 // Set extra Java options for the executor, if defined
149- sparkConf.get(EXECUTOR_JAVA_OPTIONS ).foreach { opts =>
152 sparkConf.get(EXECUTOR_JAVA_OPTIONS).orElse(Some(DEFAULT_EXECUTOR_JAVA_OPTS)).foreach { opts =>
150153 javaOpts ++= Utils .splitCommandString(opts).map(YarnSparkHadoopUtil .escapeForShell)
151154 }
152155 sys.env.get(" SPARK_JAVA_OPTS" ).foreach { opts =>
You can’t perform that action at this time.
0 commit comments