2020export SPARK_HOME="$(cd `dirname $0`/..; pwd)"
2121ORIG_ARGS=("$@")
2222
23- # Load utility functions
24- . " $SPARK_HOME /bin/utils.sh"
25-
2623while (( $# )); do
2724  if [ "$1" = "--deploy-mode" ]; then
2825 DEPLOY_MODE=$2
@@ -44,68 +41,17 @@ DEPLOY_MODE=${DEPLOY_MODE:-"client"}
4441DEFAULT_PROPERTIES_FILE="$SPARK_HOME/conf/spark-defaults.conf"
4542PROPERTIES_FILE=${PROPERTIES_FILE:-"$DEFAULT_PROPERTIES_FILE"}
4643
47- unset DRIVER_EXTRA_JAVA_OPTIONS
48- unset EXECUTOR_EXTRA_JAVA_OPTIONS
44+ # For client mode, the driver will be launched in the same JVM that launches
45+ # SparkSubmit, so we need to read the properties file for any class paths, library
46+ # paths, java options and memory early on. Otherwise, it will be too late by
47+ # the time the JVM has started.
4948
50- # A few Spark configs must be parsed early on before launching the JVM:
51- #
52- # [spark.driver.extra*]
53- # These configs encode java options, class paths, and library paths
54- # needed to launch the JVM if we are running Spark in client mode
55- #
56- # [spark.*.extraJavaOptions]
57- # The escaped characters in these configs must be preserved for
58- # splitting the arguments in Java later. For these configs, we
59- # export the raw values as environment variables.
60- #
61- if [[ -f " $PROPERTIES_FILE " ]]; then
62- echo " Using properties file $PROPERTIES_FILE ." 1>&2
63- # Parse the properties file here only if these special configs exist
64- should_parse=$( grep -e " spark.driver.extra*\|spark.*.extraJavaOptions" " $PROPERTIES_FILE " )
65- if [[ -n " $should_parse " ]]; then
66- # This exports the value of the given key into JAVA_PROPERTY_VALUE
67- parse_java_property " spark.driver.memory"
68- DRIVER_MEMORY_CONF=" $JAVA_PROPERTY_VALUE "
69- parse_java_property " spark.driver.extraLibraryPath"
70- DRIVER_EXTRA_LIBRARY_PATH=" $JAVA_PROPERTY_VALUE "
71- parse_java_property " spark.driver.extraClassPath"
72- DRIVER_EXTRA_CLASSPATH=" $JAVA_PROPERTY_VALUE "
73- parse_java_property " spark.driver.extraJavaOptions"
74- DRIVER_EXTRA_JAVA_OPTS=" $JAVA_PROPERTY_VALUE "
75- parse_java_property " spark.executor.extraJavaOptions"
76- EXECUTOR_EXTRA_JAVA_OPTS=" $JAVA_PROPERTY_VALUE "
77- # Export these for SparkSubmitArguments.scala to consume
78- if [[ -n " DRIVER_EXTRA_JAVA_OPTS" ]]; then
79- export DRIVER_EXTRA_JAVA_OPTS
80- fi
81- if [[ -n " EXECUTOR_EXTRA_JAVA_OPTS" ]]; then
82- export EXECUTOR_EXTRA_JAVA_OPTS
83- fi
84- fi
85- elif [[ " $PROPERTIES_FILE " != " $DEFAULT_PROPERTIES_FILE " ]]; then
86- echo " Warning: properties file $PROPERTIES_FILE does not exist." 1>&2
87- fi
88-
89- # For client mode, the driver will be launched in the JVM that launches
90- # SparkSubmit, so we need to handle the class paths, java options, and
91- # memory preemptively in bash. Otherwise, it will be too late by the
92- # time the JVM has started.
93-
94- if [[ $DEPLOY_MODE == " client" ]]; then
95- if [[ -n " $DRIVER_EXTRA_JAVA_OPTS " ]]; then
96- export SPARK_SUBMIT_OPTS=" $SPARK_SUBMIT_OPTS $DRIVER_EXTRA_JAVA_OPTS "
97- fi
98- if [[ -n " $DRIVER_EXTRA_CLASSPATH " ]]; then
99- export SPARK_SUBMIT_CLASSPATH=" $SPARK_SUBMIT_CLASSPATH :$DRIVER_EXTRA_CLASSPATH "
100- fi
101- if [[ -n " $DRIVER_EXTRA_LIBRARY_PATH " ]]; then
102- export SPARK_SUBMIT_LIBRARY_PATH=" $SPARK_SUBMIT_LIBRARY_PATH :$DRIVER_EXTRA_LIBRARY_PATH "
103- fi
104- # Favor command line memory over config memory
105- DRIVER_MEMORY=${DRIVER_MEMORY:- " $DRIVER_MEMORY_CONF " }
106- if [[ -n " $DRIVER_MEMORY " ]]; then
49+ if [ "$DEPLOY_MODE" == "client" ]; then
50+   if [ -n "$DRIVER_MEMORY" ]; then
10751 export SPARK_DRIVER_MEMORY=$DRIVER_MEMORY
10852 fi
53+ export PROPERTIES_FILE
54+ export SPARK_SUBMIT_CLIENT_MODE=1
10955fi
11056
11157exec $SPARK_HOME/bin/spark-class org.apache.spark.deploy.SparkSubmit "${ORIG_ARGS[@]}"
0 commit comments