Skip to content

Commit a396eda

Browse files
committed
Nullify my own hard work to simplify bash
:(
1 parent 0effa1e commit a396eda

File tree

8 files changed

+34
-410
lines changed

8 files changed

+34
-410
lines changed

bin/run-tests

Lines changed: 0 additions & 234 deletions
This file was deleted.

bin/spark-class

Lines changed: 20 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -30,9 +30,6 @@ FWDIR="$(cd `dirname $0`/..; pwd)"
3030
# Export this as SPARK_HOME
3131
export SPARK_HOME="$FWDIR"
3232

33-
# Load utility functions
34-
. "$SPARK_HOME/bin/utils.sh"
35-
3633
. $FWDIR/bin/load-spark-env.sh
3734

3835
if [ -z "$1" ]; then
@@ -112,10 +109,6 @@ if [ -e "$FWDIR/conf/java-opts" ] ; then
112109
JAVA_OPTS="$JAVA_OPTS `cat $FWDIR/conf/java-opts`"
113110
fi
114111

115-
# Split JAVA_OPTS properly to handle whitespace, double quotes and backslashes
116-
# This exports the split java options into SPLIT_JAVA_OPTS
117-
split_java_options "$JAVA_OPTS"
118-
119112
# Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in CommandUtils.scala!
120113

121114
TOOLS_DIR="$FWDIR"/tools
@@ -156,13 +149,27 @@ if $cygwin; then
156149
fi
157150
export CLASSPATH
158151

159-
if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then
160-
# Put quotes around system properties in case they contain spaces for readability
161-
# This exports the resulting list of java opts into QUOTED_JAVA_OPTS
162-
quote_java_property "${SPLIT_JAVA_OPTS[@]}"
152+
if [ -n "$SPARK_PRINT_LAUNCH_COMMAND" ]; then
163153
echo -n "Spark Command: " 1>&2
164-
echo "$RUNNER" -cp "$CLASSPATH" "${QUOTED_JAVA_OPTS[@]}" "$@" 1>&2
154+
echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@" 1>&2
165155
echo -e "========================================\n" 1>&2
166156
fi
167157

168-
exec "$RUNNER" -cp "$CLASSPATH" "${SPLIT_JAVA_OPTS[@]}" "$@"
158+
# In Spark submit client mode, the driver is launched in the same JVM as Spark submit itself.
159+
# Here we must parse the properties file for relevant "spark.driver.*" configs for launching
160+
# the driver JVM itself.
161+
162+
if [ -n "$SPARK_SUBMIT_CLIENT_MODE" ]; then
163+
exec "$RUNNER" org.apache.spark.deploy.SparkClassLauncher \
164+
"$PROPERTIES_FILE" \
165+
"$RUNNER" \
166+
"$CLASSPATH" \
167+
"$SPARK_SUBMIT_LIBRARY_PATH" \
168+
"$JAVA_OPTS" \
169+
"$OUR_JAVA_MEM" \
170+
true \
171+
"$@"
172+
else
173+
exec "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
174+
fi
175+

bin/spark-submit

Lines changed: 8 additions & 62 deletions
Original file line numberDiff line numberDiff line change
@@ -20,9 +20,6 @@
2020
export SPARK_HOME="$(cd `dirname $0`/..; pwd)"
2121
ORIG_ARGS=("$@")
2222

23-
# Load utility functions
24-
. "$SPARK_HOME/bin/utils.sh"
25-
2623
while (($#)); do
2724
if [ "$1" = "--deploy-mode" ]; then
2825
DEPLOY_MODE=$2
@@ -44,68 +41,17 @@ DEPLOY_MODE=${DEPLOY_MODE:-"client"}
4441
DEFAULT_PROPERTIES_FILE="$SPARK_HOME/conf/spark-defaults.conf"
4542
PROPERTIES_FILE=${PROPERTIES_FILE:-"$DEFAULT_PROPERTIES_FILE"}
4643

47-
unset DRIVER_EXTRA_JAVA_OPTIONS
48-
unset EXECUTOR_EXTRA_JAVA_OPTIONS
44+
# For client mode, the driver will be launched in the same JVM that launches
45+
# SparkSubmit, so we need to read the properties file for any class paths, library
46+
# paths, java options and memory early on. Otherwise, it will be too late by
47+
# the time the JVM has started.
4948

50-
# A few Spark configs must be parsed early on before launching the JVM:
51-
#
52-
# [spark.driver.extra*]
53-
# These configs encode java options, class paths, and library paths
54-
# needed to launch the JVM if we are running Spark in client mode
55-
#
56-
# [spark.*.extraJavaOptions]
57-
# The escaped characters in these configs must be preserved for
58-
# splitting the arguments in Java later. For these configs, we
59-
# export the raw values as environment variables.
60-
#
61-
if [[ -f "$PROPERTIES_FILE" ]]; then
62-
echo "Using properties file $PROPERTIES_FILE." 1>&2
63-
# Parse the properties file here only if these special configs exist
64-
should_parse=$(grep -e "spark.driver.extra*\|spark.*.extraJavaOptions" "$PROPERTIES_FILE")
65-
if [[ -n "$should_parse" ]]; then
66-
# This exports the value of the given key into JAVA_PROPERTY_VALUE
67-
parse_java_property "spark.driver.memory"
68-
DRIVER_MEMORY_CONF="$JAVA_PROPERTY_VALUE"
69-
parse_java_property "spark.driver.extraLibraryPath"
70-
DRIVER_EXTRA_LIBRARY_PATH="$JAVA_PROPERTY_VALUE"
71-
parse_java_property "spark.driver.extraClassPath"
72-
DRIVER_EXTRA_CLASSPATH="$JAVA_PROPERTY_VALUE"
73-
parse_java_property "spark.driver.extraJavaOptions"
74-
DRIVER_EXTRA_JAVA_OPTS="$JAVA_PROPERTY_VALUE"
75-
parse_java_property "spark.executor.extraJavaOptions"
76-
EXECUTOR_EXTRA_JAVA_OPTS="$JAVA_PROPERTY_VALUE"
77-
# Export these for SparkSubmitArguments.scala to consume
78-
if [[ -n "DRIVER_EXTRA_JAVA_OPTS" ]]; then
79-
export DRIVER_EXTRA_JAVA_OPTS
80-
fi
81-
if [[ -n "EXECUTOR_EXTRA_JAVA_OPTS" ]]; then
82-
export EXECUTOR_EXTRA_JAVA_OPTS
83-
fi
84-
fi
85-
elif [[ "$PROPERTIES_FILE" != "$DEFAULT_PROPERTIES_FILE" ]]; then
86-
echo "Warning: properties file $PROPERTIES_FILE does not exist." 1>&2
87-
fi
88-
89-
# For client mode, the driver will be launched in the JVM that launches
90-
# SparkSubmit, so we need to handle the class paths, java options, and
91-
# memory preemptively in bash. Otherwise, it will be too late by the
92-
# time the JVM has started.
93-
94-
if [[ $DEPLOY_MODE == "client" ]]; then
95-
if [[ -n "$DRIVER_EXTRA_JAVA_OPTS" ]]; then
96-
export SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS $DRIVER_EXTRA_JAVA_OPTS"
97-
fi
98-
if [[ -n "$DRIVER_EXTRA_CLASSPATH" ]]; then
99-
export SPARK_SUBMIT_CLASSPATH="$SPARK_SUBMIT_CLASSPATH:$DRIVER_EXTRA_CLASSPATH"
100-
fi
101-
if [[ -n "$DRIVER_EXTRA_LIBRARY_PATH" ]]; then
102-
export SPARK_SUBMIT_LIBRARY_PATH="$SPARK_SUBMIT_LIBRARY_PATH:$DRIVER_EXTRA_LIBRARY_PATH"
103-
fi
104-
# Favor command line memory over config memory
105-
DRIVER_MEMORY=${DRIVER_MEMORY:-"$DRIVER_MEMORY_CONF"}
106-
if [[ -n "$DRIVER_MEMORY" ]]; then
49+
if [ "$DEPLOY_MODE" == "client" ]; then
50+
if [ -n "$DRIVER_MEMORY" ]; then
10751
export SPARK_DRIVER_MEMORY=$DRIVER_MEMORY
10852
fi
53+
export PROPERTIES_FILE
54+
export SPARK_SUBMIT_CLIENT_MODE=1
10955
fi
11056

11157
exec $SPARK_HOME/bin/spark-class org.apache.spark.deploy.SparkSubmit "${ORIG_ARGS[@]}"

0 commit comments

Comments
 (0)