Skip to content

Commit 5272ce5

Browse files
committed
SPARK_SCALA_VERSION-related bugs.
1 parent 2121071 commit 5272ce5

File tree

5 files changed

+23
-41
lines changed

5 files changed

+23
-41
lines changed

bin/compute-classpath.sh

Lines changed: 0 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -34,24 +34,6 @@ else
3434
CLASSPATH="$CLASSPATH:$FWDIR/conf"
3535
fi
3636

37-
if [ -z "$SPARK_SCALA_VERSION" ]; then
38-
39-
ASSEMBLY_DIR2="$FWDIR/assembly/target/scala-2.11"
40-
ASSEMBLY_DIR1="$FWDIR/assembly/target/scala-2.10"
41-
42-
if [[ -d "$ASSEMBLY_DIR2" && -d "$ASSEMBLY_DIR1" ]]; then
43-
echo -e "Presence of build for both scala versions(SCALA 2.10 and SCALA 2.11) detected." 1>&2
44-
echo -e 'Either clean one of them or, export SPARK_SCALA_VERSION=2.11 in spark-env.sh.' 1>&2
45-
exit 1
46-
fi
47-
48-
if [ -d "$ASSEMBLY_DIR2" ]; then
49-
SPARK_SCALA_VERSION="2.11"
50-
else
51-
SPARK_SCALA_VERSION="2.10"
52-
fi
53-
fi
54-
5537
ASSEMBLY_DIR="$FWDIR/assembly/target/scala-$SPARK_SCALA_VERSION"
5638

5739
if [ -n "$JAVA_HOME" ]; then

bin/load-spark-env.sh

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -36,3 +36,23 @@ if [ -z "$SPARK_ENV_LOADED" ]; then
3636
set +a
3737
fi
3838
fi
39+
40+
# Setting SPARK_SCALA_VERSION if not already set.
41+
42+
if [ -z "$SPARK_SCALA_VERSION" ]; then
43+
44+
ASSEMBLY_DIR2="$FWDIR/assembly/target/scala-2.11"
45+
ASSEMBLY_DIR1="$FWDIR/assembly/target/scala-2.10"
46+
47+
if [[ -d "$ASSEMBLY_DIR2" && -d "$ASSEMBLY_DIR1" ]]; then
48+
echo -e "Presence of build for both scala versions(SCALA 2.10 and SCALA 2.11) detected." 1>&2
49+
echo -e 'Either clean one of them or, export SPARK_SCALA_VERSION=2.11 in spark-env.sh.' 1>&2
50+
exit 1
51+
fi
52+
53+
if [ -d "$ASSEMBLY_DIR2" ]; then
54+
export SPARK_SCALA_VERSION="2.11"
55+
else
56+
export SPARK_SCALA_VERSION="2.10"
57+
fi
58+
fi

bin/pyspark

Lines changed: 1 addition & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -25,23 +25,7 @@ export SPARK_HOME="$FWDIR"
2525

2626
source "$FWDIR/bin/utils.sh"
2727

28-
if [ -z "$SPARK_SCALA_VERSION" ]; then
29-
30-
ASSEMBLY_DIR2="$FWDIR/assembly/target/scala-2.11"
31-
ASSEMBLY_DIR1="$FWDIR/assembly/target/scala-2.10"
32-
33-
if [[ -d "$ASSEMBLY_DIR2" && -d "$ASSEMBLY_DIR1" ]]; then
34-
echo -e "Presence of build for both scala versions(SCALA 2.10 and SCALA 2.11) detected." 1>&2
35-
echo -e 'Either clean one of them or, export SPARK_SCALA_VERSION=2.11 in spark-env.sh.' 1>&2
36-
exit 1
37-
fi
38-
39-
if [ -d "$ASSEMBLY_DIR2" ]; then
40-
SPARK_SCALA_VERSION="2.11"
41-
else
42-
SPARK_SCALA_VERSION="2.10"
43-
fi
44-
fi
28+
source "$FWDIR"/bin/load-spark-env.sh
4529

4630
function usage() {
4731
echo "Usage: ./bin/pyspark [options]" 1>&2
@@ -64,8 +48,6 @@ if [ ! -f "$FWDIR/RELEASE" ]; then
6448
fi
6549
fi
6650

67-
. "$FWDIR"/bin/load-spark-env.sh
68-
6951
# In Spark <= 1.1, setting IPYTHON=1 would cause the driver to be launched using the `ipython`
7052
# executable, while the worker would still be launched using PYSPARK_PYTHON.
7153
#

bin/run-example

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,12 +17,12 @@
1717
# limitations under the License.
1818
#
1919

20-
SPARK_SCALA_VERSION=2.10
21-
2220
FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
2321
export SPARK_HOME="$FWDIR"
2422
EXAMPLES_DIR="$FWDIR"/examples
2523

24+
. "$FWDIR"/bin/load-spark-env.sh
25+
2626
if [ -n "$1" ]; then
2727
EXAMPLE_CLASS="$1"
2828
shift

bin/spark-class

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -24,8 +24,6 @@ case "`uname`" in
2424
CYGWIN*) cygwin=true;;
2525
esac
2626

27-
SPARK_SCALA_VERSION=2.10
28-
2927
# Figure out where Spark is installed
3028
FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
3129

0 commit comments

Comments (0)