3 files changed: +36 −1 lines changed

@@ -32,6 +32,12 @@ CLASSPATH="$SPARK_CLASSPATH:$SPARK_SUBMIT_CLASSPATH:$FWDIR/conf"
 
 ASSEMBLY_DIR="$FWDIR/assembly/target/scala-$SCALA_VERSION"
 
+if [ -n "${JAVA_HOME}" ]; then
+  JAR_CMD="${JAVA_HOME}/bin/jar"
+else
+  JAR_CMD="jar"
+fi
+
 # First check if we have a dependencies jar. If so, include binary classes with the deps jar
 if [ -f "$ASSEMBLY_DIR"/spark-assembly*hadoop*-deps.jar ]; then
   CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/classes"

@@ -54,6 +60,14 @@
   else
     ASSEMBLY_JAR=`ls "$ASSEMBLY_DIR"/spark-assembly*hadoop*.jar`
   fi
+  jar_error_check=$($JAR_CMD -tf $ASSEMBLY_JAR org/apache/spark/SparkContext 2>&1)
+  if [[ "$jar_error_check" =~ "invalid CEN header" ]]; then
+    echo "Loading Spark jar with '$JAR_CMD' failed. "
+    echo "This is likely because Spark was compiled with Java 7 and run "
+    echo "with Java 6. (see SPARK-1703). Please use Java 7 to run Spark "
+    echo "or build Spark with Java 6."
+    exit 1
+  fi
   CLASSPATH="$CLASSPATH:$ASSEMBLY_JAR"
 fi
 
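The check above leans on the jar tool's own error output: a Java 6 jar command can fail to read an assembly jar built by a Java 7 toolchain, reporting "invalid CEN header", which is the symptom behind SPARK-1703. A minimal standalone sketch of the same probe (not part of the patch; the assembly-jar path is a placeholder):

#!/usr/bin/env bash
# Sketch: probe an assembly jar the way the patch does, preferring the jar
# tool under JAVA_HOME so the probe matches the JRE that will run Spark.
if [ -n "${JAVA_HOME}" ]; then
  JAR_CMD="${JAVA_HOME}/bin/jar"
else
  JAR_CMD="jar"
fi
ASSEMBLY_JAR="/path/to/spark-assembly-hadoop.jar"   # placeholder path
probe=$("$JAR_CMD" -tf "$ASSEMBLY_JAR" 2>&1)
# A Java 6 jar tool fails on Java 7-built assemblies with this message.
if [[ "$probe" =~ "invalid CEN header" ]]; then
  echo "Assembly jar was built with Java 7 but is being read with Java 6 (SPARK-1703)." >&2
  exit 1
fi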

@@ -138,7 +138,14 @@ if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
 fi
 
 # Compute classpath using external script
-CLASSPATH=`$FWDIR/bin/compute-classpath.sh`
+CLASSPATH_OUTPUT=$($FWDIR/bin/compute-classpath.sh)
+if [[ "$?" != "0" ]]; then
+  echo "$CLASSPATH_OUTPUT"
+  exit 1
+else
+  CLASSPATH=$CLASSPATH_OUTPUT
+fi
+
 if [[ "$1" =~ org.apache.spark.tools.* ]]; then
   CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"
 fi
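This replaces the plain backtick call so that a non-zero exit from compute-classpath.sh (for example, the new jar check above) stops the launcher and surfaces the helper's error message instead of continuing with a broken CLASSPATH. The pattern works because, in bash, an assignment whose value is a command substitution exits with the status of the substituted command, so checking "$?" on the next line reflects the helper's exit code. A minimal sketch of the pattern, using a hypothetical helper script name:

#!/usr/bin/env bash
# Sketch: propagate a helper script's failure to the caller.
# ./print-classpath.sh is a hypothetical stand-in for compute-classpath.sh.
output=$(./print-classpath.sh)
if [[ "$?" != "0" ]]; then
  echo "$output"   # surface whatever error text the helper printed
  exit 1
fi
echo "classpath is: $output"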

@@ -50,6 +50,20 @@ if [ $? == -1 ] ;then
     exit -1;
 fi
 
+if [ -z "${JAVA_HOME}" ]; then
+  echo "Error: JAVA_HOME is not set, cannot proceed."
+  exit -1
+fi
+
+JAVA_CMD=$JAVA_HOME/bin/java
+JAVA_VERSION=$($JAVA_CMD -version 2>&1)
+if ! [[ "$JAVA_VERSION" =~ "1.6" ]]; then
+  echo "Error: Java version was not 1.6. Spark must be compiled with JDK 1.6 "
+  echo "(see SPARK-1703). Output from 'java -version' was:"
+  echo "$JAVA_VERSION"
+  exit -1
+fi
+
 # Initialize defaults
 SPARK_HADOOP_VERSION=1.0.4
 SPARK_YARN=false
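The build-time check reads the output of "java -version", which is written to stderr (hence the 2>&1), and does a literal substring match against "1.6" (for example, java version "1.6.0_45"). A standalone sketch of the same probe, run outside the build script:

#!/usr/bin/env bash
# Sketch: verify that JAVA_HOME points at a 1.6 JDK before building.
if [ -z "${JAVA_HOME}" ]; then
  echo "JAVA_HOME is not set" >&2
  exit 1
fi
version_output=$("${JAVA_HOME}/bin/java" -version 2>&1)
# With a quoted right-hand side, =~ performs a literal substring match.
if [[ "$version_output" =~ "1.6" ]]; then
  echo "Found a 1.6 JDK; the resulting build will run on Java 6 and 7."
else
  echo "Not a 1.6 JDK; a Java 7 build may not run on Java 6 (SPARK-1703)." >&2
  exit 1
fi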