14 changes: 14 additions & 0 deletions bin/compute-classpath.sh
@@ -32,6 +32,12 @@ CLASSPATH="$SPARK_CLASSPATH:$SPARK_SUBMIT_CLASSPATH:$FWDIR/conf"

 ASSEMBLY_DIR="$FWDIR/assembly/target/scala-$SCALA_VERSION"

+if [ -n "${JAVA_HOME}" ]; then
+  JAR_CMD="${JAVA_HOME}/bin/jar"
+else
+  JAR_CMD="jar"
+fi
+
 # First check if we have a dependencies jar. If so, include binary classes with the deps jar
 if [ -f "$ASSEMBLY_DIR"/spark-assembly*hadoop*-deps.jar ]; then
   CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/classes"
@@ -54,6 +60,14 @@ else
   else
     ASSEMBLY_JAR=`ls "$ASSEMBLY_DIR"/spark-assembly*hadoop*.jar`
   fi
+  jar_error_check=$($JAR_CMD -tf $ASSEMBLY_JAR org/apache/spark/SparkContext 2>&1)
+  if [[ "$jar_error_check" =~ "invalid CEN header" ]]; then
+    echo "Loading Spark jar with '$JAR_CMD' failed. "
+    echo "This is likely because Spark was compiled with Java 7 and run "
+    echo "with Java 6. (see SPARK-1703). Please use Java 7 to run Spark "
+    echo "or build Spark with Java 6."
+    exit 1
+  fi
Contributor comment:
I realize this is already merged, but it looks like the jar error check is only tested on the assembly jar (not on the deps assembly jar). It might be good to check it in both cases.

CLASSPATH="$CLASSPATH:$ASSEMBLY_JAR"
fi

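One way to act on the contributor's suggestion above, sketched only as an illustration: factor the SPARK-1703 check into a helper and run it on whichever jar ends up on the classpath. The verify_jar name and the DEPS_ASSEMBLY_JAR variable are assumptions for the sketch, not code from this PR.

# Illustrative helper: run the Java 6/7 jar compatibility check on any jar.
verify_jar () {
  local jar="$1"
  local check
  check=$("$JAR_CMD" -tf "$jar" org/apache/spark/SparkContext 2>&1)
  if [[ "$check" =~ "invalid CEN header" ]]; then
    echo "Loading Spark jar with '$JAR_CMD' failed. "
    echo "This is likely because Spark was compiled with Java 7 and run "
    echo "with Java 6. (see SPARK-1703). Please use Java 7 to run Spark "
    echo "or build Spark with Java 6."
    exit 1
  fi
}

verify_jar "$DEPS_ASSEMBLY_JAR"   # hypothetical: deps-jar branch
verify_jar "$ASSEMBLY_JAR"        # assembly-jar branch, as checked above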
9 changes: 8 additions & 1 deletion bin/spark-class
@@ -138,7 +138,14 @@ if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
 fi

 # Compute classpath using external script
-CLASSPATH=`$FWDIR/bin/compute-classpath.sh`
+classpath_output=$($FWDIR/bin/compute-classpath.sh)
+if [[ "$?" != "0" ]]; then
+  echo "$classpath_output"
+  exit 1
+else
+  CLASSPATH=$classpath_output
+fi
+
 if [[ "$1" =~ org.apache.spark.tools.* ]]; then
   CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"
 fi
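A note on the pattern in this hunk: in bash, a plain assignment does not reset the exit status, so "$?" immediately after classpath_output=$(...) is the exit status of compute-classpath.sh itself. A minimal generic sketch of the same idiom (the helper name is made up):

# Capture both the output and the exit status of a helper script,
# and propagate failures instead of continuing with bogus output.
output=$(./some-helper.sh)   # hypothetical helper
status=$?
if [ "$status" -ne 0 ]; then
  echo "$output" >&2
  exit "$status"
fi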
14 changes: 14 additions & 0 deletions make-distribution.sh
@@ -50,6 +50,20 @@ if [ $? == -1 ] ;then
    exit -1;
 fi

+if [ -z "${JAVA_HOME}" ]; then
+  echo "Error: JAVA_HOME is not set, cannot proceed."
+  exit -1
+fi
+
+JAVA_CMD=$JAVA_HOME/bin/java
+JAVA_VERSION=$($JAVA_CMD -version 2>&1)
+if ! [[ "$JAVA_VERSION" =~ "1.6" ]]; then
+  echo "Error: JAVA_HOME must point to a JDK 6 installation (see SPARK-1703)."
+  echo "Output from 'java -version' was:"
+  echo "$JAVA_VERSION"
+  exit -1
+fi
+
 # Initialize defaults
 SPARK_HADOOP_VERSION=1.0.4
 SPARK_YARN=false

Reviewer comment on the JAVA_VERSION check:
require JDK 1.6?? this doesn't seem to be in line with the spirit of SPARK-1703
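For reference, the =~ "1.6" test does a literal substring match against the version banner, which java prints on stderr (hence the 2>&1 above). Illustrative Java 6 output; exact build strings vary by vendor and update:

$ $JAVA_HOME/bin/java -version
java version "1.6.0_45"
Java(TM) SE Runtime Environment (build 1.6.0_45-b06)
Java HotSpot(TM) 64-Bit Server VM (build 20.45-b01, mixed mode)

Any line containing the substring 1.6 satisfies the check.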