@@ -25,31 +25,48 @@ SCALA_VERSION=2.10
 # Figure out where Spark is installed
 FWDIR="$(cd `dirname $0`/..; pwd)"
 
-# Load environment variables from conf/spark-env.sh, if it exists
-if [ -e "$FWDIR/conf/spark-env.sh" ] ; then
-  . $FWDIR/conf/spark-env.sh
-fi
+. $FWDIR/bin/load-spark-env.sh
 
 # Build up classpath
 CLASSPATH="$SPARK_CLASSPATH:$FWDIR/conf"
 
+# Support for interacting with Hive. Since hive pulls in a lot of dependencies that might break
+# existing Spark applications, it is not included in the standard spark assembly. Instead, we only
+# include it in the classpath if the user has explicitly requested it by running "sbt hive/assembly"
+# Hopefully we will find a way to avoid uber-jars entirely and deploy only the needed packages in
+# the future.
+if [ -f "$FWDIR"/sql/hive/target/scala-$SCALA_VERSION/spark-hive-assembly-*.jar ]; then
+
+  # Datanucleus jars do not work if only included in the uberjar as plugin.xml metadata is lost.
+  DATANUCLEUSJARS=$(JARS=("$FWDIR/lib_managed/jars"/datanucleus-*.jar); IFS=:; echo "${JARS[*]}")
+  CLASSPATH=$CLASSPATH:$DATANUCLEUSJARS
+
+  ASSEMBLY_DIR="$FWDIR/sql/hive/target/scala-$SCALA_VERSION/"
+else
+  ASSEMBLY_DIR="$FWDIR/assembly/target/scala-$SCALA_VERSION/"
+fi
+
 # First check if we have a dependencies jar. If so, include binary classes with the deps jar
-if [ -f "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*-deps.jar ]; then
+if [ -f "$ASSEMBLY_DIR"/spark-assembly*hadoop*-deps.jar ]; then
   CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/classes"
   CLASSPATH="$CLASSPATH:$FWDIR/repl/target/scala-$SCALA_VERSION/classes"
   CLASSPATH="$CLASSPATH:$FWDIR/mllib/target/scala-$SCALA_VERSION/classes"
   CLASSPATH="$CLASSPATH:$FWDIR/bagel/target/scala-$SCALA_VERSION/classes"
   CLASSPATH="$CLASSPATH:$FWDIR/graphx/target/scala-$SCALA_VERSION/classes"
   CLASSPATH="$CLASSPATH:$FWDIR/streaming/target/scala-$SCALA_VERSION/classes"
+  CLASSPATH="$CLASSPATH:$FWDIR/tools/target/scala-$SCALA_VERSION/classes"
+  CLASSPATH="$CLASSPATH:$FWDIR/sql/catalyst/target/scala-$SCALA_VERSION/classes"
+  CLASSPATH="$CLASSPATH:$FWDIR/sql/core/target/scala-$SCALA_VERSION/classes"
+  CLASSPATH="$CLASSPATH:$FWDIR/sql/hive/target/scala-$SCALA_VERSION/classes"
 
-  DEPS_ASSEMBLY_JAR=`ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*-deps.jar`
+  DEPS_ASSEMBLY_JAR=`ls "$ASSEMBLY_DIR"/spark*-assembly*hadoop*-deps.jar`
   CLASSPATH="$CLASSPATH:$DEPS_ASSEMBLY_JAR"
 else
   # Else use spark-assembly jar from either RELEASE or assembly directory
   if [ -f "$FWDIR/RELEASE" ]; then
-    ASSEMBLY_JAR=`ls "$FWDIR"/jars/spark-assembly*.jar`
+    ASSEMBLY_JAR=`ls "$FWDIR"/jars/spark*-assembly*.jar`
   else
-    ASSEMBLY_JAR=`ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar`
+    ASSEMBLY_JAR=`ls "$ASSEMBLY_DIR"/spark*-assembly*hadoop*.jar`
   fi
   CLASSPATH="$CLASSPATH:$ASSEMBLY_JAR"
 fi
@@ -62,6 +79,9 @@ if [[ $SPARK_TESTING == 1 ]]; then
   CLASSPATH="$CLASSPATH:$FWDIR/bagel/target/scala-$SCALA_VERSION/test-classes"
   CLASSPATH="$CLASSPATH:$FWDIR/graphx/target/scala-$SCALA_VERSION/test-classes"
   CLASSPATH="$CLASSPATH:$FWDIR/streaming/target/scala-$SCALA_VERSION/test-classes"
+  CLASSPATH="$CLASSPATH:$FWDIR/sql/catalyst/target/scala-$SCALA_VERSION/test-classes"
+  CLASSPATH="$CLASSPATH:$FWDIR/sql/core/target/scala-$SCALA_VERSION/test-classes"
+  CLASSPATH="$CLASSPATH:$FWDIR/sql/hive/target/scala-$SCALA_VERSION/test-classes"
 fi
 
 # Add hadoop conf dir if given -- otherwise FileSystem.*, etc fail !
0 commit comments