17 changes: 14 additions & 3 deletions .travis.yml
@@ -28,27 +28,38 @@ before_script:
 -
 
 script:
-# spark 1.4
+# spark 1.5
 - mvn package -Pbuild-distr -Phadoop-2.3 -Ppyspark -B
-- ./testing/startSparkCluster.sh 1.4.0 2.3
+- ./testing/startSparkCluster.sh 1.5.1 2.3
+- echo "export SPARK_HOME=`pwd`/spark-1.5.1-bin-hadoop2.3" > conf/zeppelin-env.sh
 - mvn verify -Pusing-packaged-distr -Phadoop-2.3 -Ppyspark -B
-- ./testing/stopSparkCluster.sh 1.4.0 2.3
+- ./testing/stopSparkCluster.sh 1.5.1 2.3
+# spark 1.4
+- rm -rf `pwd`/interpreter/spark
+- mvn package -DskipTests -Pspark-1.4 -Phadoop-2.3 -Ppyspark -B -pl 'zeppelin-interpreter,spark-dependencies,spark'
+- ./testing/startSparkCluster.sh 1.4.1 2.3
+- echo "export SPARK_HOME=`pwd`/spark-1.4.1-bin-hadoop2.3" > conf/zeppelin-env.sh
+- mvn package -Pspark-1.4 -Phadoop-2.3 -B -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false
+- ./testing/stopSparkCluster.sh 1.4.1 2.3
 # spark 1.3
 - rm -rf `pwd`/interpreter/spark
 - mvn package -DskipTests -Pspark-1.3 -Phadoop-2.3 -Ppyspark -B -pl 'zeppelin-interpreter,spark-dependencies,spark'
 - ./testing/startSparkCluster.sh 1.3.1 2.3
+- echo "export SPARK_HOME=`pwd`/spark-1.3.1-bin-hadoop2.3" > conf/zeppelin-env.sh
 - mvn package -Pspark-1.3 -Phadoop-2.3 -B -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false
 - ./testing/stopSparkCluster.sh 1.3.1 2.3
 # spark 1.2
 - rm -rf `pwd`/interpreter/spark
 - mvn package -Pspark-1.2 -Phadoop-2.3 -Ppyspark -B -pl 'zeppelin-interpreter,spark-dependencies,spark'
 - ./testing/startSparkCluster.sh 1.2.1 2.3
+- echo "export SPARK_HOME=`pwd`/spark-1.2.1-bin-hadoop2.3" > conf/zeppelin-env.sh
 - mvn package -Pspark-1.2 -Phadoop-2.3 -B -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false
 - ./testing/stopSparkCluster.sh 1.2.1 2.3
 # spark 1.1
 - rm -rf `pwd`/interpreter/spark
 - mvn package -Pspark-1.1 -Phadoop-2.3 -Ppyspark -B -pl 'zeppelin-interpreter,spark-dependencies,spark'
 - ./testing/startSparkCluster.sh 1.1.1 2.3
+- echo "export SPARK_HOME=`pwd`/spark-1.1.1-bin-hadoop2.3" > conf/zeppelin-env.sh
 - mvn package -Pspark-1.1 -Phadoop-2.3 -B -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false
 - ./testing/stopSparkCluster.sh 1.1.1 2.3
 
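Each Spark block in the .travis.yml above follows the same cycle: rebuild the Spark interpreter modules against the matching Maven profile, start a standalone cluster, point Zeppelin at it through a generated conf/zeppelin-env.sh, run the REST API tests, and tear the cluster down. A rough local sketch of one cycle, using Spark 1.3.1 on Hadoop 2.3 as the example and assuming it is run from the Zeppelin source root with Maven installed:

```bash
#!/bin/bash
# Sketch of one per-version test cycle from .travis.yml (Spark 1.3.1 chosen
# as the example; swap the profile and version for other Spark releases).

# Remove the previously built Spark interpreter so the next build is clean.
rm -rf "$(pwd)/interpreter/spark"

# Build only the Spark-related modules against the matching profile.
mvn package -DskipTests -Pspark-1.3 -Phadoop-2.3 -Ppyspark -B \
  -pl 'zeppelin-interpreter,spark-dependencies,spark'

# Start a standalone Spark cluster and point Zeppelin at it via conf/zeppelin-env.sh.
./testing/startSparkCluster.sh 1.3.1 2.3
echo "export SPARK_HOME=$(pwd)/spark-1.3.1-bin-hadoop2.3" > conf/zeppelin-env.sh

# Run the REST API tests against that cluster.
mvn package -Pspark-1.3 -Phadoop-2.3 -B \
  -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' \
  -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false

# Tear the cluster down again.
./testing/stopSparkCluster.sh 1.3.1 2.3
```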
1 change: 1 addition & 0 deletions pom.xml
@@ -456,6 +456,7 @@
 <exclude>STYLE.md</exclude>
 <exclude>Roadmap.md</exclude>
 <exclude>conf/interpreter.json</exclude>
+<exclude>conf/zeppelin-env.sh</exclude>
 <exclude>spark-*-bin*/**</exclude>
 </excludes>
 </configuration>
18 changes: 15 additions & 3 deletions testing/startSparkCluster.sh
@@ -31,7 +31,19 @@ ZEPPELIN_HOME="$(cd "${FWDIR}/.."; pwd)"
 export SPARK_HOME=${ZEPPELIN_HOME}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}
 echo "SPARK_HOME is ${SPARK_HOME} "
 if [ ! -d "${SPARK_HOME}" ]; then
-wget -q http://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz
+echo "${SPARK_VERSION}" | grep "^1.[12].[0-9]" > /dev/null
+if [ $? -eq 0 ]; then
+# spark 1.1.x and spark 1.2.x can be downloaded from archive
+wget http://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz
+else
+# spark 1.3.x and later can be downloaded from mirror
+# get download address from mirror
+MIRROR_INFO=$(curl -s "http://www.apache.org/dyn/closer.cgi/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz?asjson=1")
+
+PREFFERED=$(echo "${MIRROR_INFO}" | grep preferred | sed 's/[^"]*.preferred.: .\([^"]*\).*/\1/g')
+PATHINFO=$(echo "${MIRROR_INFO}" | grep path_info | sed 's/[^"]*.path_info.: .\([^"]*\).*/\1/g')
+wget "${PREFFERED}${PATHINFO}"
+fi
 tar zxf spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz
 fi
 
@@ -46,8 +58,8 @@ export SPARK_MASTER_WEBUI_PORT=7072
 export SPARK_WORKER_WEBUI_PORT=8082
 ${SPARK_HOME}/sbin/start-master.sh
 
-echo ${SPARK_VERSION} | grep "^1.4" > /dev/null
-if [ $? -ne 0 ]; then # spark 1.3 or prior
+echo ${SPARK_VERSION} | grep "^1.[123].[0-9]" > /dev/null
+if [ $? -eq 0 ]; then # spark 1.3 or prior
 ${SPARK_HOME}/sbin/start-slave.sh 1 `hostname`:${SPARK_MASTER_PORT}
 else
 ${SPARK_HOME}/sbin/start-slave.sh spark://`hostname`:7071
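The mirror lookup added above uses Apache's closer.cgi endpoint: with ?asjson=1 it returns a JSON document whose preferred field carries a mirror base URL and whose path_info field carries the artifact path, and the script concatenates the two into the download URL. A small sketch of the same grep/sed extraction run against a hand-written sample response; the field names come from the script itself, while the sample values are invented:

```bash
#!/bin/bash
# Hand-written sample shaped like a closer.cgi?asjson=1 response; only the
# two fields the script reads are shown, and the values are made up.
MIRROR_INFO='{
  "path_info": "spark/spark-1.5.1/spark-1.5.1-bin-hadoop2.3.tgz",
  "preferred": "http://mirror.example.org/apache/"
}'

# Same extraction as startSparkCluster.sh: grab the line containing the key,
# then keep the first quoted value that follows the colon.
PREFFERED=$(echo "${MIRROR_INFO}" | grep preferred | sed 's/[^"]*.preferred.: .\([^"]*\).*/\1/g')
PATHINFO=$(echo "${MIRROR_INFO}" | grep path_info | sed 's/[^"]*.path_info.: .\([^"]*\).*/\1/g')

# Mirror base + artifact path = the URL that wget downloads.
echo "${PREFFERED}${PATHINFO}"
# http://mirror.example.org/apache/spark/spark-1.5.1/spark-1.5.1-bin-hadoop2.3.tgz
```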
zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
@@ -22,13 +22,15 @@
 import java.lang.ref.WeakReference;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
+import java.util.List;
 import java.util.Properties;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 
 import org.apache.commons.httpclient.HttpClient;
 import org.apache.commons.httpclient.HttpMethodBase;
 import org.apache.commons.httpclient.methods.*;
+import org.apache.zeppelin.interpreter.Interpreter.RegisteredInterpreter;
 import org.apache.zeppelin.interpreter.InterpreterGroup;
 import org.apache.zeppelin.interpreter.InterpreterOption;
 import org.apache.zeppelin.interpreter.InterpreterSetting;
@@ -197,6 +199,13 @@ private static boolean isActiveSparkHome(File dir) {

 protected static void shutDown() throws Exception {
 if (!wasRunning) {
+// restart interpreter to stop all interpreter processes
+List<String> settingList = ZeppelinServer.notebook.getInterpreterFactory()
+.getDefaultInterpreterSettingList();
+for (String setting : settingList) {
+ZeppelinServer.notebook.getInterpreterFactory().restart(setting);
+}
+
 LOG.info("Terminating test Zeppelin...");
 ZeppelinServer.jettyServer.stop();
 executor.shutdown();