diff --git a/.travis.yml b/.travis.yml
index a1b467d9971..0c74edfc55d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -28,27 +28,38 @@ before_script:
-
script:
-# spark 1.4
+ # spark 1.5
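+ # the newest Spark gets the full distribution build and verify run; the older versions below only rebuild the Spark modules and rerun the REST API tests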
- mvn package -Pbuild-distr -Phadoop-2.3 -Ppyspark -B
- - ./testing/startSparkCluster.sh 1.4.0 2.3
+ - ./testing/startSparkCluster.sh 1.5.1 2.3
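+ # write SPARK_HOME into conf/zeppelin-env.sh so the Zeppelin launch scripts pick up the downloaded Spark distribution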
+ - echo "export SPARK_HOME=`pwd`/spark-1.5.1-bin-hadoop2.3" > conf/zeppelin-env.sh
- mvn verify -Pusing-packaged-distr -Phadoop-2.3 -Ppyspark -B
- - ./testing/stopSparkCluster.sh 1.4.0 2.3
+ - ./testing/stopSparkCluster.sh 1.5.1 2.3
+ # spark 1.4
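+ # drop the interpreter binaries built against Spark 1.5 before rebuilding with the 1.4 profile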
+ - rm -rf `pwd`/interpreter/spark
+ - mvn package -DskipTests -Pspark-1.4 -Phadoop-2.3 -Ppyspark -B -pl 'zeppelin-interpreter,spark-dependencies,spark'
+ - ./testing/startSparkCluster.sh 1.4.1 2.3
+ - echo "export SPARK_HOME=`pwd`/spark-1.4.1-bin-hadoop2.3" > conf/zeppelin-env.sh
+ - mvn package -Pspark-1.4 -Phadoop-2.3 -B -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false
+ - ./testing/stopSparkCluster.sh 1.4.1 2.3
# spark 1.3
- rm -rf `pwd`/interpreter/spark
- mvn package -DskipTests -Pspark-1.3 -Phadoop-2.3 -Ppyspark -B -pl 'zeppelin-interpreter,spark-dependencies,spark'
- ./testing/startSparkCluster.sh 1.3.1 2.3
+ - echo "export SPARK_HOME=`pwd`/spark-1.3.1-bin-hadoop2.3" > conf/zeppelin-env.sh
- mvn package -Pspark-1.3 -Phadoop-2.3 -B -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false
- ./testing/stopSparkCluster.sh 1.3.1 2.3
# spark 1.2
- rm -rf `pwd`/interpreter/spark
- mvn package -Pspark-1.2 -Phadoop-2.3 -Ppyspark -B -pl 'zeppelin-interpreter,spark-dependencies,spark'
- ./testing/startSparkCluster.sh 1.2.1 2.3
+ - echo "export SPARK_HOME=`pwd`/spark-1.2.1-bin-hadoop2.3" > conf/zeppelin-env.sh
- mvn package -Pspark-1.2 -Phadoop-2.3 -B -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false
- ./testing/stopSparkCluster.sh 1.2.1 2.3
# spark 1.1
- rm -rf `pwd`/interpreter/spark
- mvn package -Pspark-1.1 -Phadoop-2.3 -Ppyspark -B -pl 'zeppelin-interpreter,spark-dependencies,spark'
- ./testing/startSparkCluster.sh 1.1.1 2.3
+ - echo "export SPARK_HOME=`pwd`/spark-1.1.1-bin-hadoop2.3" > conf/zeppelin-env.sh
- mvn package -Pspark-1.1 -Phadoop-2.3 -B -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false
- ./testing/stopSparkCluster.sh 1.1.1 2.3
diff --git a/bin/interpreter.sh b/bin/interpreter.sh
index e03a13b0f98..8a0f101dbee 100755
--- a/bin/interpreter.sh
+++ b/bin/interpreter.sh
@@ -121,9 +121,9 @@ fi
CLASSPATH+=":${ZEPPELIN_CLASSPATH}"
if [[ -n "${SPARK_SUBMIT}" ]]; then
- ${SPARK_SUBMIT} --class ${ZEPPELIN_SERVER} --driver-class-path "${CLASSPATH}" --driver-java-options "${JAVA_INTP_OPTS}" ${SPARK_SUBMIT_OPTIONS} ${SPARK_APP_JAR} ${PORT} &
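+  # CLASSPATH_OVERRIDES, if set (e.g. in conf/zeppelin-env.sh), is prepended so that user-supplied classes take precedence over the bundled ones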
+ ${SPARK_SUBMIT} --class ${ZEPPELIN_SERVER} --driver-class-path "${CLASSPATH_OVERRIDES}:${CLASSPATH}" --driver-java-options "${JAVA_INTP_OPTS}" ${SPARK_SUBMIT_OPTIONS} ${SPARK_APP_JAR} ${PORT} &
else
- ${ZEPPELIN_RUNNER} ${JAVA_INTP_OPTS} -cp ${CLASSPATH} ${ZEPPELIN_SERVER} ${PORT} &
+ ${ZEPPELIN_RUNNER} ${JAVA_INTP_OPTS} -cp ${CLASSPATH_OVERRIDES}:${CLASSPATH} ${ZEPPELIN_SERVER} ${PORT} &
fi
pid=$!
diff --git a/bin/zeppelin-daemon.sh b/bin/zeppelin-daemon.sh
index a386f277e5c..599ff66ee79 100755
--- a/bin/zeppelin-daemon.sh
+++ b/bin/zeppelin-daemon.sh
@@ -169,7 +169,7 @@ function start() {
initialize_default_directories
- nohup nice -n $ZEPPELIN_NICENESS $ZEPPELIN_RUNNER $JAVA_OPTS -cp $CLASSPATH $ZEPPELIN_MAIN >> "${ZEPPELIN_OUTFILE}" 2>&1 < /dev/null &
+ nohup nice -n $ZEPPELIN_NICENESS $ZEPPELIN_RUNNER $JAVA_OPTS -cp $CLASSPATH_OVERRIDES:$CLASSPATH $ZEPPELIN_MAIN >> "${ZEPPELIN_OUTFILE}" 2>&1 < /dev/null &
pid=$!
if [[ -z "${pid}" ]]; then
action_msg "${ZEPPELIN_NAME} start" "${SET_ERROR}"
diff --git a/bin/zeppelin.sh b/bin/zeppelin.sh
index 695e751839e..3879c2e61d0 100755
--- a/bin/zeppelin.sh
+++ b/bin/zeppelin.sh
@@ -85,4 +85,4 @@ if [[ ! -d "${ZEPPELIN_NOTEBOOK_DIR}" ]]; then
$(mkdir -p "${ZEPPELIN_NOTEBOOK_DIR}")
fi
-$(exec $ZEPPELIN_RUNNER $JAVA_OPTS -cp $CLASSPATH $ZEPPELIN_SERVER "$@")
+$(exec $ZEPPELIN_RUNNER $JAVA_OPTS -cp $CLASSPATH_OVERRIDES:$CLASSPATH $ZEPPELIN_SERVER "$@")
diff --git a/docs/index.md b/docs/index.md
index a5245e60a4f..433912a8eac 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -71,7 +71,7 @@ With simple drag and drop Zeppelin aggregates the values and displays them in pi
-Learn more about Zeppelin's [Display system](./docs/display.html).
+Learn more about Zeppelin's Display system. ( [text](./docs/displaysystem/display.html), [html](./docs/displaysystem/display.html#html), [table](./docs/displaysystem/table.html), [angular](./docs/displaysystem/angular.html) )
@@ -81,7 +81,7 @@ Zeppelin can dynamically create some input forms into your notebook.
-Learn more about [Dynamic Forms](./docs/dynamicform.html).
+Learn more about [Dynamic Forms](./docs/manual/dynamicform.html).
diff --git a/pom.xml b/pom.xml
index 45335feacf6..394f8530e40 100755
--- a/pom.xml
+++ b/pom.xml
@@ -456,6 +456,7 @@
STYLE.md
Roadmap.md
conf/interpreter.json
+ conf/zeppelin-env.sh
spark-*-bin*/**
diff --git a/testing/startSparkCluster.sh b/testing/startSparkCluster.sh
index 1f70fe63794..7333ab09e07 100755
--- a/testing/startSparkCluster.sh
+++ b/testing/startSparkCluster.sh
@@ -31,7 +31,19 @@ ZEPPELIN_HOME="$(cd "${FWDIR}/.."; pwd)"
export SPARK_HOME=${ZEPPELIN_HOME}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}
echo "SPARK_HOME is ${SPARK_HOME} "
if [ ! -d "${SPARK_HOME}" ]; then
- wget -q http://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz
+ echo "${SPARK_VERSION}" | grep "^1.[12].[0-9]" > /dev/null
+ if [ $? -eq 0 ]; then
+ # spark 1.1.x and 1.2.x can be downloaded from the Apache archive
+ wget http://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz
+ else
+ # spark 1.3.x and later can be downloaded from a mirror
+ # get the download address from the mirror system
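+ # closer.cgi?asjson=1 returns JSON with a "preferred" mirror URL and the "path_info" of the requested file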
+ MIRROR_INFO=$(curl -s "http://www.apache.org/dyn/closer.cgi/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz?asjson=1")
+
+ PREFERRED=$(echo "${MIRROR_INFO}" | grep preferred | sed 's/[^"]*.preferred.: .\([^"]*\).*/\1/g')
+ PATHINFO=$(echo "${MIRROR_INFO}" | grep path_info | sed 's/[^"]*.path_info.: .\([^"]*\).*/\1/g')
+ wget "${PREFERRED}${PATHINFO}"
+ fi
tar zxf spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz
fi
@@ -46,8 +58,8 @@ export SPARK_MASTER_WEBUI_PORT=7072
export SPARK_WORKER_WEBUI_PORT=8082
${SPARK_HOME}/sbin/start-master.sh
-echo ${SPARK_VERSION} | grep "^1.4" > /dev/null
-if [ $? -ne 0 ]; then # spark 1.3 or prior
+echo ${SPARK_VERSION} | grep "^1.[123].[0-9]" > /dev/null
+if [ $? -eq 0 ]; then # spark 1.3 or prior
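+  # start-slave.sh in Spark 1.3 and earlier takes a worker number plus the master address; from 1.4 on it takes only the master URL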
${SPARK_HOME}/sbin/start-slave.sh 1 `hostname`:${SPARK_MASTER_PORT}
else
${SPARK_HOME}/sbin/start-slave.sh spark://`hostname`:7071
diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
index ecf0c729a83..1895e16cd4c 100644
--- a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
+++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
@@ -22,6 +22,7 @@
import java.lang.ref.WeakReference;
import java.net.InetAddress;
import java.net.UnknownHostException;
+import java.util.List;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@@ -29,6 +30,7 @@
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpMethodBase;
import org.apache.commons.httpclient.methods.*;
+import org.apache.zeppelin.interpreter.Interpreter.RegisteredInterpreter;
import org.apache.zeppelin.interpreter.InterpreterGroup;
import org.apache.zeppelin.interpreter.InterpreterOption;
import org.apache.zeppelin.interpreter.InterpreterSetting;
@@ -197,6 +199,13 @@ private static boolean isActiveSparkHome(File dir) {
protected static void shutDown() throws Exception {
if (!wasRunning) {
+ // restart interpreter to stop all interpreter processes
+ List<String> settingList = ZeppelinServer.notebook.getInterpreterFactory()
+ .getDefaultInterpreterSettingList();
+ for (String setting : settingList) {
+ ZeppelinServer.notebook.getInterpreterFactory().restart(setting);
+ }
+
LOG.info("Terminating test Zeppelin...");
ZeppelinServer.jettyServer.stop();
executor.shutdown();
diff --git a/zeppelin-web/src/components/modal-shortcut/modal-shortcut.html b/zeppelin-web/src/components/modal-shortcut/modal-shortcut.html
index 7d41431f7c3..fe18589a1af 100644
--- a/zeppelin-web/src/components/modal-shortcut/modal-shortcut.html
+++ b/zeppelin-web/src/components/modal-shortcut/modal-shortcut.html
@@ -59,6 +59,18 @@ Control in Note
Control in Note Editor
+
+
+
+ Auto-completion
+
+
+
+