diff --git a/.travis.yml b/.travis.yml
index a86cf34acab..8619455253f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -20,7 +20,7 @@ sudo: false
before_cache:
- sudo chown -R travis:travis $HOME/.m2
-
+
cache:
apt: true
directories:
@@ -98,15 +98,15 @@ matrix:
dist: trusty
env: BUILD_PLUGINS="true" PYTHON="3" SCALA_VER="2.10" PROFILE="-Pspark-1.6 -Pscala-2.10" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-zengine,spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=SparkIntegrationTest,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
- # Test spark module for 2.1.0 with scala 2.11
+ # Test spark module for 2.4.0 with scala 2.11
- jdk: "oraclejdk8"
dist: trusty
- env: BUILD_PLUGINS="false" PYTHON="2" SCALA_VER="2.11" PROFILE="-Pspark-2.1 -Phadoop2 -Pscala-2.11" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,org.apache.zeppelin.rinterpreter.*,org.apache.spark.api.r.* -DfailIfNoTests=false"
+ env: BUILD_PLUGINS="false" PYTHON="2" SCALA_VER="2.11" PROFILE="-Pspark-2.4 -Phadoop2 -Pscala-2.11" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,org.apache.zeppelin.rinterpreter.*,org.apache.spark.api.r.* -DfailIfNoTests=false"
- # Test spark module for 2.0.2 with scala 2.11
+ # Test spark module for 2.3.2 with scala 2.11
- jdk: "oraclejdk8"
dist: trusty
- env: BUILD_PLUGINS="false" PYTHON="2" SCALA_VER="2.11" PROFILE="-Pspark-2.0 -Phadoop3 -Pscala-2.11" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,org.apache.zeppelin.rinterpreter.*,org.apache.spark.api.r.* -DfailIfNoTests=false"
+ env: BUILD_PLUGINS="false" PYTHON="2" SCALA_VER="2.11" PROFILE="-Pspark-2.3 -Phadoop3 -Pscala-2.11" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=org.apache.zeppelin.spark.*,org.apache.zeppelin.rinterpreter.*,org.apache.spark.api.r.* -DfailIfNoTests=false"
# Test python/pyspark with python 2, livy 0.5
- sudo: required
diff --git a/spark/interpreter/pom.xml b/spark/interpreter/pom.xml
index daf801fcdd2..32f625d6cfb 100644
--- a/spark/interpreter/pom.xml
+++ b/spark/interpreter/pom.xml
@@ -53,7 +53,7 @@
**/PySparkInterpreterMatplotlibTest.java
**/*Test.*
-
+
@@ -69,12 +69,6 @@
${project.version}
-
- org.apache.zeppelin
- spark-scala-2.10
- ${project.version}
-
-
org.apache.zeppelin
zeppelin-interpreter-api
@@ -609,4 +603,51 @@
+
+
+
+ spark-2.2
+
+
+ org.apache.zeppelin
+ spark-scala-2.10
+ ${project.version}
+
+
+
+
+
+ spark-2.1
+
+
+ org.apache.zeppelin
+ spark-scala-2.10
+ ${project.version}
+
+
+
+
+
+ spark-2.0
+
+
+ org.apache.zeppelin
+ spark-scala-2.10
+ ${project.version}
+
+
+
+
+
+ spark-1.6
+
+
+ org.apache.zeppelin
+ spark-scala-2.10
+ ${project.version}
+
+
+
+
+
diff --git a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkVersion.java b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkVersion.java
index b75deb8b420..6ee001512cf 100644
--- a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkVersion.java
+++ b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkVersion.java
@@ -31,9 +31,10 @@ public class SparkVersion {
public static final SparkVersion SPARK_2_3_0 = SparkVersion.fromVersionString("2.3.0");
public static final SparkVersion SPARK_2_3_1 = SparkVersion.fromVersionString("2.3.1");
public static final SparkVersion SPARK_2_4_0 = SparkVersion.fromVersionString("2.4.0");
+ public static final SparkVersion SPARK_3_0_0 = SparkVersion.fromVersionString("3.0.0");
public static final SparkVersion MIN_SUPPORTED_VERSION = SPARK_1_6_0;
- public static final SparkVersion UNSUPPORTED_FUTURE_VERSION = SPARK_2_4_0;
+ public static final SparkVersion UNSUPPORTED_FUTURE_VERSION = SPARK_3_0_0;
private int version;
private String versionString;
diff --git a/spark/pom.xml b/spark/pom.xml
index 42f9d179145..b16e53f0774 100644
--- a/spark/pom.xml
+++ b/spark/pom.xml
@@ -49,17 +49,16 @@
spark-${spark.version}
- http://d3kbcqa49mib13.cloudfront.net/${spark.archive}.tgz
+ https://archive.apache.org/dist/spark/${spark.archive}/${spark.archive}.tgz
- http://d3kbcqa49mib13.cloudfront.net/${spark.archive}-bin-without-hadoop.tgz
+ https://archive.apache.org/dist/spark/${spark.archive}/${spark.archive}-bin-without-hadoop.tgz
interpreter
spark-scala-parent
- scala-2.10
scala-2.11
spark-dependencies
spark-shims
@@ -192,32 +191,47 @@
+
+ spark-2.4
+
+ 2.4.0
+ 2.5.0
+ 0.10.7
+
+
+
spark-2.3
- 2.3.0
+ 2.3.2
2.5.0
- 0.10.6
+ 0.10.7
spark-2.2
-
- true
-
- 2.2.0
+ 2.2.1
0.10.4
+
+ true
+
+
+ scala-2.10
+
spark-2.1
- 2.1.0
+ 2.1.2
0.10.4
+
+ scala-2.10
+
@@ -226,6 +240,9 @@
2.0.2
0.10.3
+
+ scala-2.10
+
@@ -234,7 +251,10 @@
1.6.3
0.9
+
+ scala-2.10
+
-
+
diff --git a/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala b/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala
index 757f7eb5c3e..0956e042e03 100644
--- a/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala
+++ b/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala
@@ -39,6 +39,8 @@ class SparkScala211Interpreter(override val conf: SparkConf,
override val printReplOutput: java.lang.Boolean)
extends BaseSparkScalaInterpreter(conf, depFiles, printReplOutput) {
+ import SparkScala211Interpreter._
+
lazy override val LOGGER: Logger = LoggerFactory.getLogger(getClass)
private var sparkILoop: ILoop = _
@@ -81,7 +83,7 @@ class SparkScala211Interpreter(override val conf: SparkConf,
sparkILoop.in = reader
sparkILoop.initializeSynchronous()
- callMethod(sparkILoop, "scala$tools$nsc$interpreter$ILoop$$loopPostInit")
+ loopPostInit(this)
this.scalaCompleter = reader.completion.completer()
createSparkContext()
@@ -105,3 +107,72 @@ class SparkScala211Interpreter(override val conf: SparkConf,
sparkILoop.interpret(code)
}
+
+private object SparkScala211Interpreter {
+
+ /**
+ * This is a hack to call `loopPostInit` in `ILoop`. In higher versions of Scala such
+ * as 2.11.12, `loopPostInit` became a nested function which is inaccessible. Here,
+ * we redefine `loopPostInit` on the Scala 2.11.8 side and ignore `loadInitFiles` being called in
+ * Scala 2.11.12 since here we do not have to load files.
+ *
+ * Both methods `loopPostInit` and `unleashAndSetPhase` are redefined, and `phaseCommand` and
+ * `asyncMessage` are called via reflection since both exist in Scala 2.11.8 and 2.11.12.
+ *
+ * Please see the code below:
+ * https://github.com/scala/scala/blob/v2.11.8/src/repl/scala/tools/nsc/interpreter/ILoop.scala
+ * https://github.com/scala/scala/blob/v2.11.12/src/repl/scala/tools/nsc/interpreter/ILoop.scala
+ *
+ * See also ZEPPELIN-3810.
+ */
+ private def loopPostInit(interpreter: SparkScala211Interpreter): Unit = {
+ import StdReplTags._
+ import scala.reflect.classTag
+ import scala.reflect.io
+
+ val sparkILoop = interpreter.sparkILoop
+ val intp = sparkILoop.intp
+ val power = sparkILoop.power
+ val in = sparkILoop.in
+
+ def loopPostInit() {
+ // Bind intp somewhere out of the regular namespace where
+ // we can get at it in generated code.
+ intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain]))
+ // Auto-run code via some setting.
+ (replProps.replAutorunCode.option
+ flatMap (f => io.File(f).safeSlurp())
+ foreach (intp quietRun _)
+ )
+ // classloader and power mode setup
+ intp.setContextClassLoader()
+ if (isReplPower) {
+ replProps.power setValue true
+ unleashAndSetPhase()
+ asyncMessage(power.banner)
+ }
+ // SI-7418 Now, and only now, can we enable TAB completion.
+ in.postInit()
+ }
+
+ def unleashAndSetPhase() = if (isReplPower) {
+ power.unleash()
+ intp beSilentDuring phaseCommand("typer") // Set the phase to "typer"
+ }
+
+ def phaseCommand(name: String): Results.Result = {
+ interpreter.callMethod(
+ sparkILoop,
+ "scala$tools$nsc$interpreter$ILoop$$phaseCommand",
+ Array(classOf[String]),
+ Array(name)).asInstanceOf[Results.Result]
+ }
+
+ def asyncMessage(msg: String): Unit = {
+ interpreter.callMethod(
+ sparkILoop, "asyncMessage", Array(classOf[String]), Array(msg))
+ }
+
+ loopPostInit()
+ }
+}
diff --git a/zeppelin-distribution/src/bin_license/LICENSE b/zeppelin-distribution/src/bin_license/LICENSE
index 54356e18682..f79da72dcf6 100644
--- a/zeppelin-distribution/src/bin_license/LICENSE
+++ b/zeppelin-distribution/src/bin_license/LICENSE
@@ -291,7 +291,7 @@ The text of each license is also included at licenses/LICENSE-[project]-[version
(BSD Style) JSch v0.1.53 (http://www.jcraft.com) - http://www.jcraft.com/jsch/LICENSE.txt
(BSD 3 Clause) highlightjs v9.4.0 (https://highlightjs.org/) - https://github.com/isagalaev/highlight.js/blob/9.4.0/LICENSE
(BSD 3 Clause) hamcrest v1.3 (http://hamcrest.org/JavaHamcrest/) - http://opensource.org/licenses/BSD-3-Clause
- (BSD Style) JLine v2.12.1 (https://github.com/jline/jline2) - https://github.com/jline/jline2/blob/master/LICENSE.txt
+ (BSD Style) JLine v2.14.3 (https://github.com/jline/jline2) - https://github.com/jline/jline2/blob/master/LICENSE.txt
(BSD New license) Google Auth Library for Java - Credentials (com.google.auth:google-auth-library-credentials:0.4.0 - https://github.com/google/google-auth-library-java/google-auth-library-credentials)
(BSD New license) Google Auth Library for Java - OAuth2 HTTP (com.google.auth:google-auth-library-oauth2-http:0.4.0 - https://github.com/google/google-auth-library-java/google-auth-library-oauth2-http)
(New BSD license) Protocol Buffer Java API (com.google.protobuf:protobuf-java-util:3.0.0-beta-2 - https://developers.google.com/protocol-buffers/)
diff --git a/zeppelin-interpreter/pom.xml b/zeppelin-interpreter/pom.xml
index 22a029e8bf2..bf624ceda38 100644
--- a/zeppelin-interpreter/pom.xml
+++ b/zeppelin-interpreter/pom.xml
@@ -43,7 +43,7 @@
1.12
3.0.3
1.0
- 2.12.1
+ 2.14.3
3.0.0-rc4
3.1.1
20.0
diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
index faa639f8e8a..97abd076785 100644
--- a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
+++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
@@ -64,7 +64,7 @@ public class ZeppelinSparkClusterTest extends AbstractTestRestApi {
//ci timeout.
//TODO(zjffdu) remove this after we upgrade it to junit 4.13 (ZEPPELIN-3341)
private static Set verifiedSparkVersions = new HashSet<>();
-
+
private String sparkVersion;
private AuthenticationInfo anonymous = new AuthenticationInfo("anonymous");
@@ -83,10 +83,12 @@ public ZeppelinSparkClusterTest(String sparkVersion) throws Exception {
@Parameterized.Parameters
public static List