Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
35 commits
Select commit Hold shift + click to select a range
4462207
[ZEPPELIN-605] Add support for Scala 2.11
lresende Feb 25, 2016
864e52f
[ZEPPELIN-605] Add Scala 2.11 build profile
lresende Apr 2, 2016
d8628c7
[ZEPPELIN-605] Enable Scala 2.11 REPL support for Spark Interpreter
lresende Apr 7, 2016
ac0088b
[ZEPPELIN-605] Rewrite Spark interpreter based on Scala 2.11 support
lresende Apr 13, 2016
ceb1480
Initial scala-210, 211 support in the single binary
Leemoonsoo Jun 9, 2016
fefcb2b
scala 2.11 support for spark interpreter
Leemoonsoo Jun 10, 2016
32650c7
Fix some reflections
Leemoonsoo Jun 10, 2016
ce4a2d8
Fix reflection on creating SparkCommandLine
Leemoonsoo Jun 10, 2016
fe3f8ea
Fix reflection around HttpServer and createTempDir
Leemoonsoo Jun 10, 2016
35fc8a0
fix style
Leemoonsoo Jun 11, 2016
9542299
Fix reflection
Leemoonsoo Jun 13, 2016
43d4a70
style
Leemoonsoo Jun 13, 2016
6da1364
SparkContext sharing seems not working in scala 2.11, disable the test
Leemoonsoo Jun 13, 2016
d7bd3ee
Update FlinkInterpreter
Leemoonsoo Jun 13, 2016
c7df53d
Update ignite interpreter
Leemoonsoo Jun 13, 2016
1102d00
Remove unused methods
Leemoonsoo Jun 13, 2016
9b2adf8
Force close
lresende Jun 16, 2016
36e4eed
make binary built with scala 2.11 work with spark_2.10 binary
Leemoonsoo Jun 29, 2016
f62dc46
Fix pom.xml merge conflict
lresende Jul 7, 2016
0d3bd46
Fix new code to support both scala versions
lresende Jul 7, 2016
49cc8d6
initialize imain
Leemoonsoo Jul 9, 2016
a744204
-Dscala-2.11 on mvn verify
Leemoonsoo Jul 9, 2016
bfd7340
Initial spark 2.0 support
Leemoonsoo Jul 10, 2016
482864c
comment out spark-streaming-twitter
Leemoonsoo Jul 10, 2016
b3d0670
generate spark2.0 compatible classname
Leemoonsoo Jul 10, 2016
5243fea
Remove unnecessary refernece to HiveContext
Leemoonsoo Jul 10, 2016
1bd101a
spark2 in PySparkInterpreter
Leemoonsoo Jul 10, 2016
bd4a5fe
get SQLContext correctly
Leemoonsoo Jul 10, 2016
5bd5640
dd spark 2 test profile in travis
Leemoonsoo Jul 10, 2016
55e9b7f
SparkSession.wrapped() fallbacks to SparkSession.sqlContext().
Leemoonsoo Jul 10, 2016
1d72eb5
Update spark package directory name pattern to match 2.0.0-preview
Leemoonsoo Jul 10, 2016
0b76ff0
Update ZeppelinSparkCluterTest to make compatible with Spark 2
Leemoonsoo Jul 11, 2016
0b08ecd
full class name org.apache.spark.repl.SparkJLineCompletion
Leemoonsoo Jul 11, 2016
3aa7c91
Get last object from resourcePool
Leemoonsoo Jul 11, 2016
d862800
use getLastObject instead of getValue in putLatestVarInResourcePool w…
Leemoonsoo Jul 12, 2016
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 10 additions & 2 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,17 @@ addons:

matrix:
include:
# Test all modules
# Test all modules with spark-2.0.0-preview and scala 2.11
- jdk: "oraclejdk7"
env: SPARK_VER="1.6.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Pr -Phadoop-2.3 -Ppyspark -Psparkr -Pscalding -Pexamples" BUILD_FLAG="package -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr" TEST_PROJECTS="-Dpython.test.exclude=''"
env: SPARK_VER="2.0.0-preview" HADOOP_VER="2.3" PROFILE="-Pspark-2.0 -Dspark.version=2.0.0-preview -Phadoop-2.3 -Ppyspark -Psparkr -Pscalding -Pexamples -Dscala-2.11" BUILD_FLAG="package -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr" TEST_PROJECTS="-Dpython.test.exclude=''"

# Test all modules with scala 2.10
- jdk: "oraclejdk7"
env: SPARK_VER="1.6.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Pr -Phadoop-2.3 -Ppyspark -Psparkr -Pscalding -Pexamples -Dscala-2.10" BUILD_FLAG="package -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr" TEST_PROJECTS="-Dpython.test.exclude=''"

# Test all modules with scala 2.11
- jdk: "oraclejdk7"
env: SPARK_VER="1.6.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Pr -Phadoop-2.3 -Ppyspark -Psparkr -Pscalding -Pexamples -Dscala-2.11" BUILD_FLAG="package -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr" TEST_PROJECTS="-Dpython.test.exclude=''"

# Test spark module for 1.5.2
- jdk: "oraclejdk7"
Expand Down
8 changes: 8 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -294,6 +294,14 @@ And browse [localhost:8080](localhost:8080) in your browser.

For configuration details check __`./conf`__ subdirectory.

### Building for Scala 2.11

To produce a Zeppelin package compiled with Scala 2.11, add the `-Dscala-2.11` property to your build command:

```
mvn clean install -Pspark-1.6 -Phadoop-2.4 -Pyarn -Ppyspark -Dscala-2.11 -DskipTests
```

### Package
To package the final distribution including the compressed archive, run:

Expand Down
4 changes: 1 addition & 3 deletions cassandra/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -38,14 +38,11 @@
<cassandra.driver.version>3.0.1</cassandra.driver.version>
<snappy.version>1.0.5.4</snappy.version>
<lz4.version>1.3.0</lz4.version>
<scala.version>2.10.4</scala.version>
<scala.binary.version>2.10</scala.binary.version>
<commons-lang.version>3.3.2</commons-lang.version>
<scalate.version>1.7.1</scalate.version>
<cassandra.guava.version>16.0.1</cassandra.guava.version>

<!--TEST-->
<scalatest.version>2.2.4</scalatest.version>
<junit.version>4.12</junit.version>
<achilles.version>3.2.4-Zeppelin</achilles.version>
<assertj.version>1.7.0</assertj.version>
Expand Down Expand Up @@ -173,6 +170,7 @@
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<version>2.15.2</version>
<executions>
<execution>
<id>compile</id>
Expand Down
31 changes: 16 additions & 15 deletions flink/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,6 @@
<properties>
<flink.version>1.0.3</flink.version>
<flink.akka.version>2.3.7</flink.akka.version>
<flink.scala.binary.version>2.10</flink.scala.binary.version>
<flink.scala.version>2.10.4</flink.scala.version>
<scala.macros.version>2.0.1</scala.macros.version>
</properties>

Expand Down Expand Up @@ -73,68 +71,71 @@

<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_${flink.scala.binary.version}</artifactId>
<artifactId>flink-clients_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>

<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-runtime_${flink.scala.binary.version}</artifactId>
<artifactId>flink-runtime_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>

<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-scala_${flink.scala.binary.version}</artifactId>
<artifactId>flink-scala_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>

<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-scala-shell_${flink.scala.binary.version}</artifactId>
<artifactId>flink-scala-shell_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>

<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-actor_${flink.scala.binary.version}</artifactId>
<artifactId>akka-actor_${scala.binary.version}</artifactId>
<version>${flink.akka.version}</version>
</dependency>

<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-remote_${flink.scala.binary.version}</artifactId>
<artifactId>akka-remote_${scala.binary.version}</artifactId>
<version>${flink.akka.version}</version>
</dependency>

<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-slf4j_${flink.scala.binary.version}</artifactId>
<artifactId>akka-slf4j_${scala.binary.version}</artifactId>
<version>${flink.akka.version}</version>
</dependency>

<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-testkit_${flink.scala.binary.version}</artifactId>
<artifactId>akka-testkit_${scala.binary.version}</artifactId>
<version>${flink.akka.version}</version>
</dependency>

<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${flink.scala.version}</version>
<version>${scala.version}</version>
<scope>provided</scope>
</dependency>

<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-compiler</artifactId>
<version>${flink.scala.version}</version>
<version>${scala.version}</version>
<scope>provided</scope>
</dependency>

<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-reflect</artifactId>
<version>${flink.scala.version}</version>
<version>${scala.version}</version>
<scope>provided</scope>
</dependency>

<dependency>
Expand Down Expand Up @@ -169,7 +170,7 @@
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
<version>3.1.4</version>
<version>3.2.2</version>
<executions>
<!-- Run scala compiler in the process-resources phase, so that dependencies on
scala classes can be resolved later in the (Java) compile phase -->
Expand Down Expand Up @@ -199,7 +200,7 @@
<compilerPlugins combine.children="append">
<compilerPlugin>
<groupId>org.scalamacros</groupId>
<artifactId>paradise_${flink.scala.version}</artifactId>
<artifactId>paradise_${scala.version}</artifactId>
<version>${scala.macros.version}</version>
</compilerPlugin>
</compilerPlugins>
Expand Down
30 changes: 14 additions & 16 deletions flink/src/main/java/org/apache/zeppelin/flink/FlinkInterpreter.java
Original file line number Diff line number Diff line change
Expand Up @@ -17,17 +17,15 @@
*/
package org.apache.zeppelin.flink;

import java.lang.reflect.InvocationTargetException;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.*;

import org.apache.flink.api.scala.FlinkILoop;
import org.apache.flink.configuration.Configuration;
Expand All @@ -45,6 +43,8 @@
import scala.Console;
import scala.None;
import scala.Some;
import scala.collection.JavaConversions;
import scala.collection.immutable.Nil;
import scala.runtime.AbstractFunction0;
import scala.tools.nsc.Settings;
import scala.tools.nsc.interpreter.IMain;
Expand Down Expand Up @@ -94,7 +94,7 @@ public void open() {

// prepare bindings
imain.interpret("@transient var _binder = new java.util.HashMap[String, Object]()");
binder = (Map<String, Object>) getValue("_binder");
Map<String, Object> binder = (Map<String, Object>) getLastObject();

// import libraries
imain.interpret("import scala.tools.nsc.io._");
Expand All @@ -103,7 +103,10 @@ public void open() {

imain.interpret("import org.apache.flink.api.scala._");
imain.interpret("import org.apache.flink.api.common.functions._");
imain.bindValue("env", env);

binder.put("env", env);
imain.interpret("val env = _binder.get(\"env\").asInstanceOf["
+ env.getClass().getName() + "]");
}

private boolean localMode() {
Expand Down Expand Up @@ -192,16 +195,11 @@ private List<File> classPath(ClassLoader cl) {
return paths;
}

public Object getValue(String name) {
IMain imain = flinkIloop.intp();
Object ret = imain.valueOfTerm(name);
if (ret instanceof None) {
return null;
} else if (ret instanceof Some) {
return ((Some) ret).get();
} else {
return ret;
}
/**
 * Returns the value produced by the most recently interpreted line.
 *
 * Reaches into the Scala REPL internals: asks the last request's line
 * representation for the object bound to {@code $result}, passing an empty
 * argument list. This replaces the older name-based lookup via
 * {@code IMain.valueOfTerm}, whose signature differs between Scala 2.10
 * and 2.11.
 *
 * NOTE(review): relies on undocumented REPL internals ({@code lastRequest},
 * {@code lineRep}, the {@code $result} binding) — verify against both
 * Scala 2.10 and 2.11 REPL implementations.
 *
 * @return the last evaluated object, or whatever the REPL bound to
 *         {@code $result} (may be null if nothing was evaluated)
 */
public Object getLastObject() {
  Object obj = imain.lastRequest().lineRep().call(
      "$result",
      JavaConversions.asScalaBuffer(new LinkedList<Object>()));
  return obj;
}

@Override
Expand Down
10 changes: 4 additions & 6 deletions ignite/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,7 @@
<url>http://zeppelin.apache.org</url>

<properties>
<ignite.version>1.6.0</ignite.version>
<ignite.scala.binary.version>2.10</ignite.scala.binary.version>
<ignite.scala.version>2.10.4</ignite.scala.version>
<ignite.version>1.5.0.final</ignite.version>
</properties>

<dependencies>
Expand Down Expand Up @@ -73,19 +71,19 @@
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${ignite.scala.version}</version>
<version>${scala.version}</version>
</dependency>

<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-compiler</artifactId>
<version>${ignite.scala.version}</version>
<version>${scala.version}</version>
</dependency>

<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-reflect</artifactId>
<version>${ignite.scala.version}</version>
<version>${scala.version}</version>
</dependency>

<dependency>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@
import scala.Console;
import scala.None;
import scala.Some;
import scala.collection.JavaConversions;
import scala.tools.nsc.Settings;
import scala.tools.nsc.interpreter.IMain;
import scala.tools.nsc.interpreter.Results.Result;
Expand Down Expand Up @@ -174,16 +175,11 @@ private List<File> classPath(ClassLoader cl) {
return paths;
}

public Object getValue(String name) {
Object val = imain.valueOfTerm(name);

if (val instanceof None) {
return null;
} else if (val instanceof Some) {
return ((Some) val).get();
} else {
return val;
}
/**
 * Returns the value produced by the most recently interpreted line.
 *
 * Queries the Scala REPL's last request for the object bound to
 * {@code $result}, passing an empty (Scala) argument list. Used instead of
 * {@code IMain.valueOfTerm(name)} because that API changed between
 * Scala 2.10 and 2.11; this keeps the interpreter source-compatible with both.
 *
 * NOTE(review): depends on REPL internals ({@code lastRequest},
 * {@code lineRep}, {@code $result}) — confirm behavior on both supported
 * Scala versions.
 *
 * @return the last evaluated object (may be null if nothing was evaluated)
 */
public Object getLastObject() {
  Object obj = imain.lastRequest().lineRep().call(
      "$result",
      JavaConversions.asScalaBuffer(new LinkedList<Object>()));
  return obj;
}

private Ignite getIgnite() {
Expand Down Expand Up @@ -222,7 +218,7 @@ private Ignite getIgnite() {

private void initIgnite() {
imain.interpret("@transient var _binder = new java.util.HashMap[String, Object]()");
Map<String, Object> binder = (Map<String, Object>) getValue("_binder");
Map<String, Object> binder = (Map<String, Object>) getLastObject();

if (getIgnite() != null) {
binder.put("ignite", ignite);
Expand Down
Loading