diff --git a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
index 89d24e67030..622e60271d1 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
@@ -67,10 +67,8 @@
 import scala.collection.Iterator;
 import scala.collection.JavaConversions;
 import scala.collection.JavaConverters;
-import scala.collection.convert.WrapAsJava;
 import scala.collection.Seq;
 import scala.collection.convert.WrapAsJava$;
-import scala.collection.convert.WrapAsScala;
 import scala.collection.mutable.HashMap;
 import scala.collection.mutable.HashSet;
 import scala.reflect.io.AbstractFile;
@@ -114,7 +112,7 @@ public class SparkInterpreter extends Interpreter {
   /**
    * completer - org.apache.spark.repl.SparkJLineCompletion (scala 2.10)
    */
-  private Object completer;
+  private Object completer = null;
 
   private Map<String, Object> binder;
   private SparkVersion sparkVersion;
@@ -723,11 +721,25 @@ public void open() {
           logger.error(e.getMessage(), e);
         }
       }
+    }
 
+    if (Utils.findClass("org.apache.spark.repl.SparkJLineCompletion", true) != null) {
       completer = Utils.instantiateClass(
           "org.apache.spark.repl.SparkJLineCompletion",
           new Class[]{Utils.findClass("org.apache.spark.repl.SparkIMain")},
           new Object[]{intp});
+    } else if (Utils.findClass(
+        "scala.tools.nsc.interpreter.PresentationCompilerCompleter", true) != null) {
+      completer = Utils.instantiateClass(
+          "scala.tools.nsc.interpreter.PresentationCompilerCompleter",
+          new Class[]{ IMain.class },
+          new Object[]{ intp });
+    } else if (Utils.findClass(
+        "scala.tools.nsc.interpreter.JLineCompletion", true) != null) {
+      completer = Utils.instantiateClass(
+          "scala.tools.nsc.interpreter.JLineCompletion",
+          new Class[]{ IMain.class },
+          new Object[]{ intp });
     }
 
     if (Utils.isSpark2()) {
@@ -906,6 +918,11 @@ private List<File> classPath(ClassLoader cl) {
 
   @Override
   public List<InterpreterCompletion> completion(String buf, int cursor) {
+    if (completer == null) {
+      logger.warn("Can't find completer");
+      return new LinkedList<InterpreterCompletion>();
+    }
+
     if (buf.length() < cursor) {
       cursor = buf.length();
     }
@@ -914,22 +931,18 @@ public List<InterpreterCompletion> completion(String buf, int cursor) {
       completionText = "";
       cursor = completionText.length();
     }
-    if (Utils.isScala2_10()) {
-      ScalaCompleter c = (ScalaCompleter) Utils.invokeMethod(completer, "completer");
-      Candidates ret = c.complete(completionText, cursor);
 
-      List<String> candidates = WrapAsJava$.MODULE$.seqAsJavaList(ret.candidates());
-      List<InterpreterCompletion> completions = new LinkedList<InterpreterCompletion>();
+    ScalaCompleter c = (ScalaCompleter) Utils.invokeMethod(completer, "completer");
+    Candidates ret = c.complete(completionText, cursor);
 
-      for (String candidate : candidates) {
-        completions.add(new InterpreterCompletion(candidate, candidate));
-      }
+    List<String> candidates = WrapAsJava$.MODULE$.seqAsJavaList(ret.candidates());
+    List<InterpreterCompletion> completions = new LinkedList<InterpreterCompletion>();
 
-      return completions;
-    } else {
-      return new LinkedList<InterpreterCompletion>();
+    for (String candidate : candidates) {
+      completions.add(new InterpreterCompletion(candidate, candidate));
     }
 
+    return completions;
   }
 
   private String getCompletionTargetString(String text, int cursor) {
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/Utils.java b/spark/src/main/java/org/apache/zeppelin/spark/Utils.java
index 328fa199ca6..765791efd37 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/Utils.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/Utils.java
@@ -56,10 +56,16 @@ static Object invokeStaticMethod(Class c, String name) {
   }
 
   static Class findClass(String name) {
+    return findClass(name, false);
+  }
+
+  static Class findClass(String name, boolean silence) {
     try {
       return Utils.class.forName(name);
     } catch (ClassNotFoundException e) {
-      logger.error(e.getMessage(), e);
+      if (!silence) {
+        logger.error(e.getMessage(), e);
+      }
       return null;
     }
   }
diff --git a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
index badc4e20b0d..1c7979fc428 100644
--- a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
+++ b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
@@ -19,16 +19,16 @@
 
 import static org.junit.Assert.*;
 
-import java.io.BufferedReader;
 import java.io.File;
 import java.util.HashMap;
 import java.util.LinkedList;
+import java.util.List;
 import java.util.Properties;
 
 import org.apache.spark.SparkConf;
 import org.apache.spark.SparkContext;
-import org.apache.spark.repl.SparkILoop;
 import org.apache.zeppelin.display.AngularObjectRegistry;
+import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
 import org.apache.zeppelin.resource.LocalResourcePool;
 import org.apache.zeppelin.resource.WellKnownResourceName;
 import org.apache.zeppelin.user.AuthenticationInfo;
@@ -42,7 +42,6 @@
 import org.junit.runners.MethodSorters;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import scala.tools.nsc.interpreter.IMain;
 
 @FixMethodOrder(MethodSorters.NAME_ASCENDING)
 public class SparkInterpreterTest {
@@ -282,4 +281,10 @@ public void testDisableImplicitImport() {
     assertEquals(Code.ERROR, repl2.interpret(ddl, context).code());
     repl2.close();
   }
+
+  @Test
+  public void testCompletion() {
+    List<InterpreterCompletion> completions = repl.completion("sc.", "sc.".length());
+    assertTrue(completions.size() > 0);
+  }
 }
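
Note (not part of the patch): the change to SparkInterpreter.open() tries the Scala 2.10 completer first (org.apache.spark.repl.SparkJLineCompletion), then the Scala 2.11 scala.tools.nsc.interpreter.PresentationCompilerCompleter, then scala.tools.nsc.interpreter.JLineCompletion, and leaves completer as null when none is on the classpath; completion() now guards that case and returns an empty list instead of failing. Below is a minimal standalone sketch of that selection logic using plain reflection rather than the patch's Utils.findClass/Utils.instantiateClass helpers; the class CompleterFallbackSketch and the loadOrNull/pickCompleter names are illustrative, not Zeppelin code.

import java.lang.reflect.Constructor;

public class CompleterFallbackSketch {

  // Mirrors Utils.findClass(name, true): resolve a class if present, otherwise null.
  static Class<?> loadOrNull(String name) {
    try {
      return Class.forName(name);
    } catch (ClassNotFoundException e) {
      return null;  // silent: absence simply selects the next candidate
    }
  }

  // Try the Scala 2.10 completer first, then the Scala 2.11 ones; may return null.
  static Object pickCompleter(Object intp) throws Exception {
    String[][] candidates = {
        {"org.apache.spark.repl.SparkJLineCompletion", "org.apache.spark.repl.SparkIMain"},
        {"scala.tools.nsc.interpreter.PresentationCompilerCompleter",
            "scala.tools.nsc.interpreter.IMain"},
        {"scala.tools.nsc.interpreter.JLineCompletion", "scala.tools.nsc.interpreter.IMain"}
    };
    for (String[] candidate : candidates) {
      Class<?> completerClass = loadOrNull(candidate[0]);
      Class<?> paramClass = loadOrNull(candidate[1]);
      if (completerClass != null && paramClass != null) {
        // Same constructor shapes the patch instantiates reflectively.
        Constructor<?> ctor = completerClass.getConstructor(paramClass);
        return ctor.newInstance(intp);
      }
    }
    return null;  // caller must tolerate "no completer available"
  }
}

Given an interpreter instance (SparkIMain on Scala 2.10, IMain on 2.11), pickCompleter(intp) would return whichever completer the runtime classpath provides, which is the behavior the new testCompletion test exercises through repl.completion("sc.", "sc.".length()).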