@@ -72,7 +72,7 @@ public void testSparkInterpreter() throws IOException, InterruptedException, Int
     properties.setProperty("spark.master", "local");
     properties.setProperty("spark.app.name", "test");
     properties.setProperty("zeppelin.spark.maxResult", "100");
-    properties.setProperty("zeppelin.spark.uiWebUrl", "fake_spark_weburl");
+    properties.setProperty("zeppelin.spark.uiWebUrl", "fake_spark_weburl/{{applicationId}}");
     // disable color output for easy testing
     properties.setProperty("zeppelin.spark.scala.color", "false");
     properties.setProperty("zeppelin.spark.deprecatedMsg.show", "false");
@@ -180,7 +180,8 @@ public void testSparkInterpreter() throws IOException, InterruptedException, Int
     // spark job url is sent
     ArgumentCaptor<Map> onParaInfosReceivedArg = ArgumentCaptor.forClass(Map.class);
     verify(mockRemoteEventClient).onParaInfosReceived(onParaInfosReceivedArg.capture());
-    assertTrue(((String) onParaInfosReceivedArg.getValue().get("jobUrl")).startsWith("fake_spark_weburl"));
+    assertTrue(((String) onParaInfosReceivedArg.getValue().get("jobUrl")).startsWith("fake_spark_weburl/"
+        + interpreter.getJavaSparkContext().sc().applicationId()));

     // case class
     result = interpreter.interpret("val bankText = sc.textFile(\"bank.csv\")", getInterpreterContext());
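The assertion above depends on the interpreter expanding the {{applicationId}} placeholder in zeppelin.spark.uiWebUrl before publishing the job URL. A minimal Scala sketch of that substitution (the property value and placeholder come from this patch; the application id is a hypothetical example):

    val template = "fake_spark_weburl/{{applicationId}}"       // configured zeppelin.spark.uiWebUrl
    val appId = "application_1583459542433_0001"               // hypothetical YARN application id
    val jobUrl = template.replace("{{applicationId}}", appId)
    // jobUrl == "fake_spark_weburl/application_1583459542433_0001"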

@@ -237,6 +237,8 @@ abstract class BaseSparkScalaInterpreter(val conf: SparkConf,
       case None =>
     }

+    initSparkWebUrl()
+
     val hiveSiteExisted: Boolean =
       Thread.currentThread().getContextClassLoader.getResource("hive-site.xml") != null
     val hiveEnabled = conf.getBoolean("zeppelin.spark.useHiveContext", false)
@@ -306,7 +308,8 @@ abstract class BaseSparkScalaInterpreter(val conf: SparkConf,
       case Some(url) => sparkUrl = url
       case None =>
     }
-    useYarnProxyURLIfNeeded()
+
+    initSparkWebUrl()

     bind("spark", sparkSession.getClass.getCanonicalName, sparkSession, List("""@transient"""))
     bind("sc", "org.apache.spark.SparkContext", sc, List("""@transient"""))
@@ -321,6 +324,15 @@ abstract class BaseSparkScalaInterpreter(val conf: SparkConf,
     scalaInterpret("print(\"\")")
   }

+  private def initSparkWebUrl(): Unit = {
+    val webUiUrl = properties.getProperty("zeppelin.spark.uiWebUrl");
+    if (!StringUtils.isBlank(webUiUrl)) {
+      this.sparkUrl = webUiUrl.replace("{{applicationId}}", sc.applicationId);
+    } else {
+      useYarnProxyURLIfNeeded()
+    }
+  }
+
   protected def createZeppelinContext(): Unit = {

     var sparkShims: SparkShims = null
@@ -329,13 +341,8 @@ abstract class BaseSparkScalaInterpreter(val conf: SparkConf,
     } else {
       sparkShims = SparkShims.getInstance(sc.version, properties, sc)
     }
-    var webUiUrl = properties.getProperty("zeppelin.spark.uiWebUrl");
-    if (StringUtils.isBlank(webUiUrl)) {
-      webUiUrl = sparkUrl;
-    }
-    useYarnProxyURLIfNeeded()

-    sparkShims.setupSparkListener(sc.master, webUiUrl, InterpreterContext.get)
+    sparkShims.setupSparkListener(sc.master, sparkUrl, InterpreterContext.get)

     z = new SparkZeppelinContext(sc, sparkShims,
       interpreterGroup.getInterpreterHookRegistry,
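With initSparkWebUrl() in place, the web UI URL is resolved once at startup: an explicit zeppelin.spark.uiWebUrl takes precedence (with {{applicationId}} expanded to sc.applicationId), and useYarnProxyURLIfNeeded() runs only as a fallback when the property is blank. For example, a deployment behind a reverse proxy could point each note's job links at the proxied UI of the right application with a setting like (hostname is hypothetical):

    zeppelin.spark.uiWebUrl = https://proxy.example.com/spark/{{applicationId}}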