diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
index 4816e4f16a4..e7040fa9cbc 100644
--- a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
+++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
@@ -174,7 +174,7 @@ public void sparkRTest() throws IOException {
     Map config = p.getConfig();
     config.put("enabled", true);
     p.setConfig(config);
-    p.setText("%r localDF <- data.frame(name=c(\"a\", \"b\", \"c\"), age=c(19, 23, 18))\n" +
+    p.setText("%spark.r localDF <- data.frame(name=c(\"a\", \"b\", \"c\"), age=c(19, 23, 18))\n" +
        "df <- createDataFrame(" + sqlContextName + ", localDF)\n" +
        "count(df)"
    );
@@ -424,7 +424,7 @@ public void pySparkDepLoaderTest() throws IOException {
     p1.setText("%pyspark\n" +
        "from pyspark.sql import SQLContext\n" +
        "print(" + sqlContextName + ".read.format('com.databricks.spark.csv')" +
-       ".load('"+ tmpFile.getAbsolutePath() +"').count())");
+       ".load('file:///"+ tmpFile.getAbsolutePath() +"').count())");
     p1.setAuthenticationInfo(anonymous);
     note.run(p1.getId());