Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -111,10 +111,10 @@ object RInterpreter {

// These are the additional properties we need on top of the ones provided by the spark interpreters.
// Each entry is resolved, in order, from: the environment variable, the JVM system property,
// and finally the literal default value (resolution happens in InterpreterProperty.getValue).
lazy val props: Map[String, InterpreterProperty] = new InterpreterPropertyBuilder()
  .add("rhadoop.cmd", "HADOOP_CMD", "rhadoop.cmd", "", "Usually /usr/bin/hadoop")
  // NOTE(review): "rhadooop" (triple 'o') looks like a typo, but the key is preserved
  // byte-for-byte because existing user configurations may already reference it — confirm
  // before renaming.
  .add("rhadooop.streamingjar", "HADOOP_STREAMING", "rhadooop.streamingjar", "",
    "Usually /usr/lib/hadoop/contrib/streaming/hadoop-streaming-<version>.jar")
  .add("rscala.debug", "RSCALA_DEBUG", "rscala.debug", "false",
    "Whether to turn on rScala debugging") // TEST: Implemented but not tested
  .add("rscala.timeout", "RSCALA_TIMEOUT", "rscala.timeout", "60",
    "Timeout for rScala") // TEST: Implemented but not tested
  .build

def getProps() = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
import org.apache.zeppelin.interpreter.Interpreter;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterException;
import org.apache.zeppelin.interpreter.InterpreterProperty;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResult.Code;
import org.apache.zeppelin.interpreter.InterpreterUtils;
Expand Down Expand Up @@ -444,10 +445,11 @@ public SparkContext createSparkContext_1() {
}

private void setupConfForPySpark(SparkConf conf) {
String pysparkBasePath = getSystemDefault("SPARK_HOME", null, null);
String pysparkBasePath = new InterpreterProperty("SPARK_HOME", null, null, null).getValue();
File pysparkPath;
if (null == pysparkBasePath) {
pysparkBasePath = getSystemDefault("ZEPPELIN_HOME", "zeppelin.home", "../");
pysparkBasePath =
new InterpreterProperty("ZEPPELIN_HOME", "zeppelin.home", "../", null).getValue();
pysparkPath = new File(pysparkBasePath,
"interpreter" + File.separator + "spark" + File.separator + "pyspark");
} else {
Expand Down Expand Up @@ -497,27 +499,6 @@ private boolean useSparkSubmit() {
return null != System.getenv("SPARK_SUBMIT");
}

/**
 * Resolves a configuration value by consulting, in priority order: the named
 * environment variable, the named JVM system property, and finally the supplied
 * default.
 *
 * @param envName      environment variable consulted first; skipped when null or empty
 * @param propertyName JVM system property consulted second; skipped when null or empty
 * @param defaultValue value returned when neither source yields a non-null value
 * @return the first value found, or {@code defaultValue} when none is set
 */
public static String getSystemDefault(
    String envName,
    String propertyName,
    String defaultValue) {

  // Environment variables take precedence over JVM system properties.
  if (envName != null && !envName.isEmpty()) {
    String fromEnv = System.getenv(envName);
    if (fromEnv != null) {
      return fromEnv;
    }
  }

  if (propertyName != null && !propertyName.isEmpty()) {
    String fromProps = System.getProperty(propertyName);
    if (fromProps != null) {
      return fromProps;
    }
  }

  return defaultValue;
}

public boolean printREPLOutput() {
return java.lang.Boolean.parseBoolean(getProperty("zeppelin.spark.printREPLOutput"));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,6 @@ public void setDescription(String description) {
}

public String getValue() {
//TODO(jongyoul): Remove SparkInterpreter's getSystemDefault method
if (envName != null && !envName.isEmpty()) {
String envValue = System.getenv().get(envName);
if (envValue != null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,13 @@ public InterpreterPropertyBuilder add(String name, String defaultValue, String d
return this;
}

/**
 * Registers a property whose value can be resolved from an environment variable,
 * a JVM system property, or a literal default.
 *
 * @param name         key under which the property is registered
 * @param envName      environment variable consulted first when resolving the value
 * @param propertyName JVM system property consulted second
 * @param defaultValue value used when neither source is set
 * @param description  human-readable description of the property
 * @return this builder, to allow call chaining
 */
public InterpreterPropertyBuilder add(String name, String envName, String propertyName,
    String defaultValue, String description){
  InterpreterProperty property =
      new InterpreterProperty(envName, propertyName, defaultValue, description);
  properties.put(name, property);
  return this;
}

/**
 * Returns the name-to-property map accumulated by prior {@code add} calls.
 * Note: the builder's internal map is returned directly, not a defensive copy,
 * so later {@code add} calls on this builder mutate the returned map.
 */
public Map<String, InterpreterProperty> build(){
return properties;
}
Expand Down