Skip to content

Commit f390b13

Browse files
committed
Better visibility for workaround constructors
1 parent d6b0afd commit f390b13

File tree

2 files changed: +11 additions, −11 deletions

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -85,10 +85,6 @@ class SparkContext(config: SparkConf)
8585
def this(master: String, appName: String, conf: SparkConf) =
8686
this(SparkContext.updatedConf(conf, master, appName))
8787

88-
// NOTE: The below constructors could be consolidated using default arguments. Due to
89-
// Scala bug SI-8479, however, this causes the compile step to fail when generating docs.
90-
// Until we have a good workaround for that bug the constructors remain broken out.
91-
9288
/**
9389
* Alternative constructor that allows setting common Spark properties directly
9490
*
@@ -111,14 +107,18 @@ class SparkContext(config: SparkConf)
111107
this.preferredNodeLocationData = preferredNodeLocationData
112108
}
113109

110+
// NOTE: The below constructors could be consolidated using default arguments. Due to
111+
// Scala bug SI-8479, however, this causes the compile step to fail when generating docs.
112+
// Until we have a good workaround for that bug the constructors remain broken out.
113+
114114
/**
115115
* Alternative constructor that allows setting common Spark properties directly
116116
*
117117
* @param master Cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4]).
118118
* @param appName A name for your application, to display on the cluster web UI.
119119
*/
120-
def this(master: String, appName: String) =
121-
this(master, appName, null, Nil, Map())
120+
private[spark] def this(master: String, appName: String) =
121+
this(master, appName, null, Nil, Map(), Map())
122122

123123
/**
124124
* Alternative constructor that allows setting common Spark properties directly
@@ -127,8 +127,8 @@ class SparkContext(config: SparkConf)
127127
* @param appName A name for your application, to display on the cluster web UI.
128128
* @param sparkHome Location where Spark is installed on cluster nodes.
129129
*/
130-
def this(master: String, appName: String, sparkHome: String) =
131-
this(master, appName, sparkHome, Nil, Map())
130+
private[spark] def this(master: String, appName: String, sparkHome: String) =
131+
this(master, appName, sparkHome, Nil, Map(), Map())
132132

133133
/**
134134
* Alternative constructor that allows setting common Spark properties directly
@@ -139,8 +139,8 @@ class SparkContext(config: SparkConf)
139139
* @param jars Collection of JARs to send to the cluster. These can be paths on the local file
140140
* system or HDFS, HTTP, HTTPS, or FTP URLs.
141141
*/
142-
def this(master: String, appName: String, sparkHome: String, jars: Seq[String]) =
143-
this(master, appName, sparkHome, jars, Map())
142+
private[spark] def this(master: String, appName: String, sparkHome: String, jars: Seq[String]) =
143+
this(master, appName, sparkHome, jars, Map(), Map())
144144

145145
private[spark] val conf = config.clone()
146146

core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,7 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWork
8989
*/
9090
def this(master: String, appName: String, sparkHome: String, jars: Array[String],
9191
environment: JMap[String, String]) =
92-
this(new SparkContext(master, appName, sparkHome, jars.toSeq, environment))
92+
this(new SparkContext(master, appName, sparkHome, jars.toSeq, environment, Map()))
9393

9494
private[spark] val env = sc.env
9595

Commit comments: 0