
Commit e91750a

Merge branch 'master' into SPARK-3454_w_jersey

Conflicts:
    core/pom.xml
    core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
    core/src/main/scala/org/apache/spark/deploy/master/Master.scala
    core/src/main/scala/org/apache/spark/ui/WebUI.scala
2 parents: 56d2fc7 + 4cca391

94 files changed: 1154 additions, 557 deletions


bin/pyspark

Lines changed: 0 additions & 1 deletion
@@ -89,7 +89,6 @@ export PYTHONSTARTUP="$SPARK_HOME/python/pyspark/shell.py"
 if [[ -n "$SPARK_TESTING" ]]; then
   unset YARN_CONF_DIR
   unset HADOOP_CONF_DIR
-  export PYSPARK_SUBMIT_ARGS=pyspark-shell
   if [[ -n "$PYSPARK_DOC_TEST" ]]; then
     exec "$PYSPARK_DRIVER_PYTHON" -m doctest $1
   else

core/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -429,7 +429,7 @@
   <overWriteIfNewer>true</overWriteIfNewer>
   <useSubDirectoryPerType>true</useSubDirectoryPerType>
   <includeArtifactIds>
-    guava,jetty-io,jetty-servlet,jetty-continuation,jetty-http,jetty-plus,jetty-util,jetty-server,asm
+    guava,jetty-io,jetty-servlet,jetty-continuation,jetty-http,jetty-plus,jetty-util,jetty-server,jetty-security,asm
   </includeArtifactIds>
   <silent>true</silent>
 </configuration>

core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala

Lines changed: 2 additions & 1 deletion
@@ -32,7 +32,8 @@ import org.apache.spark.storage.StorageLevel
 import org.apache.spark.util.StatCounter
 import org.apache.spark.util.Utils
 
-class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[JDouble, JavaDoubleRDD] {
+class JavaDoubleRDD(val srdd: RDD[scala.Double])
+  extends AbstractJavaRDDLike[JDouble, JavaDoubleRDD] {
 
   override val classTag: ClassTag[JDouble] = implicitly[ClassTag[JDouble]]
 
core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala

Lines changed: 1 addition & 1 deletion
@@ -44,7 +44,7 @@ import org.apache.spark.util.Utils
 
 class JavaPairRDD[K, V](val rdd: RDD[(K, V)])
   (implicit val kClassTag: ClassTag[K], implicit val vClassTag: ClassTag[V])
-  extends JavaRDDLike[(K, V), JavaPairRDD[K, V]] {
+  extends AbstractJavaRDDLike[(K, V), JavaPairRDD[K, V]] {
 
   override def wrapRDD(rdd: RDD[(K, V)]): JavaPairRDD[K, V] = JavaPairRDD.fromRDD(rdd)
 
core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala

Lines changed: 1 addition & 1 deletion
@@ -30,7 +30,7 @@ import org.apache.spark.storage.StorageLevel
 import org.apache.spark.util.Utils
 
 class JavaRDD[T](val rdd: RDD[T])(implicit val classTag: ClassTag[T])
-  extends JavaRDDLike[T, JavaRDD[T]] {
+  extends AbstractJavaRDDLike[T, JavaRDD[T]] {
 
   override def wrapRDD(rdd: RDD[T]): JavaRDD[T] = JavaRDD.fromRDD(rdd)
 
core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala

Lines changed: 8 additions & 0 deletions
@@ -38,6 +38,14 @@ import org.apache.spark.rdd.RDD
 import org.apache.spark.storage.StorageLevel
 import org.apache.spark.util.Utils
 
+/**
+ * As a workaround for https://issues.scala-lang.org/browse/SI-8905, implementations
+ * of JavaRDDLike should extend this dummy abstract class instead of directly inheriting
+ * from the trait. See SPARK-3266 for additional details.
+ */
+private[spark] abstract class AbstractJavaRDDLike[T, This <: JavaRDDLike[T, This]]
+  extends JavaRDDLike[T, This]
+
 /**
  * Defines operations common to several Java RDD implementations.
  * Note that this trait is not intended to be implemented by user code.
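The doc comment above is terse, so here is a minimal, self-contained sketch of the same workaround pattern. All names below (Ops, AbstractOps, IntOps) are hypothetical and for illustration only, not Spark API: concrete methods live in a trait, and an empty abstract class sits between the trait and its implementations so Java callers are not affected by SI-8905.

// A minimal sketch of the SI-8905 workaround (hypothetical names).
trait Ops[T, This <: Ops[T, This]] {
  def wrap(value: T): This
  // A concrete method defined in the trait: inheriting such traits
  // directly is what triggers SI-8905 for Java subclasses.
  def twice(value: T): This = wrap(value)
}

// The empty "dummy" abstract class inserted between the trait and its
// implementations, mirroring AbstractJavaRDDLike above.
abstract class AbstractOps[T, This <: Ops[T, This]] extends Ops[T, This]

// Concrete implementations extend the abstract class, not the trait.
class IntOps extends AbstractOps[Int, IntOps] {
  override def wrap(value: Int): IntOps = new IntOps
}

JavaDoubleRDD, JavaPairRDD, and JavaRDD in this commit follow exactly this shape, swapping their extends clause from the trait to the abstract class.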

core/src/main/scala/org/apache/spark/deploy/ClientArguments.scala

Lines changed: 6 additions & 6 deletions
@@ -28,7 +28,7 @@ import org.apache.spark.util.{IntParam, MemoryParam}
 /**
  * Command-line parser for the driver client.
  */
-private[spark] class ClientArguments(args: Array[String]) {
+private[deploy] class ClientArguments(args: Array[String]) {
   import ClientArguments._
 
   var cmd: String = "" // 'launch' or 'kill'
@@ -96,7 +96,7 @@ private[spark] class ClientArguments(args: Array[String]) {
   /**
    * Print usage and exit JVM with the given exit code.
    */
-  def printUsageAndExit(exitCode: Int) {
+  private def printUsageAndExit(exitCode: Int) {
     // TODO: It wouldn't be too hard to allow users to submit their app and dependency jars
     // separately similar to in the YARN client.
     val usage =
@@ -116,10 +116,10 @@ private[spark] class ClientArguments(args: Array[String]) {
   }
 }
 
-object ClientArguments {
-  private[spark] val DEFAULT_CORES = 1
-  private[spark] val DEFAULT_MEMORY = 512 // MB
-  private[spark] val DEFAULT_SUPERVISE = false
+private[deploy] object ClientArguments {
+  val DEFAULT_CORES = 1
+  val DEFAULT_MEMORY = 512 // MB
+  val DEFAULT_SUPERVISE = false
 
   def isValidJarUrl(s: String): Boolean = {
     try {
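Several files in this commit narrow private[spark] to private[deploy]. As a quick illustration of Scala's package-qualified access modifiers, the sketch below uses hypothetical names (Parser, SamePackage, OtherPackage): private[p] makes a definition visible inside package p and its subpackages, so private[deploy] hides these classes from the rest of org.apache.spark.

package org.apache.spark.deploy {
  // Visible throughout org.apache.spark.deploy (and its subpackages),
  // but no longer to the rest of org.apache.spark.
  private[deploy] class Parser

  object SamePackage {
    val p = new Parser // OK: same package as Parser
  }
}

package org.apache.spark.scheduler {
  object OtherPackage {
    // Would not compile: Parser is private[deploy], and scheduler
    // sits outside the deploy package.
    // val p = new org.apache.spark.deploy.Parser
  }
}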

core/src/main/scala/org/apache/spark/deploy/DriverDescription.scala

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@
 
 package org.apache.spark.deploy
 
-private[spark] class DriverDescription(
+private[deploy] class DriverDescription(
     val jarUrl: String,
     val mem: Int,
     val cores: Int,

core/src/main/scala/org/apache/spark/deploy/ExecutorDescription.scala

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@ package org.apache.spark.deploy
  * This state is sufficient for the Master to reconstruct its internal data structures during
  * failover.
  */
-private[spark] class ExecutorDescription(
+private[deploy] class ExecutorDescription(
     val appId: String,
     val execId: Int,
     val cores: Int,

core/src/main/scala/org/apache/spark/deploy/ExecutorState.scala

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@
 
 package org.apache.spark.deploy
 
-private[spark] object ExecutorState extends Enumeration {
+private[deploy] object ExecutorState extends Enumeration {
 
   val LAUNCHING, LOADING, RUNNING, KILLED, FAILED, LOST, EXITED = Value
 