core/src/main/scala/org/apache/spark/deploy/master/Master.scala
@@ -25,7 +25,7 @@ import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
import scala.util.Random

import org.apache.spark.{SecurityManager, SparkConf, SparkException}
-import org.apache.spark.deploy.{ApplicationDescription, DriverDescription, ExecutorState, SparkHadoopUtil}
+import org.apache.spark.deploy.{ApplicationDescription, DriverDescription, ExecutorState}
import org.apache.spark.deploy.DeployMessages._
import org.apache.spark.deploy.master.DriverState.DriverState
import org.apache.spark.deploy.master.MasterMessages._
@@ -53,8 +53,6 @@ private[deploy] class Master(
private val forwardMessageThread =
ThreadUtils.newDaemonSingleThreadScheduledExecutor("master-forward-message-thread")

-private val hadoopConf = SparkHadoopUtil.get.newConfiguration(conf)
@LuciferYang (Contributor, Author) commented on Feb 18, 2022:
SPARK-2261 added hadoopConf, which was used by rebuildSparkUI at the time; after SPARK-12299 ("Remove history serving functionality from Master"), hadoopConf became unused.


// For application IDs
private def createDateFormat = new SimpleDateFormat("yyyyMMddHHmmss", Locale.US)

@@ -95,11 +93,6 @@ private[deploy] class Master(
// After onStart, webUi will be set
private var webUi: MasterWebUI = null

-private val masterPublicAddress = {
@LuciferYang (Contributor, Author) commented:
masterPublicAddress was added in "Use external addresses in standalone WebUI on EC2" and has been unused since SPARK-33774.

-  val envVar = conf.getenv("SPARK_PUBLIC_DNS")
-  if (envVar != null) envVar else address.host
-}

private val masterUrl = address.toSparkURL
private var masterWebUiUrl: String = _

core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
@@ -42,7 +42,6 @@ import org.apache.spark.resource.ResourceUtils._
import org.apache.spark.rpc._
import org.apache.spark.scheduler.{ExecutorLossMessage, ExecutorLossReason, TaskDescription}
import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._
-import org.apache.spark.serializer.SerializerInstance
import org.apache.spark.util.{ChildFirstURLClassLoader, MutableURLClassLoader, SignalUtils, ThreadUtils, Utils}

private[spark] class CoarseGrainedExecutorBackend(
@@ -65,10 +64,6 @@ private[spark] class CoarseGrainedExecutorBackend(
var executor: Executor = null
@volatile var driver: Option[RpcEndpointRef] = None

-// If this CoarseGrainedExecutorBackend is changed to support multiple threads, then this may need
-// to be changed so that we don't share the serializer instance across threads
-private[this] val ser: SerializerInstance = env.closureSerializer.newInstance()
@LuciferYang (Contributor, Author) commented:
SPARK-3386 moved ser out of the receive method; it was used to deserialize TaskDescription, but SPARK-17931 changed this so that TaskDescription does that work itself.
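
A rough before/after sketch of the LaunchTask handling this refers to (paraphrased, not the exact diff; assumes TaskDescription.decode, the codec SPARK-17931 introduced):

    // Before SPARK-17931 (roughly): the backend deserialized tasks with its
    // own closure-serializer instance.
    case LaunchTask(data) =>
      val taskDesc = ser.deserialize[TaskDescription](data.value)

    // After SPARK-17931 (roughly): TaskDescription defines its own
    // encode/decode, so the ser field above became dead code.
    case LaunchTask(data) =>
      val taskDesc = TaskDescription.decode(data.value)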


private var _resources = Map.empty[String, ResourceInformation]

/**
core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala
@@ -244,7 +244,6 @@ class NewHadoopRDD[K, V](
}

private var havePair = false
-private var recordsSinceMetricsUpdate = 0
@LuciferYang (Contributor, Author) commented:
SPARK-2621 added recordsSinceMetricsUpdate for task metrics; after SPARK-4092 it is never used.


override def hasNext: Boolean = {
if (!finished && !havePair) {
core/src/main/scala/org/apache/spark/resource/ResourceUtils.scala
@@ -386,7 +386,6 @@ private[spark] object ResourceUtils extends Logging {
val resourcePlugins = Utils.loadExtensions(classOf[ResourceDiscoveryPlugin], pluginClasses,
sparkConf)
// apply each plugin until one of them returns the information for this resource
-var riOption: Optional[ResourceInformation] = Optional.empty()
@LuciferYang (Contributor, Author) commented:
SPARK-30689 added riOption, but it was never used because a definition with the same name inside the resourcePlugins.foreach loop shadows it.
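
A minimal, self-contained illustration of that shadowing (hypothetical names, not Spark code):

    object ShadowingDemo {
      def main(args: Array[String]): Unit = {
        // Outer definition, analogous to the removed `riOption`: the loop
        // below never touches it.
        var result: Option[String] = None
        Seq("a", "b").foreach { s =>
          // Inner definition with the same name shadows the outer one, so
          // every reference in the loop body resolves to this val.
          val result = Option(s)
          println(result)
        }
        println(result) // still None: only the inner val was ever assigned
      }
    }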

resourcePlugins.foreach { plugin =>
val riOption = plugin.discoverResource(resourceRequest, sparkConf)
if (riOption.isPresent()) {
graphx/src/main/scala/org/apache/spark/graphx/impl/GraphImpl.scala
@@ -21,7 +21,6 @@ import scala.reflect.{classTag, ClassTag}

import org.apache.spark.HashPartitioner
import org.apache.spark.graphx._
-import org.apache.spark.graphx.util.BytecodeUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel

@@ -265,14 +264,6 @@ class GraphImpl[VD: ClassTag, ED: ClassTag] protected (
}
}

-/** Test whether the closure accesses the attribute with name `attrName`. */
-private def accessesVertexAttr(closure: AnyRef, attrName: String): Boolean = {
@LuciferYang (Contributor, Author) commented:
"Handle ClassNotFoundException from ByteCodeUtils" introduced this method, which was used by mapReduceTriplets; after "Code cleaning to improve readability: clean up mapReduceTriplets from GraphImpl.scala", mapReduceTriplets is no longer used.
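
For context, the removed helper "fails open": when the bytecode inspection cannot load a class, it assumes the attribute is accessed. A standalone sketch of that pattern (hypothetical names, not the Spark API):

    object ConservativeCheck {
      // Returns the analysis result when available, and the safe default
      // (true = "assume the attribute is used") when the analysis fails.
      def accessesAttr(analyze: () => Boolean): Boolean =
        try analyze() catch { case _: ClassNotFoundException => true }

      def main(args: Array[String]): Unit = {
        println(accessesAttr(() => false))                              // analysis succeeded: false
        println(accessesAttr(() => throw new ClassNotFoundException())) // analysis failed: true
      }
    }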

-  try {
-    BytecodeUtils.invokedMethod(closure, classOf[EdgeTriplet[VD, ED]], attrName)
-  } catch {
-    case _: ClassNotFoundException => true // if we don't know, be conservative
-  }
-}
} // end of class GraphImpl

