-
Notifications
You must be signed in to change notification settings - Fork 29.1k
[SPARK-38249][CORE][GRAPHX] Cleanup unused private methods/fields #35566
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -25,7 +25,7 @@ import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet} | |
| import scala.util.Random | ||
|
|
||
| import org.apache.spark.{SecurityManager, SparkConf, SparkException} | ||
| import org.apache.spark.deploy.{ApplicationDescription, DriverDescription, ExecutorState, SparkHadoopUtil} | ||
| import org.apache.spark.deploy.{ApplicationDescription, DriverDescription, ExecutorState} | ||
| import org.apache.spark.deploy.DeployMessages._ | ||
| import org.apache.spark.deploy.master.DriverState.DriverState | ||
| import org.apache.spark.deploy.master.MasterMessages._ | ||
|
|
@@ -53,8 +53,6 @@ private[deploy] class Master( | |
| private val forwardMessageThread = | ||
| ThreadUtils.newDaemonSingleThreadScheduledExecutor("master-forward-message-thread") | ||
|
|
||
| private val hadoopConf = SparkHadoopUtil.get.newConfiguration(conf) | ||
|
|
||
| // For application IDs | ||
| private def createDateFormat = new SimpleDateFormat("yyyyMMddHHmmss", Locale.US) | ||
|
|
||
|
|
@@ -95,11 +93,6 @@ private[deploy] class Master( | |
| // After onStart, webUi will be set | ||
| private var webUi: MasterWebUI = null | ||
|
|
||
| private val masterPublicAddress = { | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
|
||
| val envVar = conf.getenv("SPARK_PUBLIC_DNS") | ||
| if (envVar != null) envVar else address.host | ||
| } | ||
|
|
||
| private val masterUrl = address.toSparkURL | ||
| private var masterWebUiUrl: String = _ | ||
|
|
||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -42,7 +42,6 @@ import org.apache.spark.resource.ResourceUtils._ | |
| import org.apache.spark.rpc._ | ||
| import org.apache.spark.scheduler.{ExecutorLossMessage, ExecutorLossReason, TaskDescription} | ||
| import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._ | ||
| import org.apache.spark.serializer.SerializerInstance | ||
| import org.apache.spark.util.{ChildFirstURLClassLoader, MutableURLClassLoader, SignalUtils, ThreadUtils, Utils} | ||
|
|
||
| private[spark] class CoarseGrainedExecutorBackend( | ||
|
|
@@ -65,10 +64,6 @@ private[spark] class CoarseGrainedExecutorBackend( | |
| var executor: Executor = null | ||
| @volatile var driver: Option[RpcEndpointRef] = None | ||
|
|
||
| // If this CoarseGrainedExecutorBackend is changed to support multiple threads, then this may need | ||
| // to be changed so that we don't share the serializer instance across threads | ||
| private[this] val ser: SerializerInstance = env.closureSerializer.newInstance() | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. SPARK-3386 moved this; `ser` is now unused. |
||
|
|
||
| private var _resources = Map.empty[String, ResourceInformation] | ||
|
|
||
| /** | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -244,7 +244,6 @@ class NewHadoopRDD[K, V]( | |
| } | ||
|
|
||
| private var havePair = false | ||
| private var recordsSinceMetricsUpdate = 0 | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. SPARK-2621 added this field. |
||
|
|
||
| override def hasNext: Boolean = { | ||
| if (!finished && !havePair) { | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -386,7 +386,6 @@ private[spark] object ResourceUtils extends Logging { | |
| val resourcePlugins = Utils.loadExtensions(classOf[ResourceDiscoveryPlugin], pluginClasses, | ||
| sparkConf) | ||
| // apply each plugin until one of them returns the information for this resource | ||
| var riOption: Optional[ResourceInformation] = Optional.empty() | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. SPARK-30689 added this variable. |
||
| resourcePlugins.foreach { plugin => | ||
| val riOption = plugin.discoverResource(resourceRequest, sparkConf) | ||
| if (riOption.isPresent()) { | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -21,7 +21,6 @@ import scala.reflect.{classTag, ClassTag} | |
|
|
||
| import org.apache.spark.HashPartitioner | ||
| import org.apache.spark.graphx._ | ||
| import org.apache.spark.graphx.util.BytecodeUtils | ||
| import org.apache.spark.rdd.RDD | ||
| import org.apache.spark.storage.StorageLevel | ||
|
|
||
|
|
@@ -265,14 +264,6 @@ class GraphImpl[VD: ClassTag, ED: ClassTag] protected ( | |
| } | ||
| } | ||
|
|
||
| /** Test whether the closure accesses the attribute with name `attrName`. */ | ||
| private def accessesVertexAttr(closure: AnyRef, attrName: String): Boolean = { | ||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. "Handle ClassNotFoundException from BytecodeUtils" introduced this method, and it is no longer used by any caller. |
||
| try { | ||
| BytecodeUtils.invokedMethod(closure, classOf[EdgeTriplet[VD, ED]], attrName) | ||
| } catch { | ||
| case _: ClassNotFoundException => true // if we don't know, be conservative | ||
| } | ||
| } | ||
| } // end of class GraphImpl | ||
|
|
||
|
|
||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
SPARK-2261 added `hadoopConf`, which was used by `rebuildSparkUI` in the past; SPARK-12299 removed the history-serving functionality from Master, and `hadoopConf` became unused.