
Commit 897a914

Remove unused hash map
1 parent f5be578 · commit 897a914

1 file changed (+0, -7 lines)

core/src/main/scala/org/apache/spark/MapOutputTracker.scala

Lines changed: 0 additions & 7 deletions
@@ -233,11 +233,6 @@ private[spark] class MapOutputTrackerMaster(conf: SparkConf)
   protected val mapStatuses = new TimeStampedHashMap[Int, Array[MapStatus]]()
   private val cachedSerializedStatuses = new TimeStampedHashMap[Int, Array[Byte]]()
 
-  // For each shuffleId we also maintain a Map from reducerId -> (locations with largest outputs)
-  // Lazily populated whenever the statuses are requested from DAGScheduler
-  private val shuffleIdToReduceLocations =
-    new TimeStampedHashMap[Int, HashMap[Int, Array[BlockManagerId]]]()
-
   // For cleaning up TimeStampedHashMaps
   private val metadataCleaner =
     new MetadataCleaner(MetadataCleanerType.MAP_OUTPUT_TRACKER, this.cleanup, conf)
@@ -283,7 +278,6 @@ private[spark] class MapOutputTrackerMaster(conf: SparkConf)
   override def unregisterShuffle(shuffleId: Int) {
     mapStatuses.remove(shuffleId)
     cachedSerializedStatuses.remove(shuffleId)
-    shuffleIdToReduceLocations.remove(shuffleId)
   }
 
   /** Check if the given shuffle is being tracked */
@@ -386,7 +380,6 @@ private[spark] class MapOutputTrackerMaster(conf: SparkConf)
   private def cleanup(cleanupTime: Long) {
     mapStatuses.clearOldValues(cleanupTime)
     cachedSerializedStatuses.clearOldValues(cleanupTime)
-    shuffleIdToReduceLocations.clearOldValues(cleanupTime)
   }
 }

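For context on the surrounding code: mapStatuses, cachedSerializedStatuses, and the removed shuffleIdToReduceLocations are all TimeStampedHashMap fields, and the MetadataCleaner periodically calls cleanup(cleanupTime) to evict stale entries from each of them. Below is a minimal sketch of the idea behind such a timestamped map; TimeStampedMapSketch and its internals are illustrative names only, not Spark's actual TimeStampedHashMap implementation.

import scala.collection.concurrent.TrieMap

// Illustrative sketch only: every write records the current time, and
// clearOldValues drops entries last written before a given cutoff.
class TimeStampedMapSketch[K, V] {
  private val underlying = TrieMap.empty[K, (V, Long)]

  def put(key: K, value: V): Unit =
    underlying.put(key, (value, System.currentTimeMillis()))

  def get(key: K): Option[V] =
    underlying.get(key).map(_._1)

  def remove(key: K): Unit =
    underlying.remove(key)

  // Evict entries older than cleanupTime, mirroring the periodic
  // cleanup(cleanupTime) calls shown in the diff above.
  def clearOldValues(cleanupTime: Long): Unit =
    underlying.foreach { case (key, (_, timestamp)) =>
      if (timestamp < cleanupTime) underlying.remove(key)
    }
}

Under this sketch, unregisterShuffle-style bookkeeping corresponds to remove(shuffleId) and the MetadataCleaner's periodic pass corresponds to clearOldValues(cutoff); the commit simply drops one such map that was never read.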