Skip to content

Commit d402506

Browse files
committed
Fixed imports.
1 parent 4a20531 commit d402506

File tree

1 file changed

+5
-6
lines changed

1 file changed

+5
-6
lines changed

core/src/main/scala/org/apache/spark/storage/BlockManager.scala

Lines changed: 5 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -22,7 +22,7 @@ import java.nio.{ByteBuffer, MappedByteBuffer}
 
 import scala.concurrent.ExecutionContext.Implicits.global
 
-import scala.collection.mutable.{ArrayBuffer, HashMap}
+import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
 import scala.concurrent.{Await, Future}
 import scala.concurrent.duration._
 import scala.util.Random
@@ -37,7 +37,6 @@ import org.apache.spark.network._
 import org.apache.spark.serializer.Serializer
 import org.apache.spark.shuffle.ShuffleManager
 import org.apache.spark.util._
-import scala.collection.mutable
 
 
 private[spark] sealed trait BlockValues
@@ -112,7 +111,7 @@ private[spark] class BlockManager(
     MetadataCleanerType.BLOCK_MANAGER, this.dropOldNonBroadcastBlocks, conf)
   private val broadcastCleaner = new MetadataCleaner(
     MetadataCleanerType.BROADCAST_VARS, this.dropOldBroadcastBlocks, conf)
-  private val cachedPeers = new mutable.HashSet[BlockManagerId]
+  private val cachedPeers = new HashSet[BlockManagerId]
   private var lastPeerFetchTime = 0L
 
   initialize()
@@ -792,7 +791,7 @@ private[spark] class BlockManager(
   /**
   * Get peer block managers in the system.
   */
-  private def getPeers(forceFetch: Boolean): mutable.Set[BlockManagerId] = {
+  private def getPeers(forceFetch: Boolean): HashSet[BlockManagerId] = {
     val cachedPeersTtl = conf.getInt("spark.storage.cachedPeersTtl", 1000) // milliseconds
     def timeout = System.currentTimeMillis - lastPeerFetchTime > cachedPeersTtl
 
@@ -813,8 +812,8 @@ private[spark] class BlockManager(
   private def replicate(blockId: BlockId, data: ByteBuffer, level: StorageLevel): Unit = {
     val maxReplicationFailures = conf.getInt("spark.storage.maxReplicationFailures", 1)
     val numPeersToReplicateTo = level.replication - 1
-    val peersReplicatedTo = new mutable.HashSet[BlockManagerId]
-    val peersFailedToReplicateTo = new mutable.HashSet[BlockManagerId]
+    val peersReplicatedTo = new HashSet[BlockManagerId]
+    val peersFailedToReplicateTo = new HashSet[BlockManagerId]
     val tLevel = StorageLevel(
       level.useDisk, level.useMemory, level.useOffHeap, level.deserialized, 1)
     val startTime = System.nanoTime

0 commit comments

Comments
 (0)