Skip to content

Commit fd84156

Browse files
committed
use randomUUID to generate the Spark app directory name on Tachyon; minor code style fix
1 parent 939e467 commit fd84156

File tree

3 files changed

+5
-5
lines changed

3 files changed

+5
-5
lines changed

core/src/main/java/org/apache/spark/network/netty/TachyonFilePathResolver.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,6 @@
2121
import org.apache.spark.storage.TachyonFileSegment;
2222

2323
public interface TachyonFilePathResolver {
24-
/** Get the file segment in which the given block resides. */
24+
/** Get the file segment in which the given block resides. This is not a user-facing API*/
2525
TachyonFileSegment getBlockLocation(BlockId blockId);
2626
}

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ import java.io._
2121
import java.net.URI
2222
import java.util.{Properties, UUID}
2323
import java.util.concurrent.atomic.AtomicInteger
24+
import java.util.UUID.randomUUID
2425
import scala.collection.{Map, Set}
2526
import scala.collection.generic.Growable
2627
import scala.collection.mutable.{ArrayBuffer, HashMap}
@@ -42,7 +43,6 @@ import org.apache.spark.scheduler.local.LocalBackend
4243
import org.apache.spark.storage.{BlockManagerSource, RDDInfo, StorageStatus, StorageUtils}
4344
import org.apache.spark.ui.SparkUI
4445
import org.apache.spark.util.{ClosureCleaner, MetadataCleaner, MetadataCleanerType, TimeStampedHashMap, Utils}
45-
import java.util.Random
4646

4747
/**
4848
* Main entry point for Spark functionality. A SparkContext represents the connection to a Spark
@@ -128,7 +128,7 @@ class SparkContext(
128128

129129
// Generate the random name for a temp folder in Tachyon
130130
// Add a timestamp as the suffix here to make it more safe
131-
val tachyonFolderName = new Random().nextInt() + "_" + System.currentTimeMillis()
131+
val tachyonFolderName = "spark-" + randomUUID.toString()
132132
conf.set("spark.tachyonstore.foldername", tachyonFolderName)
133133

134134
val isLocal = (master == "local" || master.startsWith("local["))

core/src/main/scala/org/apache/spark/storage/BlockManager.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ private[spark] class BlockManager(
5858
private[storage] val memoryStore: BlockStore = new MemoryStore(this, maxMemory)
5959
private[storage] val diskStore = new DiskStore(this, diskBlockManager)
6060
var tachyonInitialized = false
61-
private[storage] lazy val tachyonStore : TachyonStore = {
61+
private[storage] lazy val tachyonStore: TachyonStore = {
6262
val storeDir = conf.get("spark.tachyonstore.dir", System.getProperty("java.io.tmpdir"))
6363
val appFolderName = conf.get("spark.tachyonstore.foldername")
6464
val tachyonStorePath = s"${storeDir}/${appFolderName}/${this.executorId}"
@@ -1000,7 +1000,7 @@ private[spark] class BlockManager(
10001000
blockInfo.clear()
10011001
memoryStore.clear()
10021002
diskStore.clear()
1003-
if(tachyonInitialized) {
1003+
if (tachyonInitialized) {
10041004
tachyonStore.clear()
10051005
}
10061006
metadataCleaner.cancel()

0 commit comments

Comments (0)