Commit 12f6300

add some comment
1 parent 1c36bfe commit 12f6300

1 file changed: 4 additions and 2 deletions


core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala

Lines changed: 4 additions & 2 deletions
@@ -160,6 +160,8 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
     new HistoryServerDiskManager(conf, path, listing, clock)
   }
 
+  // Used to store the paths which are being processed. This enables the replay log tasks to
+  // execute asynchronously and makes sure that checkForLogs does not process a path repeatedly.
   private val processing = ConcurrentHashMap.newKeySet[String]
 
   private def isProcessing(path: Path): Boolean = {
@@ -170,7 +172,7 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
     processing.add(path.getName)
   }
 
-  private def endProcess(path: Path): Unit = {
+  private def endProcessing(path: Path): Unit = {
     processing.remove(path.getName)
   }
 
@@ -685,7 +687,7 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
       case e: Exception =>
         logError("Exception while merging application listings", e)
     } finally {
-      endProcess(fileStatus.getPath)
+      endProcessing(fileStatus.getPath)
       pendingReplayTasksCount.decrementAndGet()
     }
   }
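
For context, the new comment describes a guard pattern: checkForLogs marks a path as in flight before submitting its replay task, skips any path already marked, and the task clears the mark in a finally block when it finishes. Below is a minimal, self-contained Scala sketch of that pattern; the names checkForLogs, isProcessing, and endProcessing mirror the diff, while everything else (ProcessingGuardSketch, the executor, the replay callback) is illustrative and not part of the actual FsHistoryProvider implementation.

import java.util.concurrent.{ConcurrentHashMap, ExecutorService, Executors}

// Illustrative sketch only; not the real FsHistoryProvider.
object ProcessingGuardSketch {
  // Names of log paths whose replay tasks are currently in flight.
  private val processing = ConcurrentHashMap.newKeySet[String]()

  // Hypothetical pool standing in for the history server's replay executor.
  private val replayExecutor: ExecutorService = Executors.newFixedThreadPool(4)

  private def isProcessing(name: String): Boolean = processing.contains(name)

  private def startProcessing(name: String): Unit = processing.add(name)

  private def endProcessing(name: String): Unit = processing.remove(name)

  // Hypothetical scan loop: paths already being processed are skipped, so a
  // rescan never submits a second replay task for the same log.
  def checkForLogs(paths: Seq[String])(replay: String => Unit): Unit = {
    paths.filterNot(isProcessing).foreach { name =>
      startProcessing(name)
      replayExecutor.submit(new Runnable {
        override def run(): Unit = {
          try replay(name)
          finally endProcessing(name) // always clear the mark, even on failure
        }
      })
    }
  }
}

Using ConcurrentHashMap.newKeySet gives a thread-safe set, so the scanning thread and the replay threads can add and remove path names without any extra locking.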
