
Commit e9ebb6f

fix ut
1 parent c6ac35e commit e9ebb6f

2 files changed: +12 -17 lines changed


core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala

Lines changed: 3 additions & 3 deletions
@@ -665,6 +665,9 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
     try {
       pendingReplayTasksCount.incrementAndGet()
       doMergeApplicationListing(reader, scanTime, enableOptimizations)
+      if (conf.get(CLEANER_ENABLED)) {
+        checkAndCleanLog(reader.rootPath.toString)
+      }
     } catch {
       case e: InterruptedException =>
         throw e
@@ -680,9 +683,6 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
     } finally {
       endProcessing(reader.rootPath)
       pendingReplayTasksCount.decrementAndGet()
-      if (conf.get(CLEANER_ENABLED)) {
-        checkAndCleanLog(reader.rootPath.toString)
-      }
     }
   }
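
Net effect of this change, as a sketch: checkAndCleanLog() now runs inside the try block, right after doMergeApplicationListing() succeeds, instead of unconditionally in the finally block. The enclosing helper below is reconstructed for context only; its name and signature are not part of the diff and should be treated as assumptions.

// Sketch of the surrounding helper after this commit; the method name and
// signature are assumptions, only the diffed lines above come from the commit.
private def mergeApplicationListing(
    reader: EventLogFileReader,
    scanTime: Long,
    enableOptimizations: Boolean): Unit = {
  try {
    pendingReplayTasksCount.incrementAndGet()
    doMergeApplicationListing(reader, scanTime, enableOptimizations)
    if (conf.get(CLEANER_ENABLED)) {
      // Clean this log only after its listing has been merged successfully.
      checkAndCleanLog(reader.rootPath.toString)
    }
  } catch {
    case e: InterruptedException =>
      throw e
    // (other failure cases elided)
  } finally {
    // Always release the processing slot and the pending-task counter,
    // even when the merge fails.
    endProcessing(reader.rootPath)
    pendingReplayTasksCount.decrementAndGet()
  }
}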

core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala

Lines changed: 9 additions & 14 deletions
@@ -1322,21 +1322,16 @@ class FsHistoryProviderSuite extends SparkFunSuite with Matchers with Logging {
   }

   test("SPARK-29043: clean up specified event log") {
-    def getLogPath(logFile: File): String = {
-      val uri = logFile.toURI
-      uri.getScheme + File.pathSeparator + uri.getPath
-    }
-
     val clock = new ManualClock()
-    val conf = createTestConf().set(MAX_LOG_AGE_S.key, "2d")
+    val conf = createTestConf().set(MAX_LOG_AGE_S.key, "0").set(CLEANER_ENABLED.key, "true")
     val provider = new FsHistoryProvider(conf, clock)

     // create an invalid application log file
-    val nonValidLogFile = newLogFile("NonValidLogFile", None, inProgress = true)
-    nonValidLogFile.createNewFile()
-    writeFile(nonValidLogFile, None,
-      SparkListenerApplicationStart(nonValidLogFile.getName, None, 1L, "test", None))
-    nonValidLogFile.setLastModified(clock.getTimeMillis())
+    val inValidLogFile = newLogFile("inValidLogFile", None, inProgress = true)
+    inValidLogFile.createNewFile()
+    writeFile(inValidLogFile, None,
+      SparkListenerApplicationStart(inValidLogFile.getName, None, 1L, "test", None))
+    inValidLogFile.setLastModified(clock.getTimeMillis())

     // create a valid application log file
     val validLogFile = newLogFile("validLogFile", None, inProgress = true)
@@ -1346,12 +1341,12 @@ class FsHistoryProviderSuite extends SparkFunSuite with Matchers with Logging {
     validLogFile.setLastModified(clock.getTimeMillis())

     provider.checkForLogs()
-    clock.advance(TimeUnit.DAYS.toMillis(2))
-    provider.checkAndCleanLog(getLogPath(nonValidLogFile))
+    // The invalid application log file would be cleaned by checkAndCleanLog().
     assert(new File(testDir.toURI).listFiles().size === 1)

     clock.advance(1)
-    provider.checkAndCleanLog(getLogPath(validLogFile))
+    // cleanLogs() would clean the valid application log file.
+    provider.cleanLogs()
     assert(new File(testDir.toURI).listFiles().size === 0)
   }
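
Why the test changed, in short: with CLEANER_ENABLED set to "true" and MAX_LOG_AGE_S set to "0", checkForLogs() now exercises the checkAndCleanLog() path added above for the invalid log, and an explicit cleanLogs() call removes the aged-out valid log, so the getLogPath() helper and the two-day clock advance are no longer needed. A condensed sketch of the resulting flow, relying on suite helpers (createTestConf, newLogFile, writeFile, testDir) that exist in FsHistoryProviderSuite:

// Condensed sketch of the updated test flow; not a standalone program.
val clock = new ManualClock()
val conf = createTestConf()
  .set(MAX_LOG_AGE_S.key, "0")         // with max age 0, every log counts as expired
  .set(CLEANER_ENABLED.key, "true")    // enables the checkAndCleanLog() call added above
val provider = new FsHistoryProvider(conf, clock)

// ... create one invalid and one valid in-progress event log, as in the diff ...

provider.checkForLogs()                // the invalid log is cleaned via checkAndCleanLog()
assert(new File(testDir.toURI).listFiles().size === 1)

clock.advance(1)
provider.cleanLogs()                   // age-based cleanup removes the remaining valid log
assert(new File(testDir.toURI).listFiles().size === 0)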
