
Commit 9bda70e

Address review comments, always add to failedExecutors
1 parent 270d841 commit 9bda70e

1 file changed: +3 -7 lines changed

core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala

Lines changed: 3 additions & 7 deletions
@@ -517,10 +517,6 @@ private[spark] class TaskSetManager(
     }
     var taskMetrics : TaskMetrics = null
     var failureReason: String = null
-    val addToFailedExecutor = () => {
-      failedExecutors.getOrElseUpdate(index, new HashMap[String, Long]()).
-        put(info.executorId, clock.getTime())
-    }
     reason match {
       case fetchFailed: FetchFailed =>
         logWarning("Loss was due to fetch failure from " + fetchFailed.bmAddress)
@@ -548,7 +544,6 @@ private[spark] class TaskSetManager(
         val key = ef.description
         failureReason = "Exception failure in TID %s on host %s: %s".format(
           tid, info.host, ef.description)
-        addToFailedExecutor()
         val now = clock.getTime()
         val (printFull, dupCount) = {
           if (recentExceptions.contains(key)) {
@@ -575,13 +570,14 @@ private[spark] class TaskSetManager(
 
       case TaskResultLost =>
         failureReason = "Lost result for TID %s on host %s".format(tid, info.host)
-        addToFailedExecutor()
        logWarning(failureReason)
 
       case _ =>
         failureReason = "TID %s on host %s failed for unknown reason".format(tid, info.host)
-        addToFailedExecutor()
     }
+    // always add to failed executors
+    failedExecutors.getOrElseUpdate(index, new HashMap[String, Long]()).
+      put(info.executorId, clock.getTime())
     sched.dagScheduler.taskEnded(tasks(index), reason, null, null, info, taskMetrics)
     addPendingTask(index)
     if (!isZombie && state != TaskState.KILLED) {
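For context, the change above replaces the per-case addToFailedExecutor() closure with a single failedExecutors update placed after the reason match, so the bookkeeping lives in one place instead of being repeated in the ExceptionFailure, TaskResultLost, and default cases (the commit message's "always add to failedExecutors"). Below is a minimal, hypothetical standalone sketch of that bookkeeping idiom; the object and method names and the simplified surrounding structure are illustrative only, while the getOrElseUpdate(...).put(...) pattern and the map shape (task index -> executor id -> failure time) come from the diff itself.

import scala.collection.mutable.HashMap

// Hypothetical standalone sketch: a map from task index to
// (executor id -> time of the most recent failure on that executor).
object FailedExecutorsSketch {
  val failedExecutors = new HashMap[Int, HashMap[String, Long]]()

  // Mirrors the getOrElseUpdate(...).put(...) idiom that the patch now runs
  // once after the reason match, rather than once per failure case.
  def recordFailure(index: Int, executorId: String, now: Long): Unit = {
    failedExecutors.getOrElseUpdate(index, new HashMap[String, Long]())
      .put(executorId, now)
  }

  def main(args: Array[String]): Unit = {
    recordFailure(index = 0, executorId = "exec-1", now = System.currentTimeMillis())
    recordFailure(index = 0, executorId = "exec-2", now = System.currentTimeMillis())
    // A repeated failure on the same executor refreshes its timestamp.
    recordFailure(index = 0, executorId = "exec-1", now = System.currentTimeMillis())
    println(failedExecutors)
  }
}

In this sketch, getOrElseUpdate creates the per-task inner map on the first failure and put records (or refreshes) the failure time for the executor, which is the same effect as the single post-match update introduced by the patch.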
