@@ -517,10 +517,6 @@ private[spark] class TaskSetManager(
     }
     var taskMetrics: TaskMetrics = null
     var failureReason: String = null
-    val addToFailedExecutor = () => {
-      failedExecutors.getOrElseUpdate(index, new HashMap[String, Long]()).
-        put(info.executorId, clock.getTime())
-    }
     reason match {
       case fetchFailed: FetchFailed =>
         logWarning("Loss was due to fetch failure from " + fetchFailed.bmAddress)
@@ -548,7 +544,6 @@ private[spark] class TaskSetManager(
         val key = ef.description
         failureReason = "Exception failure in TID %s on host %s: %s".format(
           tid, info.host, ef.description)
-        addToFailedExecutor()
         val now = clock.getTime()
         val (printFull, dupCount) = {
           if (recentExceptions.contains(key)) {
@@ -575,13 +570,14 @@ private[spark] class TaskSetManager(
 
       case TaskResultLost =>
         failureReason = "Lost result for TID %s on host %s".format(tid, info.host)
-        addToFailedExecutor()
         logWarning(failureReason)
 
       case _ =>
         failureReason = "TID %s on host %s failed for unknown reason".format(tid, info.host)
-        addToFailedExecutor()
     }
+    // always add to failed executors
+    failedExecutors.getOrElseUpdate(index, new HashMap[String, Long]()).
+      put(info.executorId, clock.getTime())
     sched.dagScheduler.taskEnded(tasks(index), reason, null, null, info, taskMetrics)
     addPendingTask(index)
     if (!isZombie && state != TaskState.KILLED) {
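
The change above removes the per-case addToFailedExecutor closure and instead records the failed executor once, unconditionally, after the reason match, so every failure path (including FetchFailed, which previously never invoked the closure) shares the same bookkeeping. A minimal self-contained sketch of that pattern follows; the object and method names (FailedExecutorsSketch, recordFailure) are illustrative stand-ins, not part of Spark's API:

import scala.collection.mutable.HashMap

object FailedExecutorsSketch {
  // task index -> (executor id -> timestamp of the most recent failure there)
  val failedExecutors = new HashMap[Int, HashMap[String, Long]]()

  def recordFailure(taskIndex: Int, executorId: String, now: Long): Unit = {
    // getOrElseUpdate lazily creates the inner map on the first failure of
    // this task; put then overwrites any stale timestamp for that executor.
    failedExecutors.getOrElseUpdate(taskIndex, new HashMap[String, Long]())
      .put(executorId, now)
  }

  def main(args: Array[String]): Unit = {
    recordFailure(3, "exec-1", System.currentTimeMillis())
    recordFailure(3, "exec-1", System.currentTimeMillis()) // refreshes the entry
    println(failedExecutors) // e.g. Map(3 -> Map(exec-1 -> <millis>))
  }
}

Hoisting the call out of the match also means a future case cannot be added while forgetting the bookkeeping.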