core/src/main/scala/org/apache/spark/deploy/master
1 file changed: +6, -7 lines

@@ -485,15 +485,14 @@ private[spark] class Master(
     val shuffledAliveWorkers = Random.shuffle(workers.toSeq.filter(_.state == WorkerState.ALIVE))
     val aliveWorkerNum = shuffledAliveWorkers.size
     var curPos = 0
-    for (driver <- List(waitingDrivers: _*)) { // iterate over a copy of waitingDrivers
-      // For each waiting driver we pick a worker that has enough resources to launch it.
-      // The picking does in a round-robin fashion, starting from position behind last
-      // worker on which driver was just launched and ending with driver being launched
-      // or we have iterated over all workers.
-      val startPos = curPos
+    for (driver <- waitingDrivers.toList) { // iterate over a copy of waitingDrivers
+      // We assign workers to each waiting driver in a round-robin fashion. For each driver, we
+      // start from the last worker that was assigned a driver, and continue onwards until we have
+      // explored all alive workers.
       curPos = (curPos + 1) % aliveWorkerNum
+      val startPos = curPos
       var launched = false
-      while (curPos - 1 != startPos && !launched) {
+      while (curPos != startPos && !launched) {
         val worker = shuffledAliveWorkers(curPos)
         if (worker.memoryFree >= driver.desc.mem && worker.coresFree >= driver.desc.cores) {
           launchDriver(worker, driver)