Skip to content

Commit adc443e

Browse files
author
liguoqiang
committed
partitions check bugfix
1 parent 928e1e3 commit adc443e

File tree

1 file changed

+2
-4
lines changed

1 file changed

+2
-4
lines changed

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 2 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -847,8 +847,7 @@ class SparkContext(
847847
partitions: Seq[Int],
848848
allowLocal: Boolean,
849849
resultHandler: (Int, U) => Unit) {
850-
val partitionRange = (0 until rdd.partitions.size)
851-
require(partitions.forall(partitionRange.contains(_)), "partition index out of range")
850+
require(partitions.toSet.diff(rdd.partitions.map(_.index).toSet).isEmpty, "partition index out of range")
852851
val callSite = getCallSite
853852
val cleanedFunc = clean(func)
854853
logInfo("Starting job: " + callSite)
@@ -952,8 +951,7 @@ class SparkContext(
952951
resultHandler: (Int, U) => Unit,
953952
resultFunc: => R): SimpleFutureAction[R] =
954953
{
955-
val partitionRange = (0 until rdd.partitions.size)
956-
require(partitions.forall(partitionRange.contains(_)), "partition index out of range")
954+
require(partitions.toSet.diff(rdd.partitions.map(_.index).toSet).isEmpty, "partition index out of range")
957955
val cleanF = clean(processPartition)
958956
val callSite = getCallSite
959957
val waiter = dagScheduler.submitJob(

0 commit comments

Comments (0)