Skip to content

Commit d0a6005

Browse files
author
liguoqiang
committed
review comment
1 parent 3395ee7 commit d0a6005

File tree

1 file changed: +4 additions, −4 deletions

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -847,8 +847,8 @@ class SparkContext(
847847
partitions: Seq[Int],
848848
allowLocal: Boolean,
849849
resultHandler: (Int, U) => Unit) {
850-
require(partitions.toSet.diff(rdd.partitions.map(_.index).toSet).isEmpty,
851-
"partition index out of range")
850+
val outIndex = partitions.toSet.diff(rdd.partitions.map(_.index).toSet)
851+
require(outIndex.isEmpty,"Partition index out of bounds: "+ outIndex.mkString(","))
852852
val callSite = getCallSite
853853
val cleanedFunc = clean(func)
854854
logInfo("Starting job: " + callSite)
@@ -952,8 +952,8 @@ class SparkContext(
952952
resultHandler: (Int, U) => Unit,
953953
resultFunc: => R): SimpleFutureAction[R] =
954954
{
955-
require(partitions.toSet.diff(rdd.partitions.map(_.index).toSet).isEmpty,
956-
"partition index out of range")
955+
val outIndex = partitions.toSet.diff(rdd.partitions.map(_.index).toSet)
956+
require(outIndex.isEmpty,"Partition index out of bounds: "+ outIndex.mkString(","))
957957
val cleanF = clean(processPartition)
958958
val callSite = getCallSite
959959
val waiter = dagScheduler.submitJob(

0 commit comments

Comments (0)