File tree (Expand file tree / Collapse file tree) — 1 file changed: +8 −4 lines changed
core/src/test/scala/org/apache/spark/util — 1 file changed: +8 −4 lines changed
Original file line number · Diff line number · Diff line change
@@ -266,16 +266,20 @@ private object TestUserClosuresActuallyCleaned {
266266 rdd.mapPartitionsWithIndex { (_, it) => return ; it }.count()
267267 }
268268 def testFlatMapWith (rdd : RDD [Int ]): Unit = {
269- rdd.flatMapWith { _ => return ; Seq () }.count()
269+ import java .util .Random
270+ rdd.flatMapWith ((index : Int ) => new Random (index + 42 )){ _ => return ; Seq () }.count()
270271 }
271272 def testMapWith (rdd : RDD [Int ]): Unit = {
272- rdd.mapWith { _ => return ; 0 }.count()
273+ import java .util .Random
274+ rdd.mapWith ((index : Int ) => new Random (index + 42 )){ _ => return ; 0 }.count()
273275 }
274276 def testFilterWith (rdd : RDD [Int ]): Unit = {
275- rdd.filterWith { _ => return ; true }.count()
277+ import java .util .Random
278+ rdd.filterWith ((index : Int ) => new Random (index + 42 )){ _ => return ; true }.count()
276279 }
277280 def testForEachWith (rdd : RDD [Int ]): Unit = {
278- rdd.foreachWith { _ => return }
281+ import java .util .Random
282+ rdd.foreachWith ((index : Int ) => new Random (index + 42 )){ _ => return }
279283 }
280284 def testMapPartitionsWithContext (rdd : RDD [Int ]): Unit = {
281285 rdd.mapPartitionsWithContext { (_, it) => return ; it }.count()
You can’t perform that action at this time.
0 commit comments