File tree — 1 file changed: +4 −4 lines

core/src/test/scala/org/apache/spark/util — 1 file changed: +4 −4 lines

@@ -267,19 +267,19 @@ private object TestUserClosuresActuallyCleaned {
   }
   def testFlatMapWith(rdd: RDD[Int]): Unit = {
     import java.util.Random
-    rdd.flatMapWith((index: Int) => new Random(index + 42)){ _ => return; Seq() }.count()
+    rdd.flatMapWith((index: Int) => new Random(index + 42)){ (_, it) => return; Seq() }.count()
   }
   def testMapWith(rdd: RDD[Int]): Unit = {
     import java.util.Random
-    rdd.mapWith((index: Int) => new Random(index + 42)){ _ => return; 0 }.count()
+    rdd.mapWith((index: Int) => new Random(index + 42)){ (_, it) => return; 0 }.count()
   }
   def testFilterWith(rdd: RDD[Int]): Unit = {
     import java.util.Random
-    rdd.filterWith((index: Int) => new Random(index + 42)){ _ => return; true }.count()
+    rdd.filterWith((index: Int) => new Random(index + 42)){ (_, it) => return; true }.count()
   }
   def testForEachWith(rdd: RDD[Int]): Unit = {
     import java.util.Random
-    rdd.foreachWith((index: Int) => new Random(index + 42)){ _ => return }
+    rdd.foreachWith((index: Int) => new Random(index + 42)){ (_, it) => return }
   }
   def testMapPartitionsWithContext(rdd: RDD[Int]): Unit = {
     rdd.mapPartitionsWithContext { (_, it) => return; it }.count()
You can't perform that action at this time.
0 commit comments