Commit 8df68fe

Clean context of most remaining functions in PairRDDFunctions, which ultimately call combineByKey
1 parent: ef84dab

File tree

1 file changed: +4, -1 lines changed

core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala

Lines changed: 4 additions & 1 deletion
@@ -84,7 +84,10 @@ class PairRDDFunctions[K, V](self: RDD[(K, V)])
         throw new SparkException("Default partitioner cannot partition array keys.")
       }
     }
-    val aggregator = new Aggregator[K, V, C](createCombiner, mergeValue, mergeCombiners)
+    val aggregator = new Aggregator[K, V, C](
+      self.context.clean(createCombiner),
+      self.context.clean(mergeValue),
+      self.context.clean(mergeCombiners))
     if (self.partitioner == Some(partitioner)) {
       self.mapPartitions(iter => {
         val context = TaskContext.get()

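For reference, here is a minimal, hypothetical usage sketch (not part of this commit) of combineByKey on a pair RDD. The three lambdas passed in are the createCombiner, mergeValue and mergeCombiners closures that the patched code now runs through self.context.clean(...) before constructing the Aggregator; the application name, sample data and variable names below are illustrative only.

import org.apache.spark.{SparkConf, SparkContext}

object CombineByKeyExample {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setAppName("combineByKey-example").setMaster("local[*]"))
    val scores = sc.parallelize(Seq(("a", 1), ("b", 2), ("a", 3), ("b", 4)))

    // The three user-supplied closures that combineByKey hands to the Aggregator --
    // the same arguments this commit now wraps in self.context.clean(...).
    val sumAndCount = scores.combineByKey(
      (v: Int) => (v, 1),                                           // createCombiner
      (acc: (Int, Int), v: Int) => (acc._1 + v, acc._2 + 1),        // mergeValue
      (a: (Int, Int), b: (Int, Int)) => (a._1 + b._1, a._2 + b._2)) // mergeCombiners

    // Average value per key.
    sumAndCount.mapValues { case (sum, count) => sum.toDouble / count }
      .collect()
      .foreach(println)

    sc.stop()
  }
}

Cleaning these closures with SparkContext.clean (the ClosureCleaner) nulls out references to enclosing objects that the lambdas do not actually use and checks that they are serializable, so they can be shipped to executors even when they are defined inside a larger, non-serializable scope.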