2 files changed: +2 −2 lines changed

core/src/main/scala/org/apache/spark/rdd
streaming/src/main/scala/org/apache/spark/streaming/dstream
@@ -43,7 +43,7 @@ import org.apache.spark.SparkContext
 @JsonPropertyOrder(Array("id", "name", "parent"))
 private[spark] class RDDOperationScope(
     val name: String,
-    val parent: Option[RDDOperationScope] = None) {
+    val parent: Option[RDDOperationScope] = None) extends Serializable {

   val id: Int = RDDOperationScope.nextScopeId()
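The first hunk makes RDDOperationScope extend Serializable so instances survive Java serialization when an object holding one is written out. A minimal standalone sketch of the behavior (the Scope class below is a hypothetical stand-in, not the Spark source): without `extends Serializable`, the writeObject call would throw java.io.NotSerializableException.

import java.io._

// Hypothetical stand-in for RDDOperationScope: a name plus an optional parent.
class Scope(val name: String, val parent: Option[Scope] = None) extends Serializable

object SerializableCheck {
  def main(args: Array[String]): Unit = {
    val scope = new Scope("map", Some(new Scope("transform")))

    // Round-trip through Java serialization.
    val buf = new ByteArrayOutputStream()
    new ObjectOutputStream(buf).writeObject(scope)
    val restored = new ObjectInputStream(
      new ByteArrayInputStream(buf.toByteArray)).readObject().asInstanceOf[Scope]

    println(restored.name)               // map
    println(restored.parent.map(_.name)) // Some(transform)
  }
}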
@@ -117,7 +117,7 @@ abstract class DStream[T: ClassTag] (
   * Instead, every time we call `compute` we instantiate a new scope using the same name as this
   * one. Otherwise, all RDDs ever created by this DStream will be in the same scope.
   */
-  @transient private val scope: Option[RDDOperationScope] = {
+  private val scope: Option[RDDOperationScope] = {
     Option(ssc.sc.getLocalProperty(SparkContext.RDD_SCOPE_KEY)).map(RDDOperationScope.fromJson)
   }
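The second hunk drops @transient from DStream's scope field. A @transient field is skipped when the enclosing object is serialized, which happens to a DStream graph (for example, during checkpointing), and it comes back as null afterwards, silently discarding the scope; presumably that loss is what motivates keeping the field, which in turn requires the Serializable change above. A hypothetical sketch of the behavior being avoided (Holder is an invented class, not Spark code):

import java.io._

// Hypothetical holder: the field is @transient, as DStream.scope was before this patch.
class Holder(@transient val scope: Option[String]) extends Serializable

object TransientCheck {
  def main(args: Array[String]): Unit = {
    val buf = new ByteArrayOutputStream()
    new ObjectOutputStream(buf).writeObject(new Holder(Some("count")))

    val restored = new ObjectInputStream(
      new ByteArrayInputStream(buf.toByteArray)).readObject().asInstanceOf[Holder]

    // The @transient field was never written, so it deserializes as null --
    // the kind of information loss the patch prevents for DStream.scope.
    println(restored.scope) // null
  }
}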