Skip to content

Commit 569034a

Browse files
author
Andrew Or
committed
Add a flag to ignore parent settings and scopes
This flag will be enabled for SQL to show physical operators.
1 parent 3af423c commit 569034a

File tree

1 file changed

+23
-13
lines changed

1 file changed

+23
-13
lines changed

core/src/main/scala/org/apache/spark/rdd/RDDOperationScope.scala

Lines changed: 23 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -96,36 +96,46 @@ private[spark] object RDDOperationScope {
9696
sc: SparkContext,
9797
allowNesting: Boolean = false)(body: => T): T = {
9898
val callerMethodName = Thread.currentThread.getStackTrace()(3).getMethodName
99-
withScope[T](sc, callerMethodName, allowNesting)(body)
99+
withScope[T](sc, callerMethodName, allowNesting, ignoreParent = false)(body)
100100
}
101101

102102
/**
103103
* Execute the given body such that all RDDs created in this body will have the same scope.
104104
*
105-
* If nesting is allowed, this concatenates the previous scope with the new one in a way that
106-
* signifies the hierarchy. Otherwise, if nesting is not allowed, then any children calls to
107-
* this method executed in the body will have no effect.
105+
* If nesting is allowed, any subsequent calls to this method in the given body will instantiate
106+
child scopes that are nested within our scope. Otherwise, these calls will have no effect.
107+
*
108+
* Additionally, the caller of this method may optionally ignore the configurations and scopes
109+
* set by the higher level caller. In this case, this method will ignore the parent caller's
110+
* intention to disallow nesting, and the new scope instantiated will not have a parent. This
111+
* is useful for scoping physical operations in Spark SQL, for instance.
108112
*
109113
* Note: Return statements are NOT allowed in body.
110114
*/
111115
private[spark] def withScope[T](
112116
sc: SparkContext,
113117
name: String,
114-
allowNesting: Boolean)(body: => T): T = {
118+
allowNesting: Boolean,
119+
ignoreParent: Boolean)(body: => T): T = {
115120
// Save the old scope to restore it later
116121
val scopeKey = SparkContext.RDD_SCOPE_KEY
117122
val noOverrideKey = SparkContext.RDD_SCOPE_NO_OVERRIDE_KEY
118123
val oldScopeJson = sc.getLocalProperty(scopeKey)
119-
val oldScope = Option(oldScopeJson).map(RDDOperationScope.fromJson)
120124
val oldNoOverride = sc.getLocalProperty(noOverrideKey)
121125
try {
122-
// Set the scope only if the higher level caller allows us to do so
123-
if (sc.getLocalProperty(noOverrideKey) == null) {
124-
sc.setLocalProperty(scopeKey, new RDDOperationScope(name, oldScope).toJson)
125-
}
126-
// Optionally disallow the child body to override our scope
127-
if (!allowNesting) {
128-
sc.setLocalProperty(noOverrideKey, "true")
126+
if (ignoreParent) {
127+
// Ignore all parent settings and scopes and start afresh with our own root scope
128+
sc.setLocalProperty(scopeKey, new RDDOperationScope(name).toJson)
129+
} else {
130+
// Otherwise, set the scope only if the higher level caller allows us to do so
131+
if (sc.getLocalProperty(noOverrideKey) == null) {
132+
val oldScope = Option(oldScopeJson).map(RDDOperationScope.fromJson)
133+
sc.setLocalProperty(scopeKey, new RDDOperationScope(name, oldScope).toJson)
134+
}
135+
// Optionally disallow the child body to override our scope
136+
if (!allowNesting) {
137+
sc.setLocalProperty(noOverrideKey, "true")
138+
}
129139
}
130140
body
131141
} finally {

0 commit comments

Comments (0)