@@ -1954,14 +1954,7 @@ class SQLConf extends Serializable with Logging {
         entry.valueConverter(defaultValue)
       }
     }
-    Option(settings.get(key)).getOrElse {
-      // If the key is not set, need to check whether the config entry is registered and is
-      // a fallback conf, so that we can check its parent.
-      sqlConfEntries.get(key) match {
-        case e: FallbackConfigEntry[_] => getConfString(e.fallback.key, defaultValue)
-        case _ => defaultValue
-      }
-    }
+    Option(settings.get(key)).getOrElse(defaultValue)
Reviewer comment (Member):
This is a behavior change. We already have a conf that uses this fallback config. See

  val SQL_STRING_REDACTION_PATTERN =
    buildConf("spark.sql.redaction.string.regex")
      .doc("Regex to decide which parts of strings produced by Spark contain sensitive " +
        "information. When this regex matches a string part, that string part is replaced by a " +
        "dummy value. This is currently used to redact the output of SQL explain commands. " +
        "When this conf is not set, the value from `spark.redaction.string.regex` is used.")
      .fallbackConf(org.apache.spark.internal.config.STRING_REDACTION_PATTERN)

   }
 
 /**
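
For context on what the removed branch did: getConfString used to consult the registered entry for an unset key and, if that entry was a fallback conf, recurse into its parent's key. Below is a minimal standalone sketch of that lookup, using hypothetical simplified types (MiniConf, FallbackEntry) in place of Spark's SQLConf and FallbackConfigEntry; it is an illustration of the pre-change behavior, not Spark's actual implementation.

  import java.util.concurrent.ConcurrentHashMap

  sealed trait Entry { def key: String }
  final case class PlainEntry(key: String) extends Entry
  final case class FallbackEntry(key: String, fallback: Entry) extends Entry

  class MiniConf {
    private val entries = new ConcurrentHashMap[String, Entry]()
    private val settings = new ConcurrentHashMap[String, String]()

    def register(e: Entry): Unit = entries.put(e.key, e)
    def set(key: String, value: String): Unit = settings.put(key, value)

    // Pre-change behavior: an unset fallback conf defers to its parent's
    // value, and only returns `defaultValue` when no ancestor is set either.
    def getConfString(key: String, defaultValue: String): String =
      Option(settings.get(key)).getOrElse {
        entries.get(key) match {
          case FallbackEntry(_, parent) => getConfString(parent.key, defaultValue)
          case _ => defaultValue
        }
      }
  }

  object FallbackLookupSketch {
    def main(args: Array[String]): Unit = {
      // The child falls back to the parent until the child itself is set.
      val parent = PlainEntry("spark.example.parent")
      val conf = new MiniConf
      conf.register(parent)
      conf.register(FallbackEntry("spark.example.child", parent))
      conf.set(parent.key, "gzip")
      assert(conf.getConfString("spark.example.child", "snappy") == "gzip")
    }
  }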
@@ -282,32 +282,50 @@ class SQLConfSuite extends QueryTest with SharedSQLContext {
   }
 
   test("SPARK-22779: correctly compute default value for fallback configs") {
-    val fallback = SQLConf.buildConf("spark.sql.__test__.spark_22779")
-      .fallbackConf(SQLConf.PARQUET_COMPRESSION)
-
-    assert(spark.sessionState.conf.getConfString(fallback.key) ===
-      SQLConf.PARQUET_COMPRESSION.defaultValue.get)
-    assert(spark.sessionState.conf.getConfString(fallback.key, "lzo") === "lzo")
-
-    val displayValue = spark.sessionState.conf.getAllDefinedConfs
-      .find { case (key, _, _) => key == fallback.key }
-      .map { case (_, v, _) => v }
-      .get
-    assert(displayValue === fallback.defaultValueString)
-
-    spark.sessionState.conf.setConf(SQLConf.PARQUET_COMPRESSION, "gzip")
-    assert(spark.sessionState.conf.getConfString(fallback.key) === "gzip")
-
-    spark.sessionState.conf.setConf(fallback, "lzo")
-    assert(spark.sessionState.conf.getConfString(fallback.key) === "lzo")
-
-    val newDisplayValue = spark.sessionState.conf.getAllDefinedConfs
-      .find { case (key, _, _) => key == fallback.key }
-      .map { case (_, v, _) => v }
-      .get
-    assert(newDisplayValue === "lzo")
-
-    SQLConf.unregister(fallback)
+    val parent = SQLConf.buildConf("spark.example.parent")
+      .booleanConf
+      .createWithDefault(false)
+    val child = SQLConf.buildConf("spark.example.child")
+      .fallbackConf(parent)
+
+    // explicitParentVal is Some(v) if we have called spark.conf.set(parent.key, v), or None if
+    // we have not explicitly set a conf value for parent.key.
+    def testCorrectValues(explicitParentVal: Option[Boolean]): Unit = {
+      val explicitParentString = explicitParentVal.map(_.toString)
+
+      // The order of precedence should be:
+      //   1. The explicit value, if one was set
+      //   2. Otherwise, the client default, if one was provided
+      //   3. Otherwise, the builder default (either a scalar default or a fallback conf default)
+
+      // Using the ConfigEntry directly should use the builder default
+      assert(spark.conf.get(parent) == explicitParentVal.getOrElse(false))
+      assert(spark.conf.get(child) == explicitParentVal.getOrElse(false))
+
+      // Using the ConfigEntry's key with no client default should use the builder default
+      assert(spark.conf.get(parent.key) == explicitParentString.getOrElse("false"))
+      assert(spark.conf.get(child.key) == explicitParentString.getOrElse("false"))
+
+      // Using the ConfigEntry's key with a client default should use the client default
+      assert(spark.conf.get(parent.key, "false") == explicitParentString.getOrElse("false"))
+      assert(spark.conf.get(parent.key, "true") == explicitParentString.getOrElse("true"))
+      assert(spark.conf.get(child.key, "false") == "false")
+      assert(spark.conf.get(child.key, "true") == "true")
+    }
+
+    // When the parent is not explicitly set
+    testCorrectValues(None)
+
+    try {
+      // When the parent is explicitly set to false
+      spark.conf.set(parent.key, false)
+      testCorrectValues(Some(false))
+
+      // When the parent is explicitly set to true
+      spark.conf.set(parent.key, true)
+      testCorrectValues(Some(true))
+    } finally {
+      spark.conf.unset(parent.key)
+    }
   }
 }
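
The precedence order asserted in the new test can also be checked against a live session with nothing but RuntimeConfig. A minimal sketch, assuming a local SparkSession and the hypothetical key spark.example.parent from the test; rule 3 (the builder default) only applies to keys registered through buildConf, so it is noted in a comment rather than exercised here.

  import org.apache.spark.sql.SparkSession

  object ConfPrecedenceSketch {
    def main(args: Array[String]): Unit = {
      val spark = SparkSession.builder()
        .master("local[1]")
        .appName("conf-precedence-sketch")
        .getOrCreate()

      val key = "spark.example.parent" // hypothetical key, as in the test

      // Rule 1: an explicitly set value wins over any client default.
      spark.conf.set(key, "true")
      assert(spark.conf.get(key, "false") == "true")

      // Rule 2: with no explicit value, the client default (the second
      // argument to get) is returned.
      spark.conf.unset(key)
      assert(spark.conf.get(key, "false") == "false")

      // Rule 3 (builder default) needs a registered ConfigEntry, which is what
      // SQLConf.buildConf(...).createWithDefault(...) provides in the test above.

      spark.stop()
    }
  }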