diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index bffdddcf3fdb0..eb317194c75cc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -1954,14 +1954,7 @@ class SQLConf extends Serializable with Logging {
         entry.valueConverter(defaultValue)
       }
     }
-    Option(settings.get(key)).getOrElse {
-      // If the key is not set, need to check whether the config entry is registered and is
-      // a fallback conf, so that we can check its parent.
-      sqlConfEntries.get(key) match {
-        case e: FallbackConfigEntry[_] => getConfString(e.fallback.key, defaultValue)
-        case _ => defaultValue
-      }
-    }
+    Option(settings.get(key)).getOrElse(defaultValue)
   }
 
   /**
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index c9a6975da6be8..742ebf9cb679b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -282,32 +282,50 @@ class SQLConfSuite extends QueryTest with SharedSQLContext {
   }
 
   test("SPARK-22779: correctly compute default value for fallback configs") {
-    val fallback = SQLConf.buildConf("spark.sql.__test__.spark_22779")
-      .fallbackConf(SQLConf.PARQUET_COMPRESSION)
-
-    assert(spark.sessionState.conf.getConfString(fallback.key) ===
-      SQLConf.PARQUET_COMPRESSION.defaultValue.get)
-    assert(spark.sessionState.conf.getConfString(fallback.key, "lzo") === "lzo")
-
-    val displayValue = spark.sessionState.conf.getAllDefinedConfs
-      .find { case (key, _, _) => key == fallback.key }
-      .map { case (_, v, _) => v }
-      .get
-    assert(displayValue === fallback.defaultValueString)
-
-    spark.sessionState.conf.setConf(SQLConf.PARQUET_COMPRESSION, "gzip")
-    assert(spark.sessionState.conf.getConfString(fallback.key) === "gzip")
+    val parent = SQLConf.buildConf("spark.example.parent")
+      .booleanConf
+      .createWithDefault(false)
+    val child = SQLConf.buildConf("spark.example.child")
+      .fallbackConf(parent)
+
+    // explicitParentVal is Some(v) if we have called spark.conf.set(parent.key, v), or None if we
+    // have not explicitly set a conf value for parent.key.
+    def testCorrectValues(explicitParentVal: Option[Boolean]): Unit = {
+      val explicitParentString = explicitParentVal.map(_.toString)
+
+      // The order of precedence should be:
+      // 1. Explicit value if one was set
+      // 2. Otherwise, client default if one was provided
+      // 3. Otherwise, builder default (either scalar default or a fallback conf default)
+
+      // Using the ConfigEntry directly should use the builder default
+      assert(spark.conf.get(parent) == explicitParentVal.getOrElse(false))
+      assert(spark.conf.get(child) == explicitParentVal.getOrElse(false))
+
+      // Using the ConfigEntry's key with no client default should use the builder default
+      assert(spark.conf.get(parent.key) == explicitParentString.getOrElse("false"))
+      assert(spark.conf.get(child.key) == explicitParentString.getOrElse("false"))
+
+      // Using the ConfigEntry's key with a client default should use the client default
+      assert(spark.conf.get(parent.key, "false") == explicitParentString.getOrElse("false"))
+      assert(spark.conf.get(parent.key, "true") == explicitParentString.getOrElse("true"))
+      assert(spark.conf.get(child.key, "false") == "false")
+      assert(spark.conf.get(child.key, "true") == "true")
+    }
 
-    spark.sessionState.conf.setConf(fallback, "lzo")
-    assert(spark.sessionState.conf.getConfString(fallback.key) === "lzo")
+    // When the parent is not explicitly set
+    testCorrectValues(None)
 
-    val newDisplayValue = spark.sessionState.conf.getAllDefinedConfs
-      .find { case (key, _, _) => key == fallback.key }
-      .map { case (_, v, _) => v }
-      .get
-    assert(newDisplayValue === "lzo")
+    try {
+      // When the parent is explicitly set to false
+      spark.conf.set(parent.key, false)
+      testCorrectValues(Some(false))
 
-    SQLConf.unregister(fallback)
+      // When the parent is explicitly set to true
+      spark.conf.set(parent.key, true)
+      testCorrectValues(Some(true))
+    } finally {
+      spark.conf.unset(parent.key)
+    }
   }
-
 }
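
For reference, a minimal sketch (not part of the patch) of the precedence the new test encodes, assuming a live SparkSession bound to `spark` and the same hypothetical example keys; note the ConfigEntry overloads of spark.conf.get are private[sql], so outside that package only the string-key form applies:

    import org.apache.spark.sql.internal.SQLConf

    // Hypothetical example confs, mirroring the test above.
    val parent = SQLConf.buildConf("spark.example.parent").booleanConf.createWithDefault(false)
    val child = SQLConf.buildConf("spark.example.child").fallbackConf(parent)

    spark.conf.get(child)              // false: nothing is set, so the builder default flows through the fallback
    spark.conf.get(child.key, "true")  // "true": with this patch, a client default wins over the fallback conf
    spark.conf.set(parent.key, true)
    spark.conf.get(child)              // true: an explicitly set parent value wins over both kinds of default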