diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 983285ceb2ee..fda37dbe0a03 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -2850,6 +2850,7 @@ object SQLConf {
     .createWithDefault(100)
 
   val LEGACY_ALLOW_HASH_ON_MAPTYPE = buildConf("spark.sql.legacy.allowHashOnMapType")
+    .internal()
     .doc("When set to true, hash expressions can be applied on elements of MapType. Otherwise, " +
       "an analysis exception will be thrown.")
     .version("3.0.0")
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala
index 02cb6f29622f..bfefca4e2eba 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala
@@ -195,6 +195,7 @@ object StaticSQLConf {
 
   val SQL_LEGACY_SESSION_INIT_WITH_DEFAULTS =
     buildStaticConf("spark.sql.legacy.sessionInitWithConfigDefaults")
+      .internal()
       .doc("Flag to revert to legacy behavior where a cloned SparkSession receives SparkConf " +
         "defaults, dropping any overrides in its parent SparkSession.")
       .version("3.0.0")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index e699c972268a..93b785952768 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -451,4 +451,14 @@ class SQLConfSuite extends QueryTest with SharedSparkSession {
     val e2 = intercept[ParseException](sql("set time zone interval 19 hours"))
     assert(e2.getMessage contains "The interval value must be in the range of [-18, +18] hours")
   }
+
+  test("SPARK-34454: configs from the legacy namespace should be internal") {
+    val nonInternalLegacyConfigs = spark.sessionState.conf.getAllDefinedConfs
+      .filter { case (key, _, _, _) => key.contains("spark.sql.legacy.") }
+    assert(nonInternalLegacyConfigs.isEmpty,
+      s"""
+         |Non internal legacy SQL configs:
+         |${nonInternalLegacyConfigs.map(_._1).mkString("\n")}
+         |""".stripMargin)
+  }
 }