diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala
index d9c354b165e5..9a1de460b4d1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala
@@ -36,7 +36,6 @@ object StaticSQLConf {
   val CATALOG_IMPLEMENTATION = buildStaticConf("spark.sql.catalogImplementation")
     .internal()
     .stringConf
-    .checkValues(Set("hive", "in-memory"))
     .createWithDefault("in-memory")
 
   val GLOBAL_TEMP_DATABASE = buildStaticConf("spark.sql.globalTempDatabase")
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 2b847fb6f945..4fa3caf51e7b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -1079,6 +1079,12 @@ object SparkSession extends Logging {
     conf.get(CATALOG_IMPLEMENTATION) match {
       case "hive" => HIVE_SESSION_STATE_BUILDER_CLASS_NAME
       case "in-memory" => classOf[SessionStateBuilder].getCanonicalName
+      case other => conf.getOption(s"spark.sql.catalogImplementation.$other.builder")
+        .getOrElse {
+          throw new IllegalArgumentException(
+            "You need to configure spark.sql.catalogImplementation.xx.builder when the value xx " +
+              "set by spark.sql.catalogImplementation is neither in-memory nor hive")
+        }
     }
   }
 
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala
index 4d2be13c4841..20587f99a4d4 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala
@@ -169,6 +169,12 @@ object SharedState extends Logging {
     conf.get(CATALOG_IMPLEMENTATION) match {
      case "hive" => HIVE_EXTERNAL_CATALOG_CLASS_NAME
       case "in-memory" => classOf[InMemoryCatalog].getCanonicalName
+      case other => conf.getOption(s"spark.sql.catalogImplementation.$other.externalCatalog")
+        .getOrElse {
+          throw new IllegalArgumentException(
+            "You need to configure spark.sql.catalogImplementation.xx.externalCatalog when the " +
+              "value xx set by spark.sql.catalogImplementation is neither in-memory nor hive")
+        }
     }
   }
 
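Usage note (illustrative, not part of the patch): with the checkValues restriction removed, any catalog name is accepted as long as the two companion keys resolve to fully qualified class names. The classes below, com.example.sql.MyCatalogSessionStateBuilder and com.example.sql.MyExternalCatalog, are hypothetical placeholders a user would have to provide on the classpath, analogous to the Hive builder/catalog pair.

  import org.apache.spark.sql.SparkSession

  // Hypothetical wiring of a custom catalog implementation through the new
  // lookup keys introduced by this change; class names are assumptions.
  val spark = SparkSession.builder()
    .master("local[*]")
    // Any value other than "hive"/"in-memory" is now accepted here...
    .config("spark.sql.catalogImplementation", "my-catalog")
    // ...provided both companion keys point at real classes on the classpath.
    .config("spark.sql.catalogImplementation.my-catalog.builder",
      "com.example.sql.MyCatalogSessionStateBuilder")
    .config("spark.sql.catalogImplementation.my-catalog.externalCatalog",
      "com.example.sql.MyExternalCatalog")
    .getOrCreate()

If either key is missing, the new match arms above fail fast with an IllegalArgumentException instead of a MatchError.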