diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index b5e40dde0b4fc..0ec87b1bca96e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -1767,6 +1767,11 @@ object SQLConf {
         "with String")
       .booleanConf
       .createWithDefault(false)
+
+  val DEFAULT_V2_CATALOG = buildConf("spark.sql.default.catalog")
+    .doc("Name of the default v2 catalog, used when a catalog is not identified in queries")
+    .stringConf
+    .createOptional
 }
 
 /**
@@ -2220,6 +2225,8 @@ class SQLConf extends Serializable with Logging {
 
   def castDatetimeToString: Boolean = getConf(SQLConf.LEGACY_CAST_DATETIME_TO_STRING)
 
+  def defaultV2Catalog: Option[String] = getConf(DEFAULT_V2_CATALOG)
+
   /** ********************** SQLConf functionality methods ************ */
 
   /** Set Spark SQL configuration properties. */
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceResolution.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceResolution.scala
index 09506f05ccfa4..72b05036fb04d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceResolution.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceResolution.scala
@@ -43,6 +43,8 @@ case class DataSourceResolution(
 
   override def lookupCatalog: Option[String => CatalogPlugin] = Some(findCatalog)
 
+  def defaultCatalog: Option[CatalogPlugin] = conf.defaultV2Catalog.map(findCatalog)
+
   override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
     case CreateTableStatement(
         AsTableIdentifier(table), schema, partitionCols, bucketSpec, properties,
@@ -67,7 +69,7 @@ case class DataSourceResolution(
     case create: CreateTableAsSelectStatement =>
       // the provider was not a v1 source, convert to a v2 plan
       val CatalogObjectIdentifier(maybeCatalog, identifier) = create.tableName
-      val catalog = maybeCatalog
+      val catalog = maybeCatalog.orElse(defaultCatalog)
         .getOrElse(throw new AnalysisException(
           s"No catalog specified for table ${identifier.quoted} and no default catalog is set"))
         .asTableCatalog
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
index c525b4cbcba57..f8119fd862d65 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
@@ -329,8 +329,7 @@ class PlanResolutionSuite extends AnalysisTest {
     }
   }
 
-  // TODO(rblue): enable this test after the default catalog is available
-  ignore("Test v2 CTAS with data source v2 provider") {
+  test("Test v2 CTAS with data source v2 provider") {
     val sql =
       s"""
         |CREATE TABLE IF NOT EXISTS mydb.page_view
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/v2/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/v2/DataSourceV2SQLSuite.scala
index a9bc0369ad20f..0cfdfdd284789 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/v2/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/v2/DataSourceV2SQLSuite.scala
@@ -66,8 +66,7 @@ class DataSourceV2SQLSuite extends QueryTest with SharedSQLContext with BeforeAn
     checkAnswer(spark.internalCreateDataFrame(rdd, table.schema), spark.table("source"))
   }
 
-  // TODO(rblue): enable this test after the default catalog is available
-  ignore("CreateTableAsSelect: use v2 plan because provider is v2") {
+  test("CreateTableAsSelect: use v2 plan because provider is v2") {
     spark.sql(s"CREATE TABLE table_name USING $orc2 AS SELECT id, data FROM source")
 
     val testCatalog = spark.catalog("testcat").asTableCatalog
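
For context, a minimal usage sketch of the behavior this patch enables. The catalog implementation class `com.example.TestCatalog` and the v2 provider class `com.example.v2.Source` are hypothetical placeholders; the `spark.sql.catalog.<name>` registration pattern mirrors how the test suites above register `testcat`, and `spark.sql.default.catalog` is the key added by this patch:

```scala
// Sketch only, not part of the patch. Assumes com.example.TestCatalog
// implements the v2 TableCatalog plugin interface and com.example.v2.Source
// is a v2 data source provider; both names are hypothetical.
import org.apache.spark.sql.SparkSession

object DefaultCatalogSketch extends App {
  val spark = SparkSession.builder()
    .master("local[*]")
    .appName("default-v2-catalog-sketch")
    // Register a v2 catalog under the name "testcat".
    .config("spark.sql.catalog.testcat", "com.example.TestCatalog")
    // New config from this patch: catalog to fall back to when a query
    // does not qualify the table name with a catalog.
    .config("spark.sql.default.catalog", "testcat")
    .getOrCreate()

  // The provider is a v2 source, so DataSourceResolution converts this CTAS
  // to a v2 plan. Since "mydb.page_view" names no catalog,
  // maybeCatalog.orElse(defaultCatalog) resolves to "testcat".
  spark.sql(
    "CREATE TABLE mydb.page_view USING com.example.v2.Source " +
      "AS SELECT id, data FROM source")
}
```

Before this change, the CTAS above would fail analysis with "No catalog specified for table ... and no default catalog is set" whenever the table name carried no explicit catalog; with `spark.sql.default.catalog` set, the resolver falls back to the configured catalog instead.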