diff --git a/docs/sql-keywords.md b/docs/sql-keywords.md
index 7a0e3efee8ff..b4f8d8be11c4 100644
--- a/docs/sql-keywords.md
+++ b/docs/sql-keywords.md
@@ -210,6 +210,7 @@ Below is a list of all the keywords in Spark SQL.
| PRECEDING | non-reserved | non-reserved | non-reserved |
| PRIMARY | reserved | non-reserved | reserved |
| PRINCIPALS | non-reserved | non-reserved | non-reserved |
+ | PROPERTIES | non-reserved | non-reserved | non-reserved |
| PURGE | non-reserved | non-reserved | non-reserved |
| QUERY | non-reserved | non-reserved | non-reserved |
| RANGE | non-reserved | non-reserved | reserved |
diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index 963077c35df9..7e5e16b8e32b 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -83,10 +83,10 @@ statement
: query #statementDefault
| ctes? dmlStatementNoWith #dmlStatement
| USE NAMESPACE? multipartIdentifier #use
- | CREATE database (IF NOT EXISTS)? db=errorCapturingIdentifier
+ | CREATE (database | NAMESPACE) (IF NOT EXISTS)? multipartIdentifier
((COMMENT comment=STRING) |
locationSpec |
- (WITH DBPROPERTIES tablePropertyList))* #createDatabase
+ (WITH (DBPROPERTIES | PROPERTIES) tablePropertyList))* #createNamespace
| ALTER database db=errorCapturingIdentifier
SET DBPROPERTIES tablePropertyList #setDatabaseProperties
| ALTER database db=errorCapturingIdentifier
@@ -1039,6 +1039,7 @@ ansiNonReserved
| POSITION
| PRECEDING
| PRINCIPALS
+ | PROPERTIES
| PURGE
| QUERY
| RANGE
@@ -1299,6 +1300,7 @@ nonReserved
| PRECEDING
| PRIMARY
| PRINCIPALS
+ | PROPERTIES
| PURGE
| QUERY
| RANGE
@@ -1564,6 +1566,7 @@ POSITION: 'POSITION';
PRECEDING: 'PRECEDING';
PRIMARY: 'PRIMARY';
PRINCIPALS: 'PRINCIPALS';
+PROPERTIES: 'PROPERTIES';
PURGE: 'PURGE';
QUERY: 'QUERY';
RANGE: 'RANGE';
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
index 13a79a82a385..6553b3d57d7f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
@@ -168,6 +168,13 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
s"Can not specify catalog `${catalog.name}` for view ${viewName.quoted} " +
s"because view support in catalog has not been implemented yet")
+ case c @ CreateNamespaceStatement(NonSessionCatalog(catalog, nameParts), _, _) =>
+ CreateNamespace(
+ catalog.asNamespaceCatalog,
+ nameParts,
+ c.ifNotExists,
+ c.properties)
+
case ShowNamespacesStatement(Some(CatalogAndNamespace(catalog, namespace)), pattern) =>
ShowNamespaces(catalog.asNamespaceCatalog, namespace, pattern)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 548042bc9767..7c67952aba40 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -2307,6 +2307,46 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
}
}
+ /**
+ * Create a [[CreateNamespaceStatement]] command.
+ *
+ * For example:
+ * {{{
+ * CREATE NAMESPACE [IF NOT EXISTS] ns1.ns2.ns3
+ * create_namespace_clauses;
+ *
+ * create_namespace_clauses (order insensitive):
+ * [COMMENT namespace_comment]
+ * [LOCATION path]
+ * [WITH PROPERTIES (key1=val1, key2=val2, ...)]
+ * }}}
+ */
+ override def visitCreateNamespace(ctx: CreateNamespaceContext): LogicalPlan = withOrigin(ctx) {
+ checkDuplicateClauses(ctx.COMMENT, "COMMENT", ctx)
+ checkDuplicateClauses(ctx.locationSpec, "LOCATION", ctx)
+ checkDuplicateClauses(ctx.PROPERTIES, "WITH PROPERTIES", ctx)
+ checkDuplicateClauses(ctx.DBPROPERTIES, "WITH DBPROPERTIES", ctx)
+
+ if (!ctx.PROPERTIES.isEmpty && !ctx.DBPROPERTIES.isEmpty) {
+ throw new ParseException("Either PROPERTIES or DBPROPERTIES is allowed.", ctx)
+ }
+
+ var properties = ctx.tablePropertyList.asScala.headOption
+ .map(visitPropertyKeyValues)
+ .getOrElse(Map.empty)
+ Option(ctx.comment).map(string).foreach {
+ properties += CreateNamespaceStatement.COMMENT_PROPERTY_KEY -> _
+ }
+ ctx.locationSpec.asScala.headOption.map(visitLocationSpec).foreach {
+ properties += CreateNamespaceStatement.LOCATION_PROPERTY_KEY -> _
+ }
+
+ CreateNamespaceStatement(
+ visitMultipartIdentifier(ctx.multipartIdentifier),
+ ctx.EXISTS != null,
+ properties)
+ }
+
/**
* Create a [[ShowNamespacesStatement]] command.
*/
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala
index a73a2975aa9c..3bd16187320f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala
@@ -282,6 +282,19 @@ case class InsertIntoStatement(
case class ShowTablesStatement(namespace: Option[Seq[String]], pattern: Option[String])
extends ParsedStatement
+/**
+ * A CREATE NAMESPACE statement, as parsed from SQL.
+ */
+case class CreateNamespaceStatement(
+ namespace: Seq[String],
+ ifNotExists: Boolean,
+ properties: Map[String, String]) extends ParsedStatement
+
+object CreateNamespaceStatement {
+ val COMMENT_PROPERTY_KEY: String = "comment"
+ val LOCATION_PROPERTY_KEY: String = "location"
+}
+
/**
* A SHOW NAMESPACES statement, as parsed from SQL.
*/
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
index f89dfb1ec47d..8f5731a4a7a7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
@@ -237,6 +237,14 @@ case class ReplaceTableAsSelect(
}
}
+/**
+ * The logical plan of the CREATE NAMESPACE command that works for v2 catalogs.
+ */
+case class CreateNamespace(
+ catalog: SupportsNamespaces,
+ namespace: Seq[String],
+ ifNotExists: Boolean,
+ properties: Map[String, String]) extends Command
/**
* The logical plan of the SHOW NAMESPACES command that works for v2 catalogs.
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index 1dacb2384ac1..38ef357036a0 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -845,6 +845,90 @@ class DDLParserSuite extends AnalysisTest {
ShowTablesStatement(Some(Seq("tbl")), Some("*dog*")))
}
+ test("create namespace -- backward compatibility with DATABASE/DBPROPERTIES") {
+ val expected = CreateNamespaceStatement(
+ Seq("a", "b", "c"),
+ ifNotExists = true,
+ Map(
+ "a" -> "a",
+ "b" -> "b",
+ "c" -> "c",
+ "comment" -> "namespace_comment",
+ "location" -> "/home/user/db"))
+
+ comparePlans(
+ parsePlan(
+ """
+ |CREATE NAMESPACE IF NOT EXISTS a.b.c
+ |WITH PROPERTIES ('a'='a', 'b'='b', 'c'='c')
+ |COMMENT 'namespace_comment' LOCATION '/home/user/db'
+ """.stripMargin),
+ expected)
+
+ comparePlans(
+ parsePlan(
+ """
+ |CREATE DATABASE IF NOT EXISTS a.b.c
+ |WITH DBPROPERTIES ('a'='a', 'b'='b', 'c'='c')
+ |COMMENT 'namespace_comment' LOCATION '/home/user/db'
+ """.stripMargin),
+ expected)
+ }
+
+ test("create namespace -- check duplicates") {
+ def createDatabase(duplicateClause: String): String = {
+ s"""
+ |CREATE NAMESPACE IF NOT EXISTS a.b.c
+ |$duplicateClause
+ |$duplicateClause
+ """.stripMargin
+ }
+ val sql1 = createDatabase("COMMENT 'namespace_comment'")
+ val sql2 = createDatabase("LOCATION '/home/user/db'")
+ val sql3 = createDatabase("WITH PROPERTIES ('a'='a', 'b'='b', 'c'='c')")
+ val sql4 = createDatabase("WITH DBPROPERTIES ('a'='a', 'b'='b', 'c'='c')")
+
+ intercept(sql1, "Found duplicate clauses: COMMENT")
+ intercept(sql2, "Found duplicate clauses: LOCATION")
+ intercept(sql3, "Found duplicate clauses: WITH PROPERTIES")
+ intercept(sql4, "Found duplicate clauses: WITH DBPROPERTIES")
+ }
+
+ test("create namespace - property values must be set") {
+ assertUnsupported(
+ sql = "CREATE NAMESPACE a.b.c WITH PROPERTIES('key_without_value', 'key_with_value'='x')",
+ containsThesePhrases = Seq("key_without_value"))
+ }
+
+ test("create namespace -- either PROPERTIES or DBPROPERTIES is allowed") {
+ val sql =
+ s"""
+ |CREATE NAMESPACE IF NOT EXISTS a.b.c
+ |WITH PROPERTIES ('a'='a', 'b'='b', 'c'='c')
+ |WITH DBPROPERTIES ('a'='a', 'b'='b', 'c'='c')
+ """.stripMargin
+ intercept(sql, "Either PROPERTIES or DBPROPERTIES is allowed")
+ }
+
+ test("create namespace - support for other types in PROPERTIES") {
+ val sql =
+ """
+ |CREATE NAMESPACE a.b.c
+ |LOCATION '/home/user/db'
+ |WITH PROPERTIES ('a'=1, 'b'=0.1, 'c'=TRUE)
+ """.stripMargin
+ comparePlans(
+ parsePlan(sql),
+ CreateNamespaceStatement(
+ Seq("a", "b", "c"),
+ ifNotExists = false,
+ Map(
+ "a" -> "1",
+ "b" -> "0.1",
+ "c" -> "true",
+ "location" -> "/home/user/db")))
+ }
+
test("show databases: basic") {
comparePlans(
parsePlan("SHOW DATABASES"),
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ParserUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ParserUtilsSuite.scala
index 07f77ea889db..c6434f2bdd3e 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ParserUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ParserUtilsSuite.scala
@@ -50,7 +50,7 @@ class ParserUtilsSuite extends SparkFunSuite {
|WITH DBPROPERTIES ('a'='a', 'b'='b', 'c'='c')
""".stripMargin
) { parser =>
- parser.statement().asInstanceOf[CreateDatabaseContext]
+ parser.statement().asInstanceOf[CreateNamespaceContext]
}
val emptyContext = buildContext("") { parser =>
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
index 4a2e6731d9d8..4cca9846e996 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.connector.catalog.{CatalogManager, CatalogPlugin, LookupCatalog, TableChange, V1Table}
import org.apache.spark.sql.connector.expressions.Transform
-import org.apache.spark.sql.execution.command.{AlterTableAddColumnsCommand, AlterTableRecoverPartitionsCommand, AlterTableSetLocationCommand, AlterTableSetPropertiesCommand, AlterTableUnsetPropertiesCommand, AnalyzeColumnCommand, AnalyzePartitionCommand, AnalyzeTableCommand, DescribeColumnCommand, DescribeTableCommand, DropTableCommand, ShowPartitionsCommand, ShowTablesCommand, TruncateTableCommand}
+import org.apache.spark.sql.execution.command.{AlterTableAddColumnsCommand, AlterTableRecoverPartitionsCommand, AlterTableSetLocationCommand, AlterTableSetPropertiesCommand, AlterTableUnsetPropertiesCommand, AnalyzeColumnCommand, AnalyzePartitionCommand, AnalyzeTableCommand, CreateDatabaseCommand, DescribeColumnCommand, DescribeTableCommand, DropTableCommand, ShowPartitionsCommand, ShowTablesCommand, TruncateTableCommand}
import org.apache.spark.sql.execution.datasources.{CreateTable, DataSource}
import org.apache.spark.sql.execution.datasources.v2.FileDataSourceV2
import org.apache.spark.sql.internal.SQLConf
@@ -255,6 +255,19 @@ class ResolveSessionCatalog(
case DropViewStatement(SessionCatalog(catalog, viewName), ifExists) =>
DropTableCommand(viewName.asTableIdentifier, ifExists, isView = true, purge = false)
+ case c @ CreateNamespaceStatement(SessionCatalog(catalog, nameParts), _, _) =>
+ if (nameParts.length != 1) {
+ throw new AnalysisException(
+ s"The database name is not valid: ${nameParts.quoted}")
+ }
+
+ val comment = c.properties.get(CreateNamespaceStatement.COMMENT_PROPERTY_KEY)
+ val location = c.properties.get(CreateNamespaceStatement.LOCATION_PROPERTY_KEY)
+ val newProperties = c.properties -
+ CreateNamespaceStatement.COMMENT_PROPERTY_KEY -
+ CreateNamespaceStatement.LOCATION_PROPERTY_KEY
+ CreateDatabaseCommand(nameParts.head, c.ifNotExists, location, comment, newProperties)
+
case ShowTablesStatement(Some(SessionCatalog(catalog, nameParts)), pattern) =>
if (nameParts.length != 1) {
throw new AnalysisException(
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 3f3f6b373eb0..38f3c6e1b750 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -329,33 +329,6 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder(conf) {
)
}
- /**
- * Create a [[CreateDatabaseCommand]] command.
- *
- * For example:
- * {{{
- * CREATE DATABASE [IF NOT EXISTS] database_name
- * create_database_clauses;
- *
- * create_database_clauses (order insensitive):
- * [COMMENT database_comment]
- * [LOCATION path]
- * [WITH DBPROPERTIES (key1=val1, key2=val2, ...)]
- * }}}
- */
- override def visitCreateDatabase(ctx: CreateDatabaseContext): LogicalPlan = withOrigin(ctx) {
- checkDuplicateClauses(ctx.COMMENT, "COMMENT", ctx)
- checkDuplicateClauses(ctx.locationSpec, "LOCATION", ctx)
- checkDuplicateClauses(ctx.DBPROPERTIES, "WITH DBPROPERTIES", ctx)
-
- CreateDatabaseCommand(
- ctx.db.getText,
- ctx.EXISTS != null,
- ctx.locationSpec.asScala.headOption.map(visitLocationSpec),
- Option(ctx.comment).map(string),
- ctx.tablePropertyList.asScala.headOption.map(visitPropertyKeyValues).getOrElse(Map.empty))
- }
-
/**
* Create an [[AlterDatabasePropertiesCommand]] command.
*
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/CreateNamespaceExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/CreateNamespaceExec.scala
new file mode 100644
index 000000000000..0f69f85dd837
--- /dev/null
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/CreateNamespaceExec.scala
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.datasources.v2
+
+import scala.collection.JavaConverters.mapAsJavaMapConverter
+
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.analysis.NamespaceAlreadyExistsException
+import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.connector.catalog.SupportsNamespaces
+
+/**
+ * Physical plan node for creating a namespace.
+ */
+case class CreateNamespaceExec(
+ catalog: SupportsNamespaces,
+ namespace: Seq[String],
+ ifNotExists: Boolean,
+ private val properties: Map[String, String])
+ extends V2CommandExec {
+ override protected def run(): Seq[InternalRow] = {
+ import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
+
+ val ns = namespace.toArray
+ if (!catalog.namespaceExists(ns)) {
+ try {
+ catalog.createNamespace(ns, properties.asJava)
+ } catch {
+ case _: NamespaceAlreadyExistsException if ifNotExists =>
+ logWarning(s"Namespace ${namespace.quoted} was created concurrently. Ignoring.")
+ }
+ } else if (!ifNotExists) {
+ throw new NamespaceAlreadyExistsException(ns)
+ }
+
+ Seq.empty
+ }
+
+ override def output: Seq[Attribute] = Seq.empty
+}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
index c8d29520bcfc..49035c3cc3da 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
@@ -23,7 +23,7 @@ import scala.collection.mutable
import org.apache.spark.sql.{AnalysisException, Strategy}
import org.apache.spark.sql.catalyst.expressions.{And, AttributeReference, AttributeSet, Expression, PredicateHelper, SubqueryExpression}
import org.apache.spark.sql.catalyst.planning.PhysicalOperation
-import org.apache.spark.sql.catalyst.plans.logical.{AlterTable, AppendData, CreateTableAsSelect, CreateV2Table, DeleteFromTable, DescribeTable, DropTable, LogicalPlan, OverwriteByExpression, OverwritePartitionsDynamic, Repartition, ReplaceTable, ReplaceTableAsSelect, SetCatalogAndNamespace, ShowNamespaces, ShowTables}
+import org.apache.spark.sql.catalyst.plans.logical.{AlterTable, AppendData, CreateNamespace, CreateTableAsSelect, CreateV2Table, DeleteFromTable, DescribeTable, DropTable, LogicalPlan, OverwriteByExpression, OverwritePartitionsDynamic, Repartition, ReplaceTable, ReplaceTableAsSelect, SetCatalogAndNamespace, ShowNamespaces, ShowTables}
import org.apache.spark.sql.connector.catalog.{StagingTableCatalog, TableCapability}
import org.apache.spark.sql.connector.read.{Scan, ScanBuilder, SupportsPushDownFilters, SupportsPushDownRequiredColumns}
import org.apache.spark.sql.connector.read.streaming.{ContinuousStream, MicroBatchStream}
@@ -289,6 +289,9 @@ object DataSourceV2Strategy extends Strategy with PredicateHelper {
case AlterTable(catalog, ident, _, changes) =>
AlterTableExec(catalog, ident, changes) :: Nil
+ case CreateNamespace(catalog, namespace, ifNotExists, properties) =>
+ CreateNamespaceExec(catalog, namespace, ifNotExists, properties) :: Nil
+
case r: ShowNamespaces =>
ShowNamespacesExec(r.output, r.catalog, r.namespace, r.pattern) :: Nil
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index 39709ab426a0..2ea26787dbb1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.connector
import scala.collection.JavaConverters._
import org.apache.spark.sql._
-import org.apache.spark.sql.catalyst.analysis.{CannotReplaceMissingTableException, NoSuchDatabaseException, NoSuchTableException, TableAlreadyExistsException}
+import org.apache.spark.sql.catalyst.analysis.{CannotReplaceMissingTableException, NamespaceAlreadyExistsException, NoSuchDatabaseException, NoSuchTableException, TableAlreadyExistsException}
import org.apache.spark.sql.connector.catalog._
import org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
import org.apache.spark.sql.internal.SQLConf
@@ -764,6 +764,32 @@ class DataSourceV2SQLSuite
assert(expected === df.collect())
}
+ test("CreateNamespace: basic tests") {
+ // Session catalog is used.
+ sql("CREATE NAMESPACE ns")
+ testShowNamespaces("SHOW NAMESPACES", Seq("default", "ns"))
+
+ // V2 non-session catalog is used.
+ sql("CREATE NAMESPACE testcat.ns1.ns2")
+ testShowNamespaces("SHOW NAMESPACES IN testcat", Seq("ns1"))
+ testShowNamespaces("SHOW NAMESPACES IN testcat.ns1", Seq("ns1.ns2"))
+
+ // TODO: Add tests for validating namespace metadata when DESCRIBE NAMESPACE is available.
+ }
+
+ test("CreateNamespace: test handling of 'IF NOT EXIST'") {
+ sql("CREATE NAMESPACE IF NOT EXISTS testcat.ns1")
+
+ // The 'ns1' namespace already exists, so this should fail.
+ val exception = intercept[NamespaceAlreadyExistsException] {
+ sql("CREATE NAMESPACE testcat.ns1")
+ }
+ assert(exception.getMessage.contains("Namespace 'ns1' already exists"))
+
+ // The following will be no-op since the namespace already exists.
+ sql("CREATE NAMESPACE IF NOT EXISTS testcat.ns1")
+ }
+
test("ShowNamespaces: show root namespaces with default v2 catalog") {
spark.conf.set("spark.sql.default.catalog", "testcat")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
index 0640d0540baa..a9b94bea9517 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
@@ -74,46 +74,6 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession {
}.head
}
- test("create database") {
- val sql =
- """
- |CREATE DATABASE IF NOT EXISTS database_name
- |WITH DBPROPERTIES ('a'='a', 'b'='b', 'c'='c')
- |COMMENT 'database_comment' LOCATION '/home/user/db'
- """.stripMargin
- val parsed = parser.parsePlan(sql)
- val expected = CreateDatabaseCommand(
- "database_name",
- ifNotExists = true,
- Some("/home/user/db"),
- Some("database_comment"),
- Map("a" -> "a", "b" -> "b", "c" -> "c"))
- comparePlans(parsed, expected)
- }
-
- test("create database -- check duplicates") {
- def createDatabase(duplicateClause: String): String = {
- s"""
- |CREATE DATABASE IF NOT EXISTS database_name
- |$duplicateClause
- |$duplicateClause
- """.stripMargin
- }
- val sql1 = createDatabase("COMMENT 'database_comment'")
- val sql2 = createDatabase("LOCATION '/home/user/db'")
- val sql3 = createDatabase("WITH DBPROPERTIES ('a'='a', 'b'='b', 'c'='c')")
-
- intercept(sql1, "Found duplicate clauses: COMMENT")
- intercept(sql2, "Found duplicate clauses: LOCATION")
- intercept(sql3, "Found duplicate clauses: WITH DBPROPERTIES")
- }
-
- test("create database - property values must be set") {
- assertUnsupported(
- sql = "CREATE DATABASE my_db WITH DBPROPERTIES('key_without_value', 'key_with_value'='x')",
- containsThesePhrases = Seq("key_without_value"))
- }
-
test("drop database") {
val sql1 = "DROP DATABASE IF EXISTS database_name RESTRICT"
val sql2 = "DROP DATABASE IF EXISTS database_name CASCADE"
@@ -870,24 +830,6 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession {
comparePlans(parsed4, expected4)
}
- test("support for other types in DBPROPERTIES") {
- val sql =
- """
- |CREATE DATABASE database_name
- |LOCATION '/home/user/db'
- |WITH DBPROPERTIES ('a'=1, 'b'=0.1, 'c'=TRUE)
- """.stripMargin
- val parsed = parser.parsePlan(sql)
- val expected = CreateDatabaseCommand(
- "database_name",
- ifNotExists = false,
- Some("/home/user/db"),
- None,
- Map("a" -> "1", "b" -> "0.1", "c" -> "true"))
-
- comparePlans(parsed, expected)
- }
-
test("Test CTAS #1") {
val s1 =
"""