diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala index aca7602bdbcb..7b4920f8bb12 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala @@ -1233,55 +1233,6 @@ class DDLParserSuite extends AnalysisTest { assert(exc.getMessage.contains("There must be at least one WHEN clause in a MERGE statement")) } - test("show tables") { - comparePlans( - parsePlan("SHOW TABLES"), - ShowTables(UnresolvedNamespace(Seq.empty[String]), None)) - comparePlans( - parsePlan("SHOW TABLES '*test*'"), - ShowTables(UnresolvedNamespace(Seq.empty[String]), Some("*test*"))) - comparePlans( - parsePlan("SHOW TABLES LIKE '*test*'"), - ShowTables(UnresolvedNamespace(Seq.empty[String]), Some("*test*"))) - comparePlans( - parsePlan("SHOW TABLES FROM testcat.ns1.ns2.tbl"), - ShowTables(UnresolvedNamespace(Seq("testcat", "ns1", "ns2", "tbl")), None)) - comparePlans( - parsePlan("SHOW TABLES IN testcat.ns1.ns2.tbl"), - ShowTables(UnresolvedNamespace(Seq("testcat", "ns1", "ns2", "tbl")), None)) - comparePlans( - parsePlan("SHOW TABLES IN ns1 '*test*'"), - ShowTables(UnresolvedNamespace(Seq("ns1")), Some("*test*"))) - comparePlans( - parsePlan("SHOW TABLES IN ns1 LIKE '*test*'"), - ShowTables(UnresolvedNamespace(Seq("ns1")), Some("*test*"))) - } - - test("show table extended") { - comparePlans( - parsePlan("SHOW TABLE EXTENDED LIKE '*test*'"), - ShowTableStatement(None, "*test*", None)) - comparePlans( - parsePlan("SHOW TABLE EXTENDED FROM testcat.ns1.ns2 LIKE '*test*'"), - ShowTableStatement(Some(Seq("testcat", "ns1", "ns2")), "*test*", None)) - comparePlans( - parsePlan("SHOW TABLE EXTENDED IN testcat.ns1.ns2 LIKE '*test*'"), - ShowTableStatement(Some(Seq("testcat", "ns1", "ns2")), "*test*", None)) - comparePlans( - parsePlan("SHOW TABLE EXTENDED LIKE '*test*' PARTITION(ds='2008-04-09', hr=11)"), - ShowTableStatement(None, "*test*", Some(Map("ds" -> "2008-04-09", "hr" -> "11")))) - comparePlans( - parsePlan("SHOW TABLE EXTENDED FROM testcat.ns1.ns2 LIKE '*test*' " + - "PARTITION(ds='2008-04-09')"), - ShowTableStatement(Some(Seq("testcat", "ns1", "ns2")), "*test*", - Some(Map("ds" -> "2008-04-09")))) - comparePlans( - parsePlan("SHOW TABLE EXTENDED IN testcat.ns1.ns2 LIKE '*test*' " + - "PARTITION(ds='2008-04-09')"), - ShowTableStatement(Some(Seq("testcat", "ns1", "ns2")), "*test*", - Some(Map("ds" -> "2008-04-09")))) - } - test("show views") { comparePlans( parsePlan("SHOW VIEWS"), diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala index 893ee5f130cd..6c56535d9307 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala @@ -898,71 +898,9 @@ class DataSourceV2SQLSuite } } - test("ShowTables: using v2 catalog") { - spark.sql("CREATE TABLE testcat.db.table_name (id bigint, data string) USING foo") - spark.sql("CREATE TABLE testcat.n1.n2.db.table_name (id bigint, data string) USING foo") - - runShowTablesSql("SHOW TABLES FROM testcat.db", Seq(Row("db", "table_name"))) - - runShowTablesSql( - "SHOW TABLES FROM testcat.n1.n2.db", - Seq(Row("n1.n2.db", "table_name"))) - } - - test("ShowTables: using v2 catalog with 
a pattern") { - spark.sql("CREATE TABLE testcat.db.table (id bigint, data string) USING foo") - spark.sql("CREATE TABLE testcat.db.table_name_1 (id bigint, data string) USING foo") - spark.sql("CREATE TABLE testcat.db.table_name_2 (id bigint, data string) USING foo") - spark.sql("CREATE TABLE testcat.db2.table_name_2 (id bigint, data string) USING foo") - - runShowTablesSql( - "SHOW TABLES FROM testcat.db", - Seq( - Row("db", "table"), - Row("db", "table_name_1"), - Row("db", "table_name_2"))) - - runShowTablesSql( - "SHOW TABLES FROM testcat.db LIKE '*name*'", - Seq(Row("db", "table_name_1"), Row("db", "table_name_2"))) - - runShowTablesSql( - "SHOW TABLES FROM testcat.db LIKE '*2'", - Seq(Row("db", "table_name_2"))) - } - - test("ShowTables: using v2 catalog, namespace doesn't exist") { - runShowTablesSql("SHOW TABLES FROM testcat.unknown", Seq()) - } - - test("ShowTables: using v1 catalog") { - runShowTablesSql( - "SHOW TABLES FROM default", - Seq(Row("", "source", true), Row("", "source2", true)), - expectV2Catalog = false) - } - - test("ShowTables: using v1 catalog, db doesn't exist ") { - // 'db' below resolves to a database name for v1 catalog because there is no catalog named - // 'db' and there is no default catalog set. - val exception = intercept[NoSuchDatabaseException] { - runShowTablesSql("SHOW TABLES FROM db", Seq(), expectV2Catalog = false) - } - - assert(exception.getMessage.contains("Database 'db' not found")) - } - - test("ShowTables: using v1 catalog, db name with multipartIdentifier ('a.b') is not allowed.") { - val exception = intercept[AnalysisException] { - runShowTablesSql("SHOW TABLES FROM a.b", Seq(), expectV2Catalog = false) - } - - assert(exception.getMessage.contains("The database name is not valid: a.b")) - } - test("ShowViews: using v1 catalog, db name with multipartIdentifier ('a.b') is not allowed.") { val exception = intercept[AnalysisException] { - sql("SHOW TABLES FROM a.b") + sql("SHOW VIEWS FROM a.b") } assert(exception.getMessage.contains("The database name is not valid: a.b")) @@ -977,48 +915,6 @@ class DataSourceV2SQLSuite " only SessionCatalog supports this command.")) } - test("ShowTables: using v2 catalog with empty namespace") { - spark.sql("CREATE TABLE testcat.table (id bigint, data string) USING foo") - runShowTablesSql("SHOW TABLES FROM testcat", Seq(Row("", "table"))) - } - - test("ShowTables: namespace is not specified and default v2 catalog is set") { - spark.conf.set(SQLConf.DEFAULT_CATALOG.key, "testcat") - spark.sql("CREATE TABLE testcat.table (id bigint, data string) USING foo") - - // v2 catalog is used where default namespace is empty for TestInMemoryTableCatalog. - runShowTablesSql("SHOW TABLES", Seq(Row("", "table"))) - } - - test("ShowTables: namespace not specified and default v2 catalog not set - fallback to v1") { - runShowTablesSql( - "SHOW TABLES", - Seq(Row("", "source", true), Row("", "source2", true)), - expectV2Catalog = false) - - runShowTablesSql( - "SHOW TABLES LIKE '*2'", - Seq(Row("", "source2", true)), - expectV2Catalog = false) - } - - test("ShowTables: change current catalog and namespace with USE statements") { - sql("CREATE TABLE testcat.ns1.ns2.table (id bigint) USING foo") - - // Initially, the v2 session catalog (current catalog) is used. - runShowTablesSql( - "SHOW TABLES", Seq(Row("", "source", true), Row("", "source2", true)), - expectV2Catalog = false) - - // Update the current catalog, and no table is matched since the current namespace is Array(). 
- sql("USE testcat") - runShowTablesSql("SHOW TABLES", Seq()) - - // Update the current namespace to match ns1.ns2.table. - sql("USE testcat.ns1.ns2") - runShowTablesSql("SHOW TABLES", Seq(Row("ns1.ns2", "table"))) - } - private def runShowTablesSql( sqlText: String, expected: Seq[Row], @@ -1039,50 +935,6 @@ class DataSourceV2SQLSuite assert(expected === df.collect()) } - test("SHOW TABLE EXTENDED not valid v1 database") { - def testV1CommandNamespace(sqlCommand: String, namespace: String): Unit = { - val e = intercept[AnalysisException] { - sql(sqlCommand) - } - assert(e.message.contains(s"The database name is not valid: ${namespace}")) - } - - val namespace = "testcat.ns1.ns2" - val table = "tbl" - withTable(s"$namespace.$table") { - sql(s"CREATE TABLE $namespace.$table (id bigint, data string) " + - s"USING foo PARTITIONED BY (id)") - - testV1CommandNamespace(s"SHOW TABLE EXTENDED FROM $namespace LIKE 'tb*'", - namespace) - testV1CommandNamespace(s"SHOW TABLE EXTENDED IN $namespace LIKE 'tb*'", - namespace) - testV1CommandNamespace("SHOW TABLE EXTENDED " + - s"FROM $namespace LIKE 'tb*' PARTITION(id=1)", - namespace) - testV1CommandNamespace("SHOW TABLE EXTENDED " + - s"IN $namespace LIKE 'tb*' PARTITION(id=1)", - namespace) - } - } - - test("SHOW TABLE EXTENDED valid v1") { - val expected = Seq(Row("", "source", true), Row("", "source2", true)) - val schema = new StructType() - .add("database", StringType, nullable = false) - .add("tableName", StringType, nullable = false) - .add("isTemporary", BooleanType, nullable = false) - .add("information", StringType, nullable = false) - - val df = sql("SHOW TABLE EXTENDED FROM default LIKE '*source*'") - val result = df.collect() - val resultWithoutInfo = result.map{ case Row(db, table, temp, _) => Row(db, table, temp)} - - assert(df.schema === schema) - assert(resultWithoutInfo === expected) - result.foreach{ case Row(_, _, _, info: String) => assert(info.nonEmpty)} - } - test("CreateNameSpace: basic tests") { // Session catalog is used. withNamespace("ns") { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesParserSuite.scala new file mode 100644 index 000000000000..16f3dea8d75e --- /dev/null +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesParserSuite.scala @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.spark.sql.execution.command + +import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedNamespace} +import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan +import org.apache.spark.sql.catalyst.plans.logical.{ShowTables, ShowTableStatement} +import org.apache.spark.sql.test.SharedSparkSession + +class ShowTablesParserSuite extends AnalysisTest with SharedSparkSession { + private val catalog = "test_catalog" + + test("show tables") { + comparePlans( + parsePlan("SHOW TABLES"), + ShowTables(UnresolvedNamespace(Seq.empty[String]), None)) + comparePlans( + parsePlan("SHOW TABLES '*test*'"), + ShowTables(UnresolvedNamespace(Seq.empty[String]), Some("*test*"))) + comparePlans( + parsePlan("SHOW TABLES LIKE '*test*'"), + ShowTables(UnresolvedNamespace(Seq.empty[String]), Some("*test*"))) + comparePlans( + parsePlan(s"SHOW TABLES FROM $catalog.ns1.ns2.tbl"), + ShowTables(UnresolvedNamespace(Seq(catalog, "ns1", "ns2", "tbl")), None)) + comparePlans( + parsePlan(s"SHOW TABLES IN $catalog.ns1.ns2.tbl"), + ShowTables(UnresolvedNamespace(Seq(catalog, "ns1", "ns2", "tbl")), None)) + comparePlans( + parsePlan("SHOW TABLES IN ns1 '*test*'"), + ShowTables(UnresolvedNamespace(Seq("ns1")), Some("*test*"))) + comparePlans( + parsePlan("SHOW TABLES IN ns1 LIKE '*test*'"), + ShowTables(UnresolvedNamespace(Seq("ns1")), Some("*test*"))) + } + + test("show table extended") { + comparePlans( + parsePlan("SHOW TABLE EXTENDED LIKE '*test*'"), + ShowTableStatement(None, "*test*", None)) + comparePlans( + parsePlan(s"SHOW TABLE EXTENDED FROM $catalog.ns1.ns2 LIKE '*test*'"), + ShowTableStatement(Some(Seq(catalog, "ns1", "ns2")), "*test*", None)) + comparePlans( + parsePlan(s"SHOW TABLE EXTENDED IN $catalog.ns1.ns2 LIKE '*test*'"), + ShowTableStatement(Some(Seq(catalog, "ns1", "ns2")), "*test*", None)) + comparePlans( + parsePlan("SHOW TABLE EXTENDED LIKE '*test*' PARTITION(ds='2008-04-09', hr=11)"), + ShowTableStatement(None, "*test*", Some(Map("ds" -> "2008-04-09", "hr" -> "11")))) + comparePlans( + parsePlan(s"SHOW TABLE EXTENDED FROM $catalog.ns1.ns2 LIKE '*test*' " + + "PARTITION(ds='2008-04-09')"), + ShowTableStatement(Some(Seq(catalog, "ns1", "ns2")), "*test*", + Some(Map("ds" -> "2008-04-09")))) + comparePlans( + parsePlan(s"SHOW TABLE EXTENDED IN $catalog.ns1.ns2 LIKE '*test*' " + + "PARTITION(ds='2008-04-09')"), + ShowTableStatement(Some(Seq(catalog, "ns1", "ns2")), "*test*", + Some(Map("ds" -> "2008-04-09")))) + } +} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesSuite.scala new file mode 100644 index 000000000000..01720b572324 --- /dev/null +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesSuite.scala @@ -0,0 +1,122 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.execution.command + +import org.scalactic.source.Position +import org.scalatest.Tag + +import org.apache.spark.sql.Row +import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._ +import org.apache.spark.sql.internal.SQLConf +import org.apache.spark.sql.test.SharedSparkSession +import org.apache.spark.sql.types.StructType + +trait ShowTablesSuite extends SharedSparkSession { + protected def version: String + protected def catalog: String + protected def defaultNamespace: Seq[String] + protected def defaultUsing: String + case class ShowRow(namespace: String, table: String, isTemporary: Boolean) + protected def getRows(showRows: Seq[ShowRow]): Seq[Row] + // Gets the schema of `SHOW TABLES` + protected def showSchema: StructType + + protected def runShowTablesSql(sqlText: String, expected: Seq[ShowRow]): Unit = { + val df = spark.sql(sqlText) + assert(df.schema === showSchema) + assert(df.collect() === getRows(expected)) + } + + override def test(testName: String, testTags: Tag*)(testFun: => Any) + (implicit pos: Position): Unit = { + super.test(s"SHOW TABLES $version: " + testName, testTags: _*)(testFun) + } + + test("show an existing table") { + withNamespace(s"$catalog.ns") { + sql(s"CREATE NAMESPACE $catalog.ns") + withTable(s"$catalog.ns.table") { + sql(s"CREATE TABLE $catalog.ns.table (name STRING, id INT) $defaultUsing") + runShowTablesSql(s"SHOW TABLES IN $catalog.ns", Seq(ShowRow("ns", "table", false))) + } + } + } + + test("show tables with a pattern") { + withNamespace(s"$catalog.ns1", s"$catalog.ns2") { + sql(s"CREATE NAMESPACE $catalog.ns1") + sql(s"CREATE NAMESPACE $catalog.ns2") + withTable( + s"$catalog.ns1.table", + s"$catalog.ns1.table_name_1", + s"$catalog.ns1.table_name_2", + s"$catalog.ns2.table_name_2") { + sql(s"CREATE TABLE $catalog.ns1.table (id bigint, data string) $defaultUsing") + sql(s"CREATE TABLE $catalog.ns1.table_name_1 (id bigint, data string) $defaultUsing") + sql(s"CREATE TABLE $catalog.ns1.table_name_2 (id bigint, data string) $defaultUsing") + sql(s"CREATE TABLE $catalog.ns2.table_name_2 (id bigint, data string) $defaultUsing") + + runShowTablesSql( + s"SHOW TABLES FROM $catalog.ns1", + Seq( + ShowRow("ns1", "table", false), + ShowRow("ns1", "table_name_1", false), + ShowRow("ns1", "table_name_2", false))) + + runShowTablesSql( + s"SHOW TABLES FROM $catalog.ns1 LIKE '*name*'", + Seq( + ShowRow("ns1", "table_name_1", false), + ShowRow("ns1", "table_name_2", false))) + + runShowTablesSql( + s"SHOW TABLES FROM $catalog.ns1 LIKE '*2'", + Seq(ShowRow("ns1", "table_name_2", false))) + } + } + } + + test("show tables with current catalog and namespace") { + withSQLConf(SQLConf.DEFAULT_CATALOG.key -> catalog) { + val tblName = (catalog +: defaultNamespace :+ "table").quoted + withTable(tblName) { + sql(s"CREATE TABLE $tblName (name STRING, id INT) $defaultUsing") + val ns = defaultNamespace.mkString(".") + runShowTablesSql("SHOW TABLES", Seq(ShowRow(ns, "table", false))) + } + } + } + + test("change current catalog and namespace with USE statements") { + withNamespace(s"$catalog.ns") { + sql(s"CREATE 
NAMESPACE $catalog.ns") + withTable(s"$catalog.ns.table") { + sql(s"CREATE TABLE $catalog.ns.table (name STRING, id INT) $defaultUsing") + + sql(s"USE $catalog") + // No table is matched since the current namespace is not ["ns"] + assert(defaultNamespace != Seq("ns")) + runShowTablesSql("SHOW TABLES", Seq()) + + // Update the current namespace to match "ns.tbl". + sql(s"USE $catalog.ns") + runShowTablesSql("SHOW TABLES", Seq(ShowRow("ns", "table", false))) + } + } + } +} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala new file mode 100644 index 000000000000..feb3bc623f3f --- /dev/null +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.execution.command.v1 + +import org.apache.spark.sql.{AnalysisException, Row} +import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException +import org.apache.spark.sql.connector.catalog.CatalogManager +import org.apache.spark.sql.execution.command.{ShowTablesSuite => CommonShowTablesSuite} +import org.apache.spark.sql.types.{BooleanType, StringType, StructType} + +class ShowTablesSuite extends CommonShowTablesSuite { + override def version: String = "V1" + override def catalog: String = CatalogManager.SESSION_CATALOG_NAME + override def defaultNamespace: Seq[String] = Seq("default") + override def defaultUsing: String = "USING parquet" + override def showSchema: StructType = { + new StructType() + .add("database", StringType, nullable = false) + .add("tableName", StringType, nullable = false) + .add("isTemporary", BooleanType, nullable = false) + } + override def getRows(showRows: Seq[ShowRow]): Seq[Row] = { + showRows.map { + case ShowRow(namespace, table, isTemporary) => Row(namespace, table, isTemporary) + } + } + + private def withSourceViews(f: => Unit): Unit = { + withTable("source", "source2") { + val df = spark.createDataFrame(Seq((1L, "a"), (2L, "b"), (3L, "c"))).toDF("id", "data") + df.createOrReplaceTempView("source") + val df2 = spark.createDataFrame(Seq((4L, "d"), (5L, "e"), (6L, "f"))).toDF("id", "data") + df2.createOrReplaceTempView("source2") + f + } + } + + // `SHOW TABLES` returns empty result in V2 catalog instead of throwing the exception. + test("show table in a not existing namespace") { + val msg = intercept[NoSuchDatabaseException] { + runShowTablesSql(s"SHOW TABLES IN $catalog.unknown", Seq()) + }.getMessage + assert(msg.contains("Database 'unknown' not found")) + } + + // `SHOW TABLES` from v2 catalog returns empty result. 
+ test("v1 SHOW TABLES list the temp views") { + withSourceViews { + runShowTablesSql( + "SHOW TABLES FROM default", + Seq(ShowRow("", "source", true), ShowRow("", "source2", true))) + } + } + + test("v1 SHOW TABLES only support single-level namespace") { + val exception = intercept[AnalysisException] { + runShowTablesSql("SHOW TABLES FROM a.b", Seq()) + } + assert(exception.getMessage.contains("The database name is not valid: a.b")) + } + + test("SHOW TABLE EXTENDED from default") { + withSourceViews { + val expected = Seq(Row("", "source", true), Row("", "source2", true)) + val schema = new StructType() + .add("database", StringType, nullable = false) + .add("tableName", StringType, nullable = false) + .add("isTemporary", BooleanType, nullable = false) + .add("information", StringType, nullable = false) + + val df = sql("SHOW TABLE EXTENDED FROM default LIKE '*source*'") + val result = df.collect() + val resultWithoutInfo = result.map { case Row(db, table, temp, _) => Row(db, table, temp) } + + assert(df.schema === schema) + assert(resultWithoutInfo === expected) + result.foreach { case Row(_, _, _, info: String) => assert(info.nonEmpty) } + } + } +} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowTablesSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowTablesSuite.scala new file mode 100644 index 000000000000..668120ae1cad --- /dev/null +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowTablesSuite.scala @@ -0,0 +1,115 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.execution.command.v2 + +import org.apache.spark.SparkConf +import org.apache.spark.sql.{AnalysisException, Row} +import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException +import org.apache.spark.sql.connector.InMemoryTableCatalog +import org.apache.spark.sql.execution.command.{ShowTablesSuite => CommonShowTablesSuite} +import org.apache.spark.sql.types.{StringType, StructType} + +class ShowTablesSuite extends CommonShowTablesSuite { + override def version: String = "V2" + override def catalog: String = "test_catalog" + override def defaultNamespace: Seq[String] = Nil + override def defaultUsing: String = "USING _" + override def showSchema: StructType = { + new StructType() + .add("namespace", StringType, nullable = false) + .add("tableName", StringType, nullable = false) + } + override def getRows(showRows: Seq[ShowRow]): Seq[Row] = { + showRows.map { + case ShowRow(namespace, table, _) => Row(namespace, table) + } + } + + override def sparkConf: SparkConf = super.sparkConf + .set(s"spark.sql.catalog.$catalog", classOf[InMemoryTableCatalog].getName) + + // The test fails with the exception `NoSuchDatabaseException` in V1 catalog. 
+ // TODO(SPARK-33394): Throw `NoSuchDatabaseException` for not existing namespace + test("show table in a not existing namespace") { + runShowTablesSql(s"SHOW TABLES IN $catalog.unknown", Seq()) + } + + // The test fails for V1 catalog with the error: + // org.apache.spark.sql.AnalysisException: + // The namespace in session catalog must have exactly one name part: spark_catalog.n1.n2.db + test("show tables in nested namespaces") { + withTable(s"$catalog.n1.n2.db") { + spark.sql(s"CREATE TABLE $catalog.n1.n2.db.table_name (id bigint, data string) $defaultUsing") + runShowTablesSql( + s"SHOW TABLES FROM $catalog.n1.n2.db", + Seq(ShowRow("n1.n2.db", "table_name", false))) + } + } + + // The test fails for V1 catalog with the error: + // org.apache.spark.sql.AnalysisException: + // The namespace in session catalog must have exactly one name part: spark_catalog.table + test("using v2 catalog with empty namespace") { + withTable(s"$catalog.table") { + spark.sql(s"CREATE TABLE $catalog.table (id bigint, data string) $defaultUsing") + runShowTablesSql(s"SHOW TABLES FROM $catalog", Seq(ShowRow("", "table", false))) + } + } + + // The test fails for V1 catalog with the error: + // org.apache.spark.sql.AnalysisException: + // The namespace in session catalog must have exactly one name part: spark_catalog.ns1.ns2.tbl + test("SHOW TABLE EXTENDED not valid v1 database") { + def testV1CommandNamespace(sqlCommand: String, namespace: String): Unit = { + val e = intercept[AnalysisException] { + sql(sqlCommand) + } + assert(e.message.contains(s"The database name is not valid: ${namespace}")) + } + + val namespace = s"$catalog.ns1.ns2" + val table = "tbl" + withTable(s"$namespace.$table") { + sql(s"CREATE TABLE $namespace.$table (id bigint, data string) " + + s"$defaultUsing PARTITIONED BY (id)") + + testV1CommandNamespace(s"SHOW TABLE EXTENDED FROM $namespace LIKE 'tb*'", + namespace) + testV1CommandNamespace(s"SHOW TABLE EXTENDED IN $namespace LIKE 'tb*'", + namespace) + testV1CommandNamespace("SHOW TABLE EXTENDED " + + s"FROM $namespace LIKE 'tb*' PARTITION(id=1)", + namespace) + testV1CommandNamespace("SHOW TABLE EXTENDED " + + s"IN $namespace LIKE 'tb*' PARTITION(id=1)", + namespace) + } + } + + // TODO(SPARK-33393): Support SHOW TABLE EXTENDED in DSv2 + test("SHOW TABLE EXTENDED: an existing table") { + val table = "people" + withTable(s"$catalog.$table") { + sql(s"CREATE TABLE $catalog.$table (name STRING, id INT) $defaultUsing") + val errMsg = intercept[NoSuchDatabaseException] { + sql(s"SHOW TABLE EXTENDED FROM $catalog LIKE '*$table*'").collect() + }.getMessage + assert(errMsg.contains(s"Database '$catalog' not found")) + } + } +}
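For reference, the shared `ShowTablesSuite` trait introduced above is meant to be extended once per catalog implementation, with all catalog-specific behavior confined to the overridden members so the common tests run unchanged against every catalog. A minimal sketch of such an extension follows (not part of the patch; the catalog name "other_catalog" and the plugin class "com.example.OtherCatalog" are hypothetical placeholders):

package org.apache.spark.sql.execution.command.other

import org.apache.spark.SparkConf
import org.apache.spark.sql.Row
import org.apache.spark.sql.execution.command.{ShowTablesSuite => CommonShowTablesSuite}
import org.apache.spark.sql.types.{StringType, StructType}

// Hypothetical suite for a third catalog implementation reusing the common trait.
class ShowTablesSuite extends CommonShowTablesSuite {
  override def version: String = "Other"
  override def catalog: String = "other_catalog"
  override def defaultNamespace: Seq[String] = Nil
  override def defaultUsing: String = "USING parquet"

  // Schema that SHOW TABLES is expected to return for this (v2-style) catalog.
  override def showSchema: StructType = new StructType()
    .add("namespace", StringType, nullable = false)
    .add("tableName", StringType, nullable = false)

  // Map the catalog-agnostic ShowRow to the rows this catalog actually returns.
  override def getRows(showRows: Seq[ShowRow]): Seq[Row] =
    showRows.map { case ShowRow(namespace, table, _) => Row(namespace, table) }

  // Register the hypothetical catalog plugin for the test session.
  override def sparkConf: SparkConf = super.sparkConf
    .set(s"spark.sql.catalog.$catalog", "com.example.OtherCatalog")
}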