diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
index efbc9dd755894..ecf367846689e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
@@ -53,10 +53,15 @@ class V2SessionCatalog(catalog: SessionCatalog)
   override def listTables(namespace: Array[String]): Array[Identifier] = {
     namespace match {
       case Array(db) =>
-        catalog
+        val tables = catalog
           .listTables(db)
           .map(ident => Identifier.of(ident.database.map(Array(_)).getOrElse(Array()), ident.table))
           .toArray
+        val views = catalog
+          .listViews(db, "*")
+          .map(ident => Identifier.of(ident.database.map(Array(_)).getOrElse(Array()), ident.table))
+          .toArray
+        tables ++ views
       case _ =>
         throw QueryCompilationErrors.noSuchNamespaceError(namespace)
     }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
index e11b349777e8f..239f7e9d9bc94 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
@@ -95,11 +95,11 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
   }
 
   /**
-   * Returns a list of tables in the current database.
+   * Returns a list of tables in the current catalog and current database.
    * This includes all temporary tables.
    */
   override def listTables(): Dataset[Table] = {
-    listTables(currentDatabase)
+    listTables(currentCatalog() + "." + currentDatabase)
   }
 
   /**
@@ -120,7 +120,13 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
     val plan = ShowTables(UnresolvedNamespace(ident), None)
     val ret = sparkSession.sessionState.executePlan(plan).toRdd.collect()
     val tables = ret
-      .map(row => ident ++ Seq(row.getString(1)))
+      .map(row =>
+        // for views, the namespace is empty
+        if (row.getString(0).isEmpty) {
+          Seq(row.getString(1))
+        } else {
+          ident ++ Seq(row.getString(1))
+        })
       .map(makeTable)
     CatalogImpl.makeDataset(tables, sparkSession)
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
index 0de48325d981e..58c95775c3429 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
@@ -182,6 +182,31 @@ class CatalogSuite extends SharedSparkSession with AnalysisTest with BeforeAndAf
     assert(spark.catalog.listTables().collect().map(_.name).toSet == Set("my_table2"))
   }
 
+  test("SPARK-39828: Catalog.listTables() should respect currentCatalog") {
+    assert(spark.catalog.currentCatalog() == "spark_catalog")
+    assert(spark.catalog.listTables().collect().isEmpty)
+    createTable("my_table1")
+    assert(spark.catalog.listTables().collect().map(_.name).toSet == Set("my_table1"))
+
+    val catalogName = "testcat"
+    val dbName = "my_db"
+    val tableName = "my_table2"
+    val tableSchema = new StructType().add("i", "int")
+    val description = "this is a test managed table"
+    sql(s"CREATE NAMESPACE ${catalogName}.${dbName}")
+
+    spark.catalog.setCurrentCatalog(catalogName)
+    spark.catalog.setCurrentDatabase(dbName)
+    assert(spark.catalog.listTables().collect().isEmpty)
+
+    createTable(tableName, dbName, catalogName,
+      classOf[FakeV2Provider].getName, tableSchema, Map.empty[String, String], description)
+    assert(spark.catalog.listTables()
+      .collect()
+      .map(t => Array(t.catalog, t.namespace.mkString("."), t.name).mkString(".")).toSet ==
+      Set("testcat.my_db.my_table2"))
+  }
+
   test("list tables with database") {
     assert(spark.catalog.listTables("default").collect().isEmpty)
     createDatabase("my_db1")
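
As a quick end-to-end illustration of the behavior the new test pins down, the sketch below drives the public Catalog API the same way the suite does. It assumes a build with this patch applied and a V2 catalog registered under the name "testcat"; InMemoryTableCatalog ships only in Spark's test jars, so substitute any TableCatalog implementation on your classpath. ListTablesSketch and the object names are illustrative, not part of the patch.

import org.apache.spark.sql.SparkSession

object ListTablesSketch {
  def main(args: Array[String]): Unit = {
    // Register a V2 catalog under "testcat"; the class below is a test-only
    // implementation standing in for any TableCatalog on the classpath.
    val spark = SparkSession.builder()
      .master("local[1]")
      .config("spark.sql.catalog.testcat",
        "org.apache.spark.sql.connector.catalog.InMemoryTableCatalog")
      .getOrCreate()

    spark.sql("CREATE NAMESPACE testcat.my_db")
    spark.sql("CREATE TABLE testcat.my_db.my_table (i INT)")

    spark.catalog.setCurrentCatalog("testcat")
    spark.catalog.setCurrentDatabase("my_db")

    // Before the patch, listTables() resolved against spark_catalog's current
    // database and missed my_table; with the patch it resolves against the
    // current catalog and prints testcat.my_db.my_table.
    spark.catalog.listTables().collect().foreach { t =>
      println((Seq(t.catalog) ++ t.namespace ++ Seq(t.name)).mkString("."))
    }

    spark.stop()
  }
}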