@@ -386,7 +386,7 @@ case class CatalogTable(
    val tableProperties = properties
      .filterKeys(!_.startsWith(VIEW_PREFIX))
      .toSeq.sortBy(_._1)
-      .map(p => p._1 + "=" + p._2).mkString("[", ", ", "]")
Contributor:

why this change?

Contributor Author:

original code:
val tableProperties = properties
      .filterKeys(!_.startsWith(VIEW_PREFIX))
      .toSeq.sortBy(_._1)
      .map(p => p._1 + "=" + p._2).mkString("[", ", ", "]")
...
if (properties.nonEmpty) map.put("Table Properties", tableProperties) // I think it would be more reasonable to check here whether tableProperties is empty, but tableProperties is the string "[]", which is never empty.

Contributor:

makes sense

+      .map(p => p._1 + "=" + p._2)
    val partitionColumns = partitionColumnNames.map(quoteIdentifier).mkString("[", ", ", "]")
    val lastAccess = {
      if (lastAccessTime <= 0) "UNKNOWN" else new Date(lastAccessTime).toString
@@ -414,7 +414,9 @@
      }
    }

-    if (properties.nonEmpty) map.put("Table Properties", tableProperties)
+    if (tableProperties.nonEmpty) {
+      map.put("Table Properties", tableProperties.mkString("[", ", ", "]"))
+    }
    stats.foreach(s => map.put("Statistics", s.simpleString))
    map ++= storage.toLinkedHashMap
    if (tracksPartitionsInCatalog) map.put("Partition Provider", "Catalog")
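A note on the thread above: for a view, the `properties` map typically holds only internal `view.`-prefixed bookkeeping entries, so `properties.nonEmpty` was true even when the filter dropped every entry, and the eagerly built display string was the non-empty literal "[]". A minimal standalone sketch of the old check versus the fixed one, using an illustrative key (this is not the Spark source):

object TablePropertiesCheckSketch {
  val VIEW_PREFIX = "view."  // mirrors CatalogTable.VIEW_PREFIX

  def main(args: Array[String]): Unit = {
    // Illustrative: a view whose only properties are internal bookkeeping.
    val properties = Map("view.catalogAndNamespace.numParts" -> "2")

    // Old check: build the bracketed string eagerly, then test the raw map.
    val asString = properties
      .filterKeys(!_.startsWith(VIEW_PREFIX))
      .toSeq.sortBy(_._1)
      .map(p => p._1 + "=" + p._2).mkString("[", ", ", "]")
    println(properties.nonEmpty) // true, so "Table Properties []" was emitted
    println(asString)            // [] -- nothing user-visible survived the filter

    // New check: keep the filtered pairs and test them for emptiness.
    val filtered = properties
      .filterKeys(!_.startsWith(VIEW_PREFIX))
      .toSeq.sortBy(_._1)
      .map(p => p._1 + "=" + p._2)
    if (filtered.nonEmpty) println(filtered.mkString("[", ", ", "]")) // no output
  }
}

Deferring mkString until after the emptiness check is why the empty "Table Properties []" rows disappear from the golden files further down.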
@@ -23,8 +23,9 @@ import scala.collection.JavaConverters._
import scala.collection.mutable

import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.catalog.CatalogTable
+import org.apache.spark.sql.catalyst.catalog.{CatalogTable, CatalogTableType}
import org.apache.spark.sql.catalyst.util.quoteIfNeeded
+import org.apache.spark.sql.connector.catalog.V1Table.addV2TableProperties
import org.apache.spark.sql.connector.expressions.{LogicalExpressions, Transform}
import org.apache.spark.sql.types.StructType

@@ -55,7 +56,7 @@ private[sql] case class V1Table(v1Table: CatalogTable) extends Table {
    }
  }

-  override lazy val properties: util.Map[String, String] = v1Table.properties.asJava
+  override lazy val properties: util.Map[String, String] = addV2TableProperties(v1Table).asJava

  override lazy val schema: StructType = v1Table.schema

@@ -82,6 +83,21 @@ private[sql] case class V1Table(v1Table: CatalogTable) extends Table {
  override def toString: String = s"V1Table($name)"
}

+private[sql] object V1Table {
+  def addV2TableProperties(v1Table: CatalogTable): Map[String, String] = {
+    val external = v1Table.tableType == CatalogTableType.EXTERNAL
+
+    v1Table.properties ++
+      v1Table.storage.properties.map { case (key, value) =>
+        TableCatalog.OPTION_PREFIX + key -> value } ++
+      v1Table.provider.map(TableCatalog.PROP_PROVIDER -> _) ++
+      v1Table.comment.map(TableCatalog.PROP_COMMENT -> _) ++
+      v1Table.storage.locationUri.map(TableCatalog.PROP_LOCATION -> _.toString) ++
+      (if (external) Some(TableCatalog.PROP_EXTERNAL -> "true") else None) ++
+      Some(TableCatalog.PROP_OWNER -> v1Table.owner)
+  }
+}

/**
* A V2 table with V1 fallback support. This is used to fallback to V1 table when the V2 one
* doesn't implement specific capabilities but V1 already has.
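To make the merge above concrete, here is a hedged, self-contained sketch with CatalogTable replaced by plain values (the constant name and sample data are illustrative stand-ins for TableCatalog.OPTION_PREFIX and the PROP_* keys): each Option contributes zero or one entry, storage options gain the "option." prefix, and an owner entry is always appended.

object AddV2PropertiesSketch {
  val OptionPrefix = "option."  // stand-in for TableCatalog.OPTION_PREFIX

  def main(args: Array[String]): Unit = {
    val tableProps   = Map("prop1" -> "1", "prop2" -> "2")
    val storageProps = Map("from" -> "0", "to" -> "1")
    val provider     = Some("parquet")
    val comment      = Option.empty[String] // contributes no entry
    val location     = Some("file:/tmp")
    val external     = true
    val owner        = "alice"

    // Options fold zero-or-one pairs into the map; later entries win.
    val merged = tableProps ++
      storageProps.map { case (k, v) => OptionPrefix + k -> v } ++
      provider.map("provider" -> _) ++
      comment.map("comment" -> _) ++
      location.map("location" -> _) ++
      (if (external) Some("external" -> "true") else None) ++
      Some("owner" -> owner)

    merged.toSeq.sortBy(_._1).foreach { case (k, v) => println(s"$k=$v") }
    // external=true, location=file:/tmp, option.from=0, option.to=1,
    // owner=alice, prop1=1, prop2=2, provider=parquet
  }
}

The resulting pairs line up with the assertions in the new DataSourceV2SQLSuite test further down.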
@@ -193,8 +193,7 @@ Type VIEW
View Text select * from char_tbl
View Original Text select * from char_tbl
View Catalog and Namespace spark_catalog.default
-View Query Output Columns [c, v]
-Table Properties []
+View Query Output Columns [c, v]


-- !query
@@ -342,8 +341,7 @@ Type VIEW
View Text select * from char_tbl2
View Original Text select * from char_tbl2
View Catalog and Namespace spark_catalog.default
-View Query Output Columns [c, v]
-Table Properties []
+View Query Output Columns [c, v]


-- !query
@@ -459,8 +457,7 @@ Type VIEW
View Text select * from char_tbl2
View Original Text select * from char_tbl2
View Catalog and Namespace spark_catalog.default
-View Query Output Columns [c, v]
-Table Properties []
+View Query Output Columns [c, v]


-- !query
@@ -507,8 +507,7 @@ Type VIEW
View Text SELECT * FROM t
View Original Text SELECT * FROM t
View Catalog and Namespace spark_catalog.default
-View Query Output Columns [a, b, c, d]
-Table Properties []
+View Query Output Columns [a, b, c, d]


-- !query
@@ -531,8 +530,7 @@ Type VIEW
View Text SELECT * FROM t
View Original Text SELECT * FROM t
View Catalog and Namespace spark_catalog.default
-View Query Output Columns [a, b, c, d]
-Table Properties []
+View Query Output Columns [a, b, c, d]


-- !query
@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
--- Number of queries: 74
+-- Number of queries: 76


-- !query
@@ -250,8 +250,7 @@ Type VIEW
View Text SELECT * FROM base_table
View Original Text SELECT * FROM base_table
View Catalog and Namespace spark_catalog.temp_view_test
-View Query Output Columns [a, id]
-Table Properties []
+View Query Output Columns [a, id]


-- !query
@@ -306,8 +305,7 @@ Type VIEW
View Text SELECT * FROM base_table
View Original Text SELECT * FROM base_table
View Catalog and Namespace spark_catalog.temp_view_test
-View Query Output Columns [a, id]
-Table Properties []
+View Query Output Columns [a, id]


-- !query
@@ -352,8 +350,7 @@ View Original Text SELECT t1.a AS t1_a, t2.a AS t2_a
FROM base_table t1, base_table2 t2
WHERE t1.id = t2.id
View Catalog and Namespace spark_catalog.temp_view_test
-View Query Output Columns [t1_a, t2_a]
-Table Properties []
+View Query Output Columns [t1_a, t2_a]


-- !query
@@ -406,8 +403,7 @@ Type VIEW
View Text SELECT * FROM base_table WHERE id IN (SELECT id FROM base_table2)
View Original Text SELECT * FROM base_table WHERE id IN (SELECT id FROM base_table2)
View Catalog and Namespace spark_catalog.temp_view_test
-View Query Output Columns [a, id]
-Table Properties []
+View Query Output Columns [a, id]


-- !query
@@ -436,8 +432,7 @@ Type VIEW
View Text SELECT t1.id, t2.a FROM base_table t1, (SELECT * FROM base_table2) t2
View Original Text SELECT t1.id, t2.a FROM base_table t1, (SELECT * FROM base_table2) t2
View Catalog and Namespace spark_catalog.temp_view_test
-View Query Output Columns [id, a]
-Table Properties []
+View Query Output Columns [id, a]


-- !query
@@ -466,8 +461,7 @@ Type VIEW
View Text SELECT * FROM base_table WHERE EXISTS (SELECT 1 FROM base_table2)
View Original Text SELECT * FROM base_table WHERE EXISTS (SELECT 1 FROM base_table2)
View Catalog and Namespace spark_catalog.temp_view_test
-View Query Output Columns [a, id]
-Table Properties []
+View Query Output Columns [a, id]


-- !query
@@ -496,8 +490,7 @@ Type VIEW
View Text SELECT * FROM base_table WHERE NOT EXISTS (SELECT 1 FROM base_table2)
View Original Text SELECT * FROM base_table WHERE NOT EXISTS (SELECT 1 FROM base_table2)
View Catalog and Namespace spark_catalog.temp_view_test
-View Query Output Columns [a, id]
-Table Properties []
+View Query Output Columns [a, id]


-- !query
@@ -526,8 +519,7 @@ Type VIEW
View Text SELECT * FROM base_table WHERE EXISTS (SELECT 1)
View Original Text SELECT * FROM base_table WHERE EXISTS (SELECT 1)
View Catalog and Namespace spark_catalog.temp_view_test
-View Query Output Columns [a, id]
-Table Properties []
+View Query Output Columns [a, id]


-- !query
@@ -662,8 +654,7 @@ Type VIEW
View Text SELECT * FROM t1 CROSS JOIN t2
View Original Text SELECT * FROM t1 CROSS JOIN t2
View Catalog and Namespace spark_catalog.testviewschm2
-View Query Output Columns [num, name, num2, value]
-Table Properties []
+View Query Output Columns [num, name, num2, value]


-- !query
@@ -703,8 +694,7 @@ Type VIEW
View Text SELECT * FROM t1 INNER JOIN t2 ON t1.num = t2.num2
View Original Text SELECT * FROM t1 INNER JOIN t2 ON t1.num = t2.num2
View Catalog and Namespace spark_catalog.testviewschm2
-View Query Output Columns [num, name, num2, value]
-Table Properties []
+View Query Output Columns [num, name, num2, value]


-- !query
@@ -744,8 +734,7 @@ Type VIEW
View Text SELECT * FROM t1 LEFT JOIN t2 ON t1.num = t2.num2
View Original Text SELECT * FROM t1 LEFT JOIN t2 ON t1.num = t2.num2
View Catalog and Namespace spark_catalog.testviewschm2
-View Query Output Columns [num, name, num2, value]
-Table Properties []
+View Query Output Columns [num, name, num2, value]


-- !query
@@ -785,8 +774,7 @@ Type VIEW
View Text SELECT * FROM t1 LEFT JOIN t2 ON t1.num = t2.num2 AND t2.value = 'xxx'
View Original Text SELECT * FROM t1 LEFT JOIN t2 ON t1.num = t2.num2 AND t2.value = 'xxx'
View Catalog and Namespace spark_catalog.testviewschm2
-View Query Output Columns [num, name, num2, value]
-Table Properties []
+View Query Output Columns [num, name, num2, value]


-- !query
@@ -887,8 +875,7 @@ View Original Text SELECT * FROM tbl1 WHERE tbl1.a
BETWEEN (SELECT d FROM tbl2 WHERE c = 1) AND (SELECT e FROM tbl3 WHERE f = 2)
AND EXISTS (SELECT g FROM tbl4 LEFT JOIN tbl3 ON tbl4.h = tbl3.f)
View Catalog and Namespace spark_catalog.testviewschm2
-View Query Output Columns [a, b]
-Table Properties []
+View Query Output Columns [a, b]


-- !query
@@ -926,8 +913,7 @@ BETWEEN (SELECT d FROM tbl2 WHERE c = 1) AND (SELECT e FROM tbl3 WHERE f = 2)
AND EXISTS (SELECT g FROM tbl4 LEFT JOIN tbl3 ON tbl4.h = tbl3.f)
AND NOT EXISTS (SELECT g FROM tbl4 LEFT JOIN tmptbl ON tbl4.h = tmptbl.j)
View Catalog and Namespace spark_catalog.testviewschm2
-View Query Output Columns [a, b]
-Table Properties []
+View Query Output Columns [a, b]


-- !query
@@ -127,7 +127,6 @@ Created Time [not included in comparison]
Last Access [not included in comparison]
Created By [not included in comparison]
Type: VIEW
-Table Properties: []
Schema: root
|-- e: integer (nullable = true)

@@ -2899,6 +2899,39 @@ class DataSourceV2SQLSuite
}
}

test("SPARK-37827: put build-in properties into V1Table.properties to adapt v2 command") {
val t = "tbl"
withTable(t) {
sql(
s"""
|CREATE TABLE $t (
| a bigint,
| b bigint
|)
|using parquet
|OPTIONS (
| from = 0,
| to = 1)
|COMMENT 'This is a comment'
|TBLPROPERTIES ('prop1' = '1', 'prop2' = '2')
|PARTITIONED BY (a)
|LOCATION '/tmp'
""".stripMargin)

val table = spark.sessionState.catalogManager.v2SessionCatalog.asTableCatalog
.loadTable(Identifier.of(Array("default"), t))
val properties = table.properties
assert(properties.get(TableCatalog.PROP_PROVIDER) == "parquet")
assert(properties.get(TableCatalog.PROP_COMMENT) == "This is a comment")
assert(properties.get(TableCatalog.PROP_LOCATION) == "file:/tmp")
assert(properties.containsKey(TableCatalog.PROP_OWNER))
assert(properties.get(s"${TableCatalog.OPTION_PREFIX}from") == "0")
assert(properties.get(s"${TableCatalog.OPTION_PREFIX}to") == "1")
assert(properties.get("prop1") == "1")
assert(properties.get("prop2") == "2")
}
}

  private def testNotSupportedV2Command(sqlCommand: String, sqlParams: String): Unit = {
    val e = intercept[AnalysisException] {
      sql(s"$sqlCommand $sqlParams")
@@ -42,8 +42,9 @@ trait ShowTblPropertiesSuiteBase extends QueryTest with DDLCommandTestUtils {
val status = "new"
spark.sql(s"CREATE TABLE $tbl (id bigint, data string) $defaultUsing " +
s"TBLPROPERTIES ('user'='$user', 'status'='$status')")

val properties = sql(s"SHOW TBLPROPERTIES $tbl").filter("key != 'transient_lastDdlTime'")
val properties = sql(s"SHOW TBLPROPERTIES $tbl")
.filter("key != 'transient_lastDdlTime'")
.filter("key != 'option.serialization.format'")
val schema = new StructType()
.add("key", StringType, nullable = false)
.add("value", StringType, nullable = false)