@@ -324,7 +324,8 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder(conf) {
DescribeTableCommand(
visitTableIdentifier(ctx.tableIdentifier),
partitionSpec,
ctx.EXTENDED != null || ctx.FORMATTED != null)
ctx.EXTENDED != null,
ctx.FORMATTED != null)
Contributor

We deprecated DESC FORMATTED intentionally and made DESC EXTENDED behave the same as DESC FORMATTED. I really don't think users were querying the result of DESC EXTENDED before, since all the detailed table information is inside one column, so it's probably fine to break this behavior for display purposes only.
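To make the "inside one column" point concrete, here is a minimal sketch of the kind of query being talked about. It is an illustration only: the table t, the setup, and the filter are assumptions, and the exact shape of the DESC EXTENDED result differs between Spark versions.

import org.apache.spark.sql.SparkSession

object QueryDescExtendedSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("query-desc-extended-sketch")
      .master("local[*]")
      .getOrCreate()

    // Hypothetical table, created only so DESC has something to describe.
    spark.sql("CREATE TABLE t (a INT, b STRING) USING parquet")

    // DESC EXTENDED returns (col_name, data_type, comment) rows for the columns
    // plus extra rows for the table details. In the older behavior referenced in
    // this comment, those details were packed into a single cell of rows whose
    // col_name starts with '#', so a consumer who only wanted the column rows
    // had to filter them out, roughly like this:
    val columnsOnly = spark.sql("DESC EXTENDED t")
      .where("col_name NOT LIKE '#%' AND trim(col_name) != ''")

    columnsOnly.show(truncate = false)
    spark.stop()
  }
}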

Member Author

Oh, I see. Then you want to remove the header completely in all cases. Did I understand correctly?

Member Author

Or should I simply revert the following two commits after your review?

Contributor

For DESC t, we should change it back to be compatible with the previous behavior. For DESC EXTENDED, it should be the same as DESC FORMATTED, so we don't need to change it.
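A hedged sketch of the behavior this comment asks for, assuming a throwaway table t created only for illustration; the comments paraphrase the review thread rather than the output of an actual run.

import org.apache.spark.sql.SparkSession

object DescCompatibilitySketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("desc-compatibility-sketch")
      .master("local[*]")
      .getOrCreate()

    spark.sql("CREATE TABLE t (a STRING, b INT) USING parquet")

    // DESC t: plain column listing only, i.e. back to the pre-change output
    // without the "# col_name data_type comment" header row.
    spark.sql("DESC t").show(truncate = false)

    // DESC EXTENDED t and DESC FORMATTED t: kept equivalent, both appending the
    // detailed table information after the column listing.
    spark.sql("DESC EXTENDED t").show(truncate = false)
    spark.sql("DESC FORMATTED t").show(truncate = false)

    spark.stop()
  }
}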

}
}

@@ -500,7 +500,8 @@ case class TruncateTableCommand(
case class DescribeTableCommand(
table: TableIdentifier,
partitionSpec: TablePartitionSpec,
isExtended: Boolean)
isExtended: Boolean,
isFormatted: Boolean)
extends RunnableCommand {

override val output: Seq[Attribute] = Seq(
@@ -522,15 +523,15 @@ case class DescribeTableCommand(
throw new AnalysisException(
s"DESC PARTITION is not allowed on a temporary view: ${table.identifier}")
}
describeSchema(catalog.lookupRelation(table).schema, result)
describeSchema(catalog.lookupRelation(table).schema, result, isFormatted)
} else {
val metadata = catalog.getTableMetadata(table)
if (metadata.schema.isEmpty) {
// In older version(prior to 2.1) of Spark, the table schema can be empty and should be
// inferred at runtime. We should still support it.
describeSchema(sparkSession.table(metadata.identifier).schema, result)
describeSchema(sparkSession.table(metadata.identifier).schema, result, isFormatted)
} else {
describeSchema(metadata.schema, result)
describeSchema(metadata.schema, result, isFormatted)
}

describePartitionInfo(metadata, result)
@@ -539,7 +540,7 @@ case class DescribeTableCommand(
// Outputs the partition-specific info for the DDL command:
// "DESCRIBE [EXTENDED|FORMATTED] table_name PARTITION (partitionVal*)"
describeDetailedPartitionInfo(sparkSession, catalog, metadata, result)
} else if (isExtended) {
} else if (isExtended || isFormatted) {
describeFormattedTableInfo(metadata, result)
}
}
@@ -550,7 +551,7 @@ case class DescribeTableCommand(
private def describePartitionInfo(table: CatalogTable, buffer: ArrayBuffer[Row]): Unit = {
if (table.partitionColumnNames.nonEmpty) {
append(buffer, "# Partition Information", "", "")
describeSchema(table.partitionSchema, buffer)
describeSchema(table.partitionSchema, buffer, header = true)
}
}

@@ -578,7 +579,9 @@ case class DescribeTableCommand(
}
DDLUtils.verifyPartitionProviderIsHive(spark, metadata, "DESC PARTITION")
val partition = catalog.getPartition(table, partitionSpec)
if (isExtended) describeFormattedDetailedPartitionInfo(table, metadata, partition, result)
if (isExtended || isFormatted) {
describeFormattedDetailedPartitionInfo(table, metadata, partition, result)
}
}

private def describeFormattedDetailedPartitionInfo(
@@ -601,8 +604,13 @@ case class DescribeTableCommand(
table.storage.toLinkedHashMap.foreach(s => append(buffer, s._1, s._2, ""))
}

private def describeSchema(schema: StructType, buffer: ArrayBuffer[Row]): Unit = {
append(buffer, s"# ${output.head.name}", output(1).name, output(2).name)
private def describeSchema(
schema: StructType,
buffer: ArrayBuffer[Row],
header: Boolean): Unit = {
if (header) {
append(buffer, s"# ${output.head.name}", output(1).name, output(2).name)
}
schema.foreach { column =>
append(buffer, column.name, column.dataType.simpleString, column.getComment().orNull)
}
@@ -15,7 +15,6 @@ DESC test_change
-- !query 1 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 1 output
# col_name data_type comment
a int
b string
c int
@@ -35,7 +34,6 @@ DESC test_change
-- !query 3 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 3 output
# col_name data_type comment
a int
b string
c int
@@ -55,7 +53,6 @@ DESC test_change
-- !query 5 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 5 output
# col_name data_type comment
a int
b string
c int
@@ -94,7 +91,6 @@ DESC test_change
-- !query 8 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 8 output
# col_name data_type comment
a int
b string
c int
@@ -129,7 +125,6 @@ DESC test_change
-- !query 12 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 12 output
# col_name data_type comment
a int this is column a
b string #*02?`
c int
@@ -148,7 +143,6 @@ DESC test_change
-- !query 14 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 14 output
# col_name data_type comment
a int this is column a
b string #*02?`
c int
@@ -168,7 +162,6 @@ DESC test_change
-- !query 16 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 16 output
# col_name data_type comment
a int this is column a
b string #*02?`
c int
@@ -193,7 +186,6 @@ DESC test_change
-- !query 18 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 18 output
# col_name data_type comment
a int this is column a
b string #*02?`
c int
@@ -237,7 +229,6 @@ DESC test_change
-- !query 23 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 23 output
# col_name data_type comment
a int this is column A
b string #*02?`
c int
13 changes: 0 additions & 13 deletions sql/core/src/test/resources/sql-tests/results/describe.sql.out
@@ -54,7 +54,6 @@ DESCRIBE t
-- !query 5 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 5 output
# col_name data_type comment
a string
b int
c string
@@ -70,7 +69,6 @@ DESC default.t
-- !query 6 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 6 output
# col_name data_type comment
a string
b int
c string
@@ -86,7 +84,6 @@ DESC TABLE t
-- !query 7 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 7 output
# col_name data_type comment
a string
b int
c string
@@ -132,7 +129,6 @@ DESC EXTENDED t
-- !query 9 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 9 output
# col_name data_type comment
a string
b int
c string
@@ -162,7 +158,6 @@ DESC t PARTITION (c='Us', d=1)
-- !query 10 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 10 output
# col_name data_type comment
a string
b int
c string
@@ -178,7 +173,6 @@ DESC EXTENDED t PARTITION (c='Us', d=1)
-- !query 11 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 11 output
# col_name data_type comment
a string
b int
c string
@@ -268,7 +262,6 @@ DESC temp_v
-- !query 16 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 16 output
# col_name data_type comment
a string
b int
c string
@@ -280,7 +273,6 @@ DESC TABLE temp_v
-- !query 17 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 17 output
# col_name data_type comment
a string
b int
c string
@@ -304,7 +296,6 @@ DESC EXTENDED temp_v
-- !query 19 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 19 output
# col_name data_type comment
a string
b int
c string
@@ -316,7 +307,6 @@ DESC temp_Data_Source_View
-- !query 20 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 20 output
# col_name data_type comment
intType int test comment test1
stringType string
dateType date
@@ -349,7 +339,6 @@ DESC v
-- !query 22 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 22 output
# col_name data_type comment
a string
b int
c string
@@ -361,7 +350,6 @@ DESC TABLE v
-- !query 23 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 23 output
# col_name data_type comment
a string
b int
c string
@@ -396,7 +384,6 @@ DESC EXTENDED v
-- !query 25 schema
struct<col_name:string,data_type:string,comment:string>
-- !query 25 output
# col_name data_type comment
a string
b int
c string
@@ -224,13 +224,13 @@ class SparkSqlParserSuite extends PlanTest {
test("SPARK-17328 Fix NPE with EXPLAIN DESCRIBE TABLE") {
assertEqual("describe table t",
DescribeTableCommand(
TableIdentifier("t"), Map.empty, isExtended = false))
TableIdentifier("t"), Map.empty, isExtended = false, isFormatted = false))
assertEqual("describe table extended t",
DescribeTableCommand(
TableIdentifier("t"), Map.empty, isExtended = true))
TableIdentifier("t"), Map.empty, isExtended = true, isFormatted = false))
assertEqual("describe table formatted t",
DescribeTableCommand(
TableIdentifier("t"), Map.empty, isExtended = true))
TableIdentifier("t"), Map.empty, isExtended = false, isFormatted = true))

intercept("explain describe tables x", "Unsupported SQL statement")
}
@@ -806,7 +806,7 @@ class HiveDDLSuite

checkAnswer(
sql(s"DESC $tabName").select("col_name", "data_type", "comment"),
Row("# col_name", "data_type", "comment") :: Row("a", "int", "test") :: Nil
Row("a", "int", "test") :: Nil
)
}
}