@@ -309,12 +309,29 @@ case class DescribeTableCommand(table: TableIdentifier, isExtended: Boolean, isF
 
   // Shows data columns and partitioned columns (if any)
   private def describe(table: CatalogTable, buffer: ArrayBuffer[Row]): Unit = {
-    describeSchema(table.schema, buffer)
+    if (DDLUtils.isDatasourceTable(table)) {
+      val schema = DDLUtils.getSchemaFromTableProperties(table)
 
-    if (table.partitionColumns.nonEmpty) {
-      append(buffer, "# Partition Information", "", "")
-      append(buffer, s"# ${output(0).name}", output(1).name, output(2).name)
-      describeSchema(table.partitionColumns, buffer)
+      if (schema.isEmpty) {
+        append(buffer, "# Schema of this table is inferred at runtime", "", "")
+      } else {
+        schema.foreach(describeSchema(_, buffer))
+      }
+
+      val partCols = DDLUtils.getPartitionColumnsFromTableProperties(table)
+      if (partCols.nonEmpty) {
+        append(buffer, "# Partition Information", "", "")
+        append(buffer, s"# ${output.head.name}", "", "")
+        partCols.foreach(col => append(buffer, col, "", ""))
+      }
+    } else {
+      describeSchema(table.schema, buffer)
+
+      if (table.partitionColumns.nonEmpty) {
+        append(buffer, "# Partition Information", "", "")
+        append(buffer, s"# ${output.head.name}", output(1).name, output(2).name)
+        describeSchema(table.partitionColumns, buffer)
+      }
     }
   }
 
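For reference, DDLUtils.getSchemaFromTableProperties returns the user-specified schema, if any, rebuilt from table properties. A minimal sketch of that lookup, assuming the spark.sql.sources.schema.numParts / spark.sql.sources.schema.part.N key layout that datasource tables use to persist their schema as JSON; the helper name schemaFromProperties is hypothetical, not the actual implementation:

import org.apache.spark.sql.types.{DataType, StructType}

// Illustrative only: the schema JSON is split across numbered property
// parts because some metastores cap the length of a single property value.
def schemaFromProperties(props: Map[String, String]): Option[StructType] = {
  props.get("spark.sql.sources.schema.numParts").map { numParts =>
    // Reassemble the JSON string from its numbered parts, then parse it.
    val json = (0 until numParts.toInt)
      .map(i => props(s"spark.sql.sources.schema.part.$i"))
      .mkString
    DataType.fromJson(json).asInstanceOf[StructType]
  }
}

When no such properties exist, the schema was never stored, which is exactly the case the new "# Schema of this table is inferred at runtime" row reports.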
@@ -338,26 +355,47 @@ case class DescribeTableCommand(table: TableIdentifier, isExtended: Boolean, isF
     append(buffer, "Table Type:", table.tableType.name, "")
 
     append(buffer, "Table Parameters:", "", "")
-    table.properties.foreach { case (key, value) =>
+    table.properties.filterNot {
+      // Hides schema properties that hold user-defined schema, partition columns, and bucketing
+      // information since they are already extracted and shown in other parts.
+      case (key, _) => key.startsWith("spark.sql.sources.schema")
+    }.foreach { case (key, value) =>
       append(buffer, s"$key", value, "")
     }
 
+    describeStorageInfo(table, buffer)
+  }
+
+  private def describeStorageInfo(metadata: CatalogTable, buffer: ArrayBuffer[Row]): Unit = {
     append(buffer, "", "", "")
     append(buffer, "# Storage Information", "", "")
-    table.storage.serde.foreach(serdeLib => append(buffer, "SerDe Library:", serdeLib, ""))
-    table.storage.inputFormat.foreach(format => append(buffer, "InputFormat:", format, ""))
-    table.storage.outputFormat.foreach(format => append(buffer, "OutputFormat:", format, ""))
-    append(buffer, "Compressed:", if (table.storage.compressed) "Yes" else "No", "")
-    append(buffer, "Num Buckets:", table.numBuckets.toString, "")
-    append(buffer, "Bucket Columns:", table.bucketColumnNames.mkString("[", ", ", "]"), "")
-    append(buffer, "Sort Columns:", table.sortColumnNames.mkString("[", ", ", "]"), "")
+    metadata.storage.serde.foreach(serdeLib => append(buffer, "SerDe Library:", serdeLib, ""))
+    metadata.storage.inputFormat.foreach(format => append(buffer, "InputFormat:", format, ""))
+    metadata.storage.outputFormat.foreach(format => append(buffer, "OutputFormat:", format, ""))
+    append(buffer, "Compressed:", if (metadata.storage.compressed) "Yes" else "No", "")
+    describeBucketingInfo(metadata, buffer)
 
     append(buffer, "Storage Desc Parameters:", "", "")
-    table.storage.serdeProperties.foreach { case (key, value) =>
+    metadata.storage.serdeProperties.foreach { case (key, value) =>
       append(buffer, s"$key", value, "")
     }
   }
 
+  private def describeBucketingInfo(metadata: CatalogTable, buffer: ArrayBuffer[Row]): Unit = {
+    if (DDLUtils.isDatasourceTable(metadata)) {
+      val numBuckets = DDLUtils.getNumBucketFromTableProperties(metadata)
+      val bucketCols = DDLUtils.getBucketingColumnsFromTableProperties(metadata)
+      val sortCols = DDLUtils.getSortingColumnsFromTableProperties(metadata)
+      append(buffer, "Num Buckets:", numBuckets.map(_.toString).getOrElse(""), "")
+      append(buffer, "Bucket Columns:", bucketCols.mkString("[", ", ", "]"), "")
+      append(buffer, "Sort Columns:", sortCols.mkString("[", ", ", "]"), "")
+    } else {
+      append(buffer, "Num Buckets:", metadata.numBuckets.toString, "")
+      append(buffer, "Bucket Columns:", metadata.bucketColumnNames.mkString("[", ", ", "]"), "")
+      append(buffer, "Sort Columns:", metadata.sortColumnNames.mkString("[", ", ", "]"), "")
+    }
+  }
+
   private def describeSchema(schema: Seq[CatalogColumn], buffer: ArrayBuffer[Row]): Unit = {
     schema.foreach { column =>
       append(buffer, column.name, column.dataType.toLowerCase, column.comment.orNull)
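The bucketing helpers likewise read bucketing metadata back from table properties for datasource tables. A rough sketch of what getNumBucketFromTableProperties and getBucketingColumnsFromTableProperties might do, assuming spark.sql.sources.schema.numBuckets, spark.sql.sources.schema.numBucketCols, and spark.sql.sources.schema.bucketCol.N keys; the property names and helper bodies below are an assumption for illustration, not the exact implementation:

// Illustrative only: bucketing metadata for a datasource table lives in
// table properties rather than in the metastore's native bucketing fields,
// so it is absent (empty Option / empty Seq) unless bucketing was specified.
def numBucketsFromProperties(props: Map[String, String]): Option[Int] =
  props.get("spark.sql.sources.schema.numBuckets").map(_.toInt)

def bucketColumnsFromProperties(props: Map[String, String]): Seq[String] =
  props.get("spark.sql.sources.schema.numBucketCols").toSeq.flatMap { n =>
    (0 until n.toInt).map(i => props(s"spark.sql.sources.schema.bucketCol.$i"))
  }

This is also why the Table Parameters section above filters out every key starting with spark.sql.sources.schema: those entries back the schema, partitioning, and bucketing rows already shown elsewhere and would otherwise be printed twice.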