diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 2a6bad6366d5..761a0d508e87 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -1005,7 +1005,7 @@ trait ShowCreateTableCommandBase {
   protected def showTableProperties(metadata: CatalogTable, builder: StringBuilder): Unit = {
     if (metadata.properties.nonEmpty) {
-      val props = metadata.properties.map { case (key, value) =>
+      val props = metadata.properties.toSeq.sortBy(_._1).map { case (key, value) =>
         s"'${escapeSingleQuotedString(key)}' = '${escapeSingleQuotedString(value)}'"
       }
 
@@ -1158,8 +1158,9 @@ case class ShowCreateTableCommand(
       // If it is a Hive table, we already convert its metadata and fill in a provider.
       builder ++= s"USING ${metadata.provider.get}\n"
 
-      val dataSourceOptions = conf.redactOptions(metadata.storage.properties).map {
-        case (key, value) => s"${quoteIdentifier(key)} '${escapeSingleQuotedString(value)}'"
+      val dataSourceOptions = conf.redactOptions(metadata.storage.properties).toSeq.sortBy(_._1).map {
+        case (key, value) =>
+          s"'${escapeSingleQuotedString(key)}' = '${escapeSingleQuotedString(value)}'"
       }
 
       if (dataSourceOptions.nonEmpty) {
diff --git a/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out b/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out
index 88fef8908638..e7399e45c357 100644
--- a/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out
@@ -50,7 +50,7 @@ CREATE TABLE `default`.`tbl` (
   `c` INT)
 USING parquet
 OPTIONS (
-  `a` '1')
+  'a' = '1')
 
 
 -- !query
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ShowCreateTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ShowCreateTableSuite.scala
index 683929434878..13983120955f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ShowCreateTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ShowCreateTableSuite.scala
@@ -200,6 +200,32 @@ abstract class ShowCreateTableSuite extends QueryTest with SQLTestUtils {
     }
   }
 
+  test("SPARK-37494: Unify v1 and v2 option output") {
+    withTable("ddl_test") {
+      sql(
+        s"""CREATE TABLE ddl_test (
+           | a STRING
+           |)
+           |USING json
+           |TBLPROPERTIES (
+           | 'b' = '1',
+           | 'a' = '2')
+           |OPTIONS (
+           | k4 'v4',
+           | `k3` 'v3',
+           | 'k5' 'v5',
+           | 'k1' = 'v1',
+           | k2 = 'v2'
+           |)
+        """.stripMargin
+      )
+      val expected = "CREATE TABLE `default`.`ddl_test` ( `a` STRING) USING json" +
+        " OPTIONS ( 'k1' = 'v1', 'k2' = 'v2', 'k3' = 'v3', 'k4' = 'v4', 'k5' = 'v5')" +
+        " TBLPROPERTIES ( 'a' = '2', 'b' = '1')"
+      assert(getShowDDL("SHOW CREATE TABLE ddl_test") == expected)
+    }
+  }
+
   protected def getShowDDL(showCreateTableSql: String): String = {
     sql(showCreateTableSql).head().getString(0).split("\n").map(_.trim).mkString(" ")
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index 8842db2a2aca..87bdc2e721ad 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -1207,8 +1207,8 @@ class JDBCSuite extends QueryTest
       .select("createtab_stmt").collect()
     assert(createTabStmt.length === 1)
     createTabStmt.foreach { r =>
-      assert(r.getString(0).contains(s"`url` '${Utils.REDACTION_REPLACEMENT_TEXT}'"))
-      assert(r.getString(0).contains(s"`password` '${Utils.REDACTION_REPLACEMENT_TEXT}'"))
+      assert(r.getString(0).contains(s"'url' = '${Utils.REDACTION_REPLACEMENT_TEXT}'"))
+      assert(r.getString(0).contains(s"'password' = '${Utils.REDACTION_REPLACEMENT_TEXT}'"))
     }
   }
 }