From f51e9dec932c876bbd59e08c17aced30885bac59 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Wed, 25 Nov 2020 13:01:54 +0300 Subject: [PATCH 1/5] Move tests to AlterTablePartitionParserSuite --- .../sql/catalyst/parser/DDLParserSuite.scala | 109 ------------- .../AlterTablePartitionParserSuite.scala | 146 ++++++++++++++++++ 2 files changed, 146 insertions(+), 109 deletions(-) create mode 100644 sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTablePartitionParserSuite.scala diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala index 997c642276bf..cfe605309c7a 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala @@ -1657,39 +1657,6 @@ class DDLParserSuite extends AnalysisTest { comparePlans(parsed4, expected4) } - test("alter table: recover partitions") { - comparePlans( - parsePlan("ALTER TABLE a.b.c RECOVER PARTITIONS"), - AlterTableRecoverPartitionsStatement(Seq("a", "b", "c"))) - } - - test("alter table: add partition") { - val sql1 = - """ - |ALTER TABLE a.b.c ADD IF NOT EXISTS PARTITION - |(dt='2008-08-08', country='us') LOCATION 'location1' PARTITION - |(dt='2009-09-09', country='uk') - """.stripMargin - val sql2 = "ALTER TABLE a.b.c ADD PARTITION (dt='2008-08-08') LOCATION 'loc'" - - val parsed1 = parsePlan(sql1) - val parsed2 = parsePlan(sql2) - - val expected1 = AlterTableAddPartition( - UnresolvedTable(Seq("a", "b", "c"), "ALTER TABLE ... ADD PARTITION ..."), - Seq( - UnresolvedPartitionSpec(Map("dt" -> "2008-08-08", "country" -> "us"), Some("location1")), - UnresolvedPartitionSpec(Map("dt" -> "2009-09-09", "country" -> "uk"), None)), - ifNotExists = true) - val expected2 = AlterTableAddPartition( - UnresolvedTable(Seq("a", "b", "c"), "ALTER TABLE ... ADD PARTITION ..."), - Seq(UnresolvedPartitionSpec(Map("dt" -> "2008-08-08"), Some("loc"))), - ifNotExists = false) - - comparePlans(parsed1, expected1) - comparePlans(parsed2, expected2) - } - test("alter view: add partition (not supported)") { assertUnsupported( """ @@ -1699,82 +1666,6 @@ class DDLParserSuite extends AnalysisTest { """.stripMargin) } - test("alter table: rename partition") { - val sql1 = - """ - |ALTER TABLE table_name PARTITION (dt='2008-08-08', country='us') - |RENAME TO PARTITION (dt='2008-09-09', country='uk') - """.stripMargin - val parsed1 = parsePlan(sql1) - val expected1 = AlterTableRenamePartitionStatement( - Seq("table_name"), - Map("dt" -> "2008-08-08", "country" -> "us"), - Map("dt" -> "2008-09-09", "country" -> "uk")) - comparePlans(parsed1, expected1) - - val sql2 = - """ - |ALTER TABLE a.b.c PARTITION (ds='2017-06-10') - |RENAME TO PARTITION (ds='2018-06-10') - """.stripMargin - val parsed2 = parsePlan(sql2) - val expected2 = AlterTableRenamePartitionStatement( - Seq("a", "b", "c"), - Map("ds" -> "2017-06-10"), - Map("ds" -> "2018-06-10")) - comparePlans(parsed2, expected2) - } - - // ALTER TABLE table_name DROP [IF EXISTS] PARTITION spec1[, PARTITION spec2, ...] - // ALTER VIEW table_name DROP [IF EXISTS] PARTITION spec1[, PARTITION spec2, ...] 
- test("alter table: drop partition") { - val sql1_table = - """ - |ALTER TABLE table_name DROP IF EXISTS PARTITION - |(dt='2008-08-08', country='us'), PARTITION (dt='2009-09-09', country='uk') - """.stripMargin - val sql2_table = - """ - |ALTER TABLE table_name DROP PARTITION - |(dt='2008-08-08', country='us'), PARTITION (dt='2009-09-09', country='uk') - """.stripMargin - val sql1_view = sql1_table.replace("TABLE", "VIEW") - val sql2_view = sql2_table.replace("TABLE", "VIEW") - - val parsed1_table = parsePlan(sql1_table) - val parsed2_table = parsePlan(sql2_table) - val parsed1_purge = parsePlan(sql1_table + " PURGE") - - assertUnsupported(sql1_view) - assertUnsupported(sql2_view) - - val expected1_table = AlterTableDropPartition( - UnresolvedTable(Seq("table_name"), "ALTER TABLE ... DROP PARTITION ..."), - Seq( - UnresolvedPartitionSpec(Map("dt" -> "2008-08-08", "country" -> "us")), - UnresolvedPartitionSpec(Map("dt" -> "2009-09-09", "country" -> "uk"))), - ifExists = true, - purge = false, - retainData = false) - val expected2_table = expected1_table.copy(ifExists = false) - val expected1_purge = expected1_table.copy(purge = true) - - comparePlans(parsed1_table, expected1_table) - comparePlans(parsed2_table, expected2_table) - comparePlans(parsed1_purge, expected1_purge) - - val sql3_table = "ALTER TABLE a.b.c DROP IF EXISTS PARTITION (ds='2017-06-10')" - val expected3_table = AlterTableDropPartition( - UnresolvedTable(Seq("a", "b", "c"), "ALTER TABLE ... DROP PARTITION ..."), - Seq(UnresolvedPartitionSpec(Map("ds" -> "2017-06-10"))), - ifExists = true, - purge = false, - retainData = false) - - val parsed3_table = parsePlan(sql3_table) - comparePlans(parsed3_table, expected3_table) - } - test("show current namespace") { comparePlans( parsePlan("SHOW CURRENT NAMESPACE"), diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTablePartitionParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTablePartitionParserSuite.scala new file mode 100644 index 000000000000..a20fc8adf623 --- /dev/null +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTablePartitionParserSuite.scala @@ -0,0 +1,146 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.spark.sql.execution.command + +import java.util.Locale + +import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedPartitionSpec, UnresolvedTable} +import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan +import org.apache.spark.sql.catalyst.parser.ParseException +import org.apache.spark.sql.catalyst.plans.logical.{AlterTableAddPartition, AlterTableDropPartition, AlterTableRecoverPartitionsStatement, AlterTableRenamePartitionStatement} +import org.apache.spark.sql.test.SharedSparkSession + +class AlterTablePartitionParserSuite extends AnalysisTest with SharedSparkSession { + + private def assertUnsupported(sql: String, containsThesePhrases: Seq[String] = Seq()): Unit = { + val e = intercept[ParseException] { + parsePlan(sql) + } + assert(e.getMessage.toLowerCase(Locale.ROOT).contains("operation not allowed")) + containsThesePhrases.foreach { p => + assert(e.getMessage.toLowerCase(Locale.ROOT).contains(p.toLowerCase(Locale.ROOT))) + } + } + + test("ALTER TABLE .. ADD PARTITION") { + val sql1 = + """ + |ALTER TABLE a.b.c ADD IF NOT EXISTS PARTITION + |(dt='2008-08-08', country='us') LOCATION 'location1' PARTITION + |(dt='2009-09-09', country='uk') + """.stripMargin + val sql2 = "ALTER TABLE a.b.c ADD PARTITION (dt='2008-08-08') LOCATION 'loc'" + + val parsed1 = parsePlan(sql1) + val parsed2 = parsePlan(sql2) + + val expected1 = AlterTableAddPartition( + UnresolvedTable(Seq("a", "b", "c"), "ALTER TABLE ... ADD PARTITION ..."), + Seq( + UnresolvedPartitionSpec(Map("dt" -> "2008-08-08", "country" -> "us"), Some("location1")), + UnresolvedPartitionSpec(Map("dt" -> "2009-09-09", "country" -> "uk"), None)), + ifNotExists = true) + val expected2 = AlterTableAddPartition( + UnresolvedTable(Seq("a", "b", "c"), "ALTER TABLE ... ADD PARTITION ..."), + Seq(UnresolvedPartitionSpec(Map("dt" -> "2008-08-08"), Some("loc"))), + ifNotExists = false) + + comparePlans(parsed1, expected1) + comparePlans(parsed2, expected2) + } + + test("ALTER TABLE .. DROP PARTITION") { + val sql1_table = + """ + |ALTER TABLE table_name DROP IF EXISTS PARTITION + |(dt='2008-08-08', country='us'), PARTITION (dt='2009-09-09', country='uk') + """.stripMargin + val sql2_table = + """ + |ALTER TABLE table_name DROP PARTITION + |(dt='2008-08-08', country='us'), PARTITION (dt='2009-09-09', country='uk') + """.stripMargin + val sql1_view = sql1_table.replace("TABLE", "VIEW") + val sql2_view = sql2_table.replace("TABLE", "VIEW") + + val parsed1_table = parsePlan(sql1_table) + val parsed2_table = parsePlan(sql2_table) + val parsed1_purge = parsePlan(sql1_table + " PURGE") + + assertUnsupported(sql1_view) + assertUnsupported(sql2_view) + + val expected1_table = AlterTableDropPartition( + UnresolvedTable(Seq("table_name"), "ALTER TABLE ... DROP PARTITION ..."), + Seq( + UnresolvedPartitionSpec(Map("dt" -> "2008-08-08", "country" -> "us")), + UnresolvedPartitionSpec(Map("dt" -> "2009-09-09", "country" -> "uk"))), + ifExists = true, + purge = false, + retainData = false) + val expected2_table = expected1_table.copy(ifExists = false) + val expected1_purge = expected1_table.copy(purge = true) + + comparePlans(parsed1_table, expected1_table) + comparePlans(parsed2_table, expected2_table) + comparePlans(parsed1_purge, expected1_purge) + + val sql3_table = "ALTER TABLE a.b.c DROP IF EXISTS PARTITION (ds='2017-06-10')" + val expected3_table = AlterTableDropPartition( + UnresolvedTable(Seq("a", "b", "c"), "ALTER TABLE ... 
DROP PARTITION ..."), + Seq(UnresolvedPartitionSpec(Map("ds" -> "2017-06-10"))), + ifExists = true, + purge = false, + retainData = false) + + val parsed3_table = parsePlan(sql3_table) + comparePlans(parsed3_table, expected3_table) + } + + test("ALTER TABLE .. RECOVER PARTITIONS") { + comparePlans( + parsePlan("ALTER TABLE a.b.c RECOVER PARTITIONS"), + AlterTableRecoverPartitionsStatement(Seq("a", "b", "c"))) + } + + test("ALTER TABLE .. PARTITION RENAME") { + val sql1 = + """ + |ALTER TABLE table_name PARTITION (dt='2008-08-08', country='us') + |RENAME TO PARTITION (dt='2008-09-09', country='uk') + """.stripMargin + val parsed1 = parsePlan(sql1) + val expected1 = AlterTableRenamePartitionStatement( + Seq("table_name"), + Map("dt" -> "2008-08-08", "country" -> "us"), + Map("dt" -> "2008-09-09", "country" -> "uk")) + comparePlans(parsed1, expected1) + + val sql2 = + """ + |ALTER TABLE a.b.c PARTITION (ds='2017-06-10') + |RENAME TO PARTITION (ds='2018-06-10') + """.stripMargin + val parsed2 = parsePlan(sql2) + val expected2 = AlterTableRenamePartitionStatement( + Seq("a", "b", "c"), + Map("ds" -> "2017-06-10"), + Map("ds" -> "2018-06-10")) + comparePlans(parsed2, expected2) + } +} From f1d3a4a242d914a8113ac6a40abb6ab8e42cba12 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Wed, 25 Nov 2020 13:24:27 +0300 Subject: [PATCH 2/5] Move AlterTablePartitionV2SQLSuite.scala to v2/AlterTablePartitionSuite.scala --- .../command/v2/AlterTablePartitionSuite.scala} | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) rename sql/core/src/test/scala/org/apache/spark/sql/{connector/AlterTablePartitionV2SQLSuite.scala => execution/command/v2/AlterTablePartitionSuite.scala} (98%) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTablePartitionV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTablePartitionSuite.scala similarity index 98% rename from sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTablePartitionV2SQLSuite.scala rename to sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTablePartitionSuite.scala index 4cacd5ec2b49..b6870c2333e8 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTablePartitionV2SQLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTablePartitionSuite.scala @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.spark.sql.connector +package org.apache.spark.sql.execution.command.v2 import java.time.{LocalDate, LocalDateTime} @@ -23,17 +23,17 @@ import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.analysis.{NoSuchPartitionsException, PartitionsAlreadyExistException} import org.apache.spark.sql.catalyst.util.{DateTimeTestUtils, DateTimeUtils} +import org.apache.spark.sql.connector.{DatasourceV2SQLBase, InMemoryPartitionTable} import org.apache.spark.sql.connector.catalog.{CatalogV2Implicits, Identifier} import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Implicits import org.apache.spark.sql.internal.SQLConf import org.apache.spark.unsafe.types.UTF8String -class AlterTablePartitionV2SQLSuite extends DatasourceV2SQLBase { +class AlterTablePartitionSuite extends DatasourceV2SQLBase { import CatalogV2Implicits._ import DataSourceV2Implicits._ - test("ALTER TABLE RECOVER PARTITIONS") { val t = "testcat.ns1.ns2.tbl" withTable(t) { From 810b3fa040ea872255f2f0933872d857dfb8d10c Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Wed, 25 Nov 2020 13:33:54 +0300 Subject: [PATCH 3/5] Move the test "set partition location" to v2/AlterTablePartitionSuite --- .../spark/sql/connector/AlterTableTests.scala | 13 ------------- .../command/v2/AlterTablePartitionSuite.scala | 16 +++++++++++++++- 2 files changed, 15 insertions(+), 14 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala index afc51f45c54e..6dc0d1970400 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala @@ -1092,19 +1092,6 @@ trait AlterTableTests extends SharedSparkSession { } } - test("AlterTable: set partition location") { - val t = s"${catalogAndNamespace}table_name" - withTable(t) { - sql(s"CREATE TABLE $t (id int) USING $v2Format") - - val exc = intercept[AnalysisException] { - sql(s"ALTER TABLE $t PARTITION(ds='2017-06-10') SET LOCATION 's3://bucket/path'") - } - assert(exc.getMessage.contains( - "ALTER TABLE SET LOCATION does not support partition for v2 tables")) - } - } - test("AlterTable: set table property") { val t = s"${catalogAndNamespace}table_name" withTable(t) { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTablePartitionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTablePartitionSuite.scala index b6870c2333e8..2a01193fbf46 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTablePartitionSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTablePartitionSuite.scala @@ -23,7 +23,7 @@ import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.analysis.{NoSuchPartitionsException, PartitionsAlreadyExistException} import org.apache.spark.sql.catalyst.util.{DateTimeTestUtils, DateTimeUtils} -import org.apache.spark.sql.connector.{DatasourceV2SQLBase, InMemoryPartitionTable} +import org.apache.spark.sql.connector.{DatasourceV2SQLBase, FakeV2Provider, InMemoryPartitionTable} import org.apache.spark.sql.connector.catalog.{CatalogV2Implicits, Identifier} import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Implicits import org.apache.spark.sql.internal.SQLConf @@ 
-243,4 +243,18 @@ class AlterTablePartitionSuite extends DatasourceV2SQLBase { assert(!partTable.partitionExists(expectedPartition)) } } + + test("AlterTable: set partition location") { + val v2Format = classOf[FakeV2Provider].getName + val t = s"testcat.ns1.ns2.table_name" + withTable(t) { + sql(s"CREATE TABLE $t (id int) USING $v2Format") + + val exc = intercept[AnalysisException] { + sql(s"ALTER TABLE $t PARTITION(ds='2017-06-10') SET LOCATION 's3://bucket/path'") + } + assert(exc.getMessage.contains( + "ALTER TABLE SET LOCATION does not support partition for v2 tables")) + } + } } From 3abb93dcbf9f718862cdf114f633912261fbf687 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Wed, 25 Nov 2020 13:44:52 +0300 Subject: [PATCH 4/5] Move some tests from DDLParserSuite --- .../AlterTablePartitionParserSuite.scala | 56 ++++++++++++++++++- .../execution/command/DDLParserSuite.scala | 52 ----------------- 2 files changed, 55 insertions(+), 53 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTablePartitionParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTablePartitionParserSuite.scala index a20fc8adf623..290859eb43f8 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTablePartitionParserSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTablePartitionParserSuite.scala @@ -23,13 +23,15 @@ import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedPartition import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan import org.apache.spark.sql.catalyst.parser.ParseException import org.apache.spark.sql.catalyst.plans.logical.{AlterTableAddPartition, AlterTableDropPartition, AlterTableRecoverPartitionsStatement, AlterTableRenamePartitionStatement} +import org.apache.spark.sql.execution.SparkSqlParser import org.apache.spark.sql.test.SharedSparkSession class AlterTablePartitionParserSuite extends AnalysisTest with SharedSparkSession { + private lazy val parser = new SparkSqlParser() private def assertUnsupported(sql: String, containsThesePhrases: Seq[String] = Seq()): Unit = { val e = intercept[ParseException] { - parsePlan(sql) + parser.parsePlan(sql) } assert(e.getMessage.toLowerCase(Locale.ROOT).contains("operation not allowed")) containsThesePhrases.foreach { p => @@ -143,4 +145,56 @@ class AlterTablePartitionParserSuite extends AnalysisTest with SharedSparkSessio Map("ds" -> "2018-06-10")) comparePlans(parsed2, expected2) } + + test("alter table: exchange partition (not supported)") { + assertUnsupported( + """ + |ALTER TABLE table_name_1 EXCHANGE PARTITION + |(dt='2008-08-08', country='us') WITH TABLE table_name_2 + """.stripMargin) + } + + test("alter table: archive partition (not supported)") { + assertUnsupported("ALTER TABLE table_name ARCHIVE PARTITION (dt='2008-08-08', country='us')") + } + + test("alter table: unarchive partition (not supported)") { + assertUnsupported("ALTER TABLE table_name UNARCHIVE PARTITION (dt='2008-08-08', country='us')") + } + + test("alter table: set file format (not allowed)") { + assertUnsupported( + "ALTER TABLE table_name SET FILEFORMAT INPUTFORMAT 'test' OUTPUTFORMAT 'test'") + assertUnsupported( + "ALTER TABLE table_name PARTITION (dt='2008-08-08', country='us') " + + "SET FILEFORMAT PARQUET") + } + + test("alter table: touch (not supported)") { + assertUnsupported("ALTER TABLE table_name TOUCH") + assertUnsupported("ALTER TABLE table_name TOUCH PARTITION (dt='2008-08-08', country='us')") + 
} + + test("alter table: compact (not supported)") { + assertUnsupported("ALTER TABLE table_name COMPACT 'compaction_type'") + assertUnsupported( + """ + |ALTER TABLE table_name PARTITION (dt='2008-08-08', country='us') + |COMPACT 'MAJOR' + """.stripMargin) + } + + test("alter table: concatenate (not supported)") { + assertUnsupported("ALTER TABLE table_name CONCATENATE") + assertUnsupported( + "ALTER TABLE table_name PARTITION (dt='2008-08-08', country='us') CONCATENATE") + } + + test("duplicate columns in partition specs") { + val e = intercept[ParseException] { + parser.parsePlan( + "ALTER TABLE dbx.tab1 PARTITION (a='1', a='2') RENAME TO PARTITION (a='100', a='200')") + }.getMessage + assert(e.contains("Found duplicate keys 'a'")) + } } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala index 8ce4bcbadc22..728adda1d82a 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala @@ -271,50 +271,6 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession { containsThesePhrases = Seq("key_without_value")) } - test("alter table: exchange partition (not supported)") { - assertUnsupported( - """ - |ALTER TABLE table_name_1 EXCHANGE PARTITION - |(dt='2008-08-08', country='us') WITH TABLE table_name_2 - """.stripMargin) - } - - test("alter table: archive partition (not supported)") { - assertUnsupported("ALTER TABLE table_name ARCHIVE PARTITION (dt='2008-08-08', country='us')") - } - - test("alter table: unarchive partition (not supported)") { - assertUnsupported("ALTER TABLE table_name UNARCHIVE PARTITION (dt='2008-08-08', country='us')") - } - - test("alter table: set file format (not allowed)") { - assertUnsupported( - "ALTER TABLE table_name SET FILEFORMAT INPUTFORMAT 'test' OUTPUTFORMAT 'test'") - assertUnsupported( - "ALTER TABLE table_name PARTITION (dt='2008-08-08', country='us') " + - "SET FILEFORMAT PARQUET") - } - - test("alter table: touch (not supported)") { - assertUnsupported("ALTER TABLE table_name TOUCH") - assertUnsupported("ALTER TABLE table_name TOUCH PARTITION (dt='2008-08-08', country='us')") - } - - test("alter table: compact (not supported)") { - assertUnsupported("ALTER TABLE table_name COMPACT 'compaction_type'") - assertUnsupported( - """ - |ALTER TABLE table_name PARTITION (dt='2008-08-08', country='us') - |COMPACT 'MAJOR' - """.stripMargin) - } - - test("alter table: concatenate (not supported)") { - assertUnsupported("ALTER TABLE table_name CONCATENATE") - assertUnsupported( - "ALTER TABLE table_name PARTITION (dt='2008-08-08', country='us') CONCATENATE") - } - test("alter table: cluster by (not supported)") { assertUnsupported( "ALTER TABLE table_name CLUSTERED BY (col_name) SORTED BY (col2_name) INTO 3 BUCKETS") @@ -351,14 +307,6 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession { assert(e.contains("Found duplicate keys 'key1'")) } - test("duplicate columns in partition specs") { - val e = intercept[ParseException] { - parser.parsePlan( - "ALTER TABLE dbx.tab1 PARTITION (a='1', a='2') RENAME TO PARTITION (a='100', a='200')") - }.getMessage - assert(e.contains("Found duplicate keys 'a'")) - } - test("Test CTAS #1") { val s1 = """ From 665d7d84a15053cafb42cf2a65b3093c60d85f55 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Wed, 25 Nov 2020 17:17:18 +0300 Subject: [PATCH 5/5] Removed 
an unused import --- .../org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala index cfe605309c7a..443b9d76744b 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.parser import java.util.Locale import org.apache.spark.sql.AnalysisException -import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView, LocalTempView, PersistedView, UnresolvedAttribute, UnresolvedFunc, UnresolvedNamespace, UnresolvedPartitionSpec, UnresolvedRelation, UnresolvedStar, UnresolvedTable, UnresolvedTableOrView} +import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView, LocalTempView, PersistedView, UnresolvedAttribute, UnresolvedFunc, UnresolvedNamespace, UnresolvedRelation, UnresolvedStar, UnresolvedTable, UnresolvedTableOrView} import org.apache.spark.sql.catalyst.catalog.{ArchiveResource, BucketSpec, FileResource, FunctionResource, JarResource} import org.apache.spark.sql.catalyst.expressions.{EqualTo, Literal} import org.apache.spark.sql.catalyst.plans.logical._
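
For readers following the test relocation above, a minimal sketch of what one more check in the relocated AlterTablePartitionParserSuite could look like, reusing the same parsePlan/comparePlans pattern as the moved tests. The test name and the single-part table identifier are illustrative only and are not part of the patches.

    test("ALTER TABLE .. RECOVER PARTITIONS with a single-part identifier") {
      // Parse the statement with the Catalyst SQL parser and compare the
      // resulting logical plan against the expected statement node, mirroring
      // the "ALTER TABLE a.b.c RECOVER PARTITIONS" test moved in patch 1.
      comparePlans(
        parsePlan("ALTER TABLE tbl RECOVER PARTITIONS"),
        AlterTableRecoverPartitionsStatement(Seq("tbl")))
    }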