@@ -1629,32 +1629,6 @@ class DDLParserSuite extends AnalysisTest {
TruncateTableStatement(Seq("a", "b", "c"), Some(Map("ds" -> "2017-06-10"))))
}

test("SHOW PARTITIONS") {
val sql1 = "SHOW PARTITIONS t1"
val sql2 = "SHOW PARTITIONS db1.t1"
val sql3 = "SHOW PARTITIONS t1 PARTITION(partcol1='partvalue', partcol2='partvalue')"
val sql4 = "SHOW PARTITIONS a.b.c"
val sql5 = "SHOW PARTITIONS a.b.c PARTITION(ds='2017-06-10')"

val parsed1 = parsePlan(sql1)
val expected1 = ShowPartitionsStatement(Seq("t1"), None)
val parsed2 = parsePlan(sql2)
val expected2 = ShowPartitionsStatement(Seq("db1", "t1"), None)
val parsed3 = parsePlan(sql3)
val expected3 = ShowPartitionsStatement(Seq("t1"),
Some(Map("partcol1" -> "partvalue", "partcol2" -> "partvalue")))
val parsed4 = parsePlan(sql4)
val expected4 = ShowPartitionsStatement(Seq("a", "b", "c"), None)
val parsed5 = parsePlan(sql5)
val expected5 = ShowPartitionsStatement(Seq("a", "b", "c"), Some(Map("ds" -> "2017-06-10")))

comparePlans(parsed1, expected1)
comparePlans(parsed2, expected2)
comparePlans(parsed3, expected3)
comparePlans(parsed4, expected4)
comparePlans(parsed5, expected5)
}

test("REFRESH TABLE") {
comparePlans(
parsePlan("REFRESH TABLE a.b.c"),
@@ -1909,21 +1909,6 @@ class DataSourceV2SQLSuite
}
}

test("SHOW PARTITIONS") {
val t = "testcat.ns1.ns2.tbl"
withTable(t) {
sql(
s"""
|CREATE TABLE $t (id bigint, data string)
|USING foo
|PARTITIONED BY (id)
""".stripMargin)

testV1Command("SHOW PARTITIONS", t)
testV1Command("SHOW PARTITIONS", s"$t PARTITION(id='1')")
}
}

test("LOAD DATA INTO TABLE") {
val t = "testcat.ns1.ns2.tbl"
withTable(t) {
@@ -181,7 +181,6 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils {
sql(s"SHOW CREATE TABLE $viewName")
}.getMessage
assert(e3.contains(s"$viewName is a temp view not table or permanent view"))
assertNoSuchTable(s"SHOW PARTITIONS $viewName")
val e4 = intercept[AnalysisException] {
sql(s"ANALYZE TABLE $viewName COMPUTE STATISTICS")
}.getMessage
@@ -359,14 +359,6 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession {
assert(e.contains("Found duplicate keys 'a'"))
}

test("empty values in non-optional partition specs") {
val e = intercept[ParseException] {
parser.parsePlan(
"SHOW PARTITIONS dbx.tab1 PARTITION (a='1', b)")
}.getMessage
assert(e.contains("Found an empty partition key 'b'"))
}

test("Test CTAS #1") {
val s1 =
"""
@@ -0,0 +1,52 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.execution.command

import org.apache.spark.sql.catalyst.analysis.AnalysisTest
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.catalyst.plans.logical.ShowPartitionsStatement
import org.apache.spark.sql.execution.SparkSqlParser
import org.apache.spark.sql.test.SharedSparkSession

class ShowPartitionsParserSuite extends AnalysisTest with SharedSparkSession {
test("SHOW PARTITIONS") {
Seq(
"SHOW PARTITIONS t1" -> ShowPartitionsStatement(Seq("t1"), None),
"SHOW PARTITIONS db1.t1" -> ShowPartitionsStatement(Seq("db1", "t1"), None),
"SHOW PARTITIONS t1 PARTITION(partcol1='partvalue', partcol2='partvalue')" ->
ShowPartitionsStatement(
Seq("t1"),
Some(Map("partcol1" -> "partvalue", "partcol2" -> "partvalue"))),
"SHOW PARTITIONS a.b.c" -> ShowPartitionsStatement(Seq("a", "b", "c"), None),
"SHOW PARTITIONS a.b.c PARTITION(ds='2017-06-10')" ->
ShowPartitionsStatement(Seq("a", "b", "c"), Some(Map("ds" -> "2017-06-10")))
).foreach { case (sql, expected) =>
val parsed = parsePlan(sql)
comparePlans(parsed, expected)
}
}

test("empty values in non-optional partition specs") {
val e = intercept[ParseException] {
new SparkSqlParser().parsePlan(
"SHOW PARTITIONS dbx.tab1 PARTITION (a='1', b)")
}.getMessage
assert(e.contains("Found an empty partition key 'b'"))
}
}
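
Both tests above compare against ShowPartitionsStatement, the parsed-but-unresolved logical node. For reference, its shape is roughly the following sketch (the real definition lives in catalyst's statements.scala, where the spec type is the TablePartitionSpec alias for Map[String, String]):

import org.apache.spark.sql.catalyst.plans.logical.ParsedStatement

// Rough sketch of the node the parser emits for SHOW PARTITIONS:
// a multi-part table name (e.g. Seq("a", "b", "c") for a.b.c) and an
// optional partition spec mapping partition columns to literal values.
case class ShowPartitionsStatement(
    tableName: Seq[String],
    partitionSpec: Option[Map[String, String]]) extends ParsedStatement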
@@ -0,0 +1,36 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.execution.command

import org.scalactic.source.Position
import org.scalatest.Tag

import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.test.SQLTestUtils

trait ShowPartitionsSuiteBase extends QueryTest with SQLTestUtils {
protected def version: String
protected def catalog: String
protected def defaultNamespace: Seq[String]
protected def defaultUsing: String

override def test(testName: String, testTags: Tag*)(testFun: => Any)
(implicit pos: Position): Unit = {
super.test(s"SHOW PARTITIONS $version: " + testName, testTags: _*)(testFun)
}
}
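
The base trait deliberately leaves the catalog-specific details (version, catalog, defaultNamespace, defaultUsing) abstract, and the overridden test method prefixes every test name with the version, so the same test bodies can be reported separately per catalog implementation. A minimal hypothetical binding illustrating the pattern (the class name and USING json clause are made up for illustration; the PR's real binding is the v1 trait below):

import org.apache.spark.sql.test.SharedSparkSession

// Hypothetical concrete suite: binding the four abstract members is all
// that is needed. Tests inherited from ShowPartitionsSuiteBase then show
// up as "SHOW PARTITIONS V1: <test name>" in the test report.
class JsonShowPartitionsSuite extends ShowPartitionsSuiteBase with SharedSparkSession {
  override def version: String = "V1"
  override def catalog: String = "spark_catalog"
  override def defaultNamespace: Seq[String] = Seq("default")
  override def defaultUsing: String = "USING json"
}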
@@ -0,0 +1,184 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.execution.command.v1

import org.apache.spark.sql.{AnalysisException, Row, SaveMode}
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
import org.apache.spark.sql.connector.catalog.CatalogManager
import org.apache.spark.sql.execution.command
import org.apache.spark.sql.test.SharedSparkSession

trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
override def version: String = "V1"
override def catalog: String = CatalogManager.SESSION_CATALOG_NAME
override def defaultNamespace: Seq[String] = Seq("default")
override def defaultUsing: String = "USING parquet"

private def createDateTable(table: String): Unit = {
sql(s"""
|CREATE TABLE $table (price int, qty int, year int, month int)
|$defaultUsing
|PARTITIONED BY (year, month)""".stripMargin)
sql(s"INSERT INTO $table PARTITION(year = 2015, month = 1) SELECT 1, 1")
sql(s"INSERT INTO $table PARTITION(year = 2015, month = 2) SELECT 2, 2")
sql(s"INSERT INTO $table PARTITION(year = 2016, month = 2) SELECT 3, 3")
sql(s"INSERT INTO $table PARTITION(year = 2016, month = 3) SELECT 3, 3")
}

test("show everything") {
val table = "dateTable"
withTable(table) {
createDateTable(table)
checkAnswer(
sql(s"show partitions $table"),
Row("year=2015/month=1") ::
Row("year=2015/month=2") ::
Row("year=2016/month=2") ::
Row("year=2016/month=3") :: Nil)

checkAnswer(
sql(s"show partitions default.$table"),
Row("year=2015/month=1") ::
Row("year=2015/month=2") ::
Row("year=2016/month=2") ::
Row("year=2016/month=3") :: Nil)
}
}

test("filter by partitions") {
val table = "dateTable"
withTable(table) {
createDateTable(table)
checkAnswer(
sql(s"show partitions default.$table PARTITION(year=2015)"),
Row("year=2015/month=1") ::
Row("year=2015/month=2") :: Nil)
checkAnswer(
sql(s"show partitions default.$table PARTITION(year=2015, month=1)"),
Row("year=2015/month=1") :: Nil)
checkAnswer(
sql(s"show partitions default.$table PARTITION(month=2)"),
Row("year=2015/month=2") ::
Row("year=2016/month=2") :: Nil)
}
}

test("show everything more than 5 part keys") {
val table = "wideTable"
withTable(table) {
sql(s"""
|CREATE TABLE $table (
| price int, qty int,
| year int, month int, hour int, minute int, sec int, extra int)
|$defaultUsing
|PARTITIONED BY (year, month, hour, minute, sec, extra)""".stripMargin)
sql(s"""
|INSERT INTO $table
|PARTITION(year = 2016, month = 3, hour = 10, minute = 10, sec = 10, extra = 1) SELECT 3, 3
""".stripMargin)
sql(s"""
|INSERT INTO $table
|PARTITION(year = 2016, month = 4, hour = 10, minute = 10, sec = 10, extra = 1) SELECT 3, 3
""".stripMargin)
checkAnswer(
sql(s"show partitions $table"),
Row("year=2016/month=3/hour=10/minute=10/sec=10/extra=1") ::
Row("year=2016/month=4/hour=10/minute=10/sec=10/extra=1") :: Nil)
}
}

test("non-partitioning columns") {
val table = "dateTable"
withTable(table) {
createDateTable(table)
val errMsg = intercept[AnalysisException] {
sql(s"SHOW PARTITIONS $table PARTITION(abcd=2015, xyz=1)")
}.getMessage
assert(errMsg.contains("Non-partitioning column(s) [abcd, xyz] are specified"))
}
}

test("show partitions of non-partitioned table") {
val table = "not_partitioned_table"
withTable(table) {
sql(s"CREATE TABLE $table (col1 int) $defaultUsing")
val errMsg = intercept[AnalysisException] {
sql(s"SHOW PARTITIONS $table")
}.getMessage
assert(errMsg.contains("not allowed on a table that is not partitioned"))
}
}

test("show partitions of a view") {
val table = "dateTable"
withTable(table) {
createDateTable(table)
val view = "view1"
withView(view) {
sql(s"CREATE VIEW $view as select * from $table")
val errMsg = intercept[AnalysisException] {
sql(s"SHOW PARTITIONS $view")
}.getMessage
assert(errMsg.contains("is not allowed on a view"))
}
}
}

test("show partitions of a temporary view") {
val viewName = "test_view"
withTempView(viewName) {
spark.range(10).createTempView(viewName)
val errMsg = intercept[NoSuchTableException] {
sql(s"SHOW PARTITIONS $viewName")
}.getMessage
assert(errMsg.contains(s"Table or view '$viewName' not found"))
}
}
}

class ShowPartitionsSuite extends ShowPartitionsSuiteBase with SharedSparkSession {
// The test is placed here because it fails with `USING HIVE`:
// org.apache.spark.sql.AnalysisException:
// Hive data source can only be used with tables, you can't use it with CREATE TEMP VIEW USING
test("issue exceptions on the temporary view") {
val viewName = "test_view"
withTempView(viewName) {
sql(s"""
|CREATE TEMPORARY VIEW $viewName (c1 INT, c2 STRING)
|$defaultUsing""".stripMargin)
val errMsg = intercept[NoSuchTableException] {
sql(s"SHOW PARTITIONS $viewName")
}.getMessage
assert(errMsg.contains(s"Table or view '$viewName' not found"))
}
}

test("show partitions from a datasource") {
import testImplicits._
withTable("part_datasrc") {
val df = (1 to 3).map(i => (i, s"val_$i", i * 2)).toDF("a", "b", "c")
df.write
.partitionBy("a")
.format("parquet")
.mode(SaveMode.Overwrite)
.saveAsTable("part_datasrc")
Contributor: this seems like testing the DataFrameWriter API not the SHOW PARTITIONS command.

Contributor: ah the test was already there. Let's keep it then.

assert(sql("SHOW PARTITIONS part_datasrc").count() == 3)
}
}
}