HiveClient.scala
@@ -215,20 +215,10 @@ private[hive] trait HiveClient {
    * Returns the partitions for the given table that match the supplied partition spec.
    * If no partition spec is specified, all partitions are returned.
    */
-  final def getPartitions(
+  def getPartitions(
       db: String,
       table: String,
-      partialSpec: Option[TablePartitionSpec]): Seq[CatalogTablePartition] = {
-    getPartitions(getTable(db, table), partialSpec)
-  }
-
-  /**
-   * Returns the partitions for the given table that match the supplied partition spec.
-   * If no partition spec is specified, all partitions are returned.
-   */
-  def getPartitions(
-      catalogTable: CatalogTable,
-      partialSpec: Option[TablePartitionSpec] = None): Seq[CatalogTablePartition]
+      partialSpec: Option[TablePartitionSpec]): Seq[CatalogTablePartition]
 
   /** Returns partitions filtered by predicates for the given table. */
   def getPartitionsByFilter(
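
With this hunk, HiveClient exposes a single abstract getPartitions keyed by database and table name; the CatalogTable overload and the final forwarding wrapper are folded into it. A minimal sketch of a call site under the new signature (the `client` value and the table names are illustrative, not part of this patch; TablePartitionSpec is an alias for Map[String, String]):

// Sketch only: `client` is any HiveClient implementation; names are illustrative.
// Passing None asks for every partition of the table.
val all: Seq[CatalogTablePartition] =
  client.getPartitions("default", "src_part", None)
// A partial spec narrows the result to matching partitions.
val filtered: Seq[CatalogTablePartition] =
  client.getPartitions("default", "src_part", Some(Map("ds" -> "2021-06-01")))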
HiveClientImpl.scala
@@ -48,7 +48,7 @@ import org.apache.spark.{SparkConf, SparkException}
 import org.apache.spark.internal.Logging
 import org.apache.spark.metrics.source.HiveCatalogMetrics
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPartitionException, NoSuchPartitionsException, PartitionAlreadyExistsException, PartitionsAlreadyExistException}
+import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPartitionException, NoSuchPartitionsException, NoSuchTableException, PartitionAlreadyExistsException, PartitionsAlreadyExistException}
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.catalyst.expressions.Expression
@@ -733,14 +733,19 @@ private[hive] class HiveClientImpl(
     Option(hivePartition).map(fromHivePartition)
   }
 
-  /**
-   * Returns the partitions for the given table that match the supplied partition spec.
-   * If no partition spec is specified, all partitions are returned.
-   */
   override def getPartitions(
-      table: CatalogTable,
+      db: String,
+      table: String,
+      spec: Option[TablePartitionSpec]): Seq[CatalogTablePartition] = {
+    val hiveTable = withHiveState {
+      getRawTableOption(db, table).getOrElse(throw new NoSuchTableException(db, table))
+    }
+    getPartitions(hiveTable, spec)
+  }
+
+  private def getPartitions(
+      hiveTable: HiveTable,
       spec: Option[TablePartitionSpec]): Seq[CatalogTablePartition] = withHiveState {
-    val hiveTable = toHiveTable(table, Some(userName))
     val partSpec = spec match {
       case None => CatalogTypes.emptyTablePartitionSpec
       case Some(s) =>
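
The new override resolves the raw metastore table once under withHiveState and fails fast if it is missing, then delegates to a private overload that works on the HiveTable directly; the old body instead rebuilt a Hive table from a CatalogTable via toHiveTable. A short sketch of the resulting error path (names are illustrative; `client` stands for a HiveClientImpl instance):

// Sketch only: a missing table now surfaces as NoSuchTableException,
// thrown from inside getPartitions by the getRawTableOption lookup,
// rather than requiring the caller to fetch a CatalogTable first.
try {
  client.getPartitions("default", "no_such_table", None)
} catch {
  case e: NoSuchTableException => println(e.getMessage)
}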
InsertSuite.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.hive
 
 import java.io.File
+import java.util.Locale
 
 import com.google.common.io.Files
 import org.apache.hadoop.fs.Path
@@ -854,4 +855,54 @@ class InsertSuite extends QueryTest with TestHiveSingleton with BeforeAndAfter
       assert(e.contains("Partition spec is invalid"))
     }
   }
+
+  test("SPARK-35531: Insert data with different cases of bucket column") {
+    withTable("test1") {
+      Seq(true, false).foreach { isHiveTable =>
+        val createSpark = if (isHiveTable) {
+          """
+            |CREATE TABLE TEST1(
+            |v1 BIGINT,
+            |s1 INT)
+            |PARTITIONED BY (pk BIGINT)
+            |CLUSTERED BY (v1)
+            |SORTED BY (s1)
+            |INTO 200 BUCKETS
+            |STORED AS PARQUET
+          """.stripMargin
+        } else {
+          """
+            |CREATE TABLE test1(
+            |v1 BIGINT,
+            |s1 INT)
+            |USING PARQUET
+            |PARTITIONED BY (pk BIGINT)
+            |CLUSTERED BY (v1)
+            |SORTED BY (s1)
+            |INTO 200 BUCKETS
+          """.stripMargin
+        }
+
+        val insertString =
+          """
+            |INSERT INTO test1
+            |SELECT * FROM VALUES(1,1,1)
+          """.stripMargin
+
+        val dropString = "DROP TABLE IF EXISTS test1"
+
+        sql(dropString)
+        sql(createSpark.toLowerCase(Locale.ROOT))
+
+        sql(insertString.toLowerCase(Locale.ROOT))
+        sql(insertString.toUpperCase(Locale.ROOT))
+
+        sql(dropString)
+        sql(createSpark.toUpperCase(Locale.ROOT))
+
+        sql(insertString.toLowerCase(Locale.ROOT))
+        sql(insertString.toUpperCase(Locale.ROOT))
+      }
+    }
+  }
 }
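
The test exercises both a Hive table and a datasource table, creating and inserting with lower- and upper-case variants of the same statements. A condensed sketch of the reported SPARK-35531 failure mode it guards against (assumes a Hive-enabled SparkSession named `spark`; table and column names mirror the test above):

// Sketch only: create the Hive bucketed table with an upper-case schema...
spark.sql(
  """CREATE TABLE TEST1(V1 BIGINT, S1 INT)
    |PARTITIONED BY (PK BIGINT)
    |CLUSTERED BY (V1) SORTED BY (S1) INTO 200 BUCKETS
    |STORED AS PARQUET""".stripMargin)
// ...then insert with different casing. This insert failed before the fix,
// once getPartitions round-tripped the table through CatalogTable; with
// the raw-table lookup both casings succeed.
spark.sql("insert into test1 select * from values (1, 1, 1)")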
VersionsSuite.scala
@@ -480,9 +480,9 @@ class VersionsSuite extends SparkFunSuite with Logging {
       assert(partitionNames == client.getPartitionNames(client.getTable("default", "src_part")))
     }
 
-    test(s"$version: getPartitions(catalogTable)") {
+    test(s"$version: getPartitions(db, table, spec)") {
       assert(testPartitionCount ==
-        client.getPartitions(client.getTable("default", "src_part")).size)
+        client.getPartitions("default", "src_part", None).size)
     }
 
     test(s"$version: getPartitionsByFilter") {