Skip to content

Commit 473f2fb

Browse files
fjh100456 authored and dongjoon-hyun committed
[SPARK-21786][SQL][FOLLOWUP] Add compressionCodec test for CTAS
## What changes were proposed in this pull request? Before Apache Spark 2.3, table properties were ignored when writing data to a hive table(created with STORED AS PARQUET/ORC syntax), because the compression configurations were not passed to the FileFormatWriter in hadoopConf. Then it was fixed in #20087. But actually for CTAS with USING PARQUET/ORC syntax, table properties were ignored too when convertMastore, so the test case for CTAS not supported. Now it has been fixed in #20522 , the test case should be enabled too. ## How was this patch tested? This only re-enables the test cases of previous PR. Closes #22302 from fjh100456/compressionCodec. Authored-by: fjh100456 <[email protected]> Signed-off-by: Dongjoon Hyun <[email protected]>
1 parent f96a8bf commit 473f2fb

File tree

1 file changed

+4
-6
lines changed

1 file changed

+4
-6
lines changed

sql/hive/src/test/scala/org/apache/spark/sql/hive/CompressionCodecSuite.scala

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -122,7 +122,7 @@ class CompressionCodecSuite extends TestHiveSingleton with ParquetTest with Befo
122122
""".stripMargin)
123123
}
124124

125-
private def writeDateToTableUsingCTAS(
125+
private def writeDataToTableUsingCTAS(
126126
rootDir: File,
127127
tableName: String,
128128
partitionValue: Option[String],
@@ -152,7 +152,7 @@ class CompressionCodecSuite extends TestHiveSingleton with ParquetTest with Befo
152152
usingCTAS: Boolean): String = {
153153
val partitionValue = if (isPartitioned) Some("test") else None
154154
if (usingCTAS) {
155-
writeDateToTableUsingCTAS(tmpDir, tableName, partitionValue, format, compressionCodec)
155+
writeDataToTableUsingCTAS(tmpDir, tableName, partitionValue, format, compressionCodec)
156156
} else {
157157
createTable(tmpDir, tableName, isPartitioned, format, compressionCodec)
158158
writeDataToTable(tableName, partitionValue)
@@ -258,8 +258,7 @@ class CompressionCodecSuite extends TestHiveSingleton with ParquetTest with Befo
258258
def checkForTableWithCompressProp(format: String, compressCodecs: List[String]): Unit = {
259259
Seq(true, false).foreach { isPartitioned =>
260260
Seq(true, false).foreach { convertMetastore =>
261-
// TODO: Also verify CTAS(usingCTAS=true) cases when the bug(SPARK-22926) is fixed.
262-
Seq(false).foreach { usingCTAS =>
261+
Seq(true, false).foreach { usingCTAS =>
263262
checkTableCompressionCodecForCodecs(
264263
format,
265264
isPartitioned,
@@ -281,8 +280,7 @@ class CompressionCodecSuite extends TestHiveSingleton with ParquetTest with Befo
281280
def checkForTableWithoutCompressProp(format: String, compressCodecs: List[String]): Unit = {
282281
Seq(true, false).foreach { isPartitioned =>
283282
Seq(true, false).foreach { convertMetastore =>
284-
// TODO: Also verify CTAS(usingCTAS=true) cases when the bug(SPARK-22926) is fixed.
285-
Seq(false).foreach { usingCTAS =>
283+
Seq(true, false).foreach { usingCTAS =>
286284
checkTableCompressionCodecForCodecs(
287285
format,
288286
isPartitioned,

0 commit comments

Comments
 (0)