
Commit 42aca3d

[SPARK-21786][SQL] The 'spark.sql.parquet.compression.codec' configuration doesn't take effect on tables with partition field(s)
Fix test problem
1 parent 6d77bf9 commit 42aca3d

File tree

1 file changed: +1 -1 lines changed


sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -779,7 +779,7 @@ class InsertSuite extends QueryTest with TestHiveSingleton with BeforeAndAfter
     def getTableSize(tableName: String, codec: String,
         isPartitioned: Boolean = false): Long = {
       insertOverwriteTable(tableName, codec, isPartitioned)
-      val path = s"${tmpDir.toURI.toString.stripSuffix("/")}/$tableName"
+      val path = s"${tmpDir.getPath.stripSuffix("/")}/$tableName"
       val dir = new File(path)
       val files = getDirFiles(dir).filter(_.getName.startsWith("part-"))
       files.map(_.length()).sum
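
For context, a minimal standalone sketch (not part of the commit; the /tmp/spark-test path and the PathDemo object are made up for illustration) of why tmpDir.getPath works here where tmpDir.toURI.toString did not: toURI.toString yields a "file:" URI string, which java.io.File treats as an ordinary path name rather than a URI, so the resulting File does not point at the real directory.

import java.io.File

object PathDemo {
  def main(args: Array[String]): Unit = {
    val tmpDir = new File("/tmp/spark-test")   // hypothetical temp directory

    // toURI.toString produces a URI such as "file:/tmp/spark-test/".
    // java.io.File does not interpret the "file:" scheme, so this File
    // resolves to a relative path that does not exist on disk.
    val uriPath = tmpDir.toURI.toString.stripSuffix("/")
    println(uriPath)                      // file:/tmp/spark-test
    println(new File(uriPath).exists())   // false

    // getPath returns the plain filesystem path, which File resolves correctly.
    val plainPath = tmpDir.getPath.stripSuffix("/")
    println(plainPath)                    // /tmp/spark-test
    println(new File(plainPath).exists()) // true, provided the directory exists
  }
}

With the URI form, getDirFiles(dir) would find no "part-" files and getTableSize would report a size of 0, which appears to be the test problem the commit message refers to; switching to getPath makes the directory scan hit the actual output location.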
