Skip to content

Commit 44e534e

Browse files
liancheng authored and marmbrus committed
[SPARK-3515][SQL] Moves test suite setup code to beforeAll rather than in constructor
Please refer to the JIRA ticket for details. **NOTE** We should check all test suites that do similar initialization-like side effects in their constructors. This PR only fixes `ParquetMetastoreSuite` because it breaks our Jenkins Maven build. Author: Cheng Lian <[email protected]> Closes #2375 from liancheng/say-no-to-constructor and squashes the following commits: 0ceb75b [Cheng Lian] Moves test suite setup code to beforeAll rather than in constructor (cherry picked from commit 6d887db) Signed-off-by: Michael Armbrust <[email protected]>
1 parent 9c06c72 commit 44e534e

File tree

1 file changed

+24
-29
lines changed

1 file changed

+24
-29
lines changed

sql/hive/src/test/scala/org/apache/spark/sql/parquet/ParquetMetastoreSuite.scala

Lines changed: 24 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -20,14 +20,10 @@ package org.apache.spark.sql.parquet
2020

2121
import java.io.File
2222

23-
import org.apache.spark.sql.hive.execution.HiveTableScan
2423
import org.scalatest.BeforeAndAfterAll
2524

26-
import scala.reflect.ClassTag
27-
28-
import org.apache.spark.sql.{SQLConf, QueryTest}
29-
import org.apache.spark.sql.execution.{BroadcastHashJoin, ShuffledHashJoin}
30-
import org.apache.spark.sql.hive.test.TestHive
25+
import org.apache.spark.sql.QueryTest
26+
import org.apache.spark.sql.hive.execution.HiveTableScan
3127
import org.apache.spark.sql.hive.test.TestHive._
3228

3329
case class ParquetData(intField: Int, stringField: String)
@@ -36,27 +32,19 @@ case class ParquetData(intField: Int, stringField: String)
3632
* Tests for our SerDe -> Native parquet scan conversion.
3733
*/
3834
class ParquetMetastoreSuite extends QueryTest with BeforeAndAfterAll {
39-
4035
override def beforeAll(): Unit = {
41-
setConf("spark.sql.hive.convertMetastoreParquet", "true")
42-
}
43-
44-
override def afterAll(): Unit = {
45-
setConf("spark.sql.hive.convertMetastoreParquet", "false")
46-
}
47-
48-
val partitionedTableDir = File.createTempFile("parquettests", "sparksql")
49-
partitionedTableDir.delete()
50-
partitionedTableDir.mkdir()
51-
52-
(1 to 10).foreach { p =>
53-
val partDir = new File(partitionedTableDir, s"p=$p")
54-
sparkContext.makeRDD(1 to 10)
55-
.map(i => ParquetData(i, s"part-$p"))
56-
.saveAsParquetFile(partDir.getCanonicalPath)
57-
}
58-
59-
sql(s"""
36+
val partitionedTableDir = File.createTempFile("parquettests", "sparksql")
37+
partitionedTableDir.delete()
38+
partitionedTableDir.mkdir()
39+
40+
(1 to 10).foreach { p =>
41+
val partDir = new File(partitionedTableDir, s"p=$p")
42+
sparkContext.makeRDD(1 to 10)
43+
.map(i => ParquetData(i, s"part-$p"))
44+
.saveAsParquetFile(partDir.getCanonicalPath)
45+
}
46+
47+
sql(s"""
6048
create external table partitioned_parquet
6149
(
6250
intField INT,
@@ -70,7 +58,7 @@ class ParquetMetastoreSuite extends QueryTest with BeforeAndAfterAll {
7058
location '${partitionedTableDir.getCanonicalPath}'
7159
""")
7260

73-
sql(s"""
61+
sql(s"""
7462
create external table normal_parquet
7563
(
7664
intField INT,
@@ -83,8 +71,15 @@ class ParquetMetastoreSuite extends QueryTest with BeforeAndAfterAll {
8371
location '${new File(partitionedTableDir, "p=1").getCanonicalPath}'
8472
""")
8573

86-
(1 to 10).foreach { p =>
87-
sql(s"ALTER TABLE partitioned_parquet ADD PARTITION (p=$p)")
74+
(1 to 10).foreach { p =>
75+
sql(s"ALTER TABLE partitioned_parquet ADD PARTITION (p=$p)")
76+
}
77+
78+
setConf("spark.sql.hive.convertMetastoreParquet", "true")
79+
}
80+
81+
override def afterAll(): Unit = {
82+
setConf("spark.sql.hive.convertMetastoreParquet", "false")
8883
}
8984

9085
test("project the partitioning column") {

0 commit comments

Comments (0)