diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQuerySuite.scala
index 3fb356deadb3..afbdd971a092 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQuerySuite.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql
 
+import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.util.resourceToString
 import org.apache.spark.sql.internal.SQLConf
 
@@ -46,11 +47,7 @@ class TPCDSQuerySuite extends BenchmarkQueryTest with TPCDSSchema {
     "q81", "q82", "q83", "q84", "q85", "q86", "q87", "q88", "q89", "q90",
     "q91", "q92", "q93", "q94", "q95", "q96", "q97", "q98", "q99")
 
-  val sqlConfgs = Seq(
-    SQLConf.CBO_ENABLED.key -> "true",
-    SQLConf.PLAN_STATS_ENABLED.key -> "true",
-    SQLConf.JOIN_REORDER_ENABLED.key -> "true"
-  )
+  val sqlConfgs: Seq[(String, String)] = Nil
 
   tpcdsQueries.foreach { name =>
     val queryString = resourceToString(s"tpcds/$name.sql",
@@ -104,3 +101,23 @@ class TPCDSQuerySuite extends BenchmarkQueryTest with TPCDSSchema {
     }
   }
 }
+
+class TPCDSQueryWithStatsSuite extends TPCDSQuerySuite {
+
+  override def beforeAll(): Unit = {
+    super.beforeAll()
+    for (tableName <- tableNames) {
+      // To simulate plan generation on actual TPCDS data, injects data stats here
+      spark.sessionState.catalog.alterTableStats(
+        TableIdentifier(tableName), Some(TPCDSTableStats.sf100TableStats(tableName)))
+    }
+  }
+
+  // Sets configurations for enabling the optimization rules that
+  // exploit data statistics.
+  override val sqlConfgs = Seq(
+    SQLConf.CBO_ENABLED.key -> "true",
+    SQLConf.PLAN_STATS_ENABLED.key -> "true",
+    SQLConf.JOIN_REORDER_ENABLED.key -> "true"
+  )
+}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TPCDSSchema.scala b/sql/core/src/test/scala/org/apache/spark/sql/TPCDSSchema.scala
index 012b7d10d8f7..43974ad22f2e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/TPCDSSchema.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/TPCDSSchema.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql
 
-import org.apache.spark.sql.catalyst.TableIdentifier
-
 trait TPCDSSchema {
 
   private val tableColumns = Map(
@@ -257,9 +255,5 @@ trait TPCDSSchema {
          |USING $format
          |${options.mkString("\n")}
        """.stripMargin)
-
-    // To simulate plan generation on actual TPCDS data, injects data stats here
-    spark.sessionState.catalog.alterTableStats(
-      TableIdentifier(tableName), Some(TPCDSTableStats.sf100TableStats(tableName)))
   }
 }