diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
index b7c8f775b857..b7368b6a00fe 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala
@@ -185,9 +185,9 @@ abstract class Optimizer(catalogManager: CatalogManager)
         RemoveLiteralFromGroupExpressions,
         RemoveRepetitionFromGroupExpressions) :: Nil ++ operatorOptimizationBatch) :+
-    // This batch rewrites data source plans and should be run after the operator
-    // optimization batch and before any batches that depend on stats.
-    Batch("Data Source Rewrite Rules", Once, dataSourceRewriteRules: _*) :+
+    // This batch rewrites plans after the operator optimization and
+    // before any batches that depend on stats.
+    Batch("Pre CBO Rules", Once, preCBORules: _*) :+
     // This batch pushes filters and projections into scan nodes. Before this batch, the logical
     // plan may contain nodes that do not report stats. Anything that uses stats must run after
     // this batch.
@@ -293,10 +293,10 @@ abstract class Optimizer(catalogManager: CatalogManager)
   def earlyScanPushDownRules: Seq[Rule[LogicalPlan]] = Nil
 
   /**
-   * Override to provide additional rules for rewriting data source plans. Such rules will be
-   * applied after operator optimization rules and before any rules that depend on stats.
+   * Override to provide additional rules for rewriting plans after operator optimization rules and
+   * before any cost-based optimization rules that depend on stats.
    */
-  def dataSourceRewriteRules: Seq[Rule[LogicalPlan]] = Nil
+  def preCBORules: Seq[Rule[LogicalPlan]] = Nil
 
   /**
    * Returns (defaultBatches - (excludedRules - nonExcludableRules)), the rule batches that
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSessionExtensions.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSessionExtensions.scala
index 6952f4bfd056..074906a971b1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSessionExtensions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSessionExtensions.scala
@@ -40,6 +40,7 @@ import org.apache.spark.sql.execution.{ColumnarRule, SparkPlan}
  * <li>Analyzer Rules.</li>
  * <li>Check Analysis Rules.</li>
  * <li>Optimizer Rules.</li>
+ * <li>Pre CBO Rules.</li>
  * <li>Planning Strategies.</li>
  * <li>Customized Parser.</li>
  * <li>(External) Catalog listeners.</li>
@@ -199,6 +200,21 @@ class SparkSessionExtensions {
     optimizerRules += builder
   }
 
+  private[this] val preCBORules = mutable.Buffer.empty[RuleBuilder]
+
+  private[sql] def buildPreCBORules(session: SparkSession): Seq[Rule[LogicalPlan]] = {
+    preCBORules.map(_.apply(session)).toSeq
+  }
+
+  /**
+   * Inject an optimizer `Rule` builder that rewrites logical plans into the [[SparkSession]].
+   * The injected rules will be executed once after the operator optimization batch and
+   * before any cost-based optimization rules that depend on stats.
+   */
+  def injectPreCBORule(builder: RuleBuilder): Unit = {
+    preCBORules += builder
+  }
+
   private[this] val plannerStrategyBuilders = mutable.Buffer.empty[StrategyBuilder]
 
   private[sql] def buildPlannerStrategies(session: SparkSession): Seq[Strategy] = {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/BaseSessionStateBuilder.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/BaseSessionStateBuilder.scala
index c71634f1868e..34b9af12607e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/BaseSessionStateBuilder.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/BaseSessionStateBuilder.scala
@@ -231,8 +231,8 @@ abstract class BaseSessionStateBuilder(
   override def earlyScanPushDownRules: Seq[Rule[LogicalPlan]] =
     super.earlyScanPushDownRules ++ customEarlyScanPushDownRules
 
-  override def dataSourceRewriteRules: Seq[Rule[LogicalPlan]] =
-    super.dataSourceRewriteRules ++ customDataSourceRewriteRules
+  override def preCBORules: Seq[Rule[LogicalPlan]] =
+    super.preCBORules ++ customPreCBORules
 
   override def extendedOperatorOptimizationRules: Seq[Rule[LogicalPlan]] =
     super.extendedOperatorOptimizationRules ++ customOperatorOptimizationRules
@@ -258,12 +258,14 @@ abstract class BaseSessionStateBuilder(
   protected def customEarlyScanPushDownRules: Seq[Rule[LogicalPlan]] = Nil
 
   /**
-   * Custom rules for rewriting data source plans to add to the Optimizer.
Prefer overriding - * this instead of creating your own Optimizer. + * Custom rules for rewriting plans after operator optimization and before CBO. + * Prefer overriding this instead of creating your own Optimizer. * * Note that this may NOT depend on the `optimizer` function. */ - protected def customDataSourceRewriteRules: Seq[Rule[LogicalPlan]] = Nil + protected def customPreCBORules: Seq[Rule[LogicalPlan]] = { + extensions.buildPreCBORules(session) + } /** * Planner that converts optimized logical plans to physical plans. diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala index 6797dcd34a53..35d251383561 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala @@ -88,6 +88,12 @@ class SparkSessionExtensionSuite extends SparkFunSuite { } } + test("SPARK-33621: inject a pre CBO rule") { + withSession(Seq(_.injectPreCBORule(MyRule))) { session => + assert(session.sessionState.optimizer.preCBORules.contains(MyRule(session))) + } + } + test("inject spark planner strategy") { withSession(Seq(_.injectPlannerStrategy(MySparkStrategy))) { session => assert(session.sessionState.planner.strategies.contains(MySparkStrategy(session)))