diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 761f3575f1b8..7f6027a27f87 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -200,6 +200,11 @@ class Analyzer(
   val postHocResolutionRules: Seq[Rule[LogicalPlan]] = Nil
 
   lazy val batches: Seq[Batch] = Seq(
+    Batch("Substitution", fixedPoint,
+      CTESubstitution,
+      WindowsSubstitution,
+      EliminateUnions,
+      new SubstituteUnresolvedOrdinals(conf)),
     Batch("Disable Hints", Once,
       new ResolveHints.DisableHints(conf)),
     Batch("Hints", fixedPoint,
@@ -207,11 +212,6 @@ class Analyzer(
       new ResolveHints.ResolveCoalesceHints(conf)),
     Batch("Simple Sanity Check", Once,
       LookupFunctions),
-    Batch("Substitution", fixedPoint,
-      CTESubstitution,
-      WindowsSubstitution,
-      EliminateUnions,
-      new SubstituteUnresolvedOrdinals(conf)),
     Batch("Resolution", fixedPoint,
       ResolveTableValuedFunctions ::
       ResolveNamespace(catalogManager) ::
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
index c0be49af2107..77e427d93dae 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
@@ -26,7 +26,7 @@ import org.apache.log4j.Level
 import org.scalatest.Matchers
 
 import org.apache.spark.api.python.PythonEvalType
-import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.{AliasIdentifier, TableIdentifier}
 import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType, InMemoryCatalog, SessionCatalog}
 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.dsl.plans._
@@ -895,4 +895,27 @@ class AnalysisSuite extends AnalysisTest with Matchers {
     assertAnalysisError(testRelation2.select(RowNumber() + 1),
       Seq("Window function row_number() requires an OVER clause."))
   }
+
+  test("SPARK-32237: Hint in CTE") {
+    val plan = With(
+      Project(
+        Seq(UnresolvedAttribute("cte.a")),
+        UnresolvedRelation(TableIdentifier("cte"))
+      ),
+      Seq(
+        (
+          "cte",
+          SubqueryAlias(
+            AliasIdentifier("cte"),
+            UnresolvedHint(
+              "REPARTITION",
+              Seq(Literal(3)),
+              Project(testRelation.output, testRelation)
+            )
+          )
+        )
+      )
+    )
+    assertAnalysisSuccess(plan)
+  }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 989f304b1f07..2bddaee0f179 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -3560,6 +3560,18 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
       }
     }
   }
+
+  test("SPARK-32237: Hint in CTE") {
+    withTable("t") {
+      sql("CREATE TABLE t USING PARQUET AS SELECT 1 AS id")
+      checkAnswer(
+        sql(s"""
+          |WITH cte AS (SELECT /*+ REPARTITION(3) */ * FROM t)
+          |SELECT * FROM cte
+        """.stripMargin),
+        Row(1) :: Nil)
+    }
+  }
 }
 
 case class Foo(bar: Option[String])
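
Illustration only, not part of the patch: a minimal spark-shell style sketch of how the change can be observed, assuming a running SparkSession bound to `spark` and no existing table named `t`. With the Substitution batch (which inlines CTE definitions) moved ahead of the hint batches, a hint written inside a CTE body is still present when ResolveHints runs, so it is applied like any other hint.

// Sketch, not part of the patch. Assumes a SparkSession named `spark` (e.g. spark-shell)
// and that no table named `t` exists yet.
spark.sql("CREATE TABLE t USING PARQUET AS SELECT 1 AS id")

val df = spark.sql(
  """
    |WITH cte AS (SELECT /*+ REPARTITION(3) */ * FROM t)
    |SELECT * FROM cte
  """.stripMargin)

// Once the REPARTITION(3) hint inside the CTE is resolved, the plan should contain a
// repartition exchange and the result is expected to land in 3 partitions.
df.explain()
println(df.rdd.getNumPartitions)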