Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -768,6 +768,10 @@ class Analyzer(override val catalogManager: CatalogManager) extends RuleExecutor
} else {
colResolved.havingCondition
}
// `cond` might contain unresolved aggregate functions so defer its resolution to
// `ResolveAggregateFunctions` rule if needed.
if (!cond.resolved) return colResolved

// Try resolving the condition of the filter as though it is in the aggregate clause
val (extraAggExprs, Seq(resolvedHavingCond)) =
ResolveAggregateFunctions.resolveExprsWithAggregate(Seq(cond), aggForResolving)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ FROM (VALUES ('x', 'a', 10), ('y', 'b', 20) ) AS t (c1, c2, c3)
GROUP BY GROUPING SETS ( ( c1 ), ( c2 ) )
HAVING GROUPING__ID > 1
-- !query analysis
Filter (grouping__id#xL > cast(1 as bigint))
Filter (GROUPING__ID#xL > cast(1 as bigint))
+- Aggregate [c1#x, c2#x, spark_grouping_id#xL], [c1#x, c2#x, sum(c3#x) AS sum(c3)#xL, spark_grouping_id#xL AS grouping__id#xL]
+- Expand [[c1#x, c2#x, c3#x, c1#x, null, 1], [c1#x, c2#x, c3#x, null, c2#x, 2]], [c1#x, c2#x, c3#x, c1#x, c2#x, spark_grouping_id#xL]
+- Project [c1#x, c2#x, c3#x, c1#x AS c1#x, c2#x AS c2#x]
Expand Down
16 changes: 16 additions & 0 deletions sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -4921,6 +4921,22 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
Row(Array(0), Array(0)), Row(Array(1), Array(1)), Row(Array(2), Array(2)))
checkAnswer(df, expectedAnswer)
}

test("SPARK-53094: Fix cube-related data quality problem") {
  // HAVING on a CUBE aggregate where the predicate references an aggregate
  // function (count) that does not appear in the SELECT list. The query and
  // its expected rows are kept in named vals so the assertion reads clearly.
  val cubeHavingQuery =
    """SELECT product, region, sum(amount) AS s
      |FROM VALUES
      | ('a', 'east', 100),
      | ('b', 'east', 200),
      | ('a', 'west', 150),
      | ('b', 'west', 250),
      | ('a', 'east', 120) AS t(product, region, amount)
      |GROUP BY product, region WITH CUBE
      |HAVING count(product) > 2
      |ORDER BY s DESC""".stripMargin

  // Only cube groupings containing more than two input rows survive the
  // HAVING filter: the grand total, the 'east' rollup, and product 'a'.
  val expectedRows = Seq(
    Row(null, null, 820),
    Row(null, "east", 420),
    Row("a", null, 370))

  checkAnswer(sql(cubeHavingQuery), expectedRows)
}
}

// Simple single-field case class; `bar` is optional. Defined at top level
// (outside the test suite) — presumably used as a fixture type by tests in
// this file that are not visible in this chunk; verify against callers.
case class Foo(bar: Option[String])