@@ -1493,7 +1493,7 @@ class Analyzer(
// to push down this ordering expression and can reference the original aggregate
// expression instead.
val needsPushDown = ArrayBuffer.empty[NamedExpression]
-val evaluatedOrderings = resolvedAliasedOrdering.zip(sortOrder).map {
+val evaluatedOrderings = resolvedAliasedOrdering.zip(unresolvedSortOrders).map {
Member: A good catch! Is this a bug since 1.6.x?

Contributor Author: @gatorsmile Yeah.. seems like it.

case (evaluated, order) =>
val index = originalAggExprs.indexWhere {
case Alias(child, _) => child semanticEquals evaluated.child
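The pairing problem fixed above can be illustrated with a plain-Scala sketch. This is a hedged, simplified model, not the actual Analyzer code: `SortSpec`, `key#resolved`, and the literal values are made up for illustration, and the reading that only `key` (not the aliased `value`) ends up in the unresolved subset is an assumption based on the query shape in the new test. Only the zip behaviour is meant to mirror the change: when just a subset of the ORDER BY expressions needs to be resolved against the aggregate, zipping the resolved subset against the full original list can pair an expression with the SortOrder, and hence the sort direction, of a different expression.

```scala
// Hypothetical, simplified sketch -- not Spark's Analyzer code.
// Query shape from the new test:
//   SELECT MAX(value) AS value, key AS col2 FROM src GROUP BY key ORDER BY value DESC, key ASC
// Assumption: "value" resolves directly against the aggregate's output, so only "key"
// lands in the subset of sort orders that has to be resolved (and aliased) separately.
case class SortSpec(expr: String, direction: String)

val sortOrder = Seq(SortSpec("value", "DESC"), SortSpec("key", "ASC"))  // full ORDER BY list
val unresolvedSortOrders = Seq(SortSpec("key", "ASC"))                  // subset needing resolution
val resolvedAliasedOrdering = Seq("key#resolved")                       // one resolved expression per unresolved order

// Buggy pairing: zipping against the full list matches the resolved "key" expression
// with the SortSpec of "value", so it would inherit the wrong (DESC) direction.
println(resolvedAliasedOrdering.zip(sortOrder))
// List((key#resolved,SortSpec(value,DESC)))

// Fixed pairing: zipping against the unresolved subset keeps "key" with its own ASC direction.
println(resolvedAliasedOrdering.zip(unresolvedSortOrders))
// List((key#resolved,SortSpec(key,ASC)))
```

The new test below exercises exactly this shape of query and checks that each ORDER BY expression keeps its own sort direction.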
41 changes: 40 additions & 1 deletion sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -18,7 +18,6 @@
package org.apache.spark.sql

import java.io.File
-import java.math.MathContext
Member: BTW, please do not remove the seemingly unnecessary import next time. Thanks! When I backported this to 2.2, I hit a build break because the import is still used by other test cases in 2.2.

Contributor Author: @gatorsmile Oh.. OK.. Sorry about it. I will not remove it next time.

import java.net.{MalformedURLException, URL}
import java.sql.Timestamp
import java.util.concurrent.atomic.AtomicBoolean
@@ -1618,6 +1617,46 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
}
}

test("SPARK-23281: verify the correctness of sort direction on composite order by clause") {
withTempView("src") {
Seq[(Integer, Integer)](
(1, 1),
(1, 3),
(2, 3),
(3, 3),
(4, null),
(5, null)
).toDF("key", "value").createOrReplaceTempView("src")

checkAnswer(sql(
"""
|SELECT MAX(value) as value, key as col2
|FROM src
|GROUP BY key
|ORDER BY value desc, key
""".stripMargin),
Seq(Row(3, 1), Row(3, 2), Row(3, 3), Row(null, 4), Row(null, 5)))

checkAnswer(sql(
"""
|SELECT MAX(value) as value, key as col2
|FROM src
|GROUP BY key
|ORDER BY value desc, key desc
""".stripMargin),
Seq(Row(3, 3), Row(3, 2), Row(3, 1), Row(null, 5), Row(null, 4)))

checkAnswer(sql(
"""
|SELECT MAX(value) as value, key as col2
|FROM src
|GROUP BY key
|ORDER BY value asc, key desc
""".stripMargin),
Seq(Row(null, 5), Row(null, 4), Row(3, 3), Row(3, 2), Row(3, 1)))
}
}

test("run sql directly on files") {
val df = spark.range(100).toDF()
withTempPath(f => {