diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/PhysicalDataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/PhysicalDataType.scala
index c43b81915a70..f80aee4c8cbe 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/PhysicalDataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/types/PhysicalDataType.scala
@@ -242,8 +242,7 @@ case class PhysicalMapType(keyType: DataType, valueType: DataType, valueContains
 
 class PhysicalNullType() extends PhysicalDataType with PhysicalPrimitiveType {
   override private[sql] def ordering =
-    throw QueryExecutionErrors.orderedOperationUnsupportedByDataTypeError(
-      "PhysicalNullType")
+    implicitly[Ordering[Unit]].asInstanceOf[Ordering[Any]]
   override private[sql] type InternalType = Any
   @transient private[sql] lazy val tag = typeTag[InternalType]
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 302b05e9b5ce..4000475404f2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -31,7 +31,7 @@ import org.apache.commons.io.FileUtils
 import org.apache.spark.{AccumulatorSuite, SPARK_DOC_ROOT, SparkArithmeticException, SparkDateTimeException, SparkException, SparkNumberFormatException, SparkRuntimeException}
 import org.apache.spark.scheduler.{SparkListener, SparkListenerJobStart}
 import org.apache.spark.sql.catalyst.ExtendedAnalysisException
-import org.apache.spark.sql.catalyst.expressions.{GenericRow, Hex}
+import org.apache.spark.sql.catalyst.expressions.{CodegenObjectFactoryMode, GenericRow, Hex}
 import org.apache.spark.sql.catalyst.expressions.Cast._
 import org.apache.spark.sql.catalyst.expressions.aggregate.{Complete, Partial}
 import org.apache.spark.sql.catalyst.optimizer.{ConvertToLocalRelation, NestedColumnAliasingSuite}
@@ -1430,6 +1430,17 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
     }
   }
 
+  test("SPARK-49200: Fix null type non-codegen ordering exception") {
+    withSQLConf(
+      SQLConf.CODEGEN_FACTORY_MODE.key -> CodegenObjectFactoryMode.NO_CODEGEN.toString,
+      SQLConf.OPTIMIZER_EXCLUDED_RULES.key ->
+        "org.apache.spark.sql.catalyst.optimizer.EliminateSorts") {
+      checkAnswer(
+        sql("SELECT * FROM range(3) ORDER BY array(null)"),
+        Seq(Row(0), Row(1), Row(2)))
+    }
+  }
+
   test("SPARK-8837: use keyword in column name") {
     withTempView("t") {
       val df = Seq(1 -> "a").toDF("count", "sort")