diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index 10464dac8d55e..a0cb5da078438 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -134,6 +134,7 @@
     case (DateType, TimestampType) => true
     case (_: AtomicType, StringType) => true
     case (_: CalendarIntervalType, StringType) => true
+    case (NullType, _) => true
 
     // Spark supports casting between long and timestamp, please see `longToTimestamp` and
     // `timestampToLong` for details.
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
index bbb3cb516b7d5..69adb8e922cc3 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
@@ -974,7 +974,7 @@ class CastSuite extends SparkFunSuite with ExpressionEvalHelper {
     )
   }
 
-  import DataTypeTestUtils.numericTypes
+  import DataTypeTestUtils._
 
   numericTypes.foreach { from =>
     val (safeTargetTypes, unsafeTargetTypes) = numericTypes.partition(to => isCastSafe(from, to))
@@ -1008,6 +1008,10 @@ class CastSuite extends SparkFunSuite with ExpressionEvalHelper {
         assert(!Cast.canUpCast(complexType, StringType))
       }
     }
+
+    atomicTypes.foreach { atomicType =>
+      assert(Cast.canUpCast(NullType, atomicType))
+    }
   }
 
   test("SPARK-27671: cast from nested null type in struct") {