diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index 424b82533fca4..25e1889109e8d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -112,6 +112,7 @@ object Cast {
     case (StringType, _: AnsiIntervalType) => true
 
     case (_: AnsiIntervalType, _: IntegralType | _: DecimalType) => true
+    case (_: IntegralType, _: AnsiIntervalType) => true
 
     case (_: DayTimeIntervalType, _: DayTimeIntervalType) => true
     case (_: YearMonthIntervalType, _: YearMonthIntervalType) => true
@@ -196,6 +197,7 @@ object Cast {
     case (_: DayTimeIntervalType, _: DayTimeIntervalType) => true
     case (_: YearMonthIntervalType, _: YearMonthIntervalType) => true
     case (_: AnsiIntervalType, _: IntegralType | _: DecimalType) => true
+    case (_: IntegralType, _: AnsiIntervalType) => true
 
     case (StringType, _: NumericType) => true
     case (BooleanType, _: NumericType) => true
@@ -786,7 +788,6 @@ case class Cast(
     case _: DayTimeIntervalType => buildCast[Long](_, s =>
       IntervalUtils.durationToMicros(IntervalUtils.microsToDuration(s), it.endField))
     case x: IntegralType =>
-      assert(it.startField == it.endField)
       if (x == LongType) {
         b => IntervalUtils.longToDayTimeInterval(
           x.integral.asInstanceOf[Integral[Any]].toLong(b), it.endField)
@@ -804,7 +805,6 @@
     case _: YearMonthIntervalType => buildCast[Int](_, s =>
       IntervalUtils.periodToMonths(IntervalUtils.monthsToPeriod(s), it.endField))
     case x: IntegralType =>
-      assert(it.startField == it.endField)
       if (x == LongType) {
         b => IntervalUtils.longToYearMonthInterval(
           x.integral.asInstanceOf[Integral[Any]].toLong(b), it.endField)
diff --git a/sql/core/src/test/resources/sql-tests/inputs/cast.sql b/sql/core/src/test/resources/sql-tests/inputs/cast.sql
index 66a78ec9473ad..34102a1250780 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/cast.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/cast.sql
@@ -105,7 +105,7 @@ select cast('a' as timestamp_ntz);
 select cast(cast('inf' as double) as timestamp);
 select cast(cast('inf' as float) as timestamp);
 
--- cast ANSI intervals to numerics
+-- cast ANSI intervals to integrals
 select cast(interval '1' year as tinyint);
 select cast(interval '-10-2' year to month as smallint);
 select cast(interval '1000' month as int);
@@ -117,6 +117,18 @@ select cast(interval '10' day as bigint);
 select cast(interval '-1000' month as tinyint);
 select cast(interval '1000000' second as smallint);
 
+-- cast integrals to ANSI intervals
+select cast(1Y as interval year);
+select cast(-122S as interval year to month);
+select cast(1000 as interval month);
+select cast(-10L as interval second);
+select cast(100Y as interval hour to second);
+select cast(-1000S as interval day to second);
+select cast(10 as interval day);
+
+select cast(2147483647 as interval year);
+select cast(-9223372036854775808L as interval day);
+
 -- cast ANSI intervals to decimals
 select cast(interval '-1' year as decimal(10, 0));
 select cast(interval '1.000001' second as decimal(10, 6));
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
index 470a6081c469d..c4b454b135c9e 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
@@ -840,6 +840,80 @@ org.apache.spark.SparkArithmeticException
 [CAST_OVERFLOW] The value INTERVAL '1000000' SECOND of the type "INTERVAL SECOND" cannot be cast to "SMALLINT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
 
 
+-- !query
+select cast(1Y as interval year)
+-- !query schema
+struct
+-- !query output
+1-0
+
+
+-- !query
+select cast(-122S as interval year to month)
+-- !query schema
+struct
+-- !query output
+-10-2
+
+
+-- !query
+select cast(1000 as interval month)
+-- !query schema
+struct
+-- !query output
+83-4
+
+
+-- !query
+select cast(-10L as interval second)
+-- !query schema
+struct
+-- !query output
+-0 00:00:10.000000000
+
+
+-- !query
+select cast(100Y as interval hour to second)
+-- !query schema
+struct
+-- !query output
+0 00:01:40.000000000
+
+
+-- !query
+select cast(-1000S as interval day to second)
+-- !query schema
+struct
+-- !query output
+-0 00:16:40.000000000
+
+
+-- !query
+select cast(10 as interval day)
+-- !query schema
+struct
+-- !query output
+10 00:00:00.000000000
+
+
+-- !query
+select cast(2147483647 as interval year)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkArithmeticException
+[CAST_OVERFLOW] The value 2147483647 of the type "INT" cannot be cast to "INTERVAL YEAR" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
+
+
+-- !query
+select cast(-9223372036854775808L as interval day)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkArithmeticException
+[CAST_OVERFLOW] The value -9223372036854775808L of the type "BIGINT" cannot be cast to "INTERVAL DAY" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
+
+
 -- !query
 select cast(interval '-1' year as decimal(10, 0))
 -- !query schema
diff --git a/sql/core/src/test/resources/sql-tests/results/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
index 911eaff30b938..2b976914bfe98 100644
--- a/sql/core/src/test/resources/sql-tests/results/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
@@ -668,6 +668,80 @@ org.apache.spark.SparkArithmeticException
 [CAST_OVERFLOW] The value INTERVAL '1000000' SECOND of the type "INTERVAL SECOND" cannot be cast to "SMALLINT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
 
 
+-- !query
+select cast(1Y as interval year)
+-- !query schema
+struct
+-- !query output
+1-0
+
+
+-- !query
+select cast(-122S as interval year to month)
+-- !query schema
+struct
+-- !query output
+-10-2
+
+
+-- !query
+select cast(1000 as interval month)
+-- !query schema
+struct
+-- !query output
+83-4
+
+
+-- !query
+select cast(-10L as interval second)
+-- !query schema
+struct
+-- !query output
+-0 00:00:10.000000000
+
+
+-- !query
+select cast(100Y as interval hour to second)
+-- !query schema
+struct
+-- !query output
+0 00:01:40.000000000
+
+
+-- !query
+select cast(-1000S as interval day to second)
+-- !query schema
+struct
+-- !query output
+-0 00:16:40.000000000
+
+
+-- !query
+select cast(10 as interval day)
+-- !query schema
+struct
+-- !query output
+10 00:00:00.000000000
+
+
+-- !query
+select cast(2147483647 as interval year)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkArithmeticException
+[CAST_OVERFLOW] The value 2147483647 of the type "INT" cannot be cast to "INTERVAL YEAR" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
+
+
+-- !query
+select cast(-9223372036854775808L as interval day)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkArithmeticException
+[CAST_OVERFLOW] The value -9223372036854775808L of the type "BIGINT" cannot be cast to "INTERVAL DAY" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
+
+
 -- !query
 select cast(interval '-1' year as decimal(10, 0))
 -- !query schema
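
Background for the casts exercised above (this sketch is not part of the patch): Spark physically stores a day-time interval value as a Long count of microseconds, so casting an integral to, for example, INTERVAL DAY amounts to an overflow-checked multiplication by the number of microseconds per day. Below is a minimal standalone sketch of that idea in Scala, using a hypothetical longToDayInterval helper rather than the real IntervalUtils.longToDayTimeInterval internals:

import java.util.concurrent.TimeUnit

// Illustrative only; the real logic lives in IntervalUtils.longToDayTimeInterval.
object IntegralToIntervalSketch {
  // Microseconds in one day: the scale factor for an INTERVAL DAY end field.
  private val MICROS_PER_DAY: Long = TimeUnit.DAYS.toMicros(1)

  def longToDayInterval(v: Long): Long =
    try {
      // multiplyExact throws ArithmeticException on Long overflow instead of wrapping.
      Math.multiplyExact(v, MICROS_PER_DAY)
    } catch {
      case _: ArithmeticException =>
        throw new ArithmeticException(
          s"The value $v cannot be cast to INTERVAL DAY due to an overflow.")
    }

  def main(args: Array[String]): Unit = {
    println(longToDayInterval(10L))           // 864000000000 micros, i.e. 10 days
    println(longToDayInterval(Long.MinValue)) // throws, mirroring the CAST_OVERFLOW test above
  }
}

The same shape applies to year-month intervals, except the backing value is an Int count of months and the scale factor is 12 for a YEAR end field, which is why cast(2147483647 as interval year) overflows.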