diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 7a2b4e63e133a..8ab570f7e290b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -278,7 +278,7 @@ class Analyzer(
    * 1. if both side are interval, stays the same;
    * 2. else if the left side is date and the right side is interval,
    *    turns it to [[DateAddInterval(l, -r)]];
-   * 3. else if the right side is an interval, turns it to [[TimeSub]];
+   * 3. else if the right side is an interval, turns it to [[TimeAdd(l, -r)]];
    * 4. else if one side is timestamp, turns it to [[SubtractTimestamps]];
    * 5. else if the right side is date, turns it to [[DateDiff]]/[[SubtractDates]];
    * 6. else if the left side is date, turns it to [[DateSub]];
@@ -308,7 +308,7 @@
       case s @ Subtract(l, r) if s.childrenResolved => (l.dataType, r.dataType) match {
         case (CalendarIntervalType, CalendarIntervalType) => s
         case (DateType, CalendarIntervalType) => DateAddInterval(l, UnaryMinus(r))
-        case (_, CalendarIntervalType) => Cast(TimeSub(l, r), l.dataType)
+        case (_, CalendarIntervalType) => Cast(TimeAdd(l, UnaryMinus(r)), l.dataType)
         case (TimestampType, _) => SubtractTimestamps(l, r)
         case (_, TimestampType) => SubtractTimestamps(l, r)
         case (_, DateType) => SubtractDates(l, r)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelper.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelper.scala
index 3faf3403f9a52..21f2985eef523 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelper.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/StreamingJoinHelper.scala
@@ -232,8 +232,6 @@ object StreamingJoinHelper extends PredicateHelper with Logging {
           collect(left, negate) ++ collect(right, !negate)
         case TimeAdd(left, right, _) =>
           collect(left, negate) ++ collect(right, negate)
-        case TimeSub(left, right, _) =>
-          collect(left, negate) ++ collect(right, !negate)
         case UnaryMinus(child) =>
           collect(child, !negate)
         case CheckOverflow(child, _, _) =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
index c6e3f56766a8a..d970bf466fb81 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
@@ -831,10 +831,7 @@ object TypeCoercion {
       case s @ SubtractTimestamps(_, DateType()) =>
         s.copy(startTimestamp = Cast(s.startTimestamp, TimestampType))
 
-      case t @ TimeAdd(DateType(), _, _) => t.copy(start = Cast(t.start, TimestampType))
       case t @ TimeAdd(StringType(), _, _) => t.copy(start = Cast(t.start, TimestampType))
-      case t @ TimeSub(DateType(), _, _) => t.copy(start = Cast(t.start, TimestampType))
-      case t @ TimeSub(StringType(), _, _) => t.copy(start = Cast(t.start, TimestampType))
     }
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index b131b24a49703..26458a674e96e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -1330,41 +1330,6 @@ case class FromUTCTimestamp(left: Expression, right: Expression)
   }
 }
 
-/**
- * Subtracts an interval from timestamp.
- */
-case class TimeSub(start: Expression, interval: Expression, timeZoneId: Option[String] = None)
-  extends BinaryExpression with TimeZoneAwareExpression with ExpectsInputTypes {
-
-  def this(start: Expression, interval: Expression) = this(start, interval, None)
-
-  override def left: Expression = start
-  override def right: Expression = interval
-
-  override def toString: String = s"$left - $right"
-  override def sql: String = s"${left.sql} - ${right.sql}"
-  override def inputTypes: Seq[AbstractDataType] = Seq(TimestampType, CalendarIntervalType)
-
-  override def dataType: DataType = TimestampType
-
-  override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
-    copy(timeZoneId = Option(timeZoneId))
-
-  override def nullSafeEval(start: Any, interval: Any): Any = {
-    val itvl = interval.asInstanceOf[CalendarInterval]
-    DateTimeUtils.timestampAddInterval(
-      start.asInstanceOf[Long], 0 - itvl.months, 0 - itvl.days, 0 - itvl.microseconds, zoneId)
-  }
-
-  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
-    val zid = ctx.addReferenceObj("zoneId", zoneId, classOf[ZoneId].getName)
-    val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
-    defineCodeGen(ctx, ev, (sd, i) => {
-      s"""$dtu.timestampAddInterval($sd, 0 - $i.months, 0 - $i.days, 0 - $i.microseconds, $zid)"""
-    })
-  }
-}
-
 /**
  * Returns the date that is num_months after start_date.
  */
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index 90939f3dd9354..6e8397d12da78 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -465,54 +465,54 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
       sdf.setTimeZone(TimeZone.getTimeZone(zid))
 
       checkEvaluation(
-        TimeSub(
+        TimeAdd(
           Literal(new Timestamp(sdf.parse("2016-03-31 10:00:00.000").getTime)),
-          Literal(new CalendarInterval(1, 0, 0)),
+          UnaryMinus(Literal(new CalendarInterval(1, 0, 0))),
           timeZoneId),
         DateTimeUtils.fromJavaTimestamp(
           new Timestamp(sdf.parse("2016-02-29 10:00:00.000").getTime)))
       checkEvaluation(
-        TimeSub(
+        TimeAdd(
           Literal(new Timestamp(sdf.parse("2016-03-31 10:00:00.000").getTime)),
-          Literal(new CalendarInterval(1, 1, 0)),
+          UnaryMinus(Literal(new CalendarInterval(1, 1, 0))),
           timeZoneId),
         DateTimeUtils.fromJavaTimestamp(
           new Timestamp(sdf.parse("2016-02-28 10:00:00.000").getTime)))
       checkEvaluation(
-        TimeSub(
+        TimeAdd(
           Literal(new Timestamp(sdf.parse("2016-03-30 00:00:01.000").getTime)),
-          Literal(new CalendarInterval(1, 0, 2000000.toLong)),
+          UnaryMinus(Literal(new CalendarInterval(1, 0, 2000000.toLong))),
           timeZoneId),
         DateTimeUtils.fromJavaTimestamp(
          new Timestamp(sdf.parse("2016-02-28 23:59:59.000").getTime)))
       checkEvaluation(
-        TimeSub(
+        TimeAdd(
           Literal(new Timestamp(sdf.parse("2016-03-30 00:00:01.000").getTime)),
-          Literal(new CalendarInterval(1, 1, 2000000.toLong)),
+          UnaryMinus(Literal(new CalendarInterval(1, 1, 2000000.toLong))),
           timeZoneId),
         DateTimeUtils.fromJavaTimestamp(
           new Timestamp(sdf.parse("2016-02-27 23:59:59.000").getTime)))
       checkEvaluation(
-        TimeSub(
+        TimeAdd(
           Literal.create(null, TimestampType),
-          Literal(new CalendarInterval(1, 2, 123000L)),
+          UnaryMinus(Literal(new CalendarInterval(1, 2, 123000L))),
           timeZoneId),
         null)
       checkEvaluation(
-        TimeSub(
+        TimeAdd(
           Literal(new Timestamp(sdf.parse("2016-01-29 10:00:00.000").getTime)),
-          Literal.create(null, CalendarIntervalType),
+          UnaryMinus(Literal.create(null, CalendarIntervalType)),
           timeZoneId),
         null)
       checkEvaluation(
-        TimeSub(
+        TimeAdd(
           Literal.create(null, TimestampType),
-          Literal.create(null, CalendarIntervalType),
+          UnaryMinus(Literal.create(null, CalendarIntervalType)),
           timeZoneId),
         null)
-      checkConsistencyBetweenInterpretedAndCodegen(
-        (start: Expression, interval: Expression) => TimeSub(start, interval, timeZoneId),
+      checkConsistencyBetweenInterpretedAndCodegen((start: Expression, interval: Expression) =>
+        TimeAdd(start, UnaryMinus(interval), timeZoneId),
         TimestampType, CalendarIntervalType)
     }
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionSQLBuilderSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionSQLBuilderSuite.scala
index ab0ef22bb746f..7ffbae805ca43 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionSQLBuilderSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionSQLBuilderSuite.scala
@@ -172,10 +172,6 @@ class ExpressionSQLBuilderSuite extends SparkFunSuite {
       "`a` + INTERVAL '1 hours'"
     )
 
-    checkSQL(
-      TimeSub('a, interval),
-      "`a` - INTERVAL '1 hours'"
-    )
     checkSQL(
       DateAddInterval('a, interval),
       "`a` + INTERVAL '1 hours'"
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StreamingSymmetricHashJoinHelper.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StreamingSymmetricHashJoinHelper.scala
index cdd3a854c9a90..71792facf698a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StreamingSymmetricHashJoinHelper.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StreamingSymmetricHashJoinHelper.scala
@@ -23,13 +23,11 @@ import org.apache.spark.{Partition, SparkContext, TaskContext}
 import org.apache.spark.internal.Logging
 import org.apache.spark.rdd.{RDD, ZippedPartitionsBaseRDD, ZippedPartitionsPartition, ZippedPartitionsRDD2}
 import org.apache.spark.sql.catalyst.analysis.StreamingJoinHelper
-import org.apache.spark.sql.catalyst.expressions.{Add, And, Attribute, AttributeReference, AttributeSet, BoundReference, Cast, CheckOverflow, Expression, ExpressionSet, GreaterThan, GreaterThanOrEqual, LessThan, LessThanOrEqual, Literal, Multiply, NamedExpression, PreciseTimestampConversion, PredicateHelper, Subtract, TimeAdd, TimeSub, UnaryMinus}
+import org.apache.spark.sql.catalyst.expressions.{And, Attribute, AttributeSet, BoundReference, Expression, NamedExpression, PredicateHelper}
 import org.apache.spark.sql.catalyst.plans.logical.EventTimeWatermark._
 import org.apache.spark.sql.execution.SparkPlan
 import org.apache.spark.sql.execution.streaming.WatermarkSupport.watermarkExpression
 import org.apache.spark.sql.execution.streaming.state.{StateStoreCoordinatorRef, StateStoreProvider, StateStoreProviderId}
-import org.apache.spark.sql.types._
-import org.apache.spark.unsafe.types.CalendarInterval
 
 
 /**
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out
index 81a73a6377715..b95891f321a76 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out
@@ -152,7 +152,7 @@
 -- !query schema
-struct
+struct
 -- !query output
 2011-11-09 11:11:11
@@ -178,7 +178,7 @@ requirement failed: Cannot add hours, minutes or seconds, milliseconds, microseconds to a date
 -- !query
 select '2011-11-11' - interval '2' day
 -- !query schema
-struct
+struct
 -- !query output
 2011-11-09 00:00:00
@@ -186,7 +186,7 @@ struct
 -- !query
 select '2011-11-11 11:11:11' - interval '2' second
 -- !query schema
-struct
+struct
 -- !query output
 2011-11-11 11:11:09
@@ -194,7 +194,7 @@
 -- !query schema
-struct
+struct
 -- !query output
 NULL
@@ -205,7 +205,7 @@ select 1 - interval '2' second
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '1 - INTERVAL '2 seconds'' due to data type mismatch: argument 1 requires timestamp type, however, '1' is of int type.; line 1 pos 7
+cannot resolve '1 + (- INTERVAL '2 seconds')' due to data type mismatch: argument 1 requires timestamp type, however, '1' is of int type.; line 1 pos 7
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index e8201d46924da..d4a3f0ae004ac 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -705,7 +705,7 @@ select
   interval '2-2' year to month + tsval
 from interval_arithmetic
 -- !query schema
-struct
+struct
 -- !query output
 2012-01-01 00:00:00	2009-11-01 00:00:00	2014-03-01 00:00:00	2014-03-01 00:00:00	2009-11-01 00:00:00	2009-11-01 00:00:00	2014-03-01 00:00:00
@@ -749,7 +749,7 @@ select
   interval '99 11:22:33.123456789' day to second + tsval
 from interval_arithmetic
 -- !query schema
-struct
+struct
 -- !query output
 2012-01-01 00:00:00	2011-09-23 12:37:26.876544	2012-04-09 11:22:33.123456	2012-04-09 11:22:33.123456	2011-09-23 12:37:26.876544	2011-09-23 12:37:26.876544	2012-04-09 11:22:33.123456
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
index 2e600850c48b9..20f66eef9e68a 100755
--- a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
@@ -126,7 +126,7 @@
 -- !query schema
-struct
+struct
 -- !query output
 2011-11-09 11:11:11
@@ -150,7 +150,7 @@ struct
 -- !query
 select '2011-11-11' - interval '2' day
 -- !query schema
-struct
+struct
 -- !query output
 2011-11-09 00:00:00
@@ -158,7 +158,7 @@ struct
 -- !query
 select '2011-11-11 11:11:11' - interval '2' second
 -- !query schema
-struct
+struct
 -- !query output
 2011-11-11 11:11:09
@@ -166,7 +166,7 @@
 -- !query schema
-struct
+struct
 -- !query output
 NULL
@@ -177,7 +177,7 @@ select 1 - interval '2' second
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '1 - INTERVAL '2 seconds'' due to data type mismatch: argument 1 requires timestamp type, however, '1' is of int type.; line 1 pos 7
+cannot resolve '1 + (- INTERVAL '2 seconds')' due to data type mismatch: argument 1 requires timestamp type, however, '1' is of int type.; line 1 pos 7
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
index 84e3fb3c238a4..7a9f08b26714b 100644
--- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
@@ -684,7 +684,7 @@ select
   interval '2-2' year to month + tsval
 from interval_arithmetic
 -- !query schema
-struct
+struct
 -- !query output
 2012-01-01 00:00:00	2009-11-01 00:00:00	2014-03-01 00:00:00	2014-03-01 00:00:00	2009-11-01 00:00:00	2009-11-01 00:00:00	2014-03-01 00:00:00
@@ -727,7 +727,7 @@ select
   interval '99 11:22:33.123456789' day to second + tsval
 from interval_arithmetic
 -- !query schema
-struct
+struct
 -- !query output
 2012-01-01 00:00:00	2011-09-23 12:37:26.876544	2012-04-09 11:22:33.123456	2012-04-09 11:22:33.123456	2011-09-23 12:37:26.876544	2011-09-23 12:37:26.876544	2012-04-09 11:22:33.123456
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out
index 7b1fcddfdac7d..db6ffd5d5e175 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out
@@ -242,7 +242,7 @@ select cast(1 as tinyint) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS TINYINT) - INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7
+cannot resolve 'CAST(1 AS TINYINT) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7
 
 
 -- !query
@@ -251,7 +251,7 @@ select cast(1 as smallint) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS SMALLINT) - INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7
+cannot resolve 'CAST(1 AS SMALLINT) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7
 
 
 -- !query
@@ -260,7 +260,7 @@ select cast(1 as int) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS INT) - INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7
+cannot resolve 'CAST(1 AS INT) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7
 
 
 -- !query
@@ -269,7 +269,7 @@ select cast(1 as bigint) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BIGINT) - INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7
+cannot resolve 'CAST(1 AS BIGINT) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7
 
 
 -- !query
@@ -278,7 +278,7 @@ select cast(1 as float) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS FLOAT) - INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7
+cannot resolve 'CAST(1 AS FLOAT) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7
 
 
 -- !query
@@ -287,7 +287,7 @@ select cast(1 as double) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS DOUBLE) - INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7
+cannot resolve 'CAST(1 AS DOUBLE) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7
 
 
 -- !query
@@ -296,13 +296,13 @@ select cast(1 as decimal(10, 0)) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS DECIMAL(10,0)) - INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
+cannot resolve 'CAST(1 AS DECIMAL(10,0)) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
 
 
 -- !query
 select cast('2017-12-11' as string) - interval 2 day
 -- !query schema
-struct
+struct
 -- !query output
 2017-12-09 00:00:00
@@ -310,7 +310,7 @@
 -- !query schema
-struct
+struct
 -- !query output
 2017-12-09 09:30:00
@@ -321,7 +321,7 @@ select cast('1' as binary) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST('1' AS BINARY) - INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7
+cannot resolve 'CAST('1' AS BINARY) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7
 
 
 -- !query
@@ -330,13 +330,13 @@ select cast(1 as boolean) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BOOLEAN) - INTERVAL '2 days'' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7
+cannot resolve 'CAST(1 AS BOOLEAN) + (- INTERVAL '2 days')' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7
 
 
 -- !query
 select cast('2017-12-11 09:30:00.0' as timestamp) - interval 2 day
 -- !query schema
-struct
+struct
 -- !query output
 2017-12-09 09:30:00
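
Note (not part of the patch): the whole change leans on the identity that subtracting a calendar interval equals adding its negation, which is exactly what the removed TimeSub.nullSafeEval computed by negating the months, days, and microseconds fields. A minimal standalone Scala sketch of that equivalence, using java.time rather than Spark internals; the object name and the two-component interval split are illustrative assumptions, not Spark APIs:

import java.time.{Duration, LocalDateTime, Period}

// Sketch only: mirrors CalendarInterval's (months, days, microseconds) split
// with java.time types, to show that ts - interval == ts + (-interval),
// the identity behind replacing TimeSub(l, r) with TimeAdd(l, UnaryMinus(r)).
object NegatedAddDemo extends App {
  val ts = LocalDateTime.parse("2016-03-31T10:00:00")
  val months = Period.ofMonths(1)     // calendar part of the interval
  val micros = Duration.ofSeconds(2)  // time part of the interval

  // Direct subtraction of each component...
  val subtracted = ts.minus(months).minus(micros)
  // ...equals addition of the negated components, as TimeAdd now receives.
  val addedNegation = ts.plus(months.negated()).plus(micros.negated())

  assert(subtracted == addedNegation)
  println(subtracted) // 2016-02-29T09:59:58 (day clamped; 2016 is a leap year)
}

The clamping behavior is the point of using month arithmetic in the example: both forms clamp 2016-03-31 minus one month to 2016-02-29 the same way, so the rewrite is behavior-preserving rather than merely algebraic.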