diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TimeTravelSpec.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TimeTravelSpec.scala
index 8bfcd955497b9..fecec238145e1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TimeTravelSpec.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TimeTravelSpec.scala
@@ -18,7 +18,9 @@ package org.apache.spark.sql.catalyst.analysis
 
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.expressions.{Cast, Expression, Literal, RuntimeReplaceable, SubqueryExpression, Unevaluable}
+import org.apache.spark.sql.catalyst.expressions.{Alias, Cast, Expression, Literal, SubqueryExpression, Unevaluable}
+import org.apache.spark.sql.catalyst.optimizer.{ComputeCurrentTime, ReplaceExpressions}
+import org.apache.spark.sql.catalyst.plans.logical.{OneRowRelation, Project}
 import org.apache.spark.sql.errors.QueryCompilationErrors
 import org.apache.spark.sql.types.TimestampType
 import org.apache.spark.sql.util.CaseInsensitiveStringMap
 
@@ -42,14 +44,19 @@ object TimeTravelSpec {
         throw QueryCompilationErrors.invalidTimestampExprForTimeTravel(
           "INVALID_TIME_TRAVEL_TIMESTAMP_EXPR.INPUT", ts)
       }
-      val tsToEval = ts.transform {
-        case r: RuntimeReplaceable => r.replacement
+      val tsToEval = {
+        val fakeProject = Project(Seq(Alias(ts, "ts")()), OneRowRelation())
+        ComputeCurrentTime(ReplaceExpressions(fakeProject)).asInstanceOf[Project]
+          .expressions.head.asInstanceOf[Alias].child
+      }
+      tsToEval.foreach {
         case _: Unevaluable =>
           throw QueryCompilationErrors.invalidTimestampExprForTimeTravel(
             "INVALID_TIME_TRAVEL_TIMESTAMP_EXPR.UNEVALUABLE", ts)
         case e if !e.deterministic =>
           throw QueryCompilationErrors.invalidTimestampExprForTimeTravel(
             "INVALID_TIME_TRAVEL_TIMESTAMP_EXPR.NON_DETERMINISTIC", ts)
+        case _ =>
       }
       val tz = Some(sessionLocalTimeZone)
       // Set `ansiEnabled` to false, so that it can return null for invalid input and we can provide
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index f21c0c2b52fab..93f199dfd5854 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -3003,6 +3003,19 @@ class DataSourceV2SQLSuiteV1Filter
       .collect()
     assert(res10 === Array(Row(7), Row(8)))
 
+    checkError(
+      exception = intercept[AnalysisException] {
+        // `current_date()` is a valid expression for time travel timestamp, but the test uses
+        // a fake time travel implementation that only supports two hardcoded timestamp values.
+        sql("SELECT * FROM t TIMESTAMP AS OF current_date()")
+      },
+      errorClass = "TABLE_OR_VIEW_NOT_FOUND",
+      parameters = Map("relationName" -> "`t`"),
+      context = ExpectedContext(
+        fragment = "t",
+        start = 14,
+        stop = 14))
+
     checkError(
       exception = intercept[AnalysisException] {
         sql("SELECT * FROM t TIMESTAMP AS OF INTERVAL 1 DAY").collect()