@@ -18,7 +18,9 @@
package org.apache.spark.sql.catalyst.analysis

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.expressions.{Cast, Expression, Literal, RuntimeReplaceable, SubqueryExpression, Unevaluable}
import org.apache.spark.sql.catalyst.expressions.{Alias, Cast, Expression, Literal, SubqueryExpression, Unevaluable}
import org.apache.spark.sql.catalyst.optimizer.{ComputeCurrentTime, ReplaceExpressions}
import org.apache.spark.sql.catalyst.plans.logical.{OneRowRelation, Project}
import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.types.TimestampType
import org.apache.spark.sql.util.CaseInsensitiveStringMap
@@ -42,14 +44,19 @@ object TimeTravelSpec {
throw QueryCompilationErrors.invalidTimestampExprForTimeTravel(
"INVALID_TIME_TRAVEL_TIMESTAMP_EXPR.INPUT", ts)
}
val tsToEval = ts.transform {
case r: RuntimeReplaceable => r.replacement
val tsToEval = {
val fakeProject = Project(Seq(Alias(ts, "ts")()), OneRowRelation())
ComputeCurrentTime(ReplaceExpressions(fakeProject)).asInstanceOf[Project]
.expressions.head.asInstanceOf[Alias].child
}
tsToEval.foreach {
Contributor:
Btw, it seems like "pre-eval" is needed in a couple of places (e.g. inline tables).

In the future, it would be good if we could add a helper that does all the safe pre-evals:

  1. Runtime replaceable
  2. ComputeCurrentTime
  3. ReplaceCurrentTime

In short, there are places where finishAnalysis rules, which are part of the QO phase, need to be executed before entering QO.
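
A minimal sketch of what such a helper could look like (the names `PreEvalHelpers` and `preEvalForAnalysis` are hypothetical, not part of this PR); it simply factors out the fake-Project trick that `TimeTravelSpec` uses above:

```scala
import org.apache.spark.sql.catalyst.expressions.{Alias, Expression}
import org.apache.spark.sql.catalyst.optimizer.{ComputeCurrentTime, ReplaceExpressions}
import org.apache.spark.sql.catalyst.plans.logical.{OneRowRelation, Project}

object PreEvalHelpers {
  // Hypothetical helper: run the "finish analysis" optimizer rules on a single
  // expression before the plan enters the optimizer. The expression is wrapped
  // in a throwaway Project over OneRowRelation so that rule batches written for
  // logical plans (ReplaceExpressions, ComputeCurrentTime) can be applied, and
  // the rewritten expression is then pulled back out of the Alias.
  def preEvalForAnalysis(expr: Expression): Expression = {
    val fakeProject = Project(Seq(Alias(expr, "expr")()), OneRowRelation())
    ComputeCurrentTime(ReplaceExpressions(fakeProject))
      .asInstanceOf[Project]
      .expressions.head.asInstanceOf[Alias].child
  }
}
```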

case _: Unevaluable =>
throw QueryCompilationErrors.invalidTimestampExprForTimeTravel(
"INVALID_TIME_TRAVEL_TIMESTAMP_EXPR.UNEVALUABLE", ts)
case e if !e.deterministic =>
throw QueryCompilationErrors.invalidTimestampExprForTimeTravel(
"INVALID_TIME_TRAVEL_TIMESTAMP_EXPR.NON_DETERMINISTIC", ts)
case _ =>
}
val tz = Some(sessionLocalTimeZone)
// Set `ansiEnabled` to false, so that it can return null for invalid input and we can provide
@@ -3003,6 +3003,19 @@ class DataSourceV2SQLSuiteV1Filter
.collect()
assert(res10 === Array(Row(7), Row(8)))

checkError(
exception = intercept[AnalysisException] {
// `current_date()` is a valid expression for time travel timestamp, but the test uses
// a fake time travel implementation that only supports two hardcoded timestamp values.
sql("SELECT * FROM t TIMESTAMP AS OF current_date()")
},
errorClass = "TABLE_OR_VIEW_NOT_FOUND",
parameters = Map("relationName" -> "`t`"),
context = ExpectedContext(
fragment = "t",
start = 14,
stop = 14))

checkError(
exception = intercept[AnalysisException] {
sql("SELECT * FROM t TIMESTAMP AS OF INTERVAL 1 DAY").collect()