-
Notifications
You must be signed in to change notification settings - Fork 29k
[SPARK-29838][SQL] PostgreSQL dialect: cast to timestamp #26472
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
d705adc
8b56ae9
00ffde3
6a93060
1024d37
700143d
111d673
4fc3d71
08c71f1
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -16,29 +16,52 @@ | |
| */ | ||
| package org.apache.spark.sql.catalyst.expressions.postgreSQL | ||
|
|
||
| import java.time.ZoneId | ||
|
|
||
| import org.apache.spark.sql.AnalysisException | ||
| import org.apache.spark.sql.catalyst.analysis.TypeCheckResult | ||
| import org.apache.spark.sql.catalyst.expressions.{CastBase, Expression, TimeZoneAwareExpression} | ||
| import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, JavaCode} | ||
| import org.apache.spark.sql.catalyst.expressions.codegen.Block._ | ||
| import org.apache.spark.sql.catalyst.util.DateTimeUtils | ||
| import org.apache.spark.sql.catalyst.util.postgreSQL.StringUtils | ||
| import org.apache.spark.sql.types._ | ||
| import org.apache.spark.unsafe.types.UTF8String | ||
|
|
||
| case class PostgreCastToBoolean(child: Expression, timeZoneId: Option[String]) | ||
| extends CastBase { | ||
| abstract class PostgreCastBase(toType: DataType) extends CastBase { | ||
|
|
||
| override protected def ansiEnabled = | ||
| throw new UnsupportedOperationException("PostgreSQL dialect doesn't support ansi mode") | ||
| def fromTypes: TypeCollection | ||
|
|
||
| override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = | ||
| copy(timeZoneId = Option(timeZoneId)) | ||
| override def dataType: DataType = toType | ||
|
|
||
| override protected def ansiEnabled: Boolean = | ||
| throw new UnsupportedOperationException("PostgreSQL dialect doesn't support ansi mode") | ||
|
|
||
| override def checkInputDataTypes(): TypeCheckResult = child.dataType match { | ||
| case StringType | IntegerType | NullType => | ||
| override def checkInputDataTypes(): TypeCheckResult = { | ||
| if (!fromTypes.acceptsType(child.dataType)) { | ||
| TypeCheckResult.TypeCheckFailure( | ||
| s"cannot cast type ${child.dataType.simpleString} to ${toType.simpleString}") | ||
| } else { | ||
| TypeCheckResult.TypeCheckSuccess | ||
| case _ => | ||
| TypeCheckResult.TypeCheckFailure(s"cannot cast type ${child.dataType} to boolean") | ||
| } | ||
| } | ||
|
|
||
| override def nullable: Boolean = child.nullable | ||
|
|
||
| override def sql: String = s"CAST(${child.sql} AS ${toType.sql})" | ||
|
|
||
| override def toString: String = | ||
| s"PostgreCastTo${toType.simpleString}($child as ${toType.simpleString})" | ||
| } | ||
|
|
||
| case class PostgreCastToBoolean(child: Expression, timeZoneId: Option[String]) | ||
| extends PostgreCastBase(BooleanType) { | ||
|
|
||
| override def fromTypes: TypeCollection = TypeCollection(StringType, IntegerType, NullType) | ||
|
|
||
| override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = | ||
| copy(timeZoneId = Option(timeZoneId)) | ||
|
|
||
| override def castToBoolean(from: DataType): Any => Any = from match { | ||
| case StringType => | ||
| buildCast[UTF8String](_, str => { | ||
|
|
@@ -58,7 +81,7 @@ case class PostgreCastToBoolean(child: Expression, timeZoneId: Option[String]) | |
| override def castToBooleanCode(from: DataType): CastFunction = from match { | ||
| case StringType => | ||
| val stringUtils = inline"${StringUtils.getClass.getName.stripSuffix("$")}" | ||
| (c, evPrim, evNull) => | ||
| (c, evPrim, _) => | ||
| code""" | ||
| if ($stringUtils.isTrueString($c.trim().toLowerCase())) { | ||
| $evPrim = true; | ||
|
|
@@ -68,16 +91,48 @@ case class PostgreCastToBoolean(child: Expression, timeZoneId: Option[String]) | |
| throw new IllegalArgumentException("invalid input syntax for type boolean: $c"); | ||
| } | ||
| """ | ||
|
|
||
| case IntegerType => | ||
| super.castToBooleanCode(from) | ||
| } | ||
| } | ||
|
|
||
| override def dataType: DataType = BooleanType | ||
| case class PostgreCastToTimestamp(child: Expression, timeZoneId: Option[String]) | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. btw, we need to define a new rule and a new cast expr for each Pg cast pattern? I mean we cannot define all the Pg cast patterns in a single rule and a cast expr? cc: @cloud-fan @Ngone51
Member
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I think we can and should combine them into a single one(both rule and expression) when more types get in. Just like the original
Member
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. yea, I personally think so. @cloud-fan |
||
| extends PostgreCastBase(TimestampType) { | ||
|
|
||
| override def nullable: Boolean = child.nullable | ||
| override def fromTypes: TypeCollection = TypeCollection(StringType, DateType, NullType) | ||
|
|
||
| override def toString: String = s"PostgreCastToBoolean($child as ${dataType.simpleString})" | ||
| override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = | ||
| copy(timeZoneId = Option(timeZoneId)) | ||
|
|
||
| override def sql: String = s"CAST(${child.sql} AS ${dataType.sql})" | ||
| override def castToTimestamp(from: DataType): Any => Any = from match { | ||
| case StringType => | ||
| buildCast[UTF8String](_, utfs => DateTimeUtils.stringToTimestamp(utfs, zoneId) | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I believe that PostgreSQL could correctly parse string
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Thanks for your suggestion. I will check this.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Spark results with NULL for all of them.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. @maropu kindly review the latest changes and give your feedback on supporting the above queries. Do we need to support them in this PR? If yes, we need to list all the timestamp formats which PostgreSQL supports but Spark doesn't.
Member
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I personally think that the support above is not a main issue of this PR, so it's better to separate the two pieces of work: the timestamp cast support and the timestamp format support for the pg dialect. |
||
| .getOrElse(throw new | ||
| IllegalArgumentException(s"invalid input syntax for type timestamp:$utfs"))) | ||
| case DateType => | ||
| super.castToTimestamp(from) | ||
| } | ||
|
|
||
| override def castToTimestampCode( | ||
| from: DataType, | ||
| ctx: CodegenContext): CastFunction = from match { | ||
| case StringType => | ||
| val zoneIdClass = classOf[ZoneId] | ||
| val zid = JavaCode.global( | ||
| ctx.addReferenceObj("zoneId", zoneId, zoneIdClass.getName), | ||
| zoneIdClass) | ||
| val longOpt = ctx.freshVariable("longOpt", classOf[Option[Long]]) | ||
| (c, evPrim, _) => | ||
| code""" | ||
| scala.Option<Long> $longOpt = | ||
| org.apache.spark.sql.catalyst.util.DateTimeUtils.stringToTimestamp($c, $zid); | ||
| if ($longOpt.isDefined()) { | ||
| $evPrim = ((Long) $longOpt.get()).longValue(); | ||
| } else { | ||
| throw new IllegalArgumentException(s"invalid input syntax for type timestamp:$c"); | ||
| } | ||
| """ | ||
| case DateType => | ||
| super.castToTimestampCode(from, ctx) | ||
| } | ||
| } | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,10 @@ | ||
| SELECT CAST(1 AS timestamp); | ||
| SELECT CAST(1.1 AS timestamp); | ||
| SELECT CAST(CAST(1 AS float) AS timestamp); | ||
| SELECT CAST(CAST(1 AS boolean) AS timestamp); | ||
| SELECT CAST(CAST(1 AS byte) AS timestamp); | ||
| SELECT CAST(CAST(1 AS short) AS timestamp); | ||
| SELECT CAST(CAST(1 AS double) AS timestamp); | ||
| SELECT CAST(CAST('2019' AS date) AS timestamp); | ||
| SELECT CAST(NULL AS timestamp); | ||
| SELECT CAST('2019' AS timestamp); |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,88 @@ | ||
| -- Number of queries: 10 | ||
|
|
||
|
|
||
| -- !query 0 | ||
| SELECT CAST(1 AS timestamp) | ||
| -- !query 0 schema | ||
| struct<> | ||
| -- !query 0 output | ||
| org.apache.spark.sql.AnalysisException | ||
| cannot resolve 'CAST(1 AS TIMESTAMP)' due to data type mismatch: cannot cast type int to timestamp | ||
|
|
||
|
|
||
| -- !query 1 | ||
| SELECT CAST(1.1 AS timestamp) | ||
| -- !query 1 schema | ||
| struct<> | ||
| -- !query 1 output | ||
| org.apache.spark.sql.AnalysisException | ||
| cannot resolve 'CAST(1.1BD AS TIMESTAMP)' due to data type mismatch: cannot cast type decimal(2,1) to timestamp | ||
|
|
||
|
|
||
| -- !query 2 | ||
| SELECT CAST(CAST(1 AS float) AS timestamp) | ||
| -- !query 2 schema | ||
| struct<> | ||
| -- !query 2 output | ||
| org.apache.spark.sql.AnalysisException | ||
| cannot resolve 'CAST(CAST(1 AS FLOAT) AS TIMESTAMP)' due to data type mismatch: cannot cast type float to timestamp | ||
|
|
||
|
|
||
| -- !query 3 | ||
| SELECT CAST(CAST(1 AS boolean) AS timestamp) | ||
| -- !query 3 schema | ||
| struct<> | ||
| -- !query 3 output | ||
| org.apache.spark.sql.AnalysisException | ||
| cannot resolve 'CAST(CAST(1 AS BOOLEAN) AS TIMESTAMP)' due to data type mismatch: cannot cast type boolean to timestamp | ||
|
|
||
|
|
||
| -- !query 4 | ||
| SELECT CAST(CAST(1 AS byte) AS timestamp) | ||
| -- !query 4 schema | ||
| struct<> | ||
| -- !query 4 output | ||
| org.apache.spark.sql.AnalysisException | ||
| cannot resolve 'CAST(CAST(1 AS TINYINT) AS TIMESTAMP)' due to data type mismatch: cannot cast type tinyint to timestamp | ||
|
|
||
|
|
||
| -- !query 5 | ||
| SELECT CAST(CAST(1 AS short) AS timestamp) | ||
| -- !query 5 schema | ||
| struct<> | ||
| -- !query 5 output | ||
| org.apache.spark.sql.AnalysisException | ||
| cannot resolve 'CAST(CAST(1 AS SMALLINT) AS TIMESTAMP)' due to data type mismatch: cannot cast type smallint to timestamp | ||
|
|
||
|
|
||
| -- !query 6 | ||
| SELECT CAST(CAST(1 AS double) AS timestamp) | ||
| -- !query 6 schema | ||
| struct<> | ||
| -- !query 6 output | ||
| org.apache.spark.sql.AnalysisException | ||
| cannot resolve 'CAST(CAST(1 AS DOUBLE) AS TIMESTAMP)' due to data type mismatch: cannot cast type double to timestamp | ||
|
|
||
|
|
||
| -- !query 7 | ||
| SELECT CAST(CAST('2019' AS date) AS timestamp) | ||
| -- !query 7 schema | ||
| struct<CAST(CAST(2019 AS DATE) AS TIMESTAMP): timestamp> | ||
| -- !query 7 output | ||
| 2019-01-01 00:00:00.0 | ||
|
|
||
|
|
||
| -- !query 8 | ||
| SELECT CAST(NULL AS timestamp) | ||
| -- !query 8 schema | ||
| struct<CAST(NULL AS TIMESTAMP): timestamp> | ||
| -- !query 8 output | ||
|
|
||
|
|
||
|
|
||
| -- !query 9 | ||
| SELECT CAST('2019' AS timestamp) | ||
| -- !query 9 schema | ||
| struct<CAST(2019 AS TIMESTAMP): timestamp> | ||
| -- !query 9 output | ||
| 2019-01-01 00:00:00.0 |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -39,4 +39,11 @@ class PostgreSQLDialectQuerySuite extends QueryTest with SharedSparkSession { | |
| intercept[IllegalArgumentException](sql(s"select cast('$input' as boolean)").collect()) | ||
| } | ||
| } | ||
|
|
||
| test("cast to timestamp") { | ||
| Seq(1, 0.1, 1.toDouble, 5.toFloat, true, 3.toByte, 4.toShort) foreach { value => | ||
| intercept[IllegalArgumentException](sql(s"select cast('$value' as timestamp)").collect()) | ||
| } | ||
| } | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Can you move these tests to
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I have moved these test cases.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Need to delete this test case. |
||
| } | ||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.