[SPARK-33474][SQL] Support TypeConstructed partition spec value #30421
Changes from 4 commits
In `AstBuilder` (the parser change):

```diff
@@ -36,8 +36,8 @@ import org.apache.spark.sql.catalyst.expressions.aggregate.{First, Last}
 import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.plans.logical._
-import org.apache.spark.sql.catalyst.util.IntervalUtils
-import org.apache.spark.sql.catalyst.util.IntervalUtils.IntervalUnit
+import org.apache.spark.sql.catalyst.util.{DateFormatter, IntervalUtils, TimestampFormatter}
+import org.apache.spark.sql.catalyst.util.DateTimeUtils.{getZoneId, stringToDate, stringToTimestamp}
 import org.apache.spark.sql.connector.catalog.{SupportsNamespaces, TableCatalog}
 import org.apache.spark.sql.connector.catalog.TableChange.ColumnPosition
@@ -503,13 +503,32 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logging
     }
   }
 
+  def convertTypeConstructedLiteralToString(literal: Literal): String = literal match {
+    case Literal(data: Int, dataType: DateType) =>
+      UTF8String.fromString(
+        DateFormatter(getZoneId(SQLConf.get.sessionLocalTimeZone))
+          .format(data)).toString
+    case Literal(data: Long, dataType: TimestampType) =>
+      UTF8String.fromString(
+        TimestampFormatter.getFractionFormatter(getZoneId(SQLConf.get.sessionLocalTimeZone))
+          .format(data)).toString
+    case Literal(data: CalendarInterval, dataType: CalendarIntervalType) =>
+      UTF8String.fromString(data.toString).toString
+    case Literal(data: Array[Byte], dataType: BinaryType) =>
+      UTF8String.fromBytes(data).toString
+    case Literal(data, dataType) =>
+      UTF8String.fromString(data.toString).toString
+  }
+
   /**
    * Convert a constant of any type into a string. This is typically used in DDL commands, and its
    * main purpose is to prevent slight differences due to back to back conversions i.e.:
    * String -> Literal -> String.
    */
   protected def visitStringConstant(ctx: ConstantContext): String = withOrigin(ctx) {
     ctx match {
+      case l: TypeConstructorContext =>
+        convertTypeConstructedLiteralToString(visitTypeConstructor(l))
       case s: StringLiteralContext => createString(s)
       case o => o.getText
     }
   }
```
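For context, Spark stores a `DATE` value internally as an `Int` counting days since the Unix epoch (and a `TIMESTAMP` as a `Long` of microseconds), so `convertTypeConstructedLiteralToString` has to format those raw values back into strings before they can serve as partition values. Below is a minimal standalone sketch of just the `DateType` branch, using plain `java.time` rather than Spark's internal `DateFormatter` (so it ignores the session time zone that the real code accounts for); the object name and example date are illustrative:

```scala
import java.time.LocalDate

object DateLiteralToPartitionString {
  // Spark encodes date'2019-01-02' as the number of days since 1970-01-01.
  // The partition spec ultimately needs that Int back as a "yyyy-MM-dd" string.
  def format(daysSinceEpoch: Int): String =
    LocalDate.ofEpochDay(daysSinceEpoch.toLong).toString // ISO-8601, e.g. "2019-01-02"

  def main(args: Array[String]): Unit = {
    val days = LocalDate.of(2019, 1, 2).toEpochDay.toInt // the Int Spark would store
    println(format(days)) // prints 2019-01-02
  }
}
```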
In `SQLQuerySuiteBase` (the Hive test suite):

```diff
@@ -2584,6 +2584,14 @@ abstract class SQLQuerySuiteBase extends QueryTest with SQLTestUtils with TestHiveSingleton
       }
     }
   }
+
+  test("SPARK-33474: Support TypeConstructed partition spec value") {
+    withTable("t") {
+      sql("CREATE TABLE t(name STRING) PARTITIONED BY (part DATE) STORED AS ORC")
+      sql("INSERT INTO t PARTITION(part = date'2019-01-02') VALUES('a')")
+      checkAnswer(sql("SELECT name, CAST(part AS STRING) FROM t"), Row("a", "2019-01-02"))
+    }
+  }
 }
 
 @SlowHiveTest
```
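The test exercises only the `DATE` branch, but the same syntax applies to the other literal types the new method handles. A hypothetical end-to-end run against a build that includes this patch might look like the following (the table name, app name, and local master are illustrative, and `STORED AS ORC` assumes Hive support is available); without the patch, the partition value in such a spec had to be written as a plain string like `'2019-01-02'`:

```scala
import org.apache.spark.sql.SparkSession

object TypedPartitionSpecDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("typed-partition-spec-demo")
      .enableHiveSupport() // needed for STORED AS ORC tables
      .getOrCreate()

    spark.sql("CREATE TABLE demo(name STRING) PARTITIONED BY (part DATE) STORED AS ORC")
    // The typed literal in the PARTITION clause is what this patch adds
    // support for; previously the value had to be a plain string.
    spark.sql("INSERT INTO demo PARTITION(part = date'2019-01-02') VALUES('a')")
    spark.sql("SELECT name, CAST(part AS STRING) FROM demo").show()

    spark.stop()
  }
}
```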