[WIP][SPARK-35076][SQL] Parse interval literals as ANSI intervals #32176
Changes from all commits
```diff
@@ -2392,6 +2392,7 @@ object DatePart {
   }
 }

-// scalastyle:off line.size.limit
+// scalastyle:off line.size.limit line.contains.tab
 @ExpressionDescription(
   usage = "_FUNC_(field, source) - Extracts a part of the date/timestamp or interval source.",
```
```diff
@@ -2410,17 +2411,22 @@ object DatePart {
       224
      > SELECT _FUNC_('SECONDS', timestamp'2019-10-01 00:00:01.000001');
       1.000001
+     > SET spark.sql.legacy.interval.enabled=true;
+      spark.sql.legacy.interval.enabled	true
      > SELECT _FUNC_('days', interval 1 year 10 months 5 days);
       5
      > SELECT _FUNC_('seconds', interval 5 hours 30 seconds 1 milliseconds 1 microseconds);
       30.001001
+     > SET spark.sql.legacy.interval.enabled=false;
+      spark.sql.legacy.interval.enabled	false
   """,
   note = """
     The _FUNC_ function is equivalent to the SQL-standard function `EXTRACT(field FROM source)`
   """,
   group = "datetime_funcs",
   since = "3.0.0")
-// scalastyle:on line.size.limit
+// scalastyle:on line.size.limit line.contains.tab
 case class DatePart(field: Expression, source: Expression, child: Expression)
   extends RuntimeReplaceable {
```

Review comment (Member, Author), on the added `SET spark.sql.legacy.interval.enabled=true;` example: SPARK-35090 for ANSI intervals
```diff
@@ -2437,6 +2443,7 @@ case class DatePart(field: Expression, source: Expression, child: Expression)
     copy(child = newChild)
 }
```
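For orientation (my addition, not part of the diff): the new `SET` examples exist because, after this PR, interval literals parse as ANSI interval types by default, so the docstring examples that mix year-month and day-time fields need the legacy `CalendarIntervalType` behavior enabled via `spark.sql.legacy.interval.enabled=true`. A minimal sketch of that behavior, assuming a Spark build that includes this change (the demo object name is hypothetical):

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical demo; the config key and SQL come from the docstring above.
object LegacyIntervalDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("legacy-interval-demo")
      .getOrCreate()

    // Legacy mode: interval literals parse as CalendarIntervalType, so a
    // literal mixing year-month and day-time fields is accepted.
    spark.conf.set("spark.sql.legacy.interval.enabled", "true")
    spark.sql("SELECT date_part('days', interval 1 year 10 months 5 days)")
      .show() // expected: 5, per the docstring example

    spark.stop()
  }
}
```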
```diff
-// scalastyle:off line.size.limit
+// scalastyle:off line.size.limit line.contains.tab
 @ExpressionDescription(
   usage = "_FUNC_(field FROM source) - Extracts a part of the date/timestamp or interval source.",
```
```diff
@@ -2475,17 +2482,22 @@ case class DatePart(field: Expression, source: Expression, child: Expression)
       224
      > SELECT _FUNC_(SECONDS FROM timestamp'2019-10-01 00:00:01.000001');
       1.000001
+     > SET spark.sql.legacy.interval.enabled=true;
+      spark.sql.legacy.interval.enabled	true
      > SELECT _FUNC_(days FROM interval 1 year 10 months 5 days);
       5
      > SELECT _FUNC_(seconds FROM interval 5 hours 30 seconds 1 milliseconds 1 microseconds);
       30.001001
+     > SET spark.sql.legacy.interval.enabled=false;
+      spark.sql.legacy.interval.enabled	false
   """,
   note = """
     The _FUNC_ function is equivalent to `date_part(field, source)`.
   """,
   group = "datetime_funcs",
   since = "3.0.0")
-// scalastyle:on line.size.limit
+// scalastyle:on line.size.limit line.contains.tab
 case class Extract(field: Expression, source: Expression, child: Expression)
   extends RuntimeReplaceable {
```

Review comment (Member, Author), on the added `SET` example: Created SPARK-35091 to support ANSI intervals by …
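The note above states that `EXTRACT` is equivalent to `date_part(field, source)`; a quick spark-shell-style check (my sketch, assuming the legacy flag is enabled as in the examples):

```scala
// Both calls should return 5 for the 'days' field of the same literal.
spark.sql("SELECT extract(days FROM interval 1 year 10 months 5 days)").show()
spark.sql("SELECT date_part('days', interval 1 year 10 months 5 days)").show()
```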
The second changed file, `QueryParsingErrors`:
```diff
@@ -22,6 +22,7 @@ import org.antlr.v4.runtime.ParserRuleContext
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
 import org.apache.spark.sql.catalyst.trees.Origin
+import org.apache.spark.sql.internal.SQLConf

 /**
  * Object for grouping all error messages of the query parsing.
```
```diff
@@ -367,4 +368,10 @@ object QueryParsingErrors {
     new ParseException("LOCAL is supported only with file: scheme", ctx)
   }

+  def mixedIntervalError(ctx: ParserRuleContext): Throwable = {
+    new ParseException(
+      "Mixing of year-month and day-time fields is not allowed. " +
+        s"Set '${SQLConf.LEGACY_INTERVAL_ENABLED.key}' to true to enable the legacy interval type " +
+        "which supports mixed fields.", ctx)
+  }
 }
```

Review comment (Member), on `mixedIntervalError`: Does ANSI allow mixing daytime interval with same unit e.g. …

Review comment: Created JIRA SPARK-35088 for ANSI intervals
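A sketch of when `mixedIntervalError` surfaces (the literal is my example, not from the PR; the message text is from the diff above). With the new default, a literal mixing year-month and day-time fields fails at parse time:

```scala
import org.apache.spark.sql.catalyst.parser.ParseException

// spark-shell style; assumes `spark` is a SparkSession on a build with this PR.
spark.conf.set("spark.sql.legacy.interval.enabled", "false")
try {
  spark.sql("SELECT INTERVAL 1 YEAR 2 DAYS") // mixes year-month and day-time
} catch {
  case e: ParseException =>
    // Expected to contain: "Mixing of year-month and day-time fields is not
    // allowed. Set 'spark.sql.legacy.interval.enabled' to true to enable the
    // legacy interval type which supports mixed fields."
    println(e.getMessage)
}
```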