@@ -1860,16 +1860,18 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
   override def visitTypeConstructor(ctx: TypeConstructorContext): Literal = withOrigin(ctx) {
     val value = string(ctx.STRING)
     val valueType = ctx.identifier.getText.toUpperCase(Locale.ROOT)
+    val isNegative = ctx.negativeSign != null

     def toLiteral[T](f: UTF8String => Option[T], t: DataType): Literal = {
       f(UTF8String.fromString(value)).map(Literal(_, t)).getOrElse {
         throw new ParseException(s"Cannot parse the $valueType value: $value", ctx)
       }
     }
     try {
       valueType match {
-        case "DATE" =>
+        case "DATE" if !isNegative =>
           toLiteral(stringToDate(_, getZoneId(SQLConf.get.sessionLocalTimeZone)), DateType)
-        case "TIMESTAMP" =>
+        case "TIMESTAMP" if !isNegative =>
           val zoneId = getZoneId(SQLConf.get.sessionLocalTimeZone)
           toLiteral(stringToTimestamp(_, zoneId), TimestampType)
         case "INTERVAL" =>
@@ -1881,8 +1883,9 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
               ex.setStackTrace(e.getStackTrace)
               throw ex
           }
-          Literal(applyNegativeSign(ctx.negativeSign, interval), CalendarIntervalType)
-        case "X" =>
+          val signedInterval = if (isNegative) IntervalUtils.negate(interval) else interval
+          Literal(signedInterval, CalendarIntervalType)
+        case "X" if !isNegative =>
           val padding = if (value.length % 2 != 0) "0" else ""
           Literal(DatatypeConverter.parseHexBinary(padding + value))
         case "INTEGER" =>
@@ -1894,9 +1897,11 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
               ex.setStackTrace(e.getStackTrace)
               throw ex
           }
-          Literal(i, IntegerType)
+          Literal(if (isNegative) -i else i, IntegerType)
         case other =>
-          throw new ParseException(s"Literals of type '$other' are currently not supported.", ctx)
+          val negativeSign: String = if (isNegative) "-" else ""
+          throw new ParseException(s"Literals of type '$negativeSign$other' are currently not" +
+            " supported.", ctx)
       }
     } catch {
       case e: IllegalArgumentException =>
@@ -2026,7 +2031,7 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
   }

   private def applyNegativeSign(sign: Token, interval: CalendarInterval): CalendarInterval = {
-    if (sign != null && sign.getText == "-") {
+    if (sign != null) {
       IntervalUtils.negate(interval)
     } else {
       interval
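To make the dispatch rule above concrete, here is a small self-contained Scala sketch (not Spark code; the types and the parseSimpleInterval helper are invented for illustration): a leading minus is honored by the INTEGER and INTERVAL constructors and rejected for other typed literals, with the sign echoed in the error message, mirroring the guarded cases added to visitTypeConstructor.

object NegativeTypedLiteralSketch {

  final case class SimpleInterval(months: Int, days: Int, microseconds: Long) {
    def negate: SimpleInterval = SimpleInterval(-months, -days, -microseconds)
  }

  // Extremely simplified stand-in for interval parsing: only handles "<n> second(s)".
  private def parseSimpleInterval(value: String): SimpleInterval = {
    val seconds = value.trim.split("\\s+")(0).toLong
    SimpleInterval(0, 0, seconds * 1000000L)
  }

  def construct(valueType: String, value: String, isNegative: Boolean): Any =
    valueType.toUpperCase match {
      case "INTEGER" =>
        val i = value.toInt
        if (isNegative) -i else i
      case "INTERVAL" =>
        val interval = parseSimpleInterval(value)
        if (isNegative) interval.negate else interval
      case other =>
        // The real code handles unsigned DATE/TIMESTAMP/X above; this sketch omits them
        // and only shows the error path, which includes the sign in the message.
        val sign = if (isNegative) "-" else ""
        throw new IllegalArgumentException(
          s"Literals of type '$sign$other' are currently not supported.")
    }

  def main(args: Array[String]): Unit = {
    println(construct("INTEGER", "7", isNegative = true))          // -7
    println(construct("INTERVAL", "1 second", isNegative = true))  // SimpleInterval(0,0,-1000000)
    // construct("DATE", "1999-01-01", isNegative = true)          // would throw a '-DATE' error
  }
}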
9 changes: 9 additions & 0 deletions sql/core/src/test/resources/sql-tests/inputs/literals.sql
@@ -153,3 +153,12 @@ select interval '1' year to second;
select '1' year to second;
select interval 1 year '2-1' year to month;
select 1 year '2-1' year to month;
SET spark.sql.ansi.enabled=false;
[Review comment, Contributor] Not related to this PR, but it's really annoying to duplicate the test cases with ansi mode on and off.
I'm wondering if we can introduce a -- import abc.sql directive so that we can easily import test cases in ansi/interval.sql, with several ansi specific test cases. cc @maropu

[Review comment, Member] Ah, I see. That's certainly annoying. I'll try to make a pr later for that approach.
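A hypothetical sketch of what such a directive could look like on the test-harness side (the expandImports name and the exact "-- import <file>.sql" syntax are invented here, not SQLQueryTestSuite's actual API): the runner would splice the imported file's lines in before splitting a test file into queries, so an ansi variant of literals.sql could reuse the base cases and only append ANSI-specific ones.

import scala.io.Source

object ImportDirectiveSketch {
  private val ImportPattern = """(?i)^--\s*import\s+(\S+\.sql)\s*$""".r

  // Recursively replace each import directive with the lines of the referenced file.
  def expandImports(fileName: String, baseDir: String): Seq[String] = {
    val source = Source.fromFile(s"$baseDir/$fileName")
    try {
      source.getLines().toList.flatMap {
        case ImportPattern(imported) => expandImports(imported, baseDir)
        case line => List(line)
      }
    } finally {
      source.close()
    }
  }
}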


-- awareness of the negative sign before type
select -integer '7';
select -date '1999-01-01';
select -timestamp '1999-01-01';
select -x'2379ACFe';
select +integer '7';
select +interval '1 second';
76 changes: 75 additions & 1 deletion sql/core/src/test/resources/sql-tests/results/literals.sql.out
@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
--- Number of queries: 82
+-- Number of queries: 89


-- !query 0
@@ -846,3 +846,77 @@ Can only have a single from-to unit in the interval literal syntax(line 1, pos 1
== SQL ==
select 1 year '2-1' year to month
--------------^^^


-- !query 82
SET spark.sql.ansi.enabled=false
-- !query 82 schema
struct<key:string,value:string>
-- !query 82 output
spark.sql.ansi.enabled false


-- !query 83
select -integer '7'
-- !query 83 schema
struct<-7:int>
-- !query 83 output
-7


-- !query 84
select -date '1999-01-01'
-- !query 84 schema
struct<>
-- !query 84 output
org.apache.spark.sql.catalyst.parser.ParseException

Literals of type '-DATE' are currently not supported.(line 1, pos 7)

== SQL ==
select -date '1999-01-01'
-------^^^


-- !query 85
select -timestamp '1999-01-01'
-- !query 85 schema
struct<>
-- !query 85 output
org.apache.spark.sql.catalyst.parser.ParseException

Literals of type '-TIMESTAMP' are currently not supported.(line 1, pos 7)

== SQL ==
select -timestamp '1999-01-01'
-------^^^


-- !query 86
select -x'2379ACFe'
-- !query 86 schema
struct<>
-- !query 86 output
org.apache.spark.sql.catalyst.parser.ParseException

Literals of type '-X' are currently not supported.(line 1, pos 7)

== SQL ==
select -x'2379ACFe'
-------^^^


-- !query 87
select +integer '7'
-- !query 87 schema
struct<7:int>
-- !query 87 output
7


-- !query 88
select +interval '1 second'
-- !query 88 schema
struct<1 seconds:interval>
-- !query 88 output
1 seconds