diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 00a1964c9501..0388dfe6c834 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -1860,6 +1860,8 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
   override def visitTypeConstructor(ctx: TypeConstructorContext): Literal = withOrigin(ctx) {
     val value = string(ctx.STRING)
     val valueType = ctx.identifier.getText.toUpperCase(Locale.ROOT)
+    val isNegative = ctx.negativeSign != null
+
     def toLiteral[T](f: UTF8String => Option[T], t: DataType): Literal = {
       f(UTF8String.fromString(value)).map(Literal(_, t)).getOrElse {
         throw new ParseException(s"Cannot parse the $valueType value: $value", ctx)
@@ -1867,9 +1869,9 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
     }
     try {
       valueType match {
-        case "DATE" =>
+        case "DATE" if !isNegative =>
           toLiteral(stringToDate(_, getZoneId(SQLConf.get.sessionLocalTimeZone)), DateType)
-        case "TIMESTAMP" =>
+        case "TIMESTAMP" if !isNegative =>
           val zoneId = getZoneId(SQLConf.get.sessionLocalTimeZone)
           toLiteral(stringToTimestamp(_, zoneId), TimestampType)
         case "INTERVAL" =>
@@ -1881,8 +1883,9 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
               ex.setStackTrace(e.getStackTrace)
               throw ex
           }
-          Literal(applyNegativeSign(ctx.negativeSign, interval), CalendarIntervalType)
-        case "X" =>
+          val signedInterval = if (isNegative) IntervalUtils.negate(interval) else interval
+          Literal(signedInterval, CalendarIntervalType)
+        case "X" if !isNegative =>
           val padding = if (value.length % 2 != 0) "0" else ""
           Literal(DatatypeConverter.parseHexBinary(padding + value))
         case "INTEGER" =>
@@ -1894,9 +1897,11 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
               ex.setStackTrace(e.getStackTrace)
               throw ex
           }
-          Literal(i, IntegerType)
+          Literal(if (isNegative) -i else i, IntegerType)
         case other =>
-          throw new ParseException(s"Literals of type '$other' are currently not supported.", ctx)
+          val negativeSign: String = if (isNegative) "-" else ""
+          throw new ParseException(s"Literals of type '$negativeSign$other' are currently not" +
+            " supported.", ctx)
       }
     } catch {
       case e: IllegalArgumentException =>
@@ -2026,7 +2031,7 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
   }
 
   private def applyNegativeSign(sign: Token, interval: CalendarInterval): CalendarInterval = {
-    if (sign != null && sign.getText == "-") {
+    if (sign != null) {
       IntervalUtils.negate(interval)
     } else {
       interval
diff --git a/sql/core/src/test/resources/sql-tests/inputs/literals.sql b/sql/core/src/test/resources/sql-tests/inputs/literals.sql
index cf5b7976e70c..8c927d456c8b 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/literals.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/literals.sql
@@ -153,3 +153,12 @@ select interval '1' year to second;
 select '1' year to second;
 select interval 1 year '2-1' year to month;
 select 1 year '2-1' year to month;
+SET spark.sql.ansi.enabled=false;
+
+-- awareness of the negative sign before type
+select -integer '7';
+select -date '1999-01-01';
+select -timestamp '1999-01-01';
+select -x'2379ACFe';
+select +integer '7';
+select +interval '1 second';
\ No newline at end of file
diff --git a/sql/core/src/test/resources/sql-tests/results/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/literals.sql.out
index e9aa046717f1..8f641e51d823 100644
--- a/sql/core/src/test/resources/sql-tests/results/literals.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/literals.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 82
+-- Number of queries: 89
 
 
 -- !query 0
@@ -846,3 +846,77 @@ Can only have a single from-to unit in the interval literal syntax(line 1, pos 1
 == SQL ==
 select 1 year '2-1' year to month
 --------------^^^
+
+
+-- !query 82
+SET spark.sql.ansi.enabled=false
+-- !query 82 schema
+struct<key:string,value:string>
+-- !query 82 output
+spark.sql.ansi.enabled	false
+
+
+-- !query 83
+select -integer '7'
+-- !query 83 schema
+struct<-7:int>
+-- !query 83 output
+-7
+
+
+-- !query 84
+select -date '1999-01-01'
+-- !query 84 schema
+struct<>
+-- !query 84 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Literals of type '-DATE' are currently not supported.(line 1, pos 7)
+
+== SQL ==
+select -date '1999-01-01'
+-------^^^
+
+
+-- !query 85
+select -timestamp '1999-01-01'
+-- !query 85 schema
+struct<>
+-- !query 85 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Literals of type '-TIMESTAMP' are currently not supported.(line 1, pos 7)
+
+== SQL ==
+select -timestamp '1999-01-01'
+-------^^^
+
+
+-- !query 86
+select -x'2379ACFe'
+-- !query 86 schema
+struct<>
+-- !query 86 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Literals of type '-X' are currently not supported.(line 1, pos 7)
+
+== SQL ==
+select -x'2379ACFe'
+-------^^^
+
+
+-- !query 87
+select +integer '7'
+-- !query 87 schema
+struct<7:int>
+-- !query 87 output
+7
+
+
+-- !query 88
+select +interval '1 second'
+-- !query 88 schema
+struct<1 seconds:interval>
+-- !query 88 output
+1 seconds
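
For readers who want to see the behaviour outside the parser, below is a minimal, self-contained Scala sketch of the sign-handling pattern this patch introduces. It is not Spark's AstBuilder: the names parseTypedLiteral and ParseError, and the simplified DATE/TIMESTAMP/X/INTERVAL handling, are invented for illustration. The point it demonstrates is the same as in the patch: the `if !isNegative` guards make a signed date, timestamp, or hex literal fall through to the unsupported-literal branch (producing messages like "Literals of type '-DATE' ..."), while INTEGER and INTERVAL values have the sign applied directly.

object TypedLiteralSignSketch {

  // Hypothetical error type standing in for Spark's ParseException.
  final case class ParseError(message: String) extends Exception(message)

  // `isNegative` mirrors `ctx.negativeSign != null` in the patched visitTypeConstructor.
  // DATE/TIMESTAMP/X results are modelled as plain strings so the sketch runs without
  // any Spark dependency.
  def parseTypedLiteral(valueType: String, value: String, isNegative: Boolean): Any = {
    valueType.toUpperCase match {
      case "DATE" if !isNegative      => s"DATE($value)"
      case "TIMESTAMP" if !isNegative => s"TIMESTAMP($value)"
      case "X" if !isNegative         => s"HEX($value)"
      case "INTERVAL" =>
        // Spark negates a CalendarInterval via IntervalUtils.negate; a Long stands in here.
        val seconds = value.stripSuffix(" second").toLong
        if (isNegative) -seconds else seconds
      case "INTEGER" =>
        val i = value.toInt
        if (isNegative) -i else i
      case other =>
        // A signed DATE/TIMESTAMP/X literal falls through to here, yielding '-DATE' etc.
        val sign = if (isNegative) "-" else ""
        throw ParseError(s"Literals of type '$sign$other' are currently not supported.")
    }
  }

  def main(args: Array[String]): Unit = {
    println(parseTypedLiteral("integer", "7", isNegative = true))          // -7
    println(parseTypedLiteral("interval", "1 second", isNegative = true))  // -1
    try parseTypedLiteral("date", "1999-01-01", isNegative = true)
    catch { case e: ParseError => println(e.getMessage) }
    // prints: Literals of type '-DATE' are currently not supported.
  }
}

Guarding the unsupported cases instead of silently dropping the sign keeps `-date '...'` an explicit parse error, which is exactly what the new ParseException blocks added to literals.sql.out above assert.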