From 2e64de4887d4104cae0c3a4bb0ba346092be850c Mon Sep 17 00:00:00 2001
From: Maxim Gekk
Date: Wed, 30 Oct 2019 20:34:52 +0300
Subject: [PATCH 1/6] Refactor fromDayTimeString()

---
 .../sql/catalyst/util/IntervalUtils.scala     | 105 +++++++++---------
 1 file changed, 55 insertions(+), 50 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
index f55b0545ee9c..737bde1a3bb9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
@@ -178,8 +178,24 @@ object IntervalUtils {
     fromDayTimeString(s, "day", "second")
   }
 
-  private val dayTimePattern =
-    "^([+|-])?((\\d+) )?((\\d+):)?(\\d+):(\\d+)(\\.(\\d+))?$".r
+  private val dayTimePattern = ("^(?<sign>[+|-])?((?<day>\\d+) )?" +
+    "((?<hour>\\d+):)?(?<minute>\\d+):(?<second>(\\d+)(\\.(\\d+))?)$").r
+
+  object UnitName extends Enumeration {
+    val microsecond = Value(0, "microsecond")
+    val millisecond = Value(1, "millisecond")
+    val second = Value(2, "second")
+    val minute = Value(3, "minute")
+    val hour = Value(4, "hour")
+    val day = Value(5, "day")
+    val week = Value(6, "week")
+    val month = Value(7, "month")
+    val year = Value(8, "year")
+  }
+
+  private def unitsRange(start: String, end: String): Seq[UnitName.Value] = {
+    (UnitName.withName(start).id to UnitName.withName(end).id).map(UnitName(_))
+  }
 
   /**
    * Parse dayTime string in form: [-]d HH:mm:ss.nnnnnnnnn and [-]HH:mm:ss.nnnnnnnnn
@@ -196,53 +212,29 @@
     val m = dayTimePattern.pattern.matcher(input)
     require(m.matches, s"Interval string must match day-time format of 'd h:m:s.n': $input")
 
+    def toLong(unitName: UnitName.Value, minValue: Long, maxValue: Long): Long = {
+      val name = unitName.toString
+      toLongWithRange(name, m.group(name), minValue, maxValue)
+    }
+
     try {
-      val sign = if (m.group(1) != null && m.group(1) == "-") -1 else 1
-      val days = if (m.group(2) == null) {
-        0
-      } else {
-        toLongWithRange("day", m.group(3), 0, Integer.MAX_VALUE)
-      }
-      var hours: Long = 0L
-      var minutes: Long = 0L
-      var seconds: Long = 0L
-      if (m.group(5) != null || from == "minute") { // 'HH:mm:ss' or 'mm:ss minute'
-        hours = toLongWithRange("hour", m.group(5), 0, 23)
-        minutes = toLongWithRange("minute", m.group(6), 0, 59)
-        seconds = toLongWithRange("second", m.group(7), 0, 59)
-      } else if (m.group(8) != null) { // 'mm:ss.nn'
-        minutes = toLongWithRange("minute", m.group(6), 0, 59)
-        seconds = toLongWithRange("second", m.group(7), 0, 59)
-      } else { // 'HH:mm'
-        hours = toLongWithRange("hour", m.group(6), 0, 23)
-        minutes = toLongWithRange("second", m.group(7), 0, 59)
-      }
-      // Hive allow nanosecond precision interval
-      val nanoStr = if (m.group(9) == null) {
-        null
-      } else {
-        (m.group(9) + "000000000").substring(0, 9)
-      }
-      var nanos = toLongWithRange("nanosecond", nanoStr, 0L, 999999999L)
-      to match {
-        case "hour" =>
-          minutes = 0
-          seconds = 0
-          nanos = 0
-        case "minute" =>
-          seconds = 0
-          nanos = 0
-        case "second" =>
-          // No-op
+      val micros = unitsRange(to, from).map {
+        case name @ UnitName.day =>
+          val days = toLong(name, 0, Integer.MAX_VALUE)
+          Math.multiplyExact(days, DateTimeUtils.MICROS_PER_DAY)
+        case name @ UnitName.hour =>
+          val hours = toLong(name, 0, 23)
+          Math.multiplyExact(hours, MICROS_PER_HOUR)
+        case name @ UnitName.minute =>
+          val minutes = toLong(name, 0, 59)
+          Math.multiplyExact(minutes, MICROS_PER_MINUTE)
+        case UnitName.second =>
+          parseSecondNano(m.group(UnitName.second.toString))
         case _ =>
           throw new IllegalArgumentException(
             s"Cannot support (interval '$input' $from to $to) expression")
-      }
-      var micros = nanos / DateTimeUtils.NANOS_PER_MICROS
-      micros = Math.addExact(micros, Math.multiplyExact(days, DateTimeUtils.MICROS_PER_DAY))
-      micros = Math.addExact(micros, Math.multiplyExact(hours, MICROS_PER_HOUR))
-      micros = Math.addExact(micros, Math.multiplyExact(minutes, MICROS_PER_MINUTE))
-      micros = Math.addExact(micros, Math.multiplyExact(seconds, DateTimeUtils.MICROS_PER_SECOND))
+      }.reduce((x: Long, y: Long) => Math.addExact(x, y))
+      val sign = if (m.group("sign") != null && m.group("sign") == "-") -1 else 1
       new CalendarInterval(0, sign * micros)
     } catch {
       case e: Exception =>
@@ -292,6 +284,21 @@
     new CalendarInterval(months, microseconds)
   }
 
+  // Parses a string with nanoseconds, truncates the result and returns microseconds
+  private def parseNanos(nanosStr: String, isNegative: Boolean): Long = {
+    if (nanosStr != null) {
+      val maxNanosLen = 9
+      val alignedStr = if (nanosStr.length < maxNanosLen) {
+        (nanosStr + "000000000").substring(0, maxNanosLen)
+      } else nanosStr
+      val nanos = toLongWithRange("nanosecond", alignedStr, 0L, 999999999L)
+      val micros = nanos / DateTimeUtils.NANOS_PER_MICROS
+      if (isNegative) -micros else micros
+    } else {
+      0L
+    }
+  }
+
   /**
    * Parse second_nano string in ss.nnnnnnnnn format to microseconds
    */
@@ -303,15 +310,13 @@
         Long.MinValue / DateTimeUtils.MICROS_PER_SECOND,
         Long.MaxValue / DateTimeUtils.MICROS_PER_SECOND) * DateTimeUtils.MICROS_PER_SECOND
     }
-    def parseNanos(nanosStr: String): Long = {
-      toLongWithRange("nanosecond", nanosStr, 0L, 999999999L) / DateTimeUtils.NANOS_PER_MICROS
-    }
 
     secondNano.split("\\.") match {
      case Array(secondsStr) => parseSeconds(secondsStr)
-      case Array("", nanosStr) => parseNanos(nanosStr)
+      case Array("", nanosStr) => parseNanos(nanosStr, false)
       case Array(secondsStr, nanosStr) =>
-        Math.addExact(parseSeconds(secondsStr), parseNanos(nanosStr))
+        val seconds = parseSeconds(secondsStr)
+        Math.addExact(seconds, parseNanos(nanosStr, seconds < 0))
       case _ =>
         throw new IllegalArgumentException(
           "Interval string does not match second-nano format of ss.nnnnnnnnn")
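
Aside (not part of the patch): patch 1's rewrite hinges on Java named capture groups — `(?<name>...)` in the pattern, read back with `Matcher.group(String)` — so each unit is fetched by name instead of by fragile positional index. A minimal standalone sketch of the idiom; the object name and sample input are invented for illustration:

```scala
import java.util.regex.Pattern

object NamedGroupSketch {
  def main(args: Array[String]): Unit = {
    // Same shape as dayTimePattern: every field is a named group.
    val dayTimePattern = Pattern.compile(
      "^(?<sign>[+|-])?((?<day>\\d+) )?" +
        "((?<hour>\\d+):)?(?<minute>\\d+):(?<second>(\\d+)(\\.(\\d+))?)$")
    val m = dayTimePattern.matcher("-1 2:03:04.005")
    assert(m.matches())
    // group("day") replaces the fragile positional m.group(3) of the old code.
    for (name <- Seq("sign", "day", "hour", "minute", "second")) {
      println(s"$name -> ${m.group(name)}")
    }
  }
}
```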

From feee264606e67ec19da1426aefddaaf93afe0327 Mon Sep 17 00:00:00 2001
From: Maxim Gekk
Date: Wed, 30 Oct 2019 22:00:19 +0300
Subject: [PATCH 2/6] Use UnitName type

---
 .../spark/sql/catalyst/parser/AstBuilder.scala   | 13 +++++++------
 .../spark/sql/catalyst/util/IntervalUtils.scala  | 11 +++++++----
 .../sql/catalyst/util/IntervalUtilsSuite.scala   |  4 ++--
 3 files changed, 16 insertions(+), 12 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index c9ecbdb658f4..f6c2d650abb1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -38,6 +38,7 @@ import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.util.DateTimeUtils.{getZoneId, stringToDate, stringToTimestamp}
 import org.apache.spark.sql.catalyst.util.IntervalUtils
+import org.apache.spark.sql.catalyst.util.IntervalUtils.UnitName
 import org.apache.spark.sql.connector.expressions.{ApplyTransform, BucketTransform, DaysTransform, Expression => V2Expression, FieldReference, HoursTransform, IdentityTransform, LiteralValue, MonthsTransform, Transform, YearsTransform}
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
@@ -1967,17 +1968,17 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
       case ("year", Some("month")) =>
         IntervalUtils.fromYearMonthString(s)
       case ("day", Some("hour")) =>
-        IntervalUtils.fromDayTimeString(s, "day", "hour")
+        IntervalUtils.fromDayTimeString(s, UnitName.day, UnitName.hour)
       case ("day", Some("minute")) =>
-        IntervalUtils.fromDayTimeString(s, "day", "minute")
+        IntervalUtils.fromDayTimeString(s, UnitName.day, UnitName.minute)
       case ("day", Some("second")) =>
-        IntervalUtils.fromDayTimeString(s, "day", "second")
+        IntervalUtils.fromDayTimeString(s, UnitName.day, UnitName.second)
       case ("hour", Some("minute")) =>
-        IntervalUtils.fromDayTimeString(s, "hour", "minute")
+        IntervalUtils.fromDayTimeString(s, UnitName.hour, UnitName.minute)
       case ("hour", Some("second")) =>
-        IntervalUtils.fromDayTimeString(s, "hour", "second")
+        IntervalUtils.fromDayTimeString(s, UnitName.hour, UnitName.second)
       case ("minute", Some("second")) =>
-        IntervalUtils.fromDayTimeString(s, "minute", "second")
+        IntervalUtils.fromDayTimeString(s, UnitName.minute, UnitName.second)
       case (from, Some(t)) =>
         throw new ParseException(s"Intervals FROM $from TO $t are not supported.", ctx)
     }

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
index 737bde1a3bb9..ffa7404ef053 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
@@ -175,7 +175,7 @@ object IntervalUtils {
    * adapted from HiveIntervalDayTime.valueOf
    */
   def fromDayTimeString(s: String): CalendarInterval = {
-    fromDayTimeString(s, "day", "second")
+    fromDayTimeString(s, UnitName.day, UnitName.second)
   }
 
   private val dayTimePattern = ("^(?<sign>[+|-])?((?<day>\\d+) )?" +
@@ -193,8 +193,8 @@
     val year = Value(8, "year")
   }
 
-  private def unitsRange(start: String, end: String): Seq[UnitName.Value] = {
-    (UnitName.withName(start).id to UnitName.withName(end).id).map(UnitName(_))
+  private def unitsRange(start: UnitName.Value, end: UnitName.Value): Seq[UnitName.Value] = {
+    (start.id to end.id).map(UnitName(_))
   }
 
   /**
@@ -206,7 +206,10 @@
    * - HOUR TO (MINUTE|SECOND)
    * - MINUTE TO SECOND
    */
-  def fromDayTimeString(input: String, from: String, to: String): CalendarInterval = {
+  def fromDayTimeString(
+      input: String,
+      from: UnitName.Value,
+      to: UnitName.Value): CalendarInterval = {
     require(input != null, "Interval day-time string must be not null")
     assert(input.length == input.trim.length)
     val m = dayTimePattern.pattern.matcher(input)

diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
index 9addc396b8d3..5072a75a950a 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.sql.catalyst.util
 
 import org.apache.spark.SparkFunSuite
-import org.apache.spark.sql.catalyst.util.IntervalUtils.{fromDayTimeString, fromString, fromYearMonthString}
+import org.apache.spark.sql.catalyst.util.IntervalUtils.{fromDayTimeString, fromString, fromYearMonthString, UnitName}
 import org.apache.spark.unsafe.types.CalendarInterval
 import org.apache.spark.unsafe.types.CalendarInterval._
 
@@ -141,7 +141,7 @@ class IntervalUtilsSuite extends SparkFunSuite {
     }
 
     try {
-      fromDayTimeString("5 1:12:20", "hour", "microsecond")
+      fromDayTimeString("5 1:12:20", UnitName.hour, UnitName.microsecond)
       fail("Expected to throw an exception for the invalid convention type")
     } catch {
       case e: IllegalArgumentException =>
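
Aside (not part of the patch): patch 2 can pass `UnitName.Value` end to end because `Enumeration` values carry explicit ids, which makes a FROM..TO unit range just an id range. A small self-contained sketch of that idiom, trimmed to four units for brevity:

```scala
object UnitRangeSketch {
  object UnitName extends Enumeration {
    val second = Value(2, "second")
    val minute = Value(3, "minute")
    val hour = Value(4, "hour")
    val day = Value(5, "day")
  }

  // Mirrors unitsRange: UnitName(id) looks a value up by its id.
  def unitsRange(start: UnitName.Value, end: UnitName.Value): Seq[UnitName.Value] =
    (start.id to end.id).map(UnitName(_))

  def main(args: Array[String]): Unit = {
    // DAY TO SECOND iterates ascending ids: second, minute, hour, day.
    println(unitsRange(UnitName.second, UnitName.day))
  }
}
```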

From a1ab88d02a48d1399814693042e9e75e6f4f2e16 Mon Sep 17 00:00:00 2001
From: Maxim Gekk
Date: Wed, 30 Oct 2019 22:11:46 +0300
Subject: [PATCH 3/6] Add unitValueProps

---
 .../sql/catalyst/util/IntervalUtils.scala     | 24 +++++++++----------
 1 file changed, 11 insertions(+), 13 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
index ffa7404ef053..2c0c64a87a08 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
@@ -193,6 +193,12 @@ object IntervalUtils {
     val year = Value(8, "year")
   }
 
+  val unitValueProps: Map[UnitName.Value, (Long, Long, Long => Long)] = Map(
+    UnitName.minute -> (0, 59, Math.multiplyExact(_, MICROS_PER_MINUTE)),
+    UnitName.hour -> (0, 23, Math.multiplyExact(_, MICROS_PER_HOUR)),
+    UnitName.day -> (0, Integer.MAX_VALUE, Math.multiplyExact(_, DateTimeUtils.MICROS_PER_DAY))
+  )
+
   private def unitsRange(start: UnitName.Value, end: UnitName.Value): Seq[UnitName.Value] = {
     (start.id to end.id).map(UnitName(_))
   }
@@ -215,24 +221,16 @@
     val m = dayTimePattern.pattern.matcher(input)
     require(m.matches, s"Interval string must match day-time format of 'd h:m:s.n': $input")
 
-    def toLong(unitName: UnitName.Value, minValue: Long, maxValue: Long): Long = {
+    def toLong(unitName: UnitName.Value): Long = {
       val name = unitName.toString
-      toLongWithRange(name, m.group(name), minValue, maxValue)
+      val (minValue, maxValue, conv) = unitValueProps(unitName)
+      conv(toLongWithRange(name, m.group(name), minValue, maxValue))
     }
 
     try {
       val micros = unitsRange(to, from).map {
-        case name @ UnitName.day =>
-          val days = toLong(name, 0, Integer.MAX_VALUE)
-          Math.multiplyExact(days, DateTimeUtils.MICROS_PER_DAY)
-        case name @ UnitName.hour =>
-          val hours = toLong(name, 0, 23)
-          Math.multiplyExact(hours, MICROS_PER_HOUR)
-        case name @ UnitName.minute =>
-          val minutes = toLong(name, 0, 59)
-          Math.multiplyExact(minutes, MICROS_PER_MINUTE)
-        case UnitName.second =>
-          parseSecondNano(m.group(UnitName.second.toString))
+        case name @ (UnitName.day | UnitName.hour | UnitName.minute) => toLong(name)
+        case UnitName.second => parseSecondNano(m.group(UnitName.second.toString))
         case _ =>
           throw new IllegalArgumentException(
             s"Cannot support (interval '$input' $from to $to) expression")

From c3fe563bfe3f8e5d7aa4f62a338a6fbab233eadc Mon Sep 17 00:00:00 2001
From: Maxim Gekk
Date: Wed, 30 Oct 2019 22:16:02 +0300
Subject: [PATCH 4/6] conv -> toMicros

---
 .../org/apache/spark/sql/catalyst/util/IntervalUtils.scala | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
index 2c0c64a87a08..226492b1b257 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
@@ -179,7 +179,7 @@ object IntervalUtils {
   }
 
   private val dayTimePattern = ("^(?<sign>[+|-])?((?<day>\\d+) )?" +
-    "((?<hour>\\d+):)?(?<minute>\\d+):(?<second>(\\d+)(\\.(\\d+))?)$").r
+    "((?<hour>\\d{1,2}+):)?(?<minute>\\d{1,2}+):(?<second>(\\d{1,2}+)(\\.(\\d{1,9}+))?)$").r
 
   object UnitName extends Enumeration {
     val microsecond = Value(0, "microsecond")
@@ -223,8 +223,8 @@
     def toLong(unitName: UnitName.Value): Long = {
       val name = unitName.toString
-      val (minValue, maxValue, conv) = unitValueProps(unitName)
-      conv(toLongWithRange(name, m.group(name), minValue, maxValue))
+      val (minValue, maxValue, toMicros) = unitValueProps(unitName)
+      toMicros(toLongWithRange(name, m.group(name), minValue, maxValue))
     }
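
Aside (not part of the patches): patches 3 and 4 fold the per-unit `case` arms into `unitValueProps`, a table mapping each unit to its valid range plus a to-microseconds conversion, summed with overflow checking. A self-contained sketch of the same table-driven shape; the string keys and demo values are illustrative only:

```scala
object UnitPropsSketch {
  val MICROS_PER_MINUTE: Long = 60L * 1000 * 1000
  val MICROS_PER_HOUR: Long = 60 * MICROS_PER_MINUTE
  val MICROS_PER_DAY: Long = 24 * MICROS_PER_HOUR

  // (min, max, toMicros) per unit, as in unitValueProps.
  val props: Map[String, (Long, Long, Long => Long)] = Map(
    "minute" -> (0L, 59L, Math.multiplyExact(_, MICROS_PER_MINUTE)),
    "hour" -> (0L, 23L, Math.multiplyExact(_, MICROS_PER_HOUR)),
    "day" -> (0L, Int.MaxValue.toLong, Math.multiplyExact(_, MICROS_PER_DAY)))

  def toMicros(unit: String, value: Long): Long = {
    val (min, max, conv) = props(unit)
    require(min <= value && value <= max, s"$unit $value out of range [$min, $max]")
    conv(value)
  }

  def main(args: Array[String]): Unit = {
    // Summing with addExact surfaces overflow instead of wrapping silently.
    val total = Seq("day" -> 1L, "hour" -> 2L, "minute" -> 3L)
      .map { case (u, v) => toMicros(u, v) }
      .reduce((x, y) => Math.addExact(x, y))
    println(total) // 93780000000 microseconds = 1 day 2 hours 3 minutes
  }
}
```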
+ - "((?\\d{1,2}+):)?(?\\d{1,2}+):(?(\\d{1,2}+)(\\.(\\d{1,9}+))?)$").r - object UnitName extends Enumeration { val microsecond = Value(0, "microsecond") val millisecond = Value(1, "millisecond") @@ -199,6 +194,21 @@ object IntervalUtils { UnitName.day -> (0, Integer.MAX_VALUE, Math.multiplyExact(_, DateTimeUtils.MICROS_PER_DAY)) ) + private val signRe = "(?[+|-])?" + private val dayRe = "((?\\d+)\\s+)?" + private val hourRe = "(?\\d{1,2}+)" + private val minuteRe = "(?\\d{1,2}+)" + private val secondRe = "(?(\\d{1,2}+)(\\.(\\d{1,9}+))?)" + + private val dayTimeRe = Map( + (UnitName.minute, UnitName.second) -> (s"^$signRe$minuteRe:$secondRe$$").r, + (UnitName.hour, UnitName.minute) -> (s"^$signRe$hourRe:$minuteRe$$").r, + (UnitName.hour, UnitName.second) -> (s"^$signRe$hourRe:$minuteRe:$secondRe$$").r, + (UnitName.day, UnitName.hour) -> (s"^$signRe$dayRe$hourRe$$").r, + (UnitName.day, UnitName.minute) -> (s"^$signRe$dayRe$hourRe:$minuteRe$$").r, + (UnitName.day, UnitName.second) -> (s"^$signRe$dayRe$hourRe:$minuteRe:$secondRe$$").r + ) + private def unitsRange(start: UnitName.Value, end: UnitName.Value): Seq[UnitName.Value] = { (start.id to end.id).map(UnitName(_)) } @@ -218,8 +228,11 @@ object IntervalUtils { to: UnitName.Value): CalendarInterval = { require(input != null, "Interval day-time string must be not null") assert(input.length == input.trim.length) - val m = dayTimePattern.pattern.matcher(input) - require(m.matches, s"Interval string must match day-time format of 'd h:m:s.n': $input") + require(dayTimeRe.contains(from -> to), + s"Cannot support (interval '$input' $from to $to) expression") + val pattern = dayTimeRe(from, to).pattern + val m = pattern.matcher(input) + require(m.matches, s"Interval string must match day-time format of '$pattern': $input") def toLong(unitName: UnitName.Value): Long = { val name = unitName.toString From 140a26fbe9836908bd3094d15a9f4dc234346fc8 Mon Sep 17 00:00:00 2001 From: Maxim Gekk Date: Fri, 1 Nov 2019 01:20:05 +0300 Subject: [PATCH 6/6] Special pattern for min-sec --- .../sql/catalyst/util/IntervalUtils.scala | 23 +++++++----------- .../results/postgreSQL/interval.sql.out | 24 +++++++++---------- 2 files changed, 21 insertions(+), 26 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala index ffd0a25a9951..6412a67c5eba 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala @@ -196,20 +196,13 @@ object IntervalUtils { UnitName.day -> (0, Integer.MAX_VALUE, Math.multiplyExact(_, DateTimeUtils.MICROS_PER_DAY)) ) - private val signRe = "(?[+|-])?" - private val dayRe = "((?\\d+)\\s+)?" 

From 140a26fbe9836908bd3094d15a9f4dc234346fc8 Mon Sep 17 00:00:00 2001
From: Maxim Gekk
Date: Fri, 1 Nov 2019 01:20:05 +0300
Subject: [PATCH 6/6] Special pattern for min-sec

---
 .../sql/catalyst/util/IntervalUtils.scala     | 23 +++++++-----------
 .../results/postgreSQL/interval.sql.out       | 24 +++++++++----------
 2 files changed, 21 insertions(+), 26 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
index ffd0a25a9951..6412a67c5eba 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
@@ -196,20 +196,13 @@
     UnitName.day -> (0, Integer.MAX_VALUE, Math.multiplyExact(_, DateTimeUtils.MICROS_PER_DAY))
   )
 
-  private val signRe = "(?<sign>[+|-])?"
-  private val dayRe = "((?<day>\\d+)\\s+)?"
+  private val signRe = "(?<sign>[+|-])"
+  private val dayRe = "((?<day>\\d+)\\s+)"
   private val hourRe = "(?<hour>\\d{1,2}+)"
   private val minuteRe = "(?<minute>\\d{1,2}+)"
   private val secondRe = "(?<second>(\\d{1,2}+)(\\.(\\d{1,9}+))?)"
-
-  private val dayTimeRe = Map(
-    (UnitName.minute, UnitName.second) -> (s"^$signRe$minuteRe:$secondRe$$").r,
-    (UnitName.hour, UnitName.minute) -> (s"^$signRe$hourRe:$minuteRe$$").r,
-    (UnitName.hour, UnitName.second) -> (s"^$signRe$hourRe:$minuteRe:$secondRe$$").r,
-    (UnitName.day, UnitName.hour) -> (s"^$signRe$dayRe$hourRe$$").r,
-    (UnitName.day, UnitName.minute) -> (s"^$signRe$dayRe$hourRe:$minuteRe$$").r,
-    (UnitName.day, UnitName.second) -> (s"^$signRe$dayRe$hourRe:$minuteRe:$secondRe$$").r
-  )
+  private val minsecRe = (s"^$signRe?$dayRe?($hourRe:)?$minuteRe:$secondRe$$").r
+  private val daysecRe = (s"^$signRe?$dayRe?$hourRe(:$minuteRe(:$secondRe)?)?$$").r
 
   private def unitsRange(start: UnitName.Value, end: UnitName.Value): Seq[UnitName.Value] = {
     (start.id to end.id).map(UnitName(_))
@@ -230,9 +223,10 @@
     to: UnitName.Value): CalendarInterval = {
     require(input != null, "Interval day-time string must be not null")
     assert(input.length == input.trim.length)
-    require(dayTimeRe.contains(from -> to),
-      s"Cannot support (interval '$input' $from to $to) expression")
-    val pattern = dayTimeRe(from, to).pattern
+    val pattern = (from, to) match {
+      case (UnitName.minute, UnitName.second) => minsecRe.pattern
+      case _ => daysecRe.pattern
+    }
     val m = pattern.matcher(input)
     require(m.matches, s"Interval string must match day-time format of '$pattern': $input")
 
@@ -327,6 +321,7 @@
         Long.MaxValue / DateTimeUtils.MICROS_PER_SECOND) * DateTimeUtils.MICROS_PER_SECOND
     }
 
+    if (secondNano == null) return 0L
     secondNano.split("\\.") match {
       case Array(secondsStr) => parseSeconds(secondsStr)
       case Array("", nanosStr) => parseNanos(nanosStr, false)

diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out
index bed5d7a56c1f..7b210e3b7c71 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out
@@ -149,46 +149,46 @@ interval 1 days 2 hours 3 minutes 4 seconds
 -- !query 18
 SELECT interval '1 2:03' hour to minute
 -- !query 18 schema
-struct
+struct
 -- !query 18 output
-interval 1 days 2 hours 3 minutes
+interval 2 hours 3 minutes
 
 
 -- !query 19
 SELECT interval '1 2:03:04' hour to minute
 -- !query 19 schema
-struct
+struct
 -- !query 19 output
-interval 1 days 2 hours 3 minutes
+interval 2 hours 3 minutes
 
 
 -- !query 20
 SELECT interval '1 2:03' hour to second
 -- !query 20 schema
-struct
+struct
 -- !query 20 output
-interval 1 days 2 hours 3 minutes
+interval 2 hours 3 minutes
 
 
 -- !query 21
 SELECT interval '1 2:03:04' hour to second
 -- !query 21 schema
-struct
+struct
 -- !query 21 output
-interval 1 days 2 hours 3 minutes 4 seconds
+interval 2 hours 3 minutes 4 seconds
 
 
 -- !query 22
 SELECT interval '1 2:03' minute to second
 -- !query 22 schema
-struct
+struct
 -- !query 22 output
-interval 1 days 2 minutes 3 seconds
+interval 2 minutes 3 seconds
 
 
 -- !query 23
 SELECT interval '1 2:03:04' minute to second
 -- !query 23 schema
-struct
+struct
 -- !query 23 output
-interval 1 days 2 hours 3 minutes 4 seconds
+interval 3 minutes 4 seconds
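
Aside (not part of the patches): after patch 6, only two patterns remain — `minsecRe` for MINUTE TO SECOND and `daysecRe` for everything else — and narrowing to the requested units happens in `unitsRange`, which is why the golden results above no longer show units outside the range. A simplified sketch of that final shape (demo input, possessive quantifiers dropped):

```scala
object FinalPatternSketch {
  val signRe = "(?<sign>[+|-])"
  val dayRe = "((?<day>\\d+)\\s+)"
  val hourRe = "(?<hour>\\d{1,2})"
  val minuteRe = "(?<minute>\\d{1,2})"
  val secondRe = "(?<second>(\\d{1,2})(\\.(\\d{1,9}))?)"

  val minsecRe = s"^$signRe?$dayRe?($hourRe:)?$minuteRe:$secondRe$$".r
  val daysecRe = s"^$signRe?$dayRe?$hourRe(:$minuteRe(:$secondRe)?)?$$".r

  def main(args: Array[String]): Unit = {
    // '1 2:03' matches with day=1, hour=2, minute=03, but a caller limited
    // to HOUR TO MINUTE only reads the hour and minute groups — hence
    // "interval 2 hours 3 minutes" in the updated interval.sql.out.
    val m = daysecRe.pattern.matcher("1 2:03")
    assert(m.matches())
    println(s"day=${m.group("day")} hour=${m.group("hour")} minute=${m.group("minute")}")
  }
}
```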