diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala index 882c1d85267e4..991312bff30aa 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala @@ -496,6 +496,17 @@ object IntervalUtils { state = TRIM_BEFORE_SIGN case TRIM_BEFORE_SIGN => trimToNextState(b, SIGN) case SIGN => + currentValue = 0 + fraction = 0 + // We preset next state from SIGN to TRIM_BEFORE_VALUE. If we meet '.' in the SIGN state, + // it means that the interval value we deal with here is a numeric with only fractional + // part, such as '.11 second', which can be parsed to 0.11 seconds. In this case, we need + // to reset next state to `VALUE_FRACTIONAL_PART` to go parse the fraction part of the + // interval value. + state = TRIM_BEFORE_VALUE + // We preset the scale to an invalid value to track fraction presence in the UNIT_BEGIN + // state. If we meet '.', the scale becomes valid for the VALUE_FRACTIONAL_PART state. + fractionScale = -1 b match { case '-' => isNegative = true @@ -505,14 +516,13 @@ object IntervalUtils { i += 1 case _ if '0' <= b && b <= '9' => isNegative = false + case '.' => + isNegative = false + fractionScale = (NANOS_PER_SECOND / 10).toInt + i += 1 + state = VALUE_FRACTIONAL_PART case _ => return null } - currentValue = 0 - fraction = 0 - // Sets the scale to an invalid value to track fraction presence - // in the BEGIN_UNIT_NAME state - fractionScale = -1 - state = TRIM_BEFORE_VALUE case TRIM_BEFORE_VALUE => trimToNextState(b, VALUE) case VALUE => b match { diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala index 8c84eb107cd30..f919bd1644871 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala @@ -106,6 +106,7 @@ class IntervalUtilsSuite extends SparkFunSuite { checkFromString("-1.5 seconds", new CalendarInterval(0, 0, -1500000)) // truncate nanoseconds to microseconds checkFromString("0.999999999 seconds", new CalendarInterval(0, 0, 999999)) + checkFromString(".999999999 seconds", new CalendarInterval(0, 0, 999999)) checkFromInvalidString("0.123456789123 seconds", "Error parsing interval string") }