diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
index 725ff81ef410e..46584b67097ce 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
@@ -505,7 +505,9 @@ object IntervalUtils {
     var days: Int = 0
     var microseconds: Long = 0
     var fractionScale: Int = 0
+    val initialFractionScale = (NANOS_PER_SECOND / 10).toInt
     var fraction: Int = 0
+    var pointPrefixed: Boolean = false
 
     def trimToNextState(b: Byte, next: ParseState): Unit = {
       b match {
@@ -545,6 +547,7 @@ object IntervalUtils {
           // We preset the scale to an invalid value to track fraction presence in the UNIT_BEGIN
           // state. If we meet '.', the scale become valid for the VALUE_FRACTIONAL_PART state.
           fractionScale = -1
+          pointPrefixed = false
           b match {
             case '-' =>
               isNegative = true
@@ -556,7 +559,8 @@ object IntervalUtils {
               isNegative = false
             case '.' =>
               isNegative = false
-              fractionScale = (NANOS_PER_SECOND / 10).toInt
+              fractionScale = initialFractionScale
+              pointPrefixed = true
               i += 1
               state = VALUE_FRACTIONAL_PART
             case _ => throwIAE( s"unrecognized number '$currentWord'")
@@ -572,7 +576,7 @@ object IntervalUtils {
              }
            case ' ' => state = TRIM_BEFORE_UNIT
            case '.' =>
-              fractionScale = (NANOS_PER_SECOND / 10).toInt
+              fractionScale = initialFractionScale
              state = VALUE_FRACTIONAL_PART
            case _ => throwIAE(s"invalid value '$currentWord'")
          }
@@ -582,7 +586,7 @@ object IntervalUtils {
            case _ if '0' <= b && b <= '9' && fractionScale > 0 =>
              fraction += (b - '0') * fractionScale
              fractionScale /= 10
-            case ' ' =>
+            case ' ' if !pointPrefixed || fractionScale < initialFractionScale =>
              fraction /= NANOS_PER_MICROS.toInt
              state = TRIM_BEFORE_UNIT
            case _ if '0' <= b && b <= '9' =>
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
index 73a2adbaec1db..ee3db0391ed00 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
@@ -115,7 +115,7 @@ class IntervalUtilsSuite extends SparkFunSuite {
     checkFromInvalidString("2234567890 days", "integer overflow")
     checkFromInvalidString("\n", "Error parsing '\n' to interval")
     checkFromInvalidString("\t", "Error parsing '\t' to interval")
-
+    checkFromInvalidString(". seconds", "invalid value '.'")
   }
 
   test("string to interval: seconds with fractional part") {
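
The sketch below is a minimal, self-contained illustration of the guard added in the VALUE_FRACTIONAL_PART case, not the Spark parser itself; the object name FractionGuardSketch and its main method are made up for demonstration, and only the condition `!pointPrefixed || fractionScale < initialFractionScale` is taken from the patch. It shows why a value that begins with '.' must supply at least one fractional digit before the unit, which matches the new test expecting ". seconds" to fail with "invalid value '.'".

// Minimal sketch of the guard from the patch above (illustrative names, not Spark code).
object FractionGuardSketch {
  val NANOS_PER_SECOND: Long = 1000000000L
  // Mirrors initialFractionScale in the patch: the scale before any fractional digit is read.
  val initialFractionScale: Int = (NANOS_PER_SECOND / 10).toInt

  // True if the fractional part may legally end (a space before the unit).
  // pointPrefixed: the whole value began with '.', e.g. ".5 seconds".
  // fractionScale: shrinks by 10x for every fractional digit consumed.
  def canEndFraction(pointPrefixed: Boolean, fractionScale: Int): Boolean =
    !pointPrefixed || fractionScale < initialFractionScale

  def main(args: Array[String]): Unit = {
    // "1. seconds": digits came before '.', so an empty fraction is still acceptable.
    assert(canEndFraction(pointPrefixed = false, fractionScale = initialFractionScale))
    // ".5 seconds": one fractional digit was consumed, so fractionScale already shrank.
    assert(canEndFraction(pointPrefixed = true, fractionScale = initialFractionScale / 10))
    // ". seconds": value began with '.' and no digit followed; the guard now rejects it.
    assert(!canEndFraction(pointPrefixed = true, fractionScale = initialFractionScale))
    println("guard behaves as expected")
  }
}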