From 012143faefcdc9275a427aaf040d3d9bcde4dfa9 Mon Sep 17 00:00:00 2001 From: Maxim Gekk Date: Wed, 30 Oct 2019 10:02:31 +0300 Subject: [PATCH 1/8] Fix parsing nanos --- .../spark/sql/catalyst/util/IntervalUtils.scala | 15 ++++++--------- .../catalyst/parser/ExpressionParserSuite.scala | 10 +++++++++- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala index f55b0545ee9c..a94569ddee98 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala @@ -218,12 +218,7 @@ object IntervalUtils { minutes = toLongWithRange("second", m.group(7), 0, 59) } // Hive allow nanosecond precision interval - val nanoStr = if (m.group(9) == null) { - null - } else { - (m.group(9) + "000000000").substring(0, 9) - } - var nanos = toLongWithRange("nanosecond", nanoStr, 0L, 999999999L) + var nanos = parseNanos(m.group(9)) to match { case "hour" => minutes = 0 @@ -292,6 +287,11 @@ object IntervalUtils { new CalendarInterval(months, microseconds) } + private def parseNanos(nanosStr: String): Long = { + val alignedStr = if (nanosStr == null) nanosStr else (nanosStr + "000000000").substring(0, 9) + toLongWithRange("nanosecond", alignedStr, 0L, 999999999L) / DateTimeUtils.NANOS_PER_MICROS + } + /** * Parse second_nano string in ss.nnnnnnnnn format to microseconds */ @@ -303,9 +303,6 @@ object IntervalUtils { Long.MinValue / DateTimeUtils.MICROS_PER_SECOND, Long.MaxValue / DateTimeUtils.MICROS_PER_SECOND) * DateTimeUtils.MICROS_PER_SECOND } - def parseNanos(nanosStr: String): Long = { - toLongWithRange("nanosecond", nanosStr, 0L, 999999999L) / DateTimeUtils.NANOS_PER_MICROS - } secondNano.split("\\.") match { case Array(secondsStr) => parseSeconds(secondsStr) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala index 5a7b3ffec53f..bd49db1536ad 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala @@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.FunctionIdentifier import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, _} import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.expressions.aggregate.{First, Last} -import org.apache.spark.sql.catalyst.util.{DateTimeTestUtils, IntervalUtils} +import org.apache.spark.sql.catalyst.util.{DateTimeTestUtils, DateTimeUtils, IntervalUtils} import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types._ import org.apache.spark.unsafe.types.CalendarInterval @@ -629,6 +629,14 @@ class ExpressionParserSuite extends AnalysisTest { // Hive nanosecond notation. 
checkIntervals("13.123456789 seconds", intervalLiteral("second", "13.123456789")) checkIntervals("-13.123456789 second", intervalLiteral("second", "-13.123456789")) + checkIntervals( + "13.123456 second", + Literal(new CalendarInterval( + 0, + 13 * DateTimeUtils.MICROS_PER_SECOND + + 123 * DateTimeUtils.MICROS_PER_MILLIS + + 456))) + checkIntervals("1.001 second", Literal(IntervalUtils.fromString("1 second 1 millisecond"))) // Non Existing unit intercept("interval 10 nanoseconds", From d768468dc4c32d0cd898cd4273a755316960ad67 Mon Sep 17 00:00:00 2001 From: Maxim Gekk Date: Wed, 30 Oct 2019 10:36:32 +0300 Subject: [PATCH 2/8] Fix parsing negative nanos --- .../spark/sql/catalyst/util/IntervalUtils.scala | 13 ++++++++----- .../sql/catalyst/parser/ExpressionParserSuite.scala | 10 ++++++---- 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala index a94569ddee98..4f02ebf625cb 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala @@ -218,7 +218,7 @@ object IntervalUtils { minutes = toLongWithRange("second", m.group(7), 0, 59) } // Hive allow nanosecond precision interval - var nanos = parseNanos(m.group(9)) + var nanos = parseNanos(m.group(9), seconds < 0) to match { case "hour" => minutes = 0 @@ -287,9 +287,11 @@ object IntervalUtils { new CalendarInterval(months, microseconds) } - private def parseNanos(nanosStr: String): Long = { + private def parseNanos(nanosStr: String, isNegative: Boolean): Long = { val alignedStr = if (nanosStr == null) nanosStr else (nanosStr + "000000000").substring(0, 9) - toLongWithRange("nanosecond", alignedStr, 0L, 999999999L) / DateTimeUtils.NANOS_PER_MICROS + val nanos = toLongWithRange("nanosecond", alignedStr, 0L, 999999999L) + val micros = nanos / DateTimeUtils.NANOS_PER_MICROS + if (isNegative) -micros else micros } /** @@ -306,9 +308,10 @@ object IntervalUtils { secondNano.split("\\.") match { case Array(secondsStr) => parseSeconds(secondsStr) - case Array("", nanosStr) => parseNanos(nanosStr) + case Array("", nanosStr) => parseNanos(nanosStr, false) case Array(secondsStr, nanosStr) => - Math.addExact(parseSeconds(secondsStr), parseNanos(nanosStr)) + val seconds = parseSeconds(secondsStr) + Math.addExact(seconds, parseNanos(nanosStr, seconds < 0)) case _ => throw new IllegalArgumentException( "Interval string does not match second-nano format of ss.nnnnnnnnn") diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala index bd49db1536ad..5423f7516c2e 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala @@ -628,14 +628,16 @@ class ExpressionParserSuite extends AnalysisTest { // Hive nanosecond notation. 
checkIntervals("13.123456789 seconds", intervalLiteral("second", "13.123456789")) - checkIntervals("-13.123456789 second", intervalLiteral("second", "-13.123456789")) + checkIntervals( + "-13.123456789 second", + Literal(new CalendarInterval( + 0, + -13 * DateTimeUtils.MICROS_PER_SECOND - 123 * DateTimeUtils.MICROS_PER_MILLIS - 456))) checkIntervals( "13.123456 second", Literal(new CalendarInterval( 0, - 13 * DateTimeUtils.MICROS_PER_SECOND + - 123 * DateTimeUtils.MICROS_PER_MILLIS + - 456))) + 13 * DateTimeUtils.MICROS_PER_SECOND + 123 * DateTimeUtils.MICROS_PER_MILLIS + 456))) checkIntervals("1.001 second", Literal(IntervalUtils.fromString("1 second 1 millisecond"))) // Non Existing unit From 46f14646e274818782a69ec09d29389ee32dbe54 Mon Sep 17 00:00:00 2001 From: Maxim Gekk Date: Wed, 30 Oct 2019 11:57:02 +0300 Subject: [PATCH 3/8] Regen interval.sql.out --- .../sql-tests/results/ansi/interval.sql.out | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out index 43ad3c3f539f..b53fd247d83a 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out @@ -61,9 +61,9 @@ select interval '13' second, interval '13.123456789' second -- !query 4 schema -struct +struct -- !query 4 output -interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 987 milliseconds 654 microseconds interval 1 weeks 3 days interval 11 hours interval 12 minutes interval 13 seconds interval 13 seconds 123 milliseconds 456 microseconds +interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 987 microseconds interval 1 weeks 3 days interval 11 hours interval 12 minutes interval 13 seconds interval 13 seconds 123 milliseconds 456 microseconds -- !query 5 @@ -75,9 +75,9 @@ select '13' second, '13.123456789' second -- !query 5 schema -struct +struct -- !query 5 output -interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 987 milliseconds 654 microseconds interval 1 weeks 3 days interval 11 hours interval 12 minutes interval 13 seconds interval 13 seconds 123 milliseconds 456 microseconds +interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 987 microseconds interval 1 weeks 3 days interval 11 hours interval 12 minutes interval 13 seconds interval 13 seconds 123 milliseconds 456 microseconds -- !query 6 @@ -204,7 +204,7 @@ select interval '99 11:22:33.123456789' day to second + dateval from interval_arithmetic -- !query 15 schema -struct +struct -- !query 15 output 2012-01-01 2011-09-23 2012-04-09 2012-04-09 2011-09-23 2011-09-23 2012-04-09 @@ -220,7 +220,7 @@ select '99 11:22:33.123456789' day to second + dateval from interval_arithmetic -- !query 16 schema -struct +struct -- !query 16 output 2012-01-01 2011-09-23 2012-04-09 2012-04-09 2011-09-23 2011-09-23 2012-04-09 @@ -236,9 +236,9 @@ select interval '99 11:22:33.123456789' day to second + tsval from interval_arithmetic -- !query 17 schema -struct +struct -- !query 17 output -2012-01-01 00:00:00 2011-09-23 13:37:26.876544 2012-04-09 12:22:33.123456 2012-04-09 12:22:33.123456 2011-09-23 13:37:26.876544 2011-09-23 13:37:26.876544 2012-04-09 12:22:33.123456 +2012-01-01 00:00:00 2011-09-23 13:37:26.999877 2012-04-09 12:22:33.000123 2012-04-09 12:22:33.000123 2011-09-23 13:37:26.999877 2011-09-23 13:37:26.999877 2012-04-09 12:22:33.000123 -- !query 18 @@ -252,9 +252,9 @@ select '99 11:22:33.123456789' day to second + tsval from 
interval_arithmetic -- !query 18 schema -struct +struct -- !query 18 output -2012-01-01 00:00:00 2011-09-23 13:37:26.876544 2012-04-09 12:22:33.123456 2012-04-09 12:22:33.123456 2011-09-23 13:37:26.876544 2011-09-23 13:37:26.876544 2012-04-09 12:22:33.123456 +2012-01-01 00:00:00 2011-09-23 13:37:26.999877 2012-04-09 12:22:33.000123 2012-04-09 12:22:33.000123 2011-09-23 13:37:26.999877 2011-09-23 13:37:26.999877 2012-04-09 12:22:33.000123 -- !query 19 @@ -263,9 +263,9 @@ select interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second from interval_arithmetic -- !query 19 schema -struct<(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds + interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 milliseconds 456 microseconds):interval,(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds - interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 milliseconds 456 microseconds):interval> +struct<(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 microseconds + interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 microseconds):interval,(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 microseconds - interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 microseconds):interval> -- !query 19 output -interval 15 weeks 4 days 20 hours 30 minutes 40 seconds 246 milliseconds 912 microseconds interval 12 weeks 5 days 2 hours 14 minutes 26 seconds +interval 15 weeks 4 days 20 hours 30 minutes 40 seconds 246 microseconds interval 12 weeks 5 days 2 hours 14 minutes 26 seconds -- !query 20 @@ -274,9 +274,9 @@ select '99 11:22:33.123456789' day to second - '10 9:8:7.123456789' day to second from interval_arithmetic -- !query 20 schema -struct<(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds + interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 milliseconds 456 microseconds):interval,(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds - interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 milliseconds 456 microseconds):interval> +struct<(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 microseconds + interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 microseconds):interval,(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 microseconds - interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 microseconds):interval> -- !query 20 output -interval 15 weeks 4 days 20 hours 30 minutes 40 seconds 246 milliseconds 912 microseconds interval 12 weeks 5 days 2 hours 14 minutes 26 seconds +interval 15 weeks 4 days 20 hours 30 minutes 40 seconds 246 microseconds interval 12 weeks 5 days 2 hours 14 minutes 26 seconds -- !query 21 From 182deef9cfda94657a6ffd06712a5784416203d5 Mon Sep 17 00:00:00 2001 From: Maxim Gekk Date: Wed, 30 Oct 2019 13:22:17 +0300 Subject: [PATCH 4/8] Address a review comment --- .../spark/sql/catalyst/util/IntervalUtils.scala | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala index 4f02ebf625cb..ddf89cadb864 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala @@ -288,10 +288,14 @@ object IntervalUtils { } private def 
parseNanos(nanosStr: String, isNegative: Boolean): Long = { - val alignedStr = if (nanosStr == null) nanosStr else (nanosStr + "000000000").substring(0, 9) - val nanos = toLongWithRange("nanosecond", alignedStr, 0L, 999999999L) - val micros = nanos / DateTimeUtils.NANOS_PER_MICROS - if (isNegative) -micros else micros + if (nanosStr != null) { + val alignedStr = (nanosStr + "000000000").substring(0, 9) + val nanos = toLongWithRange("nanosecond", alignedStr, 0L, 999999999L) + val micros = nanos / DateTimeUtils.NANOS_PER_MICROS + if (isNegative) -micros else micros + } else { + 0L + } } /** From bf324f1fdb145cb885584239c72c752b402ffae4 Mon Sep 17 00:00:00 2001 From: Maxim Gekk Date: Wed, 30 Oct 2019 14:40:10 +0300 Subject: [PATCH 5/8] Fix IntervalUtilsSuite --- .../apache/spark/sql/catalyst/util/IntervalUtils.scala | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala index ddf89cadb864..59f02b462ad9 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala @@ -218,22 +218,22 @@ object IntervalUtils { minutes = toLongWithRange("second", m.group(7), 0, 59) } // Hive allow nanosecond precision interval - var nanos = parseNanos(m.group(9), seconds < 0) + var secondsFraction = parseNanos(m.group(9), seconds < 0) to match { case "hour" => minutes = 0 seconds = 0 - nanos = 0 + secondsFraction = 0 case "minute" => seconds = 0 - nanos = 0 + secondsFraction = 0 case "second" => // No-op case _ => throw new IllegalArgumentException( s"Cannot support (interval '$input' $from to $to) expression") } - var micros = nanos / DateTimeUtils.NANOS_PER_MICROS + var micros = secondsFraction micros = Math.addExact(micros, Math.multiplyExact(days, DateTimeUtils.MICROS_PER_DAY)) micros = Math.addExact(micros, Math.multiplyExact(hours, MICROS_PER_HOUR)) micros = Math.addExact(micros, Math.multiplyExact(minutes, MICROS_PER_MINUTE)) @@ -287,6 +287,7 @@ object IntervalUtils { new CalendarInterval(months, microseconds) } + // Parses a string with nanoseconds, truncates the result and returns microseconds private def parseNanos(nanosStr: String, isNegative: Boolean): Long = { if (nanosStr != null) { val alignedStr = (nanosStr + "000000000").substring(0, 9) From a9797dbda8fc4393f025b9f7db8f9aeddcad1017 Mon Sep 17 00:00:00 2001 From: Maxim Gekk Date: Wed, 30 Oct 2019 15:44:33 +0300 Subject: [PATCH 6/8] Regen interval.sql.out --- .../sql-tests/results/ansi/interval.sql.out | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out index b53fd247d83a..43ad3c3f539f 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out @@ -61,9 +61,9 @@ select interval '13' second, interval '13.123456789' second -- !query 4 schema -struct +struct -- !query 4 output -interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 987 microseconds interval 1 weeks 3 days interval 11 hours interval 12 minutes interval 13 seconds interval 13 seconds 123 milliseconds 456 microseconds +interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 987 milliseconds 654 microseconds interval 1 weeks 3 
days interval 11 hours interval 12 minutes interval 13 seconds interval 13 seconds 123 milliseconds 456 microseconds -- !query 5 @@ -75,9 +75,9 @@ select '13' second, '13.123456789' second -- !query 5 schema -struct +struct -- !query 5 output -interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 987 microseconds interval 1 weeks 3 days interval 11 hours interval 12 minutes interval 13 seconds interval 13 seconds 123 milliseconds 456 microseconds +interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 987 milliseconds 654 microseconds interval 1 weeks 3 days interval 11 hours interval 12 minutes interval 13 seconds interval 13 seconds 123 milliseconds 456 microseconds -- !query 6 @@ -204,7 +204,7 @@ select interval '99 11:22:33.123456789' day to second + dateval from interval_arithmetic -- !query 15 schema -struct +struct -- !query 15 output 2012-01-01 2011-09-23 2012-04-09 2012-04-09 2011-09-23 2011-09-23 2012-04-09 @@ -220,7 +220,7 @@ select '99 11:22:33.123456789' day to second + dateval from interval_arithmetic -- !query 16 schema -struct +struct -- !query 16 output 2012-01-01 2011-09-23 2012-04-09 2012-04-09 2011-09-23 2011-09-23 2012-04-09 @@ -236,9 +236,9 @@ select interval '99 11:22:33.123456789' day to second + tsval from interval_arithmetic -- !query 17 schema -struct +struct -- !query 17 output -2012-01-01 00:00:00 2011-09-23 13:37:26.999877 2012-04-09 12:22:33.000123 2012-04-09 12:22:33.000123 2011-09-23 13:37:26.999877 2011-09-23 13:37:26.999877 2012-04-09 12:22:33.000123 +2012-01-01 00:00:00 2011-09-23 13:37:26.876544 2012-04-09 12:22:33.123456 2012-04-09 12:22:33.123456 2011-09-23 13:37:26.876544 2011-09-23 13:37:26.876544 2012-04-09 12:22:33.123456 -- !query 18 @@ -252,9 +252,9 @@ select '99 11:22:33.123456789' day to second + tsval from interval_arithmetic -- !query 18 schema -struct +struct -- !query 18 output -2012-01-01 00:00:00 2011-09-23 13:37:26.999877 2012-04-09 12:22:33.000123 2012-04-09 12:22:33.000123 2011-09-23 13:37:26.999877 2011-09-23 13:37:26.999877 2012-04-09 12:22:33.000123 +2012-01-01 00:00:00 2011-09-23 13:37:26.876544 2012-04-09 12:22:33.123456 2012-04-09 12:22:33.123456 2011-09-23 13:37:26.876544 2011-09-23 13:37:26.876544 2012-04-09 12:22:33.123456 -- !query 19 @@ -263,9 +263,9 @@ select interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second from interval_arithmetic -- !query 19 schema -struct<(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 microseconds + interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 microseconds):interval,(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 microseconds - interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 microseconds):interval> +struct<(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds + interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 milliseconds 456 microseconds):interval,(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds - interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 milliseconds 456 microseconds):interval> -- !query 19 output -interval 15 weeks 4 days 20 hours 30 minutes 40 seconds 246 microseconds interval 12 weeks 5 days 2 hours 14 minutes 26 seconds +interval 15 weeks 4 days 20 hours 30 minutes 40 seconds 246 milliseconds 912 microseconds interval 12 weeks 5 days 2 hours 14 minutes 26 seconds -- !query 20 @@ -274,9 +274,9 @@ select '99 11:22:33.123456789' day to second - '10 9:8:7.123456789' day to second from interval_arithmetic -- !query 20 
schema -struct<(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 microseconds + interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 microseconds):interval,(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 microseconds - interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 microseconds):interval> +struct<(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds + interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 milliseconds 456 microseconds):interval,(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds - interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 milliseconds 456 microseconds):interval> -- !query 20 output -interval 15 weeks 4 days 20 hours 30 minutes 40 seconds 246 microseconds interval 12 weeks 5 days 2 hours 14 minutes 26 seconds +interval 15 weeks 4 days 20 hours 30 minutes 40 seconds 246 milliseconds 912 microseconds interval 12 weeks 5 days 2 hours 14 minutes 26 seconds -- !query 21 From 81881c331b541d54b3205ff7e560a58b5ea17ba0 Mon Sep 17 00:00:00 2001 From: Maxim Gekk Date: Wed, 30 Oct 2019 18:42:33 +0300 Subject: [PATCH 7/8] Fix for DDLParserSuite --- .../org/apache/spark/sql/catalyst/util/IntervalUtils.scala | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala index 59f02b462ad9..67e79297b64f 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala @@ -290,7 +290,10 @@ object IntervalUtils { // Parses a string with nanoseconds, truncates the result and returns microseconds private def parseNanos(nanosStr: String, isNegative: Boolean): Long = { if (nanosStr != null) { - val alignedStr = (nanosStr + "000000000").substring(0, 9) + val maxNanosLen = 9 + val alignedStr = if (nanosStr.length < maxNanosLen) { + (nanosStr + "000000000").substring(0, maxNanosLen) + } else nanosStr val nanos = toLongWithRange("nanosecond", alignedStr, 0L, 999999999L) val micros = nanos / DateTimeUtils.NANOS_PER_MICROS if (isNegative) -micros else micros From 337e9af2fb6fa20c5245419c19dda807554d0e6c Mon Sep 17 00:00:00 2001 From: Maxim Gekk Date: Wed, 30 Oct 2019 18:47:54 +0300 Subject: [PATCH 8/8] Regen literals.sql.out --- .../src/test/resources/sql-tests/results/literals.sql.out | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sql/core/src/test/resources/sql-tests/results/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/literals.sql.out index 115287821bf4..550b9bd936a0 100644 --- a/sql/core/src/test/resources/sql-tests/results/literals.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/literals.sql.out @@ -323,9 +323,9 @@ select timestamp '2016-33-11 20:54:00.000' -- !query 34 select interval 13.123456789 seconds, interval -13.123456789 second -- !query 34 schema -struct +struct -- !query 34 output -interval 13 seconds 123 milliseconds 456 microseconds interval -12 seconds -876 milliseconds -544 microseconds +interval 13 seconds 123 milliseconds 456 microseconds interval -13 seconds -123 milliseconds -456 microseconds -- !query 35
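
Note on the net behaviour of this series: the sketch below is a minimal standalone summary of where the fractional-second parsing ends up after patch 8, not the Spark code itself. The object name NanosParsingSketch, the simplified parseSeconds/parseSecondNano helpers, and the inlined DateTimeUtils constants are illustrative assumptions for a self-contained example.

object NanosParsingSketch {
  private val NANOS_PER_MICROS = 1000L
  private val MICROS_PER_SECOND = 1000000L

  // Mirrors the patched parseNanos: right-pad the fraction to 9 digits,
  // check the [0, 999999999] range, truncate nanoseconds to microseconds,
  // and give the fraction the sign of the seconds field.
  def parseNanos(nanosStr: String, isNegative: Boolean): Long = {
    if (nanosStr != null) {
      val maxNanosLen = 9
      val alignedStr = if (nanosStr.length < maxNanosLen) {
        (nanosStr + "000000000").substring(0, maxNanosLen)
      } else nanosStr
      val nanos = alignedStr.toLong
      require(0L <= nanos && nanos <= 999999999L, s"nanosecond $nanos is out of range")
      val micros = nanos / NANOS_PER_MICROS
      if (isNegative) -micros else micros
    } else {
      0L
    }
  }

  // Simplified stand-in for the second-nano path: 'ss.nnnnnnnnn' to microseconds.
  def parseSecondNano(secondNano: String): Long = {
    def parseSeconds(secondsStr: String): Long =
      Math.multiplyExact(secondsStr.toLong, MICROS_PER_SECOND)

    secondNano.split("\\.") match {
      case Array(secondsStr) => parseSeconds(secondsStr)
      case Array("", nanosStr) => parseNanos(nanosStr, false)
      case Array(secondsStr, nanosStr) =>
        val seconds = parseSeconds(secondsStr)
        Math.addExact(seconds, parseNanos(nanosStr, seconds < 0))
      case _ =>
        throw new IllegalArgumentException(
          "Interval string does not match second-nano format of ss.nnnnnnnnn")
    }
  }

  def main(args: Array[String]): Unit = {
    // '13.123456789' -> 13 seconds 123 milliseconds 456 microseconds (trailing 789 ns truncated).
    assert(parseSecondNano("13.123456789") == 13123456L)
    // '-13.123456789' -> the fraction is negated together with the seconds.
    assert(parseSecondNano("-13.123456789") == -13123456L)
    // A short fraction is padded on the right, not the left: '.001' is 1 millisecond.
    assert(parseSecondNano("1.001") == 1001000L)
  }
}

The design choice the series settles on is that sub-microsecond digits are truncated rather than rejected, and the fractional part inherits the sign of the seconds field; the regenerated interval.sql.out and literals.sql.out expectations above reflect exactly that.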