From 670e51f8da95ee8872e3b11d339401dcc1f48208 Mon Sep 17 00:00:00 2001 From: yangjie01 Date: Mon, 12 Dec 2022 09:55:17 +0800 Subject: [PATCH 1/6] reuse --- .../org/apache/spark/sql/catalyst/parser/AstBuilder.scala | 2 +- .../org/apache/spark/sql/errors/QueryParsingErrors.scala | 7 ------- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala index 0ad185bef177..da25702e1f21 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala @@ -2417,7 +2417,7 @@ class AstBuilder extends SqlBaseParserBaseVisitor[AnyRef] with SQLConfHelper wit IntervalUtils.stringToInterval(UTF8String.fromString(value)) } catch { case e: IllegalArgumentException => - val ex = QueryParsingErrors.cannotParseIntervalValueError(value, ctx) + val ex = QueryParsingErrors.cannotParseValueTypeError(valueType, value, ctx) ex.setStackTrace(e.getStackTrace) throw ex } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala index ad6f72986d64..aef95a538a6e 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala @@ -219,13 +219,6 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { ctx) } - def cannotParseIntervalValueError(value: String, ctx: TypeConstructorContext): Throwable = { - new ParseException( - errorClass = "_LEGACY_ERROR_TEMP_0020", - messageParameters = Map("value" -> value), - ctx) - } - def literalValueTypeUnsupportedError( unsupportedType: String, supportedTypes: Seq[String], From 07337a669567cbe0b0a6ceb6635636c60b106207 Mon Sep 17 00:00:00 
2001 From: yangjie01 Date: Mon, 12 Dec 2022 10:02:09 +0800 Subject: [PATCH 2/6] remove _LEGACY_ERROR_TEMP_1020 --- core/src/main/resources/error/error-classes.json | 5 ----- 1 file changed, 5 deletions(-) diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json index 3f091f090fce..1bc18a392a61 100644 --- a/core/src/main/resources/error/error-classes.json +++ b/core/src/main/resources/error/error-classes.json @@ -1906,11 +1906,6 @@ "<quoted> is a permanent view, which is not supported by streaming reading API such as `DataStreamReader.table` yet." ] }, - "_LEGACY_ERROR_TEMP_1020" : { - "message" : [ - "Invalid usage of <elem> in <prettyName>." - ] - }, "_LEGACY_ERROR_TEMP_1021" : { "message" : [ "count(.*) is not allowed. Please use count(*) or expand the columns manually, e.g. count(col1, col2)." From 98587778f7fa271bf55d2bceef7d235621655dca Mon Sep 17 00:00:00 2001 From: yangjie01 Date: Mon, 12 Dec 2022 10:03:46 +0800 Subject: [PATCH 3/6] revert_LEGACY_ERROR_TEMP_1020 --- core/src/main/resources/error/error-classes.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json index 1bc18a392a61..3f091f090fce 100644 --- a/core/src/main/resources/error/error-classes.json +++ b/core/src/main/resources/error/error-classes.json @@ -1906,6 +1906,11 @@ "<quoted> is a permanent view, which is not supported by streaming reading API such as `DataStreamReader.table` yet." ] }, + "_LEGACY_ERROR_TEMP_1020" : { + "message" : [ + "Invalid usage of <elem> in <prettyName>." + ] + }, "_LEGACY_ERROR_TEMP_1021" : { "message" : [ "count(.*) is not allowed. Please use count(*) or expand the columns manually, e.g. count(col1, col2)." 
From c8e89734b3ca3dea608cbdf659dbb583e7499509 Mon Sep 17 00:00:00 2001 From: yangjie01 Date: Mon, 12 Dec 2022 10:21:44 +0800 Subject: [PATCH 4/6] remove _LEGACY_ERROR_TEMP_0020 --- core/src/main/resources/error/error-classes.json | 5 ----- pom.xml | 2 +- .../spark/sql/catalyst/parser/ExpressionParserSuite.scala | 6 +++--- 3 files changed, 4 insertions(+), 9 deletions(-) diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json index 3f091f090fce..e76328e970dc 100644 --- a/core/src/main/resources/error/error-classes.json +++ b/core/src/main/resources/error/error-classes.json @@ -1610,11 +1610,6 @@ "Function trim doesn't support with type <trimOption>. Please use BOTH, LEADING or TRAILING as trim type." ] }, - "_LEGACY_ERROR_TEMP_0020" : { - "message" : [ - "Cannot parse the INTERVAL value: <value>." - ] - }, "_LEGACY_ERROR_TEMP_0022" : { "message" : [ "." ] diff --git a/pom.xml b/pom.xml index 54eb92873853..136238978891 100644 --- a/pom.xml +++ b/pom.xml @@ -109,7 +109,7 @@ UTF-8 UTF-8 - 1.8 + 11 ${java.version} ${java.version} 3.8.6 diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala index 01c9907cb8c1..1e085d7e1061 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala @@ -602,7 +602,7 @@ class ExpressionParserSuite extends AnalysisTest { assertEqual("INTERVAL '1 year 2 month'", ymIntervalLiteral) checkError( exception = parseException("Interval 'interval 1 yearsss 2 monthsss'"), - errorClass = "_LEGACY_ERROR_TEMP_0020", + errorClass = "INVALID_TYPED_LITERAL", parameters = Map("value" -> "interval 1 yearsss 2 monthsss"), context = ExpectedContext( fragment = "Interval 'interval 1 yearsss 2 monthsss'", start = 0, @@ -616,7 +616,7 @@ class
ExpressionParserSuite extends AnalysisTest { assertEqual("INTERVAL '1 day 2 hour 3 minute 4.005006 second'", dtIntervalLiteral) checkError( exception = parseException("Interval 'interval 1 daysss 2 hoursss'"), - errorClass = "_LEGACY_ERROR_TEMP_0020", + errorClass = "INVALID_TYPED_LITERAL", parameters = Map("value" -> "interval 1 daysss 2 hoursss"), context = ExpectedContext( fragment = "Interval 'interval 1 daysss 2 hoursss'", @@ -639,7 +639,7 @@ class ExpressionParserSuite extends AnalysisTest { assertEqual("INTERVAL '3 month 1 hour'", intervalLiteral) checkError( exception = parseException("Interval 'interval 3 monthsss 1 hoursss'"), - errorClass = "_LEGACY_ERROR_TEMP_0020", + errorClass = "INVALID_TYPED_LITERAL", parameters = Map("value" -> "interval 3 monthsss 1 hoursss"), context = ExpectedContext( fragment = "Interval 'interval 3 monthsss 1 hoursss'", From 0ba07605b897a7f2333efeecf0201fcc3140e803 Mon Sep 17 00:00:00 2001 From: yangjie01 Date: Mon, 12 Dec 2022 10:31:06 +0800 Subject: [PATCH 5/6] update golden files --- .../sql-tests/results/ansi/interval.sql.out | 60 ++++++++++++------- .../sql-tests/results/interval.sql.out | 60 ++++++++++++------- 2 files changed, 80 insertions(+), 40 deletions(-) diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out index 9d298fe350a6..493d8769ad47 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out @@ -2398,9 +2398,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "中文 interval 1 day" + "value" : "'中文 interval 1 day'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2419,9 +2421,11 @@ struct<> -- !query output 
org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "interval中文 1 day" + "value" : "'interval中文 1 day'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2440,9 +2444,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "interval 1中文day" + "value" : "'interval 1中文day'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2579,9 +2585,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "+" + "value" : "'+'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2600,9 +2608,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "+." 
+ "value" : "'+.'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2621,9 +2631,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "1" + "value" : "'1'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2642,9 +2654,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "1.2" + "value" : "'1.2'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2663,9 +2677,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "- 2" + "value" : "'- 2'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2684,9 +2700,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "1 day -" + "value" : "'1 day -'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2705,9 +2723,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "1 day 1" + "value" : "'1 day 1'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out 
index 716ea9335c9b..690f3da0f9a1 100644 --- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out @@ -2211,9 +2211,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "中文 interval 1 day" + "value" : "'中文 interval 1 day'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2232,9 +2234,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "interval中文 1 day" + "value" : "'interval中文 1 day'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2253,9 +2257,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "interval 1中文day" + "value" : "'interval 1中文day'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2392,9 +2398,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "+" + "value" : "'+'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2413,9 +2421,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "+." 
+ "value" : "'+.'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2434,9 +2444,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "1" + "value" : "'1'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2455,9 +2467,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "1.2" + "value" : "'1.2'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2476,9 +2490,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "- 2" + "value" : "'- 2'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2497,9 +2513,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "1 day -" + "value" : "'1 day -'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", @@ -2518,9 +2536,11 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0020", + "errorClass" : "INVALID_TYPED_LITERAL", + "sqlState" : "42000", "messageParameters" : { - "value" : "1 day 1" + "value" : "'1 day 1'", + "valueType" : "\"INTERVAL\"" }, "queryContext" : [ { "objectType" : "", From 3364214db62f819e1f3886c24369b2a4bb4be370 Mon Sep 17 00:00:00 2001 From: yangjie01 Date: Mon, 12 Dec 2022 10:46:22 +0800 Subject: [PATCH 
6/6] fix test suites --- pom.xml | 2 +- .../catalyst/parser/ExpressionParserSuite.scala | 15 ++++++++++++--- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index 136238978891..54eb92873853 100644 --- a/pom.xml +++ b/pom.xml @@ -109,7 +109,7 @@ UTF-8 UTF-8 - 11 + 1.8 ${java.version} ${java.version} 3.8.6 diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala index 1e085d7e1061..760b8630f633 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala @@ -603,7 +603,10 @@ class ExpressionParserSuite extends AnalysisTest { checkError( exception = parseException("Interval 'interval 1 yearsss 2 monthsss'"), errorClass = "INVALID_TYPED_LITERAL", - parameters = Map("value" -> "interval 1 yearsss 2 monthsss"), + parameters = Map( + "valueType" -> "\"INTERVAL\"", + "value" -> "'interval 1 yearsss 2 monthsss'" + ), context = ExpectedContext( fragment = "Interval 'interval 1 yearsss 2 monthsss'", start = 0, @@ -617,7 +620,10 @@ class ExpressionParserSuite extends AnalysisTest { checkError( exception = parseException("Interval 'interval 1 daysss 2 hoursss'"), errorClass = "INVALID_TYPED_LITERAL", - parameters = Map("value" -> "interval 1 daysss 2 hoursss"), + parameters = Map( + "valueType" -> "\"INTERVAL\"", + "value" -> "'interval 1 daysss 2 hoursss'" + ), context = ExpectedContext( fragment = "Interval 'interval 1 daysss 2 hoursss'", start = 0, @@ -640,7 +646,10 @@ class ExpressionParserSuite extends AnalysisTest { checkError( exception = parseException("Interval 'interval 3 monthsss 1 hoursss'"), errorClass = "INVALID_TYPED_LITERAL", - parameters = Map("value" -> "interval 3 monthsss 1 hoursss"), + parameters = Map( + "valueType" -> "\"INTERVAL\"", + 
"value" -> "'interval 3 monthsss 1 hoursss'" + ), context = ExpectedContext( fragment = "Interval 'interval 3 monthsss 1 hoursss'", start = 0,