diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json index 12c97c2108a4..82809db8218c 100644 --- a/core/src/main/resources/error/error-classes.json +++ b/core/src/main/resources/error/error-classes.json @@ -681,6 +681,11 @@ "The location name cannot be empty string, but `<location>` was given." ] }, + "INVALID_EXTRACT_FIELD" : { + "message" : [ + "Cannot extract <field> from <expr>." + ] + }, "INVALID_FIELD_NAME" : { "message" : [ "Field name <fieldName> is invalid: <path> is not a struct." ] }, @@ -2215,11 +2220,6 @@ "Invalid value for the '<argName>' parameter of function '<funcName>': <invalidValue>.<endingMsg>" ] }, - "_LEGACY_ERROR_TEMP_1102" : { - "message" : [ - "Literals of type '<field>' are currently not supported for the <srcDataType> type." - ] - }, "_LEGACY_ERROR_TEMP_1103" : { "message" : [ "Unsupported component type <clz> in arrays." ] }, diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala index 0f245597efd9..d3a10e9db2e9 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala @@ -1073,10 +1073,10 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { def literalTypeUnsupportedForSourceTypeError(field: String, source: Expression): Throwable = { new AnalysisException( - errorClass = "_LEGACY_ERROR_TEMP_1102", + errorClass = "INVALID_EXTRACT_FIELD", messageParameters = Map( - "field" -> field, - "srcDataType" -> source.dataType.catalogString)) + "field" -> toSQLId(field), + "expr" -> toSQLExpr(source))) } def arrayComponentTypeUnsupportedError(clz: Class[_]): Throwable = { diff --git a/sql/core/src/test/resources/sql-tests/results/extract.sql.out b/sql/core/src/test/resources/sql-tests/results/extract.sql.out index 298e39813161..30aa0ea04ea7 100644 --- a/sql/core/src/test/resources/sql-tests/results/extract.sql.out +++ 
b/sql/core/src/test/resources/sql-tests/results/extract.sql.out @@ -318,10 +318,10 @@ struct<> -- !query output org.apache.spark.sql.AnalysisException { - "errorClass" : "_LEGACY_ERROR_TEMP_1102", + "errorClass" : "INVALID_EXTRACT_FIELD", "messageParameters" : { - "field" : "not_supported", - "srcDataType" : "string" + "expr" : "\"c\"", + "field" : "`not_supported`" }, "queryContext" : [ { "objectType" : "", @@ -340,10 +340,10 @@ struct<> -- !query output org.apache.spark.sql.AnalysisException { - "errorClass" : "_LEGACY_ERROR_TEMP_1102", + "errorClass" : "INVALID_EXTRACT_FIELD", "messageParameters" : { - "field" : "not_supported", - "srcDataType" : "interval year to month" + "expr" : "\"i\"", + "field" : "`not_supported`" }, "queryContext" : [ { "objectType" : "", @@ -362,10 +362,10 @@ struct<> -- !query output org.apache.spark.sql.AnalysisException { - "errorClass" : "_LEGACY_ERROR_TEMP_1102", + "errorClass" : "INVALID_EXTRACT_FIELD", "messageParameters" : { - "field" : "not_supported", - "srcDataType" : "interval day to second" + "expr" : "\"j\"", + "field" : "`not_supported`" }, "queryContext" : [ { "objectType" : "", @@ -376,7 +376,6 @@ org.apache.spark.sql.AnalysisException } ] } - -- !query select date_part('year', c), date_part('year', ntz), date_part('year', i) from t -- !query schema @@ -688,10 +687,10 @@ struct<> -- !query output org.apache.spark.sql.AnalysisException { - "errorClass" : "_LEGACY_ERROR_TEMP_1102", + "errorClass" : "INVALID_EXTRACT_FIELD", "messageParameters" : { - "field" : "not_supported", - "srcDataType" : "string" + "expr" : "\"c\"", + "field" : "`not_supported`" }, "queryContext" : [ { "objectType" : "", @@ -964,10 +963,10 @@ struct<> -- !query output org.apache.spark.sql.AnalysisException { - "errorClass" : "_LEGACY_ERROR_TEMP_1102", + "errorClass" : "INVALID_EXTRACT_FIELD", "messageParameters" : { - "field" : "DAY", - "srcDataType" : "interval year to month" + "expr" : "\"INTERVAL '2-1' YEAR TO MONTH\"", + "field" : "`DAY`" }, 
"queryContext" : [ { "objectType" : "", @@ -986,10 +985,10 @@ struct<> -- !query output org.apache.spark.sql.AnalysisException { - "errorClass" : "_LEGACY_ERROR_TEMP_1102", + "errorClass" : "INVALID_EXTRACT_FIELD", "messageParameters" : { - "field" : "DAY", - "srcDataType" : "interval year to month" + "expr" : "\"INTERVAL '2-1' YEAR TO MONTH\"", + "field" : "`DAY`" }, "queryContext" : [ { "objectType" : "", @@ -1008,10 +1007,10 @@ struct<> -- !query output org.apache.spark.sql.AnalysisException { - "errorClass" : "_LEGACY_ERROR_TEMP_1102", + "errorClass" : "INVALID_EXTRACT_FIELD", "messageParameters" : { - "field" : "not_supported", - "srcDataType" : "interval year to month" + "expr" : "\"INTERVAL '2-1' YEAR TO MONTH\"", + "field" : "`not_supported`" }, "queryContext" : [ { "objectType" : "", @@ -1134,10 +1133,10 @@ struct<> -- !query output org.apache.spark.sql.AnalysisException { - "errorClass" : "_LEGACY_ERROR_TEMP_1102", + "errorClass" : "INVALID_EXTRACT_FIELD", "messageParameters" : { - "field" : "MONTH", - "srcDataType" : "interval day to second" + "expr" : "\"INTERVAL '123 12:34:56.789123' DAY TO SECOND\"", + "field" : "`MONTH`" }, "queryContext" : [ { "objectType" : "", @@ -1148,7 +1147,6 @@ org.apache.spark.sql.AnalysisException } ] } - -- !query select date_part('not_supported', interval '123 12:34:56.789123123' DAY TO SECOND) -- !query schema @@ -1156,10 +1154,10 @@ struct<> -- !query output org.apache.spark.sql.AnalysisException { - "errorClass" : "_LEGACY_ERROR_TEMP_1102", + "errorClass" : "INVALID_EXTRACT_FIELD", "messageParameters" : { - "field" : "not_supported", - "srcDataType" : "interval day to second" + "expr" : "\"INTERVAL '123 12:34:56.789123' DAY TO SECOND\"", + "field" : "`not_supported`" }, "queryContext" : [ { "objectType" : "",