diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 6f5b3b5a1347..d380507cbe1a 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -4312,5 +4312,40 @@
     "message" : [
       "Not enough memory to build and broadcast the table to all worker nodes. As a workaround, you can either disable broadcast by setting <autoBroadcastjoinThreshold> to -1 or increase the spark driver memory by setting <driverMemory> to a higher value"
     ]
+  },
+  "_LEGACY_ERROR_TEMP_2276" : {
+    "message" : [
+      "Hive table <tableName> with ANSI intervals is not supported"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_2277" : {
+    "message" : [
+      "Number of dynamic partitions created is <numWrittenParts>, which is more than <maxDynamicPartitions>. To solve this try to set <maxDynamicPartitionsKey> to at least <numWrittenParts>."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_2278" : {
+    "message" : [
+      "The input <valueType> '<input>' does not match the given number format: '<format>'"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_2279" : {
+    "message" : [
+      "Multiple bucket transforms are not supported."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_2280" : {
+    "message" : [
+      "Create namespace comment is not supported"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_2281" : {
+    "message" : [
+      "Remove namespace comment is not supported"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_2282" : {
+    "message" : [
+      "Drop namespace restrict is not supported"
+    ]
   }
 }
\ No newline at end of file
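
Note for reviewers: each `message` template above is rendered by substituting the named `<placeholder>` slots with values from the `messageParameters` map supplied at throw time. A minimal sketch of that substitution, assuming nothing beyond the standard library (the `format` helper here is hypothetical; Spark's actual JSON-backed error reader additionally handles escaping and validation):

```scala
object ErrorTemplateDemo extends App {
  // Hypothetical helper: replace each <name> slot with its value from the map.
  def format(template: String, params: Map[String, String]): String =
    params.foldLeft(template) { case (msg, (k, v)) => msg.replace(s"<$k>", v) }

  val template =
    "The input <valueType> '<input>' does not match the given number format: '<format>'"
  println(format(template, Map(
    "valueType" -> "string",
    "input" -> "-34,338,492",
    "format" -> "99G999G999")))
  // => The input string '-34,338,492' does not match the given number format: '99G999G999'
}
```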
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 4aedfb3b03da..4decf31d8195 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -2577,8 +2577,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
         "expected" -> s"Detail message: $detailMessage"))
   }
 
-  def hiveTableWithAnsiIntervalsError(tableName: String): Throwable = {
-    new UnsupportedOperationException(s"Hive table $tableName with ANSI intervals is not supported")
+  def hiveTableWithAnsiIntervalsError(tableName: String): SparkUnsupportedOperationException = {
+    new SparkUnsupportedOperationException(
+      errorClass = "_LEGACY_ERROR_TEMP_2276",
+      messageParameters = Map("tableName" -> tableName))
   }
 
   def cannotConvertOrcTimestampToTimestampNTZError(): Throwable = {
@@ -2602,31 +2604,46 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
       maxDynamicPartitions: Int,
       maxDynamicPartitionsKey: String): Throwable = {
     new SparkException(
-      s"Number of dynamic partitions created is $numWrittenParts" +
-        s", which is more than $maxDynamicPartitions" +
-        s". To solve this try to set $maxDynamicPartitionsKey" +
-        s" to at least $numWrittenParts.")
+      errorClass = "_LEGACY_ERROR_TEMP_2277",
+      messageParameters = Map(
+        "numWrittenParts" -> numWrittenParts.toString(),
+        "maxDynamicPartitionsKey" -> maxDynamicPartitionsKey,
+        "maxDynamicPartitions" -> maxDynamicPartitions.toString()),
+      cause = null)
   }
 
-  def invalidNumberFormatError(valueType: String, input: String, format: String): Throwable = {
-    new IllegalArgumentException(
-      s"The input $valueType '$input' does not match the given number format: '$format'")
+  def invalidNumberFormatError(
+      valueType: String, input: String, format: String): SparkIllegalArgumentException = {
+    new SparkIllegalArgumentException(
+      errorClass = "_LEGACY_ERROR_TEMP_2278",
+      messageParameters = Map(
+        "valueType" -> valueType,
+        "input" -> input,
+        "format" -> format))
   }
 
-  def multipleBucketTransformsError(): Throwable = {
-    new UnsupportedOperationException("Multiple bucket transforms are not supported.")
+  def multipleBucketTransformsError(): SparkUnsupportedOperationException = {
+    new SparkUnsupportedOperationException(
+      errorClass = "_LEGACY_ERROR_TEMP_2279",
+      messageParameters = Map.empty)
   }
 
-  def unsupportedCreateNamespaceCommentError(): Throwable = {
-    new SQLFeatureNotSupportedException("Create namespace comment is not supported")
+  def unsupportedCreateNamespaceCommentError(): SparkSQLFeatureNotSupportedException = {
+    new SparkSQLFeatureNotSupportedException(
+      errorClass = "_LEGACY_ERROR_TEMP_2280",
+      messageParameters = Map.empty)
   }
 
-  def unsupportedRemoveNamespaceCommentError(): Throwable = {
-    new SQLFeatureNotSupportedException("Remove namespace comment is not supported")
+  def unsupportedRemoveNamespaceCommentError(): SparkSQLFeatureNotSupportedException = {
+    new SparkSQLFeatureNotSupportedException(
+      errorClass = "_LEGACY_ERROR_TEMP_2281",
+      messageParameters = Map.empty)
   }
 
-  def unsupportedDropNamespaceRestrictError(): Throwable = {
-    new SQLFeatureNotSupportedException("Drop namespace restrict is not supported")
+  def unsupportedDropNamespaceRestrictError(): SparkSQLFeatureNotSupportedException = {
+    new SparkSQLFeatureNotSupportedException(
+      errorClass = "_LEGACY_ERROR_TEMP_2282",
+      messageParameters = Map.empty)
   }
 
   def timestampAddOverflowError(micros: Long, amount: Int, unit: String): ArithmeticException = {
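
Returning the concrete `Spark*Exception` types matters because they all implement `SparkThrowable`, letting callers branch on a stable error class instead of matching message text. A hedged sketch of that consumer side, assuming a Spark 3.4-era classpath where `SparkThrowable` exposes `getErrorClass` and `getMessageParameters`:

```scala
import org.apache.spark.SparkThrowable

// Summarize any error generically: prefer the machine-readable error class
// and its parameters when present, fall back to the raw message otherwise.
def describe(e: Throwable): String = e match {
  case st: SparkThrowable if st.getErrorClass != null =>
    s"class=${st.getErrorClass} params=${st.getMessageParameters}"
  case _ =>
    s"unclassified: ${e.getMessage}"
}
```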
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
index 94ae774070c8..15513037fe1b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.expressions
 
 import java.math.{BigDecimal => JavaBigDecimal}
 
-import org.apache.spark.SparkFunSuite
+import org.apache.spark.{SparkFunSuite, SparkIllegalArgumentException}
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
 import org.apache.spark.sql.catalyst.dsl.expressions._
@@ -1124,7 +1124,8 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     ).foreach { case (str: String, format: String) =>
       val toNumberExpr = ToNumber(Literal(str), Literal(format))
       assert(toNumberExpr.checkInputDataTypes() == TypeCheckResult.TypeCheckSuccess)
-      checkExceptionInExpression[IllegalArgumentException](
+
+      checkExceptionInExpression[SparkIllegalArgumentException](
         toNumberExpr, "does not match the given number format")
 
       val tryToNumberExpr = TryToNumber(Literal(str), Literal(format))
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out
index 53a57ee270ba..9ddd87f10de1 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out
@@ -4694,8 +4694,15 @@ SELECT '' AS to_number_1, to_number('-34,338,492', '99G999G999')
 -- !query schema
 struct<>
 -- !query output
-java.lang.IllegalArgumentException
-The input string '-34,338,492' does not match the given number format: '99G999G999'
+org.apache.spark.SparkIllegalArgumentException
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_2278",
+  "messageParameters" : {
+    "format" : "99G999G999",
+    "input" : "-34,338,492",
+    "valueType" : "string"
+  }
+}
 
 
 -- !query
@@ -4761,8 +4768,15 @@ SELECT '' AS to_number_16, to_number('123456','999G999')
 -- !query schema
 struct<>
 -- !query output
-java.lang.IllegalArgumentException
-The input string '123456' does not match the given number format: '999G999'
+org.apache.spark.SparkIllegalArgumentException
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_2278",
+  "messageParameters" : {
+    "format" : "999G999",
+    "input" : "123456",
+    "valueType" : "string"
+  }
+}
 
 
 -- !query
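
With the error class attached, the string-matching assertion in the suite above could later be tightened into a structural one. A sketch assuming `SparkFunSuite`'s `checkError` helper with its 3.4-era signature, reusing the input/format values from the `numeric.sql.out` expectations (it must run inside a suite body, where `checkError` and ScalaTest's `intercept` are in scope):

```scala
import org.apache.spark.SparkIllegalArgumentException
import org.apache.spark.sql.catalyst.expressions.{Literal, ToNumber}

// Values mirror the to_number_16 case above; eval() triggers the parse error.
val expr = ToNumber(Literal("123456"), Literal("999G999"))
checkError(
  exception = intercept[SparkIllegalArgumentException] { expr.eval() },
  errorClass = "_LEGACY_ERROR_TEMP_2278",
  parameters = Map(
    "valueType" -> "string",
    "input" -> "123456",
    "format" -> "999G999"))
```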