diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 9c494c043796..fff4ff811150 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -285,14 +285,14 @@
         "The <exprName> must be between <valueRange> (current value = <currentValue>)"
       ]
     },
-    "WRONG_NUM_ENDPOINTS" : {
+    "WRONG_NUM_ARGS" : {
       "message" : [
-        "The number of endpoints must be >= 2 to construct intervals but the actual number is <actualNumber>."
+        "The <functionName> requires <expectedNum> parameters but the actual number is <actualNum>."
       ]
     },
-    "WRONG_NUM_PARAMS" : {
+    "WRONG_NUM_ENDPOINTS" : {
       "message" : [
-        "The <functionName> requires <expectedNum> parameters but the actual number is <actualNum>."
+        "The number of endpoints must be >= 2 to construct intervals but the actual number is <actualNumber>."
       ]
     }
   }
@@ -407,12 +407,6 @@
       "Fail to recognize <pattern> pattern in the DateTimeFormatter. 1) You can set <config> to \"LEGACY\" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"
     ]
   },
-  "FORMAT_DATETIME_BY_NEW_PARSER" : {
-    "message" : [
-      "Spark >= 3.0:",
-      "Fail to format it to <resultCandidate> in the new formatter. You can set <config> to \"LEGACY\" to restore the behavior before Spark 3.0, or set to \"CORRECTED\" and treat it as an invalid datetime string."
-    ]
-  },
   "PARSE_DATETIME_BY_NEW_PARSER" : {
     "message" : [
       "Spark >= 3.0:",
@@ -580,28 +574,6 @@
       "More than one row returned by a subquery used as an expression."
     ]
   },
-  "NAMESPACE_ALREADY_EXISTS" : {
-    "message" : [
-      "Cannot create namespace <nameSpaceName> because it already exists.",
-      "Choose a different name, drop the existing namespace, or add the IF NOT EXISTS clause to tolerate pre-existing namespace."
-    ],
-    "sqlState" : "42000"
-  },
-  "NAMESPACE_NOT_EMPTY" : {
-    "message" : [
-      "Cannot drop a namespace <nameSpaceName> because it contains objects.",
-      "Use DROP NAMESPACE ... CASCADE to drop the namespace and all its objects."
-    ],
-    "sqlState" : "42000"
-  },
-  "NAMESPACE_NOT_FOUND" : {
-    "message" : [
-      "The namespace <nameSpaceName> cannot be found. Verify the spelling and correctness of the namespace.",
-      "If you did not qualify the name with a catalog, verify the current_schema() output, or qualify the name with the correct catalog.",
-      "To tolerate the error on drop use DROP NAMESPACE IF EXISTS."
-    ],
-    "sqlState" : "42000"
-  },
   "NON_LITERAL_PIVOT_VALUES" : {
     "message" : [
       "Literal expressions required for pivot values, found <expression>."
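For context, the net effect of the WRONG_NUM_PARAMS -> WRONG_NUM_ARGS rename on the user-facing text looks roughly like the following (a hypothetical spark-sql session; the "Cannot resolve ... due to data type mismatch" frame comes from the parent DATATYPE_MISMATCH class, and exact formatting depends on the error framework version):

    spark-sql> SELECT elt(1);
    [DATATYPE_MISMATCH.WRONG_NUM_ARGS] Cannot resolve "elt(1)" due to data type mismatch:
    The `elt` requires > 1 parameters but the actual number is 1.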
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala
index 50050d391592..a047b187dbf7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala
@@ -127,5 +127,5 @@ class FunctionAlreadyExistsException(errorClass: String, messageParameters: Map[
 }
 
 class IndexAlreadyExistsException(message: String, cause: Option[Throwable] = None)
-  extends AnalysisException(errorClass = "INDEX_NOT_FOUND",
+  extends AnalysisException(errorClass = "INDEX_ALREADY_EXISTS",
     Map("message" -> message), cause)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
index fa52e6cd8517..3d01ae1b7811 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
@@ -65,7 +65,7 @@ case class CallMethodViaReflection(children: Seq[Expression])
   override def checkInputDataTypes(): TypeCheckResult = {
     if (children.size < 2) {
       DataTypeMismatch(
-        errorSubClass = "WRONG_NUM_PARAMS",
+        errorSubClass = "WRONG_NUM_ARGS",
         messageParameters = Map(
           "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index 4d99c3b02a07..116227224fdd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -1209,7 +1209,7 @@ case class Least(children: Seq[Expression]) extends ComplexTypeMergingExpression
   override def checkInputDataTypes(): TypeCheckResult = {
     if (children.length <= 1) {
       DataTypeMismatch(
-        errorSubClass = "WRONG_NUM_PARAMS",
+        errorSubClass = "WRONG_NUM_ARGS",
         messageParameters = Map(
           "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
@@ -1300,7 +1300,7 @@ case class Greatest(children: Seq[Expression]) extends ComplexTypeMergingExpress
   override def checkInputDataTypes(): TypeCheckResult = {
     if (children.length <= 1) {
       DataTypeMismatch(
-        errorSubClass = "WRONG_NUM_PARAMS",
+        errorSubClass = "WRONG_NUM_ARGS",
         messageParameters = Map(
           "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/hash.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/hash.scala
index 4f8ed1953f40..3cdf7b3b0d0f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/hash.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/hash.scala
@@ -271,7 +271,7 @@ abstract class HashExpression[E] extends Expression {
   override def checkInputDataTypes(): TypeCheckResult = {
     if (children.length < 1) {
       DataTypeMismatch(
-        errorSubClass = "WRONG_NUM_PARAMS",
+        errorSubClass = "WRONG_NUM_ARGS",
         messageParameters = Map(
           "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 0",
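The expression files above all share one arity-check shape. A minimal sketch of that pattern, for reference; the "actualNum" entry and the success branch are assumed here, since the hunks are truncated before them:

    override def checkInputDataTypes(): TypeCheckResult = {
      if (children.size < 2) {
        // Report the arity mismatch under the renamed sub-class.
        DataTypeMismatch(
          errorSubClass = "WRONG_NUM_ARGS",
          messageParameters = Map(
            "functionName" -> toSQLId(prettyName),
            "expectedNum" -> "> 1",
            "actualNum" -> children.length.toString))  // assumed completion
      } else {
        TypeCheckResult.TypeCheckSuccess
      }
    }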
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
index 3529644aeeac..aab9b0a13c30 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
@@ -396,7 +396,7 @@ case class JsonTuple(children: Seq[Expression])
   override def checkInputDataTypes(): TypeCheckResult = {
     if (children.length < 2) {
       DataTypeMismatch(
-        errorSubClass = "WRONG_NUM_PARAMS",
+        errorSubClass = "WRONG_NUM_ARGS",
         messageParameters = Map(
           "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
index 8ae4bb9c29c0..cc47d739d71a 100755
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
@@ -276,7 +276,7 @@ case class Elt(
   override def checkInputDataTypes(): TypeCheckResult = {
     if (children.size < 2) {
       DataTypeMismatch(
-        errorSubClass = "WRONG_NUM_PARAMS",
+        errorSubClass = "WRONG_NUM_ARGS",
         messageParameters = Map(
           "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index a006687c6ddb..cf7e3524d5bf 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -978,7 +978,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
   }
 
   def corruptedViewReferredTempFunctionsInCatalogError(e: Exception): Throwable = {
-      new AnalysisException(
+    new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1088",
       messageParameters = Map.empty,
       cause = Some(e))
@@ -1309,19 +1309,19 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
   }
 
   def tableIsNotRowLevelOperationTableError(table: Table): Throwable = {
-      new AnalysisException(
+    new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1122",
       messageParameters = Map("table" -> table.name()))
   }
 
   def cannotRenameTableWithAlterViewError(): Throwable = {
-      new AnalysisException(
+    new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1123",
       messageParameters = Map.empty)
   }
 
   private def notSupportedForV2TablesError(cmd: String): Throwable = {
-      new AnalysisException(
+    new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1124",
       messageParameters = Map("cmd" -> cmd))
   }
@@ -1355,25 +1355,25 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
   }
 
   def databaseFromV1SessionCatalogNotSpecifiedError(): Throwable = {
-      new AnalysisException(
+    new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1125",
       messageParameters = Map.empty)
   }
 
   def nestedDatabaseUnsupportedByV1SessionCatalogError(catalog: String): Throwable = {
-      new AnalysisException(
+    new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1126",
       messageParameters = Map("catalog" -> catalog))
   }
 
   def invalidRepartitionExpressionsError(sortOrders: Seq[Any]): Throwable = {
-      new AnalysisException(
+    new AnalysisException(
      errorClass = "_LEGACY_ERROR_TEMP_1127",
      messageParameters = Map("sortOrders" -> sortOrders.toString()))
   }
 
   def partitionColumnNotSpecifiedError(format: String, partitionColumn: String): Throwable = {
-      new AnalysisException(
+    new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1128",
       messageParameters = Map(
         "format" -> format,
@@ -2145,7 +2145,9 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
   def invalidPatternError(pattern: String, message: String): Throwable = {
     new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1216",
-      messageParameters = Map("pattern" -> pattern, "message" -> message))
+      messageParameters = Map(
+        "pattern" -> toSQLValue(pattern, StringType),
+        "message" -> message))
   }
 
   def tableIdentifierExistsError(tableIdentifier: TableIdentifier): Throwable = {
@@ -2305,7 +2307,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
   }
 
   def analyzeTableNotSupportedOnViewsError(): Throwable = {
-      new AnalysisException(
+    new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1236",
       messageParameters = Map.empty)
   }
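The QueryCompilationErrors hunks are of two kinds: whitespace-only corrections to the indentation of the new AnalysisException( calls, and the invalidPatternError change, which now routes the pattern through toSQLValue. toSQLValue (from QueryErrorsBase) renders a value as a SQL literal, which for strings means single quoting; roughly, judging from the golden-file changes further down:

    toSQLValue("m%aca", StringType)             // "'m%aca'"
    toSQLValue("yyyy-MM-dd GGGGG", StringType)  // "'yyyy-MM-dd GGGGG'"

That quoting is what drives every .sql.out change below.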
"_LEGACY_ERROR_TEMP_1127", messageParameters = Map("sortOrders" -> sortOrders.toString())) } def partitionColumnNotSpecifiedError(format: String, partitionColumn: String): Throwable = { - new AnalysisException( + new AnalysisException( errorClass = "_LEGACY_ERROR_TEMP_1128", messageParameters = Map( "format" -> format, @@ -2145,7 +2145,9 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { def invalidPatternError(pattern: String, message: String): Throwable = { new AnalysisException( errorClass = "_LEGACY_ERROR_TEMP_1216", - messageParameters = Map("pattern" -> pattern, "message" -> message)) + messageParameters = Map( + "pattern" -> toSQLValue(pattern, StringType), + "message" -> message)) } def tableIdentifierExistsError(tableIdentifier: TableIdentifier): Throwable = { @@ -2305,7 +2307,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { } def analyzeTableNotSupportedOnViewsError(): Throwable = { - new AnalysisException( + new AnalysisException( errorClass = "_LEGACY_ERROR_TEMP_1236", messageParameters = Map.empty) } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala index 7e870e23fba0..37c4714d2c4c 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala @@ -1395,7 +1395,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { def failToRecognizePatternError(pattern: String, e: Throwable): SparkRuntimeException = { new SparkRuntimeException( errorClass = "_LEGACY_ERROR_TEMP_2130", - messageParameters = Map("pattern" -> pattern), + messageParameters = Map("pattern" -> toSQLValue(pattern, StringType)), cause = e) } @@ -2686,7 +2686,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { messageParameters = Map( "parameter" -> "regexp", "functionName" -> toSQLId(funcName), - "expected" -> pattern)) + "expected" -> toSQLValue(pattern, StringType))) } def tooManyArrayElementsError( diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala index e3829311e2dc..a7cdd589606c 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala @@ -90,7 +90,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer exception = intercept[AnalysisException] { assertSuccess(expr) }, - errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS", + errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS", parameters = messageParameters) } @@ -447,7 +447,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer exception = intercept[AnalysisException] { assertSuccess(murmur3Hash) }, - errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS", + errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS", parameters = Map( "sqlExpr" -> "\"hash()\"", "functionName" -> toSQLId(murmur3Hash.prettyName), @@ -459,7 +459,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer exception = intercept[AnalysisException] { assertSuccess(xxHash64) }, - errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS", + errorClass = 
"DATATYPE_MISMATCH.WRONG_NUM_ARGS", parameters = Map( "sqlExpr" -> "\"xxhash64()\"", "functionName" -> toSQLId(xxHash64.prettyName), diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/RegexpExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/RegexpExpressionsSuite.scala index 98a6a9bc19c4..095c2736ae07 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/RegexpExpressionsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/RegexpExpressionsSuite.scala @@ -523,15 +523,15 @@ class RegexpExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { checkExceptionInExpression[SparkRuntimeException]( RegExpExtract(s, p, r), create_row("1a 2b 14m", "(?l)", 0), - s"$prefix `regexp_extract` is invalid: (?l)") + s"$prefix `regexp_extract` is invalid: '(?l)'") checkExceptionInExpression[SparkRuntimeException]( RegExpExtractAll(s, p, r), create_row("abc", "] [", 0), - s"$prefix `regexp_extract_all` is invalid: ] [") + s"$prefix `regexp_extract_all` is invalid: '] ['") checkExceptionInExpression[SparkRuntimeException]( RegExpInStr(s, p, r), create_row("abc", ", (", 0), - s"$prefix `regexp_instr` is invalid: , (") + s"$prefix `regexp_instr` is invalid: ', ('") } test("RegExpReplace: fails analysis if pos is not a constant") { diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala index 15513037fe1b..f9726c4a6dd5 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala @@ -1593,7 +1593,7 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { val expr1 = Elt(Seq(indexExpr1)) assert(expr1.checkInputDataTypes() == DataTypeMismatch( - errorSubClass = "WRONG_NUM_PARAMS", + errorSubClass = "WRONG_NUM_ARGS", messageParameters = Map( "functionName" -> "`elt`", "expectedNum" -> "> 1", diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out index b3c1e94314d3..27ec604cb450 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out @@ -447,7 +447,7 @@ org.apache.spark.sql.AnalysisException "errorClass" : "_LEGACY_ERROR_TEMP_1216", "messageParameters" : { "message" : "the escape character is not allowed to precede 'a'", - "pattern" : "m%aca" + "pattern" : "'m%aca'" } } @@ -462,7 +462,7 @@ org.apache.spark.sql.AnalysisException "errorClass" : "_LEGACY_ERROR_TEMP_1216", "messageParameters" : { "message" : "the escape character is not allowed to precede 'a'", - "pattern" : "m%aca" + "pattern" : "'m%aca'" } } @@ -477,7 +477,7 @@ org.apache.spark.sql.AnalysisException "errorClass" : "_LEGACY_ERROR_TEMP_1216", "messageParameters" : { "message" : "the escape character is not allowed to precede 'a'", - "pattern" : "m%a%%a" + "pattern" : "'m%a%%a'" } } @@ -492,7 +492,7 @@ org.apache.spark.sql.AnalysisException "errorClass" : "_LEGACY_ERROR_TEMP_1216", "messageParameters" : { "message" : "the escape character is not allowed to precede 'a'", - "pattern" : "m%a%%a" + "pattern" : "'m%a%%a'" } } @@ -507,7 +507,7 @@ 
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out
index b3c1e94314d3..27ec604cb450 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out
@@ -447,7 +447,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "_LEGACY_ERROR_TEMP_1216",
   "messageParameters" : {
     "message" : "the escape character is not allowed to precede 'a'",
-    "pattern" : "m%aca"
+    "pattern" : "'m%aca'"
   }
 }
 
@@ -462,7 +462,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "_LEGACY_ERROR_TEMP_1216",
   "messageParameters" : {
     "message" : "the escape character is not allowed to precede 'a'",
-    "pattern" : "m%aca"
+    "pattern" : "'m%aca'"
   }
 }
 
@@ -477,7 +477,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "_LEGACY_ERROR_TEMP_1216",
   "messageParameters" : {
     "message" : "the escape character is not allowed to precede 'a'",
-    "pattern" : "m%a%%a"
+    "pattern" : "'m%a%%a'"
   }
 }
 
@@ -492,7 +492,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "_LEGACY_ERROR_TEMP_1216",
   "messageParameters" : {
     "message" : "the escape character is not allowed to precede 'a'",
-    "pattern" : "m%a%%a"
+    "pattern" : "'m%a%%a'"
  }
 }
 
@@ -507,7 +507,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "_LEGACY_ERROR_TEMP_1216",
   "messageParameters" : {
     "message" : "the escape character is not allowed to precede 'e'",
-    "pattern" : "b_ear"
+    "pattern" : "'b_ear'"
   }
 }
 
@@ -522,7 +522,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "_LEGACY_ERROR_TEMP_1216",
   "messageParameters" : {
     "message" : "the escape character is not allowed to precede 'e'",
-    "pattern" : "b_ear"
+    "pattern" : "'b_ear'"
   }
 }
 
@@ -537,7 +537,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "_LEGACY_ERROR_TEMP_1216",
   "messageParameters" : {
     "message" : "the escape character is not allowed to precede 'e'",
-    "pattern" : "b_e__r"
+    "pattern" : "'b_e__r'"
   }
 }
 
@@ -552,7 +552,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "_LEGACY_ERROR_TEMP_1216",
   "messageParameters" : {
     "message" : "the escape character is not allowed to precede 'e'",
-    "pattern" : "b_e__r"
+    "pattern" : "'b_e__r'"
   }
 }
 
diff --git a/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out
index 60094af7a991..3474aba39110 100644
--- a/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out
@@ -163,7 +163,7 @@ org.apache.spark.SparkRuntimeException
   "errorClass" : "INVALID_PARAMETER_VALUE",
   "sqlState" : "22023",
   "messageParameters" : {
-    "expected" : "(?l)",
+    "expected" : "'(?l)'",
     "functionName" : "`regexp_extract`",
     "parameter" : "regexp"
   }
@@ -334,7 +334,7 @@ org.apache.spark.SparkRuntimeException
   "errorClass" : "INVALID_PARAMETER_VALUE",
   "sqlState" : "22023",
   "messageParameters" : {
-    "expected" : "], [",
+    "expected" : "'], ['",
     "functionName" : "`regexp_extract_all`",
     "parameter" : "regexp"
   }
@@ -671,7 +671,7 @@ org.apache.spark.SparkRuntimeException
   "errorClass" : "INVALID_PARAMETER_VALUE",
   "sqlState" : "22023",
   "messageParameters" : {
-    "expected" : ") ?",
+    "expected" : "') ?'",
     "functionName" : "`regexp_instr`",
     "parameter" : "regexp"
   }
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index 5e04562a648d..a326e009af4d 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -894,7 +894,7 @@ org.apache.spark.SparkRuntimeException
 {
   "errorClass" : "_LEGACY_ERROR_TEMP_2130",
   "messageParameters" : {
-    "pattern" : "yyyy-MM-dd GGGGG"
+    "pattern" : "'yyyy-MM-dd GGGGG'"
   }
 }
 
@@ -908,7 +908,7 @@ org.apache.spark.SparkRuntimeException
 {
   "errorClass" : "_LEGACY_ERROR_TEMP_2130",
   "messageParameters" : {
-    "pattern" : "dd MM yyyy EEEEEE"
+    "pattern" : "'dd MM yyyy EEEEEE'"
   }
 }
 
@@ -922,7 +922,7 @@ org.apache.spark.SparkRuntimeException
 {
   "errorClass" : "_LEGACY_ERROR_TEMP_2130",
   "messageParameters" : {
-    "pattern" : "dd MM yyyy EEEEE"
+    "pattern" : "'dd MM yyyy EEEEE'"
   }
 }
 
"messageParameters" : { - "pattern" : "yyyy-MM-dd GGGGG" + "pattern" : "'yyyy-MM-dd GGGGG'" } } @@ -885,7 +885,7 @@ org.apache.spark.SparkRuntimeException { "errorClass" : "_LEGACY_ERROR_TEMP_2130", "messageParameters" : { - "pattern" : "dd MM yyyy EEEEEE" + "pattern" : "'dd MM yyyy EEEEEE'" } } @@ -899,7 +899,7 @@ org.apache.spark.SparkRuntimeException { "errorClass" : "_LEGACY_ERROR_TEMP_2130", "messageParameters" : { - "pattern" : "dd MM yyyy EEEEE" + "pattern" : "'dd MM yyyy EEEEE'" } } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala index 85877c97ed59..4bbe750ecbd5 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala @@ -4229,7 +4229,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { exception = intercept[AnalysisException] { df.select(hash()) }, - errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS", + errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS", sqlState = None, parameters = Map( "sqlExpr" -> "\"hash()\"", @@ -4241,7 +4241,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { exception = intercept[AnalysisException] { df.selectExpr("hash()") }, - errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS", + errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS", sqlState = None, parameters = Map( "sqlExpr" -> "\"hash()\"", @@ -4257,7 +4257,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { exception = intercept[AnalysisException] { df.select(xxhash64()) }, - errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS", + errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS", sqlState = None, parameters = Map( "sqlExpr" -> "\"xxhash64()\"", @@ -4269,7 +4269,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { exception = intercept[AnalysisException] { df.selectExpr("xxhash64()") }, - errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS", + errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS", sqlState = None, parameters = Map( "sqlExpr" -> "\"xxhash64()\"", @@ -4285,7 +4285,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { exception = intercept[AnalysisException] { df.select(greatest()) }, - errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS", + errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS", sqlState = None, parameters = Map( "sqlExpr" -> "\"greatest()\"", @@ -4298,7 +4298,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { exception = intercept[AnalysisException] { df.selectExpr("greatest()") }, - errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS", + errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS", sqlState = None, parameters = Map( "sqlExpr" -> "\"greatest()\"", @@ -4315,7 +4315,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { exception = intercept[AnalysisException] { df.select(least()) }, - errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS", + errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS", sqlState = None, parameters = Map( "sqlExpr" -> "\"least()\"", @@ -4328,7 +4328,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { exception = intercept[AnalysisException] { df.selectExpr("least()") }, - errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS", + errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS", sqlState = None, parameters = Map( "sqlExpr" -> "\"least()\"", diff --git 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index dd3ad0f4d6bd..e9aeba9c8206 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -3709,7 +3709,7 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
       },
       errorClass = "_LEGACY_ERROR_TEMP_1216",
       parameters = Map(
-        "pattern" -> "m%@ca",
+        "pattern" -> "'m%@ca'",
         "message" -> "the escape character is not allowed to precede '@'"))
     checkAnswer(sql("SELECT s LIKE 'm@@ca' ESCAPE '@' FROM df"), Row(true))
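Taken together, both themes of the patch (the WRONG_NUM_ARGS rename and toSQLValue quoting) can be checked end to end; a sketch for a SharedSparkSession-based test, not part of the diff, with the actualNum value assumed:

    checkError(
      exception = intercept[AnalysisException] { sql("SELECT greatest()") },
      errorClass = "DATATYPE_MISMATCH.WRONG_NUM_ARGS",
      parameters = Map(
        "sqlExpr" -> "\"greatest()\"",
        "functionName" -> "`greatest`",
        "expectedNum" -> "> 1",
        "actualNum" -> "0"))  // assumed: no arguments were supplied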