diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 12c97c2108a4..34800e7c211c 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -751,6 +751,11 @@
       " is not a Protobuf message type"
     ]
   },
+  "INVALID_SCHEMA" : {
+    "message" : [
+      "The expression <expr> is not a valid schema string."
+    ]
+  },
   "INVALID_SQL_SYNTAX" : {
     "message" : [
       "Invalid SQL syntax: "
@@ -2165,11 +2170,6 @@
       "Cannot read table property '<key>' as it's corrupted.<details>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1092" : {
-    "message" : [
-      "The expression '<expr>' is not a valid schema string."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1093" : {
     "message" : [
       "Schema should be specified in DDL format as a string literal or output of the schema_of_json/schema_of_csv functions instead of ."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 0f245597efd9..a6219087ca7a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -996,8 +996,8 @@
 
   def invalidSchemaStringError(exp: Expression): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1092",
-      messageParameters = Map("expr" -> exp.sql))
+      errorClass = "INVALID_SCHEMA",
+      messageParameters = Map("expr" -> toSQLExpr(exp)))
   }
 
   def schemaNotFoldableError(exp: Expression): Throwable = {
diff --git a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
index c0df2751933f..35fd733e6c8f 100644
--- a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
@@ -22,9 +22,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1092",
+  "errorClass" : "INVALID_SCHEMA",
   "messageParameters" : {
-    "expr" : "1"
+    "expr" : "\"1\""
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
index cb8d4fca4942..d83b3657ef1f 100644
--- a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
@@ -148,9 +148,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1092",
+  "errorClass" : "INVALID_SCHEMA",
   "messageParameters" : {
-    "expr" : "1"
+    "expr" : "\"1\""
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CsvFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CsvFunctionsSuite.scala
index 0c3703ee89e1..2a3058d93957 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CsvFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CsvFunctionsSuite.scala
@@ -363,10 +363,13 @@ class CsvFunctionsSuite extends QueryTest with SharedSparkSession {
     }.getMessage
     assert(errMsg.contains("Schema should be specified in DDL format as a string literal"))
 
-    val errMsg2 = intercept[AnalysisException] {
-      Seq("1").toDF("csv").select(from_csv($"csv", lit(1), options)).collect()
-    }.getMessage
-    assert(errMsg2.contains("The expression '1' is not a valid schema string"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        Seq("1").toDF("csv").select(from_csv($"csv", lit(1), options)).collect()
+      },
+      errorClass = "INVALID_SCHEMA",
+      parameters = Map("expr" -> "\"1\"")
+    )
   }
 
   test("schema_of_csv - infers the schema of foldable CSV string") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
index 2a15fbc9534b..5a5ead31e9de 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
@@ -5179,6 +5179,23 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       Seq(Row(Map("a" -> Map("a" -> 6, "b" -> 8), "b" -> Map("a" -> 8, "b" -> 10))))
     )
   }
+
+  test("from_json - invalid schema string") {
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql("select from_json('{\"a\":1}', 1)")
+      },
+      errorClass = "INVALID_SCHEMA",
+      parameters = Map(
+        "expr" -> "\"1\""
+      ),
+      context = ExpectedContext(
+        fragment = "from_json('{\"a\":1}', 1)",
+        start = 7,
+        stop = 29
+      )
+    )
+  }
 }
 
 object DataFrameFunctionsSuite {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
index 0a84e9000c3e..d2c6055fe363 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
@@ -424,10 +424,19 @@ class JsonFunctionsSuite extends QueryTest with SharedSparkSession {
         "from_json(value, 'time Timestamp', map('timestampFormat', 'dd/MM/yyyy HH:mm'))"),
       Row(Row(java.sql.Timestamp.valueOf("2015-08-26 18:00:00.0"))))
 
-    val errMsg1 = intercept[AnalysisException] {
-      df3.selectExpr("from_json(value, 1)")
-    }
-    assert(errMsg1.getMessage.startsWith("The expression '1' is not a valid schema string"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        df3.selectExpr("from_json(value, 1)")
+      },
+      errorClass = "INVALID_SCHEMA",
+      parameters = Map("expr" -> "\"1\""),
+      context = ExpectedContext(
+        fragment = "from_json(value, 1)",
+        start = 0,
+        stop = 18
+      )
+    )
+
     val errMsg2 = intercept[AnalysisException] {
       df3.selectExpr("""from_json(value, 'time InvalidType')""")
     }