530 changes: 528 additions & 2 deletions core/src/main/resources/error/error-classes.json

Large diffs are not rendered by default.
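The bulk of this patch is the 528 added lines in error-classes.json, which register the new _LEGACY_ERROR_TEMP_* classes. Each entry maps an error class name to a message template whose <param> placeholders are filled from the exception's messageParameters. Below is a minimal Scala sketch of that lookup-and-substitute step; the template texts are illustrative guesses (the real ones live in the collapsed JSON, and Spark's actual reader is more involved).

// Hedged sketch, not Spark's real error-class reader. Templates are assumptions:
// legacy temp classes typically wrap the old message in a single <msg> placeholder.
val errorClasses: Map[String, String] = Map(
  "_LEGACY_ERROR_TEMP_1238" -> "<msg>",
  "_LEGACY_ERROR_TEMP_1229" -> "<decimalType> can only support precision up to <precision>."
)

// Render a message by substituting each <param> with its value.
def render(errorClass: String, parameters: Map[String, String]): String =
  parameters.foldLeft(errorClasses(errorClass)) { case (template, (key, value)) =>
    template.replace(s"<$key>", value)
  }

// render("_LEGACY_ERROR_TEMP_1229", Map("decimalType" -> "decimal", "precision" -> "38"))
// => "decimal can only support precision up to 38."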

@@ -66,7 +66,10 @@ case class NoSuchTableException(

case class NoSuchPartitionException(
    override val message: String)
-  extends AnalysisException(message) {
+  extends AnalysisException(
+    message,
+    errorClass = Some("_LEGACY_ERROR_TEMP_1238"),
+    messageParameters = Map("msg" -> message)) {

  def this(db: String, table: String, spec: TablePartitionSpec) = {
    this(s"Partition not found in table '$table' database '$db':\n" + spec.mkString("\n"))
@@ -83,7 +86,10 @@ case class NoSuchPermanentFunctionException(db: String, func: String)
  extends AnalysisException(s"Function '$func' not found in database '$db'")

case class NoSuchFunctionException(override val message: String)
-  extends AnalysisException(message) {
+  extends AnalysisException(
+    message,
+    errorClass = Some("_LEGACY_ERROR_TEMP_1258"),
+    messageParameters = Map("msg" -> message)) {

  def this(db: String, func: String) = {
    this(s"Undefined function: '$func'. " +
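Net effect of the two constructor changes above: these exceptions keep their legacy human-readable messages, but each now also carries a structured error class plus a parameter map. A hedged usage sketch follows (the three-argument constructor is the one shown in the diff; the import path and the getErrorClass accessor are assumptions based on the surrounding Spark 3.x API):

import org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException

val e = new NoSuchPartitionException("default", "t", Map("c" -> "Us", "d" -> "2"))

// The rendered message is unchanged...
assert(e.message.startsWith("Partition not found in table 't' database 'default'"))
// ...but the exception now also exposes a machine-readable error class,
// which structured tests can match instead of the message text.
assert(e.getErrorClass == "_LEGACY_ERROR_TEMP_1238")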

Large diffs are not rendered by default.

@@ -132,7 +132,7 @@ class EncoderResolutionSuite extends PlanTest {
    val encoder = ExpressionEncoder[ArrayClass]
    val attrs = Seq($"arr".array(new StructType().add("c", "int")))
    assert(intercept[AnalysisException](encoder.resolveAndBind(attrs)).message ==
-      "No such struct field a in c")
+      "No such struct field a in c.")
  }

test("the real type is not compatible with encoder schema: nested array element type") {
@@ -150,8 +150,10 @@
    withClue("nested array element type is not compatible") {
      val attrs = Seq($"nestedArr".array(new StructType()
        .add("arr", ArrayType(new StructType().add("c", "int")))))
-      assert(intercept[AnalysisException](encoder.resolveAndBind(attrs)).message ==
-        "No such struct field a in c")
+      checkError(
+        exception = intercept[AnalysisException](encoder.resolveAndBind(attrs)),
+        errorClass = "_LEGACY_ERROR_TEMP_1208",
+        parameters = Map("fieldName" -> "a", "fields" -> "c"))
    }
  }

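The test change above is the template for this whole migration: a brittle assertion on the rendered string becomes a checkError call that matches the error class and message parameters, so wording tweaks (such as the trailing period added in the first hunk) no longer break tests. A sketch of the pattern, reusing names from the test above rather than a complete standalone test:

// checkError comes from SparkFunSuite; encoder and attrs are as defined in the test.
val ex = intercept[AnalysisException] {
  encoder.resolveAndBind(attrs)
}
checkError(
  exception = ex,
  errorClass = "_LEGACY_ERROR_TEMP_1208",
  parameters = Map("fieldName" -> "a", "fields" -> "c"))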
@@ -1692,7 +1692,20 @@ select interval (-30) day
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "messageParameters" : {
+    "fullName" : "spark_catalog.default.interval",
+    "rawName" : "interval"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 21,
+    "fragment" : "interval (-30)"
+  } ]
+}
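A note on the new queryContext fields, since every interval case below repeats them: startIndex and stopIndex read as 1-based, inclusive character offsets of the offending fragment within the statement, which lines up with the old 0-based "line 1 pos 7" position. That reading is an inference from the outputs here, not a documented contract; a quick self-contained check:

val sql = "select interval (-30) day"
// 1-based inclusive [8, 21] is 0-based substring(7, 21):
assert(sql.substring(8 - 1, 21) == "interval (-30)")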


-- !query
@@ -1701,7 +1714,20 @@ select interval (a + 1) day
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "messageParameters" : {
+    "fullName" : "spark_catalog.default.interval",
+    "rawName" : "interval"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 23,
+    "fragment" : "interval (a + 1)"
+  } ]
+}


-- !query
@@ -1726,7 +1752,20 @@ select interval (-30) days
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "messageParameters" : {
+    "fullName" : "spark_catalog.default.interval",
+    "rawName" : "interval"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 21,
+    "fragment" : "interval (-30)"
+  } ]
+}


-- !query
@@ -1735,7 +1774,20 @@ select interval (a + 1) days
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1242",
+  "messageParameters" : {
+    "fullName" : "spark_catalog.default.interval",
+    "rawName" : "interval"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 23,
+    "fragment" : "interval (a + 1)"
+  } ]
+}


-- !query
@@ -173,10 +173,13 @@ select 1234567890123456789012345678901234567890
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
-
-decimal can only support precision up to 38
-== SQL ==
-select 1234567890123456789012345678901234567890
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1229",
+  "messageParameters" : {
+    "decimalType" : "decimal",
+    "precision" : "38"
+  }
+}


-- !query
@@ -185,10 +188,13 @@ select 1234567890123456789012345678901234567890.0
struct<>
-- !query output
org.apache.spark.sql.catalyst.parser.ParseException
-
-decimal can only support precision up to 38
-== SQL ==
-select 1234567890123456789012345678901234567890.0
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1229",
+  "messageParameters" : {
+    "decimalType" : "decimal",
+    "precision" : "38"
+  }
+}


-- !query
@@ -467,7 +473,7 @@ org.apache.spark.sql.catalyst.parser.ParseException
{
  "errorClass" : "_LEGACY_ERROR_TEMP_0061",
  "messageParameters" : {
-    "msg" : "decimal can only support precision up to 38"
+    "msg" : "decimal can only support precision up to 38."
  },
  "queryContext" : [ {
    "objectType" : "",
@@ -81,7 +81,15 @@ ALTER TABLE test_change CHANGE a TYPE STRING
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-ALTER TABLE CHANGE COLUMN is not supported for changing column 'a' with type 'IntegerType' to 'a' with type 'StringType'
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1245",
+  "messageParameters" : {
+    "newName" : "a",
+    "newType" : "StringType",
+    "originName" : "a",
+    "originType" : "IntegerType"
+  }
+}


-- !query
19 changes: 17 additions & 2 deletions sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out
@@ -259,7 +259,15 @@ alter table char_tbl1 change column c type char(6)
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-ALTER TABLE CHANGE COLUMN is not supported for changing column 'c' with type 'CharType(5)' to 'c' with type 'CharType(6)'
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1245",
+  "messageParameters" : {
+    "newName" : "c",
+    "newType" : "CharType(6)",
+    "originName" : "c",
+    "originType" : "CharType(5)"
+  }
+}


-- !query
@@ -575,7 +583,14 @@ alter table char_part partition (v2='ke') rename to partition (v2='nt')
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Partition spec is invalid. The spec (v2) must match the partition spec (v2, c2) defined in table '`spark_catalog`.`default`.`char_part`'
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1232",
+  "messageParameters" : {
+    "partitionColumnNames" : "v2, c2",
+    "specKeys" : "v2",
+    "tableName" : "`spark_catalog`.`default`.`char_part`"
+  }
+}


-- !query
64 changes: 56 additions & 8 deletions sql/core/src/test/resources/sql-tests/results/cte-nested.sql.out
@@ -45,7 +45,13 @@ SELECT * FROM t2
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228.
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1200",
+  "messageParameters" : {
+    "config" : "spark.sql.legacy.ctePrecedencePolicy",
+    "name" : "t"
+  }
+}


-- !query
@@ -82,7 +88,13 @@ SELECT * FROM t2
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228.
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1200",
+  "messageParameters" : {
+    "config" : "spark.sql.legacy.ctePrecedencePolicy",
+    "name" : "t"
+  }
+}


-- !query
@@ -136,7 +148,13 @@ SELECT (
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228.
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1200",
+  "messageParameters" : {
+    "config" : "spark.sql.legacy.ctePrecedencePolicy",
+    "name" : "t"
+  }
+}


-- !query
@@ -151,7 +169,13 @@ SELECT (
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228.
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1200",
+  "messageParameters" : {
+    "config" : "spark.sql.legacy.ctePrecedencePolicy",
+    "name" : "t"
+  }
+}


-- !query
@@ -167,7 +191,13 @@ SELECT (
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228.
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1200",
+  "messageParameters" : {
+    "config" : "spark.sql.legacy.ctePrecedencePolicy",
+    "name" : "t"
+  }
+}


-- !query
@@ -181,7 +211,13 @@ WHERE c IN (
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228.
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1200",
+  "messageParameters" : {
+    "config" : "spark.sql.legacy.ctePrecedencePolicy",
+    "name" : "t"
+  }
+}


-- !query
@@ -210,7 +246,13 @@ SELECT * FROM t
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Name aBc is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228.
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1200",
+  "messageParameters" : {
+    "config" : "spark.sql.legacy.ctePrecedencePolicy",
+    "name" : "aBc"
+  }
+}


-- !query
@@ -223,7 +265,13 @@ SELECT (
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Name aBc is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228.
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1200",
+  "messageParameters" : {
+    "config" : "spark.sql.legacy.ctePrecedencePolicy",
+    "name" : "aBc"
+  }
+}


-- !query
18 changes: 14 additions & 4 deletions sql/core/src/test/resources/sql-tests/results/describe.sql.out
@@ -362,9 +362,12 @@ DESC t PARTITION (c='Us', d=2)
struct<>
-- !query output
org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException
-Partition not found in table 't' database 'default':
-c -> Us
-d -> 2
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1238",
+  "messageParameters" : {
+    "msg" : "Partition not found in table 't' database 'default':\nc -> Us\nd -> 2"
+  }
+}


-- !query
@@ -373,7 +376,14 @@ DESC t PARTITION (c='Us')
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Partition spec is invalid. The spec (c) must match the partition spec (c, d) defined in table '`spark_catalog`.`default`.`t`'
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1232",
+  "messageParameters" : {
+    "partitionColumnNames" : "c, d",
+    "specKeys" : "c",
+    "tableName" : "`spark_catalog`.`default`.`t`"
+  }
+}


-- !query