127 changes: 127 additions & 0 deletions core/src/main/resources/error/error-classes.json
@@ -3678,5 +3678,132 @@
"message" : [
"Failed to merge incompatible data types ${leftCatalogString} and ${rightCatalogString}"
]
},
"_LEGACY_ERROR_TEMP_2126" : {
"message" : [
"Unsuccessful attempt to build maps with <size> elements due to exceeding the map size limit <maxRoundedArrayLength>."
]
},
"_LEGACY_ERROR_TEMP_2127" : {
"message" : [
"Duplicate map key <key> was found, please check the input data. If you want to remove the duplicated keys, you can set <mapKeyDedupPolicy> to <lastWin> so that the key inserted at last takes precedence."
]
},
"_LEGACY_ERROR_TEMP_2128" : {
"message" : [
"The key array and value array of MapData must have the same length."
]
},
"_LEGACY_ERROR_TEMP_2129" : {
"message" : [
"Conflict found: Field <field> <actual> differs from <field> <expected> derived from <candidate>"
]
},
"_LEGACY_ERROR_TEMP_2130" : {
"message" : [
"Fail to recognize '<pattern>' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"
]
},
"_LEGACY_ERROR_TEMP_2131" : {
"message" : [
"Exception when registering StreamingQueryListener"
]
},
"_LEGACY_ERROR_TEMP_2132" : {
"message" : [
"Parsing JSON arrays as structs is forbidden."
]
},
"_LEGACY_ERROR_TEMP_2133" : {
"message" : [
"Cannot parse field name <fieldName>, field value <fieldValue>, [<token>] as target spark data type [<dataType>]."
]
},
"_LEGACY_ERROR_TEMP_2134" : {
"message" : [
"Cannot parse field value <value> for pattern <pattern> as target spark data type [<dataType>]."
]
},
"_LEGACY_ERROR_TEMP_2135" : {
"message" : [
"Failed to parse an empty string for data type <dataType>"
]
},
"_LEGACY_ERROR_TEMP_2136" : {
"message" : [
"Failed to parse field name <fieldName>, field value <fieldValue>, [<token>] to target spark data type [<dataType>]."
]
},
"_LEGACY_ERROR_TEMP_2137" : {
"message" : [
"Root converter returned null"
]
},
"_LEGACY_ERROR_TEMP_2138" : {
"message" : [
"Cannot have circular references in bean class, but got the circular reference of class <clazz>"
]
},
"_LEGACY_ERROR_TEMP_2139" : {
"message" : [
"cannot have circular references in class, but got the circular reference of class <t>"
]
},
"_LEGACY_ERROR_TEMP_2140" : {
"message" : [
"`<fieldName>` is not a valid identifier of Java and cannot be used as field name",
"<walkedTypePath>"
]
},
"_LEGACY_ERROR_TEMP_2141" : {
"message" : [
"No Encoder found for <tpe>",
"<walkedTypePath>"
]
},
"_LEGACY_ERROR_TEMP_2142" : {
"message" : [
"Attributes for type <schema> is not supported"
]
},
"_LEGACY_ERROR_TEMP_2143" : {
"message" : [
"Schema for type <tpe> is not supported"
]
},
"_LEGACY_ERROR_TEMP_2144" : {
"message" : [
"Unable to find constructor for <tpe>. This could happen if <tpe> is an interface, or a trait without companion object constructor."
]
},
"_LEGACY_ERROR_TEMP_2145" : {
"message" : [
"<paramName> cannot be more than one character"
]
},
"_LEGACY_ERROR_TEMP_2146" : {
"message" : [
"<paramName> should be an integer. Found <value>"
]
},
"_LEGACY_ERROR_TEMP_2147" : {
"message" : [
"<paramName> flag can be true or false"
]
},
"_LEGACY_ERROR_TEMP_2148" : {
"message" : [
"null value found but field <name> is not nullable."
]
},
"_LEGACY_ERROR_TEMP_2149" : {
"message" : [
"Malformed CSV record"
]
},
"_LEGACY_ERROR_TEMP_2150" : {
"message" : [
"Due to Scala's limited support of tuple, tuple with more than 22 elements are not supported."
]
}
}
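
Note (illustration, not part of this change): each message above is a template whose <name> placeholders are filled in where the error is raised. The self-contained Scala sketch below shows one way such a template could be rendered by simple placeholder substitution; the formatMessage helper and the example parameter values are assumptions for illustration only, not Spark's actual error framework.

// Minimal sketch: substitute <name> placeholders in an error-message template.
// The helper and the example values below are illustrative assumptions.
object ErrorTemplateSketch {

  // Replace each <name> placeholder with the matching value from params.
  def formatMessage(template: String, params: Map[String, String]): String =
    params.foldLeft(template) { case (msg, (name, value)) =>
      msg.replace(s"<$name>", value)
    }

  def main(args: Array[String]): Unit = {
    // Template text taken from _LEGACY_ERROR_TEMP_2127 above.
    val template =
      "Duplicate map key <key> was found. Please check the input data. " +
        "If you want to remove the duplicated keys, you can set <mapKeyDedupPolicy> " +
        "to <lastWin> so that the last inserted key takes precedence."

    // Hypothetical parameter values, chosen only to show the substitution.
    val rendered = formatMessage(template, Map(
      "key"               -> "'a'",
      "mapKeyDedupPolicy" -> "\"spark.sql.mapKeyDedupPolicy\"",
      "lastWin"           -> "\"LAST_WIN\""
    ))

    println(rendered)
  }
}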