65 changes: 65 additions & 0 deletions core/src/main/resources/error/error-classes.json
@@ -1,4 +1,8 @@
{
"ALTER_TABLE_WITH_DROP_PARTITION_AND_PURGE_UNSUPPORTED" : {
"message" : [ "ALTER TABLE ... DROP PARTITION ... PURGE" ],
"sqlState" : "0A000"
},
"AMBIGUOUS_FIELD_NAME" : {
"message" : [ "Field name %s is ambiguous and has %s matching fields in the struct." ],
"sqlState" : "42000"
@@ -11,16 +15,30 @@
"message" : [ "%s cannot be represented as Decimal(%s, %s)." ],
"sqlState" : "22005"
},
"CANNOT_CONVERT_HIVE_TABLE_TO_CATALOG_TABLE" : {
"message" : [ "%s, db: %s, table: %s" ]
},
"CANNOT_EVALUATE_EXPRESSION" : {
"message" : [ "Cannot evaluate expression: %s" ]
},
"CANNOT_FETCH_DATABASE_TABLES" : {
"message" : [ "Unable to fetch tables of db %s" ]
},
"CANNOT_GENERATE_CODE_FOR_EXPRESSION" : {
"message" : [ "Cannot generate code for expression: %s" ]
},
"CANNOT_PARSE_DECIMAL" : {
"message" : [ "Cannot parse decimal" ],
"sqlState" : "42000"
},
"CANNOT_RECOGNIZE_HIVE_TYPE" : {
"message" : [ "Cannot recognize hive type string: %s, column: %s" ],
"sqlState" : "42000"
},
"CANNOT_SET_TIMEOUT_DURATION" : {
"message" : [ "Cannot set timeout duration without enabling processing time timeout in [map|flatMap]GroupsWithState" ],
"sqlState" : "0A000"
},
"CANNOT_TERMINATE_GENERATOR" : {
"message" : [ "Cannot terminate expression: %s" ]
},
@@ -35,6 +53,10 @@
"message" : [ "divide by zero" ],
"sqlState" : "22012"
},
"DROP_TABLE_WITH_PURGE_UNSUPPORTED" : {
"message" : [ "DROP TABLE ... PURGE" ],
"sqlState" : "0A000"
},
"DUPLICATE_KEY" : {
"message" : [ "Found duplicate keys '%s'" ],
"sqlState" : "23000"
@@ -46,9 +68,19 @@
"message" : [ "Failed to rename %s to %s as destination already exists" ],
"sqlState" : "22023"
},
"FAILED_RENAME_TEMPORARY_FILE" : {
"message" : [ "Failed to rename temp file %s to %s as rename returned false" ]
},
"FAILED_SET_ORIGINAL_PERMISSION_BACK" : {
"message" : [ "Failed to set original permission %s back to the created path: %s. Exception: %s" ]
},
"GET_PARTITION_METADATA_BY_FILTER" : {
[Review comment from a Contributor] The naming of this isn't obviously an error. Can we name this something like CANNOT_GET_PARTITION_METADATA_BY_FILTER?
"message" : [ "Caught Hive MetaException attempting to get partition metadata by filter from Hive.", "You can set the Spark configuration setting %s to true to work around this problem, however this will result in degraded performance.", "Please report a bug: https://issues.apache.org/jira/browse/SPARK" ]
},
"GET_TABLES_BY_TYPE_UNSUPPORTED_BY_HIVE_VERSION" : {
"message" : [ "Hive 2.2 and lower versions don't support getTablesByType. Please use Hive 2.3 or higher version." ],
"sqlState" : "0A000"
},
"GROUPING_COLUMN_MISMATCH" : {
"message" : [ "Column of grouping (%s) can't be found in grouping columns %s" ],
"sqlState" : "42000"
@@ -63,6 +95,10 @@
"IF_PARTITION_NOT_EXISTS_UNSUPPORTED" : {
"message" : [ "Cannot write, IF NOT EXISTS is not supported for table: %s" ]
},
"ILLEGAL_LOCATION_CLAUSE_FOR_VIEW_PARTITION" : {
"message" : [ "LOCATION clause illegal for view partition" ],
"sqlState" : "42000"
},
"INCOMPARABLE_PIVOT_COLUMN" : {
"message" : [ "Invalid pivot column '%s'. Pivot columns must be comparable." ],
"sqlState" : "42000"
@@ -92,6 +128,16 @@
"INVALID_JSON_SCHEMA_MAPTYPE" : {
"message" : [ "Input schema %s can only contain StringType as a key type for a MapType." ]
},
"INVALID_PARTITION_FILTER" : {
"message" : [ "Partition filter cannot have both `\"` and `\\'` characters" ],
"sqlState" : "42000"
},
"LEGACY_METADATA_PATH_EXISTS" : {
"message" : [ "Error: we detected a possible problem with the location of your \"_spark_metadata\" directory and you likely need to move it before restarting this query.", "Earlier version of Spark incorrectly escaped paths when writing out the \"_spark_metadata\" directory for structured streaming.", "While this was corrected in Spark 3.0, it appears that your query was started using an earlier version that incorrectly handled the \"_spark_metadata\" path.", "Correct \"_spark_metadata\" Directory: %s. Incorrect \"_spark_metadata\" Directory: %s", "Please move the data from the incorrect directory to the correct one, delete the incorrect directory, and then restart this query.", "If you believe you are receiving this message in error, you can disable it with the SQL conf %s." ]
},
"LOAD_HIVE_CLIENT_CAUSES_NO_CLASS_DEFINITION_FOUND" : {
"message" : [ "%s when creating Hive client using classpath: %s", "Please make sure that jars for your version of hive and hadoop are included in the paths passed to %s." ]
},
"LOGICAL_HINT_OPERATOR_NOT_REMOVED_DURING_ANALYSIS" : {
"message" : [ "Internal error: logical hint operator should have been removed during analysis" ]
},
@@ -106,6 +152,10 @@
"message" : [ "A method named \"%s\" is not declared in any enclosing class nor any supertype" ],
"sqlState" : "42000"
},
"MISSING_PARTITION_COLUMN" : {
"message" : [ "Partition column %s not found in schema %s" ],
"sqlState" : "42000"
},
"MISSING_STATIC_PARTITION_COLUMN" : {
"message" : [ "Unknown static partition column: %s" ],
"sqlState" : "42000"
@@ -122,6 +172,10 @@
"message" : [ "Invalid pivot value '%s': value data type %s does not match pivot column data type %s" ],
"sqlState" : "42000"
},
"RENAME_OVERWRITES_EXISTING_PATH" : {
"message" : [ "Failed to rename as %s already exists" ],
"sqlState" : "42000"
},
"RENAME_SRC_PATH_NOT_FOUND" : {
"message" : [ "Failed to rename as %s was not found" ],
"sqlState" : "22023"
@@ -134,6 +188,13 @@
"message" : [ "The second argument of '%s' function needs to be an integer." ],
"sqlState" : "22023"
},
"SERDE_INTERFACE_NOT_FOUND" : {
"message" : [ "The SerDe interface removed since Hive 2.3(HIVE-15167). Please migrate your custom SerDes to Hive 2.3. See HIVE-15167 for more details." ],
"sqlState" : "0A000"
},
"STATE_NOT_DEFINED_OR_ALREADY_REMOVED" : {
"message" : [ "State is either not defined or has already been removed" ]
},
"UNABLE_TO_ACQUIRE_MEMORY" : {
"message" : [ "Unable to acquire %s bytes of memory, got %s" ]
},
@@ -149,6 +210,10 @@
"message" : [ "Unsupported data type %s" ],
"sqlState" : "0A000"
},
"UNSUPPORTED_HIVE_METASTORE_VERSION" : {
"message" : [ "Unsupported Hive Metastore version (%s). Please set %s with a valid version." ],
"sqlState" : "0A000"
},
"UNSUPPORTED_LITERAL_TYPE" : {
"message" : [ "Unsupported literal type %s %s" ],
"sqlState" : "0A000"
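The JSON above pairs each error class with a "message" template (java.util.Formatter-style %s placeholders) and an optional ANSI "sqlState". As a minimal sketch of how such an entry can be resolved into a user-facing message — an illustrative stand-in, not Spark's actual helper code — consider:

```scala
// Minimal sketch, not Spark's real implementation: an in-memory registry
// shaped like error-classes.json, plus the lookup-and-format step.
final case class ErrorInfo(message: Seq[String], sqlState: Option[String])

object ErrorClassSketch {
  // Three entries copied from the JSON above; sqlState is optional.
  private val errorClasses: Map[String, ErrorInfo] = Map(
    "CANNOT_RECOGNIZE_HIVE_TYPE" ->
      ErrorInfo(Seq("Cannot recognize hive type string: %s, column: %s"), Some("42000")),
    "CANNOT_FETCH_DATABASE_TABLES" ->
      ErrorInfo(Seq("Unable to fetch tables of db %s"), None),
    "DROP_TABLE_WITH_PURGE_UNSUPPORTED" ->
      ErrorInfo(Seq("DROP TABLE ... PURGE"), Some("0A000")))

  def contains(errorClass: String): Boolean = errorClasses.contains(errorClass)

  // Joins the message lines into one template, then fills the %s placeholders.
  def getMessage(errorClass: String, messageParameters: Array[String]): String = {
    val info = errorClasses(errorClass)
    String.format(info.message.mkString(" "), messageParameters: _*)
  }

  def getSqlState(errorClass: String): Option[String] =
    errorClasses(errorClass).sqlState
}
```

For example, ErrorClassSketch.getMessage("CANNOT_RECOGNIZE_HIVE_TYPE", Array("mapp<int,int>", "col1")) produces "Cannot recognize hive type string: mapp<int,int>, column: col1", and getSqlState returns Some("42000").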
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -32,7 +32,7 @@ import org.apache.hadoop.fs.permission.FsPermission
import org.codehaus.commons.compiler.CompileException
import org.codehaus.janino.InternalCompilerException

import org.apache.spark.{Partition, SparkArithmeticException, SparkArrayIndexOutOfBoundsException, SparkClassNotFoundException, SparkConcurrentModificationException, SparkDateTimeException, SparkException, SparkFileAlreadyExistsException, SparkFileNotFoundException, SparkIllegalArgumentException, SparkIllegalStateException, SparkIndexOutOfBoundsException, SparkNoSuchElementException, SparkNoSuchMethodException, SparkNumberFormatException, SparkRuntimeException, SparkSecurityException, SparkSQLException, SparkSQLFeatureNotSupportedException, SparkUnsupportedOperationException, SparkUpgradeException}
import org.apache.spark.{Partition, SparkArithmeticException, SparkArrayIndexOutOfBoundsException, SparkClassNotFoundException, SparkConcurrentModificationException, SparkDateTimeException, SparkException, SparkFileAlreadyExistsException, SparkFileNotFoundException, SparkIllegalArgumentException, SparkIllegalStateException, SparkIndexOutOfBoundsException, SparkIOException, SparkNoSuchElementException, SparkNoSuchMethodException, SparkNumberFormatException, SparkRuntimeException, SparkSecurityException, SparkSQLException, SparkSQLFeatureNotSupportedException, SparkUnsupportedOperationException, SparkUpgradeException}
import org.apache.spark.executor.CommitDeniedException
import org.apache.spark.launcher.SparkLauncher
import org.apache.spark.memory.SparkOutOfMemoryError
@@ -1294,127 +1294,143 @@ object QueryExecutionErrors {
}

def serDeInterfaceNotFoundError(e: NoClassDefFoundError): Throwable = {
new ClassNotFoundException("The SerDe interface removed since Hive 2.3(HIVE-15167)." +
" Please migrate your custom SerDes to Hive 2.3. See HIVE-15167 for more details.", e)
new SparkClassNotFoundException(
errorClass = "SERDE_INTERFACE_NOT_FOUND",
messageParameters = Array.empty, e)
}

def convertHiveTableToCatalogTableError(
e: SparkException, dbName: String, tableName: String): Throwable = {
new SparkException(s"${e.getMessage}, db: $dbName, table: $tableName", e)
new SparkException(
errorClass = "CANNOT_CONVERT_HIVE_TABLE_TO_CATALOG_TABLE",
messageParameters = Array(e.getMessage, dbName, tableName), e)
}

def cannotRecognizeHiveTypeError(
e: ParseException, fieldType: String, fieldName: String): Throwable = {
new SparkException(
s"Cannot recognize hive type string: $fieldType, column: $fieldName", e)
errorClass = "CANNOT_RECOGNIZE_HIVE_TYPE",
messageParameters = Array(fieldType, fieldName), e)
}

def getTablesByTypeUnsupportedByHiveVersionError(): Throwable = {
new UnsupportedOperationException("Hive 2.2 and lower versions don't support " +
"getTablesByType. Please use Hive 2.3 or higher version.")
new SparkUnsupportedOperationException(
errorClass = "GET_TABLES_BY_TYPE_UNSUPPORTED_BY_HIVE_VERSION",
messageParameters = Array.empty
)
}

def dropTableWithPurgeUnsupportedError(): Throwable = {
new UnsupportedOperationException("DROP TABLE ... PURGE")
new SparkUnsupportedOperationException(
errorClass = "DROP_TABLE_WITH_PURGE_UNSUPPORTED",
messageParameters = Array.empty
)
}

def alterTableWithDropPartitionAndPurgeUnsupportedError(): Throwable = {
new UnsupportedOperationException("ALTER TABLE ... DROP PARTITION ... PURGE")
new SparkUnsupportedOperationException(
errorClass = "ALTER_TABLE_WITH_DROP_PARTITION_AND_PURGE_UNSUPPORTED",
messageParameters = Array.empty
)
}

def invalidPartitionFilterError(): Throwable = {
new UnsupportedOperationException(
"""Partition filter cannot have both `"` and `'` characters""")
new SparkUnsupportedOperationException(
errorClass = "INVALID_PARTITION_FILTER",
messageParameters = Array.empty
)
}

def getPartitionMetadataByFilterError(e: InvocationTargetException): Throwable = {
new RuntimeException(
s"""
|Caught Hive MetaException attempting to get partition metadata by filter
|from Hive. You can set the Spark configuration setting
|${SQLConf.HIVE_METASTORE_PARTITION_PRUNING_FALLBACK_ON_EXCEPTION} to true to work around
|this problem, however this will result in degraded performance. Please
|report a bug: https://issues.apache.org/jira/browse/SPARK
""".stripMargin.replaceAll("\n", " "), e)
new SparkRuntimeException(
errorClass = "GET_PARTITION_METADATA_BY_FILTER",
messageParameters = Array(
SQLConf.HIVE_METASTORE_PARTITION_PRUNING_FALLBACK_ON_EXCEPTION.toString),
e)
}
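// The workaround named in GET_PARTITION_METADATA_BY_FILTER's message can be
// applied before the query runs, e.g. (illustrative snippet; `spark` is an
// active SparkSession):
//   spark.conf.set(SQLConf.HIVE_METASTORE_PARTITION_PRUNING_FALLBACK_ON_EXCEPTION.key, "true")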

def unsupportedHiveMetastoreVersionError(version: String, key: String): Throwable = {
new UnsupportedOperationException(s"Unsupported Hive Metastore version ($version). " +
s"Please set $key with a valid version.")
new SparkUnsupportedOperationException(
errorClass = "UNSUPPORTED_HIVE_METASTORE_VERSION",
messageParameters = Array(version, key))
}

def loadHiveClientCausesNoClassDefFoundError(
cnf: NoClassDefFoundError,
execJars: Seq[URL],
key: String,
e: InvocationTargetException): Throwable = {
new ClassNotFoundException(
s"""
|$cnf when creating Hive client using classpath: ${execJars.mkString(", ")}\n
|Please make sure that jars for your version of hive and hadoop are included in the
|paths passed to $key.
""".stripMargin.replaceAll("\n", " "), e)
new SparkClassNotFoundException(
errorClass = "LOAD_HIVE_CLIENT_CAUSES_NO_CLASS_DEFINITION_FOUND",
messageParameters = Array(cnf.toString, execJars.mkString(", "), key), e)
}

def cannotFetchTablesOfDatabaseError(dbName: String, e: Exception): Throwable = {
new SparkException(s"Unable to fetch tables of db $dbName", e)
new SparkException(
errorClass = "CANNOT_FETCH DATABASE_TABLES",
messageParameters = Array(dbName), e)
}

def illegalLocationClauseForViewPartitionError(): Throwable = {
new SparkException("LOCATION clause illegal for view partition")
new SparkException(
errorClass = "ILLEGAL_LOCATION_CLAUSE_FOR_VIEW_PARTITION",
messageParameters = Array.empty, null)
}

def renamePathAsExistsPathError(srcPath: Path, dstPath: Path): Throwable = {
new SparkFileAlreadyExistsException(errorClass = "FAILED_RENAME_PATH",
Array(srcPath.toString, dstPath.toString))
new SparkFileAlreadyExistsException(
errorClass = "FAILED_RENAME_PATH",
messageParameters = Array(srcPath.toString, dstPath.toString))
}

def renameAsExistsPathError(dstPath: Path): Throwable = {
new FileAlreadyExistsException(s"Failed to rename as $dstPath already exists")
new SparkFileAlreadyExistsException(
errorClass = "RENAME_OVERWRITES_EXISTING_PATH",
messageParameters = Array(dstPath.toString)
)
}

def renameSrcPathNotFoundError(srcPath: Path): Throwable = {
new SparkFileNotFoundException(errorClass = "RENAME_SRC_PATH_NOT_FOUND",
Array(srcPath.toString))
new SparkFileNotFoundException(
errorClass = "RENAME_SRC_PATH_NOT_FOUND",
messageParameters = Array(srcPath.toString)
)
}

def failedRenameTempFileError(srcPath: Path, dstPath: Path): Throwable = {
new IOException(s"Failed to rename temp file $srcPath to $dstPath as rename returned false")
new SparkIOException(
errorClass = "FAILED_RENAME_TEMPORARY_FILE",
messageParameters = Array(srcPath.toString, dstPath.toString)
)
}

def legacyMetadataPathExistsError(metadataPath: Path, legacyMetadataPath: Path): Throwable = {
new SparkException(
s"""
|Error: we detected a possible problem with the location of your "_spark_metadata"
|directory and you likely need to move it before restarting this query.
|
|Earlier version of Spark incorrectly escaped paths when writing out the
|"_spark_metadata" directory for structured streaming. While this was corrected in
|Spark 3.0, it appears that your query was started using an earlier version that
|incorrectly handled the "_spark_metadata" path.
|
|Correct "_spark_metadata" Directory: $metadataPath
|Incorrect "_spark_metadata" Directory: $legacyMetadataPath
|
|Please move the data from the incorrect directory to the correct one, delete the
|incorrect directory, and then restart this query. If you believe you are receiving
|this message in error, you can disable it with the SQL conf
|${SQLConf.STREAMING_CHECKPOINT_ESCAPED_PATH_CHECK_ENABLED.key}.
""".stripMargin)
errorClass = "LEGACY_METADATA_PATH_EXISTS",
messageParameters = Array(
metadataPath.toString,
legacyMetadataPath.toString,
SQLConf.STREAMING_CHECKPOINT_ESCAPED_PATH_CHECK_ENABLED.key), null)
}

def partitionColumnNotFoundInSchemaError(col: String, schema: StructType): Throwable = {
new RuntimeException(s"Partition column $col not found in schema $schema")
new SparkRuntimeException(
errorClass = "MISSING_PARTITION_COLUMN",
messageParameters = Array(col, schema.toString), null)
}

def stateNotDefinedOrAlreadyRemovedError(): Throwable = {
new NoSuchElementException("State is either not defined or has already been removed")
new SparkNoSuchElementException(
errorClass = "STATE_NOT_DEFINED_OR_ALREADY_REMOVED",
messageParameters = Array.empty
)
}

def cannotSetTimeoutDurationError(): Throwable = {
new UnsupportedOperationException(
"Cannot set timeout duration without enabling processing time timeout in " +
"[map|flatMap]GroupsWithState")
new SparkUnsupportedOperationException(
errorClass = "CANNOT_SET_TIMEOUT_DURATION",
messageParameters = Array.empty
)
}

def cannotGetEventTimeWatermarkError(): Throwable = {
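Each factory method above passes an errorClass string that must match a key in error-classes.json character for character; a misspelling (for instance "CANNOT_FETCH DATABASE_TABLES" with a space instead of an underscore) only surfaces when the message lookup fails. A hedged sketch of construction-time validation follows — SketchSparkException is a hypothetical name reusing the ErrorClassSketch registry from the earlier note, not Spark's real exception hierarchy:

```scala
// Hypothetical sketch, not Spark's actual exception classes: the error class
// is validated against the registry when the exception is constructed, so a
// typo in the class name fails fast instead of yielding a broken message.
class SketchSparkException(
    val errorClass: String,
    val messageParameters: Array[String],
    cause: Throwable = null)
  extends Exception(SketchSparkException.resolve(errorClass, messageParameters), cause)

object SketchSparkException {
  private def resolve(errorClass: String, parameters: Array[String]): String = {
    require(
      ErrorClassSketch.contains(errorClass),
      s"'$errorClass' is not defined in error-classes.json")
    ErrorClassSketch.getMessage(errorClass, parameters)
  }
}

// Hypothetical usage mirroring cannotFetchTablesOfDatabaseError above:
// def cannotFetchTablesOfDatabaseError(dbName: String, e: Exception): Throwable =
//   new SketchSparkException("CANNOT_FETCH_DATABASE_TABLES", Array(dbName), e)
```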