Merged

15 commits
e7145ae  [SPARK-46504][PS][TESTS][FOLLOWUPS] Moving slow tests out of `Indexes…  (zhengruifeng, Dec 30, 2023)
b05c612  [SPARK-46490][SQL] Require error classes in `SparkThrowable` sub-classes  (MaxGekk, Dec 30, 2023)
9a6b27e  [SPARK-46548][PYTHON][DOCS] Refine docstring of `get/array_zip/sort_a…  (LuciferYang, Dec 31, 2023)
ae91063  [SPARK-46504][PS][TESTS][FOLLOWUPS] Moving move slow tests out of `In…  (zhengruifeng, Dec 31, 2023)
9bf5e82  [SPARK-46551][PYTHON][DOCS] Refine docstring of `flatten/sequence/shu…  (LuciferYang, Jan 2, 2024)
e608211  [SPARK-46554][BUILD] Upgrade slf4j to 2.0.10  (panbingkun, Jan 2, 2024)
f3e1623  [SPARK-46543][PYTHON][CONNECT] Make `json_tuple` throw PySparkValueEr…  (zhengruifeng, Jan 2, 2024)
fb594cb  [SPARK-46555][PYTHON][DOCS] Refine docstring for DataFrame.createTemp…  (HyukjinKwon, Jan 2, 2024)
3fd9876  [SPARK-46556][PYTHON][DOCS] Refine docstring for DataFrame.createGlob…  (HyukjinKwon, Jan 2, 2024)
48a09c4  [SPARK-46540][PYTHON] Respect column names when Python data source re…  (allisonwang-db, Jan 2, 2024)
760cd66  [SPARK-46557][PYTHON][DOCS] Refine docstring for DataFrame.schema/exp…  (HyukjinKwon, Jan 2, 2024)
c5dd72c  [SPARK-46553][PS] `FutureWarning` for `interpolate` with object dtype  (itholic, Jan 2, 2024)
7d09991  [SPARK-46544][SQL] Support v2 DESCRIBE TABLE EXTENDED with table stats  (Zouxxyy, Jan 2, 2024)
8cec633  [SPARK-46558][CONNECT] Extract a helper function to eliminate the dup…  (LuciferYang, Jan 2, 2024)
e690e96  [SPARK-46559][MLLIB] Wrap the `export` in the package name with backt…  (LuciferYang, Jan 2, 2024)
common/utils/src/main/resources/error/error-classes.json (30 additions, 0 deletions)
@@ -7073,6 +7073,36 @@
       "Namespace '<namespace>' is non empty. <details>"
     ]
   },
+  "_LEGACY_ERROR_TEMP_3104" : {
+    "message" : [
+      "<message>"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3105" : {
+    "message" : [
+      "<message>"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3106" : {
+    "message" : [
+      "<message>"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3107" : {
+    "message" : [
+      "<message>"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3108" : {
+    "message" : [
+      "<message>"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3109" : {
+    "message" : [
+      "<message>"
+    ]
+  },
   "_LEGACY_ERROR_USER_RAISED_EXCEPTION" : {
     "message" : [
       "<errorMessage>"
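Each `_LEGACY_ERROR_TEMP_31xx` entry above is a thin wrapper whose single `<message>` parameter carries a previously free-form error message, so that every thrown SparkThrowable reports a non-null error class. A minimal sketch of how such a template is raised, assuming the `(errorClass, messageParameters, cause)` constructor that `SparkException` provides; the call site itself is hypothetical:

import org.apache.spark.SparkException

// Instead of new SparkException("some free-form text"), route the text
// through a legacy template so that getErrorClass is non-null.
throw new SparkException(
  errorClass = "_LEGACY_ERROR_TEMP_3104",
  messageParameters = Map("message" -> "some free-form text"),
  cause = null)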
common/utils/src/main/scala/org/apache/spark/SparkException.scala (29 additions, 83 deletions)
@@ -133,11 +133,11 @@ private[spark] case class ExecutorDeadException(message: String)
/**
 * Exception thrown when Spark returns different result after upgrading to a new version.
 */
-private[spark] class SparkUpgradeException(
-    message: String,
-    cause: Option[Throwable],
-    errorClass: Option[String],
-    messageParameters: Map[String, String])
+private[spark] class SparkUpgradeException private(
+    message: String,
+    cause: Option[Throwable],
+    errorClass: Option[String],
+    messageParameters: Map[String, String])
  extends RuntimeException(message, cause.orNull) with SparkThrowable {

  def this(
@@ -152,15 +152,6 @@ private[spark] class SparkUpgradeException(
    )
  }

-  def this(message: String, cause: Option[Throwable]) = {
-    this(
-      message,
-      cause = cause,
-      errorClass = None,
-      messageParameters = Map.empty
-    )
-  }
-
  override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

  override def getErrorClass: String = errorClass.orNull
@@ -169,7 +160,7 @@
/**
 * Arithmetic exception thrown from Spark with an error class.
 */
-private[spark] class SparkArithmeticException(
+private[spark] class SparkArithmeticException private(
    message: String,
    errorClass: Option[String],
    messageParameters: Map[String, String],
@@ -189,14 +180,10 @@ private[spark] class SparkArithmeticException(
    )
  }

-  def this(message: String) = {
-    this(
-      message,
-      errorClass = None,
-      messageParameters = Map.empty,
-      context = Array.empty
-    )
-  }
+  def this(
+      errorClass: String,
+      messageParameters: Map[String, String],
+      context: Array[QueryContext]) = this(errorClass, messageParameters, context, "")

  override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

@@ -207,7 +194,7 @@
/**
 * Unsupported operation exception thrown from Spark with an error class.
 */
-private[spark] class SparkUnsupportedOperationException(
+private[spark] class SparkUnsupportedOperationException private(
    message: String,
    errorClass: Option[String],
    messageParameters: Map[String, String])
@@ -223,14 +210,6 @@ private[spark] class SparkUnsupportedOperationException(
    )
  }

-  def this(message: String) = {
-    this(
-      message,
-      errorClass = None,
-      messageParameters = Map.empty
-    )
-  }
-
  override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

  override def getErrorClass: String = errorClass.orNull
@@ -271,7 +250,7 @@ private[spark] class SparkConcurrentModificationException(
/**
 * Datetime exception thrown from Spark with an error class.
 */
-private[spark] class SparkDateTimeException(
+private[spark] class SparkDateTimeException private(
    message: String,
    errorClass: Option[String],
    messageParameters: Map[String, String],
@@ -291,14 +270,10 @@ private[spark] class SparkDateTimeException(
    )
  }

-  def this(message: String) = {
-    this(
-      message,
-      errorClass = None,
-      messageParameters = Map.empty,
-      context = Array.empty
-    )
-  }
+  def this(
+      errorClass: String,
+      messageParameters: Map[String, String],
+      context: Array[QueryContext]) = this(errorClass, messageParameters, context, "")

  override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

@@ -324,7 +299,7 @@ private[spark] class SparkFileNotFoundException(
/**
 * Number format exception thrown from Spark with an error class.
 */
-private[spark] class SparkNumberFormatException private[spark](
+private[spark] class SparkNumberFormatException private(
    message: String,
    errorClass: Option[String],
    messageParameters: Map[String, String],
@@ -345,14 +320,10 @@ private[spark] class SparkNumberFormatException private[spark](
    )
  }

-  def this(message: String) = {
-    this(
-      message,
-      errorClass = None,
-      messageParameters = Map.empty,
-      context = Array.empty
-    )
-  }
+  def this(
+      errorClass: String,
+      messageParameters: Map[String, String],
+      context: Array[QueryContext]) = this(errorClass, messageParameters, context, "")

  override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

@@ -363,7 +334,7 @@
/**
 * Illegal argument exception thrown from Spark with an error class.
 */
-private[spark] class SparkIllegalArgumentException(
+private[spark] class SparkIllegalArgumentException private(
    message: String,
    cause: Option[Throwable],
    errorClass: Option[String],
@@ -387,30 +358,19 @@
    )
  }

-  def this(message: String, cause: Option[Throwable]) = {
-    this(
-      message,
-      cause = cause,
-      errorClass = None,
-      messageParameters = Map.empty,
-      context = Array.empty
-    )
-  }
-
  override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

  override def getErrorClass: String = errorClass.orNull
  override def getQueryContext: Array[QueryContext] = context
}

-private[spark] class SparkRuntimeException(
+private[spark] class SparkRuntimeException private(
    message: String,
    cause: Option[Throwable],
    errorClass: Option[String],
    messageParameters: Map[String, String],
    context: Array[QueryContext])
-  extends RuntimeException(message, cause.orNull)
-  with SparkThrowable {
+  extends RuntimeException(message, cause.orNull) with SparkThrowable {

  def this(
      errorClass: String,
@@ -427,16 +387,6 @@ private[spark] class SparkRuntimeException(
    )
  }

-  def this(message: String, cause: Option[Throwable]) = {
-    this(
-      message,
-      cause = cause,
-      errorClass = None,
-      messageParameters = Map.empty,
-      context = Array.empty
-    )
-  }
-
  override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

  override def getErrorClass: String = errorClass.orNull
@@ -480,7 +430,7 @@ private[spark] class SparkSecurityException(
/**
 * Array index out of bounds exception thrown from Spark with an error class.
 */
-private[spark] class SparkArrayIndexOutOfBoundsException(
+private[spark] class SparkArrayIndexOutOfBoundsException private(
    message: String,
    errorClass: Option[String],
    messageParameters: Map[String, String],
@@ -501,14 +451,10 @@ private[spark] class SparkArrayIndexOutOfBoundsException(
    )
  }

-  def this(message: String) = {
-    this(
-      message,
-      errorClass = None,
-      messageParameters = Map.empty,
-      context = Array.empty
-    )
-  }
+  def this(
+      errorClass: String,
+      messageParameters: Map[String, String],
+      context: Array[QueryContext]) = this(errorClass, messageParameters, context, "")

  override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

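The pattern repeats across SparkException.scala: each primary constructor becomes private, the message-only auxiliary constructors are deleted, and a three-argument (errorClass, messageParameters, context) form delegates to the four-argument one with an empty summary. A minimal sketch of the resulting call-site contract, assuming the `DIVIDE_BY_ZERO` error class with its `config` parameter; the call site is illustrative, not from this PR:

import org.apache.spark.SparkArithmeticException

// The String-only constructor no longer exists, so this would not compile:
//   throw new SparkArithmeticException("Division by zero")
// Callers must name an error class and supply its parameters instead:
throw new SparkArithmeticException(
  errorClass = "DIVIDE_BY_ZERO",
  messageParameters = Map("config" -> "\"spark.sql.ansi.enabled\""),
  context = Array.empty)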
@@ -211,23 +211,36 @@ class SparkConnectClientSuite extends ConnectFunSuite with BeforeAndAfterEach {
    }
  }

-  for ((name, constructor) <- GrpcExceptionConverter.errorFactory) {
-    test(s"error framework parameters - $name") {
-      val testParams = GrpcExceptionConverter.ErrorParams(
-        message = "Found duplicate keys `abc`",
-        cause = None,
-        errorClass = Some("DUPLICATE_KEY"),
-        messageParameters = Map("keyColumn" -> "`abc`"),
-        queryContext = Array.empty)
-      val error = constructor(testParams)
-      assert(error.getMessage.contains(testParams.message))
-      assert(error.getCause == null)
-      error match {
-        case sparkThrowable: SparkThrowable =>
-          assert(sparkThrowable.getErrorClass == testParams.errorClass.get)
-          assert(sparkThrowable.getMessageParameters.asScala == testParams.messageParameters)
-          assert(sparkThrowable.getQueryContext.isEmpty)
-        case _ =>
+  test("error framework parameters") {
+    val errors = GrpcExceptionConverter.errorFactory
+    for ((name, constructor) <- errors if name.startsWith("org.apache.spark")) {
+      withClue(name) {
+        val testParams = GrpcExceptionConverter.ErrorParams(
+          message = "",
+          cause = None,
+          errorClass = Some("DUPLICATE_KEY"),
+          messageParameters = Map("keyColumn" -> "`abc`"),
+          queryContext = Array.empty)
+        val error = constructor(testParams).asInstanceOf[Throwable with SparkThrowable]
+        assert(error.getMessage.contains(testParams.message))
+        assert(error.getCause == null)
+        assert(error.getErrorClass == testParams.errorClass.get)
+        assert(error.getMessageParameters.asScala == testParams.messageParameters)
+        assert(error.getQueryContext.isEmpty)
+      }
+    }
+
+    for ((name, constructor) <- errors if !name.startsWith("org.apache.spark")) {
+      withClue(name) {
+        val testParams = GrpcExceptionConverter.ErrorParams(
+          message = "Found duplicate keys `abc`",
+          cause = None,
+          errorClass = None,
+          messageParameters = Map.empty,
+          queryContext = Array.empty)
+        val error = constructor(testParams)
+        assert(error.getMessage.contains(testParams.message))
+        assert(error.getCause == null)
+      }
+    }
  }
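The rewritten suite replaces one generated test per errorFactory entry with a single test that partitions entries by class-name prefix: classes under org.apache.spark must round-trip the full error framework parameters, while plain JVM exceptions only need to preserve message and cause, and withClue keeps each failure attributable to the entry that produced it. A self-contained sketch of that ScalaTest idiom with hypothetical data (PrefixPartitionSuite and its cases are not from the PR):

import org.scalatest.funsuite.AnyFunSuite

// One test body loops over labelled cases; withClue prepends the label to
// any failed assertion, standing in for the per-entry tests that were removed.
class PrefixPartitionSuite extends AnyFunSuite {
  private val cases = Map(
    "org.apache.spark.SparkRuntimeException" -> true,
    "java.lang.IllegalStateException" -> false)

  test("entries are partitioned by class-name prefix") {
    for ((name, expectSparkPrefix) <- cases) {
      withClue(name) {
        assert(name.startsWith("org.apache.spark") == expectSparkPrefix)
      }
    }
  }
}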