diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala index fd09e99b9ee6..68243233216e 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala @@ -2398,7 +2398,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { "autoBroadcastjoinThreshold" -> SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key, "driverMemory" -> SparkLauncher.DRIVER_MEMORY, "analyzeTblMsg" -> analyzeTblMsg), - cause = oe).initCause(oe.getCause) + cause = oe.getCause) } def executeCodePathUnsupportedError(execName: String): SparkUnsupportedOperationException = { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala index 61349c38d2b4..069fce237f2b 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala @@ -308,6 +308,10 @@ class QueryExecutionErrorsSuite } } + test("SPARK-42290: NotEnoughMemory error can't be created") { + QueryExecutionErrors.notEnoughMemoryToBuildAndBroadcastTableError(new OutOfMemoryError(), Seq()) + } + test("UNSUPPORTED_FEATURE - SPARK-38504: can't read TimestampNTZ as TimestampLTZ") { withTempPath { file => sql("select timestamp_ntz'2019-03-21 00:02:03'").write.orc(file.getCanonicalPath)