diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeReadWriteSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeReadWriteSuite.scala
index 6c509297c1ab9..05cd751d43483 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeReadWriteSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeReadWriteSuite.scala
@@ -22,6 +22,7 @@ import java.nio.charset.StandardCharsets
 import java.nio.file.Files
 import java.sql.{Date, Timestamp}
 
+import org.apache.commons.lang3.{JavaVersion, SystemUtils}
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.hive.ql.io.{DelegateSymlinkTextInputFormat, SymlinkTextInputFormat}
 import org.apache.hadoop.mapred.FileSplit;
@@ -227,6 +228,9 @@ class HiveSerDeReadWriteSuite extends QueryTest with SQLTestUtils with TestHiveS
   }
 
   test("SPARK-40815: DelegateSymlinkTextInputFormat serialization") {
+    // Ignored due to JDK 11 failures reported in https://github.com/apache/spark/pull/38277.
+    assume(!SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_9))
+
     def assertSerDe(split: DelegateSymlinkTextInputFormat.DelegateSymlinkTextInputSplit): Unit = {
       val buf = new ByteArrayOutputStream()
       val out = new DataOutputStream(buf)
@@ -262,6 +266,9 @@ class HiveSerDeReadWriteSuite extends QueryTest with SQLTestUtils with TestHiveS
   }
 
   test("SPARK-40815: Read SymlinkTextInputFormat") {
+    // Ignored due to JDK 11 failures reported in https://github.com/apache/spark/pull/38277.
+    assume(!SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_9))
+
     withTable("t") {
       withTempDir { root =>
         val dataPath = new File(root, "data")
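
Note on the guard pattern (not part of the patch): the added lines rely on ScalaTest's assume(), which cancels a test, rather than failing it, when its precondition is false, combined with the commons-lang3 JDK version probe. A minimal standalone sketch of the same pattern follows; the suite name and test body are hypothetical, and it assumes scalatest and commons-lang3 are on the classpath.

import org.apache.commons.lang3.{JavaVersion, SystemUtils}
import org.scalatest.funsuite.AnyFunSuite

// Hypothetical suite illustrating the JDK-version guard used in the patch above.
class Jdk9GuardSketchSuite extends AnyFunSuite {
  test("runs only on JDK 8") {
    // assume() throws TestCanceledException on JDK 9+, so the run reports a
    // canceled (skipped) test instead of a failure.
    assume(!SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_9))
    assert(1 + 1 == 2)
  }
}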