diff --git a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/HoodieClientTestHarness.java b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/HoodieClientTestHarness.java
index 75c139e43dfff..72b277128162d 100644
--- a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/HoodieClientTestHarness.java
+++ b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/HoodieClientTestHarness.java
@@ -48,6 +48,7 @@
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SQLContext;
+import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.TestInfo;
 import scala.Tuple2;
@@ -86,6 +87,11 @@ public abstract class HoodieClientTestHarness extends HoodieCommonTestHarness im
   protected transient MiniDFSCluster dfsCluster;
   protected transient DistributedFileSystem dfs;
 
+  @AfterAll
+  public static void tearDownAll() throws IOException {
+    FileSystem.closeAll();
+  }
+
   @BeforeEach
   public void setTestMethodName(TestInfo testInfo) {
     if (testInfo.getTestMethod().isPresent()) {
@@ -246,7 +252,6 @@ protected void cleanupClients() throws IOException {
    * @throws IOException
    */
   protected void initDFS() throws IOException {
-    FileSystem.closeAll();
     hdfsTestService = new HdfsTestService();
     dfsCluster = hdfsTestService.start(true);
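
For context, the change moves FileSystem.closeAll() out of initDFS() and into a class-level @AfterAll hook, so Hadoop's cached FileSystem instances are closed once after every test in a harness subclass has finished, instead of being torn down at the start of each DFS initialization. Below is a minimal sketch of the same pattern in isolation; the class name, test method, and temp directory are hypothetical and not part of the Hudi codebase, while FileSystem.closeAll(), FileSystem.get(Configuration), and JUnit 5's @AfterAll are real APIs.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Test;

// Hypothetical standalone test class illustrating the @AfterAll teardown pattern.
public class ExampleFileSystemTeardownTest {

  @AfterAll
  public static void tearDownAll() throws IOException {
    // Close every FileSystem instance held in Hadoop's JVM-wide cache,
    // once, after all tests in this class have run.
    FileSystem.closeAll();
  }

  @Test
  public void writesThroughCachedFileSystem() throws IOException {
    // FileSystem.get(...) hands back a cached instance; the cache entry
    // created here is what the @AfterAll hook above cleans up.
    FileSystem fs = FileSystem.get(new Configuration());
    fs.mkdirs(new Path(System.getProperty("java.io.tmpdir"), "hudi-example-dir"));
  }
}

Note that @AfterAll methods must be static under JUnit 5's default per-method test lifecycle, which is why tearDownAll() is declared static in the diff above.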