From 57a17cba02f0efa7b7ba2a68a7c77ceea2ca446e Mon Sep 17 00:00:00 2001
From: fanshilun
Date: Tue, 4 Feb 2025 10:29:42 +0800
Subject: [PATCH 1/6] HADOOP-19415. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-common Part2.

---
 .../hadoop/fs/FileContextTestHelper.java | 22 +-
 .../hadoop/fs/FileContextTestWrapper.java | 22 +-
 .../hadoop/fs/FileSystemTestHelper.java | 14 +-
 .../hadoop/fs/FileSystemTestWrapper.java | 22 +-
 .../apache/hadoop/fs/TestAfsCheckPath.java | 14 +-
 .../org/apache/hadoop/fs/TestAvroFSInput.java | 4 +-
 .../apache/hadoop/fs/TestBlockLocation.java | 15 +-
 .../hadoop/fs/TestChecksumFileSystem.java | 28 +-
 .../apache/hadoop/fs/TestCommandFormat.java | 8 +-
 .../apache/hadoop/fs/TestContentSummary.java | 62 +--
 .../hadoop/fs/TestDFCachingGetSpaceUsed.java | 12 +-
 .../apache/hadoop/fs/TestDFVariations.java | 52 ++-
 .../java/org/apache/hadoop/fs/TestDU.java | 36 +-
 .../org/apache/hadoop/fs/TestDefaultUri.java | 2 +-
 .../hadoop/fs/TestDelegateToFileSystem.java | 6 +-
 .../hadoop/fs/TestDelegateToFsCheckPath.java | 2 +-
 .../hadoop/fs/TestDelegationTokenRenewer.java | 20 +-
 .../org/apache/hadoop/fs/TestFileContext.java | 22 +-
 .../fs/TestFileContextDeleteOnExit.java | 28 +-
 .../hadoop/fs/TestFileContextResolveAfs.java | 14 +-
 .../org/apache/hadoop/fs/TestFileStatus.java | 8 +-
 .../fs/TestFileSystemCanonicalization.java | 12 +-
 .../fs/TestFileSystemInitialization.java | 4 +-
 .../fs/TestFileSystemStorageStatistics.java | 12 +-
 .../hadoop/fs/TestFileSystemTokens.java | 4 +-
 .../org/apache/hadoop/fs/TestFileUtil.java | 405 ++++++++++--------
 .../hadoop/fs/TestFilterFileSystem.java | 22 +-
 .../org/apache/hadoop/fs/TestFilterFs.java | 2 +-
 .../org/apache/hadoop/fs/TestFsOptions.java | 4 +-
 .../org/apache/hadoop/fs/TestFsShell.java | 2 +-
 .../org/apache/hadoop/fs/TestFsShellCopy.java | 18 +-
 .../org/apache/hadoop/fs/TestFsShellList.java | 32 +-
 .../hadoop/fs/TestFsShellReturnCode.java | 61 +--
 .../apache/hadoop/fs/TestFsShellTouch.java | 10 +-
 .../hadoop/fs/TestFsUrlConnectionPath.java | 20 +-
 .../hadoop/fs/TestGetFileBlockLocations.java | 12 +-
 .../apache/hadoop/fs/TestGetSpaceUsed.java | 12 +-
 .../apache/hadoop/fs/TestGlobExpander.java | 12 +-
 .../org/apache/hadoop/fs/TestGlobPattern.java | 14 +-
 .../apache/hadoop/fs/TestHarFileSystem.java | 6 +-
 .../hadoop/fs/TestHarFileSystemBasics.java | 56 +--
 .../org/apache/hadoop/fs/TestHardLink.java | 16 +-
 .../org/apache/hadoop/fs/TestListFiles.java | 20 +-
 .../hadoop/fs/TestLocalDirAllocator.java | 58 ++-
 .../apache/hadoop/fs/TestLocalFileSystem.java | 74 ++--
 .../fs/TestLocalFileSystemPermission.java | 12 +-
 .../hadoop/fs/TestLocatedFileStatus.java | 2 +-
 .../java/org/apache/hadoop/fs/TestPath.java | 106 +++--
 .../org/apache/hadoop/fs/TestQuotaUsage.java | 34 +-
 .../fs/TestSymlinkLocalFSFileContext.java | 4 +-
 .../fs/TestSymlinkLocalFSFileSystem.java | 26 +-
 .../java/org/apache/hadoop/fs/TestTrash.java | 164 +++----
 .../hadoop/fs/TestTruncatedInputBug.java | 4 +-
 .../fs/audit/TestCommonAuditContext.java | 2 +-
 .../hadoop/fs/contract/ftp/FTPContract.java | 4 +-
 .../hadoop/fs/ftp/TestFTPFileSystem.java | 21 +-
 .../hadoop/fs/http/TestHttpFileSystem.java | 8 +-
 .../apache/hadoop/fs/permission/TestAcl.java | 8 +-
 .../fs/permission/TestFsPermission.java | 8 +-
 .../fs/protocolPB/TestFSSerialization.java | 4 +-
 .../hadoop/fs/sftp/TestSFTPFileSystem.java | 58 +--
 .../hadoop/fs/shell/TestAclCommands.java | 74 ++--
 .../hadoop/fs/shell/TestCommandFactory.java | 8 +-
 .../org/apache/hadoop/fs/shell/TestCopy.java | 12 +-
 .../hadoop/fs/shell/TestCopyFromLocal.java | 36 +-
 .../hadoop/fs/shell/TestCopyPreserveFlag.java | 69 +--
 .../hadoop/fs/shell/TestCopyToLocal.java | 47 +-
 .../org/apache/hadoop/fs/shell/TestCount.java | 24 +-
 .../apache/hadoop/fs/shell/TestCpCommand.java | 47 +-
 .../org/apache/hadoop/fs/shell/TestLs.java | 74 ++--
 .../org/apache/hadoop/fs/shell/TestMove.java | 23 +-
 .../apache/hadoop/fs/shell/TestPathData.java | 64 +--
 .../hadoop/fs/shell/TestPathExceptions.java | 6 +-
 .../hadoop/fs/shell/TestPrintableString.java | 2 +-
 .../org/apache/hadoop/fs/shell/TestTail.java | 4 +-
 .../hadoop/fs/shell/TestTextCommand.java | 99 +++--
 .../hadoop/fs/shell/TestXAttrCommands.java | 42 +-
 .../apache/hadoop/fs/shell/find/TestAnd.java | 4 +-
 .../fs/shell/find/TestFilterExpression.java | 8 +-
 .../apache/hadoop/fs/shell/find/TestFind.java | 8 +-
 .../hadoop/fs/shell/find/TestIname.java | 8 +-
 .../apache/hadoop/fs/shell/find/TestName.java | 8 +-
 .../hadoop/fs/shell/find/TestPrint.java | 8 +-
 .../hadoop/fs/shell/find/TestPrint0.java | 8 +-
 .../hadoop/fs/shell/find/TestResult.java | 4 +-
 .../hadoop/fs/store/TestDataBlocks.java | 22 +-
 .../hadoop/fs/store/TestEtagChecksum.java | 6 +-
 .../fs/viewfs/TestChRootedFileSystem.java | 162 +++----
 .../hadoop/fs/viewfs/TestChRootedFs.java | 149 ++++---
 ...TestRegexMountPointInterceptorFactory.java | 8 +-
 ...ointResolvedDstPathReplaceInterceptor.java | 20 +-
 .../viewfs/TestViewFileSystemDelegation.java | 7 +-
 ...tViewFileSystemDelegationTokenSupport.java | 8 +-
 ...leSystemOverloadSchemeLocalFileSystem.java | 40 +-
 .../hadoop/fs/viewfs/TestViewFsConfig.java | 54 +--
 .../hadoop/fs/viewfs/TestViewFsTrash.java | 12 +-
 .../hadoop/fs/viewfs/TestViewFsURIs.java | 2 +-
 .../fs/viewfs/TestViewfsFileStatus.java | 22 +-
 .../hadoop/fs/viewfs/ViewFsTestSetup.java | 6 +-
 99 files changed, 1587 insertions(+), 1384 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
index b5307a4e27669..b2782224ab297 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
@@ -26,7 +26,7 @@
 import org.apache.hadoop.fs.Options.CreateOpts.BlockSize;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.junit.Assert;
+import org.junit.jupiter.api.Assertions;
 /**
  * Helper class for unit tests.
@@ -220,28 +220,28 @@ public enum fileType {isDir, isFile, isSymlink}; public static void checkFileStatus(FileContext aFc, String path, fileType expectedType) throws IOException { FileStatus s = aFc.getFileStatus(new Path(path)); - Assert.assertNotNull(s); + Assertions.assertNotNull(s); if (expectedType == fileType.isDir) { - Assert.assertTrue(s.isDirectory()); + Assertions.assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - Assert.assertTrue(s.isFile()); + Assertions.assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - Assert.assertTrue(s.isSymlink()); + Assertions.assertTrue(s.isSymlink()); } - Assert.assertEquals(aFc.makeQualified(new Path(path)), s.getPath()); + Assertions.assertEquals(aFc.makeQualified(new Path(path)), s.getPath()); } public static void checkFileLinkStatus(FileContext aFc, String path, fileType expectedType) throws IOException { FileStatus s = aFc.getFileLinkStatus(new Path(path)); - Assert.assertNotNull(s); + Assertions.assertNotNull(s); if (expectedType == fileType.isDir) { - Assert.assertTrue(s.isDirectory()); + Assertions.assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - Assert.assertTrue(s.isFile()); + Assertions.assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - Assert.assertTrue(s.isSymlink()); + Assertions.assertTrue(s.isSymlink()); } - Assert.assertEquals(aFc.makeQualified(new Path(path)), s.getPath()); + Assertions.assertEquals(aFc.makeQualified(new Path(path)), s.getPath()); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java index 0dd1e9aa3e0f7..6c170d6b29fa5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java @@ -28,7 +28,7 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.AccessControlException; -import org.junit.Assert; +import org.junit.jupiter.api.Assertions; /** * Helper class for unit tests. 
@@ -169,29 +169,29 @@ public FileStatus containsPath(String path, FileStatus[] dirList) public void checkFileStatus(String path, fileType expectedType) throws IOException { FileStatus s = fc.getFileStatus(new Path(path)); - Assert.assertNotNull(s); + Assertions.assertNotNull(s); if (expectedType == fileType.isDir) { - Assert.assertTrue(s.isDirectory()); + Assertions.assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - Assert.assertTrue(s.isFile()); + Assertions.assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - Assert.assertTrue(s.isSymlink()); + Assertions.assertTrue(s.isSymlink()); } - Assert.assertEquals(fc.makeQualified(new Path(path)), s.getPath()); + Assertions.assertEquals(fc.makeQualified(new Path(path)), s.getPath()); } public void checkFileLinkStatus(String path, fileType expectedType) throws IOException { FileStatus s = fc.getFileLinkStatus(new Path(path)); - Assert.assertNotNull(s); + Assertions.assertNotNull(s); if (expectedType == fileType.isDir) { - Assert.assertTrue(s.isDirectory()); + Assertions.assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - Assert.assertTrue(s.isFile()); + Assertions.assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - Assert.assertTrue(s.isSymlink()); + Assertions.assertTrue(s.isSymlink()); } - Assert.assertEquals(fc.makeQualified(new Path(path)), s.getPath()); + Assertions.assertEquals(fc.makeQualified(new Path(path)), s.getPath()); } // diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java index ef9e094c4c978..f313687ebcc2e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java @@ -25,9 +25,9 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Assert; +import org.junit.jupiter.api.Assertions; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.mock; /** @@ -241,15 +241,15 @@ public enum fileType {isDir, isFile, isSymlink}; public static void checkFileStatus(FileSystem aFs, String path, fileType expectedType) throws IOException { FileStatus s = aFs.getFileStatus(new Path(path)); - Assert.assertNotNull(s); + Assertions.assertNotNull(s); if (expectedType == fileType.isDir) { - Assert.assertTrue(s.isDirectory()); + Assertions.assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - Assert.assertTrue(s.isFile()); + Assertions.assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - Assert.assertTrue(s.isSymlink()); + Assertions.assertTrue(s.isSymlink()); } - Assert.assertEquals(aFs.makeQualified(new Path(path)), s.getPath()); + Assertions.assertEquals(aFs.makeQualified(new Path(path)), s.getPath()); } /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java index 933ad1a2358cd..1c159d44028cd 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java @@ -29,7 +29,7 @@ import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.util.Progressable; -import org.junit.Assert; +import org.junit.jupiter.api.Assertions; /** * Helper class for unit tests. @@ -170,29 +170,29 @@ public FileStatus containsPath(String path, FileStatus[] dirList) public void checkFileStatus(String path, fileType expectedType) throws IOException { FileStatus s = fs.getFileStatus(new Path(path)); - Assert.assertNotNull(s); + Assertions.assertNotNull(s); if (expectedType == fileType.isDir) { - Assert.assertTrue(s.isDirectory()); + Assertions.assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - Assert.assertTrue(s.isFile()); + Assertions.assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - Assert.assertTrue(s.isSymlink()); + Assertions.assertTrue(s.isSymlink()); } - Assert.assertEquals(fs.makeQualified(new Path(path)), s.getPath()); + Assertions.assertEquals(fs.makeQualified(new Path(path)), s.getPath()); } public void checkFileLinkStatus(String path, fileType expectedType) throws IOException { FileStatus s = fs.getFileLinkStatus(new Path(path)); - Assert.assertNotNull(s); + Assertions.assertNotNull(s); if (expectedType == fileType.isDir) { - Assert.assertTrue(s.isDirectory()); + Assertions.assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - Assert.assertTrue(s.isFile()); + Assertions.assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - Assert.assertTrue(s.isSymlink()); + Assertions.assertTrue(s.isSymlink()); } - Assert.assertEquals(fs.makeQualified(new Path(path)), s.getPath()); + Assertions.assertEquals(fs.makeQualified(new Path(path)), s.getPath()); } // diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java index da429ffe960a4..69b67958c2532 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java @@ -27,7 +27,9 @@ import org.apache.hadoop.fs.Options.ChecksumOpt; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.util.Progressable; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertThrows; public class TestAfsCheckPath { @@ -56,11 +58,13 @@ public void testCheckPathWithTheSameNonDefaultPort() afs.checkPath(new Path("dummy://dummy-host:" + OTHER_PORT)); } - @Test(expected=InvalidPathException.class) + @Test public void testCheckPathWithDifferentPorts() throws URISyntaxException { - URI uri = new URI("dummy://dummy-host:" + DEFAULT_PORT); - AbstractFileSystem afs = new DummyFileSystem(uri); - afs.checkPath(new Path("dummy://dummy-host:" + OTHER_PORT)); + assertThrows(InvalidPathException.class, () -> { + URI uri = new URI("dummy://dummy-host:" + DEFAULT_PORT); + AbstractFileSystem afs = new DummyFileSystem(uri); + afs.checkPath(new Path("dummy://dummy-host:" + OTHER_PORT)); + }); } private static class DummyFileSystem extends AbstractFileSystem { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java index f182fe5da7c36..c7b765d5a724e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java @@ -24,8 +24,8 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; public class TestAvroFSInput { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestBlockLocation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestBlockLocation.java index 72e850b1313d5..e0c812cc8fa4a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestBlockLocation.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestBlockLocation.java @@ -17,11 +17,12 @@ */ package org.apache.hadoop.fs; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; public class TestBlockLocation { @@ -70,7 +71,8 @@ private static void checkBlockLocation(final BlockLocation loc, /** * Call all the constructors and verify the delegation is working properly */ - @Test(timeout = 5000) + @Test + @Timeout(value = 5) public void testBlockLocationConstructors() throws Exception { // BlockLocation loc; @@ -91,7 +93,8 @@ public void testBlockLocationConstructors() throws Exception { /** * Call each of the setters and verify */ - @Test(timeout = 5000) + @Test + @Timeout(value = 5) public void testBlockLocationSetters() throws Exception { BlockLocation loc; loc = new BlockLocation(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java index 8b42aa6779dad..c69a6b0131346 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java @@ -26,8 +26,10 @@ import static org.apache.hadoop.fs.FileSystemTestHelper.*; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.*; -import static org.junit.Assert.*; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.*; public class TestChecksumFileSystem { static final String TEST_ROOT_DIR = @@ -35,7 +37,7 @@ public class TestChecksumFileSystem { static LocalFileSystem localFs; - @Before + @BeforeEach public void resetLocalFs() throws Exception { localFs = FileSystem.getLocal(new Configuration()); localFs.setVerifyChecksum(true); @@ -77,12 +79,12 @@ public void testVerifyChecksum() throws Exception { readFile(localFs, testPath, 1025); localFs.delete(localFs.getChecksumFile(testPath), true); - assertTrue("checksum deleted", 
!localFs.exists(localFs.getChecksumFile(testPath))); + assertTrue(!localFs.exists(localFs.getChecksumFile(testPath)), "checksum deleted"); //copying the wrong checksum file FileUtil.copy(localFs, localFs.getChecksumFile(testPath11), localFs, localFs.getChecksumFile(testPath),false,true,localFs.getConf()); - assertTrue("checksum exists", localFs.exists(localFs.getChecksumFile(testPath))); + assertTrue(localFs.exists(localFs.getChecksumFile(testPath)), "checksum exists"); boolean errorRead = false; try { @@ -90,12 +92,12 @@ public void testVerifyChecksum() throws Exception { }catch(ChecksumException ie) { errorRead = true; } - assertTrue("error reading", errorRead); + assertTrue(errorRead, "error reading"); //now setting verify false, the read should succeed localFs.setVerifyChecksum(false); String str = readFile(localFs, testPath, 1024).toString(); - assertTrue("read", "testing".equals(str)); + assertTrue("testing".equals(str), "read"); } @Test @@ -153,7 +155,7 @@ public void testTruncatedChecksum() throws Exception { // telling it not to verify checksums, should avoid issue. localFs.setVerifyChecksum(false); String str = readFile(localFs, testPath, 1024).toString(); - assertTrue("read", "testing truncation".equals(str)); + assertTrue("testing truncation".equals(str), "read"); } @Test @@ -164,13 +166,13 @@ public void testStreamType() throws Exception { localFs.setVerifyChecksum(true); in = localFs.open(testPath); - assertTrue("stream is input checker", - in.getWrappedStream() instanceof FSInputChecker); + assertTrue( + in.getWrappedStream() instanceof FSInputChecker, "stream is input checker"); localFs.setVerifyChecksum(false); in = localFs.open(testPath); - assertFalse("stream is not input checker", - in.getWrappedStream() instanceof FSInputChecker); + assertFalse( + in.getWrappedStream() instanceof FSInputChecker, "stream is not input checker"); } @Test @@ -200,7 +202,7 @@ public void testCorruptedChecksum() throws Exception { } catch (ChecksumException ce) { e = ce; } finally { - assertNotNull("got checksum error", e); + assertNotNull(e, "got checksum error"); } localFs.setVerifyChecksum(false); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java index 084c6a0aef83d..76ab123f56659 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java @@ -18,7 +18,7 @@ package org.apache.hadoop.fs; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import java.util.ArrayList; import java.util.Arrays; @@ -30,8 +30,8 @@ import org.apache.hadoop.fs.shell.CommandFormat.NotEnoughArgumentsException; import org.apache.hadoop.fs.shell.CommandFormat.TooManyArgumentsException; import org.apache.hadoop.fs.shell.CommandFormat.UnknownOptionException; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; /** * This class tests the command line parsing @@ -41,7 +41,7 @@ public class TestCommandFormat { private static List expectedArgs; private static Set expectedOpts; - @Before + @BeforeEach public void setUp() { args = new ArrayList<>(); expectedOpts = new HashSet<>(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java index 98f9f2021f8b4..d5125ba170d01 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.fs; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.*; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.InOrder; public class TestContentSummary { @@ -33,12 +33,12 @@ public class TestContentSummary { @Test public void testConstructorEmpty() { ContentSummary contentSummary = new ContentSummary.Builder().build(); - assertEquals("getLength", 0, contentSummary.getLength()); - assertEquals("getFileCount", 0, contentSummary.getFileCount()); - assertEquals("getDirectoryCount", 0, contentSummary.getDirectoryCount()); - assertEquals("getQuota", -1, contentSummary.getQuota()); - assertEquals("getSpaceConsumed", 0, contentSummary.getSpaceConsumed()); - assertEquals("getSpaceQuota", -1, contentSummary.getSpaceQuota()); + assertEquals(0, contentSummary.getLength(), "getLength"); + assertEquals(0, contentSummary.getFileCount(), "getFileCount"); + assertEquals(0, contentSummary.getDirectoryCount(), "getDirectoryCount"); + assertEquals(-1, contentSummary.getQuota(), "getQuota"); + assertEquals(0, contentSummary.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(-1, contentSummary.getSpaceQuota(), "getSpaceQuota"); } // check the full constructor with quota information @@ -54,14 +54,14 @@ public void testConstructorWithQuota() { ContentSummary contentSummary = new ContentSummary.Builder().length(length). fileCount(fileCount).directoryCount(directoryCount).quota(quota). spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build(); - assertEquals("getLength", length, contentSummary.getLength()); - assertEquals("getFileCount", fileCount, contentSummary.getFileCount()); - assertEquals("getDirectoryCount", directoryCount, - contentSummary.getDirectoryCount()); - assertEquals("getQuota", quota, contentSummary.getQuota()); - assertEquals("getSpaceConsumed", spaceConsumed, - contentSummary.getSpaceConsumed()); - assertEquals("getSpaceQuota", spaceQuota, contentSummary.getSpaceQuota()); + assertEquals(length, contentSummary.getLength(), "getLength"); + assertEquals(fileCount, contentSummary.getFileCount(), "getFileCount"); + assertEquals(directoryCount +, contentSummary.getDirectoryCount(), "getDirectoryCount"); + assertEquals(quota, contentSummary.getQuota(), "getQuota"); + assertEquals(spaceConsumed +, contentSummary.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(spaceQuota, contentSummary.getSpaceQuota(), "getSpaceQuota"); } // check the constructor with quota information @@ -74,13 +74,13 @@ public void testConstructorNoQuota() { ContentSummary contentSummary = new ContentSummary.Builder().length(length). fileCount(fileCount).directoryCount(directoryCount). 
spaceConsumed(length).build(); - assertEquals("getLength", length, contentSummary.getLength()); - assertEquals("getFileCount", fileCount, contentSummary.getFileCount()); - assertEquals("getDirectoryCount", directoryCount, - contentSummary.getDirectoryCount()); - assertEquals("getQuota", -1, contentSummary.getQuota()); - assertEquals("getSpaceConsumed", length, contentSummary.getSpaceConsumed()); - assertEquals("getSpaceQuota", -1, contentSummary.getSpaceQuota()); + assertEquals(length, contentSummary.getLength(), "getLength"); + assertEquals(fileCount, contentSummary.getFileCount(), "getFileCount"); + assertEquals(directoryCount +, contentSummary.getDirectoryCount(), "getDirectoryCount"); + assertEquals(-1, contentSummary.getQuota(), "getQuota"); + assertEquals(length, contentSummary.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(-1, contentSummary.getSpaceQuota(), "getSpaceQuota"); } // check the write method @@ -127,14 +127,14 @@ public void testReadFields() throws IOException { .thenReturn(spaceQuota); contentSummary.readFields(in); - assertEquals("getLength", length, contentSummary.getLength()); - assertEquals("getFileCount", fileCount, contentSummary.getFileCount()); - assertEquals("getDirectoryCount", directoryCount, - contentSummary.getDirectoryCount()); - assertEquals("getQuota", quota, contentSummary.getQuota()); - assertEquals("getSpaceConsumed", spaceConsumed, - contentSummary.getSpaceConsumed()); - assertEquals("getSpaceQuota", spaceQuota, contentSummary.getSpaceQuota()); + assertEquals(length, contentSummary.getLength(), "getLength"); + assertEquals(fileCount, contentSummary.getFileCount(), "getFileCount"); + assertEquals(directoryCount +, contentSummary.getDirectoryCount(), "getDirectoryCount"); + assertEquals(quota, contentSummary.getQuota(), "getQuota"); + assertEquals(spaceConsumed +, contentSummary.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(spaceQuota, contentSummary.getSpaceQuota(), "getSpaceQuota"); } // check the header with quotas diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFCachingGetSpaceUsed.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFCachingGetSpaceUsed.java index 6b9a34c3b32eb..782a4e6411e50 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFCachingGetSpaceUsed.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFCachingGetSpaceUsed.java @@ -19,16 +19,16 @@ import org.apache.commons.lang3.RandomStringUtils; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Test to make sure df can run and work. 
@@ -37,13 +37,13 @@ public class TestDFCachingGetSpaceUsed { final static private File DF_DIR = GenericTestUtils.getTestDir("testdfspace"); public static final int FILE_SIZE = 1024; - @Before + @BeforeEach public void setUp() { FileUtil.fullyDelete(DF_DIR); assertTrue(DF_DIR.mkdirs()); } - @After + @AfterEach public void tearDown() throws IOException { FileUtil.fullyDelete(DF_DIR); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java index 3476f3eef4329..9cdcc2f31623f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.fs; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; import java.io.BufferedReader; import java.io.File; @@ -29,24 +29,25 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Shell; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; public class TestDFVariations { private static final String TEST_ROOT_DIR = GenericTestUtils.getTestDir("testdfvariations").getAbsolutePath(); private static File test_root = null; - @Before + @BeforeEach public void setup() throws IOException { test_root = new File(TEST_ROOT_DIR); test_root.mkdirs(); } - @After + @AfterEach public void after() throws IOException { FileUtil.setWritable(test_root, true); FileUtil.fullyDelete(test_root); @@ -65,25 +66,28 @@ protected String[] getExecString() { } } - @Test(timeout=5000) + @Test + @Timeout(value = 5) public void testMount() throws Exception { XXDF df = new XXDF(); String expectedMount = Shell.WINDOWS ? df.getDirPath().substring(0, 2) : "/foo/bar"; - assertEquals("Invalid mount point", - expectedMount, df.getMount()); + assertEquals( + expectedMount, df.getMount(), "Invalid mount point"); } - @Test(timeout=5000) + @Test + @Timeout(value = 5) public void testFileSystem() throws Exception { XXDF df = new XXDF(); String expectedFileSystem = Shell.WINDOWS ? 
df.getDirPath().substring(0, 2) : "/dev/sda3"; - assertEquals("Invalid filesystem", - expectedFileSystem, df.getFilesystem()); + assertEquals( + expectedFileSystem, df.getFilesystem(), "Invalid filesystem"); } - @Test(timeout=5000) + @Test + @Timeout(value = 5) public void testDFInvalidPath() throws Exception { // Generate a path that doesn't exist Random random = new Random(0xDEADBEEFl); @@ -106,7 +110,8 @@ public void testDFInvalidPath() throws Exception { } } - @Test(timeout=5000) + @Test + @Timeout(value = 5) public void testDFMalformedOutput() throws Exception { DF df = new DF(new File("/"), 0l); BufferedReader reader = new BufferedReader(new StringReader( @@ -152,19 +157,20 @@ public void testDFMalformedOutput() throws Exception { } } - @Test(timeout=5000) + @Test + @Timeout(value = 5) public void testGetMountCurrentDirectory() throws Exception { File currentDirectory = new File("."); String workingDir = currentDirectory.getAbsoluteFile().getCanonicalPath(); DF df = new DF(new File(workingDir), 0L); String mountPath = df.getMount(); File mountDir = new File(mountPath); - assertTrue("Mount dir ["+mountDir.getAbsolutePath()+"] should exist.", - mountDir.exists()); - assertTrue("Mount dir ["+mountDir.getAbsolutePath()+"] should be directory.", - mountDir.isDirectory()); - assertTrue("Working dir ["+workingDir+"] should start with ["+mountPath+"].", - workingDir.startsWith(mountPath)); + assertTrue( + mountDir.exists(), "Mount dir ["+mountDir.getAbsolutePath()+"] should exist."); + assertTrue( + mountDir.isDirectory(), "Mount dir ["+mountDir.getAbsolutePath()+"] should be directory."); + assertTrue( + workingDir.startsWith(mountPath), "Working dir ["+workingDir+"] should start with ["+mountPath+"]."); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java index f340cc202ed01..208a7becdb474 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java @@ -18,10 +18,10 @@ package org.apache.hadoop.fs; import org.apache.hadoop.util.Shell; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; import static org.junit.Assume.assumeFalse; import java.io.File; @@ -37,14 +37,14 @@ public class TestDU { final static private File DU_DIR = GenericTestUtils.getTestDir("dutmp"); - @Before + @BeforeEach public void setUp() { assumeFalse(Shell.WINDOWS); FileUtil.fullyDelete(DU_DIR); assertTrue(DU_DIR.mkdirs()); } - @After + @AfterEach public void tearDown() throws IOException { FileUtil.fullyDelete(DU_DIR); } @@ -91,9 +91,9 @@ public void testDU() throws IOException, InterruptedException { long duSize = du.getUsed(); du.close(); - assertTrue("Invalid on-disk size", - duSize >= writtenSize && - writtenSize <= (duSize + slack)); + assertTrue( + duSize >= writtenSize && + writtenSize <= (duSize + slack), "Invalid on-disk size"); //test with 0 interval, will not launch thread du = new DU(file, 0, 1, -1); @@ -101,18 +101,18 @@ public void testDU() throws IOException, InterruptedException { duSize = du.getUsed(); du.close(); - assertTrue("Invalid on-disk size", - duSize >= writtenSize && - writtenSize <= (duSize + slack)); + assertTrue( + 
duSize >= writtenSize && + writtenSize <= (duSize + slack), "Invalid on-disk size"); //test without launching thread du = new DU(file, 10000, 0, -1); du.init(); duSize = du.getUsed(); - assertTrue("Invalid on-disk size", - duSize >= writtenSize && - writtenSize <= (duSize + slack)); + assertTrue( + duSize >= writtenSize && + writtenSize <= (duSize + slack), "Invalid on-disk size"); } @Test @@ -124,7 +124,7 @@ public void testDUGetUsedWillNotReturnNegative() throws IOException { DU du = new DU(file, 10000L, 0, -1); du.incDfsUsed(-Long.MAX_VALUE); long duSize = du.getUsed(); - assertTrue(String.valueOf(duSize), duSize >= 0L); + assertTrue(duSize >= 0L, String.valueOf(duSize)); } @Test @@ -133,14 +133,14 @@ public void testDUSetInitialValue() throws IOException { createFile(file, 8192); DU du = new DU(file, 3000, 0, 1024); du.init(); - assertTrue("Initial usage setting not honored", du.getUsed() == 1024); + assertTrue(du.getUsed() == 1024, "Initial usage setting not honored"); // wait until the first du runs. try { Thread.sleep(5000); } catch (InterruptedException ie) {} - assertTrue("Usage didn't get updated", du.getUsed() == 8192); + assertTrue(du.getUsed() == 8192, "Usage didn't get updated"); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDefaultUri.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDefaultUri.java index 9572bed4098f4..1a500ae9b65cb 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDefaultUri.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDefaultUri.java @@ -25,7 +25,7 @@ import org.apache.hadoop.conf.Configuration; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.apache.hadoop.test.LambdaTestUtils.*; /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java index 5de32861db68d..c877c6860960c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java @@ -21,8 +21,8 @@ import org.apache.commons.net.ftp.FTP; import org.apache.hadoop.conf.Configuration; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; public class TestDelegateToFileSystem { @@ -37,7 +37,7 @@ private void testDefaultUriInternal(String defaultUri) FileSystem.setDefaultUri(conf, defaultUri); final AbstractFileSystem ftpFs = AbstractFileSystem.get(FTP_URI_NO_PORT, conf); - Assert.assertEquals(FTP_URI_WITH_PORT, ftpFs.getUri()); + Assertions.assertEquals(FTP_URI_WITH_PORT, ftpFs.getUri()); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFsCheckPath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFsCheckPath.java index 6030c12c16c4d..51638985bc7c6 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFsCheckPath.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFsCheckPath.java @@ -26,7 +26,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.util.Progressable; -import 
org.junit.Test; +import org.junit.jupiter.api.Test; /** * The default port of DelegateToFileSystem is set from child file system. diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java index 582bc3142c872..1990178f50a81 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java @@ -18,7 +18,7 @@ package org.apache.hadoop.fs; import java.io.IOException; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.*; import org.apache.hadoop.conf.Configuration; @@ -27,8 +27,9 @@ import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.Time; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -42,7 +43,7 @@ public abstract class RenewableFileSystem extends FileSystem Configuration conf; FileSystem fs; - @Before + @BeforeEach public void setup() { DelegationTokenRenewer.renewCycle = RENEW_CYCLE; DelegationTokenRenewer.reset(); @@ -69,8 +70,8 @@ public Long answer(InvocationOnMock invocation) { renewer.addRenewAction(fs); - assertEquals("FileSystem not added to DelegationTokenRenewer", 1, - renewer.getRenewQueueLength()); + assertEquals(1 +, renewer.getRenewQueueLength(), "FileSystem not added to DelegationTokenRenewer"); Thread.sleep(RENEW_CYCLE*2); verify(token, atLeast(2)).renew(eq(conf)); @@ -82,8 +83,8 @@ public Long answer(InvocationOnMock invocation) { verify(fs, never()).getDelegationToken(null); verify(fs, never()).setDelegationToken(any()); - assertEquals("FileSystem not removed from DelegationTokenRenewer", 0, - renewer.getRenewQueueLength()); + assertEquals(0 +, renewer.getRenewQueueLength(), "FileSystem not removed from DelegationTokenRenewer"); } @Test @@ -179,7 +180,8 @@ public Long answer(InvocationOnMock invocation) { assertEquals(0, renewer.getRenewQueueLength()); } - @Test(timeout=4000) + @Test + @Timeout(value = 4) public void testMultipleTokensDoNotDeadlock() throws IOException, InterruptedException { Configuration conf = mock(Configuration.class); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java index 60b24c776c14e..eaf484faaf71d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.fs; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; import java.net.URI; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.permission.FsPermission; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -58,25 +58,25 @@ public void testConfBasedAndAPIBasedSetUMask() throws Exception { FileContext fc1 = 
FileContext.getFileContext(uri1, conf); FileContext fc2 = FileContext.getFileContext(uri2, conf); - assertEquals("Umask for fc1 is incorrect", 022, fc1.getUMask().toShort()); - assertEquals("Umask for fc2 is incorrect", 022, fc2.getUMask().toShort()); + assertEquals(022, fc1.getUMask().toShort(), "Umask for fc1 is incorrect"); + assertEquals(022, fc2.getUMask().toShort(), "Umask for fc2 is incorrect"); // Till a user explicitly calls FileContext.setUMask(), the updates through // configuration should be reflected.. conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "011"); - assertEquals("Umask for fc1 is incorrect", 011, fc1.getUMask().toShort()); - assertEquals("Umask for fc2 is incorrect", 011, fc2.getUMask().toShort()); + assertEquals(011, fc1.getUMask().toShort(), "Umask for fc1 is incorrect"); + assertEquals(011, fc2.getUMask().toShort(), "Umask for fc2 is incorrect"); // Stop reflecting the conf update for specific FileContexts, once an // explicit setUMask is done. conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "066"); fc1.setUMask(FsPermission.createImmutable((short) 00033)); - assertEquals("Umask for fc1 is incorrect", 033, fc1.getUMask().toShort()); - assertEquals("Umask for fc2 is incorrect", 066, fc2.getUMask().toShort()); + assertEquals(033, fc1.getUMask().toShort(), "Umask for fc1 is incorrect"); + assertEquals(066, fc2.getUMask().toShort(), "Umask for fc2 is incorrect"); conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077"); fc2.setUMask(FsPermission.createImmutable((short) 00044)); - assertEquals("Umask for fc1 is incorrect", 033, fc1.getUMask().toShort()); - assertEquals("Umask for fc2 is incorrect", 044, fc2.getUMask().toShort()); + assertEquals(033, fc1.getUMask().toShort(), "Umask for fc1 is incorrect"); + assertEquals(044, fc2.getUMask().toShort(), "Umask for fc2 is incorrect"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java index 40db1fdda2130..ceeee537dee95 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java @@ -20,16 +20,16 @@ import java.io.IOException; import java.util.Set; -import org.junit.Assert; +import org.junit.jupiter.api.Assertions; import org.apache.hadoop.util.ShutdownHookManager; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import static org.apache.hadoop.fs.FileContextTestHelper.*; /** - * Tests {@link FileContext.#deleteOnExit(Path)} functionality. + * Tests {@link FileContext#deleteOnExit(Path)} functionality. */ public class TestFileContextDeleteOnExit { private static int blockSize = 1024; @@ -38,23 +38,23 @@ public class TestFileContextDeleteOnExit { private final FileContextTestHelper helper = new FileContextTestHelper(); private FileContext fc; - @Before + @BeforeEach public void setup() throws IOException { fc = FileContext.getLocalFSFileContext(); } - @After + @AfterEach public void tearDown() throws IOException { fc.delete(helper.getTestRootPath(fc), true); } private void checkDeleteOnExitData(int size, FileContext fc, Path... 
paths) { - Assert.assertEquals(size, FileContext.DELETE_ON_EXIT.size()); + Assertions.assertEquals(size, FileContext.DELETE_ON_EXIT.size()); Set set = FileContext.DELETE_ON_EXIT.get(fc); - Assert.assertEquals(paths.length, (set == null ? 0 : set.size())); + Assertions.assertEquals(paths.length, (set == null ? 0 : set.size())); for (Path path : paths) { - Assert.assertTrue(set.contains(path)); + Assertions.assertTrue(set.contains(path)); } } @@ -67,7 +67,7 @@ public void testDeleteOnExit() throws Exception { checkDeleteOnExitData(1, fc, file1); // Ensure shutdown hook is added - Assert.assertTrue(ShutdownHookManager.get().hasShutdownHook(FileContext.FINALIZER)); + Assertions.assertTrue(ShutdownHookManager.get().hasShutdownHook(FileContext.FINALIZER)); Path file2 = helper.getTestRootPath(fc, "dir1/file2"); createFile(fc, file2, numBlocks, blockSize); @@ -83,8 +83,8 @@ public void testDeleteOnExit() throws Exception { // paths are cleaned up FileContext.FINALIZER.run(); checkDeleteOnExitData(0, fc, new Path[0]); - Assert.assertFalse(exists(fc, file1)); - Assert.assertFalse(exists(fc, file2)); - Assert.assertFalse(exists(fc, dir)); + Assertions.assertFalse(exists(fc, file1)); + Assertions.assertFalse(exists(fc, file2)); + Assertions.assertFalse(exists(fc, dir)); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java index 2919de20bffd9..cbb618d98b4e1 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java @@ -24,9 +24,10 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; /** * Tests resolution of AbstractFileSystems for a given path with symlinks. 
@@ -42,12 +43,13 @@ public class TestFileContextResolveAfs { private FileContext fc; private FileSystem localFs; - @Before + @BeforeEach public void setup() throws IOException { fc = FileContext.getFileContext(); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testFileContextResolveAfs() throws IOException { Configuration conf = new Configuration(); localFs = FileSystem.get(conf); @@ -60,7 +62,7 @@ public void testFileContextResolveAfs() throws IOException { fc.createSymlink(localPath, linkPath, true); Set afsList = fc.resolveAbstractFileSystems(linkPath); - Assert.assertEquals(1, afsList.size()); + Assertions.assertEquals(1, afsList.size()); localFs.delete(linkPath, true); localFs.delete(localPath, true); localFs.close(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java index 61a688ea4ee8b..6c74ca7c48dc4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.fs; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -32,7 +32,7 @@ import java.util.Collections; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; @@ -85,8 +85,8 @@ public void testFileStatusWritable() throws Exception { int iterator = 0; for (FileStatus fs : tests) { dest.readFields(in); - assertEquals("Different FileStatuses in iteration " + iterator, - dest, fs); + assertEquals( + dest, fs, "Different FileStatuses in iteration " + iterator); iterator++; } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java index 2b8be39193a03..d792a49554b3c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java @@ -18,8 +18,8 @@ package org.apache.hadoop.fs; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.io.IOException; import java.net.URI; @@ -29,8 +29,8 @@ import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.NetUtilsTestResolver; import org.apache.hadoop.util.Progressable; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; public class TestFileSystemCanonicalization { static String[] authorities = { @@ -44,7 +44,7 @@ public class TestFileSystemCanonicalization { }; - @BeforeClass + @BeforeAll public static void initialize() throws Exception { NetUtilsTestResolver.install(); } @@ -288,7 +288,7 @@ void verifyCheckPath(FileSystem fs, String path, boolean shouldPass) { } assertEquals(pathAuthority, fqPath.toUri().getAuthority()); } else { - assertNotNull("did not fail", e); + assertNotNull(e, 
"did not fail"); assertEquals("Wrong FS: "+rawPath+", expected: "+fs.getUri(), e.getMessage()); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java index 10ad8a14487ef..ff22b61abde98 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java @@ -27,11 +27,11 @@ import java.net.URL; import java.util.ServiceConfigurationError; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.apache.hadoop.test.LambdaTestUtils.intercept; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; /** * Tests related to filesystem creation and lifecycle. diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java index 5710049afb104..0245a19c4361d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java @@ -21,9 +21,9 @@ import org.apache.commons.lang3.RandomUtils; import org.apache.hadoop.fs.StorageStatistics.LongStatistic; -import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; import org.junit.Rule; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.junit.rules.Timeout; import org.slf4j.Logger; @@ -32,9 +32,9 @@ import java.util.Iterator; import java.util.concurrent.TimeUnit; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; /** * This tests basic operations of {@link FileSystemStorageStatistics} class. 
@@ -65,7 +65,7 @@ public class TestFileSystemStorageStatistics { @Rule public final Timeout globalTimeout = new Timeout(10, TimeUnit.SECONDS); - @Before + @BeforeEach public void setup() { statistics.incrementBytesRead(RandomUtils.nextInt(0, 100)); statistics.incrementBytesWritten(RandomUtils.nextInt(0, 100)); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java index 0372537cb3475..eada1425d95d9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java @@ -18,7 +18,7 @@ package org.apache.hadoop.fs; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.*; import java.io.IOException; @@ -28,7 +28,7 @@ import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java index 177223dc08254..7c162d2140c20 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java @@ -19,13 +19,20 @@ import static org.apache.hadoop.test.LambdaTestUtils.intercept; import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNotSame; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assumptions.assumeTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -66,13 +73,12 @@ import org.apache.tools.tar.TarEntry; import org.apache.tools.tar.TarOutputStream; -import org.assertj.core.api.Assertions; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; import org.junit.Ignore; import org.junit.Rule; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import 
org.junit.jupiter.api.Timeout; import org.junit.rules.TemporaryFolder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -139,7 +145,7 @@ public class TestFileUtil { * file: part-r-00000, contents: "foo" * file: part-r-00001, contents: "bar" */ - @Before + @BeforeEach public void setup() throws IOException { del = testFolder.newFolder("del"); tmp = testFolder.newFolder("tmp"); @@ -176,7 +182,7 @@ public void setup() throws IOException { // create a symlink to dir File linkDir = new File(del, "tmpDir"); FileUtil.symLink(tmp.toString(), linkDir.toString()); - Assert.assertEquals(5, Objects.requireNonNull(del.listFiles()).length); + assertEquals(5, Objects.requireNonNull(del.listFiles()).length); // create files in partitioned directories createFile(partitioned, "part-r-00000", "foo"); @@ -186,7 +192,7 @@ public void setup() throws IOException { FileUtil.symLink(del.toString(), dir1.toString() + "/cycle"); } - @After + @AfterEach public void tearDown() throws IOException { testFolder.delete(); } @@ -209,62 +215,65 @@ private File createFile(File directory, String name, String contents) return newFile; } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testListFiles() throws IOException { //Test existing files case File[] files = FileUtil.listFiles(partitioned); - Assert.assertEquals(2, files.length); + assertEquals(2, files.length); //Test existing directory with no files case File newDir = new File(tmp.getPath(),"test"); Verify.mkdir(newDir); - Assert.assertTrue("Failed to create test dir", newDir.exists()); + assertTrue(newDir.exists(), "Failed to create test dir"); files = FileUtil.listFiles(newDir); - Assert.assertEquals(0, files.length); + assertEquals(0, files.length); assertTrue(newDir.delete()); - Assert.assertFalse("Failed to delete test dir", newDir.exists()); + assertFalse(newDir.exists(), "Failed to delete test dir"); //Test non-existing directory case, this throws //IOException try { files = FileUtil.listFiles(newDir); - Assert.fail("IOException expected on listFiles() for non-existent dir " + fail("IOException expected on listFiles() for non-existent dir " + newDir.toString()); } catch(IOException ioe) { //Expected an IOException } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testListAPI() throws IOException { //Test existing files case String[] files = FileUtil.list(partitioned); - Assert.assertEquals("Unexpected number of pre-existing files", 2, files.length); + assertEquals(2, files.length, "Unexpected number of pre-existing files"); //Test existing directory with no files case File newDir = new File(tmp.getPath(),"test"); Verify.mkdir(newDir); - Assert.assertTrue("Failed to create test dir", newDir.exists()); + assertTrue(newDir.exists(), "Failed to create test dir"); files = FileUtil.list(newDir); - Assert.assertEquals("New directory unexpectedly contains files", 0, files.length); + assertEquals(0, files.length, "New directory unexpectedly contains files"); assertTrue(newDir.delete()); - Assert.assertFalse("Failed to delete test dir", newDir.exists()); + assertFalse(newDir.exists(), "Failed to delete test dir"); //Test non-existing directory case, this throws //IOException try { files = FileUtil.list(newDir); - Assert.fail("IOException expected on list() for non-existent dir " + fail("IOException expected on list() for non-existent dir " + newDir.toString()); } catch(IOException ioe) { //Expected an IOException } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testFullyDelete() throws IOException 
{ boolean ret = FileUtil.fullyDelete(del); - Assert.assertTrue(ret); + assertTrue(ret); Verify.notExists(del); validateTmpDir(); } @@ -275,14 +284,15 @@ public void testFullyDelete() throws IOException { * (b) symlink to dir only and not the dir pointed to by symlink. * @throws IOException */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testFullyDeleteSymlinks() throws IOException { File link = new File(del, LINK); assertDelListLength(5); // Since tmpDir is symlink to tmp, fullyDelete(tmpDir) should not // delete contents of tmp. See setupDirs for details. boolean ret = FileUtil.fullyDelete(link); - Assert.assertTrue(ret); + assertTrue(ret); Verify.notExists(link); assertDelListLength(4); validateTmpDir(); @@ -291,7 +301,7 @@ public void testFullyDeleteSymlinks() throws IOException { // Since tmpDir is symlink to tmp, fullyDelete(tmpDir) should not // delete contents of tmp. See setupDirs for details. ret = FileUtil.fullyDelete(linkDir); - Assert.assertTrue(ret); + assertTrue(ret); Verify.notExists(linkDir); assertDelListLength(3); validateTmpDir(); @@ -303,12 +313,13 @@ public void testFullyDeleteSymlinks() throws IOException { * (b) dangling symlink to directory properly * @throws IOException */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testFullyDeleteDanglingSymlinks() throws IOException { // delete the directory tmp to make tmpDir a dangling link to dir tmp and // to make y as a dangling link to file tmp/x boolean ret = FileUtil.fullyDelete(tmp); - Assert.assertTrue(ret); + assertTrue(ret); Verify.notExists(tmp); // dangling symlink to file @@ -317,7 +328,7 @@ public void testFullyDeleteDanglingSymlinks() throws IOException { // Even though 'y' is dangling symlink to file tmp/x, fullyDelete(y) // should delete 'y' properly. ret = FileUtil.fullyDelete(link); - Assert.assertTrue(ret); + assertTrue(ret); assertDelListLength(4); // dangling symlink to directory @@ -325,22 +336,23 @@ public void testFullyDeleteDanglingSymlinks() throws IOException { // Even though tmpDir is dangling symlink to tmp, fullyDelete(tmpDir) should // delete tmpDir properly. 
ret = FileUtil.fullyDelete(linkDir); - Assert.assertTrue(ret); + assertTrue(ret); assertDelListLength(3); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testFullyDeleteContents() throws IOException { boolean ret = FileUtil.fullyDeleteContents(del); - Assert.assertTrue(ret); + assertTrue(ret); Verify.exists(del); - Assert.assertEquals(0, Objects.requireNonNull(del.listFiles()).length); + assertEquals(0, Objects.requireNonNull(del.listFiles()).length); validateTmpDir(); } private void validateTmpDir() { Verify.exists(tmp); - Assert.assertEquals(1, Objects.requireNonNull(tmp.listFiles()).length); + assertEquals(1, Objects.requireNonNull(tmp.listFiles()).length); Verify.exists(new File(tmp, FILE)); } @@ -408,28 +420,29 @@ private void validateAndSetWritablePermissions( grantPermissions(xSubDir); grantPermissions(xSubSubDir); - Assert.assertFalse("The return value should have been false.", ret); - Assert.assertTrue("The file file1 should not have been deleted.", - new File(del, FILE_1_NAME).exists()); + assertFalse(ret, "The return value should have been false."); + assertTrue( + new File(del, FILE_1_NAME).exists(), "The file file1 should not have been deleted."); - Assert.assertEquals( - "The directory xSubDir *should* not have been deleted.", - expectedRevokedPermissionDirsExist, xSubDir.exists()); - Assert.assertEquals("The file file2 *should* not have been deleted.", - expectedRevokedPermissionDirsExist, file2.exists()); - Assert.assertEquals( - "The directory xSubSubDir *should* not have been deleted.", - expectedRevokedPermissionDirsExist, xSubSubDir.exists()); - Assert.assertEquals("The file file22 *should* not have been deleted.", - expectedRevokedPermissionDirsExist, file22.exists()); + assertEquals( - Assert.assertFalse("The directory ySubDir should have been deleted.", - ySubDir.exists()); - Assert.assertFalse("The link zlink should have been deleted.", - zlink.exists()); + expectedRevokedPermissionDirsExist, xSubDir.exists(), "The directory xSubDir *should* not have been deleted."); + assertEquals( + expectedRevokedPermissionDirsExist, file2.exists(), "The file file2 *should* not have been deleted."); + assertEquals( + expectedRevokedPermissionDirsExist, xSubSubDir.exists(), + "The directory xSubSubDir *should* not have been deleted."); + assertEquals( + expectedRevokedPermissionDirsExist, file22.exists(), "The file file22 *should* not have been deleted."); + + assertFalse( + ySubDir.exists(), "The directory ySubDir should have been deleted."); + assertFalse( + zlink.exists(), "The link zlink should have been deleted."); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testFailFullyDelete() throws IOException { // Windows Dir.setWritable(false) does not work for directories assumeNotWindows(); @@ -439,7 +452,8 @@ public void testFailFullyDelete() throws IOException { validateAndSetWritablePermissions(true, ret); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testFailFullyDeleteGrantPermissions() throws IOException { setupDirsAndNonWritablePermissions(); boolean ret = FileUtil.fullyDelete(new MyFile(del), true); @@ -452,7 +466,8 @@ public void testFailFullyDeleteGrantPermissions() throws IOException { * Tests if fullyDelete deletes symlink's content when deleting unremovable dir symlink.
* @throws IOException */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testFailFullyDeleteDirSymlinks() throws IOException { File linkDir = new File(del, "tmpDir"); FileUtil.setWritable(del, false); @@ -460,7 +475,7 @@ public void testFailFullyDeleteDirSymlinks() throws IOException { // delete contents of tmp. See setupDirs for details. boolean ret = FileUtil.fullyDelete(linkDir); // fail symlink deletion - Assert.assertFalse(ret); + assertFalse(ret); Verify.exists(linkDir); assertDelListLength(5); // tmp dir should exist @@ -469,7 +484,7 @@ public void testFailFullyDeleteDirSymlinks() throws IOException { FileUtil.setWritable(del, true); ret = FileUtil.fullyDelete(linkDir); // success symlink deletion - Assert.assertTrue(ret); + assertTrue(ret); Verify.notExists(linkDir); assertDelListLength(4); // tmp dir should exist @@ -482,7 +497,7 @@ public void testFailFullyDeleteDirSymlinks() throws IOException { * @param expectedLength The expected length of the {@link TestFileUtil#del}. */ private void assertDelListLength(int expectedLength) { - Assertions.assertThat(del.list()).describedAs("del list").isNotNull().hasSize(expectedLength); + assertThat(del.list()).describedAs("del list").isNotNull().hasSize(expectedLength); } /** @@ -497,7 +512,7 @@ public static class Verify { * @throws IOException As per {@link File#createNewFile()}. */ public static File createNewFile(File file) throws IOException { - assertTrue("Unable to create new file " + file, file.createNewFile()); + assertTrue(file.createNewFile(), "Unable to create new file " + file); return file; } @@ -508,7 +523,7 @@ public static File createNewFile(File file) throws IOException { * @return The result of {@link File#mkdir()}. */ public static File mkdir(File file) { - assertTrue("Unable to mkdir for " + file, file.mkdir()); + assertTrue(file.mkdir(), "Unable to mkdir for " + file); return file; } @@ -519,7 +534,7 @@ public static File mkdir(File file) { * @return The result of {@link File#mkdirs()}. */ public static File mkdirs(File file) { - assertTrue("Unable to mkdirs for " + file, file.mkdirs()); + assertTrue(file.mkdirs(), "Unable to mkdirs for " + file); return file; } @@ -530,7 +545,7 @@ public static File mkdirs(File file) { * @return The result of {@link File#delete()}. */ public static File delete(File file) { - assertTrue("Unable to delete " + file, file.delete()); + assertTrue(file.delete(), "Unable to delete " + file); return file; } @@ -541,7 +556,7 @@ public static File delete(File file) { * @return The result of {@link File#exists()}. */ public static File exists(File file) { - assertTrue("Expected file " + file + " doesn't exist", file.exists()); + assertTrue(file.exists(), "Expected file " + file + " doesn't exist"); return file; } @@ -553,7 +568,7 @@ public static File exists(File file) { * @return The negation of the result of {@link File#exists()}. 
*/ public static File notExists(File file) { - assertFalse("Expected file " + file + " must not exist", file.exists()); + assertFalse(file.exists(), "Expected file " + file + " must not exist"); return file; } } @@ -619,7 +634,8 @@ public File[] listFiles() { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testFailFullyDeleteContents() throws IOException { // Windows Dir.setWritable(false) does not work for directories assumeNotWindows(); @@ -629,7 +645,8 @@ public void testFailFullyDeleteContents() throws IOException { validateAndSetWritablePermissions(true, ret); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testFailFullyDeleteContentsGrantPermissions() throws IOException { setupDirsAndNonWritablePermissions(); boolean ret = FileUtil.fullyDeleteContents(new MyFile(del), true); @@ -642,13 +659,14 @@ public void testFailFullyDeleteContentsGrantPermissions() throws IOException { * and that directory sizes are not added to the final calculated size * @throws IOException */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testGetDU() throws Exception { long du = FileUtil.getDU(testFolder.getRoot()); // Only two files (in partitioned). Each has 3 characters + system-specific // line separator. final long expected = 2 * (3 + System.getProperty("line.separator").length()); - Assert.assertEquals(expected, du); + assertEquals(expected, du); // target file does not exist: final File doesNotExist = new File(tmp, "QuickBrownFoxJumpsOverTheLazyDog"); @@ -691,7 +709,8 @@ public void testGetDU() throws Exception { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testUnTar() throws Exception { // make a simple tar: final File simpleTar = new File(del, FILE); @@ -718,7 +737,8 @@ public void testUnTar() throws Exception { LambdaTestUtils.intercept(IOException.class, () -> FileUtil.unTar(simpleTar, regularFile)); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testReplaceFile() throws IOException { // src exists, and target does not exist: final File srcFile = Verify.createNewFile(new File(tmp, "src")); @@ -754,7 +774,8 @@ public void testReplaceFile() throws IOException { Verify.exists(obstacle); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testCreateLocalTempFile() throws IOException { final File baseFile = new File(tmp, "base"); File tmp1 = FileUtil.createLocalTempFile(baseFile, "foo", false); @@ -769,7 +790,8 @@ public void testCreateLocalTempFile() throws IOException { assertTrue(!tmp1.exists() && !tmp2.exists()); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testUnZip() throws Exception { // make sa simple zip final File simpleZip = new File(del, FILE); @@ -811,40 +833,41 @@ public void testUnZip() throws Exception { assertTrue(foo6.exists()); assertEquals(12, foo0.length()); // tests whether file foo_0 has executable permissions - assertTrue("file lacks execute permissions", foo0.canExecute()); - assertFalse("file has write permissions", foo0.canWrite()); - assertFalse("file has read permissions", foo0.canRead()); + assertTrue(foo0.canExecute(), "file lacks execute permissions"); + assertFalse(foo0.canWrite(), "file has write permissions"); + assertFalse(foo0.canRead(), "file has read permissions"); // tests whether file foo_1 has writable permissions - assertFalse("file has execute permissions", foo1.canExecute()); - assertTrue("file lacks write permissions", foo1.canWrite()); - assertFalse("file has read permissions", 
foo1.canRead()); + assertFalse(foo1.canExecute(), "file has execute permissions"); + assertTrue(foo1.canWrite(), "file lacks write permissions"); + assertFalse(foo1.canRead(), "file has read permissions"); // tests whether file foo_2 has executable and writable permissions - assertTrue("file lacks execute permissions", foo2.canExecute()); - assertTrue("file lacks write permissions", foo2.canWrite()); - assertFalse("file has read permissions", foo2.canRead()); + assertTrue(foo2.canExecute(), "file lacks execute permissions"); + assertTrue(foo2.canWrite(), "file lacks write permissions"); + assertFalse(foo2.canRead(), "file has read permissions"); // tests whether file foo_3 has readable permissions - assertFalse("file has execute permissions", foo3.canExecute()); - assertFalse("file has write permissions", foo3.canWrite()); - assertTrue("file lacks read permissions", foo3.canRead()); + assertFalse(foo3.canExecute(), "file has execute permissions"); + assertFalse(foo3.canWrite(), "file has write permissions"); + assertTrue(foo3.canRead(), "file lacks read permissions"); // tests whether file foo_4 has readable and executable permissions - assertTrue("file lacks execute permissions", foo4.canExecute()); - assertFalse("file has write permissions", foo4.canWrite()); - assertTrue("file lacks read permissions", foo4.canRead()); + assertTrue(foo4.canExecute(), "file lacks execute permissions"); + assertFalse(foo4.canWrite(), "file has write permissions"); + assertTrue(foo4.canRead(), "file lacks read permissions"); // tests whether file foo_5 has readable and writable permissions - assertFalse("file has execute permissions", foo5.canExecute()); - assertTrue("file lacks write permissions", foo5.canWrite()); - assertTrue("file lacks read permissions", foo5.canRead()); + assertFalse(foo5.canExecute(), "file has execute permissions"); + assertTrue(foo5.canWrite(), "file lacks write permissions"); + assertTrue(foo5.canRead(), "file lacks read permissions"); // tests whether file foo_6 has readable, writable and executable permissions - assertTrue("file lacks execute permissions", foo6.canExecute()); - assertTrue("file lacks write permissions", foo6.canWrite()); - assertTrue("file lacks read permissions", foo6.canRead()); + assertTrue(foo6.canExecute(), "file lacks execute permissions"); + assertTrue(foo6.canWrite(), "file lacks write permissions"); + assertTrue(foo6.canRead(), "file lacks read permissions"); final File regularFile = Verify.createNewFile(new File(tmp, "QuickBrownFoxJumpsOverTheLazyDog")); LambdaTestUtils.intercept(IOException.class, () -> FileUtil.unZip(simpleZip, regularFile)); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testUnZip2() throws IOException { // make a simple zip final File simpleZip = new File(del, FILE); @@ -871,7 +894,8 @@ public void testUnZip2() throws IOException { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) /* * Test method copy(FileSystem srcFS, Path src, File dst, boolean deleteSource, Configuration conf) */ @@ -919,7 +943,8 @@ public void testCopy5() throws IOException { Verify.notExists(partitioned); // should be deleted } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testStat2Paths1() { assertNull(FileUtil.stat2Paths(null)); @@ -939,7 +964,8 @@ public void testStat2Paths1() { assertEquals(paths[1], path2); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testStat2Paths2() { Path defaultPath = new Path("file://default"); Path[] paths = FileUtil.stat2Paths(null, defaultPath); 
@@ -963,7 +989,8 @@ public void testStat2Paths2() { assertEquals(paths[1], path2); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testSymlink() throws Exception { byte[] data = "testSymLink".getBytes(); @@ -979,8 +1006,8 @@ public void testSymlink() throws Exception { FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath()); //ensure that symlink length is correctly reported by Java - Assert.assertEquals(data.length, file.length()); - Assert.assertEquals(data.length, link.length()); + assertEquals(data.length, file.length()); + assertEquals(data.length, link.length()); //ensure that we can read from link. FileInputStream in = new FileInputStream(link); @@ -989,13 +1016,14 @@ public void testSymlink() throws Exception { len++; } in.close(); - Assert.assertEquals(data.length, len); + assertEquals(data.length, len); } /** * Test that rename on a symlink works as expected. */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testSymlinkRenameTo() throws Exception { File file = new File(del, FILE); file.createNewFile(); @@ -1010,7 +1038,7 @@ public void testSymlinkRenameTo() throws Exception { File link2 = new File(del, "_link2"); // Rename the symlink - Assert.assertTrue(link.renameTo(link2)); + assertTrue(link.renameTo(link2)); // Make sure the file still exists // (NOTE: this would fail on Java6 on Windows if we didn't @@ -1024,7 +1052,8 @@ public void testSymlinkRenameTo() throws Exception { /** * Test that deletion of a symlink works as expected. */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testSymlinkDelete() throws Exception { File file = new File(del, FILE); file.createNewFile(); @@ -1045,7 +1074,8 @@ public void testSymlinkDelete() throws Exception { /** * Test that length on a symlink works as expected. 
*/ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testSymlinkLength() throws Exception { byte[] data = "testSymLinkData".getBytes(); @@ -1057,19 +1087,19 @@ public void testSymlinkLength() throws Exception { os.write(data); os.close(); - Assert.assertEquals(0, link.length()); + assertEquals(0, link.length()); // create the symlink FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath()); // ensure that File#length returns the target file and link size - Assert.assertEquals(data.length, file.length()); - Assert.assertEquals(data.length, link.length()); + assertEquals(data.length, file.length()); + assertEquals(data.length, link.length()); Verify.delete(file); Verify.notExists(file); - Assert.assertEquals(0, link.length()); + assertEquals(0, link.length()); Verify.delete(link); Verify.notExists(link); @@ -1089,17 +1119,17 @@ public void testSymlinkWithNullInput() throws IOException { // Create the same symbolic link // The operation should fail and returns 1 int result = FileUtil.symLink(null, null); - Assert.assertEquals(1, result); + assertEquals(1, result); // Create the same symbolic link // The operation should fail and returns 1 result = FileUtil.symLink(file.getAbsolutePath(), null); - Assert.assertEquals(1, result); + assertEquals(1, result); // Create the same symbolic link // The operation should fail and returns 1 result = FileUtil.symLink(null, link.getAbsolutePath()); - Assert.assertEquals(1, result); + assertEquals(1, result); } /** @@ -1118,13 +1148,13 @@ public void testSymlinkFileAlreadyExists() throws IOException { int result1 = FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath()); - Assert.assertEquals(0, result1); + assertEquals(0, result1); // Create the same symbolic link // The operation should fail and returns 1 result1 = FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath()); - Assert.assertEquals(1, result1); + assertEquals(1, result1); } /** @@ -1145,7 +1175,7 @@ public void testSymlinkSameFile() throws IOException { int result = FileUtil.symLink(file.getAbsolutePath(), file.getAbsolutePath()); - Assert.assertEquals(0, result); + assertEquals(0, result); } /** @@ -1166,13 +1196,13 @@ public void testSymlink2DifferentFile() throws IOException { int result = FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath()); - Assert.assertEquals(0, result); + assertEquals(0, result); // The operation should fail and returns 1 result = FileUtil.symLink(fileSecond.getAbsolutePath(), link.getAbsolutePath()); - Assert.assertEquals(1, result); + assertEquals(1, result); } /** @@ -1193,13 +1223,13 @@ public void testSymlink2DifferentLinks() throws IOException { int result = FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath()); - Assert.assertEquals(0, result); + assertEquals(0, result); // The operation should succeed result = FileUtil.symLink(file.getAbsolutePath(), linkSecond.getAbsolutePath()); - Assert.assertEquals(0, result); + assertEquals(0, result); } private void doUntarAndVerify(File tarFile, File untarDir) @@ -1212,24 +1242,25 @@ private void doUntarAndVerify(File tarFile, File untarDir) String parentDir = untarDir.getCanonicalPath() + Path.SEPARATOR + "name"; File testFile = new File(parentDir + Path.SEPARATOR + "version"); Verify.exists(testFile); - Assert.assertTrue(testFile.length() == 0); + assertTrue(testFile.length() == 0); String imageDir = parentDir + Path.SEPARATOR + "image"; testFile = new File(imageDir + Path.SEPARATOR + "fsimage"); Verify.exists(testFile); - 
Assert.assertTrue(testFile.length() == 157); + assertTrue(testFile.length() == 157); String currentDir = parentDir + Path.SEPARATOR + "current"; testFile = new File(currentDir + Path.SEPARATOR + "fsimage"); Verify.exists(testFile); - Assert.assertTrue(testFile.length() == 4331); + assertTrue(testFile.length() == 4331); testFile = new File(currentDir + Path.SEPARATOR + "edits"); Verify.exists(testFile); - Assert.assertTrue(testFile.length() == 1033); + assertTrue(testFile.length() == 1033); testFile = new File(currentDir + Path.SEPARATOR + "fstime"); Verify.exists(testFile); - Assert.assertTrue(testFile.length() == 8); + assertTrue(testFile.length() == 8); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testUntar() throws IOException { String tarGzFileName = System.getProperty("test.cache.data", "target/test/cache") + "/test-untar.tgz"; @@ -1247,7 +1278,8 @@ public void testUntar() throws IOException { * This will test different codepaths on Windows from unix, * but both MUST throw an IOE of some kind. */ - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testUntarMissingFile() throws Throwable { File dataDir = GenericTestUtils.getTestDir(); File tarFile = new File(dataDir, "missing; true"); @@ -1262,7 +1294,8 @@ public void testUntarMissingFile() throws Throwable { * This is how {@code FileUtil.unTar(File, File} * will behave on Windows, */ - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testUntarMissingFileThroughJava() throws Throwable { File dataDir = GenericTestUtils.getTestDir(); File tarFile = new File(dataDir, "missing; true"); @@ -1274,15 +1307,16 @@ public void testUntarMissingFileThroughJava() throws Throwable { FileUtil.unTarUsingJava(tarFile, untarDir, false)); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testCreateJarWithClassPath() throws Exception { // create files expected to match a wildcard List wildcardMatches = Arrays.asList(new File(tmp, "wildcard1.jar"), new File(tmp, "wildcard2.jar"), new File(tmp, "wildcard3.JAR"), new File(tmp, "wildcard4.JAR")); for (File wildcardMatch: wildcardMatches) { - Assert.assertTrue("failure creating file: " + wildcardMatch, - wildcardMatch.createNewFile()); + assertTrue( + wildcardMatch.createNewFile(), "failure creating file: " + wildcardMatch); } // create non-jar files, which we expect to not be included in the classpath @@ -1300,19 +1334,19 @@ public void testCreateJarWithClassPath() throws Exception { String[] jarCp = FileUtil.createJarWithClassPath(inputClassPath + File.pathSeparator + "unexpandedwildcard/*", new Path(tmp.getCanonicalPath()), System.getenv()); String classPathJar = jarCp[0]; - assertNotEquals("Unexpanded wildcard was not placed in extra classpath", jarCp[1].indexOf("unexpanded"), -1); + assertNotEquals(jarCp[1].indexOf("unexpanded"), -1, "Unexpanded wildcard was not placed in extra classpath"); // verify classpath by reading manifest from jar file JarFile jarFile = null; try { jarFile = new JarFile(classPathJar); Manifest jarManifest = jarFile.getManifest(); - Assert.assertNotNull(jarManifest); + assertNotNull(jarManifest); Attributes mainAttributes = jarManifest.getMainAttributes(); - Assert.assertNotNull(mainAttributes); - Assert.assertTrue(mainAttributes.containsKey(Attributes.Name.CLASS_PATH)); + assertNotNull(mainAttributes); + assertTrue(mainAttributes.containsKey(Attributes.Name.CLASS_PATH)); String classPathAttr = mainAttributes.getValue(Attributes.Name.CLASS_PATH); - Assert.assertNotNull(classPathAttr); + 
assertNotNull(classPathAttr); List expectedClassPaths = new ArrayList(); for (String classPath: classPaths) { if (classPath.length() == 0) { @@ -1346,7 +1380,7 @@ public void testCreateJarWithClassPath() throws Exception { List actualClassPaths = Arrays.asList(classPathAttr.split(" ")); Collections.sort(expectedClassPaths); Collections.sort(actualClassPaths); - Assert.assertEquals(expectedClassPaths, actualClassPaths); + assertEquals(expectedClassPaths, actualClassPaths); } finally { if (jarFile != null) { try { @@ -1361,8 +1395,8 @@ public void testCreateJarWithClassPath() throws Exception { @Test public void testGetJarsInDirectory() throws Exception { List jars = FileUtil.getJarsInDirectory("/foo/bar/bogus/"); - assertTrue("no jars should be returned for a bogus path", - jars.isEmpty()); + assertTrue( + jars.isEmpty(), "no jars should be returned for a bogus path"); // create jar files to be returned @@ -1370,7 +1404,7 @@ public void testGetJarsInDirectory() throws Exception { File jar2 = new File(tmp, "wildcard2.JAR"); List matches = Arrays.asList(jar1, jar2); for (File match: matches) { - assertTrue("failure creating file: " + match, match.createNewFile()); + assertTrue(match.createNewFile(), "failure creating file: " + match); } // create non-jar files, which we expect to not be included in the result @@ -1381,12 +1415,12 @@ public void testGetJarsInDirectory() throws Exception { // pass in the directory String directory = tmp.getCanonicalPath(); jars = FileUtil.getJarsInDirectory(directory); - assertEquals("there should be 2 jars", 2, jars.size()); + assertEquals(2, jars.size(), "there should be 2 jars"); for (Path jar: jars) { URL url = jar.toUri().toURL(); - assertTrue("the jar should match either of the jars", - url.equals(jar1.getCanonicalFile().toURI().toURL()) || - url.equals(jar2.getCanonicalFile().toURI().toURL())); + assertTrue( + url.equals(jar1.getCanonicalFile().toURI().toURL()) || + url.equals(jar2.getCanonicalFile().toURI().toURL()), "the jar should match either of the jars"); } } @@ -1468,7 +1502,8 @@ public void testCompareFsDirectories() throws Exception { assertFalse(FileUtil.compareFs(fs1, fs6)); } - @Test(timeout = 8000) + @Test + @Timeout(value = 8) public void testCreateSymbolicLinkUsingJava() throws IOException { final File simpleTar = new File(del, FILE); OutputStream os = new FileOutputStream(simpleTar); @@ -1502,39 +1537,41 @@ public void testCreateSymbolicLinkUsingJava() throws IOException { } } - @Test(expected = IOException.class) + @Test public void testCreateArbitrarySymlinkUsingJava() throws IOException { - final File simpleTar = new File(del, FILE); - OutputStream os = new FileOutputStream(simpleTar); - - File rootDir = new File("tmp"); - try (TarArchiveOutputStream tos = new TarArchiveOutputStream(os)) { - tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU); - - // Create arbitrary dir - File arbitraryDir = new File(rootDir, "arbitrary-dir/"); - Verify.mkdirs(arbitraryDir); - - // We will tar from the tar-root lineage - File tarRoot = new File(rootDir, "tar-root/"); - File symlinkRoot = new File(tarRoot, "dir1/"); - Verify.mkdirs(symlinkRoot); - - // Create Symbolic Link to an arbitrary dir - java.nio.file.Path symLink = Paths.get(symlinkRoot.getPath(), "sl"); - Files.createSymbolicLink(symLink, arbitraryDir.toPath().toAbsolutePath()); - - // Put entries in tar file - putEntriesInTar(tos, tarRoot); - putEntriesInTar(tos, new File(symLink.toFile(), "dir-outside-tar-root/")); - tos.close(); - - // Untar using Java - File untarFile = new 
File(rootDir, "extracted"); - FileUtil.unTarUsingJava(simpleTar, untarFile, false); - } finally { - FileUtils.deleteDirectory(rootDir); - } + assertThrows(IOException.class, () -> { + final File simpleTar = new File(del, FILE); + OutputStream os = new FileOutputStream(simpleTar); + + File rootDir = new File("tmp"); + try (TarArchiveOutputStream tos = new TarArchiveOutputStream(os)) { + tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU); + + // Create arbitrary dir + File arbitraryDir = new File(rootDir, "arbitrary-dir/"); + Verify.mkdirs(arbitraryDir); + + // We will tar from the tar-root lineage + File tarRoot = new File(rootDir, "tar-root/"); + File symlinkRoot = new File(tarRoot, "dir1/"); + Verify.mkdirs(symlinkRoot); + + // Create Symbolic Link to an arbitrary dir + java.nio.file.Path symLink = Paths.get(symlinkRoot.getPath(), "sl"); + Files.createSymbolicLink(symLink, arbitraryDir.toPath().toAbsolutePath()); + + // Put entries in tar file + putEntriesInTar(tos, tarRoot); + putEntriesInTar(tos, new File(symLink.toFile(), "dir-outside-tar-root/")); + tos.close(); + + // Untar using Java + File untarFile = new File(rootDir, "extracted"); + FileUtil.unTarUsingJava(simpleTar, untarFile, false); + } finally { + FileUtils.deleteDirectory(rootDir); + } + }); } private void putEntriesInTar(TarArchiveOutputStream tos, File f) @@ -1578,7 +1615,7 @@ private void putEntriesInTar(TarArchiveOutputStream tos, File f) @Test public void testReadSymlinkWithNullInput() { String result = FileUtil.readLink(null); - Assert.assertEquals("", result); + assertEquals("", result); } /** @@ -1595,7 +1632,7 @@ public void testReadSymlink() throws IOException { FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath()); String result = FileUtil.readLink(link); - Assert.assertEquals(file.getAbsolutePath(), result); + assertEquals(file.getAbsolutePath(), result); } @Test @@ -1626,7 +1663,7 @@ public void testReadSymlinkWithAFileAsInput() throws IOException { File file = new File(del, FILE); String result = FileUtil.readLink(file); - Assert.assertEquals("", result); + assertEquals("", result); Verify.delete(file); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java index 1b42290cedc5e..e746e3aed82f3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java @@ -18,7 +18,7 @@ package org.apache.hadoop.fs; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.*; import java.io.IOException; @@ -36,8 +36,8 @@ import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.DelegationTokenIssuer; import org.apache.hadoop.util.Progressable; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; public class TestFilterFileSystem { @@ -45,7 +45,7 @@ public class TestFilterFileSystem { private static final Logger LOG = FileSystem.LOG; private static final Configuration conf = new Configuration(); - @BeforeClass + @BeforeAll public static void setup() { conf.set("fs.flfs.impl", FilterLocalFileSystem.class.getName()); conf.setBoolean("fs.flfs.impl.disable.cache", true); @@ -179,8 +179,8 @@ public void 
testFilterFileSystem() throws Exception { } } } - assertTrue((errors + " methods were not overridden correctly - see" + - " log"), errors <= 0); + assertTrue(errors <= 0, (errors + " methods were not overridden correctly - see" + + " log")); } @Test @@ -300,10 +300,10 @@ public void testFilterPathCapabilites() throws Exception { flfs.initialize(URI.create("filter:/"), conf); Path src = new Path("/src"); assertFalse( - "hasPathCapability(FS_MULTIPART_UPLOADER) should have failed for " - + flfs, - flfs.hasPathCapability(src, - CommonPathCapabilities.FS_MULTIPART_UPLOADER)); + flfs.hasPathCapability(src, + CommonPathCapabilities.FS_MULTIPART_UPLOADER), + "hasPathCapability(FS_MULTIPART_UPLOADER) should have failed for " + + flfs); } } @@ -325,7 +325,7 @@ private void checkFsConf(FileSystem fs, Configuration conf, int expectDepth) { int depth = 0; while (true) { depth++; - assertFalse("depth "+depth+">"+expectDepth, depth > expectDepth); + assertFalse(depth > expectDepth, "depth "+depth+">"+expectDepth); assertEquals(conf, fs.getConf()); if (!(fs instanceof FilterFileSystem)) { break; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFs.java index 396924810d98e..77794490744c3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFs.java @@ -25,7 +25,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.viewfs.ConfigUtil; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; public class TestFilterFs { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java index 574ed704da277..1d2d348a741e8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java @@ -17,12 +17,12 @@ */ package org.apache.hadoop.fs; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import org.apache.hadoop.fs.Options.ChecksumOpt; import org.apache.hadoop.util.DataChecksum; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestFsOptions { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShell.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShell.java index 67906d526bc8a..bba5dac6fc6bb 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShell.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShell.java @@ -23,7 +23,7 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.ToolRunner; import org.assertj.core.api.Assertions; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; public class TestFsShell { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java index 7556bc75fb27a..319ae0e2d8a5b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java +++
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java @@ -20,10 +20,10 @@ import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.ByteArrayOutputStream; import java.io.File; @@ -34,9 +34,9 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.StringUtils; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -48,7 +48,7 @@ public class TestFsShellCopy { static LocalFileSystem lfs; static Path testRootDir, srcPath, dstPath; - @BeforeClass + @BeforeAll public static void setup() throws Exception { conf = new Configuration(); shell = new FsShell(conf); @@ -62,7 +62,7 @@ public static void setup() throws Exception { dstPath = new Path(testRootDir, "dstFile"); } - @Before + @BeforeEach public void prepFiles() throws Exception { lfs.setVerifyChecksum(true); lfs.setWriteChecksum(true); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellList.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellList.java index 05ad5c23e6542..41ff47def2893 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellList.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellList.java @@ -19,11 +19,12 @@ package org.apache.hadoop.fs; import org.apache.hadoop.conf.Configuration; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; /** * Test FsShell -ls command. @@ -34,7 +35,7 @@ public class TestFsShellList { private static LocalFileSystem lfs; private static Path testRootDir; - @BeforeClass + @BeforeAll public static void setup() throws Exception { conf = new Configuration(); shell = new FsShell(conf); @@ -47,7 +48,7 @@ public static void setup() throws Exception { assertThat(lfs.mkdirs(testRootDir)).isTrue(); } - @AfterClass + @AfterAll public static void teardown() throws Exception { lfs.delete(testRootDir, true); } @@ -76,16 +77,17 @@ public void testList() throws Exception { } /* - UGI params should take effect when we pass. - */ - @Test(expected = IllegalArgumentException.class) + * UGI params should take effect when we pass. 
+ */ + @Test public void testListWithUGI() throws Exception { - FsShell fsShell = new FsShell(new Configuration()); - //Passing Dummy such that it should through IAE - fsShell.getConf() - .set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, - "DUMMYAUTH"); - String[] lsArgv = new String[] {"-ls", testRootDir.toString()}; - fsShell.run(lsArgv); + assertThrows(IllegalArgumentException.class, () -> { + FsShell fsShell = new FsShell(new Configuration()); + //Passing Dummy such that it should through IAE + fsShell.getConf().set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, + "DUMMYAUTH"); + String[] lsArgv = new String[]{"-ls", testRootDir.toString()}; + fsShell.run(lsArgv); + }); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java index 77b2f445a48de..50a32876c33ad 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java @@ -19,9 +19,9 @@ package org.apache.hadoop.fs; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SHELL_MISSING_DEFAULT_FS_WARNING_KEY; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -42,8 +42,9 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Shell; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -59,7 +60,7 @@ public class TestFsShellReturnCode { private static FileSystem fileSys; private static FsShell fsShell; - @BeforeClass + @BeforeAll public static void setup() throws IOException { conf.setClass("fs.file.impl", LocalFileSystemExtn.class, LocalFileSystem.class); fileSys = FileSystem.get(conf); @@ -105,13 +106,13 @@ private void change(int exit, String owner, String group, String...files) FileStatus[] stats = fileSys.globStatus(new Path(files[i])); if (stats != null) { for (int j=0; j < stats.length; j++) { - assertEquals("check owner of " + files[i], - ((owner != null) ? "STUB-"+owner : oldStats[i][j].getOwner()), - stats[j].getOwner() + assertEquals( + ((owner != null) ? "STUB-"+owner : oldStats[i][j].getOwner()) +, stats[j].getOwner(), "check owner of " + files[i] ); - assertEquals("check group of " + files[i], - ((group != null) ? "STUB-"+group : oldStats[i][j].getGroup()), - stats[j].getGroup() + assertEquals( + ((group != null) ? 
"STUB-"+group : oldStats[i][j].getGroup()) +, stats[j].getGroup(), "check group of " + files[i] ); } } @@ -127,7 +128,8 @@ private void change(int exit, String owner, String group, String...files) * * @throws Exception */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testChmod() throws Exception { Path p1 = new Path(TEST_ROOT_DIR, "testChmod/fileExists"); @@ -183,7 +185,8 @@ public void testChmod() throws Exception { * * @throws Exception */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testChown() throws Exception { Path p1 = new Path(TEST_ROOT_DIR, "testChown/fileExists"); @@ -239,7 +242,8 @@ public void testChown() throws Exception { * * @throws Exception */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testChgrp() throws Exception { Path p1 = new Path(TEST_ROOT_DIR, "testChgrp/fileExists"); @@ -284,7 +288,8 @@ public void testChgrp() throws Exception { change(1, null, "admin", f2, f7); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testGetWithInvalidSourcePathShouldNotDisplayNullInConsole() throws Exception { Configuration conf = new Configuration(); @@ -303,20 +308,21 @@ public void testGetWithInvalidSourcePathShouldNotDisplayNullInConsole() args[0] = "-get"; args[1] = new Path(tdir.toUri().getPath(), "/invalidSrc").toString(); args[2] = new Path(tdir.toUri().getPath(), "/invalidDst").toString(); - assertTrue("file exists", !fileSys.exists(new Path(args[1]))); - assertTrue("file exists", !fileSys.exists(new Path(args[2]))); + assertTrue(!fileSys.exists(new Path(args[1])), "file exists"); + assertTrue(!fileSys.exists(new Path(args[2])), "file exists"); int run = shell.run(args); results = bytes.toString(); - assertEquals("Return code should be 1", 1, run); - assertTrue(" Null is coming when source path is invalid. ",!results.contains("get: null")); - assertTrue(" Not displaying the intended message ",results.contains("get: `"+args[1]+"': No such file or directory")); + assertEquals(1, run, "Return code should be 1"); + assertTrue(!results.contains("get: null"), " Null is coming when source path is invalid. 
"); + assertTrue(results.contains("get: `"+args[1]+"': No such file or directory"), " Not displaying the intended message "); } finally { IOUtils.closeStream(out); System.setErr(oldErr); } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testRmWithNonexistentGlob() throws Exception { Configuration conf = new Configuration(); FsShell shell = new FsShell(); @@ -337,7 +343,8 @@ public void testRmWithNonexistentGlob() throws Exception { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testRmForceWithNonexistentGlob() throws Exception { Configuration conf = new Configuration(); FsShell shell = new FsShell(); @@ -356,7 +363,8 @@ public void testRmForceWithNonexistentGlob() throws Exception { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testInvalidDefaultFS() throws Exception { // if default fs doesn't exist or is invalid, but the path provided in // arguments is valid - fsshell should work @@ -379,7 +387,7 @@ public void testInvalidDefaultFS() throws Exception { int run = shell.run(args); results = bytes.toString(); LOG.info("result=" + results); - assertTrue("Return code should be 0", run == 0); + assertTrue(run == 0, "Return code should be 0"); } finally { IOUtils.closeStream(out); System.setErr(oldErr); @@ -387,7 +395,8 @@ public void testInvalidDefaultFS() throws Exception { } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testInterrupt() throws Exception { MyFsShell shell = new MyFsShell(); shell.setConf(new Configuration()); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java index c2bd5b2133d47..e76ed27bb9e58 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java @@ -24,9 +24,9 @@ import org.apache.hadoop.fs.shell.TouchCommands.Touch; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.StringUtils; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -39,7 +39,7 @@ public class TestFsShellTouch { static LocalFileSystem lfs; static Path testRootDir; - @BeforeClass + @BeforeAll public static void setup() throws Exception { Configuration conf = new Configuration(); shell = new FsShell(conf); @@ -51,7 +51,7 @@ public static void setup() throws Exception { lfs.setWorkingDirectory(testRootDir); } - @Before + @BeforeEach public void prepFiles() throws Exception { lfs.setVerifyChecksum(true); lfs.setWriteChecksum(true); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsUrlConnectionPath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsUrlConnectionPath.java index d15c1ac515856..b87e6ab6bf49a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsUrlConnectionPath.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsUrlConnectionPath.java @@ -14,10 +14,10 @@ package org.apache.hadoop.fs; import org.apache.hadoop.conf.Configuration; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.BeforeClass; -import 
org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import java.io.*; import java.net.URL; @@ -43,7 +43,7 @@ public class TestFsUrlConnectionPath { private static final Configuration CONFIGURATION = new Configuration(); - @BeforeClass + @BeforeAll public static void initialize() throws IOException{ write(ABSOLUTE_PATH.substring(5), DATA); write(RELATIVE_PATH.substring(5), DATA); @@ -52,7 +52,7 @@ public static void initialize() throws IOException{ URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory()); } - @AfterClass + @AfterAll public static void cleanup(){ delete(ABSOLUTE_PATH.substring(5)); delete(RELATIVE_PATH.substring(5)); @@ -83,25 +83,25 @@ public static int readStream(String path) throws Exception{ @Test public void testAbsolutePath() throws Exception{ int length = readStream(ABSOLUTE_PATH); - Assert.assertTrue(length > 1); + Assertions.assertTrue(length > 1); } @Test public void testRelativePath() throws Exception{ int length = readStream(RELATIVE_PATH); - Assert.assertTrue(length > 1); + Assertions.assertTrue(length > 1); } @Test public void testAbsolutePathWithSpace() throws Exception{ int length = readStream(ABSOLUTE_PATH_W_ENCODED_SPACE); - Assert.assertTrue(length > 1); + Assertions.assertTrue(length > 1); } @Test public void testRelativePathWithSpace() throws Exception{ int length = readStream(RELATIVE_PATH_W_ENCODED_SPACE); - Assert.assertTrue(length > 1); + Assertions.assertTrue(length > 1); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java index f43480e78df35..4155a787daef7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java @@ -22,10 +22,10 @@ import java.util.Comparator; import java.util.Random; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; @@ -42,7 +42,7 @@ public class TestGetFileBlockLocations { private FileSystem fs; private Random random; - @Before + @BeforeEach public void setUp() throws IOException { conf = new Configuration(); Path rootPath = new Path(TEST_ROOT_DIR); @@ -92,7 +92,7 @@ public int compare(BlockLocation arg0, BlockLocation arg1) { } } - @After + @AfterEach public void tearDown() throws IOException { fs.delete(path, true); fs.close(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java index d696dbfe40f57..454c0a684f3fc 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java @@ -19,26 +19,26 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import 
org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.File; import java.io.IOException; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; public class TestGetSpaceUsed { final static private File DIR = GenericTestUtils.getTestDir("TestGetSpaceUsed"); - @Before + @BeforeEach public void setUp() { FileUtil.fullyDelete(DIR); assertTrue(DIR.mkdirs()); } - @After + @AfterEach public void tearDown() throws IOException { FileUtil.fullyDelete(DIR); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java index 9d75ba0160ba7..23cd59729a321 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java @@ -20,8 +20,8 @@ import java.io.IOException; import java.util.List; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; public class TestGlobExpander { @@ -55,11 +55,11 @@ private void checkExpansionIsIdentical(String filePattern) throws IOException { private void checkExpansion(String filePattern, String... expectedExpansions) throws IOException { List actualExpansions = GlobExpander.expand(filePattern); - assertEquals("Different number of expansions", expectedExpansions.length, - actualExpansions.size()); + assertEquals(expectedExpansions.length, + actualExpansions.size(), "Different number of expansions"); for (int i = 0; i < expectedExpansions.length; i++) { - assertEquals("Expansion of " + filePattern, expectedExpansions[i], - actualExpansions.get(i)); + assertEquals(expectedExpansions[i], + actualExpansions.get(i), "Expansion of " + filePattern); } } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobPattern.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobPattern.java index b409a8f929421..085314f54022a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobPattern.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobPattern.java @@ -18,8 +18,9 @@ package org.apache.hadoop.fs; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +import static org.junit.jupiter.api.Assertions.*; import com.google.re2j.PatternSyntaxException; /** @@ -31,8 +32,8 @@ private void assertMatch(boolean yes, String glob, String...input) { for (String s : input) { boolean result = pattern.matches(s); - assertTrue(glob +" should"+ (yes ? "" : " not") +" match "+ s, - yes ? result : !result); + assertTrue( + yes ? result : !result, glob +" should"+ (yes ? "" : " not") +" match "+ s); } } @@ -45,7 +46,7 @@ private void shouldThrow(String... globs) { e.printStackTrace(); continue; } - assertTrue("glob "+ glob +" should throw", false); + assertTrue(false, "glob "+ glob +" should throw"); } } @@ -72,7 +73,8 @@ private void shouldThrow(String...
globs) { shouldThrow("[", "[[]]", "{", "\\"); } - @Test(timeout=10000) public void testPathologicalPatterns() { + @Test @Timeout(value = 10) + public void testPathologicalPatterns() { String badFilename = "job_1429571161900_4222-1430338332599-tda%2D%2D+******************************+++...%270%27%28Stage-1430338580443-39-2000-SUCCEEDED-production%2Dhigh-1430338340360.jhist"; assertMatch(true, badFilename, badFilename); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java index 26d0361d6a255..16d5b0e7919f2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java @@ -28,8 +28,8 @@ import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.DelegationTokenIssuer; import org.apache.hadoop.util.Progressable; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -277,7 +277,7 @@ static void checkInvalidPath(String s, Configuration conf) { final Path p = new Path(s); try { p.getFileSystem(conf); - Assert.fail(p + " is an invalid path."); + Assertions.fail(p + " is an invalid path."); } catch (IOException e) { // Expected } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java index eccf491cca8e3..938859d2a48b8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java @@ -22,10 +22,10 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Shell; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.File; import java.io.IOException; @@ -34,9 +34,9 @@ import java.util.HashSet; import java.util.Set; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; /** @@ -125,7 +125,7 @@ private void writeVersionToMasterIndexImpl(int version, Path masterIndexPath) th } } - @Before + @BeforeEach public void before() throws Exception { final File rootDirIoFile = new File(rootPath.toUri().getPath()); rootDirIoFile.mkdirs(); @@ -138,7 +138,7 @@ public void before() throws Exception { harFileSystem = createHarFileSystem(conf); } - @After + @AfterEach public void after() throws Exception { // close Har FS: final FileSystem harFS = harFileSystem; @@ -256,11 +256,11 @@ public void testListLocatedStatus() throws Exception { RemoteIterator fileList = hfs.listLocatedStatus(path); while (fileList.hasNext()) { String fileName = fileList.next().getPath().getName(); - assertTrue(fileName + " not in 
expected files list", expectedFileNames.contains(fileName)); + assertTrue(expectedFileNames.contains(fileName), fileName + " not in expected files list"); expectedFileNames.remove(fileName); } - assertEquals("Didn't find all of the expected file names: " + expectedFileNames, - 0, expectedFileNames.size()); + assertEquals( + 0, expectedFileNames.size(), "Didn't find all of the expected file names: " + expectedFileNames); } @Test @@ -273,10 +273,10 @@ public void testMakeQualifiedPath() throws Exception { + harPath.toUri().getPath().toString(); Path path = new Path(harPathWithUserinfo); Path qualifiedPath = path.getFileSystem(conf).makeQualified(path); - assertTrue(String.format( + assertTrue( + qualifiedPath.toString().equals(harPathWithUserinfo), String.format( "The qualified path (%s) did not match the expected path (%s).", - qualifiedPath.toString(), harPathWithUserinfo), - qualifiedPath.toString().equals(harPathWithUserinfo)); + qualifiedPath.toString(), harPathWithUserinfo)); } // ========== Negative: @@ -291,7 +291,7 @@ public void testNegativeInitWithoutIndex() throws Exception { final URI uri = new URI("har://" + harPath.toString()); try { hfs.initialize(uri, new Configuration()); - Assert.fail("Exception expected."); + Assertions.fail("Exception expected."); } catch (IOException ioe) { // ok, expected. } @@ -302,7 +302,7 @@ public void testNegativeGetHarVersionOnNotInitializedFS() throws Exception { final HarFileSystem hfs = new HarFileSystem(localFileSystem); try { int version = hfs.getHarVersion(); - Assert.fail("Exception expected, but got a Har version " + version + "."); + Assertions.fail("Exception expected, but got a Har version " + version + "."); } catch (IOException ioe) { // ok, expected. } @@ -326,7 +326,7 @@ public void testNegativeInitWithAnUnsupportedVersion() throws Exception { final URI uri = new URI("har://" + harPath.toString()); try { hfs.initialize(uri, new Configuration()); - Assert.fail("IOException expected."); + Assertions.fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } @@ -340,28 +340,28 @@ public void testNegativeHarFsModifications() throws Exception { try { harFileSystem.create(fooPath, new FsPermission("+rwx"), true, 1024, (short) 88, 1024, null); - Assert.fail("IOException expected."); + Assertions.fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.setReplication(fooPath, (short) 55); - Assert.fail("IOException expected."); + Assertions.fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.delete(fooPath, true); - Assert.fail("IOException expected."); + Assertions.fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.mkdirs(fooPath, new FsPermission("+rwx")); - Assert.fail("IOException expected."); + Assertions.fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } @@ -369,35 +369,35 @@ public void testNegativeHarFsModifications() throws Exception { final Path indexPath = new Path(harPath, "_index"); try { harFileSystem.copyFromLocalFile(false, indexPath, fooPath); - Assert.fail("IOException expected."); + Assertions.fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.startLocalOutput(fooPath, indexPath); - Assert.fail("IOException expected."); + Assertions.fail("IOException expected."); } catch (IOException ioe) { // ok, expected. 
} try { harFileSystem.completeLocalOutput(fooPath, indexPath); - Assert.fail("IOException expected."); + Assertions.fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.setOwner(fooPath, "user", "group"); - Assert.fail("IOException expected."); + Assertions.fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.setPermission(fooPath, new FsPermission("+x")); - Assert.fail("IOException expected."); + Assertions.fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } @@ -406,7 +406,7 @@ public void testNegativeHarFsModifications() throws Exception { @Test public void testHarFsWithoutAuthority() throws Exception { final URI uri = harFileSystem.getUri(); - Assert.assertNull("har uri authority not null: " + uri, uri.getAuthority()); + Assertions.assertNull(uri.getAuthority(), "har uri authority not null: " + uri); FileContext.getFileContext(uri, conf); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java index 98ae8df891958..a5f1c9c5de703 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java @@ -25,11 +25,11 @@ import java.util.Arrays; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.After; -import static org.junit.Assert.*; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import static org.junit.jupiter.api.Assertions.*; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import static org.apache.hadoop.fs.HardLink.*; @@ -85,7 +85,7 @@ public class TestHardLink { * Assure clean environment for start of testing * @throws IOException */ - @BeforeClass + @BeforeAll public static void setupClean() { //delete source and target directories if they exist FileUtil.fullyDelete(src); @@ -100,7 +100,7 @@ public static void setupClean() { /** * Initialize clean environment for start of each test */ - @Before + @BeforeEach public void setupDirs() throws IOException { //check that we start out with empty top-level test data directory assertFalse(src.exists()); @@ -176,7 +176,7 @@ private void validateTgtMult() throws IOException { assertTrue(fetchFileContents(x3_mult).equals(str3)); } - @After + @AfterEach public void tearDown() throws IOException { setupClean(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java index dce3b956d47ef..8c65dbbd0cb2a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java @@ -25,9 +25,9 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; -import static org.junit.Assert.*; -import org.junit.Test; -import org.junit.BeforeClass; +import static org.junit.jupiter.api.Assertions.*; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.BeforeAll; import org.slf4j.event.Level; /** @@ -74,7 +74,7 @@ protected static void setTestPaths(Path testDir) { FILE3 = new Path(DIR1, "file3"); } - @BeforeClass + 
@BeforeAll public static void testSetUp() throws Exception { fs = FileSystem.getLocal(conf); fs.delete(TEST_DIR, true); @@ -160,18 +160,18 @@ public void testDirectory() throws IOException { itor = fs.listFiles(TEST_DIR, true); stat = itor.next(); assertTrue(stat.isFile()); - assertTrue("Path " + stat.getPath() + " unexpected", - filesToFind.remove(stat.getPath())); + assertTrue( + filesToFind.remove(stat.getPath()), "Path " + stat.getPath() + " unexpected"); stat = itor.next(); assertTrue(stat.isFile()); - assertTrue("Path " + stat.getPath() + " unexpected", - filesToFind.remove(stat.getPath())); + assertTrue( + filesToFind.remove(stat.getPath()), "Path " + stat.getPath() + " unexpected"); stat = itor.next(); assertTrue(stat.isFile()); - assertTrue("Path " + stat.getPath() + " unexpected", - filesToFind.remove(stat.getPath())); + assertTrue( + filesToFind.remove(stat.getPath()), "Path " + stat.getPath() + " unexpected"); assertFalse(itor.hasNext()); assertTrue(filesToFind.isEmpty()); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java index 3693b4f0acde3..b9505f8516fe2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java @@ -33,10 +33,11 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; /** This test LocalDirAllocator works correctly; * Every test case uses different buffer dirs to @@ -107,8 +108,8 @@ private static void rmBufferDirs() throws IOException { private static void validateTempDirCreation(String dir) throws IOException { File result = createTempFile(SMALL_FILE_SIZE); - assertTrue("Checking for " + dir + " in " + result + " - FAILED!", - result.getPath().startsWith(new Path(dir, FILENAME).toUri().getPath())); + assertTrue( + result.getPath().startsWith(new Path(dir, FILENAME).toUri().getPath()), "Checking for " + dir + " in " + result + " - FAILED!"); } private static File createTempFile() throws IOException { @@ -129,7 +130,8 @@ private String buildBufferDir(String dir, int i) { * The second dir exists & is RW * @throws Exception */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void test0() throws Exception { assumeNotWindows(); String dir0 = buildBufferDir(ROOT, 0); @@ -151,7 +153,8 @@ public void test0() throws Exception { * The second dir exists & is RW * @throws Exception */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testROBufferDirAndRWBufferDir() throws Exception { assumeNotWindows(); String dir1 = buildBufferDir(ROOT, 1); @@ -171,7 +174,8 @@ public void testROBufferDirAndRWBufferDir() throws Exception { /** Two buffer dirs. Both do not exist but on a RW disk. * Check if tmp dirs are allocated in a round-robin */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testDirsNotExist() throws Exception { assumeNotWindows(); String dir2 = buildBufferDir(ROOT, 2); @@ -197,7 +201,8 @@ public void testDirsNotExist() throws Exception { * Later disk1 becomes read-only. 
* @throws Exception */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testRWBufferDirBecomesRO() throws Exception { assumeNotWindows(); String dir3 = buildBufferDir(ROOT, 3); @@ -235,7 +240,8 @@ public void testRWBufferDirBecomesRO() throws Exception { * @throws Exception */ static final int TRIALS = 100; - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testCreateManyFiles() throws Exception { assumeNotWindows(); String dir5 = buildBufferDir(ROOT, 5); @@ -278,7 +284,8 @@ public void testCreateManyFiles() throws Exception { * * @throws Exception */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testCreateManyFilesRandom() throws Exception { assumeNotWindows(); final int numDirs = 5; @@ -331,7 +338,8 @@ public void testCreateManyFilesRandom() throws Exception { * directory. With checkAccess true, the directory should not be created. * @throws Exception */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testLocalPathForWriteDirCreation() throws IOException { String dir0 = buildBufferDir(ROOT, 0); String dir1 = buildBufferDir(ROOT, 1); @@ -362,7 +370,8 @@ public void testLocalPathForWriteDirCreation() throws IOException { * Test when mapred.local.dir not configured and called * getLocalPathForWrite */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testShouldNotthrowNPE() throws Exception { Configuration conf1 = new Configuration(); try { @@ -404,7 +413,8 @@ public void testShouldNotthrowNPE() throws Exception { * are mistakenly created from fully qualified path strings. * @throws IOException */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testNoSideEffects() throws IOException { assumeNotWindows(); String dir = buildBufferDir(ROOT, 0); @@ -426,7 +436,8 @@ public void testNoSideEffects() throws IOException { * * @throws IOException */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testGetLocalPathToRead() throws IOException { assumeNotWindows(); String dir = buildBufferDir(ROOT, 0); @@ -451,7 +462,8 @@ public void testGetLocalPathToRead() throws IOException { * * @throws IOException */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testGetAllLocalPathsToRead() throws IOException { assumeNotWindows(); @@ -478,8 +490,8 @@ public void testGetAllLocalPathsToRead() throws IOException { // test #next() while no element to iterate any more: try { Path p = pathIterable.iterator().next(); - assertFalse("NoSuchElementException must be thrown, but returned ["+p - +"] instead.", true); // exception expected + assertFalse(true, "NoSuchElementException must be thrown, but returned ["+p + +"] instead."); // exception expected } catch (NoSuchElementException nsee) { // okay } @@ -499,7 +511,8 @@ public void testGetAllLocalPathsToRead() throws IOException { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testRemoveContext() throws IOException { String dir = buildBufferDir(ROOT, 0); try { @@ -521,7 +534,8 @@ public void testRemoveContext() throws IOException { * * @throws Exception */ - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testGetLocalPathForWriteForInvalidPaths() throws Exception { conf.set(CONTEXT, " "); try { @@ -538,7 +552,8 @@ public void testGetLocalPathForWriteForInvalidPaths() throws Exception { * * @throws Exception */ - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testGetLocalPathForWriteForLessSpace() throws Exception { String dir0 = buildBufferDir(ROOT, 
0); String dir1 = buildBufferDir(ROOT, 1); @@ -552,7 +567,8 @@ public void testGetLocalPathForWriteForLessSpace() throws Exception { /** * Test for HADOOP-18636 LocalDirAllocator cannot recover from directory tree deletion. */ - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testDirectoryRecovery() throws Throwable { String dir0 = buildBufferDir(ROOT, 0); String subdir = dir0 + "/subdir1/subdir2"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java index 79049d3837134..49b131836264f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java @@ -45,18 +45,18 @@ import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.*; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; import org.junit.Rule; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.junit.rules.Timeout; import javax.annotation.Nonnull; @@ -87,7 +87,7 @@ private void cleanupFile(FileSystem fs, Path name) throws IOException { assertTrue(!fs.exists(name)); } - @Before + @BeforeEach public void setup() throws IOException { conf = new Configuration(false); conf.set("fs.file.impl", LocalFileSystem.class.getName()); @@ -95,7 +95,7 @@ public void setup() throws IOException { fileSys.delete(new Path(TEST_ROOT_DIR), true); } - @After + @AfterEach public void after() throws IOException { FileUtil.setWritable(base, true); FileUtil.fullyDelete(base); @@ -248,9 +248,9 @@ public void testCreateFileAndMkdirs() throws IOException { { //check FileStatus and ContentSummary final FileStatus status = fileSys.getFileStatus(test_file); - Assert.assertEquals(fileSize, status.getLen()); + Assertions.assertEquals(fileSize, status.getLen()); final ContentSummary summary = fileSys.getContentSummary(test_dir); - Assert.assertEquals(fileSize, summary.getLength()); + Assertions.assertEquals(fileSize, summary.getLength()); } // creating dir over a file @@ -281,10 +281,10 @@ public void testBasicDelete() throws IOException { assertTrue(fileSys.mkdirs(dir1)); writeFile(fileSys, file1, 1); writeFile(fileSys, file2, 1); - assertFalse("Returned true deleting non-existant path", - fileSys.delete(file3)); - assertTrue("Did not delete file", fileSys.delete(file1)); - assertTrue("Did not delete non-empty dir", fileSys.delete(dir1)); + assertFalse( + fileSys.delete(file3), "Returned true deleting non-existant path"); + assertTrue(fileSys.delete(file1), "Did not delete file"); + assertTrue(fileSys.delete(dir1), "Did not delete non-empty dir"); } @Test @@ 
-318,9 +318,9 @@ public void testListStatusWithColons() throws IOException { File colonFile = new File(TEST_ROOT_DIR, "foo:bar"); colonFile.mkdirs(); FileStatus[] stats = fileSys.listStatus(new Path(TEST_ROOT_DIR)); - assertEquals("Unexpected number of stats", 1, stats.length); - assertEquals("Bad path from stat", colonFile.getAbsolutePath(), - stats[0].getPath().toUri().getPath()); + assertEquals(1, stats.length, "Unexpected number of stats"); + assertEquals(colonFile.getAbsolutePath() +, stats[0].getPath().toUri().getPath(), "Bad path from stat"); } @Test @@ -333,9 +333,9 @@ public void testListStatusReturnConsistentPathOnWindows() throws IOException { File file = new File(dirNoDriveSpec, "foo"); file.mkdirs(); FileStatus[] stats = fileSys.listStatus(new Path(dirNoDriveSpec)); - assertEquals("Unexpected number of stats", 1, stats.length); - assertEquals("Bad path from stat", new Path(file.getPath()).toUri().getPath(), - stats[0].getPath().toUri().getPath()); + assertEquals(1, stats.length, "Unexpected number of stats"); + assertEquals(new Path(file.getPath()).toUri().getPath() +, stats[0].getPath().toUri().getPath(), "Bad path from stat"); } @Test @@ -429,8 +429,8 @@ public void testSetTimes() throws Exception { long newAccTime = 23456000; FileStatus status = fileSys.getFileStatus(path); - assertTrue("check we're actually changing something", newModTime != status.getModificationTime()); - assertTrue("check we're actually changing something", newAccTime != status.getAccessTime()); + assertTrue(newModTime != status.getModificationTime(), "check we're actually changing something"); + assertTrue(newAccTime != status.getAccessTime(), "check we're actually changing something"); fileSys.setTimes(path, newModTime, newAccTime); checkTimesStatus(path, newModTime, newAccTime); @@ -606,8 +606,8 @@ public void testStripFragmentFromPath() throws Exception { // Create test file with fragment FileSystemTestHelper.createFile(fs, pathWithFragment); Path resolved = fs.resolvePath(pathWithFragment); - assertEquals("resolvePath did not strip fragment from Path", pathQualified, - resolved); + assertEquals(pathQualified +, resolved, "resolvePath did not strip fragment from Path"); } @Test @@ -683,8 +683,8 @@ public void testFSOutputStreamBuilder() throws Exception { new byte[(int) (fileSys.getFileStatus(path).getLen())]; input.readFully(0, buffer); input.close(); - Assert.assertArrayEquals("The data be read should equals with the " - + "data written.", contentOrigin, buffer); + Assertions.assertArrayEquals(contentOrigin, buffer, "The data be read should equals with the " + + "data written."); } catch (IOException e) { throw e; } @@ -799,8 +799,8 @@ protected Statistics getFileStatistics() { .stream() .filter(s -> s.getScheme().equals("file")) .collect(Collectors.toList()); - assertEquals("Number of statistics counters for file://", - 1, fileStats.size()); + assertEquals( + 1, fileStats.size(), "Number of statistics counters for file://"); // this should be used for local and rawLocal, as they share the // same schema (although their class is different) return fileStats.get(0); @@ -832,8 +832,8 @@ private void assertWritesCRC(String operation, Path path, final long bytesOut0 = stats.getBytesWritten(); try { callable.call(); - assertEquals("Bytes written in " + operation + "; stats=" + stats, - CRC_SIZE + DATA.length, stats.getBytesWritten() - bytesOut0); + assertEquals( + CRC_SIZE + DATA.length, stats.getBytesWritten() - bytesOut0, "Bytes written in " + operation + "; stats=" + stats); } finally { if (delete) { 
// clean up @@ -862,8 +862,8 @@ public void testCRCwithClassicAPIs() throws Throwable { final long bytesRead0 = stats.getBytesRead(); fileSys.open(file).close(); final long bytesRead1 = stats.getBytesRead(); - assertEquals("Bytes read in open() call with stats " + stats, - CRC_SIZE, bytesRead1 - bytesRead0); + assertEquals( + CRC_SIZE, bytesRead1 - bytesRead0, "Bytes read in open() call with stats " + stats); } /** @@ -974,8 +974,8 @@ public void testReadIncludesCRCwithBuilders() throws Throwable { // now read back the data, again with the builder API final long bytesRead0 = stats.getBytesRead(); fileSys.openFile(file).build().get().close(); - assertEquals("Bytes read in openFile() call with stats " + stats, - CRC_SIZE, stats.getBytesRead() - bytesRead0); + assertEquals( + CRC_SIZE, stats.getBytesRead() - bytesRead0, "Bytes read in openFile() call with stats " + stats); // now write with overwrite = true assertWritesCRC("createFileNonRecursive()", file, diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java index 8e48035d7bd85..87d2cf4eb403d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java @@ -22,7 +22,7 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Shell; import org.assertj.core.api.Assertions; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.event.Level; @@ -33,7 +33,7 @@ import java.util.StringTokenizer; import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; /** * This class tests the local file system via the FileSystem abstraction. 
@@ -235,8 +235,8 @@ public void testSetUmaskInRealTime() throws Exception { assertTrue(localfs.mkdirs(dir)); FsPermission initialPermission = getPermission(localfs, dir); assertEquals( -        "With umask 022 permission should be 755 since the default " + -        "permission is 777", new FsPermission("755"), initialPermission); +        new FsPermission("755"), initialPermission, "With umask 022 permission should be 755 since the default " + +        "permission is 777"); // Modify umask and create a new directory // and check if new umask is applied @@ -248,8 +248,8 @@ public void testSetUmaskInRealTime() throws Exception { "With umask 062 permission should not be 755 since the " + "default permission is 777").isNotEqualTo(finalPermission); assertEquals( -        "With umask 062 we expect 715 since the default permission is 777", -        new FsPermission("715"), finalPermission); +        new FsPermission("715"), finalPermission, +        "With umask 062 we expect 715 since the default permission is 777"); } finally { conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "022"); cleanup(localfs, dir); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocatedFileStatus.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocatedFileStatus.java index 4490f923e2459..a64d960d994dd 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocatedFileStatus.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocatedFileStatus.java @@ -19,7 +19,7 @@ package org.apache.hadoop.fs; import org.apache.hadoop.fs.permission.FsPermission; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.IOException; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java index 4204faaada332..ff241ed14f9d9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java @@ -18,8 +18,9 @@ package org.apache.hadoop.fs; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -40,10 +41,10 @@ import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; /** * Test Hadoop Filesystem Paths. 
@@ -76,7 +77,8 @@ public static String mergeStatuses(FileStatus statuses[]) { return mergeStatuses(paths); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testToString() { toStringTest("/"); toStringTest("/foo"); @@ -109,7 +111,8 @@ private void toStringTest(String pathString) { assertEquals(pathString, new Path(pathString).toString()); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testNormalize() throws URISyntaxException { assertEquals("", new Path(".").toString()); assertEquals("..", new Path("..").toString()); @@ -133,7 +136,8 @@ public void testNormalize() throws URISyntaxException { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testIsAbsolute() { assertTrue(new Path("/").isAbsolute()); assertTrue(new Path("/foo").isAbsolute()); @@ -146,7 +150,8 @@ public void testIsAbsolute() { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testParent() { assertEquals(new Path("/foo"), new Path("/foo/bar").getParent()); assertEquals(new Path("foo"), new Path("foo/bar").getParent()); @@ -157,7 +162,8 @@ public void testParent() { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testChild() { assertEquals(new Path("."), new Path(".", ".")); assertEquals(new Path("/"), new Path("/", ".")); @@ -177,7 +183,8 @@ public void testChild() { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testPathThreeArgContructor() { assertEquals(new Path("foo"), new Path(null, null, "foo")); assertEquals(new Path("scheme:///foo"), new Path("scheme", null, "/foo")); @@ -213,12 +220,14 @@ public void testPathThreeArgContructor() { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testEquals() { assertFalse(new Path("/").equals(new Path("/foo"))); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testDots() { // Test Path(String) assertEquals(new Path("/foo/bar/baz").toString(), "/foo/bar/baz"); @@ -257,7 +266,8 @@ public void testDots() { } /** Test that Windows paths are correctly handled */ - @Test (timeout = 5000) + @Test + @Timeout(value = 5) public void testWindowsPaths() throws URISyntaxException, IOException { assumeWindows(); @@ -268,7 +278,8 @@ public void testWindowsPaths() throws URISyntaxException, IOException { } /** Test invalid paths on Windows are correctly rejected */ - @Test (timeout = 5000) + @Test + @Timeout(value = 5) public void testInvalidWindowsPaths() throws URISyntaxException, IOException { assumeWindows(); @@ -286,20 +297,23 @@ public void testInvalidWindowsPaths() throws URISyntaxException, IOException { } /** Test Path objects created from other Path objects */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testChildParentResolution() throws URISyntaxException, IOException { Path parent = new Path("foo1://bar1/baz1"); Path child = new Path("foo2://bar2/baz2"); assertEquals(child, new Path(parent, child)); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testScheme() throws java.io.IOException { assertEquals("foo:/bar", new Path("foo:/","/bar").toString()); assertEquals("foo://bar/baz", new Path("foo://bar/","/baz").toString()); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testURI() throws URISyntaxException, IOException { URI uri = new URI("file:///bar#baz"); Path path = new Path(uri); @@ -322,18 +336,19 @@ public void testURI() throws URISyntaxException, IOException { } /** Test URIs created from Path objects */ - @Test (timeout = 30000) 
+ @Test + @Timeout(value = 30) public void testPathToUriConversion() throws URISyntaxException, IOException { // Path differs from URI in that it ignores the query part.. - assertEquals("? mark char in to URI", - new URI(null, null, "/foo?bar", null, null), - new Path("/foo?bar").toUri()); - assertEquals("escape slashes chars in to URI", - new URI(null, null, "/foo\"bar", null, null), - new Path("/foo\"bar").toUri()); - assertEquals("spaces in chars to URI", - new URI(null, null, "/foo bar", null, null), - new Path("/foo bar").toUri()); + assertEquals( + new URI(null, null, "/foo?bar", null, null) +, new Path("/foo?bar").toUri(), "? mark char in to URI"); + assertEquals( + new URI(null, null, "/foo\"bar", null, null) +, new Path("/foo\"bar").toUri(), "escape slashes chars in to URI"); + assertEquals( + new URI(null, null, "/foo bar", null, null) +, new Path("/foo bar").toUri(), "spaces in chars to URI"); // therefore "foo?bar" is a valid Path, so a URI created from a Path // has path "foo?bar" where in a straight URI the path part is just "foo" assertEquals("/foo?bar", @@ -350,7 +365,8 @@ public void testPathToUriConversion() throws URISyntaxException, IOException { } /** Test reserved characters in URIs (and therefore Paths) */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testReservedCharacters() throws URISyntaxException, IOException { // URI encodes the path assertEquals("/foo%20bar", @@ -380,7 +396,8 @@ public void testReservedCharacters() throws URISyntaxException, IOException { toURL().getPath()); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testMakeQualified() throws URISyntaxException { URI defaultUri = new URI("hdfs://host1/dir1"); URI wd = new URI("hdfs://host2/dir2"); @@ -394,7 +411,8 @@ public void testMakeQualified() throws URISyntaxException { new Path("file").makeQualified(defaultUri, new Path(wd))); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testGetName() { assertEquals("", new Path("/").getName()); assertEquals("foo", new Path("foo").getName()); @@ -404,7 +422,8 @@ public void testGetName() { assertEquals("bar", new Path("hdfs://host/foo/bar").getName()); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testAvroReflect() throws Exception { // Avro expects explicitely stated, trusted packages used for (de-)serialization System.setProperty(ConfigConstants.CONFIG_AVRO_SERIALIZABLE_PACKAGES, "org.apache.hadoop.fs"); @@ -413,7 +432,8 @@ public void testAvroReflect() throws Exception { "{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.fs.Path\"}"); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testGlobEscapeStatus() throws Exception { // This test is not meaningful on Windows where * is disallowed in file name. 
assumeNotWindows(); @@ -472,7 +492,8 @@ public void testGlobEscapeStatus() throws Exception { assertEquals(new Path(testRoot, "*/f"), stats[0].getPath()); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testMergePaths() { assertEquals(new Path("/foo/bar"), Path.mergePaths(new Path("/foo"), @@ -506,7 +527,8 @@ public void testMergePaths() { new Path("file://fileauthority/bar"))); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testIsWindowsAbsolutePath() { assumeWindows(); assertTrue(Path.isWindowsAbsolutePath("C:\\test", false)); @@ -518,7 +540,8 @@ public void testIsWindowsAbsolutePath() { assertFalse(Path.isWindowsAbsolutePath("/C:test", true)); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testSerDeser() throws Throwable { Path source = new Path("hdfs://localhost:4040/scratch"); ByteArrayOutputStream baos = new ByteArrayOutputStream(256); @@ -528,15 +551,16 @@ public void testSerDeser() throws Throwable { ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); try (ObjectInputStream ois = new ObjectInputStream(bais)) { Path deser = (Path) ois.readObject(); - Assert.assertEquals(source, deser); + Assertions.assertEquals(source, deser); } } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testSuffixFromRoot() { Path root = new Path("/"); - Assert.assertNull(root.getParent()); - Assert.assertEquals(new Path("/bar"), root.suffix("bar")); + Assertions.assertNull(root.getParent()); + Assertions.assertEquals(new Path("/bar"), root.suffix("bar")); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java index e3e20020e3242..f47cbeb412409 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.fs; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestQuotaUsage { @@ -27,9 +27,9 @@ public class TestQuotaUsage { @Test public void testConstructorEmpty() { QuotaUsage quotaUsage = new QuotaUsage.Builder().build(); - assertEquals("getQuota", -1, quotaUsage.getQuota()); - assertEquals("getSpaceConsumed", 0, quotaUsage.getSpaceConsumed()); - assertEquals("getSpaceQuota", -1, quotaUsage.getSpaceQuota()); + assertEquals(-1, quotaUsage.getQuota(), "getQuota"); + assertEquals(0, quotaUsage.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(-1, quotaUsage.getSpaceQuota(), "getSpaceQuota"); } // check the full constructor with quota information @@ -43,12 +43,12 @@ public void testConstructorWithQuota() { QuotaUsage quotaUsage = new QuotaUsage.Builder(). fileAndDirectoryCount(fileAndDirCount).quota(quota). 
spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build(); - assertEquals("getFileAndDirectoryCount", fileAndDirCount, - quotaUsage.getFileAndDirectoryCount()); - assertEquals("getQuota", quota, quotaUsage.getQuota()); - assertEquals("getSpaceConsumed", spaceConsumed, - quotaUsage.getSpaceConsumed()); - assertEquals("getSpaceQuota", spaceQuota, quotaUsage.getSpaceQuota()); + assertEquals(fileAndDirCount +, quotaUsage.getFileAndDirectoryCount(), "getFileAndDirectoryCount"); + assertEquals(quota, quotaUsage.getQuota(), "getQuota"); + assertEquals(spaceConsumed +, quotaUsage.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(spaceQuota, quotaUsage.getSpaceQuota(), "getSpaceQuota"); } // check the constructor with quota information @@ -59,12 +59,12 @@ public void testConstructorNoQuota() { QuotaUsage quotaUsage = new QuotaUsage.Builder(). fileAndDirectoryCount(fileAndDirCount). spaceConsumed(spaceConsumed).build(); - assertEquals("getFileAndDirectoryCount", fileAndDirCount, - quotaUsage.getFileAndDirectoryCount()); - assertEquals("getQuota", -1, quotaUsage.getQuota()); - assertEquals("getSpaceConsumed", spaceConsumed, - quotaUsage.getSpaceConsumed()); - assertEquals("getSpaceQuota", -1, quotaUsage.getSpaceQuota()); + assertEquals(fileAndDirCount +, quotaUsage.getFileAndDirectoryCount(), "getFileAndDirectoryCount"); + assertEquals(-1, quotaUsage.getQuota(), "getQuota"); + assertEquals(spaceConsumed +, quotaUsage.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(-1, quotaUsage.getSpaceQuota(), "getSpaceQuota"); } // check the header diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java index 301bf046cd257..a2a1d55ee7bed 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.fs; -import org.junit.BeforeClass; +import org.junit.jupiter.api.BeforeAll; import java.io.IOException; @@ -25,7 +25,7 @@ public class TestSymlinkLocalFSFileContext extends TestSymlinkLocalFS { - @BeforeClass + @BeforeAll public static void testSetup() throws Exception { FileContext context = FileContext.getLocalFSFileContext(); wrapper = new FileContextTestWrapper(context); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java index 98449493fa5e1..cf5dd658a36b2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java @@ -22,17 +22,18 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Options.Rename; -import org.junit.BeforeClass; +import org.junit.jupiter.api.BeforeAll; import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; 
public class TestSymlinkLocalFSFileSystem extends TestSymlinkLocalFS { - @BeforeClass + @BeforeAll public static void testSetup() throws Exception { FileSystem filesystem = FileSystem.getLocal(new Configuration()); wrapper = new FileSystemTestWrapper(filesystem); @@ -41,24 +42,28 @@ public static void testSetup() throws Exception { @Ignore("RawLocalFileSystem#mkdir does not treat existence of directory" + " as an error") @Override - @Test(timeout=10000) + @Test + @Timeout(value = 10) public void testMkdirExistingLink() throws IOException {} @Ignore("FileSystem#create defaults to creating parents," + " throwing an IOException instead of FileNotFoundException") @Override - @Test(timeout=10000) + @Test + @Timeout(value = 10) public void testCreateFileViaDanglingLinkParent() throws IOException {} @Ignore("RawLocalFileSystem does not throw an exception if the path" + " already exists") @Override - @Test(timeout=10000) + @Test + @Timeout(value = 10) public void testCreateFileDirExistingLink() throws IOException {} @Ignore("ChecksumFileSystem does not support append") @Override - @Test(timeout=10000) + @Test + @Timeout(value = 10) public void testAccessFileViaInterSymlinkAbsTarget() throws IOException {} @Override @@ -68,7 +73,8 @@ public void testRenameFileWithDestParentSymlink() throws IOException { } @Override - @Test(timeout=10000) + @Test + @Timeout(value = 10) /** Rename a symlink to itself */ public void testRenameSymlinkToItself() throws IOException { Path file = new Path(testBaseDir1(), "file"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java index 30c9a31fda4ea..f236a7caade3e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java @@ -34,15 +34,15 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Supplier; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.TrashPolicyDefault.Emptier; @@ -60,7 +60,7 @@ public class TestTrash { private final static Path TEST_DIR = new Path(BASE_PATH.getAbsolutePath()); - @Before + @BeforeEach public void setUp() throws IOException { // ensure each test initiates a FileSystem instance, // avoid getting an old instance from cache. 
@@ -78,7 +78,7 @@ protected static Path mkdir(FileSystem fs, Path p) throws IOException { protected static void checkTrash(FileSystem trashFs, Path trashRoot, Path path) throws IOException { Path p = Path.mergePaths(trashRoot, path); - assertTrue("Could not find file in trash: "+ p , trashFs.exists(p)); + assertTrue( trashFs.exists(p), "Could not find file in trash: "+ p); } // counts how many instances of the file are in the Trash @@ -169,7 +169,7 @@ public static void trashShell(final Configuration conf, final Path base, int val = -1; val = shell.run(args); - assertEquals("Expunge should return zero", 0, val); + assertEquals(0, val, "Expunge should return zero"); } // Verify that we succeed in removing the file we created. @@ -181,7 +181,7 @@ public static void trashShell(final Configuration conf, final Path base, int val = -1; val = shell.run(args); - assertEquals("Remove should return zero", 0, val); + assertEquals(0, val, "Remove should return zero"); checkTrash(trashRootFs, trashRoot, fs.makeQualified(myFile)); } @@ -197,7 +197,7 @@ public static void trashShell(final Configuration conf, final Path base, int val = -1; val = shell.run(args); - assertEquals("Remove should return zero", 0, val); + assertEquals(0, val, "Remove should return zero"); } // Verify that we can recreate the file @@ -212,7 +212,7 @@ public static void trashShell(final Configuration conf, final Path base, int val = -1; val = shell.run(args); - assertEquals("Recursive Remove should return zero", 0, val); + assertEquals(0, val, "Recursive Remove should return zero"); } // recreate directory @@ -226,7 +226,7 @@ public static void trashShell(final Configuration conf, final Path base, int val = -1; val = shell.run(args); - assertEquals("Recursive Remove should return zero", 0, val); + assertEquals(0, val, "Recursive Remove should return zero"); } // Check that we can delete a file from the trash @@ -237,7 +237,7 @@ public static void trashShell(final Configuration conf, final Path base, val = shell.run(new String[] {"-rm", toErase.toString()}); - assertEquals("Recursive Remove should return zero", 0, val); + assertEquals(0, val, "Recursive Remove should return zero"); checkNotInTrash(trashRootFs, trashRoot, toErase.toString()); checkNotInTrash(trashRootFs, trashRoot, toErase.toString()+".1"); } @@ -249,7 +249,7 @@ public static void trashShell(final Configuration conf, final Path base, int val = -1; val = shell.run(args); - assertEquals("Expunge should return zero", 0, val); + assertEquals(0, val, "Expunge should return zero"); } // verify that after expunging the Trash, it really goes away @@ -268,7 +268,7 @@ public static void trashShell(final Configuration conf, final Path base, int val = -1; val = shell.run(args); - assertEquals("Remove should return zero", 0, val); + assertEquals(0, val, "Remove should return zero"); checkTrash(trashRootFs, trashRoot, myFile); args = new String[2]; @@ -277,7 +277,7 @@ public static void trashShell(final Configuration conf, final Path base, val = -1; val = shell.run(args); - assertEquals("Recursive Remove should return zero", 0, val); + assertEquals(0, val, "Recursive Remove should return zero"); checkTrash(trashRootFs, trashRoot, myPath); } @@ -289,7 +289,7 @@ public static void trashShell(final Configuration conf, final Path base, int val = -1; val = shell.run(args); - assertEquals("Recursive Remove should return exit code 1", 1, val); + assertEquals(1, val, "Recursive Remove should return exit code 1"); assertTrue(trashRootFs.exists(trashRoot)); } @@ -307,17 +307,17 @@ public 
static void trashShell(final Configuration conf, final Path base, args[2] = myFile.toString(); int val = -1; // Clear out trash - assertEquals("-expunge failed", - 0, shell.run(new String[] {"-expunge" })); + assertEquals( + 0, shell.run(new String[] {"-expunge" }), "-expunge failed"); val = shell.run(args); - assertFalse("Expected TrashRoot (" + trashRoot + + assertFalse( + trashRootFs.exists(trashRoot), "Expected TrashRoot (" + trashRoot + ") to exist in file system:" - + trashRootFs.getUri(), - trashRootFs.exists(trashRoot)); // No new Current should be created + + trashRootFs.getUri()); // No new Current should be created assertFalse(fs.exists(myFile)); - assertEquals("Remove with skipTrash should return zero", 0, val); + assertEquals(0, val, "Remove with skipTrash should return zero"); } // recreate directory and file @@ -340,15 +340,15 @@ public static void trashShell(final Configuration conf, final Path base, assertFalse(trashRootFs.exists(trashRoot)); // No new Current should be created assertFalse(fs.exists(myPath)); assertFalse(fs.exists(myFile)); - assertEquals("Remove with skipTrash should return zero", 0, val); + assertEquals(0, val, "Remove with skipTrash should return zero"); } // deleting same file multiple times { int val = -1; mkdir(fs, myPath); - assertEquals("Expunge should return zero", - 0, shell.run(new String[] {"-expunge" })); + assertEquals( + 0, shell.run(new String[] {"-expunge" }), "Expunge should return zero"); // create a file in that directory. @@ -363,7 +363,7 @@ public static void trashShell(final Configuration conf, final Path base, // delete file val = shell.run(args); - assertEquals("Remove should return zero", 0, val); + assertEquals(0, val, "Remove should return zero"); } // current trash directory Path trashDir = Path.mergePaths(new Path(trashRoot.toUri().getPath()), @@ -377,7 +377,7 @@ public static void trashShell(final Configuration conf, final Path base, int count = countSameDeletedFiles(fs, trashDir, myFile); System.out.println("counted " + count + " files " + myFile.getName() + "* in " + trashDir); - assertEquals("Count should have returned 10", num_runs, count); + assertEquals(num_runs, count, "Count should have returned 10"); } //Verify skipTrash option is suggested when rm fails due to its absence @@ -397,11 +397,11 @@ public static void trashShell(final Configuration conf, final Path base, String output = byteStream.toString(); System.setOut(stdout); System.setErr(stderr); - assertTrue("skipTrash wasn't suggested as remedy to failed rm command" + - " or we deleted / even though we could not get server defaults", - output.indexOf("Consider using -skipTrash option") != -1 || + assertTrue( + output.indexOf("Consider using -skipTrash option") != -1 || output.indexOf("Failed to determine server " - + "trash configuration") != -1); + + "trash configuration") != -1, "skipTrash wasn't suggested as remedy to failed rm command" + + " or we deleted / even though we could not get server defaults"); } // Verify old checkpoint format is recognized @@ -423,11 +423,11 @@ public static void trashShell(final Configuration conf, final Path base, int rc = -1; rc = shell.run(new String[] {"-expunge" }); - assertEquals("Expunge should return zero", 0, rc); - assertFalse("old checkpoint format not recognized", - trashRootFs.exists(dirToDelete)); - assertTrue("old checkpoint format directory should not be removed", - trashRootFs.exists(dirToKeep)); + assertEquals(0, rc, "Expunge should return zero"); + assertFalse( + trashRootFs.exists(dirToDelete), "old 
checkpoint format not recognized"); + assertTrue( + trashRootFs.exists(dirToKeep), "old checkpoint format directory should not be removed"); } // Verify expunge -immediate removes all checkpoints and current folder @@ -451,15 +451,15 @@ public static void trashShell(final Configuration conf, final Path base, int rc = -1; rc = shell.run(new String[] {"-expunge", "-immediate"}); - assertEquals("Expunge immediate should return zero", 0, rc); - assertFalse("Old checkpoint should be removed", - trashRootFs.exists(oldCheckpoint)); - assertFalse("Recent checkpoint should be removed", - trashRootFs.exists(recentCheckpoint)); - assertFalse("Current folder should be removed", - trashRootFs.exists(currentFolder)); - assertEquals("Ensure trash folder is empty", 0, - trashRootFs.listStatus(trashRoot.getParent()).length); + assertEquals(0, rc, "Expunge immediate should return zero"); + assertFalse( + trashRootFs.exists(oldCheckpoint), "Old checkpoint should be removed"); + assertFalse( + trashRootFs.exists(recentCheckpoint), "Recent checkpoint should be removed"); + assertFalse( + trashRootFs.exists(currentFolder), "Current folder should be removed"); + assertEquals(0 +, trashRootFs.listStatus(trashRoot.getParent()).length, "Ensure trash folder is empty"); } } @@ -510,16 +510,16 @@ public void testExpungeWithFileSystem() throws Exception { "-fs", "testlfs:/"}; int val = testlfsshell.run(args); - assertEquals("Expunge immediate with filesystem should return zero", - 0, val); - assertFalse("Old checkpoint should be removed", - testlfs.exists(oldCheckpoint)); - assertFalse("Recent checkpoint should be removed", - testlfs.exists(recentCheckpoint)); - assertFalse("Current folder should be removed", - testlfs.exists(currentFolder)); - assertEquals("Ensure trash folder is empty", 0, - testlfs.listStatus(trashRoot.getParent()).length); + assertEquals( + 0, val, "Expunge immediate with filesystem should return zero"); + assertFalse( + testlfs.exists(oldCheckpoint), "Old checkpoint should be removed"); + assertFalse( + testlfs.exists(recentCheckpoint), "Recent checkpoint should be removed"); + assertFalse( + testlfs.exists(currentFolder), "Current folder should be removed"); + assertEquals(0 +, testlfs.listStatus(trashRoot.getParent()).length, "Ensure trash folder is empty"); // Incorrect FileSystem scheme String incorrectFS = "incorrectfs:/"; @@ -527,17 +527,17 @@ public void testExpungeWithFileSystem() throws Exception { "-fs", incorrectFS}; val = testlfsshell.run(args); - assertEquals("Expunge immediate should return exit code 1 when " - + "incorrect Filesystem is passed", - 1, val); + assertEquals( + 1, val, "Expunge immediate should return exit code 1 when " + + "incorrect Filesystem is passed"); // Empty FileSystem scheme args = new String[]{"-expunge", "-immediate", "-fs", ""}; val = testlfsshell.run(args); - assertNotEquals("Expunge immediate should fail when filesystem is NULL", - 0, val); + assertNotEquals( + 0, val, "Expunge immediate should fail when filesystem is NULL"); FileSystem.removeFileSystemForTesting(testlfsURI, config, testlfs); } } @@ -836,7 +836,7 @@ public Boolean get() { emptierThread.join(); } - @After + @AfterEach public void tearDown() throws IOException { File trashDir = new File(TEST_DIR.toUri().getPath()); if (trashDir.exists() && !FileUtil.fullyDelete(trashDir)) { @@ -969,18 +969,18 @@ public static void verifyMoveEmptyDirToTrash(FileSystem fs, Path trashRoot = trash.getCurrentTrashDir(emptyDir); fileSystem.delete(trashRoot, true); // Move to trash should be succeed - 
assertTrue("Move an empty directory to trash failed", - trash.moveToTrash(emptyDir)); + assertTrue( + trash.moveToTrash(emptyDir), "Move an empty directory to trash failed"); // Verify the empty dir is removed - assertFalse("The empty directory still exists on file system", - fileSystem.exists(emptyDir)); + assertFalse( + fileSystem.exists(emptyDir), "The empty directory still exists on file system"); emptyDir = fileSystem.makeQualified(emptyDir); Path dirInTrash = Path.mergePaths(trashRoot, emptyDir); - assertTrue("Directory wasn't moved to trash", - fileSystem.exists(dirInTrash)); + assertTrue( + fileSystem.exists(dirInTrash), "Directory wasn't moved to trash"); FileStatus[] flist = fileSystem.listStatus(dirInTrash); - assertTrue("Directory is not empty", - flist!= null && flist.length == 0); + assertTrue( + flist!= null && flist.length == 0, "Directory is not empty"); } } @@ -1029,15 +1029,15 @@ public static void verifyTrashPermission(FileSystem fs, Configuration conf) } Path fileInTrash = Path.mergePaths(trashDir, file); FileStatus fstat = wrapper.getFileStatus(fileInTrash); - assertTrue(String.format("File %s is not moved to trash", - fileInTrash.toString()), - wrapper.exists(fileInTrash)); + assertTrue( + wrapper.exists(fileInTrash), String.format("File %s is not moved to trash", + fileInTrash.toString())); // Verify permission not change - assertTrue(String.format("Expected file: %s is %s, but actual is %s", + assertTrue( + fstat.getPermission().equals(fsPermission), String.format("Expected file: %s is %s, but actual is %s", fileInTrash.toString(), fsPermission.toString(), - fstat.getPermission().toString()), - fstat.getPermission().equals(fsPermission)); + fstat.getPermission().toString())); } // Verify the trash directory can be removed @@ -1078,10 +1078,10 @@ private void verifyAuditableTrashEmptier(Trash trash, AuditableTrashPolicy at = (AuditableTrashPolicy) trash.getTrashPolicy(); assertEquals( - String.format("Expected num of checkpoints is %s, but actual is %s", - expectedNumOfCheckpoints, at.getNumberOfCheckpoints()), - expectedNumOfCheckpoints, - at.getNumberOfCheckpoints()); + + expectedNumOfCheckpoints +, at.getNumberOfCheckpoints(), String.format("Expected num of checkpoints is %s, but actual is %s", + expectedNumOfCheckpoints, at.getNumberOfCheckpoints())); } catch (InterruptedException e) { // Ignore } finally { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTruncatedInputBug.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTruncatedInputBug.java index 799471b8c0355..d8eec6ade4777 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTruncatedInputBug.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTruncatedInputBug.java @@ -20,11 +20,11 @@ import java.io.DataOutputStream; import java.io.IOException; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * test for the input truncation bug when mark/reset is used. 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/audit/TestCommonAuditContext.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/audit/TestCommonAuditContext.java index 9782eb276d306..31df40815bc21 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/audit/TestCommonAuditContext.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/audit/TestCommonAuditContext.java @@ -25,7 +25,7 @@ import java.util.stream.StreamSupport; import org.assertj.core.api.AbstractStringAssert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java index 62648ec58bcc7..5a9929e4e6d05 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java @@ -25,7 +25,7 @@ import java.net.URI; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNotNull; /** * The contract of FTP; requires the option "test.testdir" to be set @@ -55,7 +55,7 @@ public String getScheme() { @Override public Path getTestPath() { String pathString = getOption(TEST_FS_TESTDIR, null); - assertNotNull("Undefined test option " + TEST_FS_TESTDIR, pathString); + assertNotNull(pathString, "Undefined test option " + TEST_FS_TESTDIR); Path path = new Path(pathString); return path; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java index 618ddf97b5460..312b6e1e68dd3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java @@ -39,16 +39,15 @@ import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.LambdaTestUtils; -import org.junit.After; -import org.junit.Before; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; import org.junit.Rule; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.junit.rules.Timeout; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Test basic @{link FTPFileSystem} class methods. 
Contract tests are in @@ -61,7 +60,7 @@ public class TestFTPFileSystem { @Rule public Timeout testTimeout = new Timeout(180000, TimeUnit.MILLISECONDS); - @Before + @BeforeEach public void setUp() throws Exception { testDir = Files.createTempDirectory( GenericTestUtils.getTestDir().toPath(), getClass().getName() @@ -69,7 +68,7 @@ public void setUp() throws Exception { server = new FtpTestServer(testDir).start(); } - @After + @AfterEach @SuppressWarnings("ResultOfMethodCallIgnored") public void tearDown() throws Exception { if (server != null) { @@ -98,7 +97,7 @@ public void testCreateWithWritePermissions() throws Exception { outputStream.write(bytesExpected); } try (FSDataInputStream input = fs.open(new Path("test1.txt"))) { - assertThat(bytesExpected, equalTo(IOUtils.readFullyToByteArray(input))); + assertThat(bytesExpected).isEqualTo(IOUtils.readFullyToByteArray(input)); } } @@ -193,7 +192,7 @@ private void enhancedAssertEquals(FsAction actionA, FsAction actionB){ String errorMessageFormat = "expect FsAction is %s, whereas it is %s now."; String notEqualErrorMessage = String.format(errorMessageFormat, actionA.name(), actionB.name()); - assertEquals(notEqualErrorMessage, actionA, actionB); + assertEquals(actionA, actionB, notEqualErrorMessage); } private FTPFile getFTPFileOf(int access, FsAction action) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/http/TestHttpFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/http/TestHttpFileSystem.java index 4c6cf823a7659..aa27075e8163b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/http/TestHttpFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/http/TestHttpFileSystem.java @@ -25,8 +25,8 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IOUtils; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.IOException; @@ -37,7 +37,7 @@ import java.nio.charset.StandardCharsets; import java.util.stream.IntStream; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; /** * Testing HttpFileSystem. @@ -45,7 +45,7 @@ public class TestHttpFileSystem { private final Configuration conf = new Configuration(false); - @Before + @BeforeEach public void setUp() { conf.set("fs.http.impl", HttpFileSystem.class.getCanonicalName()); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestAcl.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestAcl.java index f33da8aa8be65..ce53f2117a6b3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestAcl.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestAcl.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.fs.permission; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; /** * Tests covering basic functionality of the ACL objects. 
@@ -30,7 +30,7 @@ public class TestAcl { ENTRY7, ENTRY8, ENTRY9, ENTRY10, ENTRY11, ENTRY12, ENTRY13; private static AclStatus STATUS1, STATUS2, STATUS3, STATUS4; - @BeforeClass + @BeforeAll public static void setUp() { // named user AclEntry.Builder aclEntryBuilder = new AclEntry.Builder() diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java index 0c5b415f28279..3f3ae7fd87a82 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java @@ -21,8 +21,8 @@ import org.apache.hadoop.conf.Configuration; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; import static org.apache.hadoop.fs.permission.FsAction.*; @@ -252,8 +252,8 @@ public void testBadUmasks() { FsPermission.getUMask(conf); fail("Shouldn't have been able to parse bad umask"); } catch(IllegalArgumentException iae) { - assertTrue("Exception should specify parsing error and invalid umask: " - + iae.getMessage(), isCorrectExceptionMessage(iae.getMessage(), b)); + assertTrue(isCorrectExceptionMessage(iae.getMessage(), b), "Exception should specify parsing error and invalid umask: " + + iae.getMessage()); } } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/protocolPB/TestFSSerialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/protocolPB/TestFSSerialization.java index 31cacf786d805..1037cc89fd990 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/protocolPB/TestFSSerialization.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/protocolPB/TestFSSerialization.java @@ -24,8 +24,8 @@ import org.apache.hadoop.io.DataOutputBuffer; import static org.apache.hadoop.fs.FSProtos.*; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; /** * Verify PB serialization of FS data structures. 
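The remaining conversions in this patch follow two more mechanical rules: lifecycle annotations are renamed (@Before/@After become @BeforeEach/@AfterEach, @BeforeClass/@AfterClass become @BeforeAll/@AfterAll), and @Test attributes are replaced (expected= becomes an explicit assertThrows, timeout= in milliseconds becomes @Timeout, whose default unit is seconds). A minimal sketch under those rules; the class and the exercised code are hypothetical, not taken from the Hadoop test suite:

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

import java.io.IOException;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

public class TestLifecycleAndThrowsSketch {

  private StringBuilder buffer;

  @BeforeEach  // replaces JUnit 4 @Before
  public void setUp() {
    buffer = new StringBuilder();
  }

  @Test
  @Timeout(value = 10)  // replaces @Test(timeout = 10000); the value is in seconds by default
  public void testAppend() {
    buffer.append("abc");
    assertEquals("abc", buffer.toString(), "buffer should hold the appended text");
  }

  @Test  // replaces @Test(expected = IOException.class)
  public void testExpectedException() {
    assertThrows(IOException.class, () -> {
      throw new IOException("expected failure");
    });
  }
}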
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java index e425c2dea284a..87e83be11f654 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java @@ -44,17 +44,14 @@ import org.apache.sshd.sftp.server.SftpSubsystemFactory; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; +import static org.junit.jupiter.api.Assertions.*; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; import org.junit.Rule; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.junit.rules.TestName; public class TestSFTPFileSystem { @@ -102,12 +99,12 @@ public boolean authenticate(String username, String password, port = sshd.getPort(); } - @Before + @BeforeEach public void init() throws Exception { sftpFs = FileSystem.get(URI.create(connection), conf); } - @After + @AfterEach public void cleanUp() throws Exception { if (sftpFs != null) { try { @@ -118,7 +115,7 @@ public void cleanUp() throws Exception { } } - @BeforeClass + @BeforeAll public static void setUp() throws Exception { // skip all tests if running on Windows assumeNotWindows(); @@ -138,7 +135,7 @@ public static void setUp() throws Exception { localFs.mkdirs(localDir); } - @AfterClass + @AfterAll public static void tearDown() { if (localFs != null) { try { @@ -268,13 +265,14 @@ public void testStatFile() throws Exception { * * @throws Exception */ - @Test(expected=java.io.IOException.class) + @Test public void testDeleteNonEmptyDir() throws Exception { - Path file = touch(localFs, name.getMethodName().toLowerCase()); - sftpFs.delete(localDir, false); - assertThat( - ((SFTPFileSystem) sftpFs).getConnectionPool().getLiveConnCount()) - .isEqualTo(1); + assertThrows(IOException.class, () -> { + Path file = touch(localFs, name.getMethodName().toLowerCase()); + sftpFs.delete(localDir, false); + assertThat(((SFTPFileSystem) sftpFs).getConnectionPool().getLiveConnCount()). 
+ isEqualTo(1); + }); } /** @@ -321,11 +319,13 @@ public void testRenameFile() throws Exception { * * @throws Exception */ - @Test(expected=java.io.IOException.class) + @Test public void testRenameNonExistFile() throws Exception { - Path file1 = new Path(localDir, name.getMethodName().toLowerCase() + "1"); - Path file2 = new Path(localDir, name.getMethodName().toLowerCase() + "2"); - sftpFs.rename(file1, file2); + assertThrows(IOException.class, ()->{ + Path file1 = new Path(localDir, name.getMethodName().toLowerCase() + "1"); + Path file2 = new Path(localDir, name.getMethodName().toLowerCase() + "2"); + sftpFs.rename(file1, file2); + }); } /** @@ -333,11 +333,13 @@ public void testRenameNonExistFile() throws Exception { * * @throws Exception */ - @Test(expected=java.io.IOException.class) + @Test public void testRenamingFileOntoExistingFile() throws Exception { - Path file1 = touch(localFs, name.getMethodName().toLowerCase() + "1"); - Path file2 = touch(localFs, name.getMethodName().toLowerCase() + "2"); - sftpFs.rename(file1, file2); + assertThrows(IOException.class, ()->{ + Path file1 = touch(localFs, name.getMethodName().toLowerCase() + "1"); + Path file2 = touch(localFs, name.getMethodName().toLowerCase() + "2"); + sftpFs.rename(file1, file2); + }); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java index 5637e70f32fa5..b46a4f674db3a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.fs.shell; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import java.io.IOException; import java.net.URI; @@ -42,9 +42,9 @@ import org.apache.hadoop.ipc.RpcNoSuchMethodException; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.ToolRunner; -import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; import org.junit.Rule; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.junit.rules.TemporaryFolder; public class TestAclCommands { @@ -55,7 +55,7 @@ public class TestAclCommands { private Configuration conf = null; - @Before + @BeforeEach public void setup() throws IOException { conf = new Configuration(); path = testFolder.newFile("file").getPath(); @@ -63,34 +63,34 @@ public void setup() throws IOException { @Test public void testGetfaclValidations() throws Exception { - assertFalse("getfacl should fail without path", - 0 == runCommand(new String[] {"-getfacl"})); - assertFalse("getfacl should fail with extra argument", - 0 == runCommand(new String[] {"-getfacl", path, "extraArg"})); + assertFalse( + 0 == runCommand(new String[] {"-getfacl"}), "getfacl should fail without path"); + assertFalse( + 0 == runCommand(new String[] {"-getfacl", path, "extraArg"}), "getfacl should fail with extra argument"); } @Test public void testSetfaclValidations() throws Exception { - assertFalse("setfacl should fail without options", - 0 == runCommand(new String[] {"-setfacl", path})); - assertFalse("setfacl should fail without options -b, -k, -m, -x or --set", - 0 == runCommand(new String[] {"-setfacl", "-R", path})); - assertFalse("setfacl should fail without path", - 0 == runCommand(new String[] {"-setfacl"})); - assertFalse("setfacl should fail without 
aclSpec", - 0 == runCommand(new String[] {"-setfacl", "-m", path})); - assertFalse("setfacl should fail with conflicting options", - 0 == runCommand(new String[] {"-setfacl", "-m", path})); - assertFalse("setfacl should fail with extra arguments", - 0 == runCommand(new String[] {"-setfacl", path, "extra"})); - assertFalse("setfacl should fail with extra arguments", - 0 == runCommand(new String[] {"-setfacl", "--set", - "default:user::rwx", path, "extra"})); - assertFalse("setfacl should fail with permissions for -x", - 0 == runCommand(new String[] {"-setfacl", "-x", "user:user1:rwx", - path})); - assertFalse("setfacl should fail ACL spec missing", - 0 == runCommand(new String[] {"-setfacl", "-m", "", path})); + assertFalse( + 0 == runCommand(new String[] {"-setfacl", path}), "setfacl should fail without options"); + assertFalse( + 0 == runCommand(new String[] {"-setfacl", "-R", path}), "setfacl should fail without options -b, -k, -m, -x or --set"); + assertFalse( + 0 == runCommand(new String[] {"-setfacl"}), "setfacl should fail without path"); + assertFalse( + 0 == runCommand(new String[] {"-setfacl", "-m", path}), "setfacl should fail without aclSpec"); + assertFalse( + 0 == runCommand(new String[] {"-setfacl", "-m", path}), "setfacl should fail with conflicting options"); + assertFalse( + 0 == runCommand(new String[] {"-setfacl", path, "extra"}), "setfacl should fail with extra arguments"); + assertFalse( + 0 == runCommand(new String[] {"-setfacl", "--set", + "default:user::rwx", path, "extra"}), "setfacl should fail with extra arguments"); + assertFalse( + 0 == runCommand(new String[] {"-setfacl", "-x", "user:user1:rwx", + path}), "setfacl should fail with permissions for -x"); + assertFalse( + 0 == runCommand(new String[] {"-setfacl", "-m", "", path}), "setfacl should fail ACL spec missing"); } @Test @@ -101,9 +101,9 @@ public void testSetfaclValidationsWithoutPermissions() throws Exception { } catch (IllegalArgumentException e) { } assertTrue(parsedList.size() == 0); - assertFalse("setfacl should fail with less arguments", - 0 == runCommand(new String[] { "-setfacl", "-m", "user:user1:", - "/path" })); + assertFalse( + 0 == runCommand(new String[] { "-setfacl", "-m", "user:user1:", + "/path" }), "setfacl should fail with less arguments"); } @Test @@ -129,7 +129,7 @@ public void testMultipleAclSpecParsing() throws Exception { expectedList.add(user2Acl); expectedList.add(group1Acl); expectedList.add(defaultAcl); - assertEquals("Parsed Acl not correct", expectedList, parsedList); + assertEquals(expectedList, parsedList, "Parsed Acl not correct"); } @Test @@ -160,7 +160,7 @@ public void testMultipleAclSpecParsingWithoutPermissions() throws Exception { expectedList.add(other); expectedList.add(defaultUser); expectedList.add(defaultMask); - assertEquals("Parsed Acl not correct", expectedList, parsedList); + assertEquals(expectedList, parsedList, "Parsed Acl not correct"); } @Test @@ -169,8 +169,8 @@ public void testLsNoRpcForGetAclStatus() throws Exception { conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "stubfs:///"); conf.setClass("fs.stubfs.impl", StubFileSystem.class, FileSystem.class); conf.setBoolean("stubfs.noRpcForGetAclStatus", true); - assertEquals("ls must succeed even if getAclStatus RPC does not exist.", - 0, ToolRunner.run(conf, new FsShell(), new String[] { "-ls", "/" })); + assertEquals( + 0, ToolRunner.run(conf, new FsShell(), new String[] { "-ls", "/" }), "ls must succeed even if getAclStatus RPC does not exist."); } @Test @@ -178,8 +178,8 @@ public void 
testLsAclsUnsupported() throws Exception { Configuration conf = new Configuration(); conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "stubfs:///"); conf.setClass("fs.stubfs.impl", StubFileSystem.class, FileSystem.class); - assertEquals("ls must succeed even if FileSystem does not implement ACLs.", - 0, ToolRunner.run(conf, new FsShell(), new String[] { "-ls", "/" })); + assertEquals( + 0, ToolRunner.run(conf, new FsShell(), new String[] { "-ls", "/" }), "ls must succeed even if FileSystem does not implement ACLs."); } public static class StubFileSystem extends FileSystem { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java index db7fc2488c848..0c42ecbdaaf08 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java @@ -18,11 +18,11 @@ package org.apache.hadoop.fs.shell; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import org.apache.hadoop.conf.Configuration; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class TestCommandFactory { static CommandFactory factory; @@ -31,7 +31,7 @@ public class TestCommandFactory { static void registerCommands(CommandFactory factory) { } - @Before + @BeforeEach public void testSetup() { factory = new CommandFactory(conf); assertNotNull(factory); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java index 9172f85eb9cb7..ec513f0a39a78 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java @@ -19,7 +19,7 @@ package org.apache.hadoop.fs.shell; import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.*; import java.io.IOException; @@ -37,9 +37,9 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.shell.CopyCommands.Put; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.mockito.stubbing.OngoingStubbing; public class TestCopy { @@ -51,7 +51,7 @@ public class TestCopy { static PathData target; static FileStatus fileStat; - @BeforeClass + @BeforeAll public static void setup() throws IOException { conf = new Configuration(); conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class); @@ -60,7 +60,7 @@ public static void setup() throws IOException { when(fileStat.isDirectory()).thenReturn(false); } - @Before + @BeforeEach public void resetMock() throws IOException { reset(mockFs); target = new PathData(path.toString(), conf); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyFromLocal.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyFromLocal.java index 
757c588104ea1..af2269b81f732 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyFromLocal.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyFromLocal.java @@ -21,11 +21,12 @@ import java.util.LinkedList; import java.util.concurrent.ThreadPoolExecutor; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.lang3.RandomUtils; @@ -37,7 +38,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.shell.CopyCommands.CopyFromLocal; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; /** * Test for copyFromLocal. @@ -82,7 +83,7 @@ public static int initialize(Path dir) throws Exception { return numTotalFiles; } - @BeforeClass + @BeforeAll public static void init() throws Exception { conf = new Configuration(false); conf.set("fs.file.impl", LocalFileSystem.class.getName()); @@ -95,13 +96,13 @@ public static void init() throws Exception { fs.setWorkingDirectory(testDir); } - @AfterClass + @AfterAll public static void cleanup() throws Exception { fs.delete(testDir, true); fs.close(); } - @Before + @BeforeEach public void initDirectory() throws Exception { dir = new Path("dir" + RandomStringUtils.randomNumeric(4)); numFiles = initialize(dir); @@ -113,14 +114,16 @@ private void run(CommandWithDestination cmd, String... 
args) { assertEquals(0, cmd.run(args)); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyFromLocal() { run(new TestMultiThreadedCopy(1, 0), new Path(dir, FROM_DIR_NAME).toString(), new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyFromLocalWithThreads(){ int threads = Runtime.getRuntime().availableProcessors() * 2 + 1; run(new TestMultiThreadedCopy(threads, numFiles), @@ -129,7 +132,8 @@ public void testCopyFromLocalWithThreads(){ new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyFromLocalWithThreadWrong(){ run(new TestMultiThreadedCopy(1, 0), "-t", "0", new Path(dir, FROM_DIR_NAME).toString(), @@ -150,7 +154,7 @@ private class TestMultiThreadedCopy extends CopyFromLocal { protected void processArguments(LinkedList args) throws IOException { // Check if the correct number of threads are spawned - Assert.assertEquals(expectedThreads, getThreadCount()); + Assertions.assertEquals(expectedThreads, getThreadCount()); super.processArguments(args); if (isMultiThreadNecessary(args)) { @@ -159,10 +163,10 @@ protected void processArguments(LinkedList args) // 2) There are no active tasks in the executor // 3) Executor has shutdown correctly ThreadPoolExecutor executor = getExecutor(); - Assert.assertEquals(expectedCompletedTaskCount, + Assertions.assertEquals(expectedCompletedTaskCount, executor.getCompletedTaskCount()); - Assert.assertEquals(0, executor.getActiveCount()); - Assert.assertTrue(executor.isTerminated()); + Assertions.assertEquals(0, executor.getActiveCount()); + Assertions.assertTrue(executor.isTerminated()); } else { assert getExecutor() == null; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java index b68be243c956e..411a3f2582db9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java @@ -19,9 +19,10 @@ import java.io.IOException; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; @@ -37,8 +38,8 @@ import org.apache.hadoop.fs.shell.CopyCommands.Get; import org.apache.hadoop.fs.shell.CopyCommands.Put; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; public class TestCopyPreserveFlag { private static final int MODIFICATION_TIME = 12345000; @@ -59,7 +60,7 @@ public class TestCopyPreserveFlag { private Path testDir; private Configuration conf; - @Before + @BeforeEach public void initialize() throws Exception { conf = new Configuration(false); conf.set("fs.file.impl", LocalFileSystem.class.getName()); @@ -86,7 +87,7 @@ public void initialize() throws Exception { fs.setTimes(DIR_FROM, MODIFICATION_TIME, ACCESS_TIME); } - @After + @AfterEach public void cleanup() throws Exception { fs.delete(testDir, true); fs.close(); @@ -111,19 +112,22 @@ private void 
run(CommandWithDestination cmd, String... args) { assertEquals(0, cmd.run(args)); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testPutWithP() throws Exception { run(new Put(), "-p", FROM.toString(), TO.toString()); assertAttributesPreserved(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testPutWithoutP() throws Exception { run(new Put(), FROM.toString(), TO.toString()); assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testPutWithPQ() throws Exception { Put put = new Put(); run(put, "-p", "-q", "100", FROM.toString(), TO.toString()); @@ -131,7 +135,8 @@ public void testPutWithPQ() throws Exception { assertAttributesPreserved(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testPutWithQ() throws Exception { Put put = new Put(); run(put, "-q", "100", FROM.toString(), TO.toString()); @@ -139,7 +144,8 @@ public void testPutWithQ() throws Exception { assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testPutWithSplCharacter() throws Exception { fs.mkdirs(DIR_FROM_SPL); fs.createNewFile(FROM_SPL); @@ -147,37 +153,43 @@ public void testPutWithSplCharacter() throws Exception { assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyFromLocal() throws Exception { run(new CopyFromLocal(), FROM.toString(), TO.toString()); assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyFromLocalWithThreads() throws Exception { run(new CopyFromLocal(), "-t", "10", FROM.toString(), TO.toString()); assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyFromLocalWithThreadsPreserve() throws Exception { run(new CopyFromLocal(), "-p", "-t", "10", FROM.toString(), TO.toString()); assertAttributesPreserved(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testGetWithP() throws Exception { run(new Get(), "-p", FROM.toString(), TO.toString()); assertAttributesPreserved(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testGetWithoutP() throws Exception { run(new Get(), FROM.toString(), TO.toString()); assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testGetWithPQ() throws Exception { Get get = new Get(); run(get, "-p", "-q", "100", FROM.toString(), TO.toString()); @@ -185,7 +197,8 @@ public void testGetWithPQ() throws Exception { assertAttributesPreserved(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testGetWithQ() throws Exception { Get get = new Get(); run(get, "-q", "100", FROM.toString(), TO.toString()); @@ -193,37 +206,43 @@ public void testGetWithQ() throws Exception { assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testGetWithThreads() throws Exception { run(new Get(), "-t", "10", FROM.toString(), TO.toString()); assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testGetWithThreadsPreserve() throws Exception { run(new Get(), "-p", "-t", "10", FROM.toString(), TO.toString()); assertAttributesPreserved(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCpWithP() throws Exception { run(new Cp(), "-p", FROM.toString(), TO.toString()); assertAttributesPreserved(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void 
testCpWithoutP() throws Exception { run(new Cp(), FROM.toString(), TO.toString()); assertAttributesChanged(TO); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testDirectoryCpWithP() throws Exception { run(new Cp(), "-p", DIR_FROM.toString(), DIR_TO2.toString()); assertAttributesPreserved(DIR_TO2); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testDirectoryCpWithoutP() throws Exception { run(new Cp(), DIR_FROM.toString(), DIR_TO2.toString()); assertAttributesChanged(DIR_TO2); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyToLocal.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyToLocal.java index 202b81912c104..4357b5b95b18b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyToLocal.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyToLocal.java @@ -21,11 +21,12 @@ import java.util.LinkedList; import java.util.concurrent.ThreadPoolExecutor; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.lang3.RandomUtils; @@ -38,7 +39,7 @@ import org.apache.hadoop.fs.shell.CopyCommands.CopyToLocal; import static org.apache.hadoop.fs.shell.CopyCommandWithMultiThread.DEFAULT_QUEUE_SIZE; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestCopyToLocal { @@ -81,7 +82,7 @@ private static int initialize(Path dir) throws Exception { return numTotalFiles; } - @BeforeClass + @BeforeAll public static void init() throws Exception { conf = new Configuration(false); conf.set("fs.file.impl", LocalFileSystem.class.getName()); @@ -94,7 +95,7 @@ public static void init() throws Exception { fs.setWorkingDirectory(testDir); } - @AfterClass + @AfterAll public static void cleanup() throws Exception { fs.delete(testDir, true); fs.close(); @@ -105,13 +106,14 @@ private void run(CopyCommandWithMultiThread cmd, String... 
args) { assertEquals(0, cmd.run(args)); } - @Before + @BeforeEach public void initDirectory() throws Exception { dir = new Path("dir" + RandomStringUtils.randomNumeric(4)); numFiles = initialize(dir); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopy() throws Exception { MultiThreadedCopy copy = new MultiThreadedCopy(1, DEFAULT_QUEUE_SIZE, 0); run(copy, new Path(dir, FROM_DIR_NAME).toString(), @@ -119,21 +121,24 @@ public void testCopy() throws Exception { assert copy.getExecutor() == null; } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyWithThreads() { run(new MultiThreadedCopy(5, DEFAULT_QUEUE_SIZE, numFiles), "-t", "5", new Path(dir, FROM_DIR_NAME).toString(), new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyWithThreadWrong() { run(new MultiThreadedCopy(1, DEFAULT_QUEUE_SIZE, 0), "-t", "0", new Path(dir, FROM_DIR_NAME).toString(), new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyWithThreadsAndQueueSize() { int queueSize = 256; run(new MultiThreadedCopy(5, queueSize, numFiles), "-t", "5", "-q", @@ -142,7 +147,8 @@ public void testCopyWithThreadsAndQueueSize() { new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopyWithThreadsAndQueueSizeWrong() { int queueSize = 0; run(new MultiThreadedCopy(5, DEFAULT_QUEUE_SIZE, numFiles), "-t", "5", "-q", @@ -151,7 +157,8 @@ public void testCopyWithThreadsAndQueueSizeWrong() { new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCopySingleFile() throws Exception { Path fromDirPath = new Path(dir, FROM_DIR_NAME); Path subFile = new Path(fromDirPath, "file0"); @@ -186,9 +193,9 @@ private static class MultiThreadedCopy extends CopyToLocal { protected void processArguments(LinkedList args) throws IOException { // Check if the number of threads are same as expected - Assert.assertEquals(expectedThreads, getThreadCount()); + Assertions.assertEquals(expectedThreads, getThreadCount()); // Check if the queue pool size of executor is same as expected - Assert.assertEquals(expectedQueuePoolSize, getThreadPoolQueueSize()); + Assertions.assertEquals(expectedQueuePoolSize, getThreadPoolQueueSize()); super.processArguments(args); @@ -198,10 +205,10 @@ protected void processArguments(LinkedList args) // 2) There are no active tasks in the executor // 3) Executor has shutdown correctly ThreadPoolExecutor executor = getExecutor(); - Assert.assertEquals(expectedCompletedTaskCount, + Assertions.assertEquals(expectedCompletedTaskCount, executor.getCompletedTaskCount()); - Assert.assertEquals(0, executor.getActiveCount()); - Assert.assertTrue(executor.isTerminated()); + Assertions.assertEquals(0, executor.getActiveCount()); + Assertions.assertTrue(executor.isTerminated()); } else { assert getExecutor() == null; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java index a2af500c30c9b..a2bbd3c2988c7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.fs.shell; -import static org.junit.Assert.*; +import static 
org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.*; import java.io.PrintStream; @@ -35,9 +35,9 @@ import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FilterFileSystem; import org.apache.hadoop.fs.shell.CommandFormat.NotEnoughArgumentsException; -import org.junit.Test; -import org.junit.Before; -import org.junit.BeforeClass; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; /** * JUnit test class for {@link org.apache.hadoop.fs.shell.Count} @@ -53,7 +53,7 @@ public class TestCount { private static FileSystem mockFs; private static FileStatus fileStat; - @BeforeClass + @BeforeAll public static void setup() { conf = new Configuration(); conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class); @@ -62,7 +62,7 @@ public static void setup() { when(fileStat.isFile()).thenReturn(true); } - @Before + @BeforeEach public void resetMock() { reset(mockFs); } @@ -436,7 +436,7 @@ public void getCommandName() { Count count = new Count(); String actual = count.getCommandName(); String expected = "count"; - assertEquals("Count.getCommandName", expected, actual); + assertEquals(expected, actual, "Count.getCommandName"); } @Test @@ -444,7 +444,7 @@ public void isDeprecated() { Count count = new Count(); boolean actual = count.isDeprecated(); boolean expected = false; - assertEquals("Count.isDeprecated", expected, actual); + assertEquals(expected, actual, "Count.isDeprecated"); } @Test @@ -452,7 +452,7 @@ public void getReplacementCommand() { Count count = new Count(); String actual = count.getReplacementCommand(); String expected = null; - assertEquals("Count.getReplacementCommand", expected, actual); + assertEquals(expected, actual, "Count.getReplacementCommand"); } @Test @@ -460,7 +460,7 @@ public void getName() { Count count = new Count(); String actual = count.getName(); String expected = "count"; - assertEquals("Count.getName", expected, actual); + assertEquals(expected, actual, "Count.getName"); } @Test @@ -470,7 +470,7 @@ public void getUsage() { String expected = "-count [-q] [-h] [-v] [-t []]" + " [-u] [-x] [-e] [-s] ..."; - assertEquals("Count.getUsage", expected, actual); + assertEquals(expected, actual, "Count.getUsage"); } // check the correct description is returned @@ -504,7 +504,7 @@ public void getDescription() { + "The -e option shows the erasure coding policy." 
+ "The -s option shows snapshot counts."; - assertEquals("Count.getDescription", expected, actual); + assertEquals(expected, actual, "Count.getDescription"); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCpCommand.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCpCommand.java index 214f1a0686cd9..1a47888bc8b45 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCpCommand.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCpCommand.java @@ -21,11 +21,12 @@ import java.util.LinkedList; import java.util.concurrent.ThreadPoolExecutor; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.lang3.RandomUtils; @@ -38,7 +39,7 @@ import org.apache.hadoop.fs.shell.CopyCommands.Cp; import static org.apache.hadoop.fs.shell.CopyCommandWithMultiThread.DEFAULT_QUEUE_SIZE; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestCpCommand { @@ -81,7 +82,7 @@ private static int initialize(Path dir) throws Exception { return numTotalFiles; } - @BeforeClass + @BeforeAll public static void init() throws Exception { conf = new Configuration(false); conf.set("fs.file.impl", LocalFileSystem.class.getName()); @@ -94,7 +95,7 @@ public static void init() throws Exception { fs.setWorkingDirectory(testDir); } - @AfterClass + @AfterAll public static void cleanup() throws Exception { fs.delete(testDir, true); fs.close(); @@ -105,13 +106,14 @@ private void run(CopyCommandWithMultiThread cmd, String... 
args) { assertEquals(0, cmd.run(args)); } - @Before + @BeforeEach public void initDirectory() throws Exception { dir = new Path("dir" + RandomStringUtils.randomNumeric(4)); numFiles = initialize(dir); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCp() throws Exception { MultiThreadedCp copy = new MultiThreadedCp(1, DEFAULT_QUEUE_SIZE, 0); run(copy, new Path(dir, FROM_DIR_NAME).toString(), @@ -119,21 +121,24 @@ public void testCp() throws Exception { assert copy.getExecutor() == null; } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCpWithThreads() { run(new MultiThreadedCp(5, DEFAULT_QUEUE_SIZE, numFiles), "-t", "5", new Path(dir, FROM_DIR_NAME).toString(), new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCpWithThreadWrong() { run(new MultiThreadedCp(1, DEFAULT_QUEUE_SIZE, 0), "-t", "0", new Path(dir, FROM_DIR_NAME).toString(), new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCpWithThreadsAndQueueSize() { int queueSize = 256; run(new MultiThreadedCp(5, queueSize, numFiles), "-t", "5", "-q", @@ -142,7 +147,8 @@ public void testCpWithThreadsAndQueueSize() { new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCpWithThreadsAndQueueSizeWrong() { int queueSize = 0; run(new MultiThreadedCp(5, DEFAULT_QUEUE_SIZE, numFiles), "-t", "5", "-q", @@ -151,7 +157,8 @@ public void testCpWithThreadsAndQueueSizeWrong() { new Path(dir, TO_DIR_NAME).toString()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCpSingleFile() throws Exception { Path fromDirPath = new Path(dir, FROM_DIR_NAME); Path subFile = new Path(fromDirPath, "file0"); @@ -186,9 +193,9 @@ private static class MultiThreadedCp extends Cp { protected void processArguments(LinkedList args) throws IOException { // Check if the number of threads are same as expected - Assert.assertEquals(expectedThreads, getThreadCount()); + Assertions.assertEquals(expectedThreads, getThreadCount()); // Check if the queue pool size of executor is same as expected - Assert.assertEquals(expectedQueuePoolSize, getThreadPoolQueueSize()); + Assertions.assertEquals(expectedQueuePoolSize, getThreadPoolQueueSize()); super.processArguments(args); @@ -198,10 +205,10 @@ protected void processArguments(LinkedList args) // 2) There are no active tasks in the executor // 3) Executor has shutdown correctly ThreadPoolExecutor executor = getExecutor(); - Assert.assertEquals(expectedCompletedTaskCount, + Assertions.assertEquals(expectedCompletedTaskCount, executor.getCompletedTaskCount()); - Assert.assertEquals(0, executor.getActiveCount()); - Assert.assertTrue(executor.isTerminated()); + Assertions.assertEquals(0, executor.getActiveCount()); + Assertions.assertTrue(executor.isTerminated()); } else { assert getExecutor() == null; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestLs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestLs.java index 4a4f453d5e801..0773eebca999a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestLs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestLs.java @@ -19,7 +19,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY; import static 
org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SHELL_MISSING_DEFAULT_FS_WARNING_KEY; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.*; @@ -41,9 +41,9 @@ import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.permission.AclStatus; import org.apache.hadoop.fs.permission.FsPermission; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import org.mockito.InOrder; /** @@ -56,7 +56,7 @@ public class TestLs { private static final Date NOW = new Date(); - @BeforeClass + @BeforeAll public static void setup() throws IOException { conf = new Configuration(); conf.set(FS_DEFAULT_NAME_KEY, "mockfs:///"); @@ -64,7 +64,7 @@ public static void setup() throws IOException { mockFs = mock(FileSystem.class); } - @Before + @BeforeEach public void resetMock() throws IOException, URISyntaxException { reset(mockFs); AclStatus mockAclStatus = mock(AclStatus.class); @@ -1113,7 +1113,7 @@ public void isDeprecated() { Ls ls = new Ls(); boolean actual = ls.isDeprecated(); boolean expected = false; - assertEquals("Ls.isDeprecated", expected, actual); + assertEquals(expected, actual, "Ls.isDeprecated"); } // check there's no replacement command @@ -1122,7 +1122,7 @@ public void getReplacementCommand() { Ls ls = new Ls(); String actual = ls.getReplacementCommand(); String expected = null; - assertEquals("Ls.getReplacementCommand", expected, actual); + assertEquals(expected, actual, "Ls.getReplacementCommand"); } // check the correct name is returned @@ -1131,36 +1131,40 @@ public void getName() { Ls ls = new Ls(); String actual = ls.getName(); String expected = "ls"; - assertEquals("Ls.getName", expected, actual); + assertEquals(expected, actual, "Ls.getName"); } - @Test(expected = UnsupportedOperationException.class) + @Test public void processPathFileDisplayECPolicyWhenUnsupported() throws IOException { - TestFile testFile = new TestFile("testDirectory", "testFile"); - LinkedList pathData = new LinkedList(); - pathData.add(testFile.getPathData()); - Ls ls = new Ls(); - LinkedList options = new LinkedList(); - options.add("-e"); - ls.processOptions(options); - ls.processArguments(pathData); + assertThrows(UnsupportedOperationException.class, ()->{ + TestFile testFile = new TestFile("testDirectory", "testFile"); + LinkedList pathData = new LinkedList(); + pathData.add(testFile.getPathData()); + Ls ls = new Ls(); + LinkedList options = new LinkedList(); + options.add("-e"); + ls.processOptions(options); + ls.processArguments(pathData); + }); } - @Test(expected = UnsupportedOperationException.class) + @Test public void processPathDirDisplayECPolicyWhenUnsupported() throws IOException { - TestFile testFile = new TestFile("testDirectory", "testFile"); - TestFile testDir = new TestFile("", "testDirectory"); - testDir.setIsDir(true); - testDir.addContents(testFile); - LinkedList pathData = new LinkedList(); - pathData.add(testDir.getPathData()); - Ls ls = new Ls(); - LinkedList options = new LinkedList(); - options.add("-e"); - ls.processOptions(options); - ls.processArguments(pathData); + assertThrows(UnsupportedOperationException.class, () -> { + TestFile testFile = new TestFile("testDirectory", "testFile"); + TestFile testDir = new TestFile("", "testDirectory"); + testDir.setIsDir(true); + testDir.addContents(testFile); + 
LinkedList pathData = new LinkedList(); + pathData.add(testDir.getPathData()); + Ls ls = new Ls(); + LinkedList options = new LinkedList(); + options.add("-e"); + ls.processOptions(options); + ls.processArguments(pathData); + }); } // test class representing a file to be listed @@ -1325,10 +1329,6 @@ private FileStatus[] getContents() { * * @param lineFormat * format mask - * @param fileStatus - * file status - * @param fileName - * file name * @return formated line */ private String formatLineMtime(String lineFormat) { @@ -1344,11 +1344,7 @@ private String formatLineMtime(String lineFormat) { * * @param lineFormat * format mask - * @param fileStatus - * file status - * @param fileName - * file name - * @return formated line + * @return formatted line */ private String formatLineAtime(String lineFormat) { return String.format(lineFormat, (isDir() ? "d" : "-"), getPermission(), diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestMove.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestMove.java index b9e87d3dacefe..988201ad8bc27 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestMove.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestMove.java @@ -18,7 +18,7 @@ package org.apache.hadoop.fs.shell; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.*; @@ -33,22 +33,22 @@ import org.apache.hadoop.fs.FilterFileSystem; import org.apache.hadoop.fs.PathExistsException; import org.apache.hadoop.fs.shell.CommandFormat.UnknownOptionException; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; public class TestMove { static Configuration conf; static FileSystem mockFs; - @BeforeClass + @BeforeAll public static void setup() throws IOException, URISyntaxException { mockFs = mock(FileSystem.class); conf = new Configuration(); conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class); } - @Before + @BeforeEach public void resetMock() throws IOException { reset(mockFs); } @@ -91,14 +91,15 @@ public void testMoveTargetExistsWithoutExplicitRename() throws Exception { cmd.run(cmdargs); // make sure command failed with the proper exception - assertTrue("Rename should have failed with path exists exception", - cmd.error instanceof PathExistsException); + assertTrue(cmd.error instanceof PathExistsException, + "Rename should have failed with path exists exception"); } - @Test(expected = UnknownOptionException.class) + @Test public void testMoveFromLocalDoesNotAllowTOption() { - new MoveCommands.MoveFromLocal().run("-t", "2", - null, null); + assertThrows(UnknownOptionException.class, () -> { + new MoveCommands.MoveFromLocal().run("-t", "2", null, null); + }); } static class MockFileSystem extends FilterFileSystem { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java index 130ee5edee768..ea44f546c0634 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java @@ -18,9 +18,9 @@ package 
org.apache.hadoop.fs.shell; import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.io.IOException; @@ -32,10 +32,11 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Shell; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; public class TestPathData { private static final String TEST_ROOT_DIR = @@ -44,7 +45,7 @@ public class TestPathData { protected FileSystem fs; protected Path testDir; - @Before + @BeforeEach public void initialize() throws Exception { conf = new Configuration(); fs = FileSystem.getLocal(conf); @@ -64,13 +65,14 @@ public void initialize() throws Exception { fs.create(new Path("d2","f3")); } - @After + @AfterEach public void cleanup() throws Exception { fs.delete(testDir, true); fs.close(); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testWithDirStringAndConf() throws Exception { String dirString = "d1"; PathData item = new PathData(dirString, conf); @@ -83,7 +85,8 @@ public void testWithDirStringAndConf() throws Exception { checkPathData(dirString, item); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testUnqualifiedUriContents() throws Exception { String dirString = "d1"; PathData item = new PathData(dirString, conf); @@ -94,7 +97,8 @@ public void testUnqualifiedUriContents() throws Exception { ); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testQualifiedUriContents() throws Exception { String dirString = fs.makeQualified(new Path("d1")).toString(); PathData item = new PathData(dirString, conf); @@ -105,7 +109,8 @@ public void testQualifiedUriContents() throws Exception { ); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testCwdContents() throws Exception { String dirString = Path.CUR_DIR; PathData item = new PathData(dirString, conf); @@ -116,7 +121,8 @@ public void testCwdContents() throws Exception { ); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testToFile() throws Exception { PathData item = new PathData(".", conf); assertEquals(new File(testDir.toString()), item.toFile()); @@ -126,7 +132,8 @@ public void testToFile() throws Exception { assertEquals(new File(testDir + "/d1/f1"), item.toFile()); } - @Test (timeout = 5000) + @Test + @Timeout(value = 5) public void testToFileRawWindowsPaths() throws Exception { assumeWindows(); @@ -153,7 +160,8 @@ public void testToFileRawWindowsPaths() throws Exception { assertEquals(new File(testDir + "\\foo\\bar"), item.toFile()); } - @Test (timeout = 5000) + @Test + @Timeout(value = 5) public void testInvalidWindowsPath() throws Exception { assumeWindows(); @@ -171,7 +179,8 @@ public void testInvalidWindowsPath() throws Exception { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testAbsoluteGlob() throws Exception { PathData[] items = PathData.expandAsGlob(testDir+"/d1/f1*", conf); assertEquals( @@ -199,7 
+208,8 @@ public void testAbsoluteGlob() throws Exception { ); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testRelativeGlob() throws Exception { PathData[] items = PathData.expandAsGlob("d1/f1*", conf); assertEquals( @@ -208,7 +218,8 @@ public void testRelativeGlob() throws Exception { ); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testRelativeGlobBack() throws Exception { fs.setWorkingDirectory(new Path("d1")); PathData[] items = PathData.expandAsGlob("../d2/*", conf); @@ -226,7 +237,7 @@ public void testGlobThrowsExceptionForUnreadableDir() throws Exception { fs.setPermission(obscuredDir, new FsPermission((short)0)); //no access try { PathData.expandAsGlob("foo/*", conf); - Assert.fail("Should throw IOException"); + Assertions.fail("Should throw IOException"); } catch (IOException ioe) { // expected } finally { @@ -235,7 +246,8 @@ public void testGlobThrowsExceptionForUnreadableDir() throws Exception { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testWithStringAndConfForBuggyPath() throws Exception { String dirString = "file:///tmp"; Path tmpDir = new Path(dirString); @@ -249,13 +261,13 @@ public void testWithStringAndConfForBuggyPath() throws Exception { } public void checkPathData(String dirString, PathData item) throws Exception { - assertEquals("checking fs", fs, item.fs); - assertEquals("checking string", dirString, item.toString()); + assertEquals(fs, item.fs, "checking fs"); + assertEquals(dirString, item.toString(), "checking string"); - assertEquals("checking path", - fs.makeQualified(new Path(item.toString())), item.path + assertEquals( + fs.makeQualified(new Path(item.toString())), item.path, "checking path" ); - assertTrue("checking exist", item.stat != null); - assertTrue("checking isDir", item.stat.isDirectory()); + assertTrue(item.stat != null, "checking exist"); + assertTrue(item.stat.isDirectory(), "checking isDir"); } /* junit does a lousy job of comparing arrays diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java index d4f000576b066..41ece0a782447 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathExceptions.java @@ -18,15 +18,15 @@ package org.apache.hadoop.fs.shell; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathIOException; import org.apache.hadoop.ipc.RemoteException; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestPathExceptions { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPrintableString.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPrintableString.java index bb325b4832c10..49d5368bc3d36 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPrintableString.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPrintableString.java @@ -18,7 +18,7 @@ package org.apache.hadoop.fs.shell; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static
org.assertj.core.api.Assertions.assertThat; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTail.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTail.java index 31a5a4ee17801..e50f60f41ece4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTail.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTail.java @@ -18,12 +18,12 @@ package org.apache.hadoop.fs.shell; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.LinkedList; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Test class to verify Tail shell command. diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java index e8520181a1642..efa24514e1e00 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java @@ -19,6 +19,7 @@ package org.apache.hadoop.fs.shell; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY; +import static org.junit.jupiter.api.Assertions.assertThrows; import java.io.File; import java.io.FileOutputStream; @@ -36,7 +37,7 @@ import org.apache.hadoop.test.GenericTestUtils; import org.assertj.core.api.Assertions; import org.junit.Rule; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.junit.rules.Timeout; /** @@ -95,34 +96,40 @@ public void testEmptyAvroFile() throws Exception { Assertions.assertThat(output).describedAs("output").isEmpty(); } - @Test(expected = NullPointerException.class) + @Test public void testAvroFileInputStreamNullBuffer() throws Exception { - createFile(AVRO_FILENAME, generateWeatherAvroBinaryData()); - URI uri = new URI(AVRO_FILENAME); - Configuration conf = new Configuration(); - try (InputStream is = getInputStream(uri, conf)) { - is.read(null, 0, 10); - } + assertThrows(NullPointerException.class, () -> { + createFile(AVRO_FILENAME, generateWeatherAvroBinaryData()); + URI uri = new URI(AVRO_FILENAME); + Configuration conf = new Configuration(); + try (InputStream is = getInputStream(uri, conf)) { + is.read(null, 0, 10); + } + }); } - @Test(expected = IndexOutOfBoundsException.class) + @Test public void testAvroFileInputStreamNegativePosition() throws Exception { - createFile(AVRO_FILENAME, generateWeatherAvroBinaryData()); - URI uri = new URI(AVRO_FILENAME); - Configuration conf = new Configuration(); - try (InputStream is = getInputStream(uri, conf)) { - is.read(new byte[10], -1, 10); - } + assertThrows(IndexOutOfBoundsException.class, () -> { + createFile(AVRO_FILENAME, generateWeatherAvroBinaryData()); + URI uri = new URI(AVRO_FILENAME); + Configuration conf = new Configuration(); + try (InputStream is = getInputStream(uri, conf)) { + is.read(new byte[10], -1, 10); + } + }); } - @Test(expected = IndexOutOfBoundsException.class) + @Test public void testAvroFileInputStreamTooLong() throws Exception { - createFile(AVRO_FILENAME, generateWeatherAvroBinaryData()); - URI uri = new URI(AVRO_FILENAME); - Configuration conf = new Configuration(); - try (InputStream is = getInputStream(uri, conf)) { - is.read(new byte[10], 0, 11); - } + 
assertThrows(IndexOutOfBoundsException.class, () -> { + createFile(AVRO_FILENAME, generateWeatherAvroBinaryData()); + URI uri = new URI(AVRO_FILENAME); + Configuration conf = new Configuration(); + try (InputStream is = getInputStream(uri, conf)) { + is.read(new byte[10], 0, 11); + } + }); } @Test @@ -223,34 +230,40 @@ public void testEmptySequenceFile() throws Exception { Assertions.assertThat(output).describedAs("output").isEmpty(); } - @Test(expected = NullPointerException.class) + @Test public void testSequenceFileInputStreamNullBuffer() throws Exception { - Configuration conf = new Configuration(); - createNonWritableSequenceFile(SEQUENCE_FILENAME, conf); - URI uri = new URI(SEQUENCE_FILENAME); - try (InputStream is = getInputStream(uri, conf)) { - is.read(null, 0, 10); - } + assertThrows(NullPointerException.class, () -> { + Configuration conf = new Configuration(); + createNonWritableSequenceFile(SEQUENCE_FILENAME, conf); + URI uri = new URI(SEQUENCE_FILENAME); + try (InputStream is = getInputStream(uri, conf)) { + is.read(null, 0, 10); + } + }); } - @Test(expected = IndexOutOfBoundsException.class) + @Test public void testSequenceFileInputStreamNegativePosition() throws Exception { - Configuration conf = new Configuration(); - createNonWritableSequenceFile(SEQUENCE_FILENAME, conf); - URI uri = new URI(SEQUENCE_FILENAME); - try (InputStream is = getInputStream(uri, conf)) { - is.read(new byte[10], -1, 10); - } + assertThrows(IndexOutOfBoundsException.class, () -> { + Configuration conf = new Configuration(); + createNonWritableSequenceFile(SEQUENCE_FILENAME, conf); + URI uri = new URI(SEQUENCE_FILENAME); + try (InputStream is = getInputStream(uri, conf)) { + is.read(new byte[10], -1, 10); + } + }); } - @Test(expected = IndexOutOfBoundsException.class) + @Test public void testSequenceFileInputStreamTooLong() throws Exception { - Configuration conf = new Configuration(); - createNonWritableSequenceFile(SEQUENCE_FILENAME, conf); - URI uri = new URI(SEQUENCE_FILENAME); - try (InputStream is = getInputStream(uri, conf)) { - is.read(new byte[10], 0, 11); - } + assertThrows(IndexOutOfBoundsException.class, () -> { + Configuration conf = new Configuration(); + createNonWritableSequenceFile(SEQUENCE_FILENAME, conf); + URI uri = new URI(SEQUENCE_FILENAME); + try (InputStream is = getInputStream(uri, conf)) { + is.read(new byte[10], 0, 11); + } + }); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestXAttrCommands.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestXAttrCommands.java index af0a2c352d267..793853252dd85 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestXAttrCommands.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestXAttrCommands.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.fs.shell; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -27,9 +27,9 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FsShell; import org.apache.hadoop.util.ToolRunner; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class 
TestXAttrCommands { private final ByteArrayOutputStream errContent = @@ -37,7 +37,7 @@ public class TestXAttrCommands { private Configuration conf = null; private PrintStream initialStdErr; - @Before + @BeforeEach public void setup() throws IOException { errContent.reset(); initialStdErr = System.err; @@ -45,7 +45,7 @@ public void setup() throws IOException { conf = new Configuration(); } - @After + @AfterEach public void cleanUp() throws Exception { errContent.reset(); System.setErr(initialStdErr); @@ -54,41 +54,41 @@ public void cleanUp() throws Exception { @Test public void testGetfattrValidations() throws Exception { errContent.reset(); - assertFalse("getfattr should fail without path", - 0 == runCommand(new String[] { "-getfattr", "-d"})); + assertFalse( + 0 == runCommand(new String[] { "-getfattr", "-d"}), "getfattr should fail without path"); assertTrue(errContent.toString().contains(" is missing")); errContent.reset(); - assertFalse("getfattr should fail with extra argument", - 0 == runCommand(new String[] { "-getfattr", "extra", "-d", "/test"})); + assertFalse( + 0 == runCommand(new String[] { "-getfattr", "extra", "-d", "/test"}), "getfattr should fail with extra argument"); assertTrue(errContent.toString().contains("Too many arguments")); errContent.reset(); - assertFalse("getfattr should fail without \"-n name\" or \"-d\"", - 0 == runCommand(new String[] { "-getfattr", "/test"})); + assertFalse( + 0 == runCommand(new String[] { "-getfattr", "/test"}), "getfattr should fail without \"-n name\" or \"-d\""); assertTrue(errContent.toString().contains("Must specify '-n name' or '-d' option")); errContent.reset(); - assertFalse("getfattr should fail with invalid encoding", - 0 == runCommand(new String[] { "-getfattr", "-d", "-e", "aaa", "/test"})); + assertFalse( + 0 == runCommand(new String[] { "-getfattr", "-d", "-e", "aaa", "/test"}), "getfattr should fail with invalid encoding"); assertTrue(errContent.toString().contains("Invalid/unsupported encoding option specified: aaa")); } @Test public void testSetfattrValidations() throws Exception { errContent.reset(); - assertFalse("setfattr should fail without path", - 0 == runCommand(new String[] { "-setfattr", "-n", "user.a1" })); + assertFalse( + 0 == runCommand(new String[] { "-setfattr", "-n", "user.a1" }), "setfattr should fail without path"); assertTrue(errContent.toString().contains(" is missing")); errContent.reset(); - assertFalse("setfattr should fail with extra arguments", - 0 == runCommand(new String[] { "-setfattr", "extra", "-n", "user.a1", "/test"})); + assertFalse( + 0 == runCommand(new String[] { "-setfattr", "extra", "-n", "user.a1", "/test"}), "setfattr should fail with extra arguments"); assertTrue(errContent.toString().contains("Too many arguments")); errContent.reset(); - assertFalse("setfattr should fail without \"-n name\" or \"-x name\"", - 0 == runCommand(new String[] { "-setfattr", "/test"})); + assertFalse( + 0 == runCommand(new String[] { "-setfattr", "/test"}), "setfattr should fail without \"-n name\" or \"-x name\""); assertTrue(errContent.toString().contains("Must specify '-n name' or '-x name' option")); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestAnd.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestAnd.java index 9111062ef00a3..9d821505652eb 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestAnd.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestAnd.java @@ -18,7 +18,7 @@ package org.apache.hadoop.fs.shell.find; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.*; import java.io.IOException; @@ -29,7 +29,7 @@ import org.apache.hadoop.fs.shell.PathData; import org.junit.Rule; import org.junit.rules.Timeout; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestAnd { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFilterExpression.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFilterExpression.java index b03be79b03165..0c5050559d115 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFilterExpression.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFilterExpression.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.*; import java.io.IOException; @@ -26,10 +26,10 @@ import org.apache.hadoop.fs.shell.PathData; -import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; import org.junit.Rule; import org.junit.rules.Timeout; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestFilterExpression { private Expression expr; @@ -38,7 +38,7 @@ public class TestFilterExpression { @Rule public Timeout globalTimeout = new Timeout(10000, TimeUnit.MILLISECONDS); - @Before + @BeforeEach public void setup() { expr = mock(Expression.class); test = new FilterExpression(expr) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java index 959dc59a270b8..d7fb3075ffd66 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.*; import java.io.IOException; @@ -39,10 +39,10 @@ import org.apache.hadoop.fs.shell.find.Find; import org.apache.hadoop.fs.shell.find.FindOptions; import org.apache.hadoop.fs.shell.find.Result; -import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; import org.junit.Rule; import org.junit.rules.Timeout; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.InOrder; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; @@ -56,7 +56,7 @@ public class TestFind { private static FileSystem mockFs; private static Configuration conf; - @Before + @BeforeEach public void setup() throws IOException { mockFs = MockFileSystem.setup(); conf = mockFs.getConf(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestIname.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestIname.java index f6eafd77b5d2e..286faeb6d6702 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestIname.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestIname.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.apache.hadoop.fs.shell.find.TestHelper.*; import java.io.IOException; @@ -25,10 +25,10 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.shell.PathData; -import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; import org.junit.Rule; import org.junit.rules.Timeout; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestIname { private FileSystem mockFs; @@ -37,7 +37,7 @@ public class TestIname { @Rule public Timeout globalTimeout = new Timeout(10000, TimeUnit.MILLISECONDS); - @Before + @BeforeEach public void resetMock() throws IOException { mockFs = MockFileSystem.setup(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestName.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestName.java index 8217655b523bb..5ed67e10aa5b4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestName.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestName.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.apache.hadoop.fs.shell.find.TestHelper.*; import java.io.IOException; @@ -25,10 +25,10 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.shell.PathData; -import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; import org.junit.Rule; import org.junit.rules.Timeout; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestName { private FileSystem mockFs; @@ -37,7 +37,7 @@ public class TestName { @Rule public Timeout globalTimeout = new Timeout(10000, TimeUnit.MILLISECONDS); - @Before + @BeforeEach public void resetMock() throws IOException { mockFs = MockFileSystem.setup(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint.java index 5e861fc35f085..1e5c14d957d9a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.*; import java.io.IOException; @@ -28,10 +28,10 @@ import java.util.concurrent.TimeUnit; import org.apache.hadoop.fs.FileSystem; -import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; import org.junit.Rule; import org.junit.rules.Timeout; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestPrint { private FileSystem mockFs; @@ -39,7 +39,7 @@ public class TestPrint { @Rule public Timeout globalTimeout = new Timeout(10000, TimeUnit.MILLISECONDS); - @Before + @BeforeEach public void resetMock() throws IOException { mockFs = MockFileSystem.setup(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint0.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint0.java index 94c5c403bec38..3475df720e854 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint0.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint0.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.*; import java.io.IOException; @@ -28,10 +28,10 @@ import java.util.concurrent.TimeUnit; import org.apache.hadoop.fs.FileSystem; -import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; import org.junit.Rule; import org.junit.rules.Timeout; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestPrint0 { private FileSystem mockFs; @@ -39,7 +39,7 @@ public class TestPrint0 { @Rule public Timeout globalTimeout = new Timeout(10000, TimeUnit.MILLISECONDS); - @Before + @BeforeEach public void resetMock() throws IOException { mockFs = MockFileSystem.setup(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestResult.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestResult.java index 058a0923a43a5..77d3d2b5e5d9d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestResult.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestResult.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import org.junit.Rule; import org.junit.rules.Timeout; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.concurrent.TimeUnit; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java index 5698a08c7e16b..c40eaf0bbb400 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java @@ -21,7 +21,7 @@ import java.io.IOException; import java.util.Random; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -32,9 +32,9 @@ import static org.apache.hadoop.fs.store.DataBlocks.DATA_BLOCKS_BUFFER_ARRAY; import static org.apache.hadoop.fs.store.DataBlocks.DATA_BLOCKS_BUFFER_DISK; import static org.apache.hadoop.fs.store.DataBlocks.DATA_BLOCKS_BYTEBUFFER; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * UTs to test {@link DataBlocks} functionalities. @@ -86,13 +86,13 @@ private void assertWriteBlock(DataBlocks.DataBlock dataBlock) // Verify DataBlock state is at Writing. dataBlock.verifyState(DataBlocks.DataBlock.DestState.Writing); // Verify that the DataBlock has data written. 
- assertTrue("Expected Data block to have data", dataBlock.hasData()); + assertTrue(dataBlock.hasData(), "Expected Data block to have data"); // Verify the size of data. - assertEquals("Mismatch in data size in block", ONE_KB, - dataBlock.dataSize()); + assertEquals(ONE_KB +, dataBlock.dataSize(), "Mismatch in data size in block"); // Verify that no capacity is left in the data block to write more. - assertFalse("Expected the data block to have no capacity to write 1 byte " - + "of data", dataBlock.hasCapacity(1)); + assertFalse(dataBlock.hasCapacity(1), "Expected the data block to have no capacity to write 1 byte " + + "of data"); } /** @@ -110,8 +110,8 @@ private void assertToByteArray(DataBlocks.DataBlock dataBlock) byte[] bytesWritten = blockUploadData.toByteArray(); // Verify that we can call toByteArray() more than once and gives the // same byte[]. - assertEquals("Mismatch in byteArray provided by toByteArray() the second " - + "time", bytesWritten, blockUploadData.toByteArray()); + assertEquals(bytesWritten, blockUploadData.toByteArray(), "Mismatch in byteArray provided by toByteArray() the second " + + "time"); IOUtils.close(blockUploadData); // Verify that after closing blockUploadData, we can't call toByteArray(). LambdaTestUtils.intercept(IllegalStateException.class, diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestEtagChecksum.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestEtagChecksum.java index ef9613f5af127..767e386626414 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestEtagChecksum.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestEtagChecksum.java @@ -20,8 +20,8 @@ import java.io.IOException; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataOutputBuffer; @@ -29,7 +29,7 @@ /** * Unit test of etag operations. 
*/ -public class TestEtagChecksum extends Assert { +public class TestEtagChecksum extends Assertions { private final EtagChecksum empty1 = tag(""); private final EtagChecksum empty2 = tag(""); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java index 8267b214d53bc..10955f215c7bd 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java @@ -33,10 +33,13 @@ import org.apache.hadoop.fs.FsConstants; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.AclEntry; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; + +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.Mockito.*; public class TestChRootedFileSystem { @@ -45,7 +48,7 @@ public class TestChRootedFileSystem { Path chrootedTo; FileSystemTestHelper fileSystemTestHelper; - @Before + @BeforeEach public void setUp() throws Exception { // create the test root on local_fs Configuration conf = new Configuration(); @@ -62,7 +65,7 @@ public void setUp() throws Exception { fSys = new ChRootedFileSystem(chrootedTo.toUri(), conf); } - @After + @AfterEach public void tearDown() throws Exception { fSysTarget.delete(chrootedTo, true); } @@ -70,17 +73,17 @@ public void tearDown() throws Exception { @Test public void testURI() { URI uri = fSys.getUri(); - Assert.assertEquals(chrootedTo.toUri(), uri); + Assertions.assertEquals(chrootedTo.toUri(), uri); } @Test public void testBasicPaths() { URI uri = fSys.getUri(); - Assert.assertEquals(chrootedTo.toUri(), uri); - Assert.assertEquals(fSys.makeQualified( + Assertions.assertEquals(chrootedTo.toUri(), uri); + Assertions.assertEquals(fSys.makeQualified( new Path(System.getProperty("user.home"))), fSys.getWorkingDirectory()); - Assert.assertEquals(fSys.makeQualified( + Assertions.assertEquals(fSys.makeQualified( new Path(System.getProperty("user.home"))), fSys.getHomeDirectory()); /* @@ -90,13 +93,13 @@ public void testBasicPaths() { * But if we were to fix Path#makeQualified() then the next test should * have been: - Assert.assertEquals( + Assertions.assertEquals( new Path(chrootedTo + "/foo/bar").makeQualified( FsConstants.LOCAL_FS_URI, null), fSys.makeQualified(new Path( "/foo/bar"))); */ - Assert.assertEquals( + Assertions.assertEquals( new Path("/foo/bar").makeQualified(FsConstants.LOCAL_FS_URI, null), fSys.makeQualified(new Path("/foo/bar"))); } @@ -113,50 +116,50 @@ public void testCreateDelete() throws IOException { // Create file fileSystemTestHelper.createFile(fSys, "/foo"); - Assert.assertTrue(fSys.isFile(new Path("/foo"))); - Assert.assertTrue(fSysTarget.isFile(new Path(chrootedTo, "foo"))); + Assertions.assertTrue(fSys.isFile(new Path("/foo"))); + Assertions.assertTrue(fSysTarget.isFile(new Path(chrootedTo, "foo"))); // Create file with recursive dir fileSystemTestHelper.createFile(fSys, "/newDir/foo"); - Assert.assertTrue(fSys.isFile(new Path("/newDir/foo"))); - Assert.assertTrue(fSysTarget.isFile(new Path(chrootedTo,"newDir/foo"))); + 
Assertions.assertTrue(fSys.isFile(new Path("/newDir/foo"))); + Assertions.assertTrue(fSysTarget.isFile(new Path(chrootedTo,"newDir/foo"))); // Delete the created file - Assert.assertTrue(fSys.delete(new Path("/newDir/foo"), false)); - Assert.assertFalse(fSys.exists(new Path("/newDir/foo"))); - Assert.assertFalse(fSysTarget.exists(new Path(chrootedTo, "newDir/foo"))); + Assertions.assertTrue(fSys.delete(new Path("/newDir/foo"), false)); + Assertions.assertFalse(fSys.exists(new Path("/newDir/foo"))); + Assertions.assertFalse(fSysTarget.exists(new Path(chrootedTo, "newDir/foo"))); // Create file with a 2 component dirs recursively fileSystemTestHelper.createFile(fSys, "/newDir/newDir2/foo"); - Assert.assertTrue(fSys.isFile(new Path("/newDir/newDir2/foo"))); - Assert.assertTrue(fSysTarget.isFile(new Path(chrootedTo,"newDir/newDir2/foo"))); + Assertions.assertTrue(fSys.isFile(new Path("/newDir/newDir2/foo"))); + Assertions.assertTrue(fSysTarget.isFile(new Path(chrootedTo,"newDir/newDir2/foo"))); // Delete the created file - Assert.assertTrue(fSys.delete(new Path("/newDir/newDir2/foo"), false)); - Assert.assertFalse(fSys.exists(new Path("/newDir/newDir2/foo"))); - Assert.assertFalse(fSysTarget.exists(new Path(chrootedTo,"newDir/newDir2/foo"))); + Assertions.assertTrue(fSys.delete(new Path("/newDir/newDir2/foo"), false)); + Assertions.assertFalse(fSys.exists(new Path("/newDir/newDir2/foo"))); + Assertions.assertFalse(fSysTarget.exists(new Path(chrootedTo,"newDir/newDir2/foo"))); } @Test public void testMkdirDelete() throws IOException { fSys.mkdirs(fileSystemTestHelper.getTestRootPath(fSys, "/dirX")); - Assert.assertTrue(fSys.isDirectory(new Path("/dirX"))); - Assert.assertTrue(fSysTarget.isDirectory(new Path(chrootedTo,"dirX"))); + Assertions.assertTrue(fSys.isDirectory(new Path("/dirX"))); + Assertions.assertTrue(fSysTarget.isDirectory(new Path(chrootedTo,"dirX"))); fSys.mkdirs(fileSystemTestHelper.getTestRootPath(fSys, "/dirX/dirY")); - Assert.assertTrue(fSys.isDirectory(new Path("/dirX/dirY"))); - Assert.assertTrue(fSysTarget.isDirectory(new Path(chrootedTo,"dirX/dirY"))); + Assertions.assertTrue(fSys.isDirectory(new Path("/dirX/dirY"))); + Assertions.assertTrue(fSysTarget.isDirectory(new Path(chrootedTo,"dirX/dirY"))); // Delete the created dir - Assert.assertTrue(fSys.delete(new Path("/dirX/dirY"), false)); - Assert.assertFalse(fSys.exists(new Path("/dirX/dirY"))); - Assert.assertFalse(fSysTarget.exists(new Path(chrootedTo,"dirX/dirY"))); + Assertions.assertTrue(fSys.delete(new Path("/dirX/dirY"), false)); + Assertions.assertFalse(fSys.exists(new Path("/dirX/dirY"))); + Assertions.assertFalse(fSysTarget.exists(new Path(chrootedTo,"dirX/dirY"))); - Assert.assertTrue(fSys.delete(new Path("/dirX"), false)); - Assert.assertFalse(fSys.exists(new Path("/dirX"))); - Assert.assertFalse(fSysTarget.exists(new Path(chrootedTo,"dirX"))); + Assertions.assertTrue(fSys.delete(new Path("/dirX"), false)); + Assertions.assertFalse(fSys.exists(new Path("/dirX"))); + Assertions.assertFalse(fSysTarget.exists(new Path(chrootedTo,"dirX"))); } @Test @@ -164,19 +167,19 @@ public void testRename() throws IOException { // Rename a file fileSystemTestHelper.createFile(fSys, "/newDir/foo"); fSys.rename(new Path("/newDir/foo"), new Path("/newDir/fooBar")); - Assert.assertFalse(fSys.exists(new Path("/newDir/foo"))); - Assert.assertFalse(fSysTarget.exists(new Path(chrootedTo,"newDir/foo"))); - Assert.assertTrue(fSys.isFile(fileSystemTestHelper.getTestRootPath(fSys,"/newDir/fooBar"))); - 
Assert.assertTrue(fSysTarget.isFile(new Path(chrootedTo,"newDir/fooBar"))); + Assertions.assertFalse(fSys.exists(new Path("/newDir/foo"))); + Assertions.assertFalse(fSysTarget.exists(new Path(chrootedTo,"newDir/foo"))); + Assertions.assertTrue(fSys.isFile(fileSystemTestHelper.getTestRootPath(fSys,"/newDir/fooBar"))); + Assertions.assertTrue(fSysTarget.isFile(new Path(chrootedTo,"newDir/fooBar"))); // Rename a dir fSys.mkdirs(new Path("/newDir/dirFoo")); fSys.rename(new Path("/newDir/dirFoo"), new Path("/newDir/dirFooBar")); - Assert.assertFalse(fSys.exists(new Path("/newDir/dirFoo"))); - Assert.assertFalse(fSysTarget.exists(new Path(chrootedTo,"newDir/dirFoo"))); - Assert.assertTrue(fSys.isDirectory(fileSystemTestHelper.getTestRootPath(fSys,"/newDir/dirFooBar"))); - Assert.assertTrue(fSysTarget.isDirectory(new Path(chrootedTo,"newDir/dirFooBar"))); + Assertions.assertFalse(fSys.exists(new Path("/newDir/dirFoo"))); + Assertions.assertFalse(fSysTarget.exists(new Path(chrootedTo,"newDir/dirFoo"))); + Assertions.assertTrue(fSys.isDirectory(fileSystemTestHelper.getTestRootPath(fSys,"/newDir/dirFooBar"))); + Assertions.assertTrue(fSysTarget.isDirectory(new Path(chrootedTo,"newDir/dirFooBar"))); } @Test @@ -184,8 +187,8 @@ public void testGetContentSummary() throws IOException { // GetContentSummary of a dir fSys.mkdirs(new Path("/newDir/dirFoo")); ContentSummary cs = fSys.getContentSummary(new Path("/newDir/dirFoo")); - Assert.assertEquals(-1L, cs.getQuota()); - Assert.assertEquals(-1L, cs.getSpaceQuota()); + Assertions.assertEquals(-1L, cs.getQuota()); + Assertions.assertEquals(-1L, cs.getSpaceQuota()); } /** @@ -207,15 +210,15 @@ public void testRenameAcrossFs() throws IOException { public void testList() throws IOException { FileStatus fs = fSys.getFileStatus(new Path("/")); - Assert.assertTrue(fs.isDirectory()); + Assertions.assertTrue(fs.isDirectory()); // should return the full path not the chrooted path - Assert.assertEquals(fs.getPath(), chrootedTo); + Assertions.assertEquals(fs.getPath(), chrootedTo); // list on Slash FileStatus[] dirPaths = fSys.listStatus(new Path("/")); - Assert.assertEquals(0, dirPaths.length); + Assertions.assertEquals(0, dirPaths.length); @@ -226,21 +229,21 @@ public void testList() throws IOException { fSys.mkdirs(new Path("/dirX/dirXX")); dirPaths = fSys.listStatus(new Path("/")); - Assert.assertEquals(4, dirPaths.length); // note 2 crc files + Assertions.assertEquals(4, dirPaths.length); // note 2 crc files // Note the the file status paths are the full paths on target fs = FileSystemTestHelper.containsPath(new Path(chrootedTo, "foo"), dirPaths); - Assert.assertNotNull(fs); - Assert.assertTrue(fs.isFile()); + Assertions.assertNotNull(fs); + Assertions.assertTrue(fs.isFile()); fs = FileSystemTestHelper.containsPath(new Path(chrootedTo, "bar"), dirPaths); - Assert.assertNotNull(fs); - Assert.assertTrue(fs.isFile()); + Assertions.assertNotNull(fs); + Assertions.assertTrue(fs.isFile()); fs = FileSystemTestHelper.containsPath(new Path(chrootedTo, "dirX"), dirPaths); - Assert.assertNotNull(fs); - Assert.assertTrue(fs.isDirectory()); + Assertions.assertNotNull(fs); + Assertions.assertTrue(fs.isDirectory()); fs = FileSystemTestHelper.containsPath(new Path(chrootedTo, "dirY"), dirPaths); - Assert.assertNotNull(fs); - Assert.assertTrue(fs.isDirectory()); + Assertions.assertNotNull(fs); + Assertions.assertTrue(fs.isDirectory()); } @Test @@ -250,31 +253,31 @@ public void testWorkingDirectory() throws Exception { fSys.mkdirs(new Path("/testWd")); Path workDir = new 
Path("/testWd"); fSys.setWorkingDirectory(workDir); - Assert.assertEquals(workDir, fSys.getWorkingDirectory()); + Assertions.assertEquals(workDir, fSys.getWorkingDirectory()); fSys.setWorkingDirectory(new Path(".")); - Assert.assertEquals(workDir, fSys.getWorkingDirectory()); + Assertions.assertEquals(workDir, fSys.getWorkingDirectory()); fSys.setWorkingDirectory(new Path("..")); - Assert.assertEquals(workDir.getParent(), fSys.getWorkingDirectory()); + Assertions.assertEquals(workDir.getParent(), fSys.getWorkingDirectory()); // cd using a relative path // Go back to our test root workDir = new Path("/testWd"); fSys.setWorkingDirectory(workDir); - Assert.assertEquals(workDir, fSys.getWorkingDirectory()); + Assertions.assertEquals(workDir, fSys.getWorkingDirectory()); Path relativeDir = new Path("existingDir1"); Path absoluteDir = new Path(workDir,"existingDir1"); fSys.mkdirs(absoluteDir); fSys.setWorkingDirectory(relativeDir); - Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory()); + Assertions.assertEquals(absoluteDir, fSys.getWorkingDirectory()); // cd using a absolute path absoluteDir = new Path("/test/existingDir2"); fSys.mkdirs(absoluteDir); fSys.setWorkingDirectory(absoluteDir); - Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory()); + Assertions.assertEquals(absoluteDir, fSys.getWorkingDirectory()); // Now open a file relative to the wd we just set above. Path absoluteFooPath = new Path(absoluteDir, "foo"); @@ -283,14 +286,14 @@ public void testWorkingDirectory() throws Exception { // Now mkdir relative to the dir we cd'ed to fSys.mkdirs(new Path("newDir")); - Assert.assertTrue(fSys.isDirectory(new Path(absoluteDir, "newDir"))); + Assertions.assertTrue(fSys.isDirectory(new Path(absoluteDir, "newDir"))); /* Filesystem impls (RawLocal and DistributedFileSystem do not check * for existing of working dir absoluteDir = getTestRootPath(fSys, "nonexistingPath"); try { fSys.setWorkingDirectory(absoluteDir); - Assert.fail("cd to non existing dir should have failed"); + Assertions.fail("cd to non existing dir should have failed"); } catch (Exception e) { // Exception as expected } @@ -301,7 +304,7 @@ public void testWorkingDirectory() throws Exception { absoluteDir = new Path(LOCAL_FS_ROOT_URI + "/existingDir"); fSys.mkdirs(absoluteDir); fSys.setWorkingDirectory(absoluteDir); - Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory()); + Assertions.assertEquals(absoluteDir, fSys.getWorkingDirectory()); } @@ -311,15 +314,17 @@ public void testWorkingDirectory() throws Exception { @Test public void testResolvePath() throws IOException { - Assert.assertEquals(chrootedTo, fSys.resolvePath(new Path("/"))); + Assertions.assertEquals(chrootedTo, fSys.resolvePath(new Path("/"))); fileSystemTestHelper.createFile(fSys, "/foo"); - Assert.assertEquals(new Path(chrootedTo, "foo"), + Assertions.assertEquals(new Path(chrootedTo, "foo"), fSys.resolvePath(new Path("/foo"))); } - @Test(expected=FileNotFoundException.class) + @Test public void testResolvePathNonExisting() throws IOException { + assertThrows(FileNotFoundException.class, () -> { fSys.resolvePath(new Path("/nonExisting")); + }); } @Test @@ -435,7 +440,8 @@ public void initialize(URI name, Configuration conf) throws IOException { } } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testCreateSnapshot() throws Exception { Path snapRootPath = new Path("/snapPath"); Path chRootedSnapRootPath = new Path("/a/b/snapPath"); @@ -452,7 +458,8 @@ public void testCreateSnapshot() throws Exception { 
verify(mockFs).createSnapshot(chRootedSnapRootPath, "snap1"); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testDeleteSnapshot() throws Exception { Path snapRootPath = new Path("/snapPath"); Path chRootedSnapRootPath = new Path("/a/b/snapPath"); @@ -469,7 +476,8 @@ public void testDeleteSnapshot() throws Exception { verify(mockFs).deleteSnapshot(chRootedSnapRootPath, "snap1"); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testRenameSnapshot() throws Exception { Path snapRootPath = new Path("/snapPath"); Path chRootedSnapRootPath = new Path("/a/b/snapPath"); @@ -487,7 +495,8 @@ public void testRenameSnapshot() throws Exception { "snapNewName"); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testSetStoragePolicy() throws Exception { Path storagePolicyPath = new Path("/storagePolicy"); Path chRootedStoragePolicyPath = new Path("/a/b/storagePolicy"); @@ -504,7 +513,8 @@ public void testSetStoragePolicy() throws Exception { verify(mockFs).setStoragePolicy(chRootedStoragePolicyPath, "HOT"); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testUnsetStoragePolicy() throws Exception { Path storagePolicyPath = new Path("/storagePolicy"); Path chRootedStoragePolicyPath = new Path("/a/b/storagePolicy"); @@ -521,7 +531,8 @@ public void testUnsetStoragePolicy() throws Exception { verify(mockFs).unsetStoragePolicy(chRootedStoragePolicyPath); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testGetStoragePolicy() throws Exception { Path storagePolicyPath = new Path("/storagePolicy"); Path chRootedStoragePolicyPath = new Path("/a/b/storagePolicy"); @@ -538,7 +549,8 @@ public void testGetStoragePolicy() throws Exception { verify(mockFs).getStoragePolicy(chRootedStoragePolicyPath); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testGetAllStoragePolicy() throws Exception { Configuration conf = new Configuration(); conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java index 20825e312c9e5..7736af85f4b1a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java @@ -23,6 +23,7 @@ import java.util.EnumSet; import static org.apache.hadoop.fs.FileContextTestHelper.*; +import static org.junit.jupiter.api.Assertions.assertThrows; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.AbstractFileSystem; @@ -33,10 +34,11 @@ import org.apache.hadoop.fs.FsConstants; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.viewfs.ChRootedFs; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.mockito.Mockito; public class TestChRootedFs { @@ -45,7 +47,7 @@ public class TestChRootedFs { FileContext fcTarget; // Path chrootedTo; - @Before + @BeforeEach public void setUp() throws Exception { // create the test root on local_fs fcTarget = FileContext.getLocalFSFileContext(); @@ -62,7 +64,7 @@ public void setUp() throws Exception { new 
ChRootedFs(fcTarget.getDefaultFileSystem(), chrootedTo), conf); } - @After + @AfterEach public void tearDown() throws Exception { fcTarget.delete(chrootedTo, true); } @@ -71,11 +73,11 @@ public void tearDown() throws Exception { @Test public void testBasicPaths() { URI uri = fc.getDefaultFileSystem().getUri(); - Assert.assertEquals(chrootedTo.toUri(), uri); - Assert.assertEquals(fc.makeQualified( + Assertions.assertEquals(chrootedTo.toUri(), uri); + Assertions.assertEquals(fc.makeQualified( new Path(System.getProperty("user.home"))), fc.getWorkingDirectory()); - Assert.assertEquals(fc.makeQualified( + Assertions.assertEquals(fc.makeQualified( new Path(System.getProperty("user.home"))), fc.getHomeDirectory()); /* @@ -85,13 +87,13 @@ public void testBasicPaths() { * But if we were to fix Path#makeQualified() then the next test should * have been: - Assert.assertEquals( + Assertions.assertEquals( new Path(chrootedTo + "/foo/bar").makeQualified( FsConstants.LOCAL_FS_URI, null), fc.makeQualified(new Path( "/foo/bar"))); */ - Assert.assertEquals( + Assertions.assertEquals( new Path("/foo/bar").makeQualified(FsConstants.LOCAL_FS_URI, null), fc.makeQualified(new Path("/foo/bar"))); } @@ -109,50 +111,50 @@ public void testCreateDelete() throws IOException { // Create file fileContextTestHelper.createFileNonRecursive(fc, "/foo"); - Assert.assertTrue(isFile(fc, new Path("/foo"))); - Assert.assertTrue(isFile(fcTarget, new Path(chrootedTo, "foo"))); + Assertions.assertTrue(isFile(fc, new Path("/foo"))); + Assertions.assertTrue(isFile(fcTarget, new Path(chrootedTo, "foo"))); // Create file with recursive dir fileContextTestHelper.createFile(fc, "/newDir/foo"); - Assert.assertTrue(isFile(fc, new Path("/newDir/foo"))); - Assert.assertTrue(isFile(fcTarget, new Path(chrootedTo,"newDir/foo"))); + Assertions.assertTrue(isFile(fc, new Path("/newDir/foo"))); + Assertions.assertTrue(isFile(fcTarget, new Path(chrootedTo,"newDir/foo"))); // Delete the created file - Assert.assertTrue(fc.delete(new Path("/newDir/foo"), false)); - Assert.assertFalse(exists(fc, new Path("/newDir/foo"))); - Assert.assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/foo"))); + Assertions.assertTrue(fc.delete(new Path("/newDir/foo"), false)); + Assertions.assertFalse(exists(fc, new Path("/newDir/foo"))); + Assertions.assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/foo"))); // Create file with a 2 component dirs recursively fileContextTestHelper.createFile(fc, "/newDir/newDir2/foo"); - Assert.assertTrue(isFile(fc, new Path("/newDir/newDir2/foo"))); - Assert.assertTrue(isFile(fcTarget, new Path(chrootedTo,"newDir/newDir2/foo"))); + Assertions.assertTrue(isFile(fc, new Path("/newDir/newDir2/foo"))); + Assertions.assertTrue(isFile(fcTarget, new Path(chrootedTo,"newDir/newDir2/foo"))); // Delete the created file - Assert.assertTrue(fc.delete(new Path("/newDir/newDir2/foo"), false)); - Assert.assertFalse(exists(fc, new Path("/newDir/newDir2/foo"))); - Assert.assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/newDir2/foo"))); + Assertions.assertTrue(fc.delete(new Path("/newDir/newDir2/foo"), false)); + Assertions.assertFalse(exists(fc, new Path("/newDir/newDir2/foo"))); + Assertions.assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/newDir2/foo"))); } @Test public void testMkdirDelete() throws IOException { fc.mkdir(fileContextTestHelper.getTestRootPath(fc, "/dirX"), FileContext.DEFAULT_PERM, false); - Assert.assertTrue(isDir(fc, new Path("/dirX"))); - Assert.assertTrue(isDir(fcTarget, new Path(chrootedTo,"dirX"))); + 
Assertions.assertTrue(isDir(fc, new Path("/dirX"))); + Assertions.assertTrue(isDir(fcTarget, new Path(chrootedTo,"dirX"))); fc.mkdir(fileContextTestHelper.getTestRootPath(fc, "/dirX/dirY"), FileContext.DEFAULT_PERM, false); - Assert.assertTrue(isDir(fc, new Path("/dirX/dirY"))); - Assert.assertTrue(isDir(fcTarget, new Path(chrootedTo,"dirX/dirY"))); + Assertions.assertTrue(isDir(fc, new Path("/dirX/dirY"))); + Assertions.assertTrue(isDir(fcTarget, new Path(chrootedTo,"dirX/dirY"))); // Delete the created dir - Assert.assertTrue(fc.delete(new Path("/dirX/dirY"), false)); - Assert.assertFalse(exists(fc, new Path("/dirX/dirY"))); - Assert.assertFalse(exists(fcTarget, new Path(chrootedTo,"dirX/dirY"))); + Assertions.assertTrue(fc.delete(new Path("/dirX/dirY"), false)); + Assertions.assertFalse(exists(fc, new Path("/dirX/dirY"))); + Assertions.assertFalse(exists(fcTarget, new Path(chrootedTo,"dirX/dirY"))); - Assert.assertTrue(fc.delete(new Path("/dirX"), false)); - Assert.assertFalse(exists(fc, new Path("/dirX"))); - Assert.assertFalse(exists(fcTarget, new Path(chrootedTo,"dirX"))); + Assertions.assertTrue(fc.delete(new Path("/dirX"), false)); + Assertions.assertFalse(exists(fc, new Path("/dirX"))); + Assertions.assertFalse(exists(fcTarget, new Path(chrootedTo,"dirX"))); } @Test @@ -160,19 +162,19 @@ public void testRename() throws IOException { // Rename a file fileContextTestHelper.createFile(fc, "/newDir/foo"); fc.rename(new Path("/newDir/foo"), new Path("/newDir/fooBar")); - Assert.assertFalse(exists(fc, new Path("/newDir/foo"))); - Assert.assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/foo"))); - Assert.assertTrue(isFile(fc, fileContextTestHelper.getTestRootPath(fc,"/newDir/fooBar"))); - Assert.assertTrue(isFile(fcTarget, new Path(chrootedTo,"newDir/fooBar"))); + Assertions.assertFalse(exists(fc, new Path("/newDir/foo"))); + Assertions.assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/foo"))); + Assertions.assertTrue(isFile(fc, fileContextTestHelper.getTestRootPath(fc,"/newDir/fooBar"))); + Assertions.assertTrue(isFile(fcTarget, new Path(chrootedTo,"newDir/fooBar"))); // Rename a dir fc.mkdir(new Path("/newDir/dirFoo"), FileContext.DEFAULT_PERM, false); fc.rename(new Path("/newDir/dirFoo"), new Path("/newDir/dirFooBar")); - Assert.assertFalse(exists(fc, new Path("/newDir/dirFoo"))); - Assert.assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/dirFoo"))); - Assert.assertTrue(isDir(fc, fileContextTestHelper.getTestRootPath(fc,"/newDir/dirFooBar"))); - Assert.assertTrue(isDir(fcTarget, new Path(chrootedTo,"newDir/dirFooBar"))); + Assertions.assertFalse(exists(fc, new Path("/newDir/dirFoo"))); + Assertions.assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/dirFoo"))); + Assertions.assertTrue(isDir(fc, fileContextTestHelper.getTestRootPath(fc,"/newDir/dirFooBar"))); + Assertions.assertTrue(isDir(fcTarget, new Path(chrootedTo,"newDir/dirFooBar"))); } @@ -193,15 +195,15 @@ public void testRenameAcrossFs() throws IOException { public void testList() throws IOException { FileStatus fs = fc.getFileStatus(new Path("/")); - Assert.assertTrue(fs.isDirectory()); + Assertions.assertTrue(fs.isDirectory()); // should return the full path not the chrooted path - Assert.assertEquals(fs.getPath(), chrootedTo); + Assertions.assertEquals(fs.getPath(), chrootedTo); // list on Slash FileStatus[] dirPaths = fc.util().listStatus(new Path("/")); - Assert.assertEquals(0, dirPaths.length); + Assertions.assertEquals(0, dirPaths.length); @@ -213,21 +215,21 @@ public void testList() throws 
IOException { fc.mkdir(new Path("/dirX/dirXX"), FileContext.DEFAULT_PERM, false); dirPaths = fc.util().listStatus(new Path("/")); - Assert.assertEquals(4, dirPaths.length); + Assertions.assertEquals(4, dirPaths.length); // Note the the file status paths are the full paths on target fs = fileContextTestHelper.containsPath(fcTarget, "foo", dirPaths); - Assert.assertNotNull(fs); - Assert.assertTrue(fs.isFile()); + Assertions.assertNotNull(fs); + Assertions.assertTrue(fs.isFile()); fs = fileContextTestHelper.containsPath(fcTarget, "bar", dirPaths); - Assert.assertNotNull(fs); - Assert.assertTrue(fs.isFile()); + Assertions.assertNotNull(fs); + Assertions.assertTrue(fs.isFile()); fs = fileContextTestHelper.containsPath(fcTarget, "dirX", dirPaths); - Assert.assertNotNull(fs); - Assert.assertTrue(fs.isDirectory()); + Assertions.assertNotNull(fs); + Assertions.assertTrue(fs.isDirectory()); fs = fileContextTestHelper.containsPath(fcTarget, "dirY", dirPaths); - Assert.assertNotNull(fs); - Assert.assertTrue(fs.isDirectory()); + Assertions.assertNotNull(fs); + Assertions.assertTrue(fs.isDirectory()); } @Test @@ -238,13 +240,13 @@ public void testWorkingDirectory() throws Exception { Path workDir = new Path("/testWd"); Path fqWd = fc.makeQualified(workDir); fc.setWorkingDirectory(workDir); - Assert.assertEquals(fqWd, fc.getWorkingDirectory()); + Assertions.assertEquals(fqWd, fc.getWorkingDirectory()); fc.setWorkingDirectory(new Path(".")); - Assert.assertEquals(fqWd, fc.getWorkingDirectory()); + Assertions.assertEquals(fqWd, fc.getWorkingDirectory()); fc.setWorkingDirectory(new Path("..")); - Assert.assertEquals(fqWd.getParent(), fc.getWorkingDirectory()); + Assertions.assertEquals(fqWd.getParent(), fc.getWorkingDirectory()); // cd using a relative path @@ -252,20 +254,20 @@ public void testWorkingDirectory() throws Exception { workDir = new Path("/testWd"); fqWd = fc.makeQualified(workDir); fc.setWorkingDirectory(workDir); - Assert.assertEquals(fqWd, fc.getWorkingDirectory()); + Assertions.assertEquals(fqWd, fc.getWorkingDirectory()); Path relativeDir = new Path("existingDir1"); Path absoluteDir = new Path(workDir,"existingDir1"); fc.mkdir(absoluteDir, FileContext.DEFAULT_PERM, true); Path fqAbsoluteDir = fc.makeQualified(absoluteDir); fc.setWorkingDirectory(relativeDir); - Assert.assertEquals(fqAbsoluteDir, fc.getWorkingDirectory()); + Assertions.assertEquals(fqAbsoluteDir, fc.getWorkingDirectory()); // cd using a absolute path absoluteDir = new Path("/test/existingDir2"); fqAbsoluteDir = fc.makeQualified(absoluteDir); fc.mkdir(absoluteDir, FileContext.DEFAULT_PERM, true); fc.setWorkingDirectory(absoluteDir); - Assert.assertEquals(fqAbsoluteDir, fc.getWorkingDirectory()); + Assertions.assertEquals(fqAbsoluteDir, fc.getWorkingDirectory()); // Now open a file relative to the wd we just set above. 
Path absolutePath = new Path(absoluteDir, "foo"); @@ -274,12 +276,12 @@ public void testWorkingDirectory() throws Exception { // Now mkdir relative to the dir we cd'ed to fc.mkdir(new Path("newDir"), FileContext.DEFAULT_PERM, true); - Assert.assertTrue(isDir(fc, new Path(absoluteDir, "newDir"))); + Assertions.assertTrue(isDir(fc, new Path(absoluteDir, "newDir"))); absoluteDir = fileContextTestHelper.getTestRootPath(fc, "nonexistingPath"); try { fc.setWorkingDirectory(absoluteDir); - Assert.fail("cd to non existing dir should have failed"); + Assertions.fail("cd to non existing dir should have failed"); } catch (Exception e) { // Exception as expected } @@ -289,7 +291,7 @@ public void testWorkingDirectory() throws Exception { absoluteDir = new Path(LOCAL_FS_ROOT_URI + "/existingDir"); fc.mkdir(absoluteDir, FileContext.DEFAULT_PERM, true); fc.setWorkingDirectory(absoluteDir); - Assert.assertEquals(absoluteDir, fc.getWorkingDirectory()); + Assertions.assertEquals(absoluteDir, fc.getWorkingDirectory()); } @@ -299,15 +301,17 @@ public void testWorkingDirectory() throws Exception { @Test public void testResolvePath() throws IOException { - Assert.assertEquals(chrootedTo, fc.getDefaultFileSystem().resolvePath(new Path("/"))); + Assertions.assertEquals(chrootedTo, fc.getDefaultFileSystem().resolvePath(new Path("/"))); fileContextTestHelper.createFile(fc, "/foo"); - Assert.assertEquals(new Path(chrootedTo, "foo"), + Assertions.assertEquals(new Path(chrootedTo, "foo"), fc.getDefaultFileSystem().resolvePath(new Path("/foo"))); } - @Test(expected=FileNotFoundException.class) + @Test public void testResolvePathNonExisting() throws IOException { + assertThrows(FileNotFoundException.class, () -> { fc.getDefaultFileSystem().resolvePath(new Path("/nonExisting")); + }); } @Test @@ -315,7 +319,7 @@ public void testIsValidNameValidInBaseFs() throws Exception { AbstractFileSystem baseFs = Mockito.spy(fc.getDefaultFileSystem()); ChRootedFs chRootedFs = new ChRootedFs(baseFs, new Path("/chroot")); Mockito.doReturn(true).when(baseFs).isValidName(Mockito.anyString()); - Assert.assertTrue(chRootedFs.isValidName("/test")); + Assertions.assertTrue(chRootedFs.isValidName("/test")); Mockito.verify(baseFs).isValidName("/chroot/test"); } @@ -324,11 +328,12 @@ public void testIsValidNameInvalidInBaseFs() throws Exception { AbstractFileSystem baseFs = Mockito.spy(fc.getDefaultFileSystem()); ChRootedFs chRootedFs = new ChRootedFs(baseFs, new Path("/chroot")); Mockito.doReturn(false).when(baseFs).isValidName(Mockito.anyString()); - Assert.assertFalse(chRootedFs.isValidName("/test")); + Assertions.assertFalse(chRootedFs.isValidName("/test")); Mockito.verify(baseFs).isValidName("/chroot/test"); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testCreateSnapshot() throws Exception { Path snapRootPath = new Path("/snapPath"); Path chRootedSnapRootPath = new Path( @@ -337,12 +342,13 @@ public void testCreateSnapshot() throws Exception { ChRootedFs chRootedFs = new ChRootedFs(baseFs, chrootedTo); Mockito.doReturn(snapRootPath).when(baseFs) .createSnapshot(chRootedSnapRootPath, "snap1"); - Assert.assertEquals(snapRootPath, + Assertions.assertEquals(snapRootPath, chRootedFs.createSnapshot(snapRootPath, "snap1")); Mockito.verify(baseFs).createSnapshot(chRootedSnapRootPath, "snap1"); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testDeleteSnapshot() throws Exception { Path snapRootPath = new Path("/snapPath"); Path chRootedSnapRootPath = new Path( @@ -355,7 +361,8 @@ public void 
testDeleteSnapshot() throws Exception { Mockito.verify(baseFs).deleteSnapshot(chRootedSnapRootPath, "snap1"); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testRenameSnapshot() throws Exception { Path snapRootPath = new Path("/snapPath"); Path chRootedSnapRootPath = new Path( diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointInterceptorFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointInterceptorFactory.java index c567944ffe307..9d9f4bd615c37 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointInterceptorFactory.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointInterceptorFactory.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.fs.viewfs; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; /** * Test Regex Mount Point Interceptor Factory. @@ -34,7 +34,7 @@ public void testCreateNormalCase() { .toString(RegexMountPoint.INTERCEPTOR_INTERNAL_SEP) + "replace"; RegexMountPointInterceptor interceptor = RegexMountPointInterceptorFactory.create(replaceInterceptorStr); - Assert.assertTrue( + Assertions.assertTrue( interceptor instanceof RegexMountPointResolvedDstPathReplaceInterceptor); } @@ -49,6 +49,6 @@ public void testCreateBadCase() { + "replace"; RegexMountPointInterceptor interceptor = RegexMountPointInterceptorFactory.create(replaceInterceptorStr); - Assert.assertTrue(interceptor == null); + Assertions.assertTrue(interceptor == null); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointResolvedDstPathReplaceInterceptor.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointResolvedDstPathReplaceInterceptor.java index 9fdf0f6ac9c5c..a6249c65c07b0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointResolvedDstPathReplaceInterceptor.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointResolvedDstPathReplaceInterceptor.java @@ -19,8 +19,8 @@ import java.io.IOException; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; import static org.apache.hadoop.fs.viewfs.RegexMountPointInterceptorType.REPLACE_RESOLVED_DST_PATH; @@ -43,11 +43,11 @@ public void testDeserializeFromStringNormalCase() throws IOException { RegexMountPointResolvedDstPathReplaceInterceptor interceptor = RegexMountPointResolvedDstPathReplaceInterceptor .deserializeFromString(serializedString); - Assert.assertEquals(srcRegex, interceptor.getSrcRegexString()); - Assert.assertEquals(replaceString, interceptor.getReplaceString()); - Assert.assertNull(interceptor.getSrcRegexPattern()); + Assertions.assertEquals(srcRegex, interceptor.getSrcRegexString()); + Assertions.assertEquals(replaceString, interceptor.getReplaceString()); + Assertions.assertNull(interceptor.getSrcRegexPattern()); interceptor.initialize(); - Assert.assertEquals(srcRegex, + Assertions.assertEquals(srcRegex, interceptor.getSrcRegexPattern().toString()); } @@ -60,7 +60,7 @@ public void testDeserializeFromStringBadCase() throws IOException { RegexMountPointResolvedDstPathReplaceInterceptor interceptor = 
RegexMountPointResolvedDstPathReplaceInterceptor .deserializeFromString(serializedString); - Assert.assertNull(interceptor); + Assertions.assertNull(interceptor); } @Test @@ -71,7 +71,7 @@ public void testSerialization() { RegexMountPointResolvedDstPathReplaceInterceptor interceptor = new RegexMountPointResolvedDstPathReplaceInterceptor(srcRegex, replaceString); - Assert.assertEquals(interceptor.serializeToString(), serializedString); + Assertions.assertEquals(interceptor.serializeToString(), serializedString); } @Test @@ -82,7 +82,7 @@ public void testInterceptSource() { new RegexMountPointResolvedDstPathReplaceInterceptor(srcRegex, replaceString); String sourcePath = "/a/b/l3/dd"; - Assert.assertEquals(sourcePath, interceptor.interceptSource(sourcePath)); + Assertions.assertEquals(sourcePath, interceptor.interceptSource(sourcePath)); } @Test @@ -95,7 +95,7 @@ public void testInterceptResolve() throws IOException { new RegexMountPointResolvedDstPathReplaceInterceptor(srcRegex, replaceString); interceptor.initialize(); - Assert.assertEquals("/user-hdfs", + Assertions.assertEquals("/user-hdfs", interceptor.interceptResolvedDestPathStr(pathAfterResolution)); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegation.java index 3a60d6ecdda94..9bbec07a96733 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegation.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegation.java @@ -31,10 +31,11 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.viewfs.TestChRootedFileSystem.MockFileSystem; -import org.junit.*; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.BeforeAll; import static org.apache.hadoop.fs.viewfs.TestChRootedFileSystem.getChildFileSystem; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.*; /** @@ -46,7 +47,7 @@ public class TestViewFileSystemDelegation { //extends ViewFileSystemTestSetup { static FakeFileSystem fs1; static FakeFileSystem fs2; - @BeforeClass + @BeforeAll public static void setup() throws Exception { conf = ViewFileSystemTestSetup.createConfig(); setupFileSystem(new URI("fs1:/"), FakeFileSystem.class); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java index 239f47d1da6f3..4d90eabce1891 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegationTokenSupport.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.fs.viewfs; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.apache.hadoop.fs.viewfs.TestChRootedFileSystem.getChildFileSystem; import java.io.IOException; @@ -34,8 +34,8 @@ import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; -import org.junit.BeforeClass; -import 
org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; /** * Test ViewFileSystem's support for having delegation tokens fetched and cached @@ -52,7 +52,7 @@ public class TestViewFileSystemDelegationTokenSupport { static FakeFileSystem fs1; static FakeFileSystem fs2; - @BeforeClass + @BeforeAll public static void setup() throws Exception { conf = ViewFileSystemTestSetup.createConfig(); setupFileSystem(new URI("fs1:///"), FakeFileSystem.class); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemOverloadSchemeLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemOverloadSchemeLocalFileSystem.java index 1e86a91c141c1..22bc916de5b61 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemOverloadSchemeLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemOverloadSchemeLocalFileSystem.java @@ -29,13 +29,15 @@ import org.apache.hadoop.fs.FsConstants; import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.Path; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.junit.jupiter.api.Assertions.assertThrows; + /** * * Test the TestViewFileSystemOverloadSchemeLF using a file with authority: @@ -51,7 +53,7 @@ public class TestViewFileSystemOverloadSchemeLocalFileSystem { private FileSystemTestHelper fileSystemTestHelper = new FileSystemTestHelper(); - @Before + @BeforeEach public void setUp() throws Exception { conf = new Configuration(); conf.set(String.format("fs.%s.impl", FILE), @@ -94,7 +96,7 @@ public void testLocalTargetLinkWriteSimple() } try (FSDataInputStream lViewIs = lViewFs.open(testPath)) { - Assert.assertEquals(testString, lViewIs.readUTF()); + Assertions.assertEquals(testString, lViewIs.readUTF()); } } } @@ -111,9 +113,9 @@ public void testLocalFsCreateAndDelete() throws Exception { try (FileSystem lViewFS = FileSystem.get(mountURI, conf)) { Path testPath = new Path(mountURI.toString() + "/lfsroot/test"); lViewFS.createNewFile(testPath); - Assert.assertTrue(lViewFS.exists(testPath)); + Assertions.assertTrue(lViewFS.exists(testPath)); lViewFS.delete(testPath, true); - Assert.assertFalse(lViewFS.exists(testPath)); + Assertions.assertFalse(lViewFS.exists(testPath)); } } @@ -131,7 +133,7 @@ public void testLocalFsLinkSlashMerge() throws Exception { try (FileSystem lViewFS = FileSystem.get(mountURI, conf)) { Path fileOnRoot = new Path(mountURI.toString() + "/NewFile"); lViewFS.createNewFile(fileOnRoot); - Assert.assertTrue(lViewFS.exists(fileOnRoot)); + Assertions.assertTrue(lViewFS.exists(fileOnRoot)); } } @@ -139,18 +141,20 @@ public void testLocalFsLinkSlashMerge() throws Exception { * Tests with linkMergeSlash and other mounts in * ViewFileSystemOverloadScheme. 
*/ - @Test(expected = IOException.class) + @Test public void testLocalFsLinkSlashMergeWithOtherMountLinks() throws Exception { - LOG.info("Starting testLocalFsLinkSlashMergeWithOtherMountLinks"); - addMountLinks("mt", - new String[] {"/lfsroot", Constants.CONFIG_VIEWFS_LINK_MERGE_SLASH }, - new String[] {targetTestRoot + "/wd2", targetTestRoot + "/wd2" }, conf); - final URI mountURI = URI.create("file://mt/"); - FileSystem.get(mountURI, conf); - Assert.fail("A merge slash cannot be configured with other mount links."); + assertThrows(IOException.class, ()->{ + LOG.info("Starting testLocalFsLinkSlashMergeWithOtherMountLinks"); + addMountLinks("mt", + new String[] {"/lfsroot", Constants.CONFIG_VIEWFS_LINK_MERGE_SLASH }, + new String[] {targetTestRoot + "/wd2", targetTestRoot + "/wd2" }, conf); + final URI mountURI = URI.create("file://mt/"); + FileSystem.get(mountURI, conf); + Assertions.fail("A merge slash cannot be configured with other mount links."); + }); } - @After + @AfterEach public void tearDown() throws Exception { if (null != fsTarget) { fsTarget.delete(fileSystemTestHelper.getTestRootPath(fsTarget), true); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsConfig.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsConfig.java index 9d7c58f8197b3..150fed4c80c81 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsConfig.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsConfig.java @@ -25,41 +25,45 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.FileAlreadyExistsException; -import org.junit.Test; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertThrows; public class TestViewFsConfig { - @Test(expected = FileAlreadyExistsException.class) + @Test public void testInvalidConfig() throws IOException, URISyntaxException { - Configuration conf = new Configuration(); - ConfigUtil.setIsNestedMountPointSupported(conf, false); - ConfigUtil.addLink(conf, "/internalDir/linkToDir2", - new Path("file:///dir2").toUri()); - ConfigUtil.addLink(conf, "/internalDir/linkToDir2/linkToDir3", - new Path("file:///dir3").toUri()); + assertThrows(FileAlreadyExistsException.class, ()-> { + Configuration conf = new Configuration(); + ConfigUtil.setIsNestedMountPointSupported(conf, false); + ConfigUtil.addLink(conf, "/internalDir/linkToDir2", + new Path("file:///dir2").toUri()); + ConfigUtil.addLink(conf, "/internalDir/linkToDir2/linkToDir3", + new Path("file:///dir3").toUri()); - class Foo { - } + class Foo { + } - new InodeTree(conf, null, null, false) { + new InodeTree(conf, null, null, false) { - @Override - protected Function initAndGetTargetFs() { - return null; - } + @Override + protected Function initAndGetTargetFs() { + return null; + } - @Override - protected Foo getTargetFileSystem(final INodeDir dir) { - return null; - } + @Override + protected Foo getTargetFileSystem(final INodeDir dir) { + return null; + } - @Override - protected Foo getTargetFileSystem(final String settings, - final URI[] mergeFsURIList) { - return null; - } + @Override + protected Foo getTargetFileSystem(final String settings, + final URI[] mergeFsURIList) { + return null; + } - }; + }; + }); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java index 06cbdab8d210f..febdd99aba020 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java @@ -29,12 +29,12 @@ import org.apache.hadoop.fs.Trash; import org.apache.hadoop.fs.TrashPolicyDefault; import org.apache.hadoop.fs.contract.ContractTestUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.*; import static org.apache.hadoop.fs.viewfs.Constants.*; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; public class TestViewFsTrash { FileSystem fsTarget; // the target file system - the mount will point here @@ -42,7 +42,7 @@ public class TestViewFsTrash { Configuration conf; private FileSystemTestHelper fileSystemTestHelper; - @Before + @BeforeEach public void setUp() throws Exception { Configuration targetFSConf = new Configuration(); targetFSConf.setClass("fs.file.impl", TestTrash.TestLFS.class, FileSystem.class); @@ -62,7 +62,7 @@ public void setUp() throws Exception { } - @After + @AfterEach public void tearDown() throws Exception { ViewFileSystemTestSetup.tearDown(fileSystemTestHelper, fsTarget); fsTarget.delete(new Path(fsTarget.getHomeDirectory(), ".Trash/Current"), diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsURIs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsURIs.java index 6bc014ab8929f..8a6d0a0b9458a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsURIs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsURIs.java @@ -22,7 +22,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.FsConstants; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestViewFsURIs { @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java index 8ac447eb02e9b..7c5d9b73fddef 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java @@ -33,13 +33,13 @@ import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; /** * The FileStatus is being serialized in MR as jobs are submitted. 
@@ -51,13 +51,13 @@ public class TestViewfsFileStatus { private static final File TEST_DIR = GenericTestUtils.getTestDir( TestViewfsFileStatus.class.getSimpleName()); - @Before + @BeforeEach public void setUp() { FileUtil.fullyDelete(TEST_DIR); assertTrue(TEST_DIR.mkdirs()); } - @After + @AfterEach public void tearDown() throws IOException { FileUtil.fullyDelete(TEST_DIR); } @@ -83,9 +83,9 @@ public void testFileStatusSerialziation() FileStatus stat = vfs.getFileStatus(path); assertEquals(content.length, stat.getLen()); ContractTestUtils.assertNotErasureCoded(vfs, path); - assertTrue(path + " should have erasure coding unset in " + - "FileStatus#toString(): " + stat, - stat.toString().contains("isErasureCoded=false")); + assertTrue( + stat.toString().contains("isErasureCoded=false"), path + " should have erasure coding unset in " + + "FileStatus#toString(): " + stat); // check serialization/deserialization DataOutputBuffer dob = new DataOutputBuffer(); @@ -180,7 +180,7 @@ public void testGetFileChecksum() throws IOException { Mockito.verify(mockFS).getFileChecksum(new Path("someFile")); } - @AfterClass + @AfterAll public static void cleanup() throws IOException { FileUtil.fullyDelete(TEST_DIR); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java index b2d7416aa7675..e7057596c6839 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java @@ -31,7 +31,7 @@ import org.apache.hadoop.fs.viewfs.ViewFileSystemOverloadScheme.ChildFsGetter; import org.apache.hadoop.util.Shell; import org.eclipse.jetty.util.log.Log; -import org.junit.Assert; +import org.junit.jupiter.api.Assertions; /** @@ -164,7 +164,7 @@ static void addMountLinksToFile(String mountTable, String[] sources, out.writeBytes(""); if (isNfly) { String[] srcParts = src.split("[.]"); - Assert.assertEquals("Invalid NFlyLink format", 3, srcParts.length); + Assertions.assertEquals(3, srcParts.length, "Invalid NFlyLink format"); String actualSrc = srcParts[srcParts.length - 1]; String params = srcParts[srcParts.length - 2]; out.writeBytes(prefix + Constants.CONFIG_VIEWFS_LINK_NFLY + "." @@ -202,7 +202,7 @@ public static void addMountLinksToConf(String mountTable, String[] sources, boolean isNfly = src.startsWith(Constants.CONFIG_VIEWFS_LINK_NFLY); if (isNfly) { String[] srcParts = src.split("[.]"); - Assert.assertEquals("Invalid NFlyLink format", 3, srcParts.length); + Assertions.assertEquals(3, srcParts.length, "Invalid NFlyLink format"); String actualSrc = srcParts[srcParts.length - 1]; String params = srcParts[srcParts.length - 2]; ConfigUtil.addLinkNfly(config, mountTableName, actualSrc, params, From 92afe1646599dcd52088ff12f5fb75102fd2e04c Mon Sep 17 00:00:00 2001 From: fanshilun Date: Tue, 4 Feb 2025 16:10:20 +0800 Subject: [PATCH 2/6] HADOOP-19415. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-common Part2. 
--- .../hadoop/fs/FileContextTestHelper.java | 25 +++++++++++-------- .../hadoop/fs/FileContextTestWrapper.java | 25 +++++++++++-------- .../hadoop/fs/FileSystemTestHelper.java | 15 +++++------ 3 files changed, 36 insertions(+), 29 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java index b2782224ab297..427246365a5f8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java @@ -26,7 +26,10 @@ import org.apache.hadoop.fs.Options.CreateOpts.BlockSize; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.jupiter.api.Assertions; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Helper class for unit tests. @@ -220,28 +223,28 @@ public enum fileType {isDir, isFile, isSymlink}; public static void checkFileStatus(FileContext aFc, String path, fileType expectedType) throws IOException { FileStatus s = aFc.getFileStatus(new Path(path)); - Assertions.assertNotNull(s); + assertNotNull(s); if (expectedType == fileType.isDir) { - Assertions.assertTrue(s.isDirectory()); + assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - Assertions.assertTrue(s.isFile()); + assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - Assertions.assertTrue(s.isSymlink()); + assertTrue(s.isSymlink()); } - Assertions.assertEquals(aFc.makeQualified(new Path(path)), s.getPath()); + assertEquals(aFc.makeQualified(new Path(path)), s.getPath()); } public static void checkFileLinkStatus(FileContext aFc, String path, fileType expectedType) throws IOException { FileStatus s = aFc.getFileLinkStatus(new Path(path)); - Assertions.assertNotNull(s); + assertNotNull(s); if (expectedType == fileType.isDir) { - Assertions.assertTrue(s.isDirectory()); + assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - Assertions.assertTrue(s.isFile()); + assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - Assertions.assertTrue(s.isSymlink()); + assertTrue(s.isSymlink()); } - Assertions.assertEquals(aFc.makeQualified(new Path(path)), s.getPath()); + assertEquals(aFc.makeQualified(new Path(path)), s.getPath()); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java index 6c170d6b29fa5..42df1bd5c362f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java @@ -28,7 +28,10 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.AccessControlException; -import org.junit.jupiter.api.Assertions; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Helper class for unit tests. 
@@ -169,29 +172,29 @@ public FileStatus containsPath(String path, FileStatus[] dirList) public void checkFileStatus(String path, fileType expectedType) throws IOException { FileStatus s = fc.getFileStatus(new Path(path)); - Assertions.assertNotNull(s); + assertNotNull(s); if (expectedType == fileType.isDir) { - Assertions.assertTrue(s.isDirectory()); + assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - Assertions.assertTrue(s.isFile()); + assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - Assertions.assertTrue(s.isSymlink()); + assertTrue(s.isSymlink()); } - Assertions.assertEquals(fc.makeQualified(new Path(path)), s.getPath()); + assertEquals(fc.makeQualified(new Path(path)), s.getPath()); } public void checkFileLinkStatus(String path, fileType expectedType) throws IOException { FileStatus s = fc.getFileLinkStatus(new Path(path)); - Assertions.assertNotNull(s); + assertNotNull(s); if (expectedType == fileType.isDir) { - Assertions.assertTrue(s.isDirectory()); + assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - Assertions.assertTrue(s.isFile()); + assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - Assertions.assertTrue(s.isSymlink()); + assertTrue(s.isSymlink()); } - Assertions.assertEquals(fc.makeQualified(new Path(path)), s.getPath()); + assertEquals(fc.makeQualified(new Path(path)), s.getPath()); } // diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java index f313687ebcc2e..78bfab1a8d532 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java @@ -25,9 +25,10 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.jupiter.api.Assertions; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; /** @@ -241,15 +242,15 @@ public enum fileType {isDir, isFile, isSymlink}; public static void checkFileStatus(FileSystem aFs, String path, fileType expectedType) throws IOException { FileStatus s = aFs.getFileStatus(new Path(path)); - Assertions.assertNotNull(s); + assertNotNull(s); if (expectedType == fileType.isDir) { - Assertions.assertTrue(s.isDirectory()); + assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - Assertions.assertTrue(s.isFile()); + assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - Assertions.assertTrue(s.isSymlink()); + assertTrue(s.isSymlink()); } - Assertions.assertEquals(aFs.makeQualified(new Path(path)), s.getPath()); + assertEquals(aFs.makeQualified(new Path(path)), s.getPath()); } /** From 08ff51a1cc8352241110a53a970403af5798cc09 Mon Sep 17 00:00:00 2001 From: fanshilun Date: Tue, 4 Feb 2025 19:27:03 +0800 Subject: [PATCH 3/6] HADOOP-19415. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-common Part2. 
--- hadoop-common-project/hadoop-common/pom.xml | 20 + .../hadoop/fs/FileContextTestHelper.java | 25 +- .../hadoop/fs/FileContextTestWrapper.java | 25 +- .../hadoop/fs/FileSystemTestHelper.java | 15 +- .../hadoop/fs/FileSystemTestWrapper.java | 22 +- .../org/apache/hadoop/fs/TestAvroFSInput.java | 2 +- .../hadoop/fs/TestChecksumFileSystem.java | 6 +- .../apache/hadoop/fs/TestCommandFormat.java | 3 +- .../apache/hadoop/fs/TestContentSummary.java | 6 +- .../apache/hadoop/fs/TestDFVariations.java | 15 +- .../java/org/apache/hadoop/fs/TestDU.java | 4 +- .../hadoop/fs/TestDelegateToFileSystem.java | 5 +- .../hadoop/fs/TestDelegationTokenRenewer.java | 21 +- .../org/apache/hadoop/fs/TestFileContext.java | 2 +- .../fs/TestFileContextDeleteOnExit.java | 18 +- .../hadoop/fs/TestFileContextResolveAfs.java | 5 +- .../org/apache/hadoop/fs/TestFileStatus.java | 4 +- .../fs/TestFileSystemInitialization.java | 3 +- .../fs/TestFileSystemStorageStatistics.java | 10 +- .../hadoop/fs/TestFileSystemTokens.java | 11 +- .../org/apache/hadoop/fs/TestFileUtil.java | 405 ++++++++---------- .../hadoop/fs/TestFilterFileSystem.java | 12 +- .../org/apache/hadoop/fs/TestFsOptions.java | 2 +- .../org/apache/hadoop/fs/TestFsShell.java | 25 +- .../org/apache/hadoop/fs/TestFsShellList.java | 4 +- .../hadoop/fs/TestFsShellReturnCode.java | 12 +- .../hadoop/fs/TestFsUrlConnectionPath.java | 11 +- .../hadoop/fs/TestGetFileBlockLocations.java | 3 +- .../apache/hadoop/fs/TestGetSpaceUsed.java | 5 +- .../org/apache/hadoop/fs/TestGlobPattern.java | 5 +- .../apache/hadoop/fs/TestHarFileSystem.java | 4 +- .../hadoop/fs/TestHarFileSystemBasics.java | 33 +- .../org/apache/hadoop/fs/TestHardLink.java | 4 +- .../org/apache/hadoop/fs/TestListFiles.java | 16 +- .../hadoop/fs/TestLocalDirAllocator.java | 126 +++--- .../apache/hadoop/fs/TestLocalFileSystem.java | 33 +- .../fs/TestLocalFileSystemPermission.java | 19 +- .../java/org/apache/hadoop/fs/TestPath.java | 106 ++--- .../org/apache/hadoop/fs/TestQuotaUsage.java | 16 +- .../fs/TestSymlinkLocalFSFileContext.java | 4 +- .../fs/TestSymlinkLocalFSFileSystem.java | 26 +- .../java/org/apache/hadoop/fs/TestTrash.java | 87 ++-- .../hadoop/fs/TestTruncatedInputBug.java | 2 +- .../fs/audit/TestCommonAuditContext.java | 2 +- .../hadoop/fs/contract/ftp/FTPContract.java | 4 +- .../hadoop/fs/ftp/TestFTPFileSystem.java | 6 +- .../apache/hadoop/fs/permission/TestAcl.java | 4 +- .../fs/permission/TestFsPermission.java | 8 +- .../fs/protocolPB/TestFSSerialization.java | 2 +- .../hadoop/fs/sftp/TestSFTPFileSystem.java | 68 +-- .../hadoop/fs/shell/TestAclCommands.java | 71 ++- .../hadoop/fs/shell/TestCommandFactory.java | 5 +- .../org/apache/hadoop/fs/shell/TestCopy.java | 16 +- .../hadoop/fs/shell/TestCopyFromLocal.java | 10 +- .../hadoop/fs/shell/TestCopyToLocal.java | 12 +- .../org/apache/hadoop/fs/shell/TestCount.java | 12 +- .../apache/hadoop/fs/shell/TestCpCommand.java | 12 +- .../org/apache/hadoop/fs/shell/TestLs.java | 22 +- .../org/apache/hadoop/fs/shell/TestMove.java | 7 +- .../apache/hadoop/fs/shell/TestPathData.java | 5 +- .../hadoop/fs/shell/TestXAttrCommands.java | 28 +- .../apache/hadoop/fs/shell/find/TestAnd.java | 17 +- .../fs/shell/find/TestFilterExpression.java | 21 +- .../apache/hadoop/fs/shell/find/TestFind.java | 25 +- .../hadoop/fs/shell/find/TestIname.java | 12 +- .../apache/hadoop/fs/shell/find/TestName.java | 12 +- .../hadoop/fs/shell/find/TestPrint.java | 16 +- .../hadoop/fs/shell/find/TestPrint0.java | 17 +- .../hadoop/fs/store/TestDataBlocks.java | 3 +- 
.../fs/viewfs/TestChRootedFileSystem.java | 162 ++++--- .../hadoop/fs/viewfs/TestChRootedFs.java | 128 +++--- ...TestRegexMountPointInterceptorFactory.java | 7 +- ...ointResolvedDstPathReplaceInterceptor.java | 19 +- .../viewfs/TestViewFileSystemDelegation.java | 4 +- ...leSystemOverloadSchemeLocalFileSystem.java | 15 +- .../hadoop/fs/viewfs/TestViewFsTrash.java | 1 - .../fs/viewfs/TestViewfsFileStatus.java | 8 +- .../hadoop/fs/viewfs/ViewFsTestSetup.java | 6 +- 78 files changed, 984 insertions(+), 965 deletions(-) diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml index 7a7e0e853e162..1d9fc166c745a 100644 --- a/hadoop-common-project/hadoop-common/pom.xml +++ b/hadoop-common-project/hadoop-common/pom.xml @@ -381,6 +381,26 @@ lz4-java provided + + org.junit.jupiter + junit-jupiter-api + test + + + org.junit.jupiter + junit-jupiter-engine + test + + + org.junit.jupiter + junit-jupiter-params + test + + + org.junit.platform + junit-platform-launcher + test + diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java index 427246365a5f8..b5307a4e27669 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java @@ -26,10 +26,7 @@ import org.apache.hadoop.fs.Options.CreateOpts.BlockSize; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.test.GenericTestUtils; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; +import org.junit.Assert; /** * Helper class for unit tests. 
@@ -223,28 +220,28 @@ public enum fileType {isDir, isFile, isSymlink}; public static void checkFileStatus(FileContext aFc, String path, fileType expectedType) throws IOException { FileStatus s = aFc.getFileStatus(new Path(path)); - assertNotNull(s); + Assert.assertNotNull(s); if (expectedType == fileType.isDir) { - assertTrue(s.isDirectory()); + Assert.assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - assertTrue(s.isFile()); + Assert.assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - assertTrue(s.isSymlink()); + Assert.assertTrue(s.isSymlink()); } - assertEquals(aFc.makeQualified(new Path(path)), s.getPath()); + Assert.assertEquals(aFc.makeQualified(new Path(path)), s.getPath()); } public static void checkFileLinkStatus(FileContext aFc, String path, fileType expectedType) throws IOException { FileStatus s = aFc.getFileLinkStatus(new Path(path)); - assertNotNull(s); + Assert.assertNotNull(s); if (expectedType == fileType.isDir) { - assertTrue(s.isDirectory()); + Assert.assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - assertTrue(s.isFile()); + Assert.assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - assertTrue(s.isSymlink()); + Assert.assertTrue(s.isSymlink()); } - assertEquals(aFc.makeQualified(new Path(path)), s.getPath()); + Assert.assertEquals(aFc.makeQualified(new Path(path)), s.getPath()); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java index 42df1bd5c362f..0dd1e9aa3e0f7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestWrapper.java @@ -28,10 +28,7 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.AccessControlException; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; +import org.junit.Assert; /** * Helper class for unit tests. 
@@ -172,29 +169,29 @@ public FileStatus containsPath(String path, FileStatus[] dirList) public void checkFileStatus(String path, fileType expectedType) throws IOException { FileStatus s = fc.getFileStatus(new Path(path)); - assertNotNull(s); + Assert.assertNotNull(s); if (expectedType == fileType.isDir) { - assertTrue(s.isDirectory()); + Assert.assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - assertTrue(s.isFile()); + Assert.assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - assertTrue(s.isSymlink()); + Assert.assertTrue(s.isSymlink()); } - assertEquals(fc.makeQualified(new Path(path)), s.getPath()); + Assert.assertEquals(fc.makeQualified(new Path(path)), s.getPath()); } public void checkFileLinkStatus(String path, fileType expectedType) throws IOException { FileStatus s = fc.getFileLinkStatus(new Path(path)); - assertNotNull(s); + Assert.assertNotNull(s); if (expectedType == fileType.isDir) { - assertTrue(s.isDirectory()); + Assert.assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - assertTrue(s.isFile()); + Assert.assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - assertTrue(s.isSymlink()); + Assert.assertTrue(s.isSymlink()); } - assertEquals(fc.makeQualified(new Path(path)), s.getPath()); + Assert.assertEquals(fc.makeQualified(new Path(path)), s.getPath()); } // diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java index 78bfab1a8d532..ef9e094c4c978 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java @@ -25,10 +25,9 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.test.GenericTestUtils; +import org.junit.Assert; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.Assert.*; import static org.mockito.Mockito.mock; /** @@ -242,15 +241,15 @@ public enum fileType {isDir, isFile, isSymlink}; public static void checkFileStatus(FileSystem aFs, String path, fileType expectedType) throws IOException { FileStatus s = aFs.getFileStatus(new Path(path)); - assertNotNull(s); + Assert.assertNotNull(s); if (expectedType == fileType.isDir) { - assertTrue(s.isDirectory()); + Assert.assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - assertTrue(s.isFile()); + Assert.assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - assertTrue(s.isSymlink()); + Assert.assertTrue(s.isSymlink()); } - assertEquals(aFs.makeQualified(new Path(path)), s.getPath()); + Assert.assertEquals(aFs.makeQualified(new Path(path)), s.getPath()); } /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java index 1c159d44028cd..933ad1a2358cd 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestWrapper.java @@ -29,7 +29,7 @@ import org.apache.hadoop.io.IOUtils; 
import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.util.Progressable; -import org.junit.jupiter.api.Assertions; +import org.junit.Assert; /** * Helper class for unit tests. @@ -170,29 +170,29 @@ public FileStatus containsPath(String path, FileStatus[] dirList) public void checkFileStatus(String path, fileType expectedType) throws IOException { FileStatus s = fs.getFileStatus(new Path(path)); - Assertions.assertNotNull(s); + Assert.assertNotNull(s); if (expectedType == fileType.isDir) { - Assertions.assertTrue(s.isDirectory()); + Assert.assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - Assertions.assertTrue(s.isFile()); + Assert.assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - Assertions.assertTrue(s.isSymlink()); + Assert.assertTrue(s.isSymlink()); } - Assertions.assertEquals(fs.makeQualified(new Path(path)), s.getPath()); + Assert.assertEquals(fs.makeQualified(new Path(path)), s.getPath()); } public void checkFileLinkStatus(String path, fileType expectedType) throws IOException { FileStatus s = fs.getFileLinkStatus(new Path(path)); - Assertions.assertNotNull(s); + Assert.assertNotNull(s); if (expectedType == fileType.isDir) { - Assertions.assertTrue(s.isDirectory()); + Assert.assertTrue(s.isDirectory()); } else if (expectedType == fileType.isFile) { - Assertions.assertTrue(s.isFile()); + Assert.assertTrue(s.isFile()); } else if (expectedType == fileType.isSymlink) { - Assertions.assertTrue(s.isSymlink()); + Assert.assertTrue(s.isSymlink()); } - Assertions.assertEquals(fs.makeQualified(new Path(path)), s.getPath()); + Assert.assertEquals(fs.makeQualified(new Path(path)), s.getPath()); } // diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java index c7b765d5a724e..647144206f122 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAvroFSInput.java @@ -25,7 +25,7 @@ import org.apache.hadoop.test.GenericTestUtils; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestAvroFSInput { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java index c69a6b0131346..4415605bf6ae6 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java @@ -29,7 +29,11 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; public class TestChecksumFileSystem { static final String TEST_ROOT_DIR = diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java index 76ab123f56659..aeac1fbad7556 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestCommandFormat.java @@ -17,8 +17,7 @@ */ package org.apache.hadoop.fs; - -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.ArrayList; import java.util.Arrays; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java index d5125ba170d01..213a832463489 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java @@ -17,8 +17,10 @@ */ package org.apache.hadoop.fs; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; import java.io.DataInput; import java.io.DataOutput; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java index 9cdcc2f31623f..804e8fea50302 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java @@ -18,8 +18,8 @@ package org.apache.hadoop.fs; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; - import java.io.BufferedReader; import java.io.File; import java.io.FileNotFoundException; @@ -34,8 +34,6 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; -import static org.junit.jupiter.api.Assertions.*; - public class TestDFVariations { private static final String TEST_ROOT_DIR = GenericTestUtils.getTestDir("testdfvariations").getAbsolutePath(); @@ -165,12 +163,11 @@ public void testGetMountCurrentDirectory() throws Exception { DF df = new DF(new File(workingDir), 0L); String mountPath = df.getMount(); File mountDir = new File(mountPath); - assertTrue( - mountDir.exists(), "Mount dir ["+mountDir.getAbsolutePath()+"] should exist."); - assertTrue( - mountDir.isDirectory(), "Mount dir ["+mountDir.getAbsolutePath()+"] should be directory."); - assertTrue( - workingDir.startsWith(mountPath), "Working dir ["+workingDir+"] should start with ["+mountPath+"]."); + assertTrue(mountDir.exists(), "Mount dir ["+mountDir.getAbsolutePath()+"] should exist."); + assertTrue(mountDir.isDirectory(), + "Mount dir ["+mountDir.getAbsolutePath()+"] should be directory."); + assertTrue(workingDir.startsWith(mountPath), + "Working dir ["+workingDir+"] should start with ["+mountPath+"]."); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java index 208a7becdb474..cc9a698841361 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java @@ -21,8 +21,8 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; -import static org.junit.Assume.assumeFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assumptions.assumeFalse; import java.io.File; import java.io.IOException; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java index c877c6860960c..28e937f53bd15 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegateToFileSystem.java @@ -21,9 +21,10 @@ import org.apache.commons.net.ftp.FTP; import org.apache.hadoop.conf.Configuration; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; + public class TestDelegateToFileSystem { private static final String FTP_DUMMYHOST = "ftp://dummyhost"; @@ -37,7 +38,7 @@ private void testDefaultUriInternal(String defaultUri) FileSystem.setDefaultUri(conf, defaultUri); final AbstractFileSystem ftpFs = AbstractFileSystem.get(FTP_URI_NO_PORT, conf); - Assertions.assertEquals(FTP_URI_WITH_PORT, ftpFs.getUri()); + assertEquals(FTP_URI_WITH_PORT, ftpFs.getUri()); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java index 1990178f50a81..1606a95490925 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java @@ -18,8 +18,17 @@ package org.apache.hadoop.fs; import java.io.IOException; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.atMost; +import static org.mockito.Mockito.atLeast; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.DelegationTokenRenewer.Renewable; @@ -70,8 +79,8 @@ public Long answer(InvocationOnMock invocation) { renewer.addRenewAction(fs); - assertEquals(1 -, renewer.getRenewQueueLength(), "FileSystem not added to DelegationTokenRenewer"); + assertEquals(1, renewer.getRenewQueueLength(), + "FileSystem not added to DelegationTokenRenewer"); Thread.sleep(RENEW_CYCLE*2); verify(token, atLeast(2)).renew(eq(conf)); @@ -83,8 +92,8 @@ public Long answer(InvocationOnMock invocation) { verify(fs, never()).getDelegationToken(null); verify(fs, never()).setDelegationToken(any()); - assertEquals(0 -, renewer.getRenewQueueLength(), "FileSystem not 
removed from DelegationTokenRenewer"); + assertEquals(0, renewer.getRenewQueueLength(), + "FileSystem not removed from DelegationTokenRenewer"); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java index eaf484faaf71d..5d792713bfd70 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java @@ -51,7 +51,7 @@ public void testConfBasedAndAPIBasedSetUMask() throws Exception { String defaultlUMask = conf.get(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY); - assertEquals("Default UMask changed!", "022", defaultlUMask); + assertEquals("022", defaultlUMask, "Default UMask changed!"); URI uri1 = new URI("file://mydfs:50070/"); URI uri2 = new URI("file://tmp"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java index ceeee537dee95..df742f7223d52 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextDeleteOnExit.java @@ -20,13 +20,15 @@ import java.io.IOException; import java.util.Set; -import org.junit.jupiter.api.Assertions; import org.apache.hadoop.util.ShutdownHookManager; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import static org.apache.hadoop.fs.FileContextTestHelper.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Tests {@link FileContext#deleteOnExit(Path)} functionality. @@ -50,11 +52,11 @@ public void tearDown() throws IOException { private void checkDeleteOnExitData(int size, FileContext fc, Path... paths) { - Assertions.assertEquals(size, FileContext.DELETE_ON_EXIT.size()); + assertEquals(size, FileContext.DELETE_ON_EXIT.size()); Set set = FileContext.DELETE_ON_EXIT.get(fc); - Assertions.assertEquals(paths.length, (set == null ? 0 : set.size())); + assertEquals(paths.length, (set == null ? 
0 : set.size())); for (Path path : paths) { - Assertions.assertTrue(set.contains(path)); + assertTrue(set.contains(path)); } } @@ -67,7 +69,7 @@ public void testDeleteOnExit() throws Exception { checkDeleteOnExitData(1, fc, file1); // Ensure shutdown hook is added - Assertions.assertTrue(ShutdownHookManager.get().hasShutdownHook(FileContext.FINALIZER)); + assertTrue(ShutdownHookManager.get().hasShutdownHook(FileContext.FINALIZER)); Path file2 = helper.getTestRootPath(fc, "dir1/file2"); createFile(fc, file2, numBlocks, blockSize); @@ -83,8 +85,8 @@ public void testDeleteOnExit() throws Exception { // paths are cleaned up FileContext.FINALIZER.run(); checkDeleteOnExitData(0, fc, new Path[0]); - Assertions.assertFalse(exists(fc, file1)); - Assertions.assertFalse(exists(fc, file2)); - Assertions.assertFalse(exists(fc, dir)); + assertFalse(exists(fc, file1)); + assertFalse(exists(fc, file2)); + assertFalse(exists(fc, dir)); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java index cbb618d98b4e1..7dd9590d944e7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java @@ -24,11 +24,12 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; +import static org.junit.jupiter.api.Assertions.assertEquals; + /** * Tests resolution of AbstractFileSystems for a given path with symlinks. 
*/ @@ -62,7 +63,7 @@ public void testFileContextResolveAfs() throws IOException { fc.createSymlink(localPath, linkPath, true); Set afsList = fc.resolveAbstractFileSystems(linkPath); - Assertions.assertEquals(1, afsList.size()); + assertEquals(1, afsList.size()); localFs.delete(linkPath, true); localFs.delete(localPath, true); localFs.close(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java index 6c74ca7c48dc4..028feaca2749d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java @@ -17,7 +17,9 @@ */ package org.apache.hadoop.fs; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java index ff22b61abde98..c65ba2d7dfa9d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemInitialization.java @@ -31,7 +31,8 @@ import static org.apache.hadoop.test.LambdaTestUtils.intercept; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.fail; /** * Tests related to filesystem creation and lifecycle. diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java index 0245a19c4361d..c74cb2f880d54 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java @@ -22,15 +22,13 @@ import org.apache.hadoop.fs.StorageStatistics.LongStatistic; import org.junit.jupiter.api.BeforeEach; -import org.junit.Rule; import org.junit.jupiter.api.Test; -import org.junit.rules.Timeout; +import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Iterator; -import java.util.concurrent.TimeUnit; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -39,6 +37,7 @@ /** * This tests basic operations of {@link FileSystemStorageStatistics} class. 
*/ +@Timeout(10) public class TestFileSystemStorageStatistics { private static final Logger LOG = LoggerFactory.getLogger( TestFileSystemStorageStatistics.class); @@ -61,10 +60,7 @@ public class TestFileSystemStorageStatistics { new FileSystem.Statistics("test-scheme"); private FileSystemStorageStatistics storageStatistics = new FileSystemStorageStatistics(FS_STORAGE_STATISTICS_NAME, statistics); - - @Rule - public final Timeout globalTimeout = new Timeout(10, TimeUnit.SECONDS); - + @BeforeEach public void setup() { statistics.incrementBytesRead(RandomUtils.nextInt(0, 100)); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java index eada1425d95d9..90edf7d4ff5b8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemTokens.java @@ -18,8 +18,15 @@ package org.apache.hadoop.fs; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.atLeast; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import java.io.IOException; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java index 7c162d2140c20..177223dc08254 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java @@ -19,20 +19,13 @@ import static org.apache.hadoop.test.LambdaTestUtils.intercept; import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertArrayEquals; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertInstanceOf; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNotSame; -import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertSame; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.fail; -import static org.junit.jupiter.api.Assumptions.assumeTrue; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -73,12 +66,13 @@ import org.apache.tools.tar.TarEntry; import 
org.apache.tools.tar.TarOutputStream; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; +import org.assertj.core.api.Assertions; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; import org.junit.Ignore; import org.junit.Rule; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.Timeout; +import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -145,7 +139,7 @@ public class TestFileUtil { * file: part-r-00000, contents: "foo" * file: part-r-00001, contents: "bar" */ - @BeforeEach + @Before public void setup() throws IOException { del = testFolder.newFolder("del"); tmp = testFolder.newFolder("tmp"); @@ -182,7 +176,7 @@ public void setup() throws IOException { // create a symlink to dir File linkDir = new File(del, "tmpDir"); FileUtil.symLink(tmp.toString(), linkDir.toString()); - assertEquals(5, Objects.requireNonNull(del.listFiles()).length); + Assert.assertEquals(5, Objects.requireNonNull(del.listFiles()).length); // create files in partitioned directories createFile(partitioned, "part-r-00000", "foo"); @@ -192,7 +186,7 @@ public void setup() throws IOException { FileUtil.symLink(del.toString(), dir1.toString() + "/cycle"); } - @AfterEach + @After public void tearDown() throws IOException { testFolder.delete(); } @@ -215,65 +209,62 @@ private File createFile(File directory, String name, String contents) return newFile; } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testListFiles() throws IOException { //Test existing files case File[] files = FileUtil.listFiles(partitioned); - assertEquals(2, files.length); + Assert.assertEquals(2, files.length); //Test existing directory with no files case File newDir = new File(tmp.getPath(),"test"); Verify.mkdir(newDir); - assertTrue(newDir.exists(), "Failed to create test dir"); + Assert.assertTrue("Failed to create test dir", newDir.exists()); files = FileUtil.listFiles(newDir); - assertEquals(0, files.length); + Assert.assertEquals(0, files.length); assertTrue(newDir.delete()); - assertFalse(newDir.exists(), "Failed to delete test dir"); + Assert.assertFalse("Failed to delete test dir", newDir.exists()); //Test non-existing directory case, this throws //IOException try { files = FileUtil.listFiles(newDir); - fail("IOException expected on listFiles() for non-existent dir " + Assert.fail("IOException expected on listFiles() for non-existent dir " + newDir.toString()); } catch(IOException ioe) { //Expected an IOException } } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testListAPI() throws IOException { //Test existing files case String[] files = FileUtil.list(partitioned); - assertEquals(2, files.length, "Unexpected number of pre-existing files"); + Assert.assertEquals("Unexpected number of pre-existing files", 2, files.length); //Test existing directory with no files case File newDir = new File(tmp.getPath(),"test"); Verify.mkdir(newDir); - assertTrue(newDir.exists(), "Failed to create test dir"); + Assert.assertTrue("Failed to create test dir", newDir.exists()); files = FileUtil.list(newDir); - assertEquals(0, files.length, "New directory unexpectedly contains files"); + Assert.assertEquals("New directory unexpectedly contains files", 0, files.length); assertTrue(newDir.delete()); - assertFalse(newDir.exists(), "Failed to delete test dir"); + Assert.assertFalse("Failed to delete test dir", newDir.exists()); //Test non-existing directory case, this throws 
//IOException try { files = FileUtil.list(newDir); - fail("IOException expected on list() for non-existent dir " + Assert.fail("IOException expected on list() for non-existent dir " + newDir.toString()); } catch(IOException ioe) { //Expected an IOException } } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testFullyDelete() throws IOException { boolean ret = FileUtil.fullyDelete(del); - assertTrue(ret); + Assert.assertTrue(ret); Verify.notExists(del); validateTmpDir(); } @@ -284,15 +275,14 @@ public void testFullyDelete() throws IOException { * (b) symlink to dir only and not the dir pointed to by symlink. * @throws IOException */ - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testFullyDeleteSymlinks() throws IOException { File link = new File(del, LINK); assertDelListLength(5); // Since tmpDir is symlink to tmp, fullyDelete(tmpDir) should not // delete contents of tmp. See setupDirs for details. boolean ret = FileUtil.fullyDelete(link); - assertTrue(ret); + Assert.assertTrue(ret); Verify.notExists(link); assertDelListLength(4); validateTmpDir(); @@ -301,7 +291,7 @@ public void testFullyDeleteSymlinks() throws IOException { // Since tmpDir is symlink to tmp, fullyDelete(tmpDir) should not // delete contents of tmp. See setupDirs for details. ret = FileUtil.fullyDelete(linkDir); - assertTrue(ret); + Assert.assertTrue(ret); Verify.notExists(linkDir); assertDelListLength(3); validateTmpDir(); @@ -313,13 +303,12 @@ public void testFullyDeleteSymlinks() throws IOException { * (b) dangling symlink to directory properly * @throws IOException */ - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testFullyDeleteDanglingSymlinks() throws IOException { // delete the directory tmp to make tmpDir a dangling link to dir tmp and // to make y as a dangling link to file tmp/x boolean ret = FileUtil.fullyDelete(tmp); - assertTrue(ret); + Assert.assertTrue(ret); Verify.notExists(tmp); // dangling symlink to file @@ -328,7 +317,7 @@ public void testFullyDeleteDanglingSymlinks() throws IOException { // Even though 'y' is dangling symlink to file tmp/x, fullyDelete(y) // should delete 'y' properly. ret = FileUtil.fullyDelete(link); - assertTrue(ret); + Assert.assertTrue(ret); assertDelListLength(4); // dangling symlink to directory @@ -336,23 +325,22 @@ public void testFullyDeleteDanglingSymlinks() throws IOException { // Even though tmpDir is dangling symlink to tmp, fullyDelete(tmpDir) should // delete tmpDir properly. 
ret = FileUtil.fullyDelete(linkDir); - assertTrue(ret); + Assert.assertTrue(ret); assertDelListLength(3); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testFullyDeleteContents() throws IOException { boolean ret = FileUtil.fullyDeleteContents(del); - assertTrue(ret); + Assert.assertTrue(ret); Verify.exists(del); - assertEquals(0, Objects.requireNonNull(del.listFiles()).length); + Assert.assertEquals(0, Objects.requireNonNull(del.listFiles()).length); validateTmpDir(); } private void validateTmpDir() { Verify.exists(tmp); - assertEquals(1, Objects.requireNonNull(tmp.listFiles()).length); + Assert.assertEquals(1, Objects.requireNonNull(tmp.listFiles()).length); Verify.exists(new File(tmp, FILE)); } @@ -420,29 +408,28 @@ private void validateAndSetWritablePermissions( grantPermissions(xSubDir); grantPermissions(xSubSubDir); - assertFalse(ret, "The return value should have been false."); - assertTrue( - new File(del, FILE_1_NAME).exists(), "The file file1 should not have been deleted."); + Assert.assertFalse("The return value should have been false.", ret); + Assert.assertTrue("The file file1 should not have been deleted.", + new File(del, FILE_1_NAME).exists()); - assertEquals( + Assert.assertEquals( + "The directory xSubDir *should* not have been deleted.", + expectedRevokedPermissionDirsExist, xSubDir.exists()); + Assert.assertEquals("The file file2 *should* not have been deleted.", + expectedRevokedPermissionDirsExist, file2.exists()); + Assert.assertEquals( + "The directory xSubSubDir *should* not have been deleted.", + expectedRevokedPermissionDirsExist, xSubSubDir.exists()); + Assert.assertEquals("The file file22 *should* not have been deleted.", + expectedRevokedPermissionDirsExist, file22.exists()); - expectedRevokedPermissionDirsExist, xSubDir.exists(), "The directory xSubDir *should* not have been deleted."); - assertEquals( - expectedRevokedPermissionDirsExist, file2.exists(), "The file file2 *should* not have been deleted."); - assertEquals( - - expectedRevokedPermissionDirsExist, xSubSubDir.exists(), "The directory xSubSubDir *should* not have been deleted."); - assertEquals( - expectedRevokedPermissionDirsExist, file22.exists(), "The file file22 *should* not have been deleted."); - - assertFalse( - ySubDir.exists(), "The directory ySubDir should have been deleted."); - assertFalse( - zlink.exists(), "The link zlink should have been deleted."); + Assert.assertFalse("The directory ySubDir should have been deleted.", + ySubDir.exists()); + Assert.assertFalse("The link zlink should have been deleted.", + zlink.exists()); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testFailFullyDelete() throws IOException { // Windows Dir.setWritable(false) does not work for directories assumeNotWindows(); @@ -452,8 +439,7 @@ public void testFailFullyDelete() throws IOException { validateAndSetWritablePermissions(true, ret); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testFailFullyDeleteGrantPermissions() throws IOException { setupDirsAndNonWritablePermissions(); boolean ret = FileUtil.fullyDelete(new MyFile(del), true); @@ -466,8 +452,7 @@ public void testFailFullyDeleteGrantPermissions() throws IOException { * Tests if fullyDelete deletes symlink's content when deleting unremovable dir symlink. 
* @throws IOException */ - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testFailFullyDeleteDirSymlinks() throws IOException { File linkDir = new File(del, "tmpDir"); FileUtil.setWritable(del, false); @@ -475,7 +460,7 @@ public void testFailFullyDeleteDirSymlinks() throws IOException { // delete contents of tmp. See setupDirs for details. boolean ret = FileUtil.fullyDelete(linkDir); // fail symlink deletion - assertFalse(ret); + Assert.assertFalse(ret); Verify.exists(linkDir); assertDelListLength(5); // tmp dir should exist @@ -484,7 +469,7 @@ public void testFailFullyDeleteDirSymlinks() throws IOException { FileUtil.setWritable(del, true); ret = FileUtil.fullyDelete(linkDir); // success symlink deletion - assertTrue(ret); + Assert.assertTrue(ret); Verify.notExists(linkDir); assertDelListLength(4); // tmp dir should exist @@ -497,7 +482,7 @@ public void testFailFullyDeleteDirSymlinks() throws IOException { * @param expectedLength The expected length of the {@link TestFileUtil#del}. */ private void assertDelListLength(int expectedLength) { - assertThat(del.list()).describedAs("del list").isNotNull().hasSize(expectedLength); + Assertions.assertThat(del.list()).describedAs("del list").isNotNull().hasSize(expectedLength); } /** @@ -512,7 +497,7 @@ public static class Verify { * @throws IOException As per {@link File#createNewFile()}. */ public static File createNewFile(File file) throws IOException { - assertTrue(file.createNewFile(), "Unable to create new file " + file); + assertTrue("Unable to create new file " + file, file.createNewFile()); return file; } @@ -523,7 +508,7 @@ public static File createNewFile(File file) throws IOException { * @return The result of {@link File#mkdir()}. */ public static File mkdir(File file) { - assertTrue(file.mkdir(), "Unable to mkdir for " + file); + assertTrue("Unable to mkdir for " + file, file.mkdir()); return file; } @@ -534,7 +519,7 @@ public static File mkdir(File file) { * @return The result of {@link File#mkdirs()}. */ public static File mkdirs(File file) { - assertTrue(file.mkdirs(), "Unable to mkdirs for " + file); + assertTrue("Unable to mkdirs for " + file, file.mkdirs()); return file; } @@ -545,7 +530,7 @@ public static File mkdirs(File file) { * @return The result of {@link File#delete()}. */ public static File delete(File file) { - assertTrue(file.delete(), "Unable to delete " + file); + assertTrue("Unable to delete " + file, file.delete()); return file; } @@ -556,7 +541,7 @@ public static File delete(File file) { * @return The result of {@link File#exists()}. */ public static File exists(File file) { - assertTrue(file.exists(), "Expected file " + file + " doesn't exist"); + assertTrue("Expected file " + file + " doesn't exist", file.exists()); return file; } @@ -568,7 +553,7 @@ public static File exists(File file) { * @return The negation of the result of {@link File#exists()}. 
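Most of the assertion churn in this file is only about where the failure message sits: org.junit.Assert takes the message as the first argument, while org.junit.jupiter.api.Assertions takes it as the last, and the AssertJ call used by assertDelListLength carries it via describedAs instead. A small sketch of the first two forms, with hypothetical values:

    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.junit.jupiter.api.Assertions.assertTrue;

    class MessageOrderSketch {
      static void compare(int expected, int actual) {
        // JUnit 4 (org.junit.Assert): message comes first
        //   Assert.assertEquals("unexpected number of files", expected, actual);
        //   Assert.assertTrue("count should be positive", actual > 0);
        // JUnit 5 (org.junit.jupiter.api.Assertions): message comes last
        assertEquals(expected, actual, "unexpected number of files");
        assertTrue(actual > 0, "count should be positive");
      }
    }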
*/ public static File notExists(File file) { - assertFalse(file.exists(), "Expected file " + file + " must not exist"); + assertFalse("Expected file " + file + " must not exist", file.exists()); return file; } } @@ -634,8 +619,7 @@ public File[] listFiles() { } } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testFailFullyDeleteContents() throws IOException { // Windows Dir.setWritable(false) does not work for directories assumeNotWindows(); @@ -645,8 +629,7 @@ public void testFailFullyDeleteContents() throws IOException { validateAndSetWritablePermissions(true, ret); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testFailFullyDeleteContentsGrantPermissions() throws IOException { setupDirsAndNonWritablePermissions(); boolean ret = FileUtil.fullyDeleteContents(new MyFile(del), true); @@ -659,14 +642,13 @@ public void testFailFullyDeleteContentsGrantPermissions() throws IOException { * and that directory sizes are not added to the final calculated size * @throws IOException */ - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testGetDU() throws Exception { long du = FileUtil.getDU(testFolder.getRoot()); // Only two files (in partitioned). Each has 3 characters + system-specific // line separator. final long expected = 2 * (3 + System.getProperty("line.separator").length()); - assertEquals(expected, du); + Assert.assertEquals(expected, du); // target file does not exist: final File doesNotExist = new File(tmp, "QuickBrownFoxJumpsOverTheLazyDog"); @@ -709,8 +691,7 @@ public void testGetDU() throws Exception { } } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testUnTar() throws Exception { // make a simple tar: final File simpleTar = new File(del, FILE); @@ -737,8 +718,7 @@ public void testUnTar() throws Exception { LambdaTestUtils.intercept(IOException.class, () -> FileUtil.unTar(simpleTar, regularFile)); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testReplaceFile() throws IOException { // src exists, and target does not exist: final File srcFile = Verify.createNewFile(new File(tmp, "src")); @@ -774,8 +754,7 @@ public void testReplaceFile() throws IOException { Verify.exists(obstacle); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testCreateLocalTempFile() throws IOException { final File baseFile = new File(tmp, "base"); File tmp1 = FileUtil.createLocalTempFile(baseFile, "foo", false); @@ -790,8 +769,7 @@ public void testCreateLocalTempFile() throws IOException { assertTrue(!tmp1.exists() && !tmp2.exists()); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testUnZip() throws Exception { // make sa simple zip final File simpleZip = new File(del, FILE); @@ -833,41 +811,40 @@ public void testUnZip() throws Exception { assertTrue(foo6.exists()); assertEquals(12, foo0.length()); // tests whether file foo_0 has executable permissions - assertTrue(foo0.canExecute(), "file lacks execute permissions"); - assertFalse(foo0.canWrite(), "file has write permissions"); - assertFalse(foo0.canRead(), "file has read permissions"); + assertTrue("file lacks execute permissions", foo0.canExecute()); + assertFalse("file has write permissions", foo0.canWrite()); + assertFalse("file has read permissions", foo0.canRead()); // tests whether file foo_1 has writable permissions - assertFalse(foo1.canExecute(), "file has execute permissions"); - assertTrue(foo1.canWrite(), "file lacks write permissions"); - assertFalse(foo1.canRead(), "file has read 
permissions"); + assertFalse("file has execute permissions", foo1.canExecute()); + assertTrue("file lacks write permissions", foo1.canWrite()); + assertFalse("file has read permissions", foo1.canRead()); // tests whether file foo_2 has executable and writable permissions - assertTrue(foo2.canExecute(), "file lacks execute permissions"); - assertTrue(foo2.canWrite(), "file lacks write permissions"); - assertFalse(foo2.canRead(), "file has read permissions"); + assertTrue("file lacks execute permissions", foo2.canExecute()); + assertTrue("file lacks write permissions", foo2.canWrite()); + assertFalse("file has read permissions", foo2.canRead()); // tests whether file foo_3 has readable permissions - assertFalse(foo3.canExecute(), "file has execute permissions"); - assertFalse(foo3.canWrite(), "file has write permissions"); - assertTrue(foo3.canRead(), "file lacks read permissions"); + assertFalse("file has execute permissions", foo3.canExecute()); + assertFalse("file has write permissions", foo3.canWrite()); + assertTrue("file lacks read permissions", foo3.canRead()); // tests whether file foo_4 has readable and executable permissions - assertTrue(foo4.canExecute(), "file lacks execute permissions"); - assertFalse(foo4.canWrite(), "file has write permissions"); - assertTrue(foo4.canRead(), "file lacks read permissions"); + assertTrue("file lacks execute permissions", foo4.canExecute()); + assertFalse("file has write permissions", foo4.canWrite()); + assertTrue("file lacks read permissions", foo4.canRead()); // tests whether file foo_5 has readable and writable permissions - assertFalse(foo5.canExecute(), "file has execute permissions"); - assertTrue(foo5.canWrite(), "file lacks write permissions"); - assertTrue(foo5.canRead(), "file lacks read permissions"); + assertFalse("file has execute permissions", foo5.canExecute()); + assertTrue("file lacks write permissions", foo5.canWrite()); + assertTrue("file lacks read permissions", foo5.canRead()); // tests whether file foo_6 has readable, writable and executable permissions - assertTrue(foo6.canExecute(), "file lacks execute permissions"); - assertTrue(foo6.canWrite(), "file lacks write permissions"); - assertTrue(foo6.canRead(), "file lacks read permissions"); + assertTrue("file lacks execute permissions", foo6.canExecute()); + assertTrue("file lacks write permissions", foo6.canWrite()); + assertTrue("file lacks read permissions", foo6.canRead()); final File regularFile = Verify.createNewFile(new File(tmp, "QuickBrownFoxJumpsOverTheLazyDog")); LambdaTestUtils.intercept(IOException.class, () -> FileUtil.unZip(simpleZip, regularFile)); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testUnZip2() throws IOException { // make a simple zip final File simpleZip = new File(del, FILE); @@ -894,8 +871,7 @@ public void testUnZip2() throws IOException { } } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) /* * Test method copy(FileSystem srcFS, Path src, File dst, boolean deleteSource, Configuration conf) */ @@ -943,8 +919,7 @@ public void testCopy5() throws IOException { Verify.notExists(partitioned); // should be deleted } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testStat2Paths1() { assertNull(FileUtil.stat2Paths(null)); @@ -964,8 +939,7 @@ public void testStat2Paths1() { assertEquals(paths[1], path2); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testStat2Paths2() { Path defaultPath = new Path("file://default"); Path[] paths = FileUtil.stat2Paths(null, defaultPath); @@ 
-989,8 +963,7 @@ public void testStat2Paths2() { assertEquals(paths[1], path2); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testSymlink() throws Exception { byte[] data = "testSymLink".getBytes(); @@ -1006,8 +979,8 @@ public void testSymlink() throws Exception { FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath()); //ensure that symlink length is correctly reported by Java - assertEquals(data.length, file.length()); - assertEquals(data.length, link.length()); + Assert.assertEquals(data.length, file.length()); + Assert.assertEquals(data.length, link.length()); //ensure that we can read from link. FileInputStream in = new FileInputStream(link); @@ -1016,14 +989,13 @@ public void testSymlink() throws Exception { len++; } in.close(); - assertEquals(data.length, len); + Assert.assertEquals(data.length, len); } /** * Test that rename on a symlink works as expected. */ - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testSymlinkRenameTo() throws Exception { File file = new File(del, FILE); file.createNewFile(); @@ -1038,7 +1010,7 @@ public void testSymlinkRenameTo() throws Exception { File link2 = new File(del, "_link2"); // Rename the symlink - assertTrue(link.renameTo(link2)); + Assert.assertTrue(link.renameTo(link2)); // Make sure the file still exists // (NOTE: this would fail on Java6 on Windows if we didn't @@ -1052,8 +1024,7 @@ public void testSymlinkRenameTo() throws Exception { /** * Test that deletion of a symlink works as expected. */ - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testSymlinkDelete() throws Exception { File file = new File(del, FILE); file.createNewFile(); @@ -1074,8 +1045,7 @@ public void testSymlinkDelete() throws Exception { /** * Test that length on a symlink works as expected. 
*/ - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testSymlinkLength() throws Exception { byte[] data = "testSymLinkData".getBytes(); @@ -1087,19 +1057,19 @@ public void testSymlinkLength() throws Exception { os.write(data); os.close(); - assertEquals(0, link.length()); + Assert.assertEquals(0, link.length()); // create the symlink FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath()); // ensure that File#length returns the target file and link size - assertEquals(data.length, file.length()); - assertEquals(data.length, link.length()); + Assert.assertEquals(data.length, file.length()); + Assert.assertEquals(data.length, link.length()); Verify.delete(file); Verify.notExists(file); - assertEquals(0, link.length()); + Assert.assertEquals(0, link.length()); Verify.delete(link); Verify.notExists(link); @@ -1119,17 +1089,17 @@ public void testSymlinkWithNullInput() throws IOException { // Create the same symbolic link // The operation should fail and returns 1 int result = FileUtil.symLink(null, null); - assertEquals(1, result); + Assert.assertEquals(1, result); // Create the same symbolic link // The operation should fail and returns 1 result = FileUtil.symLink(file.getAbsolutePath(), null); - assertEquals(1, result); + Assert.assertEquals(1, result); // Create the same symbolic link // The operation should fail and returns 1 result = FileUtil.symLink(null, link.getAbsolutePath()); - assertEquals(1, result); + Assert.assertEquals(1, result); } /** @@ -1148,13 +1118,13 @@ public void testSymlinkFileAlreadyExists() throws IOException { int result1 = FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath()); - assertEquals(0, result1); + Assert.assertEquals(0, result1); // Create the same symbolic link // The operation should fail and returns 1 result1 = FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath()); - assertEquals(1, result1); + Assert.assertEquals(1, result1); } /** @@ -1175,7 +1145,7 @@ public void testSymlinkSameFile() throws IOException { int result = FileUtil.symLink(file.getAbsolutePath(), file.getAbsolutePath()); - assertEquals(0, result); + Assert.assertEquals(0, result); } /** @@ -1196,13 +1166,13 @@ public void testSymlink2DifferentFile() throws IOException { int result = FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath()); - assertEquals(0, result); + Assert.assertEquals(0, result); // The operation should fail and returns 1 result = FileUtil.symLink(fileSecond.getAbsolutePath(), link.getAbsolutePath()); - assertEquals(1, result); + Assert.assertEquals(1, result); } /** @@ -1223,13 +1193,13 @@ public void testSymlink2DifferentLinks() throws IOException { int result = FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath()); - assertEquals(0, result); + Assert.assertEquals(0, result); // The operation should succeed result = FileUtil.symLink(file.getAbsolutePath(), linkSecond.getAbsolutePath()); - assertEquals(0, result); + Assert.assertEquals(0, result); } private void doUntarAndVerify(File tarFile, File untarDir) @@ -1242,25 +1212,24 @@ private void doUntarAndVerify(File tarFile, File untarDir) String parentDir = untarDir.getCanonicalPath() + Path.SEPARATOR + "name"; File testFile = new File(parentDir + Path.SEPARATOR + "version"); Verify.exists(testFile); - assertTrue(testFile.length() == 0); + Assert.assertTrue(testFile.length() == 0); String imageDir = parentDir + Path.SEPARATOR + "image"; testFile = new File(imageDir + Path.SEPARATOR + "fsimage"); Verify.exists(testFile); - assertTrue(testFile.length() 
== 157); + Assert.assertTrue(testFile.length() == 157); String currentDir = parentDir + Path.SEPARATOR + "current"; testFile = new File(currentDir + Path.SEPARATOR + "fsimage"); Verify.exists(testFile); - assertTrue(testFile.length() == 4331); + Assert.assertTrue(testFile.length() == 4331); testFile = new File(currentDir + Path.SEPARATOR + "edits"); Verify.exists(testFile); - assertTrue(testFile.length() == 1033); + Assert.assertTrue(testFile.length() == 1033); testFile = new File(currentDir + Path.SEPARATOR + "fstime"); Verify.exists(testFile); - assertTrue(testFile.length() == 8); + Assert.assertTrue(testFile.length() == 8); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testUntar() throws IOException { String tarGzFileName = System.getProperty("test.cache.data", "target/test/cache") + "/test-untar.tgz"; @@ -1278,8 +1247,7 @@ public void testUntar() throws IOException { * This will test different codepaths on Windows from unix, * but both MUST throw an IOE of some kind. */ - @Test - @Timeout(value = 30) + @Test(timeout = 30000) public void testUntarMissingFile() throws Throwable { File dataDir = GenericTestUtils.getTestDir(); File tarFile = new File(dataDir, "missing; true"); @@ -1294,8 +1262,7 @@ public void testUntarMissingFile() throws Throwable { * This is how {@code FileUtil.unTar(File, File} * will behave on Windows, */ - @Test - @Timeout(value = 30) + @Test(timeout = 30000) public void testUntarMissingFileThroughJava() throws Throwable { File dataDir = GenericTestUtils.getTestDir(); File tarFile = new File(dataDir, "missing; true"); @@ -1307,16 +1274,15 @@ public void testUntarMissingFileThroughJava() throws Throwable { FileUtil.unTarUsingJava(tarFile, untarDir, false)); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testCreateJarWithClassPath() throws Exception { // create files expected to match a wildcard List wildcardMatches = Arrays.asList(new File(tmp, "wildcard1.jar"), new File(tmp, "wildcard2.jar"), new File(tmp, "wildcard3.JAR"), new File(tmp, "wildcard4.JAR")); for (File wildcardMatch: wildcardMatches) { - assertTrue( - wildcardMatch.createNewFile(), "failure creating file: " + wildcardMatch); + Assert.assertTrue("failure creating file: " + wildcardMatch, + wildcardMatch.createNewFile()); } // create non-jar files, which we expect to not be included in the classpath @@ -1334,19 +1300,19 @@ public void testCreateJarWithClassPath() throws Exception { String[] jarCp = FileUtil.createJarWithClassPath(inputClassPath + File.pathSeparator + "unexpandedwildcard/*", new Path(tmp.getCanonicalPath()), System.getenv()); String classPathJar = jarCp[0]; - assertNotEquals(jarCp[1].indexOf("unexpanded"), -1, "Unexpanded wildcard was not placed in extra classpath"); + assertNotEquals("Unexpanded wildcard was not placed in extra classpath", jarCp[1].indexOf("unexpanded"), -1); // verify classpath by reading manifest from jar file JarFile jarFile = null; try { jarFile = new JarFile(classPathJar); Manifest jarManifest = jarFile.getManifest(); - assertNotNull(jarManifest); + Assert.assertNotNull(jarManifest); Attributes mainAttributes = jarManifest.getMainAttributes(); - assertNotNull(mainAttributes); - assertTrue(mainAttributes.containsKey(Attributes.Name.CLASS_PATH)); + Assert.assertNotNull(mainAttributes); + Assert.assertTrue(mainAttributes.containsKey(Attributes.Name.CLASS_PATH)); String classPathAttr = mainAttributes.getValue(Attributes.Name.CLASS_PATH); - assertNotNull(classPathAttr); + Assert.assertNotNull(classPathAttr); List 
expectedClassPaths = new ArrayList(); for (String classPath: classPaths) { if (classPath.length() == 0) { @@ -1380,7 +1346,7 @@ public void testCreateJarWithClassPath() throws Exception { List actualClassPaths = Arrays.asList(classPathAttr.split(" ")); Collections.sort(expectedClassPaths); Collections.sort(actualClassPaths); - assertEquals(expectedClassPaths, actualClassPaths); + Assert.assertEquals(expectedClassPaths, actualClassPaths); } finally { if (jarFile != null) { try { @@ -1395,8 +1361,8 @@ public void testCreateJarWithClassPath() throws Exception { @Test public void testGetJarsInDirectory() throws Exception { List jars = FileUtil.getJarsInDirectory("/foo/bar/bogus/"); - assertTrue( - jars.isEmpty(), "no jars should be returned for a bogus path"); + assertTrue("no jars should be returned for a bogus path", + jars.isEmpty()); // create jar files to be returned @@ -1404,7 +1370,7 @@ public void testGetJarsInDirectory() throws Exception { File jar2 = new File(tmp, "wildcard2.JAR"); List matches = Arrays.asList(jar1, jar2); for (File match: matches) { - assertTrue(match.createNewFile(), "failure creating file: " + match); + assertTrue("failure creating file: " + match, match.createNewFile()); } // create non-jar files, which we expect to not be included in the result @@ -1415,12 +1381,12 @@ public void testGetJarsInDirectory() throws Exception { // pass in the directory String directory = tmp.getCanonicalPath(); jars = FileUtil.getJarsInDirectory(directory); - assertEquals(2, jars.size(), "there should be 2 jars"); + assertEquals("there should be 2 jars", 2, jars.size()); for (Path jar: jars) { URL url = jar.toUri().toURL(); - assertTrue( - url.equals(jar1.getCanonicalFile().toURI().toURL()) || - url.equals(jar2.getCanonicalFile().toURI().toURL()), "the jar should match either of the jars"); + assertTrue("the jar should match either of the jars", + url.equals(jar1.getCanonicalFile().toURI().toURL()) || + url.equals(jar2.getCanonicalFile().toURI().toURL())); } } @@ -1502,8 +1468,7 @@ public void testCompareFsDirectories() throws Exception { assertFalse(FileUtil.compareFs(fs1, fs6)); } - @Test - @Timeout(value = 8) + @Test(timeout = 8000) public void testCreateSymbolicLinkUsingJava() throws IOException { final File simpleTar = new File(del, FILE); OutputStream os = new FileOutputStream(simpleTar); @@ -1537,41 +1502,39 @@ public void testCreateSymbolicLinkUsingJava() throws IOException { } } - @Test + @Test(expected = IOException.class) public void testCreateArbitrarySymlinkUsingJava() throws IOException { - assertThrows(IOException.class, () -> { - final File simpleTar = new File(del, FILE); - OutputStream os = new FileOutputStream(simpleTar); - - File rootDir = new File("tmp"); - try (TarArchiveOutputStream tos = new TarArchiveOutputStream(os)) { - tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU); - - // Create arbitrary dir - File arbitraryDir = new File(rootDir, "arbitrary-dir/"); - Verify.mkdirs(arbitraryDir); - - // We will tar from the tar-root lineage - File tarRoot = new File(rootDir, "tar-root/"); - File symlinkRoot = new File(tarRoot, "dir1/"); - Verify.mkdirs(symlinkRoot); - - // Create Symbolic Link to an arbitrary dir - java.nio.file.Path symLink = Paths.get(symlinkRoot.getPath(), "sl"); - Files.createSymbolicLink(symLink, arbitraryDir.toPath().toAbsolutePath()); - - // Put entries in tar file - putEntriesInTar(tos, tarRoot); - putEntriesInTar(tos, new File(symLink.toFile(), "dir-outside-tar-root/")); - tos.close(); - - // Untar using Java - File untarFile = new 
File(rootDir, "extracted"); - FileUtil.unTarUsingJava(simpleTar, untarFile, false); - } finally { - FileUtils.deleteDirectory(rootDir); - } - }); + final File simpleTar = new File(del, FILE); + OutputStream os = new FileOutputStream(simpleTar); + + File rootDir = new File("tmp"); + try (TarArchiveOutputStream tos = new TarArchiveOutputStream(os)) { + tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU); + + // Create arbitrary dir + File arbitraryDir = new File(rootDir, "arbitrary-dir/"); + Verify.mkdirs(arbitraryDir); + + // We will tar from the tar-root lineage + File tarRoot = new File(rootDir, "tar-root/"); + File symlinkRoot = new File(tarRoot, "dir1/"); + Verify.mkdirs(symlinkRoot); + + // Create Symbolic Link to an arbitrary dir + java.nio.file.Path symLink = Paths.get(symlinkRoot.getPath(), "sl"); + Files.createSymbolicLink(symLink, arbitraryDir.toPath().toAbsolutePath()); + + // Put entries in tar file + putEntriesInTar(tos, tarRoot); + putEntriesInTar(tos, new File(symLink.toFile(), "dir-outside-tar-root/")); + tos.close(); + + // Untar using Java + File untarFile = new File(rootDir, "extracted"); + FileUtil.unTarUsingJava(simpleTar, untarFile, false); + } finally { + FileUtils.deleteDirectory(rootDir); + } } private void putEntriesInTar(TarArchiveOutputStream tos, File f) @@ -1615,7 +1578,7 @@ private void putEntriesInTar(TarArchiveOutputStream tos, File f) @Test public void testReadSymlinkWithNullInput() { String result = FileUtil.readLink(null); - assertEquals("", result); + Assert.assertEquals("", result); } /** @@ -1632,7 +1595,7 @@ public void testReadSymlink() throws IOException { FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath()); String result = FileUtil.readLink(link); - assertEquals(file.getAbsolutePath(), result); + Assert.assertEquals(file.getAbsolutePath(), result); } @Test @@ -1663,7 +1626,7 @@ public void testReadSymlinkWithAFileAsInput() throws IOException { File file = new File(del, FILE); String result = FileUtil.readLink(file); - assertEquals("", result); + Assert.assertEquals("", result); Verify.delete(file); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java index e746e3aed82f3..aa434a270bf71 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java @@ -18,8 +18,16 @@ package org.apache.hadoop.fs; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.reset; import java.io.IOException; import java.lang.reflect.Method; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java index 1d2d348a741e8..f3c822a985d29 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsOptions.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.fs; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import org.apache.hadoop.fs.Options.ChecksumOpt; import org.apache.hadoop.util.DataChecksum; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShell.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShell.java index bba5dac6fc6bb..a8020a66183a2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShell.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShell.java @@ -22,9 +22,12 @@ import org.apache.hadoop.fs.shell.CommandFactory; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.ToolRunner; -import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; -import org.mockito.Mockito; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class TestFsShell { @@ -65,11 +68,11 @@ public void testDFSWithInvalidCommmand() throws Throwable { try (GenericTestUtils.SystemErrCapturer capture = new GenericTestUtils.SystemErrCapturer()) { ToolRunner.run(shell, new String[]{"dfs -mkdirs"}); - Assertions.assertThat(capture.getOutput()) + assertThat(capture.getOutput()) .as("FSShell dfs command did not print the error " + "message when invalid command is passed") .contains("-mkdirs: Unknown command"); - Assertions.assertThat(capture.getOutput()) + assertThat(capture.getOutput()) .as("FSShell dfs command did not print help " + "message when invalid command is passed") .contains("Usage: hadoop fs [generic options]"); @@ -79,22 +82,22 @@ public void testDFSWithInvalidCommmand() throws Throwable { @Test public void testExceptionNullMessage() throws Exception { final String cmdName = "-cmdExNullMsg"; - final Command cmd = Mockito.mock(Command.class); - Mockito.when(cmd.run(Mockito.any())).thenThrow( + final Command cmd = mock(Command.class); + when(cmd.run(any())).thenThrow( new IllegalArgumentException()); - Mockito.when(cmd.getUsage()).thenReturn(cmdName); + when(cmd.getUsage()).thenReturn(cmdName); - final CommandFactory cmdFactory = Mockito.mock(CommandFactory.class); + final CommandFactory cmdFactory = mock(CommandFactory.class); final String[] names = {cmdName}; - Mockito.when(cmdFactory.getNames()).thenReturn(names); - Mockito.when(cmdFactory.getInstance(cmdName)).thenReturn(cmd); + when(cmdFactory.getNames()).thenReturn(names); + when(cmdFactory.getInstance(cmdName)).thenReturn(cmd); FsShell shell = new FsShell(new Configuration()); shell.commandFactory = cmdFactory; try (GenericTestUtils.SystemErrCapturer capture = new GenericTestUtils.SystemErrCapturer()) { ToolRunner.run(shell, new String[]{cmdName}); - Assertions.assertThat(capture.getOutput()) + assertThat(capture.getOutput()) .contains(cmdName + ": Null exception message"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellList.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellList.java index 41ff47def2893..c2a3a1c1efdc9 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellList.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellList.java @@ -77,8 +77,8 @@ public void testList() throws Exception { } /* - * UGI params should take effect when we pass. - */ + UGI params should take effect when we pass. + */ @Test public void testListWithUGI() throws Exception { assertThrows(IllegalArgumentException.class, () -> { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java index 50a32876c33ad..5ce7b074e73df 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java @@ -106,14 +106,10 @@ private void change(int exit, String owner, String group, String...files) FileStatus[] stats = fileSys.globStatus(new Path(files[i])); if (stats != null) { for (int j=0; j < stats.length; j++) { - assertEquals( - ((owner != null) ? "STUB-"+owner : oldStats[i][j].getOwner()) -, stats[j].getOwner(), "check owner of " + files[i] - ); - assertEquals( - ((group != null) ? "STUB-"+group : oldStats[i][j].getGroup()) -, stats[j].getGroup(), "check group of " + files[i] - ); + assertEquals(((owner != null) ? "STUB-"+owner : oldStats[i][j].getOwner()), + stats[j].getOwner(), "check owner of " + files[i]); + assertEquals(((group != null) ? "STUB-"+group : oldStats[i][j].getGroup()), + stats[j].getGroup(), "check group of " + files[i]); } } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsUrlConnectionPath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsUrlConnectionPath.java index b87e6ab6bf49a..37499d3b1cb10 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsUrlConnectionPath.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsUrlConnectionPath.java @@ -15,7 +15,6 @@ import org.apache.hadoop.conf.Configuration; import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -23,6 +22,8 @@ import java.net.URL; import java.nio.file.Paths; +import static org.junit.jupiter.api.Assertions.assertTrue; + /** * Test case for FsUrlConnection with relativePath and SPACE. 
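The TestFsUrlConnectionPath cases below only check that more than one byte can be read back from absolute, relative, and space-containing file: URLs. A rough sketch of that kind of byte count, assuming the Hadoop URL stream handler factory is registered once per JVM and using a hypothetical local path:

    import java.io.InputStream;
    import java.net.URL;
    import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;

    class UrlReadSketch {
      static int readStream(String url) throws Exception {
        int length = 0;
        try (InputStream in = new URL(url).openStream()) {
          while (in.read() != -1) {
            length++;        // count every byte, mirroring the "length > 1" checks
          }
        }
        return length;
      }

      public static void main(String[] args) throws Exception {
        // Route URL access through Hadoop filesystems; allowed only once per JVM.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
        System.out.println(readStream("file:///tmp/example.txt"));  // hypothetical path
      }
    }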
*/ @@ -83,25 +84,25 @@ public static int readStream(String path) throws Exception{ @Test public void testAbsolutePath() throws Exception{ int length = readStream(ABSOLUTE_PATH); - Assertions.assertTrue(length > 1); + assertTrue(length > 1); } @Test public void testRelativePath() throws Exception{ int length = readStream(RELATIVE_PATH); - Assertions.assertTrue(length > 1); + assertTrue(length > 1); } @Test public void testAbsolutePathWithSpace() throws Exception{ int length = readStream(ABSOLUTE_PATH_W_ENCODED_SPACE); - Assertions.assertTrue(length > 1); + assertTrue(length > 1); } @Test public void testRelativePathWithSpace() throws Exception{ int length = readStream(RELATIVE_PATH_W_ENCODED_SPACE); - Assertions.assertTrue(length > 1); + assertTrue(length > 1); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java index 4155a787daef7..932ace76d2595 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetFileBlockLocations.java @@ -25,7 +25,8 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java index 454c0a684f3fc..7ef34281982a9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java @@ -26,7 +26,10 @@ import java.io.File; import java.io.IOException; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestGetSpaceUsed { final static private File DIR = diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobPattern.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobPattern.java index 085314f54022a..27ae520aa9fd4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobPattern.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobPattern.java @@ -20,7 +20,7 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.re2j.PatternSyntaxException; /** @@ -32,8 +32,7 @@ private void assertMatch(boolean yes, String glob, String...input) { for (String s : input) { boolean result = pattern.matches(s); - assertTrue( - yes ? result : !result, glob +" should"+ (yes ? "" : " not") +" match "+ s); + assertTrue(yes ? result : !result, glob +" should"+ (yes ? 
"" : " not") +" match "+ s); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java index 16d5b0e7919f2..612954de784db 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java @@ -28,7 +28,6 @@ import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.DelegationTokenIssuer; import org.apache.hadoop.util.Progressable; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -47,6 +46,7 @@ import static org.apache.hadoop.fs.Options.CreateOpts; import static org.apache.hadoop.fs.Options.Rename; import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.fail; @SuppressWarnings("deprecation") public class TestHarFileSystem { @@ -277,7 +277,7 @@ static void checkInvalidPath(String s, Configuration conf) { final Path p = new Path(s); try { p.getFileSystem(conf); - Assertions.fail(p + " is an invalid path."); + fail(p + " is an invalid path."); } catch (IOException e) { // Expected } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java index 938859d2a48b8..445d13e426b83 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java @@ -23,7 +23,6 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Shell; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -36,7 +35,9 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; /** @@ -259,8 +260,8 @@ public void testListLocatedStatus() throws Exception { assertTrue(expectedFileNames.contains(fileName), fileName + " not in expected files list"); expectedFileNames.remove(fileName); } - assertEquals( - 0, expectedFileNames.size(), "Didn't find all of the expected file names: " + expectedFileNames); + assertEquals(0, expectedFileNames.size(), + "Didn't find all of the expected file names: " + expectedFileNames); } @Test @@ -291,7 +292,7 @@ public void testNegativeInitWithoutIndex() throws Exception { final URI uri = new URI("har://" + harPath.toString()); try { hfs.initialize(uri, new Configuration()); - Assertions.fail("Exception expected."); + fail("Exception expected."); } catch (IOException ioe) { // ok, expected. } @@ -302,7 +303,7 @@ public void testNegativeGetHarVersionOnNotInitializedFS() throws Exception { final HarFileSystem hfs = new HarFileSystem(localFileSystem); try { int version = hfs.getHarVersion(); - Assertions.fail("Exception expected, but got a Har version " + version + "."); + fail("Exception expected, but got a Har version " + version + "."); } catch (IOException ioe) { // ok, expected. 
} @@ -326,7 +327,7 @@ public void testNegativeInitWithAnUnsupportedVersion() throws Exception { final URI uri = new URI("har://" + harPath.toString()); try { hfs.initialize(uri, new Configuration()); - Assertions.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } @@ -340,28 +341,28 @@ public void testNegativeHarFsModifications() throws Exception { try { harFileSystem.create(fooPath, new FsPermission("+rwx"), true, 1024, (short) 88, 1024, null); - Assertions.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.setReplication(fooPath, (short) 55); - Assertions.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.delete(fooPath, true); - Assertions.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.mkdirs(fooPath, new FsPermission("+rwx")); - Assertions.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } @@ -369,35 +370,35 @@ public void testNegativeHarFsModifications() throws Exception { final Path indexPath = new Path(harPath, "_index"); try { harFileSystem.copyFromLocalFile(false, indexPath, fooPath); - Assertions.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.startLocalOutput(fooPath, indexPath); - Assertions.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.completeLocalOutput(fooPath, indexPath); - Assertions.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.setOwner(fooPath, "user", "group"); - Assertions.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. } try { harFileSystem.setPermission(fooPath, new FsPermission("+x")); - Assertions.fail("IOException expected."); + fail("IOException expected."); } catch (IOException ioe) { // ok, expected. 
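The try / fail / catch blocks above are one of the equivalent ways this patch asserts that a call throws; other files use JUnit 5's assertThrows or Hadoop's LambdaTestUtils.intercept, and JUnit 4 also offers @Test(expected = IOException.class). A compact sketch of the first two idioms, with a stand-in operation:

    import java.io.IOException;
    import org.junit.jupiter.api.Test;
    import static org.junit.jupiter.api.Assertions.assertThrows;
    import static org.junit.jupiter.api.Assertions.fail;

    class ExpectedExceptionSketchTest {
      private void readOnlyOperation() throws IOException {
        throw new IOException("filesystem is read-only");  // stand-in for the real call
      }

      @Test
      void tryFailCatchStyle() {
        try {
          readOnlyOperation();
          fail("IOException expected.");
        } catch (IOException ioe) {
          // ok, expected.
        }
      }

      @Test
      void assertThrowsStyle() {
        assertThrows(IOException.class, this::readOnlyOperation);
      }
    }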
} @@ -406,7 +407,7 @@ public void testNegativeHarFsModifications() throws Exception { @Test public void testHarFsWithoutAuthority() throws Exception { final URI uri = harFileSystem.getUri(); - Assertions.assertNull(uri.getAuthority(), "har uri authority not null: " + uri); + assertNull(uri.getAuthority(), "har uri authority not null: " + uri); FileContext.getFileContext(uri, conf); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java index a5f1c9c5de703..97023da62d2bf 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java @@ -26,7 +26,9 @@ import org.apache.hadoop.test.GenericTestUtils; import org.junit.jupiter.api.AfterEach; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java index 8c65dbbd0cb2a..c619557ccca1c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java @@ -25,7 +25,9 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.BeforeAll; import org.slf4j.event.Level; @@ -160,18 +162,18 @@ public void testDirectory() throws IOException { itor = fs.listFiles(TEST_DIR, true); stat = itor.next(); assertTrue(stat.isFile()); - assertTrue( - filesToFind.remove(stat.getPath()), "Path " + stat.getPath() + " unexpected"); + assertTrue(filesToFind.remove(stat.getPath()), + "Path " + stat.getPath() + " unexpected"); stat = itor.next(); assertTrue(stat.isFile()); - assertTrue( - filesToFind.remove(stat.getPath()), "Path " + stat.getPath() + " unexpected"); + assertTrue(filesToFind.remove(stat.getPath()), + "Path " + stat.getPath() + " unexpected"); stat = itor.next(); assertTrue(stat.isFile()); - assertTrue( - filesToFind.remove(stat.getPath()), "Path " + stat.getPath() + " unexpected"); + assertTrue(filesToFind.remove(stat.getPath()), + "Path " + stat.getPath() + " unexpected"); assertFalse(itor.hasNext()); assertTrue(filesToFind.isEmpty()); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java index b9505f8516fe2..e11ff66c6c2fc 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java @@ -30,14 +30,16 @@ import 
org.apache.hadoop.util.DiskChecker.DiskErrorException; import org.apache.hadoop.util.Shell; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; -import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; /** This test LocalDirAllocator works correctly; * Every test case uses different buffer dirs to @@ -46,7 +48,6 @@ * a directory can be created in a read-only directory * which breaks this test. */ -@RunWith(Parameterized.class) public class TestLocalDirAllocator { final static private Configuration conf = new Configuration(); final static private String BUFFER_DIR_ROOT = "build/test/temp"; @@ -63,8 +64,8 @@ public class TestLocalDirAllocator { final static private String RELATIVE = "/RELATIVE"; final static private String ABSOLUTE = "/ABSOLUTE"; final static private String QUALIFIED = "/QUALIFIED"; - final private String ROOT; - final private String PREFIX; + private String ROOT; + private String PREFIX; static { try { @@ -85,12 +86,11 @@ public class TestLocalDirAllocator { BUFFER_DIR_ROOT).toUri().toString(); } - public TestLocalDirAllocator(String root, String prefix) { + public void initTestLocalDirAllocator(String root, String prefix) { ROOT = root; PREFIX = prefix; } - - @Parameters + public static Collection params() { Object [][] data = new Object[][] { { BUFFER_DIR_ROOT, RELATIVE }, @@ -125,15 +125,13 @@ private static File createTempFile(long size) throws IOException { private String buildBufferDir(String dir, int i) { return dir + PREFIX + i; } - - /** Two buffer dirs. The first dir does not exist & is on a read-only disk; - * The second dir exists & is RW - * @throws Exception - */ - @Test + @Timeout(value = 30) - public void test0() throws Exception { + @MethodSource("params") + @ParameterizedTest + public void test0(String root, String prefix) throws Exception { assumeNotWindows(); + initTestLocalDirAllocator(root, prefix); String dir0 = buildBufferDir(ROOT, 0); String dir1 = buildBufferDir(ROOT, 1); try { @@ -153,10 +151,12 @@ public void test0() throws Exception { * The second dir exists & is RW * @throws Exception */ - @Test @Timeout(value = 30) - public void testROBufferDirAndRWBufferDir() throws Exception { + @MethodSource("params") + @ParameterizedTest + public void testROBufferDirAndRWBufferDir(String root, String prefix) throws Exception { assumeNotWindows(); + initTestLocalDirAllocator(root, prefix); String dir1 = buildBufferDir(ROOT, 1); String dir2 = buildBufferDir(ROOT, 2); try { @@ -171,13 +171,16 @@ public void testROBufferDirAndRWBufferDir() throws Exception { rmBufferDirs(); } } + /** Two buffer dirs. Both do not exist but on a RW disk. 
* Check if tmp dirs are allocated in a round-robin */ - @Test @Timeout(value = 30) - public void testDirsNotExist() throws Exception { + @MethodSource("params") + @ParameterizedTest + public void testDirsNotExist(String root, String prefix) throws Exception { assumeNotWindows(); + initTestLocalDirAllocator(root, prefix); String dir2 = buildBufferDir(ROOT, 2); String dir3 = buildBufferDir(ROOT, 3); try { @@ -201,10 +204,12 @@ public void testDirsNotExist() throws Exception { * Later disk1 becomes read-only. * @throws Exception */ - @Test @Timeout(value = 30) - public void testRWBufferDirBecomesRO() throws Exception { + @MethodSource("params") + @ParameterizedTest + public void testRWBufferDirBecomesRO(String root, String prefix) throws Exception { assumeNotWindows(); + initTestLocalDirAllocator(root, prefix); String dir3 = buildBufferDir(ROOT, 3); String dir4 = buildBufferDir(ROOT, 4); try { @@ -240,10 +245,13 @@ public void testRWBufferDirBecomesRO() throws Exception { * @throws Exception */ static final int TRIALS = 100; - @Test + @Timeout(value = 30) - public void testCreateManyFiles() throws Exception { + @MethodSource("params") + @ParameterizedTest + public void testCreateManyFiles(String root, String prefix) throws Exception { assumeNotWindows(); + initTestLocalDirAllocator(root, prefix); String dir5 = buildBufferDir(ROOT, 5); String dir6 = buildBufferDir(ROOT, 6); try { @@ -284,10 +292,12 @@ public void testCreateManyFiles() throws Exception { * * @throws Exception */ - @Test @Timeout(value = 30) - public void testCreateManyFilesRandom() throws Exception { + @MethodSource("params") + @ParameterizedTest + public void testCreateManyFilesRandom(String root, String prefix) throws Exception { assumeNotWindows(); + initTestLocalDirAllocator(root, prefix); final int numDirs = 5; final int numTries = 100; String[] dirs = new String[numDirs]; @@ -338,9 +348,11 @@ public void testCreateManyFilesRandom() throws Exception { * directory. With checkAccess true, the directory should not be created. * @throws Exception */ - @Test @Timeout(value = 30) - public void testLocalPathForWriteDirCreation() throws IOException { + @MethodSource("params") + @ParameterizedTest + public void testLocalPathForWriteDirCreation(String root, String prefix) throws IOException { + initTestLocalDirAllocator(root, prefix); String dir0 = buildBufferDir(ROOT, 0); String dir1 = buildBufferDir(ROOT, 1); try { @@ -370,9 +382,11 @@ public void testLocalPathForWriteDirCreation() throws IOException { * Test when mapred.local.dir not configured and called * getLocalPathForWrite */ - @Test @Timeout(value = 30) - public void testShouldNotthrowNPE() throws Exception { + @MethodSource("params") + @ParameterizedTest + public void testShouldNotthrowNPE(String root, String prefix) throws Exception { + initTestLocalDirAllocator(root, prefix); Configuration conf1 = new Configuration(); try { dirAllocator.getLocalPathForWrite("/test", conf1); @@ -413,10 +427,12 @@ public void testShouldNotthrowNPE() throws Exception { * are mistakenly created from fully qualified path strings. 
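The TestLocalDirAllocator cases in this file exercise the allocator's round-robin choice of buffer directories and its read/write path lookups through a test-specific context key. A minimal usage sketch; the configuration key and directory names below are illustrative only:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.LocalDirAllocator;
    import org.apache.hadoop.fs.Path;

    class LocalDirAllocatorSketch {
      static Path allocateScratchFile() throws IOException {
        Configuration conf = new Configuration();
        // Comma-separated list of candidate local directories (illustrative key name).
        conf.set("test.local.buffer.dirs", "/tmp/buf0,/tmp/buf1");
        LocalDirAllocator allocator = new LocalDirAllocator("test.local.buffer.dirs");
        // Returns a path under one of the configured directories that has space,
        // creating the directory if needed.
        return allocator.getLocalPathForWrite("scratch/part-00000", conf);
      }
    }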
* @throws IOException */ - @Test @Timeout(value = 30) - public void testNoSideEffects() throws IOException { + @MethodSource("params") + @ParameterizedTest + public void testNoSideEffects(String root, String prefix) throws IOException { assumeNotWindows(); + initTestLocalDirAllocator(root, prefix); String dir = buildBufferDir(ROOT, 0); try { conf.set(CONTEXT, dir); @@ -436,10 +452,12 @@ public void testNoSideEffects() throws IOException { * * @throws IOException */ - @Test @Timeout(value = 30) - public void testGetLocalPathToRead() throws IOException { + @MethodSource("params") + @ParameterizedTest + public void testGetLocalPathToRead(String root, String prefix) throws IOException { assumeNotWindows(); + initTestLocalDirAllocator(root, prefix); String dir = buildBufferDir(ROOT, 0); try { conf.set(CONTEXT, dir); @@ -462,11 +480,12 @@ public void testGetLocalPathToRead() throws IOException { * * @throws IOException */ - @Test @Timeout(value = 30) - public void testGetAllLocalPathsToRead() throws IOException { + @MethodSource("params") + @ParameterizedTest + public void testGetAllLocalPathsToRead(String root, String prefix) throws IOException { assumeNotWindows(); - + initTestLocalDirAllocator(root, prefix); String dir0 = buildBufferDir(ROOT, 0); String dir1 = buildBufferDir(ROOT, 1); try { @@ -511,9 +530,11 @@ public void testGetAllLocalPathsToRead() throws IOException { } } - @Test @Timeout(value = 30) - public void testRemoveContext() throws IOException { + @MethodSource("params") + @ParameterizedTest + public void testRemoveContext(String root, String prefix) throws IOException { + initTestLocalDirAllocator(root, prefix); String dir = buildBufferDir(ROOT, 0); try { String contextCfgItemName = "application_1340842292563_0004.app.cache.dirs"; @@ -534,16 +555,19 @@ public void testRemoveContext() throws IOException { * * @throws Exception */ - @Test @Timeout(value = 30) - public void testGetLocalPathForWriteForInvalidPaths() throws Exception { + @MethodSource("params") + @ParameterizedTest + public void testGetLocalPathForWriteForInvalidPaths(String root, String prefix) + throws Exception { + initTestLocalDirAllocator(root, prefix); conf.set(CONTEXT, " "); try { dirAllocator.getLocalPathForWrite("/test", conf); fail("not throwing the exception"); } catch (IOException e) { - assertEquals("Incorrect exception message", - "No space available in any of the local directories.", e.getMessage()); + assertEquals("No space available in any of the local directories.", + e.getMessage(), "Incorrect exception message"); } } @@ -552,9 +576,11 @@ public void testGetLocalPathForWriteForInvalidPaths() throws Exception { * * @throws Exception */ - @Test @Timeout(value = 30) - public void testGetLocalPathForWriteForLessSpace() throws Exception { + @MethodSource("params") + @ParameterizedTest + public void testGetLocalPathForWriteForLessSpace(String root, String prefix) throws Exception { + initTestLocalDirAllocator(root, prefix); String dir0 = buildBufferDir(ROOT, 0); String dir1 = buildBufferDir(ROOT, 1); conf.set(CONTEXT, dir0 + "," + dir1); @@ -567,9 +593,11 @@ public void testGetLocalPathForWriteForLessSpace() throws Exception { /** * Test for HADOOP-18636 LocalDirAllocator cannot recover from directory tree deletion. 
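The conversion above is the standard JUnit 4 to 5 recipe for parameterized suites: @RunWith(Parameterized.class) plus a parameterized constructor becomes @ParameterizedTest with a @MethodSource factory, and each test first hands its arguments to an init method (initTestLocalDirAllocator here). A self-contained sketch with hypothetical names, reusing the root and prefix values from the test above:

    import java.util.Arrays;
    import java.util.Collection;
    import org.junit.jupiter.params.ParameterizedTest;
    import org.junit.jupiter.params.provider.MethodSource;
    import static org.junit.jupiter.api.Assertions.assertTrue;

    class ParameterizedSketchTest {
      private String root;
      private String prefix;

      // Replaces the JUnit 4 @Parameters factory; each Object[] becomes one invocation.
      static Collection<Object[]> params() {
        return Arrays.asList(new Object[][] {
            {"build/test/temp", "/RELATIVE"},
            {"build/test/temp", "/ABSOLUTE"},
        });
      }

      // Replaces the JUnit 4 parameterized constructor.
      private void init(String newRoot, String newPrefix) {
        this.root = newRoot;
        this.prefix = newPrefix;
      }

      @ParameterizedTest
      @MethodSource("params")
      void buildsBufferDir(String newRoot, String newPrefix) {
        init(newRoot, newPrefix);
        assertTrue((root + prefix + 0).startsWith(root),
            "buffer dir should sit under the configured root");
      }
    }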
*/ - @Test @Timeout(value = 30) - public void testDirectoryRecovery() throws Throwable { + @MethodSource("params") + @ParameterizedTest + public void testDirectoryRecovery(String root, String prefix) throws Throwable { + initTestLocalDirAllocator(root, prefix); String dir0 = buildBufferDir(ROOT, 0); String subdir = dir0 + "/subdir1/subdir2"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java index 49b131836264f..223f611d4b167 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java @@ -40,32 +40,33 @@ import java.util.List; import java.util.Random; import java.util.Set; -import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; -import org.junit.Rule; import org.junit.jupiter.api.Test; -import org.junit.rules.Timeout; +import org.junit.jupiter.api.Timeout; import javax.annotation.Nonnull; -import static org.assertj.core.api.Assertions.assertThat; - /** * This class tests the local file system via the FileSystem abstraction. */ +@Timeout(60) public class TestLocalFileSystem { private static final File base = GenericTestUtils.getTestDir("work-dir/localfs"); @@ -74,13 +75,7 @@ public class TestLocalFileSystem { private final Path TEST_PATH = new Path(TEST_ROOT_DIR, "test-file"); private Configuration conf; private LocalFileSystem fileSys; - - /** - * Set the timeout for every test. 
- */ - @Rule - public Timeout testTimeout = new Timeout(60, TimeUnit.SECONDS); - + private void cleanupFile(FileSystem fs, Path name) throws IOException { assertTrue(fs.exists(name)); fs.delete(name, true); @@ -248,9 +243,9 @@ public void testCreateFileAndMkdirs() throws IOException { { //check FileStatus and ContentSummary final FileStatus status = fileSys.getFileStatus(test_file); - Assertions.assertEquals(fileSize, status.getLen()); + assertEquals(fileSize, status.getLen()); final ContentSummary summary = fileSys.getContentSummary(test_dir); - Assertions.assertEquals(fileSize, summary.getLength()); + assertEquals(fileSize, summary.getLength()); } // creating dir over a file @@ -683,7 +678,7 @@ public void testFSOutputStreamBuilder() throws Exception { new byte[(int) (fileSys.getFileStatus(path).getLen())]; input.readFully(0, buffer); input.close(); - Assertions.assertArrayEquals(contentOrigin, buffer, "The data be read should equals with the " + assertArrayEquals(contentOrigin, buffer, "The data be read should equals with the " + "data written."); } catch (IOException e) { throw e; @@ -770,8 +765,8 @@ public void testFSOutputStreamBuilderOptions() throws Exception { builder.must("strM", "value"); builder.must("unsupported", 12.34); - assertEquals("Optional value should be overwrite by a mandatory value", - "value", builder.getOptions().get("strM")); + assertEquals("value", builder.getOptions().get("strM"), + "Optional value should be overwrite by a mandatory value"); Set mandatoryKeys = builder.getMandatoryKeys(); Set expectedKeys = new HashSet<>(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java index 87d2cf4eb403d..c9c2464039101 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java @@ -21,7 +21,6 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Shell; -import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -33,7 +32,10 @@ import java.util.StringTokenizer; import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; -import static org.junit.jupiter.api.Assertions.*; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * This class tests the local file system via the FileSystem abstraction. 
@@ -234,9 +236,9 @@ public void testSetUmaskInRealTime() throws Exception { try { assertTrue(localfs.mkdirs(dir)); FsPermission initialPermission = getPermission(localfs, dir); - assertEquals( - new FsPermission("755"), initialPermission, "With umask 022 permission should be 755 since the default " + - "permission is 777"); + assertEquals(new FsPermission("755"), + initialPermission, "With umask 022 permission should be 755 since the default " + + "permission is 777"); // Modify umask and create a new directory // and check if new umask is applied @@ -244,12 +246,11 @@ public void testSetUmaskInRealTime() throws Exception { assertTrue(localfs.mkdirs(dir2)); FsPermission finalPermission = localfs.getFileStatus(dir2) .getPermission(); - Assertions.assertThat(new FsPermission("755")).as( + assertThat(new FsPermission("755")).as( "With umask 062 permission should not be 755 since the " + "default permission is 777").isNotEqualTo(finalPermission); - assertEquals( - - new FsPermission("715"), finalPermission, "With umask 062 we expect 715 since the default permission is 777"); + assertEquals(new FsPermission("715"), finalPermission, + "With umask 062 we expect 715 since the default permission is 777"); } finally { conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "022"); cleanup(localfs, dir); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java index ff241ed14f9d9..4204faaada332 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java @@ -18,9 +18,8 @@ package org.apache.hadoop.fs; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.Timeout; +import org.junit.Assert; +import org.junit.Test; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -41,10 +40,10 @@ import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; /** * Test Hadoop Filesystem Paths. 
@@ -77,8 +76,7 @@ public static String mergeStatuses(FileStatus statuses[]) { return mergeStatuses(paths); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testToString() { toStringTest("/"); toStringTest("/foo"); @@ -111,8 +109,7 @@ private void toStringTest(String pathString) { assertEquals(pathString, new Path(pathString).toString()); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testNormalize() throws URISyntaxException { assertEquals("", new Path(".").toString()); assertEquals("..", new Path("..").toString()); @@ -136,8 +133,7 @@ public void testNormalize() throws URISyntaxException { } } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testIsAbsolute() { assertTrue(new Path("/").isAbsolute()); assertTrue(new Path("/foo").isAbsolute()); @@ -150,8 +146,7 @@ public void testIsAbsolute() { } } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testParent() { assertEquals(new Path("/foo"), new Path("/foo/bar").getParent()); assertEquals(new Path("foo"), new Path("foo/bar").getParent()); @@ -162,8 +157,7 @@ public void testParent() { } } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testChild() { assertEquals(new Path("."), new Path(".", ".")); assertEquals(new Path("/"), new Path("/", ".")); @@ -183,8 +177,7 @@ public void testChild() { } } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testPathThreeArgContructor() { assertEquals(new Path("foo"), new Path(null, null, "foo")); assertEquals(new Path("scheme:///foo"), new Path("scheme", null, "/foo")); @@ -220,14 +213,12 @@ public void testPathThreeArgContructor() { } } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testEquals() { assertFalse(new Path("/").equals(new Path("/foo"))); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testDots() { // Test Path(String) assertEquals(new Path("/foo/bar/baz").toString(), "/foo/bar/baz"); @@ -266,8 +257,7 @@ public void testDots() { } /** Test that Windows paths are correctly handled */ - @Test - @Timeout(value = 5) + @Test (timeout = 5000) public void testWindowsPaths() throws URISyntaxException, IOException { assumeWindows(); @@ -278,8 +268,7 @@ public void testWindowsPaths() throws URISyntaxException, IOException { } /** Test invalid paths on Windows are correctly rejected */ - @Test - @Timeout(value = 5) + @Test (timeout = 5000) public void testInvalidWindowsPaths() throws URISyntaxException, IOException { assumeWindows(); @@ -297,23 +286,20 @@ public void testInvalidWindowsPaths() throws URISyntaxException, IOException { } /** Test Path objects created from other Path objects */ - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testChildParentResolution() throws URISyntaxException, IOException { Path parent = new Path("foo1://bar1/baz1"); Path child = new Path("foo2://bar2/baz2"); assertEquals(child, new Path(parent, child)); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testScheme() throws java.io.IOException { assertEquals("foo:/bar", new Path("foo:/","/bar").toString()); assertEquals("foo://bar/baz", new Path("foo://bar/","/baz").toString()); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testURI() throws URISyntaxException, IOException { URI uri = new URI("file:///bar#baz"); Path path = new Path(uri); @@ -336,19 +322,18 @@ public void testURI() throws URISyntaxException, IOException { } /** Test URIs created from Path objects */ - @Test - @Timeout(value 
= 30) + @Test (timeout = 30000) public void testPathToUriConversion() throws URISyntaxException, IOException { // Path differs from URI in that it ignores the query part.. - assertEquals( - new URI(null, null, "/foo?bar", null, null) -, new Path("/foo?bar").toUri(), "? mark char in to URI"); - assertEquals( - new URI(null, null, "/foo\"bar", null, null) -, new Path("/foo\"bar").toUri(), "escape slashes chars in to URI"); - assertEquals( - new URI(null, null, "/foo bar", null, null) -, new Path("/foo bar").toUri(), "spaces in chars to URI"); + assertEquals("? mark char in to URI", + new URI(null, null, "/foo?bar", null, null), + new Path("/foo?bar").toUri()); + assertEquals("escape slashes chars in to URI", + new URI(null, null, "/foo\"bar", null, null), + new Path("/foo\"bar").toUri()); + assertEquals("spaces in chars to URI", + new URI(null, null, "/foo bar", null, null), + new Path("/foo bar").toUri()); // therefore "foo?bar" is a valid Path, so a URI created from a Path // has path "foo?bar" where in a straight URI the path part is just "foo" assertEquals("/foo?bar", @@ -365,8 +350,7 @@ public void testPathToUriConversion() throws URISyntaxException, IOException { } /** Test reserved characters in URIs (and therefore Paths) */ - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testReservedCharacters() throws URISyntaxException, IOException { // URI encodes the path assertEquals("/foo%20bar", @@ -396,8 +380,7 @@ public void testReservedCharacters() throws URISyntaxException, IOException { toURL().getPath()); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testMakeQualified() throws URISyntaxException { URI defaultUri = new URI("hdfs://host1/dir1"); URI wd = new URI("hdfs://host2/dir2"); @@ -411,8 +394,7 @@ public void testMakeQualified() throws URISyntaxException { new Path("file").makeQualified(defaultUri, new Path(wd))); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testGetName() { assertEquals("", new Path("/").getName()); assertEquals("foo", new Path("foo").getName()); @@ -422,8 +404,7 @@ public void testGetName() { assertEquals("bar", new Path("hdfs://host/foo/bar").getName()); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testAvroReflect() throws Exception { // Avro expects explicitely stated, trusted packages used for (de-)serialization System.setProperty(ConfigConstants.CONFIG_AVRO_SERIALIZABLE_PACKAGES, "org.apache.hadoop.fs"); @@ -432,8 +413,7 @@ public void testAvroReflect() throws Exception { "{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.fs.Path\"}"); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testGlobEscapeStatus() throws Exception { // This test is not meaningful on Windows where * is disallowed in file name. 
assumeNotWindows(); @@ -492,8 +472,7 @@ public void testGlobEscapeStatus() throws Exception { assertEquals(new Path(testRoot, "*/f"), stats[0].getPath()); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testMergePaths() { assertEquals(new Path("/foo/bar"), Path.mergePaths(new Path("/foo"), @@ -527,8 +506,7 @@ public void testMergePaths() { new Path("file://fileauthority/bar"))); } - @Test - @Timeout(value = 30) + @Test (timeout = 30000) public void testIsWindowsAbsolutePath() { assumeWindows(); assertTrue(Path.isWindowsAbsolutePath("C:\\test", false)); @@ -540,8 +518,7 @@ public void testIsWindowsAbsolutePath() { assertFalse(Path.isWindowsAbsolutePath("/C:test", true)); } - @Test - @Timeout(value = 30) + @Test(timeout = 30000) public void testSerDeser() throws Throwable { Path source = new Path("hdfs://localhost:4040/scratch"); ByteArrayOutputStream baos = new ByteArrayOutputStream(256); @@ -551,16 +528,15 @@ public void testSerDeser() throws Throwable { ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); try (ObjectInputStream ois = new ObjectInputStream(bais)) { Path deser = (Path) ois.readObject(); - Assertions.assertEquals(source, deser); + Assert.assertEquals(source, deser); } } - @Test - @Timeout(value = 30) + @Test(timeout = 30000) public void testSuffixFromRoot() { Path root = new Path("/"); - Assertions.assertNull(root.getParent()); - Assertions.assertEquals(new Path("/bar"), root.suffix("bar")); + Assert.assertNull(root.getParent()); + Assert.assertEquals(new Path("/bar"), root.suffix("bar")); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java index f47cbeb412409..c3e05d4e88758 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java @@ -43,11 +43,11 @@ public void testConstructorWithQuota() { QuotaUsage quotaUsage = new QuotaUsage.Builder(). fileAndDirectoryCount(fileAndDirCount).quota(quota). spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build(); - assertEquals(fileAndDirCount -, quotaUsage.getFileAndDirectoryCount(), "getFileAndDirectoryCount"); + assertEquals(fileAndDirCount, + quotaUsage.getFileAndDirectoryCount(), "getFileAndDirectoryCount"); assertEquals(quota, quotaUsage.getQuota(), "getQuota"); - assertEquals(spaceConsumed -, quotaUsage.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(spaceConsumed, + quotaUsage.getSpaceConsumed(), "getSpaceConsumed"); assertEquals(spaceQuota, quotaUsage.getSpaceQuota(), "getSpaceQuota"); } @@ -59,11 +59,11 @@ public void testConstructorNoQuota() { QuotaUsage quotaUsage = new QuotaUsage.Builder(). fileAndDirectoryCount(fileAndDirCount). 
spaceConsumed(spaceConsumed).build(); - assertEquals(fileAndDirCount -, quotaUsage.getFileAndDirectoryCount(), "getFileAndDirectoryCount"); + assertEquals(fileAndDirCount, + quotaUsage.getFileAndDirectoryCount(), "getFileAndDirectoryCount"); assertEquals(-1, quotaUsage.getQuota(), "getQuota"); - assertEquals(spaceConsumed -, quotaUsage.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(spaceConsumed, + quotaUsage.getSpaceConsumed(), "getSpaceConsumed"); assertEquals(-1, quotaUsage.getSpaceQuota(), "getSpaceQuota"); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java index a2a1d55ee7bed..301bf046cd257 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.fs; -import org.junit.jupiter.api.BeforeAll; +import org.junit.BeforeClass; import java.io.IOException; @@ -25,7 +25,7 @@ public class TestSymlinkLocalFSFileContext extends TestSymlinkLocalFS { - @BeforeAll + @BeforeClass public static void testSetup() throws Exception { FileContext context = FileContext.getLocalFSFileContext(); wrapper = new FileContextTestWrapper(context); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java index cf5dd658a36b2..98449493fa5e1 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java @@ -22,18 +22,17 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Options.Rename; -import org.junit.jupiter.api.BeforeAll; +import org.junit.BeforeClass; import org.junit.Ignore; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.Timeout; +import org.junit.Test; import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; public class TestSymlinkLocalFSFileSystem extends TestSymlinkLocalFS { - @BeforeAll + @BeforeClass public static void testSetup() throws Exception { FileSystem filesystem = FileSystem.getLocal(new Configuration()); wrapper = new FileSystemTestWrapper(filesystem); @@ -42,28 +41,24 @@ public static void testSetup() throws Exception { @Ignore("RawLocalFileSystem#mkdir does not treat existence of directory" + " as an error") @Override - @Test - @Timeout(value = 10) + @Test(timeout=10000) public void testMkdirExistingLink() throws IOException {} @Ignore("FileSystem#create defaults to creating parents," + " throwing an IOException instead of FileNotFoundException") @Override - @Test - @Timeout(value = 10) + @Test(timeout=10000) public void testCreateFileViaDanglingLinkParent() throws IOException {} @Ignore("RawLocalFileSystem does not throw an exception if the path" + " already exists") @Override - @Test - @Timeout(value = 10) + @Test(timeout=10000) public void testCreateFileDirExistingLink() throws IOException {} 
@Ignore("ChecksumFileSystem does not support append") @Override - @Test - @Timeout(value = 10) + @Test(timeout=10000) public void testAccessFileViaInterSymlinkAbsTarget() throws IOException {} @Override @@ -73,8 +68,7 @@ public void testRenameFileWithDestParentSymlink() throws IOException { } @Override - @Test - @Timeout(value = 10) + @Test(timeout=10000) /** Rename a symlink to itself */ public void testRenameSymlinkToItself() throws IOException { Path file = new Path(testBaseDir1(), "file"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java index f236a7caade3e..be60902160765 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java @@ -307,13 +307,11 @@ public static void trashShell(final Configuration conf, final Path base, args[2] = myFile.toString(); int val = -1; // Clear out trash - assertEquals( - 0, shell.run(new String[] {"-expunge" }), "-expunge failed"); + assertEquals(0, shell.run(new String[] {"-expunge" }), "-expunge failed"); val = shell.run(args); - assertFalse( - trashRootFs.exists(trashRoot), "Expected TrashRoot (" + trashRoot + + assertFalse(trashRootFs.exists(trashRoot), "Expected TrashRoot (" + trashRoot + ") to exist in file system:" + trashRootFs.getUri()); // No new Current should be created assertFalse(fs.exists(myFile)); @@ -347,8 +345,7 @@ public static void trashShell(final Configuration conf, final Path base, { int val = -1; mkdir(fs, myPath); - assertEquals( - 0, shell.run(new String[] {"-expunge" }), "Expunge should return zero"); + assertEquals(0, shell.run(new String[] {"-expunge" }), "Expunge should return zero"); // create a file in that directory. 
@@ -397,10 +394,9 @@ public static void trashShell(final Configuration conf, final Path base, String output = byteStream.toString(); System.setOut(stdout); System.setErr(stderr); - assertTrue( - output.indexOf("Consider using -skipTrash option") != -1 || - output.indexOf("Failed to determine server " - + "trash configuration") != -1, "skipTrash wasn't suggested as remedy to failed rm command" + + assertTrue(output.indexOf("Consider using -skipTrash option") != -1 || + output.indexOf("Failed to determine server " + "trash configuration") != -1, + "skipTrash wasn't suggested as remedy to failed rm command" + " or we deleted / even though we could not get server defaults"); } @@ -424,10 +420,9 @@ public static void trashShell(final Configuration conf, final Path base, rc = shell.run(new String[] {"-expunge" }); assertEquals(0, rc, "Expunge should return zero"); - assertFalse( - trashRootFs.exists(dirToDelete), "old checkpoint format not recognized"); - assertTrue( - trashRootFs.exists(dirToKeep), "old checkpoint format directory should not be removed"); + assertFalse(trashRootFs.exists(dirToDelete), "old checkpoint format not recognized"); + assertTrue(trashRootFs.exists(dirToKeep), + "old checkpoint format directory should not be removed"); } // Verify expunge -immediate removes all checkpoints and current folder @@ -452,14 +447,11 @@ public static void trashShell(final Configuration conf, final Path base, rc = shell.run(new String[] {"-expunge", "-immediate"}); assertEquals(0, rc, "Expunge immediate should return zero"); - assertFalse( - trashRootFs.exists(oldCheckpoint), "Old checkpoint should be removed"); - assertFalse( - trashRootFs.exists(recentCheckpoint), "Recent checkpoint should be removed"); - assertFalse( - trashRootFs.exists(currentFolder), "Current folder should be removed"); - assertEquals(0 -, trashRootFs.listStatus(trashRoot.getParent()).length, "Ensure trash folder is empty"); + assertFalse(trashRootFs.exists(oldCheckpoint), "Old checkpoint should be removed"); + assertFalse(trashRootFs.exists(recentCheckpoint), "Recent checkpoint should be removed"); + assertFalse(trashRootFs.exists(currentFolder), "Current folder should be removed"); + assertEquals(0, trashRootFs.listStatus(trashRoot.getParent()).length, + "Ensure trash folder is empty"); } } @@ -510,16 +502,12 @@ public void testExpungeWithFileSystem() throws Exception { "-fs", "testlfs:/"}; int val = testlfsshell.run(args); - assertEquals( - 0, val, "Expunge immediate with filesystem should return zero"); - assertFalse( - testlfs.exists(oldCheckpoint), "Old checkpoint should be removed"); - assertFalse( - testlfs.exists(recentCheckpoint), "Recent checkpoint should be removed"); - assertFalse( - testlfs.exists(currentFolder), "Current folder should be removed"); - assertEquals(0 -, testlfs.listStatus(trashRoot.getParent()).length, "Ensure trash folder is empty"); + assertEquals(0, val, "Expunge immediate with filesystem should return zero"); + assertFalse(testlfs.exists(oldCheckpoint), "Old checkpoint should be removed"); + assertFalse(testlfs.exists(recentCheckpoint), "Recent checkpoint should be removed"); + assertFalse(testlfs.exists(currentFolder), "Current folder should be removed"); + assertEquals(0, + testlfs.listStatus(trashRoot.getParent()).length, "Ensure trash folder is empty"); // Incorrect FileSystem scheme String incorrectFS = "incorrectfs:/"; @@ -527,17 +515,15 @@ public void testExpungeWithFileSystem() throws Exception { "-fs", incorrectFS}; val = testlfsshell.run(args); - assertEquals( - 1, val, "Expunge 
immediate should return exit code 1 when " - + "incorrect Filesystem is passed"); + assertEquals(1, val, "Expunge immediate should return exit code 1 when " + + "incorrect Filesystem is passed"); // Empty FileSystem scheme args = new String[]{"-expunge", "-immediate", "-fs", ""}; val = testlfsshell.run(args); - assertNotEquals( - 0, val, "Expunge immediate should fail when filesystem is NULL"); + assertNotEquals(0, val, "Expunge immediate should fail when filesystem is NULL"); FileSystem.removeFileSystemForTesting(testlfsURI, config, testlfs); } } @@ -969,18 +955,14 @@ public static void verifyMoveEmptyDirToTrash(FileSystem fs, Path trashRoot = trash.getCurrentTrashDir(emptyDir); fileSystem.delete(trashRoot, true); // Move to trash should be succeed - assertTrue( - trash.moveToTrash(emptyDir), "Move an empty directory to trash failed"); + assertTrue(trash.moveToTrash(emptyDir), "Move an empty directory to trash failed"); // Verify the empty dir is removed - assertFalse( - fileSystem.exists(emptyDir), "The empty directory still exists on file system"); + assertFalse(fileSystem.exists(emptyDir), "The empty directory still exists on file system"); emptyDir = fileSystem.makeQualified(emptyDir); Path dirInTrash = Path.mergePaths(trashRoot, emptyDir); - assertTrue( - fileSystem.exists(dirInTrash), "Directory wasn't moved to trash"); + assertTrue(fileSystem.exists(dirInTrash), "Directory wasn't moved to trash"); FileStatus[] flist = fileSystem.listStatus(dirInTrash); - assertTrue( - flist!= null && flist.length == 0, "Directory is not empty"); + assertTrue(flist!= null && flist.length == 0, "Directory is not empty"); } } @@ -1029,12 +1011,11 @@ public static void verifyTrashPermission(FileSystem fs, Configuration conf) } Path fileInTrash = Path.mergePaths(trashDir, file); FileStatus fstat = wrapper.getFileStatus(fileInTrash); - assertTrue( - wrapper.exists(fileInTrash), String.format("File %s is not moved to trash", + assertTrue(wrapper.exists(fileInTrash), String.format("File %s is not moved to trash", fileInTrash.toString())); // Verify permission not change - assertTrue( - fstat.getPermission().equals(fsPermission), String.format("Expected file: %s is %s, but actual is %s", + assertTrue(fstat.getPermission().equals(fsPermission), + String.format("Expected file: %s is %s, but actual is %s", fileInTrash.toString(), fsPermission.toString(), fstat.getPermission().toString())); @@ -1077,11 +1058,9 @@ private void verifyAuditableTrashEmptier(Trash trash, emptierThread.join(); AuditableTrashPolicy at = (AuditableTrashPolicy) trash.getTrashPolicy(); - assertEquals( - - expectedNumOfCheckpoints -, at.getNumberOfCheckpoints(), String.format("Expected num of checkpoints is %s, but actual is %s", - expectedNumOfCheckpoints, at.getNumberOfCheckpoints())); + assertEquals(expectedNumOfCheckpoints, at.getNumberOfCheckpoints(), + String.format("Expected num of checkpoints is %s, but actual is %s", + expectedNumOfCheckpoints, at.getNumberOfCheckpoints())); } catch (InterruptedException e) { // Ignore } finally { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTruncatedInputBug.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTruncatedInputBug.java index d8eec6ade4777..56471739d4b15 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTruncatedInputBug.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTruncatedInputBug.java @@ -20,7 +20,7 @@ import 
java.io.DataOutputStream; import java.io.IOException; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertTrue; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/audit/TestCommonAuditContext.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/audit/TestCommonAuditContext.java index 31df40815bc21..9782eb276d306 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/audit/TestCommonAuditContext.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/audit/TestCommonAuditContext.java @@ -25,7 +25,7 @@ import java.util.stream.StreamSupport; import org.assertj.core.api.AbstractStringAssert; -import org.junit.jupiter.api.Test; +import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java index 5a9929e4e6d05..62648ec58bcc7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ftp/FTPContract.java @@ -25,7 +25,7 @@ import java.net.URI; -import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.Assert.assertNotNull; /** * The contract of FTP; requires the option "test.testdir" to be set @@ -55,7 +55,7 @@ public String getScheme() { @Override public Path getTestPath() { String pathString = getOption(TEST_FS_TESTDIR, null); - assertNotNull(pathString, "Undefined test option " + TEST_FS_TESTDIR); + assertNotNull("Undefined test option " + TEST_FS_TESTDIR, pathString); Path path = new Path(pathString); return path; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java index 312b6e1e68dd3..d3529dcb8d4f9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java @@ -41,9 +41,8 @@ import org.apache.hadoop.test.LambdaTestUtils; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; -import org.junit.Rule; import org.junit.jupiter.api.Test; -import org.junit.rules.Timeout; +import org.junit.jupiter.api.Timeout; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -53,12 +52,11 @@ * Test basic @{link FTPFileSystem} class methods. Contract tests are in * TestFTPContractXXXX. 
*/ +@Timeout(180) public class TestFTPFileSystem { private FtpTestServer server; private java.nio.file.Path testDir; - @Rule - public Timeout testTimeout = new Timeout(180000, TimeUnit.MILLISECONDS); @BeforeEach public void setUp() throws Exception { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestAcl.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestAcl.java index ce53f2117a6b3..c8730c4450f11 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestAcl.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestAcl.java @@ -17,7 +17,9 @@ */ package org.apache.hadoop.fs.permission; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotSame; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java index 3f3ae7fd87a82..5fc74d5d066a4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java @@ -22,7 +22,9 @@ import org.apache.hadoop.conf.Configuration; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.apache.hadoop.fs.permission.FsAction.*; @@ -252,8 +254,8 @@ public void testBadUmasks() { FsPermission.getUMask(conf); fail("Shouldn't have been able to parse bad umask"); } catch(IllegalArgumentException iae) { - assertTrue(isCorrectExceptionMessage(iae.getMessage(), b), "Exception should specify parsing error and invalid umask: " - + iae.getMessage()); + assertTrue(isCorrectExceptionMessage(iae.getMessage(), b), + "Exception should specify parsing error and invalid umask: " + iae.getMessage()); } } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/protocolPB/TestFSSerialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/protocolPB/TestFSSerialization.java index 1037cc89fd990..b2a505ff748a4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/protocolPB/TestFSSerialization.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/protocolPB/TestFSSerialization.java @@ -25,7 +25,7 @@ import static org.apache.hadoop.fs.FSProtos.*; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; /** * Verify PB serialization of FS data structures. 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java index 87e83be11f654..21cee11e0f6a2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java @@ -44,24 +44,26 @@ import org.apache.sshd.sftp.server.SftpSubsystemFactory; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeAll; -import org.junit.Rule; import org.junit.jupiter.api.Test; -import org.junit.rules.TestName; +import org.junit.jupiter.api.TestInfo; public class TestSFTPFileSystem { private static final String TEST_SFTP_DIR = "testsftp"; private static final String TEST_ROOT_DIR = GenericTestUtils.getTestDir().getAbsolutePath(); - - @Rule public TestName name = new TestName(); - + private static final String connection = "sftp://user:password@localhost"; private static Path localDir = null; private static FileSystem localFs = null; @@ -182,8 +184,8 @@ private static final Path touch(FileSystem fs, String filename, byte[] data) * @throws Exception */ @Test - public void testCreateFile() throws Exception { - Path file = touch(sftpFs, name.getMethodName().toLowerCase()); + public void testCreateFile(TestInfo testInfo) throws Exception { + Path file = touch(sftpFs, testInfo.getDisplayName().toLowerCase()); assertTrue(localFs.exists(file)); assertTrue(sftpFs.delete(file, false)); assertFalse(localFs.exists(file)); @@ -198,8 +200,8 @@ public void testCreateFile() throws Exception { * @throws Exception */ @Test - public void testFileExists() throws Exception { - Path file = touch(localFs, name.getMethodName().toLowerCase()); + public void testFileExists(TestInfo testInfo) throws Exception { + Path file = touch(localFs, testInfo.getDisplayName().toLowerCase()); assertTrue(sftpFs.exists(file)); assertTrue(localFs.exists(file)); assertTrue(sftpFs.delete(file, false)); @@ -216,9 +218,9 @@ public void testFileExists() throws Exception { * @throws Exception */ @Test - public void testReadFile() throws Exception { + public void testReadFile(TestInfo testInfo) throws Exception { byte[] data = "yaks".getBytes(); - Path file = touch(localFs, name.getMethodName().toLowerCase(), data); + Path file = touch(localFs, testInfo.getDisplayName().toLowerCase(), data); FSDataInputStream is = null; try { is = sftpFs.open(file); @@ -242,9 +244,9 @@ public void testReadFile() throws Exception { * @throws Exception */ @Test - public void testStatFile() throws Exception { + public void testStatFile(TestInfo testInfo) throws Exception { byte[] data = "yaks".getBytes(); - Path file = touch(localFs, name.getMethodName().toLowerCase(), data); + Path file = touch(localFs, testInfo.getDisplayName().toLowerCase(), data); FileStatus lstat = localFs.getFileStatus(file); 
FileStatus sstat = sftpFs.getFileStatus(file); @@ -266,9 +268,9 @@ public void testStatFile() throws Exception { * @throws Exception */ @Test - public void testDeleteNonEmptyDir() throws Exception { + public void testDeleteNonEmptyDir(TestInfo testInfo) throws Exception { assertThrows(IOException.class, () -> { - Path file = touch(localFs, name.getMethodName().toLowerCase()); + Path file = touch(localFs, testInfo.getDisplayName().toLowerCase()); sftpFs.delete(localDir, false); assertThat(((SFTPFileSystem) sftpFs).getConnectionPool().getLiveConnCount()). isEqualTo(1); @@ -281,8 +283,8 @@ public void testDeleteNonEmptyDir() throws Exception { * @throws Exception */ @Test - public void testDeleteNonExistFile() throws Exception { - Path file = new Path(localDir, name.getMethodName().toLowerCase()); + public void testDeleteNonExistFile(TestInfo testInfo) throws Exception { + Path file = new Path(localDir, testInfo.getDisplayName().toLowerCase()); assertFalse(sftpFs.delete(file, false)); assertThat( ((SFTPFileSystem) sftpFs).getConnectionPool().getLiveConnCount()) @@ -295,10 +297,10 @@ public void testDeleteNonExistFile() throws Exception { * @throws Exception */ @Test - public void testRenameFile() throws Exception { + public void testRenameFile(TestInfo testInfo) throws Exception { byte[] data = "dingos".getBytes(); - Path file1 = touch(localFs, name.getMethodName().toLowerCase() + "1"); - Path file2 = new Path(localDir, name.getMethodName().toLowerCase() + "2"); + Path file1 = touch(localFs, testInfo.getDisplayName().toLowerCase() + "1"); + Path file2 = new Path(localDir, testInfo.getDisplayName().toLowerCase() + "2"); assertTrue(sftpFs.rename(file1, file2)); @@ -320,10 +322,10 @@ public void testRenameFile() throws Exception { * @throws Exception */ @Test - public void testRenameNonExistFile() throws Exception { + public void testRenameNonExistFile(TestInfo testInfo) throws Exception { assertThrows(IOException.class, ()->{ - Path file1 = new Path(localDir, name.getMethodName().toLowerCase() + "1"); - Path file2 = new Path(localDir, name.getMethodName().toLowerCase() + "2"); + Path file1 = new Path(localDir, testInfo.getDisplayName().toLowerCase() + "1"); + Path file2 = new Path(localDir, testInfo.getDisplayName().toLowerCase() + "2"); sftpFs.rename(file1, file2); }); } @@ -334,17 +336,17 @@ public void testRenameNonExistFile() throws Exception { * @throws Exception */ @Test - public void testRenamingFileOntoExistingFile() throws Exception { + public void testRenamingFileOntoExistingFile(TestInfo testInfo) throws Exception { assertThrows(IOException.class, ()->{ - Path file1 = touch(localFs, name.getMethodName().toLowerCase() + "1"); - Path file2 = touch(localFs, name.getMethodName().toLowerCase() + "2"); + Path file1 = touch(localFs, testInfo.getDisplayName().toLowerCase() + "1"); + Path file2 = touch(localFs, testInfo.getDisplayName().toLowerCase() + "2"); sftpFs.rename(file1, file2); }); } @Test - public void testGetAccessTime() throws IOException { - Path file = touch(localFs, name.getMethodName().toLowerCase()); + public void testGetAccessTime(TestInfo testInfo) throws IOException { + Path file = touch(localFs, testInfo.getDisplayName().toLowerCase()); LocalFileSystem local = (LocalFileSystem)localFs; java.nio.file.Path path = (local).pathToFile(file).toPath(); long accessTime1 = Files.readAttributes(path, BasicFileAttributes.class) @@ -359,8 +361,8 @@ public void testGetAccessTime() throws IOException { } @Test - public void testGetModifyTime() throws IOException { - Path file = 
touch(localFs, name.getMethodName().toLowerCase() + "1"); + public void testGetModifyTime(TestInfo testInfo) throws IOException { + Path file = touch(localFs, testInfo.getDisplayName().toLowerCase() + "1"); java.io.File localFile = ((LocalFileSystem) localFs).pathToFile(file); long modifyTime1 = localFile.lastModified(); // SFTPFileSystem doesn't have milliseconds. Excluding it. @@ -373,9 +375,9 @@ public void testGetModifyTime() throws IOException { } @Test - public void testMkDirs() throws IOException { + public void testMkDirs(TestInfo testInfo) throws IOException { Path path = new Path(localDir.toUri().getPath(), - new Path(name.getMethodName(), "subdirectory")); + new Path(testInfo.getDisplayName(), "subdirectory")); sftpFs.mkdirs(path); assertTrue(localFs.exists(path)); assertTrue(localFs.getFileStatus(path).isDirectory()); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java index b46a4f674db3a..d30a2bed1773f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java @@ -17,7 +17,9 @@ */ package org.apache.hadoop.fs.shell; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.net.URI; @@ -43,54 +45,48 @@ import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.ToolRunner; import org.junit.jupiter.api.BeforeEach; -import org.junit.Rule; import org.junit.jupiter.api.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.io.TempDir; public class TestAclCommands { - @Rule - public TemporaryFolder testFolder = new TemporaryFolder(); private String path; private Configuration conf = null; @BeforeEach - public void setup() throws IOException { + public void setup(@TempDir java.nio.file.Path testFolder) throws IOException { conf = new Configuration(); - path = testFolder.newFile("file").getPath(); + path = testFolder.resolve("file").toFile().getPath(); } @Test public void testGetfaclValidations() throws Exception { - assertFalse( - 0 == runCommand(new String[] {"-getfacl"}), "getfacl should fail without path"); - assertFalse( - 0 == runCommand(new String[] {"-getfacl", path, "extraArg"}), "getfacl should fail with extra argument"); + assertFalse(0 == runCommand(new String[] {"-getfacl"}), "getfacl should fail without path"); + assertFalse(0 == runCommand(new String[] {"-getfacl", path, "extraArg"}), + "getfacl should fail with extra argument"); } @Test public void testSetfaclValidations() throws Exception { - assertFalse( - 0 == runCommand(new String[] {"-setfacl", path}), "setfacl should fail without options"); - assertFalse( - 0 == runCommand(new String[] {"-setfacl", "-R", path}), "setfacl should fail without options -b, -k, -m, -x or --set"); - assertFalse( - 0 == runCommand(new String[] {"-setfacl"}), "setfacl should fail without path"); - assertFalse( - 0 == runCommand(new String[] {"-setfacl", "-m", path}), "setfacl should fail without aclSpec"); - assertFalse( - 0 == runCommand(new String[] {"-setfacl", "-m", path}), "setfacl should fail with conflicting options"); - assertFalse( - 0 == runCommand(new String[] 
{"-setfacl", path, "extra"}), "setfacl should fail with extra arguments"); - assertFalse( - 0 == runCommand(new String[] {"-setfacl", "--set", - "default:user::rwx", path, "extra"}), "setfacl should fail with extra arguments"); - assertFalse( - 0 == runCommand(new String[] {"-setfacl", "-x", "user:user1:rwx", - path}), "setfacl should fail with permissions for -x"); - assertFalse( - 0 == runCommand(new String[] {"-setfacl", "-m", "", path}), "setfacl should fail ACL spec missing"); + assertFalse(0 == runCommand(new String[] {"-setfacl", path}), + "setfacl should fail without options"); + assertFalse(0 == runCommand(new String[] {"-setfacl", "-R", path}), + "setfacl should fail without options -b, -k, -m, -x or --set"); + assertFalse(0 == runCommand(new String[] {"-setfacl"}), + "setfacl should fail without path"); + assertFalse(0 == runCommand(new String[] {"-setfacl", "-m", path}), + "setfacl should fail without aclSpec"); + assertFalse(0 == runCommand(new String[] {"-setfacl", "-m", path}), + "setfacl should fail with conflicting options"); + assertFalse(0 == runCommand(new String[] {"-setfacl", path, "extra"}), + "setfacl should fail with extra arguments"); + assertFalse(0 == runCommand(new String[] {"-setfacl", "--set", + "default:user::rwx", path, "extra"}), "setfacl should fail with extra arguments"); + assertFalse(0 == runCommand(new String[] {"-setfacl", "-x", "user:user1:rwx", + path}), "setfacl should fail with permissions for -x"); + assertFalse(0 == runCommand(new String[] {"-setfacl", "-m", "", path}), + "setfacl should fail ACL spec missing"); } @Test @@ -101,9 +97,8 @@ public void testSetfaclValidationsWithoutPermissions() throws Exception { } catch (IllegalArgumentException e) { } assertTrue(parsedList.size() == 0); - assertFalse( - 0 == runCommand(new String[] { "-setfacl", "-m", "user:user1:", - "/path" }), "setfacl should fail with less arguments"); + assertFalse(0 == runCommand(new String[] { "-setfacl", "-m", "user:user1:", + "/path" }), "setfacl should fail with less arguments"); } @Test @@ -169,8 +164,8 @@ public void testLsNoRpcForGetAclStatus() throws Exception { conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "stubfs:///"); conf.setClass("fs.stubfs.impl", StubFileSystem.class, FileSystem.class); conf.setBoolean("stubfs.noRpcForGetAclStatus", true); - assertEquals( - 0, ToolRunner.run(conf, new FsShell(), new String[] { "-ls", "/" }), "ls must succeed even if getAclStatus RPC does not exist."); + assertEquals(0, ToolRunner.run(conf, new FsShell(), new String[] { "-ls", "/" }), + "ls must succeed even if getAclStatus RPC does not exist."); } @Test @@ -178,8 +173,8 @@ public void testLsAclsUnsupported() throws Exception { Configuration conf = new Configuration(); conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "stubfs:///"); conf.setClass("fs.stubfs.impl", StubFileSystem.class, FileSystem.class); - assertEquals( - 0, ToolRunner.run(conf, new FsShell(), new String[] { "-ls", "/" }), "ls must succeed even if FileSystem does not implement ACLs."); + assertEquals(0, ToolRunner.run(conf, new FsShell(), new String[] { "-ls", "/" }), + "ls must succeed even if FileSystem does not implement ACLs."); } public static class StubFileSystem extends FileSystem { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java index 0c42ecbdaaf08..9eda315fafd25 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java @@ -18,7 +18,10 @@ package org.apache.hadoop.fs.shell; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import org.apache.hadoop.conf.Configuration; import org.junit.jupiter.api.BeforeEach; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java index ec513f0a39a78..e9d5786419faa 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopy.java @@ -19,8 +19,20 @@ package org.apache.hadoop.fs.shell; import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.anyBoolean; +import static org.mockito.Mockito.anyInt; +import static org.mockito.Mockito.anyLong; +import static org.mockito.Mockito.anyShort; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.reset; import java.io.IOException; import java.io.InputStream; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyFromLocal.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyFromLocal.java index af2269b81f732..803f14e7c9561 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyFromLocal.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyFromLocal.java @@ -22,7 +22,6 @@ import java.util.concurrent.ThreadPoolExecutor; import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -39,6 +38,7 @@ import org.apache.hadoop.fs.shell.CopyCommands.CopyFromLocal; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Test for copyFromLocal. 
@@ -154,7 +154,7 @@ private class TestMultiThreadedCopy extends CopyFromLocal { protected void processArguments(LinkedList args) throws IOException { // Check if the correct number of threads are spawned - Assertions.assertEquals(expectedThreads, getThreadCount()); + assertEquals(expectedThreads, getThreadCount()); super.processArguments(args); if (isMultiThreadNecessary(args)) { @@ -163,10 +163,10 @@ protected void processArguments(LinkedList args) // 2) There are no active tasks in the executor // 3) Executor has shutdown correctly ThreadPoolExecutor executor = getExecutor(); - Assertions.assertEquals(expectedCompletedTaskCount, + assertEquals(expectedCompletedTaskCount, executor.getCompletedTaskCount()); - Assertions.assertEquals(0, executor.getActiveCount()); - Assertions.assertTrue(executor.isTerminated()); + assertEquals(0, executor.getActiveCount()); + assertTrue(executor.isTerminated()); } else { assert getExecutor() == null; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyToLocal.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyToLocal.java index 4357b5b95b18b..2ccbddde331dd 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyToLocal.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyToLocal.java @@ -22,7 +22,6 @@ import java.util.concurrent.ThreadPoolExecutor; import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -40,6 +39,7 @@ import static org.apache.hadoop.fs.shell.CopyCommandWithMultiThread.DEFAULT_QUEUE_SIZE; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestCopyToLocal { @@ -193,9 +193,9 @@ private static class MultiThreadedCopy extends CopyToLocal { protected void processArguments(LinkedList args) throws IOException { // Check if the number of threads are same as expected - Assertions.assertEquals(expectedThreads, getThreadCount()); + assertEquals(expectedThreads, getThreadCount()); // Check if the queue pool size of executor is same as expected - Assertions.assertEquals(expectedQueuePoolSize, getThreadPoolQueueSize()); + assertEquals(expectedQueuePoolSize, getThreadPoolQueueSize()); super.processArguments(args); @@ -205,10 +205,10 @@ protected void processArguments(LinkedList args) // 2) There are no active tasks in the executor // 3) Executor has shutdown correctly ThreadPoolExecutor executor = getExecutor(); - Assertions.assertEquals(expectedCompletedTaskCount, + assertEquals(expectedCompletedTaskCount, executor.getCompletedTaskCount()); - Assertions.assertEquals(0, executor.getActiveCount()); - Assertions.assertTrue(executor.isTerminated()); + assertEquals(0, executor.getActiveCount()); + assertTrue(executor.isTerminated()); } else { assert getExecutor() == null; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java index a2bbd3c2988c7..0e5a104f14e25 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java @@ -17,8 +17,16 @@ */ package org.apache.hadoop.fs.shell; 
-import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.reset; import java.io.PrintStream; import java.io.IOException; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCpCommand.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCpCommand.java index 1a47888bc8b45..72a180d1e2ffe 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCpCommand.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCpCommand.java @@ -22,7 +22,6 @@ import java.util.concurrent.ThreadPoolExecutor; import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -40,6 +39,7 @@ import static org.apache.hadoop.fs.shell.CopyCommandWithMultiThread.DEFAULT_QUEUE_SIZE; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestCpCommand { @@ -193,9 +193,9 @@ private static class MultiThreadedCp extends Cp { protected void processArguments(LinkedList args) throws IOException { // Check if the number of threads are same as expected - Assertions.assertEquals(expectedThreads, getThreadCount()); + assertEquals(expectedThreads, getThreadCount()); // Check if the queue pool size of executor is same as expected - Assertions.assertEquals(expectedQueuePoolSize, getThreadPoolQueueSize()); + assertEquals(expectedQueuePoolSize, getThreadPoolQueueSize()); super.processArguments(args); @@ -205,10 +205,10 @@ protected void processArguments(LinkedList args) // 2) There are no active tasks in the executor // 3) Executor has shutdown correctly ThreadPoolExecutor executor = getExecutor(); - Assertions.assertEquals(expectedCompletedTaskCount, + assertEquals(expectedCompletedTaskCount, executor.getCompletedTaskCount()); - Assertions.assertEquals(0, executor.getActiveCount()); - Assertions.assertTrue(executor.isTerminated()); + assertEquals(0, executor.getActiveCount()); + assertTrue(executor.isTerminated()); } else { assert getExecutor() == null; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestLs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestLs.java index 0773eebca999a..1d4fc461c1b9f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestLs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestLs.java @@ -19,9 +19,17 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SHELL_MISSING_DEFAULT_FS_WARNING_KEY; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import 
static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.reset; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -1329,6 +1337,10 @@ private FileStatus[] getContents() { * * @param lineFormat * format mask + * @param fileStatus + * file status + * @param fileName + * file name * @return formated line */ private String formatLineMtime(String lineFormat) { @@ -1344,7 +1356,11 @@ private String formatLineMtime(String lineFormat) { * * @param lineFormat * format mask - * @return formatted line + * @param fileStatus + * file status + * @param fileName + * file name + * @return formatted line */ private String formatLineAtime(String lineFormat) { return String.format(lineFormat, (isDir() ? "d" : "-"), getPermission(), diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestMove.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestMove.java index 988201ad8bc27..9b67ad9e6c20f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestMove.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestMove.java @@ -18,9 +18,12 @@ package org.apache.hadoop.fs.shell; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.reset; import java.io.IOException; import java.net.URI; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java index ea44f546c0634..f116df227f7f0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java @@ -33,7 +33,6 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Shell; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; @@ -237,7 +236,7 @@ public void testGlobThrowsExceptionForUnreadableDir() throws Exception { fs.setPermission(obscuredDir, new FsPermission((short)0)); //no access try { PathData.expandAsGlob("foo/*", conf); - Assertions.fail("Should throw IOException"); + fail("Should throw IOException"); } catch (IOException ioe) { // expected } finally { @@ -262,7 +261,7 @@ public void testWithStringAndConfForBuggyPath() throws Exception { public void checkPathData(String dirString, PathData item) throws Exception { assertEquals(fs, item.fs, "checking fs"); - assertEquals("checking string", dirString, item.toString()); + assertEquals(dirString, item.toString(), "checking string"); assertEquals( fs.makeQualified(new Path(item.toString())), 
item.path, "checking path" ); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestXAttrCommands.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestXAttrCommands.java index 793853252dd85..a7db9ef768c58 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestXAttrCommands.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestXAttrCommands.java @@ -54,41 +54,41 @@ public void cleanUp() throws Exception { @Test public void testGetfattrValidations() throws Exception { errContent.reset(); - assertFalse( - 0 == runCommand(new String[] { "-getfattr", "-d"}), "getfattr should fail without path"); + assertFalse(0 == runCommand(new String[] { "-getfattr", "-d"}), + "getfattr should fail without path"); assertTrue(errContent.toString().contains(" is missing")); errContent.reset(); - assertFalse( - 0 == runCommand(new String[] { "-getfattr", "extra", "-d", "/test"}), "getfattr should fail with extra argument"); + assertFalse(0 == runCommand(new String[] { "-getfattr", "extra", "-d", "/test"}), + "getfattr should fail with extra argument"); assertTrue(errContent.toString().contains("Too many arguments")); errContent.reset(); - assertFalse( - 0 == runCommand(new String[] { "-getfattr", "/test"}), "getfattr should fail without \"-n name\" or \"-d\""); + assertFalse(0 == runCommand(new String[] { "-getfattr", "/test"}), + "getfattr should fail without \"-n name\" or \"-d\""); assertTrue(errContent.toString().contains("Must specify '-n name' or '-d' option")); errContent.reset(); - assertFalse( - 0 == runCommand(new String[] { "-getfattr", "-d", "-e", "aaa", "/test"}), "getfattr should fail with invalid encoding"); + assertFalse(0 == runCommand(new String[] { "-getfattr", "-d", "-e", "aaa", "/test"}), + "getfattr should fail with invalid encoding"); assertTrue(errContent.toString().contains("Invalid/unsupported encoding option specified: aaa")); } @Test public void testSetfattrValidations() throws Exception { errContent.reset(); - assertFalse( - 0 == runCommand(new String[] { "-setfattr", "-n", "user.a1" }), "setfattr should fail without path"); + assertFalse(0 == runCommand(new String[] { "-setfattr", "-n", "user.a1" }), + "setfattr should fail without path"); assertTrue(errContent.toString().contains(" is missing")); errContent.reset(); - assertFalse( - 0 == runCommand(new String[] { "-setfattr", "extra", "-n", "user.a1", "/test"}), "setfattr should fail with extra arguments"); + assertFalse(0 == runCommand(new String[] { "-setfattr", "extra", "-n", "user.a1", "/test"}), + "setfattr should fail with extra arguments"); assertTrue(errContent.toString().contains("Too many arguments")); errContent.reset(); - assertFalse( - 0 == runCommand(new String[] { "-setfattr", "/test"}), "setfattr should fail without \"-n name\" or \"-x name\""); + assertFalse(0 == runCommand(new String[] { "-setfattr", "/test"}), + "setfattr should fail without \"-n name\" or \"-x name\""); assertTrue(errContent.toString().contains("Must specify '-n name' or '-x name' option")); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestAnd.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestAnd.java index 9d821505652eb..74169ea0240fe 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestAnd.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestAnd.java @@ -18,24 +18,23 @@ package org.apache.hadoop.fs.shell.find; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; import java.io.IOException; import java.util.Deque; import java.util.LinkedList; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.fs.shell.PathData; -import org.junit.Rule; -import org.junit.rules.Timeout; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +@Timeout(10) public class TestAnd { - - @Rule - public Timeout globalTimeout = new Timeout(10000, TimeUnit.MILLISECONDS); - + // test all expressions passing @Test public void testPass() throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFilterExpression.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFilterExpression.java index 0c5050559d115..0738831c50164 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFilterExpression.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFilterExpression.java @@ -17,27 +17,30 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; import java.io.IOException; import java.util.Deque; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.fs.shell.PathData; import org.junit.jupiter.api.BeforeEach; -import org.junit.Rule; -import org.junit.rules.Timeout; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +@Timeout(10) public class TestFilterExpression { private Expression expr; private FilterExpression test; - - @Rule - public Timeout globalTimeout = new Timeout(10000, TimeUnit.MILLISECONDS); - + @BeforeEach public void setup() { expr = mock(Expression.class); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java index d7fb3075ffd66..3e444990d66a4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java @@ -17,8 +17,19 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import 
static org.mockito.Mockito.any; +import static org.mockito.Mockito.anyInt; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; import java.io.IOException; import java.io.PrintStream; @@ -26,7 +37,6 @@ import java.util.Collections; import java.util.LinkedList; import java.util.NoSuchElementException; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; @@ -40,19 +50,16 @@ import org.apache.hadoop.fs.shell.find.FindOptions; import org.apache.hadoop.fs.shell.find.Result; import org.junit.jupiter.api.BeforeEach; -import org.junit.Rule; -import org.junit.rules.Timeout; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.mockito.InOrder; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +@Timeout(10) public class TestFind { - - @Rule - public Timeout timeout = new Timeout(10000, TimeUnit.MILLISECONDS); - + private static FileSystem mockFs; private static Configuration conf; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestIname.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestIname.java index 286faeb6d6702..3ed1cb4cd1cc5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestIname.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestIname.java @@ -17,26 +17,22 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.apache.hadoop.fs.shell.find.TestHelper.*; import java.io.IOException; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.shell.PathData; import org.junit.jupiter.api.BeforeEach; -import org.junit.Rule; -import org.junit.rules.Timeout; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +@Timeout(10) public class TestIname { private FileSystem mockFs; private Name.Iname name; - - @Rule - public Timeout globalTimeout = new Timeout(10000, TimeUnit.MILLISECONDS); - + @BeforeEach public void resetMock() throws IOException { mockFs = MockFileSystem.setup(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestName.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestName.java index 5ed67e10aa5b4..0ecbb53e204cb 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestName.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestName.java @@ -17,26 +17,22 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.apache.hadoop.fs.shell.find.TestHelper.*; import java.io.IOException; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.shell.PathData; import org.junit.jupiter.api.BeforeEach; -import org.junit.Rule; -import org.junit.rules.Timeout; import 
org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +@Timeout(10) public class TestName { private FileSystem mockFs; private Name name; - - @Rule - public Timeout globalTimeout = new Timeout(10000, TimeUnit.MILLISECONDS); - + @BeforeEach public void resetMock() throws IOException { mockFs = MockFileSystem.setup(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint.java index 1e5c14d957d9a..e59ad32ccd0bc 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint.java @@ -17,28 +17,26 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import java.io.IOException; import org.apache.hadoop.fs.shell.PathData; import java.io.PrintStream; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.fs.FileSystem; import org.junit.jupiter.api.BeforeEach; -import org.junit.Rule; -import org.junit.rules.Timeout; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +@Timeout(10) public class TestPrint { private FileSystem mockFs; - - @Rule - public Timeout globalTimeout = new Timeout(10000, TimeUnit.MILLISECONDS); - + @BeforeEach public void resetMock() throws IOException { mockFs = MockFileSystem.setup(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint0.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint0.java index 3475df720e854..f61f4baea74b8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint0.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint0.java @@ -17,28 +17,25 @@ */ package org.apache.hadoop.fs.shell.find; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; - +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import java.io.IOException; import org.apache.hadoop.fs.shell.PathData; import java.io.PrintStream; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.fs.FileSystem; import org.junit.jupiter.api.BeforeEach; -import org.junit.Rule; -import org.junit.rules.Timeout; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +@Timeout(10) public class TestPrint0 { private FileSystem mockFs; - - @Rule - public Timeout globalTimeout = new Timeout(10000, TimeUnit.MILLISECONDS); - + @BeforeEach public void resetMock() throws IOException { mockFs = MockFileSystem.setup(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java index c40eaf0bbb400..2c10a5448cbbf 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java @@ -88,8 +88,7 @@ private void assertWriteBlock(DataBlocks.DataBlock dataBlock) // Verify that the DataBlock has data written. assertTrue(dataBlock.hasData(), "Expected Data block to have data"); // Verify the size of data. - assertEquals(ONE_KB -, dataBlock.dataSize(), "Mismatch in data size in block"); + assertEquals(ONE_KB, dataBlock.dataSize(), "Mismatch in data size in block"); // Verify that no capacity is left in the data block to write more. assertFalse(dataBlock.hasCapacity(1), "Expected the data block to have no capacity to write 1 byte " + "of data"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java index 10955f215c7bd..8267b214d53bc 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java @@ -33,13 +33,10 @@ import org.apache.hadoop.fs.FsConstants; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.AclEntry; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.Timeout; - -import static org.junit.jupiter.api.Assertions.assertThrows; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; import static org.mockito.Mockito.*; public class TestChRootedFileSystem { @@ -48,7 +45,7 @@ public class TestChRootedFileSystem { Path chrootedTo; FileSystemTestHelper fileSystemTestHelper; - @BeforeEach + @Before public void setUp() throws Exception { // create the test root on local_fs Configuration conf = new Configuration(); @@ -65,7 +62,7 @@ public void setUp() throws Exception { fSys = new ChRootedFileSystem(chrootedTo.toUri(), conf); } - @AfterEach + @After public void tearDown() throws Exception { fSysTarget.delete(chrootedTo, true); } @@ -73,17 +70,17 @@ public void tearDown() throws Exception { @Test public void testURI() { URI uri = fSys.getUri(); - Assertions.assertEquals(chrootedTo.toUri(), uri); + Assert.assertEquals(chrootedTo.toUri(), uri); } @Test public void testBasicPaths() { URI uri = fSys.getUri(); - Assertions.assertEquals(chrootedTo.toUri(), uri); - Assertions.assertEquals(fSys.makeQualified( + Assert.assertEquals(chrootedTo.toUri(), uri); + Assert.assertEquals(fSys.makeQualified( new Path(System.getProperty("user.home"))), fSys.getWorkingDirectory()); - Assertions.assertEquals(fSys.makeQualified( + Assert.assertEquals(fSys.makeQualified( new Path(System.getProperty("user.home"))), fSys.getHomeDirectory()); /* @@ -93,13 +90,13 @@ public void testBasicPaths() { * But if we were to fix Path#makeQualified() then the next test should * have been: - Assertions.assertEquals( + Assert.assertEquals( new Path(chrootedTo + "/foo/bar").makeQualified( FsConstants.LOCAL_FS_URI, null), fSys.makeQualified(new Path( "/foo/bar"))); */ - Assertions.assertEquals( + Assert.assertEquals( new Path("/foo/bar").makeQualified(FsConstants.LOCAL_FS_URI, null), fSys.makeQualified(new Path("/foo/bar"))); } @@ -116,50 +113,50 @@ public void testCreateDelete() throws IOException { // Create file fileSystemTestHelper.createFile(fSys, "/foo"); - 
Assertions.assertTrue(fSys.isFile(new Path("/foo"))); - Assertions.assertTrue(fSysTarget.isFile(new Path(chrootedTo, "foo"))); + Assert.assertTrue(fSys.isFile(new Path("/foo"))); + Assert.assertTrue(fSysTarget.isFile(new Path(chrootedTo, "foo"))); // Create file with recursive dir fileSystemTestHelper.createFile(fSys, "/newDir/foo"); - Assertions.assertTrue(fSys.isFile(new Path("/newDir/foo"))); - Assertions.assertTrue(fSysTarget.isFile(new Path(chrootedTo,"newDir/foo"))); + Assert.assertTrue(fSys.isFile(new Path("/newDir/foo"))); + Assert.assertTrue(fSysTarget.isFile(new Path(chrootedTo,"newDir/foo"))); // Delete the created file - Assertions.assertTrue(fSys.delete(new Path("/newDir/foo"), false)); - Assertions.assertFalse(fSys.exists(new Path("/newDir/foo"))); - Assertions.assertFalse(fSysTarget.exists(new Path(chrootedTo, "newDir/foo"))); + Assert.assertTrue(fSys.delete(new Path("/newDir/foo"), false)); + Assert.assertFalse(fSys.exists(new Path("/newDir/foo"))); + Assert.assertFalse(fSysTarget.exists(new Path(chrootedTo, "newDir/foo"))); // Create file with a 2 component dirs recursively fileSystemTestHelper.createFile(fSys, "/newDir/newDir2/foo"); - Assertions.assertTrue(fSys.isFile(new Path("/newDir/newDir2/foo"))); - Assertions.assertTrue(fSysTarget.isFile(new Path(chrootedTo,"newDir/newDir2/foo"))); + Assert.assertTrue(fSys.isFile(new Path("/newDir/newDir2/foo"))); + Assert.assertTrue(fSysTarget.isFile(new Path(chrootedTo,"newDir/newDir2/foo"))); // Delete the created file - Assertions.assertTrue(fSys.delete(new Path("/newDir/newDir2/foo"), false)); - Assertions.assertFalse(fSys.exists(new Path("/newDir/newDir2/foo"))); - Assertions.assertFalse(fSysTarget.exists(new Path(chrootedTo,"newDir/newDir2/foo"))); + Assert.assertTrue(fSys.delete(new Path("/newDir/newDir2/foo"), false)); + Assert.assertFalse(fSys.exists(new Path("/newDir/newDir2/foo"))); + Assert.assertFalse(fSysTarget.exists(new Path(chrootedTo,"newDir/newDir2/foo"))); } @Test public void testMkdirDelete() throws IOException { fSys.mkdirs(fileSystemTestHelper.getTestRootPath(fSys, "/dirX")); - Assertions.assertTrue(fSys.isDirectory(new Path("/dirX"))); - Assertions.assertTrue(fSysTarget.isDirectory(new Path(chrootedTo,"dirX"))); + Assert.assertTrue(fSys.isDirectory(new Path("/dirX"))); + Assert.assertTrue(fSysTarget.isDirectory(new Path(chrootedTo,"dirX"))); fSys.mkdirs(fileSystemTestHelper.getTestRootPath(fSys, "/dirX/dirY")); - Assertions.assertTrue(fSys.isDirectory(new Path("/dirX/dirY"))); - Assertions.assertTrue(fSysTarget.isDirectory(new Path(chrootedTo,"dirX/dirY"))); + Assert.assertTrue(fSys.isDirectory(new Path("/dirX/dirY"))); + Assert.assertTrue(fSysTarget.isDirectory(new Path(chrootedTo,"dirX/dirY"))); // Delete the created dir - Assertions.assertTrue(fSys.delete(new Path("/dirX/dirY"), false)); - Assertions.assertFalse(fSys.exists(new Path("/dirX/dirY"))); - Assertions.assertFalse(fSysTarget.exists(new Path(chrootedTo,"dirX/dirY"))); + Assert.assertTrue(fSys.delete(new Path("/dirX/dirY"), false)); + Assert.assertFalse(fSys.exists(new Path("/dirX/dirY"))); + Assert.assertFalse(fSysTarget.exists(new Path(chrootedTo,"dirX/dirY"))); - Assertions.assertTrue(fSys.delete(new Path("/dirX"), false)); - Assertions.assertFalse(fSys.exists(new Path("/dirX"))); - Assertions.assertFalse(fSysTarget.exists(new Path(chrootedTo,"dirX"))); + Assert.assertTrue(fSys.delete(new Path("/dirX"), false)); + Assert.assertFalse(fSys.exists(new Path("/dirX"))); + Assert.assertFalse(fSysTarget.exists(new Path(chrootedTo,"dirX"))); } @Test 
@@ -167,19 +164,19 @@ public void testRename() throws IOException { // Rename a file fileSystemTestHelper.createFile(fSys, "/newDir/foo"); fSys.rename(new Path("/newDir/foo"), new Path("/newDir/fooBar")); - Assertions.assertFalse(fSys.exists(new Path("/newDir/foo"))); - Assertions.assertFalse(fSysTarget.exists(new Path(chrootedTo,"newDir/foo"))); - Assertions.assertTrue(fSys.isFile(fileSystemTestHelper.getTestRootPath(fSys,"/newDir/fooBar"))); - Assertions.assertTrue(fSysTarget.isFile(new Path(chrootedTo,"newDir/fooBar"))); + Assert.assertFalse(fSys.exists(new Path("/newDir/foo"))); + Assert.assertFalse(fSysTarget.exists(new Path(chrootedTo,"newDir/foo"))); + Assert.assertTrue(fSys.isFile(fileSystemTestHelper.getTestRootPath(fSys,"/newDir/fooBar"))); + Assert.assertTrue(fSysTarget.isFile(new Path(chrootedTo,"newDir/fooBar"))); // Rename a dir fSys.mkdirs(new Path("/newDir/dirFoo")); fSys.rename(new Path("/newDir/dirFoo"), new Path("/newDir/dirFooBar")); - Assertions.assertFalse(fSys.exists(new Path("/newDir/dirFoo"))); - Assertions.assertFalse(fSysTarget.exists(new Path(chrootedTo,"newDir/dirFoo"))); - Assertions.assertTrue(fSys.isDirectory(fileSystemTestHelper.getTestRootPath(fSys,"/newDir/dirFooBar"))); - Assertions.assertTrue(fSysTarget.isDirectory(new Path(chrootedTo,"newDir/dirFooBar"))); + Assert.assertFalse(fSys.exists(new Path("/newDir/dirFoo"))); + Assert.assertFalse(fSysTarget.exists(new Path(chrootedTo,"newDir/dirFoo"))); + Assert.assertTrue(fSys.isDirectory(fileSystemTestHelper.getTestRootPath(fSys,"/newDir/dirFooBar"))); + Assert.assertTrue(fSysTarget.isDirectory(new Path(chrootedTo,"newDir/dirFooBar"))); } @Test @@ -187,8 +184,8 @@ public void testGetContentSummary() throws IOException { // GetContentSummary of a dir fSys.mkdirs(new Path("/newDir/dirFoo")); ContentSummary cs = fSys.getContentSummary(new Path("/newDir/dirFoo")); - Assertions.assertEquals(-1L, cs.getQuota()); - Assertions.assertEquals(-1L, cs.getSpaceQuota()); + Assert.assertEquals(-1L, cs.getQuota()); + Assert.assertEquals(-1L, cs.getSpaceQuota()); } /** @@ -210,15 +207,15 @@ public void testRenameAcrossFs() throws IOException { public void testList() throws IOException { FileStatus fs = fSys.getFileStatus(new Path("/")); - Assertions.assertTrue(fs.isDirectory()); + Assert.assertTrue(fs.isDirectory()); // should return the full path not the chrooted path - Assertions.assertEquals(fs.getPath(), chrootedTo); + Assert.assertEquals(fs.getPath(), chrootedTo); // list on Slash FileStatus[] dirPaths = fSys.listStatus(new Path("/")); - Assertions.assertEquals(0, dirPaths.length); + Assert.assertEquals(0, dirPaths.length); @@ -229,21 +226,21 @@ public void testList() throws IOException { fSys.mkdirs(new Path("/dirX/dirXX")); dirPaths = fSys.listStatus(new Path("/")); - Assertions.assertEquals(4, dirPaths.length); // note 2 crc files + Assert.assertEquals(4, dirPaths.length); // note 2 crc files // Note the the file status paths are the full paths on target fs = FileSystemTestHelper.containsPath(new Path(chrootedTo, "foo"), dirPaths); - Assertions.assertNotNull(fs); - Assertions.assertTrue(fs.isFile()); + Assert.assertNotNull(fs); + Assert.assertTrue(fs.isFile()); fs = FileSystemTestHelper.containsPath(new Path(chrootedTo, "bar"), dirPaths); - Assertions.assertNotNull(fs); - Assertions.assertTrue(fs.isFile()); + Assert.assertNotNull(fs); + Assert.assertTrue(fs.isFile()); fs = FileSystemTestHelper.containsPath(new Path(chrootedTo, "dirX"), dirPaths); - Assertions.assertNotNull(fs); - 
Assertions.assertTrue(fs.isDirectory()); + Assert.assertNotNull(fs); + Assert.assertTrue(fs.isDirectory()); fs = FileSystemTestHelper.containsPath(new Path(chrootedTo, "dirY"), dirPaths); - Assertions.assertNotNull(fs); - Assertions.assertTrue(fs.isDirectory()); + Assert.assertNotNull(fs); + Assert.assertTrue(fs.isDirectory()); } @Test @@ -253,31 +250,31 @@ public void testWorkingDirectory() throws Exception { fSys.mkdirs(new Path("/testWd")); Path workDir = new Path("/testWd"); fSys.setWorkingDirectory(workDir); - Assertions.assertEquals(workDir, fSys.getWorkingDirectory()); + Assert.assertEquals(workDir, fSys.getWorkingDirectory()); fSys.setWorkingDirectory(new Path(".")); - Assertions.assertEquals(workDir, fSys.getWorkingDirectory()); + Assert.assertEquals(workDir, fSys.getWorkingDirectory()); fSys.setWorkingDirectory(new Path("..")); - Assertions.assertEquals(workDir.getParent(), fSys.getWorkingDirectory()); + Assert.assertEquals(workDir.getParent(), fSys.getWorkingDirectory()); // cd using a relative path // Go back to our test root workDir = new Path("/testWd"); fSys.setWorkingDirectory(workDir); - Assertions.assertEquals(workDir, fSys.getWorkingDirectory()); + Assert.assertEquals(workDir, fSys.getWorkingDirectory()); Path relativeDir = new Path("existingDir1"); Path absoluteDir = new Path(workDir,"existingDir1"); fSys.mkdirs(absoluteDir); fSys.setWorkingDirectory(relativeDir); - Assertions.assertEquals(absoluteDir, fSys.getWorkingDirectory()); + Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory()); // cd using a absolute path absoluteDir = new Path("/test/existingDir2"); fSys.mkdirs(absoluteDir); fSys.setWorkingDirectory(absoluteDir); - Assertions.assertEquals(absoluteDir, fSys.getWorkingDirectory()); + Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory()); // Now open a file relative to the wd we just set above. 
Path absoluteFooPath = new Path(absoluteDir, "foo"); @@ -286,14 +283,14 @@ public void testWorkingDirectory() throws Exception { // Now mkdir relative to the dir we cd'ed to fSys.mkdirs(new Path("newDir")); - Assertions.assertTrue(fSys.isDirectory(new Path(absoluteDir, "newDir"))); + Assert.assertTrue(fSys.isDirectory(new Path(absoluteDir, "newDir"))); /* Filesystem impls (RawLocal and DistributedFileSystem do not check * for existing of working dir absoluteDir = getTestRootPath(fSys, "nonexistingPath"); try { fSys.setWorkingDirectory(absoluteDir); - Assertions.fail("cd to non existing dir should have failed"); + Assert.fail("cd to non existing dir should have failed"); } catch (Exception e) { // Exception as expected } @@ -304,7 +301,7 @@ public void testWorkingDirectory() throws Exception { absoluteDir = new Path(LOCAL_FS_ROOT_URI + "/existingDir"); fSys.mkdirs(absoluteDir); fSys.setWorkingDirectory(absoluteDir); - Assertions.assertEquals(absoluteDir, fSys.getWorkingDirectory()); + Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory()); } @@ -314,17 +311,15 @@ public void testWorkingDirectory() throws Exception { @Test public void testResolvePath() throws IOException { - Assertions.assertEquals(chrootedTo, fSys.resolvePath(new Path("/"))); + Assert.assertEquals(chrootedTo, fSys.resolvePath(new Path("/"))); fileSystemTestHelper.createFile(fSys, "/foo"); - Assertions.assertEquals(new Path(chrootedTo, "foo"), + Assert.assertEquals(new Path(chrootedTo, "foo"), fSys.resolvePath(new Path("/foo"))); } - @Test + @Test(expected=FileNotFoundException.class) public void testResolvePathNonExisting() throws IOException { - assertThrows(FileNotFoundException.class, () -> { fSys.resolvePath(new Path("/nonExisting")); - }); } @Test @@ -440,8 +435,7 @@ public void initialize(URI name, Configuration conf) throws IOException { } } - @Test - @Timeout(value = 30) + @Test(timeout = 30000) public void testCreateSnapshot() throws Exception { Path snapRootPath = new Path("/snapPath"); Path chRootedSnapRootPath = new Path("/a/b/snapPath"); @@ -458,8 +452,7 @@ public void testCreateSnapshot() throws Exception { verify(mockFs).createSnapshot(chRootedSnapRootPath, "snap1"); } - @Test - @Timeout(value = 30) + @Test(timeout = 30000) public void testDeleteSnapshot() throws Exception { Path snapRootPath = new Path("/snapPath"); Path chRootedSnapRootPath = new Path("/a/b/snapPath"); @@ -476,8 +469,7 @@ public void testDeleteSnapshot() throws Exception { verify(mockFs).deleteSnapshot(chRootedSnapRootPath, "snap1"); } - @Test - @Timeout(value = 30) + @Test(timeout = 30000) public void testRenameSnapshot() throws Exception { Path snapRootPath = new Path("/snapPath"); Path chRootedSnapRootPath = new Path("/a/b/snapPath"); @@ -495,8 +487,7 @@ public void testRenameSnapshot() throws Exception { "snapNewName"); } - @Test - @Timeout(value = 30) + @Test(timeout = 30000) public void testSetStoragePolicy() throws Exception { Path storagePolicyPath = new Path("/storagePolicy"); Path chRootedStoragePolicyPath = new Path("/a/b/storagePolicy"); @@ -513,8 +504,7 @@ public void testSetStoragePolicy() throws Exception { verify(mockFs).setStoragePolicy(chRootedStoragePolicyPath, "HOT"); } - @Test - @Timeout(value = 30) + @Test(timeout = 30000) public void testUnsetStoragePolicy() throws Exception { Path storagePolicyPath = new Path("/storagePolicy"); Path chRootedStoragePolicyPath = new Path("/a/b/storagePolicy"); @@ -531,8 +521,7 @@ public void testUnsetStoragePolicy() throws Exception { 
verify(mockFs).unsetStoragePolicy(chRootedStoragePolicyPath); } - @Test - @Timeout(value = 30) + @Test(timeout = 30000) public void testGetStoragePolicy() throws Exception { Path storagePolicyPath = new Path("/storagePolicy"); Path chRootedStoragePolicyPath = new Path("/a/b/storagePolicy"); @@ -549,8 +538,7 @@ public void testGetStoragePolicy() throws Exception { verify(mockFs).getStoragePolicy(chRootedStoragePolicyPath); } - @Test - @Timeout(value = 30) + @Test(timeout = 30000) public void testGetAllStoragePolicy() throws Exception { Configuration conf = new Configuration(); conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java index 7736af85f4b1a..c6de6ef2a5a4c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java @@ -23,7 +23,12 @@ import java.util.EnumSet; import static org.apache.hadoop.fs.FileContextTestHelper.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.AbstractFileSystem; @@ -35,7 +40,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.viewfs.ChRootedFs; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; @@ -73,11 +77,11 @@ public void tearDown() throws Exception { @Test public void testBasicPaths() { URI uri = fc.getDefaultFileSystem().getUri(); - Assertions.assertEquals(chrootedTo.toUri(), uri); - Assertions.assertEquals(fc.makeQualified( + assertEquals(chrootedTo.toUri(), uri); + assertEquals(fc.makeQualified( new Path(System.getProperty("user.home"))), fc.getWorkingDirectory()); - Assertions.assertEquals(fc.makeQualified( + assertEquals(fc.makeQualified( new Path(System.getProperty("user.home"))), fc.getHomeDirectory()); /* @@ -87,13 +91,13 @@ public void testBasicPaths() { * But if we were to fix Path#makeQualified() then the next test should * have been: - Assertions.assertEquals( + assertEquals( new Path(chrootedTo + "/foo/bar").makeQualified( FsConstants.LOCAL_FS_URI, null), fc.makeQualified(new Path( "/foo/bar"))); */ - Assertions.assertEquals( + assertEquals( new Path("/foo/bar").makeQualified(FsConstants.LOCAL_FS_URI, null), fc.makeQualified(new Path("/foo/bar"))); } @@ -111,50 +115,50 @@ public void testCreateDelete() throws IOException { // Create file fileContextTestHelper.createFileNonRecursive(fc, "/foo"); - Assertions.assertTrue(isFile(fc, new Path("/foo"))); - Assertions.assertTrue(isFile(fcTarget, new Path(chrootedTo, "foo"))); + assertTrue(isFile(fc, new Path("/foo"))); + assertTrue(isFile(fcTarget, new Path(chrootedTo, "foo"))); // Create file with recursive dir fileContextTestHelper.createFile(fc, "/newDir/foo"); - Assertions.assertTrue(isFile(fc, new Path("/newDir/foo"))); - Assertions.assertTrue(isFile(fcTarget, new 
Path(chrootedTo,"newDir/foo"))); + assertTrue(isFile(fc, new Path("/newDir/foo"))); + assertTrue(isFile(fcTarget, new Path(chrootedTo,"newDir/foo"))); // Delete the created file - Assertions.assertTrue(fc.delete(new Path("/newDir/foo"), false)); - Assertions.assertFalse(exists(fc, new Path("/newDir/foo"))); - Assertions.assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/foo"))); + assertTrue(fc.delete(new Path("/newDir/foo"), false)); + assertFalse(exists(fc, new Path("/newDir/foo"))); + assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/foo"))); // Create file with a 2 component dirs recursively fileContextTestHelper.createFile(fc, "/newDir/newDir2/foo"); - Assertions.assertTrue(isFile(fc, new Path("/newDir/newDir2/foo"))); - Assertions.assertTrue(isFile(fcTarget, new Path(chrootedTo,"newDir/newDir2/foo"))); + assertTrue(isFile(fc, new Path("/newDir/newDir2/foo"))); + assertTrue(isFile(fcTarget, new Path(chrootedTo,"newDir/newDir2/foo"))); // Delete the created file - Assertions.assertTrue(fc.delete(new Path("/newDir/newDir2/foo"), false)); - Assertions.assertFalse(exists(fc, new Path("/newDir/newDir2/foo"))); - Assertions.assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/newDir2/foo"))); + assertTrue(fc.delete(new Path("/newDir/newDir2/foo"), false)); + assertFalse(exists(fc, new Path("/newDir/newDir2/foo"))); + assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/newDir2/foo"))); } @Test public void testMkdirDelete() throws IOException { fc.mkdir(fileContextTestHelper.getTestRootPath(fc, "/dirX"), FileContext.DEFAULT_PERM, false); - Assertions.assertTrue(isDir(fc, new Path("/dirX"))); - Assertions.assertTrue(isDir(fcTarget, new Path(chrootedTo,"dirX"))); + assertTrue(isDir(fc, new Path("/dirX"))); + assertTrue(isDir(fcTarget, new Path(chrootedTo,"dirX"))); fc.mkdir(fileContextTestHelper.getTestRootPath(fc, "/dirX/dirY"), FileContext.DEFAULT_PERM, false); - Assertions.assertTrue(isDir(fc, new Path("/dirX/dirY"))); - Assertions.assertTrue(isDir(fcTarget, new Path(chrootedTo,"dirX/dirY"))); + assertTrue(isDir(fc, new Path("/dirX/dirY"))); + assertTrue(isDir(fcTarget, new Path(chrootedTo,"dirX/dirY"))); // Delete the created dir - Assertions.assertTrue(fc.delete(new Path("/dirX/dirY"), false)); - Assertions.assertFalse(exists(fc, new Path("/dirX/dirY"))); - Assertions.assertFalse(exists(fcTarget, new Path(chrootedTo,"dirX/dirY"))); + assertTrue(fc.delete(new Path("/dirX/dirY"), false)); + assertFalse(exists(fc, new Path("/dirX/dirY"))); + assertFalse(exists(fcTarget, new Path(chrootedTo,"dirX/dirY"))); - Assertions.assertTrue(fc.delete(new Path("/dirX"), false)); - Assertions.assertFalse(exists(fc, new Path("/dirX"))); - Assertions.assertFalse(exists(fcTarget, new Path(chrootedTo,"dirX"))); + assertTrue(fc.delete(new Path("/dirX"), false)); + assertFalse(exists(fc, new Path("/dirX"))); + assertFalse(exists(fcTarget, new Path(chrootedTo,"dirX"))); } @Test @@ -162,19 +166,19 @@ public void testRename() throws IOException { // Rename a file fileContextTestHelper.createFile(fc, "/newDir/foo"); fc.rename(new Path("/newDir/foo"), new Path("/newDir/fooBar")); - Assertions.assertFalse(exists(fc, new Path("/newDir/foo"))); - Assertions.assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/foo"))); - Assertions.assertTrue(isFile(fc, fileContextTestHelper.getTestRootPath(fc,"/newDir/fooBar"))); - Assertions.assertTrue(isFile(fcTarget, new Path(chrootedTo,"newDir/fooBar"))); + assertFalse(exists(fc, new Path("/newDir/foo"))); + assertFalse(exists(fcTarget, new 
Path(chrootedTo,"newDir/foo"))); + assertTrue(isFile(fc, fileContextTestHelper.getTestRootPath(fc,"/newDir/fooBar"))); + assertTrue(isFile(fcTarget, new Path(chrootedTo,"newDir/fooBar"))); // Rename a dir fc.mkdir(new Path("/newDir/dirFoo"), FileContext.DEFAULT_PERM, false); fc.rename(new Path("/newDir/dirFoo"), new Path("/newDir/dirFooBar")); - Assertions.assertFalse(exists(fc, new Path("/newDir/dirFoo"))); - Assertions.assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/dirFoo"))); - Assertions.assertTrue(isDir(fc, fileContextTestHelper.getTestRootPath(fc,"/newDir/dirFooBar"))); - Assertions.assertTrue(isDir(fcTarget, new Path(chrootedTo,"newDir/dirFooBar"))); + assertFalse(exists(fc, new Path("/newDir/dirFoo"))); + assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/dirFoo"))); + assertTrue(isDir(fc, fileContextTestHelper.getTestRootPath(fc,"/newDir/dirFooBar"))); + assertTrue(isDir(fcTarget, new Path(chrootedTo,"newDir/dirFooBar"))); } @@ -195,15 +199,15 @@ public void testRenameAcrossFs() throws IOException { public void testList() throws IOException { FileStatus fs = fc.getFileStatus(new Path("/")); - Assertions.assertTrue(fs.isDirectory()); + assertTrue(fs.isDirectory()); // should return the full path not the chrooted path - Assertions.assertEquals(fs.getPath(), chrootedTo); + assertEquals(fs.getPath(), chrootedTo); // list on Slash FileStatus[] dirPaths = fc.util().listStatus(new Path("/")); - Assertions.assertEquals(0, dirPaths.length); + assertEquals(0, dirPaths.length); @@ -215,21 +219,21 @@ public void testList() throws IOException { fc.mkdir(new Path("/dirX/dirXX"), FileContext.DEFAULT_PERM, false); dirPaths = fc.util().listStatus(new Path("/")); - Assertions.assertEquals(4, dirPaths.length); + assertEquals(4, dirPaths.length); // Note the the file status paths are the full paths on target fs = fileContextTestHelper.containsPath(fcTarget, "foo", dirPaths); - Assertions.assertNotNull(fs); - Assertions.assertTrue(fs.isFile()); + assertNotNull(fs); + assertTrue(fs.isFile()); fs = fileContextTestHelper.containsPath(fcTarget, "bar", dirPaths); - Assertions.assertNotNull(fs); - Assertions.assertTrue(fs.isFile()); + assertNotNull(fs); + assertTrue(fs.isFile()); fs = fileContextTestHelper.containsPath(fcTarget, "dirX", dirPaths); - Assertions.assertNotNull(fs); - Assertions.assertTrue(fs.isDirectory()); + assertNotNull(fs); + assertTrue(fs.isDirectory()); fs = fileContextTestHelper.containsPath(fcTarget, "dirY", dirPaths); - Assertions.assertNotNull(fs); - Assertions.assertTrue(fs.isDirectory()); + assertNotNull(fs); + assertTrue(fs.isDirectory()); } @Test @@ -240,13 +244,13 @@ public void testWorkingDirectory() throws Exception { Path workDir = new Path("/testWd"); Path fqWd = fc.makeQualified(workDir); fc.setWorkingDirectory(workDir); - Assertions.assertEquals(fqWd, fc.getWorkingDirectory()); + assertEquals(fqWd, fc.getWorkingDirectory()); fc.setWorkingDirectory(new Path(".")); - Assertions.assertEquals(fqWd, fc.getWorkingDirectory()); + assertEquals(fqWd, fc.getWorkingDirectory()); fc.setWorkingDirectory(new Path("..")); - Assertions.assertEquals(fqWd.getParent(), fc.getWorkingDirectory()); + assertEquals(fqWd.getParent(), fc.getWorkingDirectory()); // cd using a relative path @@ -254,20 +258,20 @@ public void testWorkingDirectory() throws Exception { workDir = new Path("/testWd"); fqWd = fc.makeQualified(workDir); fc.setWorkingDirectory(workDir); - Assertions.assertEquals(fqWd, fc.getWorkingDirectory()); + assertEquals(fqWd, fc.getWorkingDirectory()); Path relativeDir = 
new Path("existingDir1"); Path absoluteDir = new Path(workDir,"existingDir1"); fc.mkdir(absoluteDir, FileContext.DEFAULT_PERM, true); Path fqAbsoluteDir = fc.makeQualified(absoluteDir); fc.setWorkingDirectory(relativeDir); - Assertions.assertEquals(fqAbsoluteDir, fc.getWorkingDirectory()); + assertEquals(fqAbsoluteDir, fc.getWorkingDirectory()); // cd using a absolute path absoluteDir = new Path("/test/existingDir2"); fqAbsoluteDir = fc.makeQualified(absoluteDir); fc.mkdir(absoluteDir, FileContext.DEFAULT_PERM, true); fc.setWorkingDirectory(absoluteDir); - Assertions.assertEquals(fqAbsoluteDir, fc.getWorkingDirectory()); + assertEquals(fqAbsoluteDir, fc.getWorkingDirectory()); // Now open a file relative to the wd we just set above. Path absolutePath = new Path(absoluteDir, "foo"); @@ -276,12 +280,12 @@ public void testWorkingDirectory() throws Exception { // Now mkdir relative to the dir we cd'ed to fc.mkdir(new Path("newDir"), FileContext.DEFAULT_PERM, true); - Assertions.assertTrue(isDir(fc, new Path(absoluteDir, "newDir"))); + assertTrue(isDir(fc, new Path(absoluteDir, "newDir"))); absoluteDir = fileContextTestHelper.getTestRootPath(fc, "nonexistingPath"); try { fc.setWorkingDirectory(absoluteDir); - Assertions.fail("cd to non existing dir should have failed"); + fail("cd to non existing dir should have failed"); } catch (Exception e) { // Exception as expected } @@ -291,7 +295,7 @@ public void testWorkingDirectory() throws Exception { absoluteDir = new Path(LOCAL_FS_ROOT_URI + "/existingDir"); fc.mkdir(absoluteDir, FileContext.DEFAULT_PERM, true); fc.setWorkingDirectory(absoluteDir); - Assertions.assertEquals(absoluteDir, fc.getWorkingDirectory()); + assertEquals(absoluteDir, fc.getWorkingDirectory()); } @@ -301,9 +305,9 @@ public void testWorkingDirectory() throws Exception { @Test public void testResolvePath() throws IOException { - Assertions.assertEquals(chrootedTo, fc.getDefaultFileSystem().resolvePath(new Path("/"))); + assertEquals(chrootedTo, fc.getDefaultFileSystem().resolvePath(new Path("/"))); fileContextTestHelper.createFile(fc, "/foo"); - Assertions.assertEquals(new Path(chrootedTo, "foo"), + assertEquals(new Path(chrootedTo, "foo"), fc.getDefaultFileSystem().resolvePath(new Path("/foo"))); } @@ -319,7 +323,7 @@ public void testIsValidNameValidInBaseFs() throws Exception { AbstractFileSystem baseFs = Mockito.spy(fc.getDefaultFileSystem()); ChRootedFs chRootedFs = new ChRootedFs(baseFs, new Path("/chroot")); Mockito.doReturn(true).when(baseFs).isValidName(Mockito.anyString()); - Assertions.assertTrue(chRootedFs.isValidName("/test")); + assertTrue(chRootedFs.isValidName("/test")); Mockito.verify(baseFs).isValidName("/chroot/test"); } @@ -328,7 +332,7 @@ public void testIsValidNameInvalidInBaseFs() throws Exception { AbstractFileSystem baseFs = Mockito.spy(fc.getDefaultFileSystem()); ChRootedFs chRootedFs = new ChRootedFs(baseFs, new Path("/chroot")); Mockito.doReturn(false).when(baseFs).isValidName(Mockito.anyString()); - Assertions.assertFalse(chRootedFs.isValidName("/test")); + assertFalse(chRootedFs.isValidName("/test")); Mockito.verify(baseFs).isValidName("/chroot/test"); } @@ -342,7 +346,7 @@ public void testCreateSnapshot() throws Exception { ChRootedFs chRootedFs = new ChRootedFs(baseFs, chrootedTo); Mockito.doReturn(snapRootPath).when(baseFs) .createSnapshot(chRootedSnapRootPath, "snap1"); - Assertions.assertEquals(snapRootPath, + assertEquals(snapRootPath, chRootedFs.createSnapshot(snapRootPath, "snap1")); Mockito.verify(baseFs).createSnapshot(chRootedSnapRootPath, 
"snap1"); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointInterceptorFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointInterceptorFactory.java index 9d9f4bd615c37..49bee32ff0bc9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointInterceptorFactory.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointInterceptorFactory.java @@ -17,9 +17,10 @@ */ package org.apache.hadoop.fs.viewfs; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertTrue; + /** * Test Regex Mount Point Interceptor Factory. */ @@ -34,7 +35,7 @@ public void testCreateNormalCase() { .toString(RegexMountPoint.INTERCEPTOR_INTERNAL_SEP) + "replace"; RegexMountPointInterceptor interceptor = RegexMountPointInterceptorFactory.create(replaceInterceptorStr); - Assertions.assertTrue( + assertTrue( interceptor instanceof RegexMountPointResolvedDstPathReplaceInterceptor); } @@ -49,6 +50,6 @@ public void testCreateBadCase() { + "replace"; RegexMountPointInterceptor interceptor = RegexMountPointInterceptorFactory.create(replaceInterceptorStr); - Assertions.assertTrue(interceptor == null); + assertTrue(interceptor == null); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointResolvedDstPathReplaceInterceptor.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointResolvedDstPathReplaceInterceptor.java index a6249c65c07b0..6327d8562c6d4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointResolvedDstPathReplaceInterceptor.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestRegexMountPointResolvedDstPathReplaceInterceptor.java @@ -19,10 +19,11 @@ import java.io.IOException; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import static org.apache.hadoop.fs.viewfs.RegexMountPointInterceptorType.REPLACE_RESOLVED_DST_PATH; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; /** * Test RegexMountPointResolvedDstPathReplaceInterceptor. 
@@ -43,11 +44,11 @@ public void testDeserializeFromStringNormalCase() throws IOException { RegexMountPointResolvedDstPathReplaceInterceptor interceptor = RegexMountPointResolvedDstPathReplaceInterceptor .deserializeFromString(serializedString); - Assertions.assertEquals(srcRegex, interceptor.getSrcRegexString()); - Assertions.assertEquals(replaceString, interceptor.getReplaceString()); - Assertions.assertNull(interceptor.getSrcRegexPattern()); + assertEquals(srcRegex, interceptor.getSrcRegexString()); + assertEquals(replaceString, interceptor.getReplaceString()); + assertNull(interceptor.getSrcRegexPattern()); interceptor.initialize(); - Assertions.assertEquals(srcRegex, + assertEquals(srcRegex, interceptor.getSrcRegexPattern().toString()); } @@ -60,7 +61,7 @@ public void testDeserializeFromStringBadCase() throws IOException { RegexMountPointResolvedDstPathReplaceInterceptor interceptor = RegexMountPointResolvedDstPathReplaceInterceptor .deserializeFromString(serializedString); - Assertions.assertNull(interceptor); + assertNull(interceptor); } @Test @@ -71,7 +72,7 @@ public void testSerialization() { RegexMountPointResolvedDstPathReplaceInterceptor interceptor = new RegexMountPointResolvedDstPathReplaceInterceptor(srcRegex, replaceString); - Assertions.assertEquals(interceptor.serializeToString(), serializedString); + assertEquals(interceptor.serializeToString(), serializedString); } @Test @@ -82,7 +83,7 @@ public void testInterceptSource() { new RegexMountPointResolvedDstPathReplaceInterceptor(srcRegex, replaceString); String sourcePath = "/a/b/l3/dd"; - Assertions.assertEquals(sourcePath, interceptor.interceptSource(sourcePath)); + assertEquals(sourcePath, interceptor.interceptSource(sourcePath)); } @Test @@ -95,7 +96,7 @@ public void testInterceptResolve() throws IOException { new RegexMountPointResolvedDstPathReplaceInterceptor(srcRegex, replaceString); interceptor.initialize(); - Assertions.assertEquals("/user-hdfs", + assertEquals("/user-hdfs", interceptor.interceptResolvedDestPathStr(pathAfterResolution)); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegation.java index 9bbec07a96733..613f3440c41de 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegation.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemDelegation.java @@ -35,8 +35,8 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.BeforeAll; import static org.apache.hadoop.fs.viewfs.TestChRootedFileSystem.getChildFileSystem; -import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.verify; /** * Verify that viewfs propagates certain methods to the underlying fs diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemOverloadSchemeLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemOverloadSchemeLocalFileSystem.java index 22bc916de5b61..f245109b92fae 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemOverloadSchemeLocalFileSystem.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemOverloadSchemeLocalFileSystem.java @@ -30,13 +30,16 @@ import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.Path; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; /** * @@ -96,7 +99,7 @@ public void testLocalTargetLinkWriteSimple() } try (FSDataInputStream lViewIs = lViewFs.open(testPath)) { - Assertions.assertEquals(testString, lViewIs.readUTF()); + assertEquals(testString, lViewIs.readUTF()); } } } @@ -113,9 +116,9 @@ public void testLocalFsCreateAndDelete() throws Exception { try (FileSystem lViewFS = FileSystem.get(mountURI, conf)) { Path testPath = new Path(mountURI.toString() + "/lfsroot/test"); lViewFS.createNewFile(testPath); - Assertions.assertTrue(lViewFS.exists(testPath)); + assertTrue(lViewFS.exists(testPath)); lViewFS.delete(testPath, true); - Assertions.assertFalse(lViewFS.exists(testPath)); + assertFalse(lViewFS.exists(testPath)); } } @@ -133,7 +136,7 @@ public void testLocalFsLinkSlashMerge() throws Exception { try (FileSystem lViewFS = FileSystem.get(mountURI, conf)) { Path fileOnRoot = new Path(mountURI.toString() + "/NewFile"); lViewFS.createNewFile(fileOnRoot); - Assertions.assertTrue(lViewFS.exists(fileOnRoot)); + assertTrue(lViewFS.exists(fileOnRoot)); } } @@ -150,7 +153,7 @@ public void testLocalFsLinkSlashMergeWithOtherMountLinks() throws Exception { new String[] {targetTestRoot + "/wd2", targetTestRoot + "/wd2" }, conf); final URI mountURI = URI.create("file://mt/"); FileSystem.get(mountURI, conf); - Assertions.fail("A merge slash cannot be configured with other mount links."); + fail("A merge slash cannot be configured with other mount links."); }); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java index febdd99aba020..f4e57800c7c93 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java @@ -34,7 +34,6 @@ import org.junit.jupiter.api.Test; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.*; import static org.apache.hadoop.fs.viewfs.Constants.*; -import static org.junit.jupiter.api.Assertions.*; public class TestViewFsTrash { FileSystem fsTarget; // the target file system - the mount will point here diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java index 7c5d9b73fddef..887b9f8bdc03d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java @@ -39,7 +39,9 @@ import org.junit.jupiter.api.Test; import org.mockito.Mockito; 
-import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * The FileStatus is being serialized in MR as jobs are submitted. @@ -83,8 +85,8 @@ public void testFileStatusSerialziation() FileStatus stat = vfs.getFileStatus(path); assertEquals(content.length, stat.getLen()); ContractTestUtils.assertNotErasureCoded(vfs, path); - assertTrue( - stat.toString().contains("isErasureCoded=false"), path + " should have erasure coding unset in " + + assertTrue(stat.toString().contains("isErasureCoded=false"), + path + " should have erasure coding unset in " + "FileStatus#toString(): " + stat); // check serialization/deserialization diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java index e7057596c6839..b2d7416aa7675 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java @@ -31,7 +31,7 @@ import org.apache.hadoop.fs.viewfs.ViewFileSystemOverloadScheme.ChildFsGetter; import org.apache.hadoop.util.Shell; import org.eclipse.jetty.util.log.Log; -import org.junit.jupiter.api.Assertions; +import org.junit.Assert; /** @@ -164,7 +164,7 @@ static void addMountLinksToFile(String mountTable, String[] sources, out.writeBytes(""); if (isNfly) { String[] srcParts = src.split("[.]"); - Assertions.assertEquals(3, srcParts.length, "Invalid NFlyLink format"); + Assert.assertEquals("Invalid NFlyLink format", 3, srcParts.length); String actualSrc = srcParts[srcParts.length - 1]; String params = srcParts[srcParts.length - 2]; out.writeBytes(prefix + Constants.CONFIG_VIEWFS_LINK_NFLY + "." @@ -202,7 +202,7 @@ public static void addMountLinksToConf(String mountTable, String[] sources, boolean isNfly = src.startsWith(Constants.CONFIG_VIEWFS_LINK_NFLY); if (isNfly) { String[] srcParts = src.split("[.]"); - Assertions.assertEquals(3, srcParts.length, "Invalid NFlyLink format"); + Assert.assertEquals("Invalid NFlyLink format", 3, srcParts.length); String actualSrc = srcParts[srcParts.length - 1]; String params = srcParts[srcParts.length - 2]; ConfigUtil.addLinkNfly(config, mountTableName, actualSrc, params, From f5f015938bb9ed5197f1f30aea96488dc24ff3d1 Mon Sep 17 00:00:00 2001 From: fanshilun Date: Tue, 4 Feb 2025 21:34:58 +0800 Subject: [PATCH 4/6] HADOOP-19415. Fix CheckStyle. 
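The bulk of this follow-up only re-wraps assertion calls that became too long after the JUnit 4 to 5 migration: JUnit 5 keeps the expected/actual pair first and moves the failure message to the last argument, the opposite of the JUnit 4 overloads, so long calls are now broken after the actual value rather than before the message. A minimal sketch of the convention the re-wrapped calls follow (the class name and values below are illustrative only, not taken from this patch):

    import static org.junit.jupiter.api.Assertions.assertEquals;

    import org.junit.jupiter.api.Test;

    public class AssertionOrderExample {
      @Test
      public void messageIsLastInJUnit5() {
        long expectedLength = 42L;
        long actualLength = 42L;
        // JUnit 4 used assertEquals(String message, long expected, long actual);
        // JUnit 5 moves the message to the end, so an over-long call wraps after
        // the actual value and the message starts the continuation line.
        assertEquals(expectedLength, actualLength,
            "getLength");
      }
    }
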
--- .../hadoop/fs/TestChecksumFileSystem.java | 6 ++-- .../apache/hadoop/fs/TestContentSummary.java | 20 ++++++------ .../apache/hadoop/fs/TestDFVariations.java | 6 ++-- .../java/org/apache/hadoop/fs/TestDU.java | 15 ++++----- .../org/apache/hadoop/fs/TestFileStatus.java | 3 +- .../hadoop/fs/TestFilterFileSystem.java | 9 ++---- .../apache/hadoop/fs/TestGlobExpander.java | 10 +++--- .../hadoop/fs/TestHarFileSystemBasics.java | 5 ++- .../hadoop/fs/TestLocalDirAllocator.java | 4 +-- .../apache/hadoop/fs/TestLocalFileSystem.java | 31 +++++++++---------- 10 files changed, 48 insertions(+), 61 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java index 4415605bf6ae6..03494e728937b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestChecksumFileSystem.java @@ -170,13 +170,11 @@ public void testStreamType() throws Exception { localFs.setVerifyChecksum(true); in = localFs.open(testPath); - assertTrue( - in.getWrappedStream() instanceof FSInputChecker, "stream is input checker"); + assertTrue(in.getWrappedStream() instanceof FSInputChecker, "stream is input checker"); localFs.setVerifyChecksum(false); in = localFs.open(testPath); - assertFalse( - in.getWrappedStream() instanceof FSInputChecker, "stream is not input checker"); + assertFalse(in.getWrappedStream() instanceof FSInputChecker, "stream is not input checker"); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java index 213a832463489..3577738d2074f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java @@ -58,11 +58,11 @@ public void testConstructorWithQuota() { spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build(); assertEquals(length, contentSummary.getLength(), "getLength"); assertEquals(fileCount, contentSummary.getFileCount(), "getFileCount"); - assertEquals(directoryCount -, contentSummary.getDirectoryCount(), "getDirectoryCount"); + assertEquals(directoryCount, + contentSummary.getDirectoryCount(), "getDirectoryCount"); assertEquals(quota, contentSummary.getQuota(), "getQuota"); - assertEquals(spaceConsumed -, contentSummary.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(spaceConsumed, + contentSummary.getSpaceConsumed(), "getSpaceConsumed"); assertEquals(spaceQuota, contentSummary.getSpaceQuota(), "getSpaceQuota"); } @@ -78,8 +78,8 @@ public void testConstructorNoQuota() { spaceConsumed(length).build(); assertEquals(length, contentSummary.getLength(), "getLength"); assertEquals(fileCount, contentSummary.getFileCount(), "getFileCount"); - assertEquals(directoryCount -, contentSummary.getDirectoryCount(), "getDirectoryCount"); + assertEquals(directoryCount, + contentSummary.getDirectoryCount(), "getDirectoryCount"); assertEquals(-1, contentSummary.getQuota(), "getQuota"); assertEquals(length, contentSummary.getSpaceConsumed(), "getSpaceConsumed"); assertEquals(-1, contentSummary.getSpaceQuota(), "getSpaceQuota"); @@ -131,11 +131,11 @@ public void testReadFields() throws IOException { 
contentSummary.readFields(in); assertEquals(length, contentSummary.getLength(), "getLength"); assertEquals(fileCount, contentSummary.getFileCount(), "getFileCount"); - assertEquals(directoryCount -, contentSummary.getDirectoryCount(), "getDirectoryCount"); + assertEquals(directoryCount, + contentSummary.getDirectoryCount(), "getDirectoryCount"); assertEquals(quota, contentSummary.getQuota(), "getQuota"); - assertEquals(spaceConsumed -, contentSummary.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(spaceConsumed, + contentSummary.getSpaceConsumed(), "getSpaceConsumed"); assertEquals(spaceQuota, contentSummary.getSpaceQuota(), "getSpaceQuota"); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java index 804e8fea50302..818bd84d88b37 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java @@ -70,8 +70,7 @@ public void testMount() throws Exception { XXDF df = new XXDF(); String expectedMount = Shell.WINDOWS ? df.getDirPath().substring(0, 2) : "/foo/bar"; - assertEquals( - expectedMount, df.getMount(), "Invalid mount point"); + assertEquals(expectedMount, df.getMount(), "Invalid mount point"); } @Test @@ -80,8 +79,7 @@ public void testFileSystem() throws Exception { XXDF df = new XXDF(); String expectedFileSystem = Shell.WINDOWS ? df.getDirPath().substring(0, 2) : "/dev/sda3"; - assertEquals( - expectedFileSystem, df.getFilesystem(), "Invalid filesystem"); + assertEquals(expectedFileSystem, df.getFilesystem(), "Invalid filesystem"); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java index cc9a698841361..1dc80feee1840 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java @@ -91,9 +91,8 @@ public void testDU() throws IOException, InterruptedException { long duSize = du.getUsed(); du.close(); - assertTrue( - duSize >= writtenSize && - writtenSize <= (duSize + slack), "Invalid on-disk size"); + assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack), + "Invalid on-disk size"); //test with 0 interval, will not launch thread du = new DU(file, 0, 1, -1); @@ -101,18 +100,16 @@ public void testDU() throws IOException, InterruptedException { duSize = du.getUsed(); du.close(); - assertTrue( - duSize >= writtenSize && - writtenSize <= (duSize + slack), "Invalid on-disk size"); + assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack), + "Invalid on-disk size"); //test without launching thread du = new DU(file, 10000, 0, -1); du.init(); duSize = du.getUsed(); - assertTrue( - duSize >= writtenSize && - writtenSize <= (duSize + slack), "Invalid on-disk size"); + assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack), + "Invalid on-disk size"); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java index 028feaca2749d..3266bb657c4d0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java @@ -87,8 +87,7 @@ public void testFileStatusWritable() throws Exception { int iterator = 0; for (FileStatus fs : tests) { dest.readFields(in); - assertEquals( - dest, fs, "Different FileStatuses in iteration " + iterator); + assertEquals(dest, fs, "Different FileStatuses in iteration " + iterator); iterator++; } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java index aa434a270bf71..0722d052bff18 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java @@ -188,7 +188,7 @@ public void testFilterFileSystem() throws Exception { } } assertTrue(errors <= 0, (errors + " methods were not overridden correctly - see" + - " log")); + " log")); } @Test @@ -307,11 +307,8 @@ public void testFilterPathCapabilites() throws Exception { try (FilterFileSystem flfs = new FilterLocalFileSystem()) { flfs.initialize(URI.create("filter:/"), conf); Path src = new Path("/src"); - assertFalse( - - flfs.hasPathCapability(src, - CommonPathCapabilities.FS_MULTIPART_UPLOADER), "hasPathCapability(FS_MULTIPART_UPLOADER) should have failed for " - + flfs); + assertFalse(flfs.hasPathCapability(src, CommonPathCapabilities.FS_MULTIPART_UPLOADER), + "hasPathCapability(FS_MULTIPART_UPLOADER) should have failed for " + flfs); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java index 23cd59729a321..39f8504f8d9dc 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java @@ -21,7 +21,7 @@ import java.util.List; import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestGlobExpander { @@ -55,11 +55,11 @@ private void checkExpansionIsIdentical(String filePattern) throws IOException { private void checkExpansion(String filePattern, String... 
expectedExpansions) throws IOException { List actualExpansions = GlobExpander.expand(filePattern); - assertEquals(expectedExpansions.length -, actualExpansions.size(), "Different number of expansions"); + assertEquals(expectedExpansions.length, + actualExpansions.size(), "Different number of expansions"); for (int i = 0; i < expectedExpansions.length; i++) { - assertEquals(expectedExpansions[i] -, actualExpansions.get(i), "Expansion of " + filePattern); + assertEquals(expectedExpansions[i], + actualExpansions.get(i), "Expansion of " + filePattern); } } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java index 445d13e426b83..7dd45179b3e50 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java @@ -274,9 +274,8 @@ public void testMakeQualifiedPath() throws Exception { + harPath.toUri().getPath().toString(); Path path = new Path(harPathWithUserinfo); Path qualifiedPath = path.getFileSystem(conf).makeQualified(path); - assertTrue( - qualifiedPath.toString().equals(harPathWithUserinfo), String.format( - "The qualified path (%s) did not match the expected path (%s).", + assertTrue(qualifiedPath.toString().equals(harPathWithUserinfo), + String.format("The qualified path (%s) did not match the expected path (%s).", qualifiedPath.toString(), harPathWithUserinfo)); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java index e11ff66c6c2fc..4941ebf322016 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java @@ -108,8 +108,8 @@ private static void rmBufferDirs() throws IOException { private static void validateTempDirCreation(String dir) throws IOException { File result = createTempFile(SMALL_FILE_SIZE); - assertTrue( - result.getPath().startsWith(new Path(dir, FILENAME).toUri().getPath()), "Checking for " + dir + " in " + result + " - FAILED!"); + assertTrue(result.getPath().startsWith(new Path(dir, FILENAME).toUri().getPath()), + "Checking for " + dir + " in " + result + " - FAILED!"); } private static File createTempFile() throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java index 223f611d4b167..6bc3e382b9de5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java @@ -276,8 +276,7 @@ public void testBasicDelete() throws IOException { assertTrue(fileSys.mkdirs(dir1)); writeFile(fileSys, file1, 1); writeFile(fileSys, file2, 1); - assertFalse( - fileSys.delete(file3), "Returned true deleting non-existant path"); + assertFalse(fileSys.delete(file3), "Returned true deleting non-existant path"); assertTrue(fileSys.delete(file1), "Did not delete file"); assertTrue(fileSys.delete(dir1), "Did not delete non-empty dir"); } @@ -314,8 +313,8 @@ public 
void testListStatusWithColons() throws IOException { colonFile.mkdirs(); FileStatus[] stats = fileSys.listStatus(new Path(TEST_ROOT_DIR)); assertEquals(1, stats.length, "Unexpected number of stats"); - assertEquals(colonFile.getAbsolutePath() -, stats[0].getPath().toUri().getPath(), "Bad path from stat"); + assertEquals(colonFile.getAbsolutePath(), + stats[0].getPath().toUri().getPath(), "Bad path from stat"); } @Test @@ -329,8 +328,8 @@ public void testListStatusReturnConsistentPathOnWindows() throws IOException { file.mkdirs(); FileStatus[] stats = fileSys.listStatus(new Path(dirNoDriveSpec)); assertEquals(1, stats.length, "Unexpected number of stats"); - assertEquals(new Path(file.getPath()).toUri().getPath() -, stats[0].getPath().toUri().getPath(), "Bad path from stat"); + assertEquals(new Path(file.getPath()).toUri().getPath(), + stats[0].getPath().toUri().getPath(), "Bad path from stat"); } @Test @@ -601,8 +600,8 @@ public void testStripFragmentFromPath() throws Exception { // Create test file with fragment FileSystemTestHelper.createFile(fs, pathWithFragment); Path resolved = fs.resolvePath(pathWithFragment); - assertEquals(pathQualified -, resolved, "resolvePath did not strip fragment from Path"); + assertEquals(pathQualified, + resolved, "resolvePath did not strip fragment from Path"); } @Test @@ -794,8 +793,8 @@ protected Statistics getFileStatistics() { .stream() .filter(s -> s.getScheme().equals("file")) .collect(Collectors.toList()); - assertEquals( - 1, fileStats.size(), "Number of statistics counters for file://"); + assertEquals(1, fileStats.size(), + "Number of statistics counters for file://"); // this should be used for local and rawLocal, as they share the // same schema (although their class is different) return fileStats.get(0); @@ -827,8 +826,8 @@ private void assertWritesCRC(String operation, Path path, final long bytesOut0 = stats.getBytesWritten(); try { callable.call(); - assertEquals( - CRC_SIZE + DATA.length, stats.getBytesWritten() - bytesOut0, "Bytes written in " + operation + "; stats=" + stats); + assertEquals(CRC_SIZE + DATA.length, stats.getBytesWritten() - bytesOut0, + "Bytes written in " + operation + "; stats=" + stats); } finally { if (delete) { // clean up @@ -857,8 +856,8 @@ public void testCRCwithClassicAPIs() throws Throwable { final long bytesRead0 = stats.getBytesRead(); fileSys.open(file).close(); final long bytesRead1 = stats.getBytesRead(); - assertEquals( - CRC_SIZE, bytesRead1 - bytesRead0, "Bytes read in open() call with stats " + stats); + assertEquals(CRC_SIZE, bytesRead1 - bytesRead0, + "Bytes read in open() call with stats " + stats); } /** @@ -969,8 +968,8 @@ public void testReadIncludesCRCwithBuilders() throws Throwable { // now read back the data, again with the builder API final long bytesRead0 = stats.getBytesRead(); fileSys.openFile(file).build().get().close(); - assertEquals( - CRC_SIZE, stats.getBytesRead() - bytesRead0, "Bytes read in openFile() call with stats " + stats); + assertEquals(CRC_SIZE, stats.getBytesRead() - bytesRead0, + "Bytes read in openFile() call with stats " + stats); // now write with overwrite = true assertWritesCRC("createFileNonRecursive()", file, From cf57acb2ec0a5181a25abac3416677a033f1dec7 Mon Sep 17 00:00:00 2001 From: fanshilun Date: Wed, 5 Feb 2025 10:35:26 +0800 Subject: [PATCH 5/6] HADOOP-19415. Fix CheckStyle. 
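Most of the churn in TestLocalDirAllocator comes from the member-name checkstyle rule: the fields backing the test parameters are renamed from ROOT/PREFIX to root/prefix. The surrounding JUnit 5 pattern is unchanged; JUnit 5 has no class-level Parameterized runner, so each @ParameterizedTest takes its arguments from @MethodSource("params") and forwards them to an init method before running. A minimal sketch of that shape (the class name and directory values below are illustrative only, not taken from this patch):

    import java.util.Arrays;
    import java.util.Collection;

    import org.junit.jupiter.params.ParameterizedTest;
    import org.junit.jupiter.params.provider.MethodSource;

    import static org.junit.jupiter.api.Assertions.assertTrue;

    public class ParameterizedExample {

      private String root;
      private String prefix;

      // Replaces the constructor injection used with JUnit 4's @Parameterized.
      public void initParameterizedExample(String paramRoot, String paramPrefix) {
        this.root = paramRoot;
        this.prefix = paramPrefix;
      }

      // Each Object[] row supplies one (root, prefix) pair to the test method.
      public static Collection<Object[]> params() {
        return Arrays.asList(new Object[][] {
            {"/tmp/build/test", "/RELATIVE"},
            {"/tmp/build/test", "/ABSOLUTE"},
        });
      }

      @ParameterizedTest
      @MethodSource("params")
      public void testBufferDirName(String paramRoot, String paramPrefix) {
        initParameterizedExample(paramRoot, paramPrefix);
        assertTrue((root + prefix + 0).startsWith(root));
      }
    }
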
--- .../apache/hadoop/fs/TestContentSummary.java | 9 +-- .../apache/hadoop/fs/TestDFVariations.java | 4 +- .../java/org/apache/hadoop/fs/TestDU.java | 6 +- .../hadoop/fs/TestDelegationTokenRenewer.java | 4 +- .../fs/TestFileSystemStorageStatistics.java | 2 +- .../hadoop/fs/TestFilterFileSystem.java | 4 +- .../hadoop/fs/TestFsShellReturnCode.java | 5 +- .../apache/hadoop/fs/TestGlobExpander.java | 4 +- .../hadoop/fs/TestHarFileSystemBasics.java | 4 +- .../org/apache/hadoop/fs/TestListFiles.java | 6 +- .../hadoop/fs/TestLocalDirAllocator.java | 74 +++++++++---------- .../apache/hadoop/fs/TestLocalFileSystem.java | 17 +++-- .../fs/TestLocalFileSystemPermission.java | 4 +- .../org/apache/hadoop/fs/TestQuotaUsage.java | 4 +- .../java/org/apache/hadoop/fs/TestTrash.java | 16 ++-- .../hadoop/fs/ftp/TestFTPFileSystem.java | 1 - .../fs/permission/TestFsPermission.java | 2 +- .../hadoop/fs/sftp/TestSFTPFileSystem.java | 2 +- .../hadoop/fs/shell/TestAclCommands.java | 24 +++--- .../apache/hadoop/fs/shell/TestPathData.java | 4 +- .../hadoop/fs/shell/TestXAttrCommands.java | 14 ++-- .../apache/hadoop/fs/shell/find/TestAnd.java | 2 +- .../fs/shell/find/TestFilterExpression.java | 2 +- .../apache/hadoop/fs/shell/find/TestFind.java | 2 +- .../hadoop/fs/shell/find/TestIname.java | 2 +- .../apache/hadoop/fs/shell/find/TestName.java | 2 +- .../hadoop/fs/shell/find/TestPrint.java | 2 +- .../hadoop/fs/shell/find/TestPrint0.java | 2 +- .../hadoop/fs/store/TestDataBlocks.java | 8 +- .../hadoop/fs/viewfs/TestChRootedFs.java | 48 ++++++------ ...OverloadSchemeCentralMountTableConfig.java | 4 +- .../fs/viewfs/TestViewfsFileStatus.java | 2 +- 32 files changed, 141 insertions(+), 145 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java index 3577738d2074f..c911d79e0146a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java @@ -58,8 +58,7 @@ public void testConstructorWithQuota() { spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build(); assertEquals(length, contentSummary.getLength(), "getLength"); assertEquals(fileCount, contentSummary.getFileCount(), "getFileCount"); - assertEquals(directoryCount, - contentSummary.getDirectoryCount(), "getDirectoryCount"); + assertEquals(directoryCount, contentSummary.getDirectoryCount(), "getDirectoryCount"); assertEquals(quota, contentSummary.getQuota(), "getQuota"); assertEquals(spaceConsumed, contentSummary.getSpaceConsumed(), "getSpaceConsumed"); @@ -131,11 +130,9 @@ public void testReadFields() throws IOException { contentSummary.readFields(in); assertEquals(length, contentSummary.getLength(), "getLength"); assertEquals(fileCount, contentSummary.getFileCount(), "getFileCount"); - assertEquals(directoryCount, - contentSummary.getDirectoryCount(), "getDirectoryCount"); + assertEquals(directoryCount, contentSummary.getDirectoryCount(), "getDirectoryCount"); assertEquals(quota, contentSummary.getQuota(), "getQuota"); - assertEquals(spaceConsumed, - contentSummary.getSpaceConsumed(), "getSpaceConsumed"); + assertEquals(spaceConsumed, contentSummary.getSpaceConsumed(), "getSpaceConsumed"); assertEquals(spaceQuota, contentSummary.getSpaceQuota(), "getSpaceQuota"); } diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java index 818bd84d88b37..ec6c2d13ca332 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java @@ -162,9 +162,9 @@ public void testGetMountCurrentDirectory() throws Exception { String mountPath = df.getMount(); File mountDir = new File(mountPath); assertTrue(mountDir.exists(), "Mount dir ["+mountDir.getAbsolutePath()+"] should exist."); - assertTrue(mountDir.isDirectory(), + assertTrue(mountDir.isDirectory(), "Mount dir ["+mountDir.getAbsolutePath()+"] should be directory."); - assertTrue(workingDir.startsWith(mountPath), + assertTrue(workingDir.startsWith(mountPath), "Working dir ["+workingDir+"] should start with ["+mountPath+"]."); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java index 1dc80feee1840..654867972183a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java @@ -91,7 +91,7 @@ public void testDU() throws IOException, InterruptedException { long duSize = du.getUsed(); du.close(); - assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack), + assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack), "Invalid on-disk size"); //test with 0 interval, will not launch thread @@ -100,7 +100,7 @@ public void testDU() throws IOException, InterruptedException { duSize = du.getUsed(); du.close(); - assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack), + assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack), "Invalid on-disk size"); //test without launching thread @@ -108,7 +108,7 @@ public void testDU() throws IOException, InterruptedException { du.init(); duSize = du.getUsed(); - assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack), + assertTrue(duSize >= writtenSize && writtenSize <= (duSize + slack), "Invalid on-disk size"); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java index 1606a95490925..8d11297d7cfd7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDelegationTokenRenewer.java @@ -79,7 +79,7 @@ public Long answer(InvocationOnMock invocation) { renewer.addRenewAction(fs); - assertEquals(1, renewer.getRenewQueueLength(), + assertEquals(1, renewer.getRenewQueueLength(), "FileSystem not added to DelegationTokenRenewer"); Thread.sleep(RENEW_CYCLE*2); @@ -92,7 +92,7 @@ public Long answer(InvocationOnMock invocation) { verify(fs, never()).getDelegationToken(null); verify(fs, never()).setDelegationToken(any()); - assertEquals(0, renewer.getRenewQueueLength(), + assertEquals(0, renewer.getRenewQueueLength(), "FileSystem not removed from DelegationTokenRenewer"); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java index c74cb2f880d54..e68a0857723c7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemStorageStatistics.java @@ -60,7 +60,7 @@ public class TestFileSystemStorageStatistics { new FileSystem.Statistics("test-scheme"); private FileSystemStorageStatistics storageStatistics = new FileSystemStorageStatistics(FS_STORAGE_STATISTICS_NAME, statistics); - + @BeforeEach public void setup() { statistics.incrementBytesRead(RandomUtils.nextInt(0, 100)); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java index 0722d052bff18..6ce01fe7176e1 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java @@ -307,8 +307,8 @@ public void testFilterPathCapabilites() throws Exception { try (FilterFileSystem flfs = new FilterLocalFileSystem()) { flfs.initialize(URI.create("filter:/"), conf); Path src = new Path("/src"); - assertFalse(flfs.hasPathCapability(src, CommonPathCapabilities.FS_MULTIPART_UPLOADER), - "hasPathCapability(FS_MULTIPART_UPLOADER) should have failed for " + flfs); + assertFalse(flfs.hasPathCapability(src, CommonPathCapabilities.FS_MULTIPART_UPLOADER), + "hasPathCapability(FS_MULTIPART_UPLOADER) should have failed for " + flfs); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java index 5ce7b074e73df..917ec0374aa48 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java @@ -109,7 +109,7 @@ private void change(int exit, String owner, String group, String...files) assertEquals(((owner != null) ? "STUB-"+owner : oldStats[i][j].getOwner()), stats[j].getOwner(), "check owner of " + files[i]); assertEquals(((group != null) ? "STUB-"+group : oldStats[i][j].getGroup()), - stats[j].getGroup(), "check group of " + files[i]); + stats[j].getGroup(), "check group of " + files[i]); } } } @@ -310,7 +310,8 @@ public void testGetWithInvalidSourcePathShouldNotDisplayNullInConsole() results = bytes.toString(); assertEquals(1, run, "Return code should be 1"); assertTrue(!results.contains("get: null"), " Null is coming when source path is invalid. 
"); - assertTrue(results.contains("get: `"+args[1]+"': No such file or directory"), " Not displaying the intended message "); + assertTrue(results.contains("get: `" + args[1] + "': No such file or directory"), + " Not displaying the intended message "); } finally { IOUtils.closeStream(out); System.setErr(oldErr); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java index 39f8504f8d9dc..b18047b771e04 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGlobExpander.java @@ -55,10 +55,10 @@ private void checkExpansionIsIdentical(String filePattern) throws IOException { private void checkExpansion(String filePattern, String... expectedExpansions) throws IOException { List actualExpansions = GlobExpander.expand(filePattern); - assertEquals(expectedExpansions.length, + assertEquals(expectedExpansions.length, actualExpansions.size(), "Different number of expansions"); for (int i = 0; i < expectedExpansions.length; i++) { - assertEquals(expectedExpansions[i], + assertEquals(expectedExpansions[i], actualExpansions.get(i), "Expansion of " + filePattern); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java index 7dd45179b3e50..8a2b5fc19230a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java @@ -260,7 +260,7 @@ public void testListLocatedStatus() throws Exception { assertTrue(expectedFileNames.contains(fileName), fileName + " not in expected files list"); expectedFileNames.remove(fileName); } - assertEquals(0, expectedFileNames.size(), + assertEquals(0, expectedFileNames.size(), "Didn't find all of the expected file names: " + expectedFileNames); } @@ -274,7 +274,7 @@ public void testMakeQualifiedPath() throws Exception { + harPath.toUri().getPath().toString(); Path path = new Path(harPathWithUserinfo); Path qualifiedPath = path.getFileSystem(conf).makeQualified(path); - assertTrue(qualifiedPath.toString().equals(harPathWithUserinfo), + assertTrue(qualifiedPath.toString().equals(harPathWithUserinfo), String.format("The qualified path (%s) did not match the expected path (%s).", qualifiedPath.toString(), harPathWithUserinfo)); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java index c619557ccca1c..0a4dff0fbc2f3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java @@ -162,17 +162,17 @@ public void testDirectory() throws IOException { itor = fs.listFiles(TEST_DIR, true); stat = itor.next(); assertTrue(stat.isFile()); - assertTrue(filesToFind.remove(stat.getPath()), + assertTrue(filesToFind.remove(stat.getPath()), "Path " + stat.getPath() + " unexpected"); stat = itor.next(); assertTrue(stat.isFile()); - assertTrue(filesToFind.remove(stat.getPath()), + assertTrue(filesToFind.remove(stat.getPath()), "Path " + 
stat.getPath() + " unexpected"); stat = itor.next(); assertTrue(stat.isFile()); - assertTrue(filesToFind.remove(stat.getPath()), + assertTrue(filesToFind.remove(stat.getPath()), "Path " + stat.getPath() + " unexpected"); assertFalse(itor.hasNext()); assertTrue(filesToFind.isEmpty()); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java index 4941ebf322016..b8faf8cce3b2c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java @@ -64,8 +64,8 @@ public class TestLocalDirAllocator { final static private String RELATIVE = "/RELATIVE"; final static private String ABSOLUTE = "/ABSOLUTE"; final static private String QUALIFIED = "/QUALIFIED"; - private String ROOT; - private String PREFIX; + private String root; + private String prefix; static { try { @@ -86,11 +86,11 @@ public class TestLocalDirAllocator { BUFFER_DIR_ROOT).toUri().toString(); } - public void initTestLocalDirAllocator(String root, String prefix) { - ROOT = root; - PREFIX = prefix; + public void initTestLocalDirAllocator(String paramRoot, String paramPrefix) { + this.root = paramRoot; + this.prefix = paramPrefix; } - + public static Collection params() { Object [][] data = new Object[][] { { BUFFER_DIR_ROOT, RELATIVE }, @@ -108,7 +108,7 @@ private static void rmBufferDirs() throws IOException { private static void validateTempDirCreation(String dir) throws IOException { File result = createTempFile(SMALL_FILE_SIZE); - assertTrue(result.getPath().startsWith(new Path(dir, FILENAME).toUri().getPath()), + assertTrue(result.getPath().startsWith(new Path(dir, FILENAME).toUri().getPath()), "Checking for " + dir + " in " + result + " - FAILED!"); } @@ -123,17 +123,17 @@ private static File createTempFile(long size) throws IOException { } private String buildBufferDir(String dir, int i) { - return dir + PREFIX + i; + return dir + prefix + i; } - + @Timeout(value = 30) @MethodSource("params") @ParameterizedTest public void test0(String root, String prefix) throws Exception { assumeNotWindows(); initTestLocalDirAllocator(root, prefix); - String dir0 = buildBufferDir(ROOT, 0); - String dir1 = buildBufferDir(ROOT, 1); + String dir0 = buildBufferDir(root, 0); + String dir1 = buildBufferDir(root, 1); try { conf.set(CONTEXT, dir0 + "," + dir1); assertTrue(localFs.mkdirs(new Path(dir1))); @@ -157,8 +157,8 @@ public void test0(String root, String prefix) throws Exception { public void testROBufferDirAndRWBufferDir(String root, String prefix) throws Exception { assumeNotWindows(); initTestLocalDirAllocator(root, prefix); - String dir1 = buildBufferDir(ROOT, 1); - String dir2 = buildBufferDir(ROOT, 2); + String dir1 = buildBufferDir(root, 1); + String dir2 = buildBufferDir(root, 2); try { conf.set(CONTEXT, dir1 + "," + dir2); assertTrue(localFs.mkdirs(new Path(dir2))); @@ -171,7 +171,7 @@ public void testROBufferDirAndRWBufferDir(String root, String prefix) throws Exc rmBufferDirs(); } } - + /** Two buffer dirs. Both do not exist but on a RW disk. 
* Check if tmp dirs are allocated in a round-robin */ @@ -181,8 +181,8 @@ public void testROBufferDirAndRWBufferDir(String root, String prefix) throws Exc public void testDirsNotExist(String root, String prefix) throws Exception { assumeNotWindows(); initTestLocalDirAllocator(root, prefix); - String dir2 = buildBufferDir(ROOT, 2); - String dir3 = buildBufferDir(ROOT, 3); + String dir2 = buildBufferDir(root, 2); + String dir3 = buildBufferDir(root, 3); try { conf.set(CONTEXT, dir2 + "," + dir3); @@ -192,9 +192,9 @@ public void testDirsNotExist(String root, String prefix) throws Exception { int secondDirIdx = (firstDirIdx == 2) ? 3 : 2; // check if tmp dirs are allocated in a round-robin manner - validateTempDirCreation(buildBufferDir(ROOT, firstDirIdx)); - validateTempDirCreation(buildBufferDir(ROOT, secondDirIdx)); - validateTempDirCreation(buildBufferDir(ROOT, firstDirIdx)); + validateTempDirCreation(buildBufferDir(root, firstDirIdx)); + validateTempDirCreation(buildBufferDir(root, secondDirIdx)); + validateTempDirCreation(buildBufferDir(root, firstDirIdx)); } finally { rmBufferDirs(); } @@ -210,8 +210,8 @@ public void testDirsNotExist(String root, String prefix) throws Exception { public void testRWBufferDirBecomesRO(String root, String prefix) throws Exception { assumeNotWindows(); initTestLocalDirAllocator(root, prefix); - String dir3 = buildBufferDir(ROOT, 3); - String dir4 = buildBufferDir(ROOT, 4); + String dir3 = buildBufferDir(root, 3); + String dir4 = buildBufferDir(root, 4); try { conf.set(CONTEXT, dir3 + "," + dir4); assertTrue(localFs.mkdirs(new Path(dir3))); @@ -222,7 +222,7 @@ public void testRWBufferDirBecomesRO(String root, String prefix) throws Exceptio // Determine the round-robin sequence int nextDirIdx = (dirAllocator.getCurrentDirectoryIndex() == 0) ? 
3 : 4; - validateTempDirCreation(buildBufferDir(ROOT, nextDirIdx)); + validateTempDirCreation(buildBufferDir(root, nextDirIdx)); // change buffer directory 2 to be read only new File(new Path(dir4).toUri().getPath()).setReadOnly(); @@ -252,8 +252,8 @@ public void testRWBufferDirBecomesRO(String root, String prefix) throws Exceptio public void testCreateManyFiles(String root, String prefix) throws Exception { assumeNotWindows(); initTestLocalDirAllocator(root, prefix); - String dir5 = buildBufferDir(ROOT, 5); - String dir6 = buildBufferDir(ROOT, 6); + String dir5 = buildBufferDir(root, 5); + String dir6 = buildBufferDir(root, 6); try { conf.set(CONTEXT, dir5 + "," + dir6); @@ -302,7 +302,7 @@ public void testCreateManyFilesRandom(String root, String prefix) throws Excepti final int numTries = 100; String[] dirs = new String[numDirs]; for (int d = 0; d < numDirs; ++d) { - dirs[d] = buildBufferDir(ROOT, d); + dirs[d] = buildBufferDir(root, d); } boolean next_dir_not_selected_at_least_once = false; try { @@ -353,8 +353,8 @@ public void testCreateManyFilesRandom(String root, String prefix) throws Excepti @ParameterizedTest public void testLocalPathForWriteDirCreation(String root, String prefix) throws IOException { initTestLocalDirAllocator(root, prefix); - String dir0 = buildBufferDir(ROOT, 0); - String dir1 = buildBufferDir(ROOT, 1); + String dir0 = buildBufferDir(root, 0); + String dir1 = buildBufferDir(root, 1); try { conf.set(CONTEXT, dir0 + "," + dir1); assertTrue(localFs.mkdirs(new Path(dir1))); @@ -433,7 +433,7 @@ public void testShouldNotthrowNPE(String root, String prefix) throws Exception { public void testNoSideEffects(String root, String prefix) throws IOException { assumeNotWindows(); initTestLocalDirAllocator(root, prefix); - String dir = buildBufferDir(ROOT, 0); + String dir = buildBufferDir(root, 0); try { conf.set(CONTEXT, dir); File result = dirAllocator.createTmpFileForWrite(FILENAME, -1, conf); @@ -458,7 +458,7 @@ public void testNoSideEffects(String root, String prefix) throws IOException { public void testGetLocalPathToRead(String root, String prefix) throws IOException { assumeNotWindows(); initTestLocalDirAllocator(root, prefix); - String dir = buildBufferDir(ROOT, 0); + String dir = buildBufferDir(root, 0); try { conf.set(CONTEXT, dir); assertTrue(localFs.mkdirs(new Path(dir))); @@ -486,8 +486,8 @@ public void testGetLocalPathToRead(String root, String prefix) throws IOExceptio public void testGetAllLocalPathsToRead(String root, String prefix) throws IOException { assumeNotWindows(); initTestLocalDirAllocator(root, prefix); - String dir0 = buildBufferDir(ROOT, 0); - String dir1 = buildBufferDir(ROOT, 1); + String dir0 = buildBufferDir(root, 0); + String dir1 = buildBufferDir(root, 1); try { conf.set(CONTEXT, dir0 + "," + dir1); assertTrue(localFs.mkdirs(new Path(dir0))); @@ -535,7 +535,7 @@ public void testGetAllLocalPathsToRead(String root, String prefix) throws IOExce @ParameterizedTest public void testRemoveContext(String root, String prefix) throws IOException { initTestLocalDirAllocator(root, prefix); - String dir = buildBufferDir(ROOT, 0); + String dir = buildBufferDir(root, 0); try { String contextCfgItemName = "application_1340842292563_0004.app.cache.dirs"; conf.set(contextCfgItemName, dir); @@ -558,7 +558,7 @@ public void testRemoveContext(String root, String prefix) throws IOException { @Timeout(value = 30) @MethodSource("params") @ParameterizedTest - public void testGetLocalPathForWriteForInvalidPaths(String root, String prefix) + public void 
testGetLocalPathForWriteForInvalidPaths(String root, String prefix) throws Exception { initTestLocalDirAllocator(root, prefix); conf.set(CONTEXT, " "); @@ -566,7 +566,7 @@ public void testGetLocalPathForWriteForInvalidPaths(String root, String prefix) dirAllocator.getLocalPathForWrite("/test", conf); fail("not throwing the exception"); } catch (IOException e) { - assertEquals("No space available in any of the local directories.", + assertEquals("No space available in any of the local directories.", e.getMessage(), "Incorrect exception message"); } } @@ -581,8 +581,8 @@ public void testGetLocalPathForWriteForInvalidPaths(String root, String prefix) @ParameterizedTest public void testGetLocalPathForWriteForLessSpace(String root, String prefix) throws Exception { initTestLocalDirAllocator(root, prefix); - String dir0 = buildBufferDir(ROOT, 0); - String dir1 = buildBufferDir(ROOT, 1); + String dir0 = buildBufferDir(root, 0); + String dir1 = buildBufferDir(root, 1); conf.set(CONTEXT, dir0 + "," + dir1); LambdaTestUtils.intercept(DiskErrorException.class, String.format("Could not find any valid local directory for %s with requested size %s", @@ -598,7 +598,7 @@ public void testGetLocalPathForWriteForLessSpace(String root, String prefix) thr @ParameterizedTest public void testDirectoryRecovery(String root, String prefix) throws Throwable { initTestLocalDirAllocator(root, prefix); - String dir0 = buildBufferDir(ROOT, 0); + String dir0 = buildBufferDir(root, 0); String subdir = dir0 + "/subdir1/subdir2"; conf.set(CONTEXT, subdir); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java index 6bc3e382b9de5..ce28b827337db 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java @@ -75,7 +75,7 @@ public class TestLocalFileSystem { private final Path TEST_PATH = new Path(TEST_ROOT_DIR, "test-file"); private Configuration conf; private LocalFileSystem fileSys; - + private void cleanupFile(FileSystem fs, Path name) throws IOException { assertTrue(fs.exists(name)); fs.delete(name, true); @@ -423,7 +423,8 @@ public void testSetTimes() throws Exception { long newAccTime = 23456000; FileStatus status = fileSys.getFileStatus(path); - assertTrue(newModTime != status.getModificationTime(), "check we're actually changing something"); + assertTrue(newModTime != status.getModificationTime(), + "check we're actually changing something"); assertTrue(newAccTime != status.getAccessTime(), "check we're actually changing something"); fileSys.setTimes(path, newModTime, newAccTime); @@ -600,7 +601,7 @@ public void testStripFragmentFromPath() throws Exception { // Create test file with fragment FileSystemTestHelper.createFile(fs, pathWithFragment); Path resolved = fs.resolvePath(pathWithFragment); - assertEquals(pathQualified, + assertEquals(pathQualified, resolved, "resolvePath did not strip fragment from Path"); } @@ -764,7 +765,7 @@ public void testFSOutputStreamBuilderOptions() throws Exception { builder.must("strM", "value"); builder.must("unsupported", 12.34); - assertEquals("value", builder.getOptions().get("strM"), + assertEquals("value", builder.getOptions().get("strM"), "Optional value should be overwrite by a mandatory value"); Set mandatoryKeys = builder.getMandatoryKeys(); @@ -793,7 +794,7 @@ protected 
Statistics getFileStatistics() { .stream() .filter(s -> s.getScheme().equals("file")) .collect(Collectors.toList()); - assertEquals(1, fileStats.size(), + assertEquals(1, fileStats.size(), "Number of statistics counters for file://"); // this should be used for local and rawLocal, as they share the // same schema (although their class is different) @@ -826,7 +827,7 @@ private void assertWritesCRC(String operation, Path path, final long bytesOut0 = stats.getBytesWritten(); try { callable.call(); - assertEquals(CRC_SIZE + DATA.length, stats.getBytesWritten() - bytesOut0, + assertEquals(CRC_SIZE + DATA.length, stats.getBytesWritten() - bytesOut0, "Bytes written in " + operation + "; stats=" + stats); } finally { if (delete) { @@ -856,7 +857,7 @@ public void testCRCwithClassicAPIs() throws Throwable { final long bytesRead0 = stats.getBytesRead(); fileSys.open(file).close(); final long bytesRead1 = stats.getBytesRead(); - assertEquals(CRC_SIZE, bytesRead1 - bytesRead0, + assertEquals(CRC_SIZE, bytesRead1 - bytesRead0, "Bytes read in open() call with stats " + stats); } @@ -968,7 +969,7 @@ public void testReadIncludesCRCwithBuilders() throws Throwable { // now read back the data, again with the builder API final long bytesRead0 = stats.getBytesRead(); fileSys.openFile(file).build().get().close(); - assertEquals(CRC_SIZE, stats.getBytesRead() - bytesRead0, + assertEquals(CRC_SIZE, stats.getBytesRead() - bytesRead0, "Bytes read in openFile() call with stats " + stats); // now write with overwrite = true assertWritesCRC("createFileNonRecursive()", diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java index c9c2464039101..0c20167289e23 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java @@ -236,7 +236,7 @@ public void testSetUmaskInRealTime() throws Exception { try { assertTrue(localfs.mkdirs(dir)); FsPermission initialPermission = getPermission(localfs, dir); - assertEquals(new FsPermission("755"), + assertEquals(new FsPermission("755"), initialPermission, "With umask 022 permission should be 755 since the default " + "permission is 777"); @@ -249,7 +249,7 @@ public void testSetUmaskInRealTime() throws Exception { assertThat(new FsPermission("755")).as( "With umask 062 permission should not be 755 since the " + "default permission is 777").isNotEqualTo(finalPermission); - assertEquals(new FsPermission("715"), finalPermission, + assertEquals(new FsPermission("715"), finalPermission, "With umask 062 we expect 715 since the default permission is 777"); } finally { conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "022"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java index c3e05d4e88758..37a2c93963c5e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java @@ -43,7 +43,7 @@ public void testConstructorWithQuota() { QuotaUsage quotaUsage = new QuotaUsage.Builder(). fileAndDirectoryCount(fileAndDirCount).quota(quota). 
spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build(); - assertEquals(fileAndDirCount, + assertEquals(fileAndDirCount, quotaUsage.getFileAndDirectoryCount(), "getFileAndDirectoryCount"); assertEquals(quota, quotaUsage.getQuota(), "getQuota"); assertEquals(spaceConsumed, @@ -59,7 +59,7 @@ public void testConstructorNoQuota() { QuotaUsage quotaUsage = new QuotaUsage.Builder(). fileAndDirectoryCount(fileAndDirCount). spaceConsumed(spaceConsumed).build(); - assertEquals(fileAndDirCount, + assertEquals(fileAndDirCount, quotaUsage.getFileAndDirectoryCount(), "getFileAndDirectoryCount"); assertEquals(-1, quotaUsage.getQuota(), "getQuota"); assertEquals(spaceConsumed, diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java index be60902160765..89d7419f763d2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java @@ -78,7 +78,7 @@ protected static Path mkdir(FileSystem fs, Path p) throws IOException { protected static void checkTrash(FileSystem trashFs, Path trashRoot, Path path) throws IOException { Path p = Path.mergePaths(trashRoot, path); - assertTrue( trashFs.exists(p), "Could not find file in trash: "+ p); + assertTrue(trashFs.exists(p), "Could not find file in trash: " + p); } // counts how many instances of the file are in the Trash @@ -311,7 +311,7 @@ public static void trashShell(final Configuration conf, final Path base, val = shell.run(args); - assertFalse(trashRootFs.exists(trashRoot), "Expected TrashRoot (" + trashRoot + + assertFalse(trashRootFs.exists(trashRoot), "Expected TrashRoot (" + trashRoot + ") to exist in file system:" + trashRootFs.getUri()); // No new Current should be created assertFalse(fs.exists(myFile)); @@ -395,7 +395,7 @@ public static void trashShell(final Configuration conf, final Path base, System.setOut(stdout); System.setErr(stderr); assertTrue(output.indexOf("Consider using -skipTrash option") != -1 || - output.indexOf("Failed to determine server " + "trash configuration") != -1, + output.indexOf("Failed to determine server " + "trash configuration") != -1, "skipTrash wasn't suggested as remedy to failed rm command" + " or we deleted / even though we could not get server defaults"); } @@ -421,7 +421,7 @@ public static void trashShell(final Configuration conf, final Path base, assertEquals(0, rc, "Expunge should return zero"); assertFalse(trashRootFs.exists(dirToDelete), "old checkpoint format not recognized"); - assertTrue(trashRootFs.exists(dirToKeep), + assertTrue(trashRootFs.exists(dirToKeep), "old checkpoint format directory should not be removed"); } @@ -450,7 +450,7 @@ public static void trashShell(final Configuration conf, final Path base, assertFalse(trashRootFs.exists(oldCheckpoint), "Old checkpoint should be removed"); assertFalse(trashRootFs.exists(recentCheckpoint), "Recent checkpoint should be removed"); assertFalse(trashRootFs.exists(currentFolder), "Current folder should be removed"); - assertEquals(0, trashRootFs.listStatus(trashRoot.getParent()).length, + assertEquals(0, trashRootFs.listStatus(trashRoot.getParent()).length, "Ensure trash folder is empty"); } } @@ -506,7 +506,7 @@ public void testExpungeWithFileSystem() throws Exception { assertFalse(testlfs.exists(oldCheckpoint), "Old checkpoint should be removed"); assertFalse(testlfs.exists(recentCheckpoint), "Recent 
checkpoint should be removed"); assertFalse(testlfs.exists(currentFolder), "Current folder should be removed"); - assertEquals(0, + assertEquals(0, testlfs.listStatus(trashRoot.getParent()).length, "Ensure trash folder is empty"); // Incorrect FileSystem scheme @@ -1014,7 +1014,7 @@ public static void verifyTrashPermission(FileSystem fs, Configuration conf) assertTrue(wrapper.exists(fileInTrash), String.format("File %s is not moved to trash", fileInTrash.toString())); // Verify permission not change - assertTrue(fstat.getPermission().equals(fsPermission), + assertTrue(fstat.getPermission().equals(fsPermission), String.format("Expected file: %s is %s, but actual is %s", fileInTrash.toString(), fsPermission.toString(), @@ -1058,7 +1058,7 @@ private void verifyAuditableTrashEmptier(Trash trash, emptierThread.join(); AuditableTrashPolicy at = (AuditableTrashPolicy) trash.getTrashPolicy(); - assertEquals(expectedNumOfCheckpoints, at.getNumberOfCheckpoints(), + assertEquals(expectedNumOfCheckpoints, at.getNumberOfCheckpoints(), String.format("Expected num of checkpoints is %s, but actual is %s", expectedNumOfCheckpoints, at.getNumberOfCheckpoints())); } catch (InterruptedException e) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java index d3529dcb8d4f9..1760af6839663 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java @@ -22,7 +22,6 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.util.Comparator; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.util.Preconditions; import org.apache.commons.net.ftp.FTP; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java index 5fc74d5d066a4..01c8339a07126 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/permission/TestFsPermission.java @@ -254,7 +254,7 @@ public void testBadUmasks() { FsPermission.getUMask(conf); fail("Shouldn't have been able to parse bad umask"); } catch(IllegalArgumentException iae) { - assertTrue(isCorrectExceptionMessage(iae.getMessage(), b), + assertTrue(isCorrectExceptionMessage(iae.getMessage(), b), "Exception should specify parsing error and invalid umask: " + iae.getMessage()); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java index 21cee11e0f6a2..b2b0923ad492d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java @@ -63,7 +63,7 @@ public class TestSFTPFileSystem { private static final String TEST_SFTP_DIR = "testsftp"; private static final String TEST_ROOT_DIR = GenericTestUtils.getTestDir().getAbsolutePath(); - + private static final String connection = "sftp://user:password@localhost"; private static Path localDir = null; 
   private static FileSystem localFs = null;
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
index d30a2bed1773f..4d6ef8f5e8557 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
@@ -63,29 +63,29 @@ public void setup(@TempDir java.nio.file.Path testFolder) throws IOException {
   @Test
   public void testGetfaclValidations() throws Exception {
     assertFalse(0 == runCommand(new String[] {"-getfacl"}), "getfacl should fail without path");
-    assertFalse(0 == runCommand(new String[] {"-getfacl", path, "extraArg"}),
+    assertFalse(0 == runCommand(new String[] {"-getfacl", path, "extraArg"}),
         "getfacl should fail with extra argument");
   }
 
   @Test
   public void testSetfaclValidations() throws Exception {
-    assertFalse(0 == runCommand(new String[] {"-setfacl", path}),
+    assertFalse(0 == runCommand(new String[] {"-setfacl", path}),
         "setfacl should fail without options");
-    assertFalse(0 == runCommand(new String[] {"-setfacl", "-R", path}),
+    assertFalse(0 == runCommand(new String[] {"-setfacl", "-R", path}),
         "setfacl should fail without options -b, -k, -m, -x or --set");
-    assertFalse(0 == runCommand(new String[] {"-setfacl"}),
+    assertFalse(0 == runCommand(new String[] {"-setfacl"}),
         "setfacl should fail without path");
-    assertFalse(0 == runCommand(new String[] {"-setfacl", "-m", path}),
+    assertFalse(0 == runCommand(new String[] {"-setfacl", "-m", path}),
        "setfacl should fail without aclSpec");
-    assertFalse(0 == runCommand(new String[] {"-setfacl", "-m", path}),
+    assertFalse(0 == runCommand(new String[] {"-setfacl", "-m", path}),
        "setfacl should fail with conflicting options");
-    assertFalse(0 == runCommand(new String[] {"-setfacl", path, "extra"}),
+    assertFalse(0 == runCommand(new String[] {"-setfacl", path, "extra"}),
        "setfacl should fail with extra arguments");
     assertFalse(0 == runCommand(new String[] {"-setfacl", "--set", "default:user::rwx", path, "extra"}),
        "setfacl should fail with extra arguments");
     assertFalse(0 == runCommand(new String[] {"-setfacl", "-x", "user:user1:rwx", path}),
        "setfacl should fail with permissions for -x");
-    assertFalse(0 == runCommand(new String[] {"-setfacl", "-m", "", path}),
+    assertFalse(0 == runCommand(new String[]{"-setfacl", "-m", "", path}),
        "setfacl should fail ACL spec missing");
   }
 
@@ -97,8 +97,8 @@ public void testSetfaclValidationsWithoutPermissions() throws Exception {
     } catch (IllegalArgumentException e) {
     }
     assertTrue(parsedList.size() == 0);
-    assertFalse(0 == runCommand(new String[] { "-setfacl", "-m", "user:user1:",
-        "/path" }), "setfacl should fail with less arguments");
+    assertFalse(0 == runCommand(new String[]{"-setfacl", "-m", "user:user1:",
+        "/path"}), "setfacl should fail with less arguments");
   }
 
   @Test
@@ -164,7 +164,7 @@ public void testLsNoRpcForGetAclStatus() throws Exception {
     conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "stubfs:///");
     conf.setClass("fs.stubfs.impl", StubFileSystem.class, FileSystem.class);
     conf.setBoolean("stubfs.noRpcForGetAclStatus", true);
-    assertEquals(0, ToolRunner.run(conf, new FsShell(), new String[] { "-ls", "/" }),
+    assertEquals(0, ToolRunner.run(conf, new FsShell(), new String[]{"-ls", "/"}),
        "ls must succeed even if getAclStatus RPC does not exist.");
   }
 
@@ -173,7 +173,7 @@ public void testLsAclsUnsupported() throws Exception {
     Configuration conf = new Configuration();
     conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, "stubfs:///");
     conf.setClass("fs.stubfs.impl", StubFileSystem.class, FileSystem.class);
-    assertEquals(0, ToolRunner.run(conf, new FsShell(), new String[] { "-ls", "/" }),
+    assertEquals(0, ToolRunner.run(conf, new FsShell(), new String[]{"-ls", "/"}),
        "ls must succeed even if FileSystem does not implement ACLs.");
   }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
index f116df227f7f0..6d968981cd328 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
@@ -262,9 +262,7 @@ public void testWithStringAndConfForBuggyPath() throws Exception {
   public void checkPathData(String dirString, PathData item) throws Exception {
     assertEquals(fs, item.fs, "checking fs");
     assertEquals(dirString, item.toString(), "checking string");
-    assertEquals(
-        fs.makeQualified(new Path(item.toString())), item.path, "checking path"
-    );
+    assertEquals(fs.makeQualified(new Path(item.toString())), item.path, "checking path");
     assertTrue(item.stat != null, "checking exist");
     assertTrue(item.stat.isDirectory(), "checking isDir");
   }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestXAttrCommands.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestXAttrCommands.java
index a7db9ef768c58..522cbf56de1ac 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestXAttrCommands.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestXAttrCommands.java
@@ -54,22 +54,22 @@ public void cleanUp() throws Exception {
   @Test
   public void testGetfattrValidations() throws Exception {
     errContent.reset();
-    assertFalse(0 == runCommand(new String[] { "-getfattr", "-d"}),
+    assertFalse(0 == runCommand(new String[]{"-getfattr", "-d"}),
        "getfattr should fail without path");
     assertTrue(errContent.toString().contains(" is missing"));
 
     errContent.reset();
-    assertFalse(0 == runCommand(new String[] { "-getfattr", "extra", "-d", "/test"}),
+    assertFalse(0 == runCommand(new String[]{"-getfattr", "extra", "-d", "/test"}),
        "getfattr should fail with extra argument");
     assertTrue(errContent.toString().contains("Too many arguments"));
 
     errContent.reset();
-    assertFalse(0 == runCommand(new String[] { "-getfattr", "/test"}),
+    assertFalse(0 == runCommand(new String[]{"-getfattr", "/test"}),
        "getfattr should fail without \"-n name\" or \"-d\"");
     assertTrue(errContent.toString().contains("Must specify '-n name' or '-d' option"));
 
     errContent.reset();
-    assertFalse(0 == runCommand(new String[] { "-getfattr", "-d", "-e", "aaa", "/test"}),
+    assertFalse(0 == runCommand(new String[]{"-getfattr", "-d", "-e", "aaa", "/test"}),
        "getfattr should fail with invalid encoding");
     assertTrue(errContent.toString().contains("Invalid/unsupported encoding option specified: aaa"));
   }
@@ -77,17 +77,17 @@ public void testGetfattrValidations() throws Exception {
   @Test
   public void testSetfattrValidations() throws Exception {
     errContent.reset();
-    assertFalse(0 == runCommand(new String[] { "-setfattr", "-n", "user.a1" }),
+    assertFalse(0 == runCommand(new String[]{"-setfattr", "-n", "user.a1"}),
        "setfattr should fail without path");
     assertTrue(errContent.toString().contains(" is missing"));
 
     errContent.reset();
-    assertFalse(0 == runCommand(new String[] { "-setfattr", "extra", "-n", "user.a1", "/test"}),
+    assertFalse(0 == runCommand(new String[]{"-setfattr", "extra", "-n", "user.a1", "/test"}),
        "setfattr should fail with extra arguments");
     assertTrue(errContent.toString().contains("Too many arguments"));
 
     errContent.reset();
-    assertFalse(0 == runCommand(new String[] { "-setfattr", "/test"}),
+    assertFalse(0 == runCommand(new String[]{"-setfattr", "/test"}),
        "setfattr should fail without \"-n name\" or \"-x name\"");
     assertTrue(errContent.toString().contains("Must specify '-n name' or '-x name' option"));
   }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestAnd.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestAnd.java
index 74169ea0240fe..2fec2db028c7b 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestAnd.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestAnd.java
@@ -34,7 +34,7 @@
 @Timeout(10)
 public class TestAnd {
-
+
   // test all expressions passing
   @Test
   public void testPass() throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFilterExpression.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFilterExpression.java
index 0738831c50164..692c6db78663c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFilterExpression.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFilterExpression.java
@@ -40,7 +40,7 @@ public class TestFilterExpression {
   private Expression expr;
   private FilterExpression test;
-
+
   @BeforeEach
   public void setup() {
     expr = mock(Expression.class);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java
index 3e444990d66a4..e1b85356fd06a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java
@@ -59,7 +59,7 @@
 @Timeout(10)
 public class TestFind {
-
+
   private static FileSystem mockFs;
   private static Configuration conf;
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestIname.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestIname.java
index 3ed1cb4cd1cc5..bd201abfc3887 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestIname.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestIname.java
@@ -32,7 +32,7 @@ public class TestIname {
   private FileSystem mockFs;
   private Name.Iname name;
-
+
   @BeforeEach
   public void resetMock() throws IOException {
     mockFs = MockFileSystem.setup();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestName.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestName.java
index 0ecbb53e204cb..967caebc81485 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestName.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestName.java
@@ -32,7 +32,7 @@ public class TestName {
   private FileSystem mockFs;
   private Name name;
-
+
   @BeforeEach
   public void resetMock() throws IOException {
     mockFs = MockFileSystem.setup();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint.java
index e59ad32ccd0bc..932689cbed4fa 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint.java
@@ -36,7 +36,7 @@
 @Timeout(10)
 public class TestPrint {
   private FileSystem mockFs;
-
+
   @BeforeEach
   public void resetMock() throws IOException {
     mockFs = MockFileSystem.setup();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint0.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint0.java
index f61f4baea74b8..df4795bf87802 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint0.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestPrint0.java
@@ -35,7 +35,7 @@
 @Timeout(10)
 public class TestPrint0 {
   private FileSystem mockFs;
-
+
   @BeforeEach
   public void resetMock() throws IOException {
     mockFs = MockFileSystem.setup();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java
index 2c10a5448cbbf..4bd9bb9378f2f 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java
@@ -90,8 +90,8 @@ private void assertWriteBlock(DataBlocks.DataBlock dataBlock)
     // Verify the size of data.
     assertEquals(ONE_KB, dataBlock.dataSize(), "Mismatch in data size in block");
     // Verify that no capacity is left in the data block to write more.
-    assertFalse(dataBlock.hasCapacity(1), "Expected the data block to have no capacity to write 1 byte "
-        + "of data");
+    assertFalse(dataBlock.hasCapacity(1),
+        "Expected the data block to have no capacity to write 1 byte of data");
   }
 
   /**
@@ -109,8 +109,8 @@ private void assertToByteArray(DataBlocks.DataBlock dataBlock)
     byte[] bytesWritten = blockUploadData.toByteArray();
     // Verify that we can call toByteArray() more than once and gives the
     // same byte[].
-    assertEquals(bytesWritten, blockUploadData.toByteArray(), "Mismatch in byteArray provided by toByteArray() the second "
-        + "time");
+    assertEquals(bytesWritten, blockUploadData.toByteArray(),
+        "Mismatch in byteArray provided by toByteArray() the second time");
     IOUtils.close(blockUploadData);
     // Verify that after closing blockUploadData, we can't call toByteArray().
     LambdaTestUtils.intercept(IllegalStateException.class,
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java
index c6de6ef2a5a4c..8166201f3a711 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java
@@ -121,22 +121,22 @@ public void testCreateDelete() throws IOException {
     // Create file with recursive dir
     fileContextTestHelper.createFile(fc, "/newDir/foo");
     assertTrue(isFile(fc, new Path("/newDir/foo")));
-    assertTrue(isFile(fcTarget, new Path(chrootedTo,"newDir/foo")));
+    assertTrue(isFile(fcTarget, new Path(chrootedTo, "newDir/foo")));
 
     // Delete the created file
     assertTrue(fc.delete(new Path("/newDir/foo"), false));
     assertFalse(exists(fc, new Path("/newDir/foo")));
-    assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/foo")));
+    assertFalse(exists(fcTarget, new Path(chrootedTo, "newDir/foo")));
 
     // Create file with a 2 component dirs recursively
     fileContextTestHelper.createFile(fc, "/newDir/newDir2/foo");
     assertTrue(isFile(fc, new Path("/newDir/newDir2/foo")));
-    assertTrue(isFile(fcTarget, new Path(chrootedTo,"newDir/newDir2/foo")));
+    assertTrue(isFile(fcTarget, new Path(chrootedTo, "newDir/newDir2/foo")));
 
     // Delete the created file
     assertTrue(fc.delete(new Path("/newDir/newDir2/foo"), false));
     assertFalse(exists(fc, new Path("/newDir/newDir2/foo")));
-    assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/newDir2/foo")));
+    assertFalse(exists(fcTarget, new Path(chrootedTo, "newDir/newDir2/foo")));
   }
@@ -144,21 +144,21 @@ public void testCreateDelete() throws IOException {
   public void testMkdirDelete() throws IOException {
     fc.mkdir(fileContextTestHelper.getTestRootPath(fc, "/dirX"), FileContext.DEFAULT_PERM, false);
     assertTrue(isDir(fc, new Path("/dirX")));
-    assertTrue(isDir(fcTarget, new Path(chrootedTo,"dirX")));
+    assertTrue(isDir(fcTarget, new Path(chrootedTo, "dirX")));
 
     fc.mkdir(fileContextTestHelper.getTestRootPath(fc, "/dirX/dirY"), FileContext.DEFAULT_PERM, false);
     assertTrue(isDir(fc, new Path("/dirX/dirY")));
-    assertTrue(isDir(fcTarget, new Path(chrootedTo,"dirX/dirY")));
+    assertTrue(isDir(fcTarget, new Path(chrootedTo, "dirX/dirY")));
 
     // Delete the created dir
     assertTrue(fc.delete(new Path("/dirX/dirY"), false));
     assertFalse(exists(fc, new Path("/dirX/dirY")));
-    assertFalse(exists(fcTarget, new Path(chrootedTo,"dirX/dirY")));
+    assertFalse(exists(fcTarget, new Path(chrootedTo, "dirX/dirY")));
 
     assertTrue(fc.delete(new Path("/dirX"), false));
     assertFalse(exists(fc, new Path("/dirX")));
-    assertFalse(exists(fcTarget, new Path(chrootedTo,"dirX")));
+    assertFalse(exists(fcTarget, new Path(chrootedTo, "dirX")));
   }
 
   @Test
@@ -167,22 +167,22 @@ public void testRename() throws IOException {
     fileContextTestHelper.createFile(fc, "/newDir/foo");
     fc.rename(new Path("/newDir/foo"), new Path("/newDir/fooBar"));
     assertFalse(exists(fc, new Path("/newDir/foo")));
-    assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/foo")));
-    assertTrue(isFile(fc, fileContextTestHelper.getTestRootPath(fc,"/newDir/fooBar")));
-    assertTrue(isFile(fcTarget, new Path(chrootedTo,"newDir/fooBar")));
+    assertFalse(exists(fcTarget, new Path(chrootedTo, "newDir/foo")));
+    assertTrue(isFile(fc, fileContextTestHelper.getTestRootPath(fc, "/newDir/fooBar")));
+    assertTrue(isFile(fcTarget, new Path(chrootedTo, "newDir/fooBar")));
 
     // Rename a dir
     fc.mkdir(new Path("/newDir/dirFoo"), FileContext.DEFAULT_PERM, false);
     fc.rename(new Path("/newDir/dirFoo"), new Path("/newDir/dirFooBar"));
     assertFalse(exists(fc, new Path("/newDir/dirFoo")));
-    assertFalse(exists(fcTarget, new Path(chrootedTo,"newDir/dirFoo")));
-    assertTrue(isDir(fc, fileContextTestHelper.getTestRootPath(fc,"/newDir/dirFooBar")));
-    assertTrue(isDir(fcTarget, new Path(chrootedTo,"newDir/dirFooBar")));
+    assertFalse(exists(fcTarget, new Path(chrootedTo, "newDir/dirFoo")));
+    assertTrue(isDir(fc, fileContextTestHelper.getTestRootPath(fc, "/newDir/dirFooBar")));
+    assertTrue(isDir(fcTarget, new Path(chrootedTo, "newDir/dirFooBar")));
   }
 
-  /**
+  /*
    * We would have liked renames across file system to fail but
   * Unfortunately there is not way to distinguish the two file systems
   * @throws IOException
@@ -223,17 +223,17 @@ public void testList() throws IOException {
     // Note the the file status paths are the full paths on target
     fs = fileContextTestHelper.containsPath(fcTarget, "foo", dirPaths);
-      assertNotNull(fs);
-      assertTrue(fs.isFile());
+    assertNotNull(fs);
+    assertTrue(fs.isFile());
     fs = fileContextTestHelper.containsPath(fcTarget, "bar", dirPaths);
-      assertNotNull(fs);
-      assertTrue(fs.isFile());
+    assertNotNull(fs);
+    assertTrue(fs.isFile());
     fs = fileContextTestHelper.containsPath(fcTarget, "dirX", dirPaths);
-      assertNotNull(fs);
-      assertTrue(fs.isDirectory());
+    assertNotNull(fs);
+    assertTrue(fs.isDirectory());
     fs = fileContextTestHelper.containsPath(fcTarget, "dirY", dirPaths);
-      assertNotNull(fs);
-      assertTrue(fs.isDirectory());
+    assertNotNull(fs);
+    assertTrue(fs.isDirectory());
   }
 
   @Test
@@ -305,7 +305,7 @@ public void testWorkingDirectory() throws Exception {
   @Test
   public void testResolvePath() throws IOException {
-    assertEquals(chrootedTo, fc.getDefaultFileSystem().resolvePath(new Path("/")));
+    assertEquals(chrootedTo, fc.getDefaultFileSystem().resolvePath(new Path("/")));
     fileContextTestHelper.createFile(fc, "/foo");
     assertEquals(new Path(chrootedTo, "foo"),
         fc.getDefaultFileSystem().resolvePath(new Path("/foo")));
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFSOverloadSchemeCentralMountTableConfig.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFSOverloadSchemeCentralMountTableConfig.java
index 1527e3c1f30d8..eb2b1f3a3fe43 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFSOverloadSchemeCentralMountTableConfig.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFSOverloadSchemeCentralMountTableConfig.java
@@ -25,7 +25,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.junit.Before;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  * Test the TestViewFSOverloadSchemeCentralMountTableConfig with mount-table
@@ -36,7 +36,7 @@ public class TestViewFSOverloadSchemeCentralMountTableConfig
   private Path oldMountTablePath;
   private Path latestMountTablepath;
 
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     super.setUp();
     // Mount table name format: mount-table..xml
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java
index 887b9f8bdc03d..068fd12952e12 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewfsFileStatus.java
@@ -85,7 +85,7 @@ public void testFileStatusSerialziation()
     FileStatus stat = vfs.getFileStatus(path);
     assertEquals(content.length, stat.getLen());
     ContractTestUtils.assertNotErasureCoded(vfs, path);
-    assertTrue(stat.toString().contains("isErasureCoded=false"),
+    assertTrue(stat.toString().contains("isErasureCoded=false"),
        path + " should have erasure coding unset in " + "FileStatus#toString(): " + stat);

From 6a873c9514751b7c7a5f36512806f4a0d6dcaeaa Mon Sep 17 00:00:00 2001
From: fanshilun
Date: Wed, 5 Feb 2025 17:15:02 +0800
Subject: [PATCH 6/6] HADOOP-19415. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-common Part2.

---
 .../hadoop/fs/TestFsShellReturnCode.java      |  2 +-
 .../hadoop/fs/TestLocalDirAllocator.java      | 65 ++++++++++---------
 .../apache/hadoop/fs/TestLocalFileSystem.java |  2 +-
 .../hadoop/fs/store/TestDataBlocks.java       |  4 +-
 4 files changed, 38 insertions(+), 35 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
index 917ec0374aa48..34a6d254940ce 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
@@ -310,7 +310,7 @@ public void testGetWithInvalidSourcePathShouldNotDisplayNullInConsole()
       results = bytes.toString();
       assertEquals(1, run, "Return code should be 1");
       assertTrue(!results.contains("get: null"), " Null is coming when source path is invalid. ");
-      assertTrue(results.contains("get: `" + args[1] + "': No such file or directory"),
+      assertTrue(results.contains("get: `" + args[1] + "': No such file or directory"),
          " Not displaying the intended message ");
     } finally {
       IOUtils.closeStream(out);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
index b8faf8cce3b2c..eb6d251add0c5 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
@@ -129,9 +129,9 @@ private String buildBufferDir(String dir, int i) {
   @Timeout(value = 30)
   @MethodSource("params")
   @ParameterizedTest
-  public void test0(String root, String prefix) throws Exception {
+  public void test0(String paramRoot, String paramPrefix) throws Exception {
     assumeNotWindows();
-    initTestLocalDirAllocator(root, prefix);
+    initTestLocalDirAllocator(paramRoot, paramPrefix);
     String dir0 = buildBufferDir(root, 0);
     String dir1 = buildBufferDir(root, 1);
     try {
@@ -154,9 +154,10 @@ public void test0(String root, String prefix) throws Exception {
   @Timeout(value = 30)
   @MethodSource("params")
   @ParameterizedTest
-  public void testROBufferDirAndRWBufferDir(String root, String prefix) throws Exception {
+  public void testROBufferDirAndRWBufferDir(String paramRoot, String paramPrefix)
+      throws Exception {
     assumeNotWindows();
-    initTestLocalDirAllocator(root, prefix);
+    initTestLocalDirAllocator(paramRoot, paramPrefix);
     String dir1 = buildBufferDir(root, 1);
     String dir2 = buildBufferDir(root, 2);
     try {
@@ -178,9 +179,9 @@ public void testROBufferDirAndRWBufferDir(String root, String prefix) throws Exc
   @Timeout(value = 30)
   @MethodSource("params")
   @ParameterizedTest
-  public void testDirsNotExist(String root, String prefix) throws Exception {
+  public void testDirsNotExist(String paramRoot, String paramPrefix) throws Exception {
     assumeNotWindows();
-    initTestLocalDirAllocator(root, prefix);
+    initTestLocalDirAllocator(paramRoot, paramPrefix);
     String dir2 = buildBufferDir(root, 2);
     String dir3 = buildBufferDir(root, 3);
     try {
@@ -207,9 +208,9 @@ public void testDirsNotExist(String root, String prefix) throws Exception {
   @Timeout(value = 30)
   @MethodSource("params")
   @ParameterizedTest
-  public void testRWBufferDirBecomesRO(String root, String prefix) throws Exception {
+  public void testRWBufferDirBecomesRO(String paramRoot, String paramPrefix) throws Exception {
     assumeNotWindows();
-    initTestLocalDirAllocator(root, prefix);
+    initTestLocalDirAllocator(paramRoot, paramPrefix);
     String dir3 = buildBufferDir(root, 3);
     String dir4 = buildBufferDir(root, 4);
     try {
@@ -249,9 +250,9 @@ public void testRWBufferDirBecomesRO(String root, String prefix) throws Exceptio
   @Timeout(value = 30)
   @MethodSource("params")
   @ParameterizedTest
-  public void testCreateManyFiles(String root, String prefix) throws Exception {
+  public void testCreateManyFiles(String paramRoot, String paramPrefix) throws Exception {
     assumeNotWindows();
-    initTestLocalDirAllocator(root, prefix);
+    initTestLocalDirAllocator(paramRoot, paramPrefix);
     String dir5 = buildBufferDir(root, 5);
     String dir6 = buildBufferDir(root, 6);
     try {
@@ -295,9 +296,9 @@ public void testCreateManyFiles(String root, String prefix) throws Exception {
   @Timeout(value = 30)
   @MethodSource("params")
   @ParameterizedTest
-  public void testCreateManyFilesRandom(String root, String prefix) throws Exception {
+  public void testCreateManyFilesRandom(String paramRoot, String paramPrefix) throws Exception {
     assumeNotWindows();
-    initTestLocalDirAllocator(root, prefix);
+    initTestLocalDirAllocator(paramRoot, paramPrefix);
     final int numDirs = 5;
     final int numTries = 100;
     String[] dirs = new String[numDirs];
@@ -351,8 +352,9 @@ public void testCreateManyFilesRandom(String root, String prefix) throws Excepti
   @Timeout(value = 30)
   @MethodSource("params")
   @ParameterizedTest
-  public void testLocalPathForWriteDirCreation(String root, String prefix) throws IOException {
-    initTestLocalDirAllocator(root, prefix);
+  public void testLocalPathForWriteDirCreation(String paramRoot, String paramPrefix)
+      throws IOException {
+    initTestLocalDirAllocator(paramRoot, paramPrefix);
     String dir0 = buildBufferDir(root, 0);
     String dir1 = buildBufferDir(root, 1);
     try {
@@ -385,8 +387,8 @@ public void testLocalPathForWriteDirCreation(String root, String prefix) throws
   @Timeout(value = 30)
   @MethodSource("params")
   @ParameterizedTest
-  public void testShouldNotthrowNPE(String root, String prefix) throws Exception {
-    initTestLocalDirAllocator(root, prefix);
+  public void testShouldNotthrowNPE(String paramRoot, String paramPrefix) throws Exception {
+    initTestLocalDirAllocator(paramRoot, paramPrefix);
     Configuration conf1 = new Configuration();
     try {
       dirAllocator.getLocalPathForWrite("/test", conf1);
@@ -430,9 +432,9 @@ public void testShouldNotthrowNPE(String root, String prefix) throws Exception {
   @Timeout(value = 30)
   @MethodSource("params")
   @ParameterizedTest
-  public void testNoSideEffects(String root, String prefix) throws IOException {
+  public void testNoSideEffects(String paramRoot, String paramPrefix) throws IOException {
     assumeNotWindows();
-    initTestLocalDirAllocator(root, prefix);
+    initTestLocalDirAllocator(paramRoot, paramPrefix);
     String dir = buildBufferDir(root, 0);
     try {
       conf.set(CONTEXT, dir);
@@ -455,9 +457,9 @@ public void testNoSideEffects(String root, String prefix) throws IOException {
   @Timeout(value = 30)
   @MethodSource("params")
   @ParameterizedTest
-  public void testGetLocalPathToRead(String root, String prefix) throws IOException {
+  public void testGetLocalPathToRead(String paramRoot, String paramPrefix) throws IOException {
     assumeNotWindows();
-    initTestLocalDirAllocator(root, prefix);
+    initTestLocalDirAllocator(paramRoot, paramPrefix);
     String dir = buildBufferDir(root, 0);
     try {
       conf.set(CONTEXT, dir);
@@ -483,9 +485,9 @@ public void testGetLocalPathToRead(String root, String prefix) throws IOExceptio
   @Timeout(value = 30)
   @MethodSource("params")
   @ParameterizedTest
-  public void testGetAllLocalPathsToRead(String root, String prefix) throws IOException {
+  public void testGetAllLocalPathsToRead(String paramRoot, String paramPrefix) throws IOException {
     assumeNotWindows();
-    initTestLocalDirAllocator(root, prefix);
+    initTestLocalDirAllocator(paramRoot, paramPrefix);
     String dir0 = buildBufferDir(root, 0);
     String dir1 = buildBufferDir(root, 1);
     try {
@@ -533,8 +535,8 @@ public void testGetAllLocalPathsToRead(String root, String prefix) throws IOExce
   @Timeout(value = 30)
   @MethodSource("params")
   @ParameterizedTest
-  public void testRemoveContext(String root, String prefix) throws IOException {
-    initTestLocalDirAllocator(root, prefix);
+  public void testRemoveContext(String paramRoot, String paramPrefix) throws IOException {
+    initTestLocalDirAllocator(paramRoot, paramPrefix);
     String dir = buildBufferDir(root, 0);
     try {
       String contextCfgItemName = "application_1340842292563_0004.app.cache.dirs";
@@ -558,9 +560,9 @@ public void testRemoveContext(String root, String prefix) throws IOException {
   @Timeout(value = 30)
   @MethodSource("params")
   @ParameterizedTest
-  public void testGetLocalPathForWriteForInvalidPaths(String root, String prefix)
-      throws Exception {
-    initTestLocalDirAllocator(root, prefix);
+  public void testGetLocalPathForWriteForInvalidPaths(String paramRoot, String paramPrefix)
+      throws Exception {
+    initTestLocalDirAllocator(paramRoot, paramPrefix);
     conf.set(CONTEXT, " ");
     try {
       dirAllocator.getLocalPathForWrite("/test", conf);
@@ -579,8 +581,9 @@ public void testGetLocalPathForWriteForInvalidPaths(String root, String prefix)
   @Timeout(value = 30)
   @MethodSource("params")
   @ParameterizedTest
-  public void testGetLocalPathForWriteForLessSpace(String root, String prefix) throws Exception {
-    initTestLocalDirAllocator(root, prefix);
+  public void testGetLocalPathForWriteForLessSpace(String paramRoot, String paramPrefix)
+      throws Exception {
+    initTestLocalDirAllocator(paramRoot, paramPrefix);
     String dir0 = buildBufferDir(root, 0);
     String dir1 = buildBufferDir(root, 1);
     conf.set(CONTEXT, dir0 + "," + dir1);
@@ -596,8 +599,8 @@ public void testGetLocalPathForWriteForLessSpace(String root, String prefix) thr
   @Timeout(value = 30)
   @MethodSource("params")
   @ParameterizedTest
-  public void testDirectoryRecovery(String root, String prefix) throws Throwable {
-    initTestLocalDirAllocator(root, prefix);
+  public void testDirectoryRecovery(String paramRoot, String paramPrefix) throws Throwable {
+    initTestLocalDirAllocator(paramRoot, paramPrefix);
     String dir0 = buildBufferDir(root, 0);
     String subdir = dir0 + "/subdir1/subdir2";
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
index ce28b827337db..d240929ff77b4 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
@@ -423,7 +423,7 @@ public void testSetTimes() throws Exception {
     long newAccTime = 23456000;
 
     FileStatus status = fileSys.getFileStatus(path);
-    assertTrue(newModTime != status.getModificationTime(),
+    assertTrue(newModTime != status.getModificationTime(),
        "check we're actually changing something");
     assertTrue(newAccTime != status.getAccessTime(),
        "check we're actually changing something");
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java
index 4bd9bb9378f2f..b1e168027ac1f 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/store/TestDataBlocks.java
@@ -90,7 +90,7 @@ private void assertWriteBlock(DataBlocks.DataBlock dataBlock)
     // Verify the size of data.
     assertEquals(ONE_KB, dataBlock.dataSize(), "Mismatch in data size in block");
     // Verify that no capacity is left in the data block to write more.
-    assertFalse(dataBlock.hasCapacity(1),
+    assertFalse(dataBlock.hasCapacity(1),
        "Expected the data block to have no capacity to write 1 byte of data");
   }
 
@@ -109,7 +109,7 @@ private void assertToByteArray(DataBlocks.DataBlock dataBlock)
     byte[] bytesWritten = blockUploadData.toByteArray();
     // Verify that we can call toByteArray() more than once and gives the
     // same byte[].
-    assertEquals(bytesWritten, blockUploadData.toByteArray(),
+    assertEquals(bytesWritten, blockUploadData.toByteArray(),
        "Mismatch in byteArray provided by toByteArray() the second time");
     IOUtils.close(blockUploadData);
     // Verify that after closing blockUploadData, we can't call toByteArray().