diff --git a/hadoop-tools/hadoop-azure/pom.xml b/hadoop-tools/hadoop-azure/pom.xml
index 6c976f19f2428..911aa8d5d4906 100644
--- a/hadoop-tools/hadoop-azure/pom.xml
+++ b/hadoop-tools/hadoop-azure/pom.xml
@@ -214,13 +214,6 @@
provided
-
-
- junit
- junit
- test
-
-
org.apache.hadoop
hadoop-common
@@ -367,11 +360,6 @@
junit-jupiter-params
test
-
- org.junit.platform
- junit-platform-launcher
- test
-
org.junit.vintage
junit-vintage-engine
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/AbstractWasbTestWithTimeout.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/AbstractWasbTestWithTimeout.java
index ad737b55acf81..d6624e8c0c178 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/AbstractWasbTestWithTimeout.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/AbstractWasbTestWithTimeout.java
@@ -28,7 +28,7 @@
import java.util.concurrent.TimeUnit;
-import static org.junit.jupiter.api.Assumptions.assumeTrue;
+import static org.assertj.core.api.Assumptions.assumeThat;
/**
* Base class for any Wasb test with timeouts & named threads.
@@ -69,10 +69,10 @@ protected int getTestTimeoutMillis() {
}
public static void assumeNotNull(Object objects) {
- assumeTrue(objects != null);
+ assumeThat(objects).isNotNull();
}
public static void assumeNotNull(Object objects, String message) {
- assumeTrue(objects != null, message);
+ assumeThat(objects).as(message).isNotNull();
}
}
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestContainerChecks.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestContainerChecks.java
index bc855a7ee6e3c..8110b6d6ae8e3 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestContainerChecks.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestContainerChecks.java
@@ -18,7 +18,7 @@
package org.apache.hadoop.fs.azure;
-import static org.junit.jupiter.api.Assumptions.assumeFalse;
+import static org.assertj.core.api.Assumptions.assumeThat;
import java.io.FileNotFoundException;
import java.util.EnumSet;
@@ -169,7 +169,7 @@ public String call() throws Exception {
@Test
public void testContainerChecksWithSas() throws Exception {
- assumeFalse(runningInSASMode);
+ assumeThat(runningInSASMode).isFalse();
testAccount = AzureBlobStorageTestAccount.create("",
EnumSet.of(CreateOptions.UseSas));
assumeNotNull(testAccount);
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestListPerformance.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestListPerformance.java
index 99e7383e8a175..44d57d1b19f83 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestListPerformance.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestListPerformance.java
@@ -48,7 +48,7 @@
import org.apache.hadoop.fs.azure.integration.AzureTestUtils;
import org.apache.hadoop.fs.contract.ContractTestUtils;
-import static org.junit.jupiter.api.Assumptions.assumeFalse;
+import static org.assertj.core.api.Assumptions.assumeThat;
/**
* Test list performance.
@@ -99,7 +99,7 @@ protected AzureBlobStorageTestAccount createTestAccount() throws Exception {
@Test
public void test_0101_CreateDirectoryWithFiles() throws Exception {
- assumeFalse(fs.exists(TEST_DIR_PATH), "Test path exists; skipping");
+ assumeThat(fs.exists(TEST_DIR_PATH)).as("Test path exists; skipping").isFalse();
ExecutorService executorService = Executors.newFixedThreadPool(threads);
CloudBlobContainer container = testAccount.getRealContainer();
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestNativeAzureFileSystemContractEmulator.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestNativeAzureFileSystemContractEmulator.java
index 4f76926de2d48..2cc7592dc1f9b 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestNativeAzureFileSystemContractEmulator.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestNativeAzureFileSystemContractEmulator.java
@@ -18,7 +18,7 @@
package org.apache.hadoop.fs.azure;
-import static org.junit.jupiter.api.Assumptions.assumeTrue;
+import static org.assertj.core.api.Assumptions.assumeThat;
import org.apache.hadoop.fs.FileSystemContractBaseTest;
import org.apache.hadoop.fs.Path;
@@ -50,7 +50,9 @@ public void setUp() throws Exception {
if (testAccount != null) {
fs = testAccount.getFileSystem();
}
- assumeTrue(fs != null);
+ assumeThat(fs)
+ .as("FileSystem must not be null for this test")
+ .isNotNull();
basePath = fs.makeQualified(
AzureTestUtils.createTestPath(
new Path("ITestNativeAzureFileSystemContractEmulator")));
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestNativeAzureFileSystemContractLive.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestNativeAzureFileSystemContractLive.java
index b8f535ed13ed5..702b9c6375ce6 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestNativeAzureFileSystemContractLive.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestNativeAzureFileSystemContractLive.java
@@ -18,7 +18,7 @@
package org.apache.hadoop.fs.azure;
-import static org.junit.jupiter.api.Assumptions.assumeTrue;
+import static org.assertj.core.api.Assumptions.assumeThat;
import org.apache.hadoop.fs.FileSystemContractBaseTest;
import org.apache.hadoop.fs.Path;
@@ -53,7 +53,7 @@ public void setUp() throws Exception {
if (testAccount != null) {
fs = testAccount.getFileSystem();
}
- assumeTrue(fs != null);
+ assumeThat(fs).isNotNull();
basePath = fs.makeQualified(
AzureTestUtils.createTestPath(
new Path("NativeAzureFileSystemContractLive")));
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestNativeAzureFileSystemContractPageBlobLive.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestNativeAzureFileSystemContractPageBlobLive.java
index 171c610140d91..5f0e951142b7c 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestNativeAzureFileSystemContractPageBlobLive.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestNativeAzureFileSystemContractPageBlobLive.java
@@ -18,7 +18,7 @@
package org.apache.hadoop.fs.azure;
-import static org.junit.jupiter.api.Assumptions.assumeTrue;
+import static org.assertj.core.api.Assumptions.assumeThat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystemContractBaseTest;
@@ -63,7 +63,7 @@ private AzureBlobStorageTestAccount createTestAccount()
@BeforeEach
public void setUp() throws Exception {
testAccount = createTestAccount();
- assumeTrue(testAccount != null);
+ assumeThat(testAccount).isNotNull();
fs = testAccount.getFileSystem();
basePath = AzureTestUtils.pathForTests(fs, "filesystemcontractpageblob");
}
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestWasbRemoteCallHelper.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestWasbRemoteCallHelper.java
index c69233961fd80..f1e8fdd181399 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestWasbRemoteCallHelper.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestWasbRemoteCallHelper.java
@@ -47,7 +47,7 @@
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.times;
-import static org.junit.jupiter.api.Assumptions.assumeTrue;
+import static org.assertj.core.api.Assumptions.assumeThat;
/**
* Test class to hold all WasbRemoteCallHelper tests.
@@ -72,8 +72,9 @@ public void setUp() throws Exception {
boolean useSecureMode = fs.getConf().getBoolean(KEY_USE_SECURE_MODE, false);
boolean useAuthorization = fs.getConf()
.getBoolean(NativeAzureFileSystem.KEY_AZURE_AUTHORIZATION, false);
- assumeTrue(useSecureMode && useAuthorization,
- "Test valid when both SecureMode and Authorization are enabled .. skipping");
+ assumeThat(useSecureMode && useAuthorization)
+ .as("Test valid when both SecureMode and Authorization are enabled .. skipping")
+ .isTrue();
}
/**
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestWasbUriAndConfiguration.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestWasbUriAndConfiguration.java
index b78f509886c51..08122f966b905 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestWasbUriAndConfiguration.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/ITestWasbUriAndConfiguration.java
@@ -21,7 +21,7 @@
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY;
import static org.apache.hadoop.fs.azure.NativeAzureFileSystem.RETURN_URI_AS_CANONICAL_SERVICE_NAME_PROPERTY_NAME;
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
-import static org.junit.jupiter.api.Assumptions.assumeFalse;
+import static org.assertj.core.api.Assumptions.assumeThat;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
@@ -132,7 +132,7 @@ public void testConnectUsingKey() throws Exception {
@Test
public void testConnectUsingSAS() throws Exception {
- assumeFalse(runningInSASMode);
+ assumeThat(runningInSASMode).isFalse();
// Create the test account with SAS credentials.
testAccount = AzureBlobStorageTestAccount.create("",
EnumSet.of(CreateOptions.UseSas, CreateOptions.CreateContainer));
@@ -148,7 +148,7 @@ public void testConnectUsingSAS() throws Exception {
@Test
public void testConnectUsingSASReadonly() throws Exception {
- assumeFalse(runningInSASMode);
+ assumeThat(runningInSASMode).isFalse();
// Create the test account with SAS credentials.
testAccount = AzureBlobStorageTestAccount.create("", EnumSet.of(
CreateOptions.UseSas, CreateOptions.CreateContainer,
@@ -378,7 +378,7 @@ public void testDefaultKeyProvider() throws Exception {
public void testCredsFromCredentialProvider(@TempDir java.nio.file.Path tempDir)
throws Exception {
- assumeFalse(runningInSASMode);
+ assumeThat(runningInSASMode).isFalse();
String account = "testacct";
String key = "testkey";
// set up conf to have a cred provider
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemAuthorization.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemAuthorization.java
index 37a4711d8a48c..6cc6903d4930d 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemAuthorization.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemAuthorization.java
@@ -38,7 +38,6 @@
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.LambdaTestUtils;
-import org.apache.hadoop.util.StringUtils;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -48,7 +47,7 @@
import static org.apache.hadoop.fs.azure.AzureNativeFileSystemStore.KEY_USE_SECURE_MODE;
import static org.apache.hadoop.fs.azure.CachingAuthorizer.KEY_AUTH_SERVICE_CACHING_ENABLE;
import static org.apache.hadoop.fs.contract.ContractTestUtils.*;
-import static org.junit.jupiter.api.Assumptions.assumeTrue;
+import static org.assertj.core.api.Assumptions.assumeThat;
/**
* Test class to hold all WASB authorization tests.
@@ -92,8 +91,9 @@ public void setUp() throws Exception {
super.setUp();
boolean useSecureMode = fs.getConf().getBoolean(KEY_USE_SECURE_MODE, false);
boolean useAuthorization = fs.getConf().getBoolean(NativeAzureFileSystem.KEY_AZURE_AUTHORIZATION, false);
- assumeTrue((useSecureMode && useAuthorization),
- "Test valid when both SecureMode and Authorization are enabled .. skipping");
+ assumeThat((useSecureMode && useAuthorization))
+ .as("Test valid when both SecureMode and Authorization are enabled .. skipping")
+ .isTrue();
authorizer = new MockWasbAuthorizerImpl(fs);
authorizer.init(fs.getConf());
@@ -1544,8 +1544,9 @@ public void testSetOwnerSucceedsForAuthorisedUsers() throws Throwable {
ContractTestUtils.assertPathExists(fs, "test path does not exist", testPath);
String owner = fs.getFileStatus(testPath).getOwner();
- assumeTrue(!StringUtils.equalsIgnoreCase(owner, newOwner),
- "changing owner requires original and new owner to be different");
+ assumeThat(owner)
+ .as("changing owner requires original and new owner to be different")
+ .isNotEqualToIgnoringCase(newOwner);
authorisedUser.doAs(new PrivilegedExceptionAction() {
@Override
@@ -1587,8 +1588,9 @@ public void testSetOwnerSucceedsForAnyUserWhenWildCardIsSpecified() throws Throw
ContractTestUtils.assertPathExists(fs, "test path does not exist", testPath);
String owner = fs.getFileStatus(testPath).getOwner();
- assumeTrue(!StringUtils.equalsIgnoreCase(owner, newOwner),
- "changing owner requires original and new owner to be different");
+ assumeThat(owner)
+ .as("changing owner requires original and new owner to be different")
+ .isNotEqualToIgnoringCase(newOwner);
user.doAs(new PrivilegedExceptionAction() {
@Override
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/integration/AzureTestUtils.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/integration/AzureTestUtils.java
index 69dc335bd8d6a..4a71c78a9813f 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/integration/AzureTestUtils.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/integration/AzureTestUtils.java
@@ -27,7 +27,6 @@
import java.util.List;
import org.junit.jupiter.api.Assertions;
-import org.junit.Assume;
import org.opentest4j.TestAbortedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -42,8 +41,6 @@
import org.apache.hadoop.fs.azure.AzureBlobStorageTestAccount;
import org.apache.hadoop.fs.azure.NativeAzureFileSystem;
-import static org.junit.jupiter.api.Assumptions.assumeTrue;
-
import static org.apache.hadoop.fs.azure.AzureBlobStorageTestAccount.WASB_ACCOUNT_NAME_DOMAIN_SUFFIX_REGEX;
import static org.apache.hadoop.fs.azure.AzureBlobStorageTestAccount.WASB_TEST_ACCOUNT_NAME_WITH_DOMAIN;
import static org.apache.hadoop.fs.azure.integration.AzureTestConstants.*;
@@ -51,6 +48,7 @@
import static org.apache.hadoop.test.MetricsAsserts.getLongCounter;
import static org.apache.hadoop.test.MetricsAsserts.getLongGauge;
import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
+import static org.assertj.core.api.Assumptions.assumeThat;
/**
* Utilities for the Azure tests. Based on {@code S3ATestUtils}, so
@@ -394,7 +392,7 @@ public static void assume(String message, boolean condition) {
if (!condition) {
LOG.warn(message);
}
- Assume.assumeTrue(message, condition);
+ assumeThat(condition).as(message).isTrue();
}
/**
@@ -495,8 +493,10 @@ public static String verifyWasbAccountNameInConfig(Configuration conf) {
if (accountName == null) {
accountName = conf.get(WASB_TEST_ACCOUNT_NAME_WITH_DOMAIN);
}
- assumeTrue(accountName != null && !accountName.endsWith(WASB_ACCOUNT_NAME_DOMAIN_SUFFIX_REGEX),
- "Account for WASB is missing or it is not in correct format");
+ assumeThat(accountName)
+ .as("Account for WASB is missing or it is not in correct format")
+ .isNotNull()
+ .doesNotEndWith(WASB_ACCOUNT_NAME_DOMAIN_SUFFIX_REGEX);
return accountName;
}
@@ -550,7 +550,8 @@ public static String readStringFromStream(FSDataInputStream inputStream) throws
* Assume hierarchical namespace is disabled for test account.
*/
public static void assumeNamespaceDisabled(Configuration conf) {
- Assume.assumeFalse("Hierarchical namespace is enabled for test account.",
- conf.getBoolean(FS_AZURE_TEST_NAMESPACE_ENABLED_ACCOUNT, false));
+ assumeThat(conf.getBoolean(FS_AZURE_TEST_NAMESPACE_ENABLED_ACCOUNT, false))
+ .as("Hierarchical namespace is enabled for test account.")
+ .isFalse();
}
}
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/integration/ITestAzureHugeFiles.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/integration/ITestAzureHugeFiles.java
index 8939bfec66d77..d70412aca1f83 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/integration/ITestAzureHugeFiles.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/integration/ITestAzureHugeFiles.java
@@ -43,7 +43,7 @@
import static org.apache.hadoop.fs.azure.integration.AzureTestUtils.*;
import static org.apache.hadoop.fs.contract.ContractTestUtils.*;
-import static org.junit.jupiter.api.Assumptions.assumeTrue;
+import static org.assertj.core.api.Assumptions.assumeThat;
/**
@@ -153,8 +153,10 @@ FileStatus assumeHugeFileExists() throws IOException {
assertPathExists(getFileSystem(), "huge file not created", hugefile);
try {
FileStatus status = getFileSystem().getFileStatus(hugefile);
- assumeTrue(status.isFile(), "Not a file: " + status);
- assumeTrue(status.getLen() > 0, "File " + hugefile + " is empty");
+ assumeThat(status.isFile()).as("Not a file: " + status).isTrue();
+ assumeThat(status.getLen())
+ .as("File " + hugefile + " is empty")
+ .isPositive();
return status;
} catch (FileNotFoundException e) {
skip("huge file not created: " + hugefile);
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsIntegrationTest.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsIntegrationTest.java
index 9be4998cb8217..27585f9fa5986 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsIntegrationTest.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsIntegrationTest.java
@@ -30,9 +30,8 @@
import java.util.concurrent.Future;
import org.assertj.core.api.Assertions;
-import org.junit.After;
-import org.junit.Assume;
-import org.junit.Before;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -76,7 +75,7 @@
import static org.apache.hadoop.fs.azurebfs.contracts.services.AzureServiceErrorCode.FILE_SYSTEM_NOT_FOUND;
import static org.apache.hadoop.fs.azurebfs.constants.TestConfigurationKeys.*;
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
-import static org.junit.Assume.assumeTrue;
+import static org.assertj.core.api.Assumptions.assumeThat;
/**
* Base for AzureBlobFileSystem Integration tests.
@@ -114,8 +113,9 @@ protected AbstractAbfsIntegrationTest() throws Exception {
// check if accountName is set using different config key
accountName = rawConfig.get(FS_AZURE_ABFS_ACCOUNT_NAME);
}
- assumeTrue("Not set: " + FS_AZURE_ABFS_ACCOUNT_NAME,
- accountName != null && !accountName.isEmpty());
+ assumeThat(accountName)
+ .as("Not set: " + FS_AZURE_ABFS_ACCOUNT_NAME)
+ .isNotBlank();
final String abfsUrl = this.getFileSystemName() + "@" + this.getAccountName();
URI defaultUri = null;
@@ -191,7 +191,7 @@ public TracingContext getTestTracingContext(AzureBlobFileSystem fs,
FSOperationType.TEST_OP, needsPrimaryReqId, format, null);
}
- @Before
+ @BeforeEach
public void setup() throws Exception {
//Create filesystem first to make sure getWasbFileSystem() can return an existing filesystem.
createFileSystem();
@@ -225,7 +225,7 @@ public void setup() throws Exception {
}
}
- @After
+ @AfterEach
public void teardown() throws Exception {
try {
IOUtils.closeStream(wasb);
@@ -569,23 +569,24 @@ protected AbfsOutputStream createAbfsOutputStreamWithFlushEnabled(
*/
protected long assertAbfsStatistics(AbfsStatistic statistic,
long expectedValue, Map metricMap) {
- assertEquals("Mismatch in " + statistic.getStatName(), expectedValue,
- (long) metricMap.get(statistic.getStatName()));
+ assertEquals(expectedValue, (long) metricMap.get(statistic.getStatName()),
+ "Mismatch in " + statistic.getStatName());
return expectedValue;
}
protected void assumeValidTestConfigPresent(final Configuration conf, final String key) {
String configuredValue = conf.get(accountProperty(key, accountName),
conf.get(key, ""));
- Assume.assumeTrue(String.format("Missing Required Test Config: %s.", key),
- !configuredValue.isEmpty());
+ assumeThat(configuredValue)
+ .as(String.format("Missing Required Test Config: %s.", key))
+ .isNotEmpty();
}
protected void assumeValidAuthConfigsPresent() {
final AuthType currentAuthType = getAuthType();
- Assume.assumeFalse(
- "SAS Based Authentication Not Allowed For Integration Tests",
- currentAuthType == AuthType.SAS);
+ assumeThat(currentAuthType).
+ as("SAS Based Authentication Not Allowed For Integration Tests").
+ isNotEqualTo(AuthType.SAS);
if (currentAuthType == AuthType.SharedKey) {
assumeValidTestConfigPresent(getRawConfiguration(), FS_AZURE_ACCOUNT_KEY);
} else {
@@ -616,7 +617,7 @@ public AbfsServiceType getIngressServiceType() {
* @param path path to create. Can be relative or absolute.
*/
protected void createAzCopyFolder(Path path) throws Exception {
- Assume.assumeTrue(getAbfsServiceType() == AbfsServiceType.BLOB);
+ assumeThat(getAbfsServiceType()).isEqualTo(AbfsServiceType.BLOB);
assumeValidTestConfigPresent(getRawConfiguration(), FS_AZURE_TEST_FIXED_SAS_TOKEN);
String sasToken = getRawConfiguration().get(FS_AZURE_TEST_FIXED_SAS_TOKEN);
AzcopyToolHelper azcopyHelper = AzcopyToolHelper.getInstance(sasToken);
@@ -628,7 +629,7 @@ protected void createAzCopyFolder(Path path) throws Exception {
* @param path path to create. Can be relative or absolute.
*/
protected void createAzCopyFile(Path path) throws Exception {
- Assume.assumeTrue(getAbfsServiceType() == AbfsServiceType.BLOB);
+ assumeThat(getAbfsServiceType()).isEqualTo(AbfsServiceType.BLOB);
assumeValidTestConfigPresent(getRawConfiguration(), FS_AZURE_TEST_FIXED_SAS_TOKEN);
String sasToken = getRawConfiguration().get(FS_AZURE_TEST_FIXED_SAS_TOKEN);
AzcopyToolHelper azcopyHelper = AzcopyToolHelper.getInstance(sasToken);
@@ -646,8 +647,9 @@ private String getAzcopyAbsolutePath(Path path) throws IOException {
* Otherwise, the test will be skipped.
*/
protected void assumeBlobServiceType() {
- Assume.assumeTrue("Blob service type is required for this test",
- getAbfsServiceType() == AbfsServiceType.BLOB);
+ assumeThat(getAbfsServiceType()).
+ as("Blob service type is required for this test").
+ isEqualTo(AbfsServiceType.BLOB);
}
/**
@@ -655,8 +657,9 @@ protected void assumeBlobServiceType() {
* Otherwise, the test will be skipped.
*/
protected void assumeDfsServiceType() {
- Assume.assumeTrue("DFS service type is required for this test",
- getAbfsServiceType() == AbfsServiceType.DFS);
+ assumeThat(getAbfsServiceType())
+ .as("DFS service type is required for this test")
+ .isEqualTo(AbfsServiceType.DFS);
}
/**
@@ -674,7 +677,7 @@ protected void assumeHnsEnabled() throws IOException {
* @throws IOException if an error occurs while checking the account type.
*/
protected void assumeHnsEnabled(String errorMessage) throws IOException {
- Assume.assumeTrue(errorMessage, getIsNamespaceEnabled(getFileSystem()));
+ assumeThat(getIsNamespaceEnabled(getFileSystem())).as(errorMessage).isTrue();
}
/**
@@ -692,7 +695,7 @@ protected void assumeHnsDisabled() throws IOException {
* @throws IOException if an error occurs while checking the account type.
*/
protected void assumeHnsDisabled(String message) throws IOException {
- Assume.assumeFalse(message, getIsNamespaceEnabled(getFileSystem()));
+ assumeThat(getIsNamespaceEnabled(getFileSystem())).as(message).isFalse();
}
/**
@@ -755,19 +758,20 @@ protected void checkFuturesForExceptions(List> futures, int exceptionV
protected void assumeRecoveryThroughClientTransactionID(boolean isCreate)
throws IOException {
// Assumes that recovery through client transaction ID is enabled.
- Assume.assumeTrue("Recovery through client transaction ID is not enabled",
- getConfiguration().getIsClientTransactionIdEnabled());
+ assumeThat(getConfiguration().getIsClientTransactionIdEnabled())
+ .as("Recovery through client transaction ID is not enabled")
+ .isTrue();
// Assumes that service type is DFS.
assumeDfsServiceType();
// Assumes that namespace is enabled for the given AzureBlobFileSystem.
assumeHnsEnabled();
if (isCreate) {
// Assume that create client is DFS client.
- Assume.assumeTrue("Ingress service type is not DFS",
- AbfsServiceType.DFS.equals(getIngressServiceType()));
+ assumeThat(AbfsServiceType.DFS.equals(getIngressServiceType()))
+ .as("Ingress service type is not DFS")
+ .isTrue();
// Assume that append blob is not enabled in DFS client.
- Assume.assumeFalse("Append blob is enabled in DFS client",
- isAppendBlobEnabled());
+ assumeThat(isAppendBlobEnabled()).as("Append blob is enabled in DFS client").isFalse();
}
}
}
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsScaleTest.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsScaleTest.java
index 14c9bff7bf8d2..d8286ecac2e34 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsScaleTest.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsScaleTest.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.fs.azurebfs;
+import org.junit.jupiter.api.BeforeEach;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -44,6 +45,7 @@ protected int getTestTimeoutMillis() {
return AzureTestConstants.SCALE_TEST_TIMEOUT_MILLIS;
}
+ @BeforeEach
@Override
public void setup() throws Exception {
super.setup();
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsTestWithTimeout.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsTestWithTimeout.java
index 0485422871ecc..2b61ce750fda2 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsTestWithTimeout.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/AbstractAbfsTestWithTimeout.java
@@ -18,16 +18,17 @@
package org.apache.hadoop.fs.azurebfs;
import java.io.IOException;
+import java.util.concurrent.TimeUnit;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Rule;
-import org.junit.rules.TestName;
-import org.junit.rules.Timeout;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Timeout;
+import org.junit.jupiter.api.extension.RegisterExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.apache.hadoop.test.TestName;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
@@ -37,27 +38,22 @@
* Base class for any ABFS test with timeouts & named threads.
* This class does not attempt to bind to Azure.
*/
-public class AbstractAbfsTestWithTimeout extends Assert {
+@Timeout(value = TEST_TIMEOUT, unit = TimeUnit.MILLISECONDS)
+public class AbstractAbfsTestWithTimeout extends Assertions {
private static final Logger LOG =
LoggerFactory.getLogger(AbstractAbfsTestWithTimeout.class);
/**
* The name of the current method.
*/
- @Rule
+ @RegisterExtension
public TestName methodName = new TestName();
- /**
- * Set the timeout for every test.
- * This is driven by the value returned by {@link #getTestTimeoutMillis()}.
- */
- @Rule
- public Timeout testTimeout = new Timeout(getTestTimeoutMillis());
/**
* Name the junit thread for the class. This will overridden
* before the individual test methods are run.
*/
- @BeforeClass
+ @BeforeAll
public static void nameTestThread() {
Thread.currentThread().setName("JUnit");
}
@@ -65,7 +61,7 @@ public static void nameTestThread() {
/**
* Name the thread to the current test method.
*/
- @Before
+ @BeforeEach
public void nameThread() {
Thread.currentThread().setName("JUnit-" + methodName.getMethodName());
}
@@ -110,15 +106,17 @@ protected boolean validateContent(AzureBlobFileSystem fs, Path path,
while (valueOfContentAtPos != -1 && pos < lenOfOriginalByteArray) {
if (originalByteArray[pos] != valueOfContentAtPos) {
- assertEquals("Mismatch in content validation at position {}", pos,
- originalByteArray[pos], valueOfContentAtPos);
+ assertEquals(
+ originalByteArray[pos],
+ valueOfContentAtPos,
+ String.format("Mismatch in content validation at position %d", pos));
return false;
}
valueOfContentAtPos = (byte) in.read();
pos++;
}
if (valueOfContentAtPos != -1) {
- assertEquals("Expected end of file", -1, valueOfContentAtPos);
+ assertEquals(-1, valueOfContentAtPos, "Expected end of file");
return false;
}
return true;
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestABFSJceksFiltering.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestABFSJceksFiltering.java
index e1b6b39521acd..da32419f6aaab 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestABFSJceksFiltering.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestABFSJceksFiltering.java
@@ -18,7 +18,7 @@
package org.apache.hadoop.fs.azurebfs;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.apache.hadoop.security.alias.CredentialProviderFactory;
import org.apache.hadoop.conf.Configuration;
@@ -35,7 +35,7 @@ public void testIncompatibleCredentialProviderIsExcluded() throws Exception {
rawConfig.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
"jceks://abfs@a@b.c.d/tmp/a.jceks,jceks://file/tmp/secret.jceks");
try (AzureBlobFileSystem fs = (AzureBlobFileSystem) FileSystem.get(rawConfig)) {
- assertNotNull("filesystem", fs);
+ assertNotNull(fs, "filesystem");
String providers = fs.getConf().get(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH);
assertEquals("jceks://file/tmp/secret.jceks", providers);
}
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsClient.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsClient.java
index c180689b267ab..c74d1bf97d5c9 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsClient.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsClient.java
@@ -27,10 +27,9 @@
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
-import org.assertj.core.api.Assertions;
-import org.junit.Assert;
-import org.junit.Ignore;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -43,6 +42,7 @@
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY;
import static org.apache.hadoop.fs.azurebfs.constants.TestConfigurationKeys.FS_AZURE_ACCOUNT_KEY;
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
+import static org.assertj.core.api.Assertions.assertThat;
/**
* Test continuation token which has equal sign.
@@ -55,7 +55,7 @@ public ITestAbfsClient() throws Exception {
super();
}
- @Ignore("HADOOP-16845: Invalid continuation tokens are ignored by the ADLS "
+ @Disabled("HADOOP-16845: Invalid continuation tokens are ignored by the ADLS "
+ "Gen2 service, so we are disabling this test until the service is fixed.")
@Test
public void testContinuationTokenHavingEqualSign() throws Exception {
@@ -66,13 +66,13 @@ public void testContinuationTokenHavingEqualSign() throws Exception {
AbfsRestOperation op = abfsClient
.listPath("/", true, LIST_MAX_RESULTS, "===========",
getTestTracingContext(fs, true), null).getOp();
- Assert.assertTrue(false);
+ Assertions.assertTrue(false);
} catch (AbfsRestOperationException ex) {
- Assert.assertEquals("InvalidQueryParameterValue", ex.getErrorCode().getErrorCode());
+ Assertions.assertEquals("InvalidQueryParameterValue", ex.getErrorCode().getErrorCode());
}
}
- @Ignore("Enable this to verify the log warning message format for HostNotFoundException")
+ @Disabled("Enable this to verify the log warning message format for HostNotFoundException")
@Test
public void testUnknownHost() throws Exception {
// When hitting hostName not found exception, the retry will take about 14 mins until failed.
@@ -113,7 +113,7 @@ public void testListPathWithValidListMaxResultsValues()
if (continuationToken == null) {
// Listing is complete and number of objects should be same as expected
- Assertions.assertThat(list)
+ assertThat(list)
.describedAs("AbfsClient.listPath() should return %d items"
+ " when listMaxResults is %d, directory contains %d items and "
+ "listing is complete",
@@ -121,7 +121,7 @@ public void testListPathWithValidListMaxResultsValues()
.hasSize(expectedListResultsSize);
} else {
// Listing is incomplete and number of objects can be less than expected
- Assertions.assertThat(list)
+ assertThat(list)
.describedAs("AbfsClient.listPath() should return %d items"
+ " or less when listMaxResults is %d, directory contains"
+ " %d items and listing is incomplete",
@@ -148,7 +148,7 @@ public void testListPathWithValueGreaterThanServerMaximum()
if (continuationToken == null) {
// Listing is complete and number of objects should be same as expected
- Assertions.assertThat(list)
+ assertThat(list)
.describedAs("AbfsClient.listPath() should return %d items"
+ " when listMaxResults is %d directory contains %d items and "
+ "listing is complete", LIST_MAX_RESULTS_SERVER,
@@ -156,7 +156,7 @@ public void testListPathWithValueGreaterThanServerMaximum()
.hasSize(LIST_MAX_RESULTS_SERVER);
} else {
// Listing is incomplete and number of objects can be less than expected
- Assertions.assertThat(list)
+ assertThat(list)
.describedAs("AbfsClient.listPath() should return %d items"
+ " or less when listMaxResults is %d, directory contains"
+ " %d items and listing is complete", LIST_MAX_RESULTS_SERVER,
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsCustomEncryption.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsCustomEncryption.java
index 45de7b3d2348e..5ace54d909488 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsCustomEncryption.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsCustomEncryption.java
@@ -30,9 +30,8 @@
import org.assertj.core.api.Assertions;
import org.assertj.core.api.Assumptions;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -63,6 +62,8 @@
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.LambdaTestUtils;
import org.apache.hadoop.util.Lists;
+import org.junit.jupiter.params.ParameterizedClass;
+import org.junit.jupiter.params.provider.MethodSource;
import static org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants.CPK_IN_NON_HNS_ACCOUNT_ERROR_MESSAGE;
import static org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants.EMPTY_STRING;
@@ -85,7 +86,8 @@
import static org.apache.hadoop.fs.permission.AclEntryType.USER;
import static org.apache.hadoop.fs.permission.FsAction.ALL;
-@RunWith(Parameterized.class)
+@ParameterizedClass(name="{0} mode, {2}")
+@MethodSource("params")
public class ITestAbfsCustomEncryption extends AbstractAbfsIntegrationTest {
public static final String SERVER_FILE_CONTENT = "123";
@@ -97,43 +99,33 @@ public class ITestAbfsCustomEncryption extends AbstractAbfsIntegrationTest {
private List fileSystemsOpenedInTest = new ArrayList<>();
// Encryption type used by filesystem while creating file
- @Parameterized.Parameter
public EncryptionType fileEncryptionType;
// Encryption type used by filesystem to call different operations
- @Parameterized.Parameter(1)
public EncryptionType requestEncryptionType;
- @Parameterized.Parameter(2)
public FSOperationType operation;
- @Parameterized.Parameter(3)
public boolean responseHeaderServerEnc;
- @Parameterized.Parameter(4)
public boolean responseHeaderReqServerEnc;
- @Parameterized.Parameter(5)
public boolean isExceptionCase;
/**
* Boolean value to indicate that the server response would have header related
* to CPK and the test would need to assert its value.
*/
- @Parameterized.Parameter(6)
public boolean isCpkResponseHdrExpected;
/**
* Boolean value to indicate that the server response would have fields related
* to CPK and the test would need to assert its value.
*/
- @Parameterized.Parameter(7)
public Boolean isCpkResponseKeyExpected = false;
- @Parameterized.Parameter(8)
public Boolean fileSystemListStatusResultToBeUsedForOpeningFile = false;
- @Parameterized.Parameters(name = "{0} mode, {2}")
public static Iterable