diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/DefaultS3ClientFactory.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/DefaultS3ClientFactory.java
index 441ae70803afb..e292c687878bd 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/DefaultS3ClientFactory.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/DefaultS3ClientFactory.java
@@ -58,8 +58,9 @@
import static org.apache.hadoop.fs.s3a.Constants.AWS_S3_CENTRAL_REGION;
import static org.apache.hadoop.fs.s3a.Constants.EXPERIMENTAL_AWS_INTERNAL_THROTTLING;
import static org.apache.hadoop.fs.s3a.Constants.EXPERIMENTAL_AWS_INTERNAL_THROTTLING_DEFAULT;
-import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
+import static org.apache.hadoop.fs.s3a.S3AUtils.getEncryptionAlgorithm;
+import static org.apache.hadoop.fs.s3a.S3AUtils.getS3EncryptionKey;
import static org.apache.hadoop.fs.s3a.S3AUtils.translateException;
/**
@@ -96,6 +97,9 @@ public class DefaultS3ClientFactory extends Configured
/** Exactly once log to inform about ignoring the AWS-SDK Warnings for CSE. */
private static final LogExactlyOnce IGNORE_CSE_WARN = new LogExactlyOnce(LOG);
+ /** Bucket name. */
+ private String bucket;
+
/**
* Create the client by preparing the AwsConf configuration
* and then invoking {@code buildAmazonS3Client()}.
@@ -105,9 +109,10 @@ public AmazonS3 createS3Client(
final URI uri,
final S3ClientCreationParameters parameters) throws IOException {
Configuration conf = getConf();
+ bucket = uri.getHost();
final ClientConfiguration awsConf = S3AUtils
.createAwsConf(conf,
- uri.getHost(),
+ bucket,
Constants.AWS_SERVICE_IDENTIFIER_S3);
// add any headers
parameters.getHeaders().forEach((h, v) ->
@@ -126,10 +131,13 @@ public AmazonS3 createS3Client(
awsConf.setUserAgentSuffix(parameters.getUserAgentSuffix());
}
+ // Get the encryption method for this bucket.
+ S3AEncryptionMethods encryptionMethods =
+ getEncryptionAlgorithm(bucket, conf);
try {
- if (S3AEncryptionMethods.getMethod(S3AUtils.
- lookupPassword(conf, S3_ENCRYPTION_ALGORITHM, null))
- .equals(S3AEncryptionMethods.CSE_KMS)) {
+ // If CSE is enabled then build a S3EncryptionClient.
+ if (S3AEncryptionMethods.CSE_KMS.getMethod()
+ .equals(encryptionMethods.getMethod())) {
return buildAmazonS3EncryptionClient(
awsConf,
parameters);
@@ -163,12 +171,11 @@ protected AmazonS3 buildAmazonS3EncryptionClient(
new AmazonS3EncryptionClientV2Builder();
Configuration conf = getConf();
- //CSE-KMS Method
- String kmsKeyId = S3AUtils.lookupPassword(conf,
- S3_ENCRYPTION_KEY, null);
+ // CSE-KMS Method
+ String kmsKeyId = getS3EncryptionKey(bucket, conf);
// Check if kmsKeyID is not null
- Preconditions.checkArgument(kmsKeyId != null, "CSE-KMS method "
- + "requires KMS key ID. Use " + S3_ENCRYPTION_KEY
+ Preconditions.checkArgument(!StringUtils.isBlank(kmsKeyId), "CSE-KMS "
+ + "method requires KMS key ID. Use " + S3_ENCRYPTION_KEY
+ " property to set it. ");
EncryptionMaterialsProvider materialsProvider =
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
index bc3e7ea5a5640..5646b9d677272 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
@@ -441,9 +441,8 @@ public void initialize(URI name, Configuration originalConf)
instrumentation = new S3AInstrumentation(uri);
initializeStatisticsBinding();
// If CSE-KMS method is set then CSE is enabled.
- isCSEEnabled = S3AUtils.lookupPassword(conf,
- Constants.S3_ENCRYPTION_ALGORITHM, "")
- .equals(S3AEncryptionMethods.CSE_KMS.getMethod());
+ isCSEEnabled = S3AEncryptionMethods.CSE_KMS.getMethod()
+ .equals(getS3EncryptionAlgorithm().getMethod());
LOG.debug("Client Side Encryption enabled: {}", isCSEEnabled);
setCSEGauge();
// Username is the current user at the time the FS was instantiated.
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java
index 5b504a94eec72..2fbc1e85f9a9d 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java
@@ -1578,10 +1578,13 @@ static void patchSecurityCredentialProviders(Configuration conf) {
* @return the encryption key or ""
* @throws IllegalArgumentException bad arguments.
*/
+ @SuppressWarnings("deprecation")
public static String getS3EncryptionKey(String bucket,
Configuration conf) {
try {
- return lookupPassword(bucket, conf, Constants.S3_ENCRYPTION_KEY);
+ String oldKey = lookupPassword(bucket, conf, SERVER_SIDE_ENCRYPTION_KEY);
+ // return the newKey with oldKey as fallback/default value.
+ return lookupPassword(bucket, conf, S3_ENCRYPTION_KEY, null, oldKey);
} catch (IOException e) {
LOG.error("Cannot retrieve " + Constants.S3_ENCRYPTION_KEY, e);
return "";
@@ -1599,11 +1602,19 @@ public static String getS3EncryptionKey(String bucket,
* one is set.
* @throws IOException on any validation problem.
*/
+ @SuppressWarnings("deprecation")
public static S3AEncryptionMethods getEncryptionAlgorithm(String bucket,
Configuration conf) throws IOException {
+ // lookup old property and use that as default/fallback for new lookup.
+ S3AEncryptionMethods oldEncryptionMethod = S3AEncryptionMethods.getMethod(
+ lookupPassword(bucket, conf,
+ SERVER_SIDE_ENCRYPTION_ALGORITHM));
+ // new lookup.
S3AEncryptionMethods encryptionMethod = S3AEncryptionMethods.getMethod(
lookupPassword(bucket, conf,
- Constants.S3_ENCRYPTION_ALGORITHM));
+ Constants.S3_ENCRYPTION_ALGORITHM,
+ null,
+ oldEncryptionMethod.getMethod()));
String encryptionKey = getS3EncryptionKey(bucket, conf);
int encryptionKeyLen =
StringUtils.isBlank(encryptionKey) ? 0 : encryptionKey.length();
diff --git a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md
index f4f7144f34f16..8388b55bef9cd 100644
--- a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md
+++ b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md
@@ -1426,6 +1426,55 @@ Finally, the public `s3a://landsat-pds/` bucket can be accessed anonymously:
```
+#### per-bucket configuration and deprecated configuration options
+
+Excluding secrets held in JCEKS files, a per-bucket declaration of a
+deprecated property will take priority over a global option.
+
+
+This means that when setting encryption options in XML files,
+the option, `fs.s3a.bucket.BUCKET.server-side-encryption-algorithm`
+will take priority over the global value of `fs.s3a.encryption.algorithm`.
+The same holds for the encryption key option `fs.s3a.encryption.key`
+and its predecessor `fs.s3a.server-side-encryption.key`.
+
+
+For a site configuration of:
+
+```xml
+<property>
+  <name>fs.s3a.bucket.nightly.server-side-encryption-algorithm</name>
+  <value>SSE-KMS</value>
+</property>
+
+<property>
+  <name>fs.s3a.bucket.nightly.server-side-encryption.key</name>
+  <value>arn:aws:kms:eu-west-2:1528130000000:key/753778e4-2d0f-42e6-b894-6a3ae4ea4e5f</value>
+</property>
+
+<property>
+  <name>fs.s3a.encryption.algorithm</name>
+  <value>AES256</value>
+</property>
+
+<property>
+  <name>fs.s3a.encryption.key</name>
+  <value>unset</value>
+</property>
+
+
+```
+
+The bucket "nightly" will be encrypted with SSE-KMS using the KMS key
+`arn:aws:kms:eu-west-2:1528130000000:key/753778e4-2d0f-42e6-b894-6a3ae4ea4e5f`
+
+This *does not hold* when encryption settings are held in JCEKS stores.
+In that situation, *a global declaration using the newer key takes priority
+over a per-bucket declaration with the older key name*.
+
+
+
+
### Customizing S3A secrets held in credential files
@@ -1444,14 +1493,25 @@ fs.s3a.encryption.key
fs.s3a.bucket.nightly.access.key
fs.s3a.bucket.nightly.secret.key
fs.s3a.bucket.nightly.session.token
-fs.s3a.bucket.nightly.server-side-encryption.key
-fs.s3a.bucket.nightly.server-side-encryption-algorithm
+fs.s3a.bucket.nightly.encryption.key
+fs.s3a.bucket.nightly.encryption.algorithm
```
When accessing the bucket `s3a://nightly/`, the per-bucket configuration
options for that bucket will be used, here the access keys and token,
and including the encryption algorithm and key.
+If the configuration mixes old and new key names, then the new names take
+priority over the older values, even if they are not scoped to the bucket.
+
+```
+fs.s3a.encryption.algorithm
+fs.s3a.encryption.key
+fs.s3a.bucket.nightly.server-side-encryption-algorithm
+fs.s3a.bucket.nightly.server-side-encryption.key
+```
+
+
### Using Per-Bucket Configuration to access data round the world
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractTestS3AEncryption.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractTestS3AEncryption.java
index 8e3208ce096c0..7945c820d060a 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractTestS3AEncryption.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractTestS3AEncryption.java
@@ -32,9 +32,11 @@
import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.getTestBucketName;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionTestsDisabled;
import static org.apache.hadoop.fs.s3a.S3AUtils.getEncryptionAlgorithm;
+import static org.apache.hadoop.fs.s3a.S3AUtils.getS3EncryptionKey;
/**
* Test whether or not encryption works by turning it on. Some checks
@@ -163,8 +165,9 @@ protected String createFilename(String name) {
*/
protected void assertEncrypted(Path path) throws IOException {
//S3 will return full arn of the key, so specify global arn in properties
- String kmsKeyArn = this.getConfiguration().
- getTrimmed(S3_ENCRYPTION_KEY);
+ String kmsKeyArn =
+ getS3EncryptionKey(getTestBucketName(getConfiguration()),
+ getConfiguration());
S3AEncryptionMethods algorithm = getSSEAlgorithm();
EncryptionTestUtils.assertEncrypted(getFileSystem(),
path,
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryption.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryption.java
index bb052ed3d1752..4094b22eb1926 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryption.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryption.java
@@ -45,8 +45,12 @@
import static org.apache.hadoop.fs.s3a.Constants.MULTIPART_MIN_SIZE;
import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
+import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
+import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.assume;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.getTestBucketName;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.getTestPropertyBool;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
/**
@@ -193,6 +197,7 @@ KEY_SCALE_TESTS_ENABLED, getTestPropertyBool(
* Testing how unencrypted and encrypted data behaves when read through
* CSE enabled and disabled FS respectively.
*/
+ @SuppressWarnings("deprecation")
@Test
public void testEncryptionEnabledAndDisabledFS() throws Exception {
maybeSkipTest();
@@ -203,8 +208,12 @@ public void testEncryptionEnabledAndDisabledFS() throws Exception {
Path encryptedFilePath = path(getMethodName() + "cse");
// Initialize a CSE disabled FS.
- cseDisabledConf.unset(S3_ENCRYPTION_ALGORITHM);
- cseDisabledConf.unset(S3_ENCRYPTION_KEY);
+ removeBaseAndBucketOverrides(getTestBucketName(cseDisabledConf),
+ cseDisabledConf,
+ S3_ENCRYPTION_ALGORITHM,
+ S3_ENCRYPTION_KEY,
+ SERVER_SIDE_ENCRYPTION_ALGORITHM,
+ SERVER_SIDE_ENCRYPTION_KEY);
cseDisabledFS.initialize(getFileSystem().getUri(),
cseDisabledConf);
@@ -288,7 +297,7 @@ protected void validateEncryptionForFileSize(int len) throws IOException {
/**
* Skip tests if certain conditions are met.
*/
- protected abstract void maybeSkipTest();
+ protected abstract void maybeSkipTest() throws IOException;
/**
* Assert that at path references an encrypted blob.
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryptionKms.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryptionKms.java
index 085c0f9ee3762..bcc37c8bfbbba 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryptionKms.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryptionKms.java
@@ -28,8 +28,10 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.s3a.impl.HeaderProcessing;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.getTestBucketName;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionNotSet;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionTestsDisabled;
+import static org.apache.hadoop.fs.s3a.S3AUtils.getS3EncryptionKey;
/**
* Testing the S3 CSE - KMS method.
@@ -53,7 +55,7 @@ protected Configuration createConfiguration() {
}
@Override
- protected void maybeSkipTest() {
+ protected void maybeSkipTest() throws IOException {
skipIfEncryptionTestsDisabled(getConfiguration());
// skip the test if CSE-KMS or KMS key is not set.
skipIfEncryptionNotSet(getConfiguration(), S3AEncryptionMethods.CSE_KMS);
@@ -71,8 +73,8 @@ protected void assertEncrypted(Path path) throws IOException {
// Assert content encryption algo for KMS, is present in the
// materials description and KMS key ID isn't.
- String keyId =
- getConfiguration().get(Constants.S3_ENCRYPTION_KEY);
+ String keyId = getS3EncryptionKey(getTestBucketName(getConfiguration()),
+ getConfiguration());
Assertions.assertThat(processHeader(fsXAttrs,
xAttrPrefix + Headers.MATERIALS_DESCRIPTION))
.describedAs("Materials Description should contain the content "
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
index eb68eed1bef8f..dce99a6144fb4 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
@@ -42,7 +42,6 @@
import java.io.File;
import java.net.URI;
import java.security.PrivilegedExceptionAction;
-import java.util.Collection;
import org.apache.hadoop.security.ProviderUtils;
import org.apache.hadoop.security.UserGroupInformation;
@@ -501,81 +500,6 @@ private static T getField(Object target, Class fieldType,
return fieldType.cast(obj);
}
- @Test
- public void testBucketConfigurationPropagation() throws Throwable {
- Configuration config = new Configuration(false);
- setBucketOption(config, "b", "base", "1024");
- String basekey = "fs.s3a.base";
- assertOptionEquals(config, basekey, null);
- String bucketKey = "fs.s3a.bucket.b.base";
- assertOptionEquals(config, bucketKey, "1024");
- Configuration updated = propagateBucketOptions(config, "b");
- assertOptionEquals(updated, basekey, "1024");
- // original conf is not updated
- assertOptionEquals(config, basekey, null);
-
- String[] sources = updated.getPropertySources(basekey);
- assertEquals(1, sources.length);
- String sourceInfo = sources[0];
- assertTrue("Wrong source " + sourceInfo, sourceInfo.contains(bucketKey));
- }
-
- @Test
- public void testBucketConfigurationPropagationResolution() throws Throwable {
- Configuration config = new Configuration(false);
- String basekey = "fs.s3a.base";
- String baseref = "fs.s3a.baseref";
- String baseref2 = "fs.s3a.baseref2";
- config.set(basekey, "orig");
- config.set(baseref2, "${fs.s3a.base}");
- setBucketOption(config, "b", basekey, "1024");
- setBucketOption(config, "b", baseref, "${fs.s3a.base}");
- Configuration updated = propagateBucketOptions(config, "b");
- assertOptionEquals(updated, basekey, "1024");
- assertOptionEquals(updated, baseref, "1024");
- assertOptionEquals(updated, baseref2, "1024");
- }
-
- @Test
- public void testMultipleBucketConfigurations() throws Throwable {
- Configuration config = new Configuration(false);
- setBucketOption(config, "b", USER_AGENT_PREFIX, "UA-b");
- setBucketOption(config, "c", USER_AGENT_PREFIX, "UA-c");
- config.set(USER_AGENT_PREFIX, "UA-orig");
- Configuration updated = propagateBucketOptions(config, "c");
- assertOptionEquals(updated, USER_AGENT_PREFIX, "UA-c");
- }
-
- @Test
- public void testClearBucketOption() throws Throwable {
- Configuration config = new Configuration();
- config.set(USER_AGENT_PREFIX, "base");
- setBucketOption(config, "bucket", USER_AGENT_PREFIX, "overridden");
- clearBucketOption(config, "bucket", USER_AGENT_PREFIX);
- Configuration updated = propagateBucketOptions(config, "c");
- assertOptionEquals(updated, USER_AGENT_PREFIX, "base");
- }
-
- @Test
- public void testBucketConfigurationSkipsUnmodifiable() throws Throwable {
- Configuration config = new Configuration(false);
- String impl = "fs.s3a.impl";
- config.set(impl, "orig");
- setBucketOption(config, "b", impl, "b");
- String metastoreImpl = "fs.s3a.metadatastore.impl";
- String ddb = "org.apache.hadoop.fs.s3a.s3guard.DynamoDBMetadataStore";
- setBucketOption(config, "b", metastoreImpl, ddb);
- setBucketOption(config, "b", "impl2", "b2");
- setBucketOption(config, "b", "bucket.b.loop", "b3");
- assertOptionEquals(config, "fs.s3a.bucket.b.impl", "b");
-
- Configuration updated = propagateBucketOptions(config, "b");
- assertOptionEquals(updated, impl, "orig");
- assertOptionEquals(updated, "fs.s3a.impl2", "b2");
- assertOptionEquals(updated, metastoreImpl, ddb);
- assertOptionEquals(updated, "fs.s3a.bucket.b.loop", null);
- }
-
@Test
public void testConfOptionPropagationToFS() throws Exception {
Configuration config = new Configuration();
@@ -587,53 +511,6 @@ public void testConfOptionPropagationToFS() throws Exception {
assertOptionEquals(updated, "fs.s3a.propagation", "propagated");
}
- @Test
- public void testSecurityCredentialPropagationNoOverride() throws Exception {
- Configuration config = new Configuration();
- config.set(CREDENTIAL_PROVIDER_PATH, "base");
- patchSecurityCredentialProviders(config);
- assertOptionEquals(config, CREDENTIAL_PROVIDER_PATH,
- "base");
- }
-
- @Test
- public void testSecurityCredentialPropagationOverrideNoBase()
- throws Exception {
- Configuration config = new Configuration();
- config.unset(CREDENTIAL_PROVIDER_PATH);
- config.set(S3A_SECURITY_CREDENTIAL_PROVIDER_PATH, "override");
- patchSecurityCredentialProviders(config);
- assertOptionEquals(config, CREDENTIAL_PROVIDER_PATH,
- "override");
- }
-
- @Test
- public void testSecurityCredentialPropagationOverride() throws Exception {
- Configuration config = new Configuration();
- config.set(CREDENTIAL_PROVIDER_PATH, "base");
- config.set(S3A_SECURITY_CREDENTIAL_PROVIDER_PATH, "override");
- patchSecurityCredentialProviders(config);
- assertOptionEquals(config, CREDENTIAL_PROVIDER_PATH,
- "override,base");
- Collection all = config.getStringCollection(
- CREDENTIAL_PROVIDER_PATH);
- assertTrue(all.contains("override"));
- assertTrue(all.contains("base"));
- }
-
- @Test
- public void testSecurityCredentialPropagationEndToEnd() throws Exception {
- Configuration config = new Configuration();
- config.set(CREDENTIAL_PROVIDER_PATH, "base");
- setBucketOption(config, "b", S3A_SECURITY_CREDENTIAL_PROVIDER_PATH,
- "override");
- Configuration updated = propagateBucketOptions(config, "b");
-
- patchSecurityCredentialProviders(updated);
- assertOptionEquals(updated, CREDENTIAL_PROVIDER_PATH,
- "override,base");
- }
-
@Test(timeout = 10_000L)
public void testS3SpecificSignerOverride() throws IOException {
ClientConfiguration clientConfiguration = null;
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSUserDefinedKey.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSUserDefinedKey.java
index c281ae10475d8..2be3fe8889624 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSUserDefinedKey.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSUserDefinedKey.java
@@ -18,11 +18,13 @@
package org.apache.hadoop.fs.s3a;
+import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
+import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.S3AEncryptionMethods.SSE_KMS;
-import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionNotSet;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.getTestBucketName;
/**
* Concrete class that extends {@link AbstractTestS3AEncryption}
@@ -36,9 +38,12 @@ public class ITestS3AEncryptionSSEKMSUserDefinedKey
protected Configuration createConfiguration() {
// get the KMS key for this test.
Configuration c = new Configuration();
- String kmsKey = c.get(S3_ENCRYPTION_KEY);
+ String kmsKey = S3AUtils.getS3EncryptionKey(getTestBucketName(c), c);
// skip the test if SSE-KMS or KMS key not set.
- skipIfEncryptionNotSet(c, getSSEAlgorithm());
+ if (StringUtils.isBlank(kmsKey)) {
+ skip(S3_ENCRYPTION_KEY + " is not set for " +
+ SSE_KMS.getMethod());
+ }
Configuration conf = super.createConfiguration();
conf.set(S3_ENCRYPTION_KEY, kmsKey);
return conf;
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionWithDefaultS3Settings.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionWithDefaultS3Settings.java
index 0f4882553efb3..a0fb762d89914 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionWithDefaultS3Settings.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionWithDefaultS3Settings.java
@@ -34,11 +34,13 @@
import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
import static org.apache.hadoop.fs.contract.ContractTestUtils.writeDataset;
import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
-import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
+import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
import static org.apache.hadoop.fs.s3a.EncryptionTestUtils.AWS_KMS_SSE_ALGORITHM;
import static org.apache.hadoop.fs.s3a.S3AEncryptionMethods.SSE_KMS;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.getTestBucketName;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionNotSet;
+import static org.apache.hadoop.fs.s3a.S3AUtils.getS3EncryptionKey;
/**
* Concrete class that extends {@link AbstractTestS3AEncryption}
@@ -60,11 +62,13 @@ public void setup() throws Exception {
skipIfEncryptionNotSet(c, getSSEAlgorithm());
}
+ @SuppressWarnings("deprecation")
@Override
protected void patchConfigurationEncryptionSettings(
final Configuration conf) {
removeBaseAndBucketOverrides(conf,
- S3_ENCRYPTION_ALGORITHM);
+ S3_ENCRYPTION_ALGORITHM,
+ SERVER_SIDE_ENCRYPTION_ALGORITHM);
conf.set(S3_ENCRYPTION_ALGORITHM,
getSSEAlgorithm().getMethod());
}
@@ -89,7 +93,7 @@ protected S3AEncryptionMethods getSSEAlgorithm() {
protected void assertEncrypted(Path path) throws IOException {
S3AFileSystem fs = getFileSystem();
Configuration c = fs.getConf();
- String kmsKey = c.getTrimmed(S3_ENCRYPTION_KEY);
+ String kmsKey = getS3EncryptionKey(getTestBucketName(c), c);
EncryptionTestUtils.assertEncrypted(fs, path, SSE_KMS, kmsKey);
}
@@ -145,7 +149,7 @@ public void testEncryptionOverRename2() throws Throwable {
ContractTestUtils.rename(kmsFS, src, targetDir);
Path renamedFile = new Path(targetDir, src.getName());
ContractTestUtils.verifyFileContents(fs, renamedFile, data);
- String kmsKey = fs2Conf.getTrimmed(S3_ENCRYPTION_KEY);
+ String kmsKey = getS3EncryptionKey(getTestBucketName(fs2Conf), fs2Conf);
// we assert that the renamed file has picked up the KMS key of our FS
EncryptionTestUtils.assertEncrypted(fs, renamedFile, SSE_KMS, kmsKey);
}
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
index 707cf356ac534..8395d7e9ebf2f 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
@@ -61,6 +61,7 @@
import org.apache.hadoop.util.functional.CallableRaisingIOE;
import com.amazonaws.auth.AWSCredentialsProvider;
+import org.assertj.core.api.Assertions;
import org.hamcrest.core.Is;
import org.junit.Assert;
import org.junit.Assume;
@@ -83,6 +84,8 @@
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
+import static org.apache.hadoop.fs.s3a.S3AUtils.getEncryptionAlgorithm;
+import static org.apache.hadoop.fs.s3a.S3AUtils.getS3EncryptionKey;
import static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.checkNotNull;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CREDENTIAL_PROVIDER_PATH;
import static org.apache.commons.lang3.StringUtils.isNotEmpty;
@@ -246,9 +249,10 @@ public static FileContext createTestFileContext(Configuration conf)
*
* @param conf Test Configuration.
*/
- private static void skipIfS3GuardAndS3CSEEnabled(Configuration conf) {
- String encryptionMethod =
- conf.getTrimmed(Constants.S3_ENCRYPTION_ALGORITHM, "");
+ private static void skipIfS3GuardAndS3CSEEnabled(Configuration conf)
+ throws IOException {
+ String encryptionMethod = getEncryptionAlgorithm(getTestBucketName(conf),
+ conf).getMethod();
String metaStore = conf.getTrimmed(S3_METADATA_STORE_IMPL, "");
if (encryptionMethod.equals(S3AEncryptionMethods.CSE_KMS.getMethod()) &&
!metaStore.equals(S3GUARD_METASTORE_NULL)) {
@@ -1238,7 +1242,13 @@ public static void verifyDirStatus(S3AFileStatus status,
public static void assertOptionEquals(Configuration conf,
String key,
String expected) {
- assertEquals("Value of " + key, expected, conf.get(key));
+ String actual = conf.get(key);
+ String origin = actual == null
+ ? "(none)"
+ : "[" + StringUtils.join(conf.getPropertySources(key), ", ") + "]";
+ Assertions.assertThat(actual)
+ .describedAs("Value of %s with origin %s", key, origin)
+ .isEqualTo(expected);
}
/**
@@ -1538,12 +1548,14 @@ public static S3AFileStatus innerGetFileStatus(
* @param configuration configuration to probe.
*/
public static void skipIfEncryptionNotSet(Configuration configuration,
- S3AEncryptionMethods s3AEncryptionMethod) {
+ S3AEncryptionMethods s3AEncryptionMethod) throws IOException {
// if S3 encryption algorithm is not set to desired method or AWS encryption
// key is not set, then skip.
- if (!configuration.getTrimmed(S3_ENCRYPTION_ALGORITHM, "")
- .equals(s3AEncryptionMethod.getMethod())
- || configuration.get(Constants.S3_ENCRYPTION_KEY) == null) {
+ String bucket = getTestBucketName(configuration);
+ if (!s3AEncryptionMethod.getMethod().equals(getEncryptionAlgorithm(bucket,
+ configuration).getMethod()) || StringUtils.isBlank(getS3EncryptionKey(bucket,
+ configuration))
+ ) {
skip(S3_ENCRYPTION_KEY + " is not set for " + s3AEncryptionMethod
.getMethod() + " or " + S3_ENCRYPTION_ALGORITHM + " is not set to "
+ s3AEncryptionMethod.getMethod());
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestBucketConfiguration.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestBucketConfiguration.java
new file mode 100644
index 0000000000000..f3c5ca0265005
--- /dev/null
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestBucketConfiguration.java
@@ -0,0 +1,247 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import java.io.File;
+import java.net.URI;
+import java.util.Collection;
+
+import org.assertj.core.api.Assertions;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.ProviderUtils;
+import org.apache.hadoop.security.alias.CredentialProvider;
+import org.apache.hadoop.security.alias.CredentialProviderFactory;
+import org.apache.hadoop.test.AbstractHadoopTestBase;
+
+import static org.apache.hadoop.fs.s3a.Constants.FS_S3A_BUCKET_PREFIX;
+import static org.apache.hadoop.fs.s3a.Constants.S3A_SECURITY_CREDENTIAL_PROVIDER_PATH;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
+import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
+import static org.apache.hadoop.fs.s3a.Constants.USER_AGENT_PREFIX;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.assertOptionEquals;
+import static org.apache.hadoop.fs.s3a.S3AUtils.CREDENTIAL_PROVIDER_PATH;
+import static org.apache.hadoop.fs.s3a.S3AUtils.clearBucketOption;
+import static org.apache.hadoop.fs.s3a.S3AUtils.getEncryptionAlgorithm;
+import static org.apache.hadoop.fs.s3a.S3AUtils.patchSecurityCredentialProviders;
+import static org.apache.hadoop.fs.s3a.S3AUtils.propagateBucketOptions;
+import static org.apache.hadoop.fs.s3a.S3AUtils.setBucketOption;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * S3A tests for configuration option propagation.
+ */
+@SuppressWarnings("deprecation")
+public class TestBucketConfiguration extends AbstractHadoopTestBase {
+
+ private static final String NEW_ALGORITHM_KEY_GLOBAL = "CSE-KMS";
+ private static final String OLD_ALGORITHM_KEY_BUCKET = "SSE-KMS";
+ @Rule
+ public final TemporaryFolder tempDir = new TemporaryFolder();
+
+ /**
+ * Setup: create the contract then init it.
+ * @throws Exception on any failure
+ */
+ @Before
+ public void setup() throws Exception {
+ // forces in deprecation wireup, even when this test method is running isolated
+ S3AFileSystem.initializeClass();
+ }
+
+ @Test
+ public void testBucketConfigurationPropagation() throws Throwable {
+ Configuration config = new Configuration(false);
+ setBucketOption(config, "b", "base", "1024");
+ String basekey = "fs.s3a.base";
+ assertOptionEquals(config, basekey, null);
+ String bucketKey = "fs.s3a.bucket.b.base";
+ assertOptionEquals(config, bucketKey, "1024");
+ Configuration updated = propagateBucketOptions(config, "b");
+ assertOptionEquals(updated, basekey, "1024");
+ // original conf is not updated
+ assertOptionEquals(config, basekey, null);
+
+ String[] sources = updated.getPropertySources(basekey);
+ assertEquals(1, sources.length);
+ Assertions.assertThat(sources)
+ .describedAs("base key property sources")
+ .hasSize(1);
+ Assertions.assertThat(sources[0])
+ .describedAs("Property source")
+ .contains(bucketKey);
+ }
+
+ @Test
+ public void testBucketConfigurationPropagationResolution() throws Throwable {
+ Configuration config = new Configuration(false);
+ String basekey = "fs.s3a.base";
+ String baseref = "fs.s3a.baseref";
+ String baseref2 = "fs.s3a.baseref2";
+ config.set(basekey, "orig");
+ config.set(baseref2, "${fs.s3a.base}");
+ setBucketOption(config, "b", basekey, "1024");
+ setBucketOption(config, "b", baseref, "${fs.s3a.base}");
+ Configuration updated = propagateBucketOptions(config, "b");
+ assertOptionEquals(updated, basekey, "1024");
+ assertOptionEquals(updated, baseref, "1024");
+ assertOptionEquals(updated, baseref2, "1024");
+ }
+
+ @Test
+ public void testMultipleBucketConfigurations() throws Throwable {
+ Configuration config = new Configuration(false);
+ setBucketOption(config, "b", USER_AGENT_PREFIX, "UA-b");
+ setBucketOption(config, "c", USER_AGENT_PREFIX, "UA-c");
+ config.set(USER_AGENT_PREFIX, "UA-orig");
+ Configuration updated = propagateBucketOptions(config, "c");
+ assertOptionEquals(updated, USER_AGENT_PREFIX, "UA-c");
+ }
+
+ @Test
+ public void testClearBucketOption() throws Throwable {
+ Configuration config = new Configuration();
+ config.set(USER_AGENT_PREFIX, "base");
+ setBucketOption(config, "bucket", USER_AGENT_PREFIX, "overridden");
+ clearBucketOption(config, "bucket", USER_AGENT_PREFIX);
+ Configuration updated = propagateBucketOptions(config, "c");
+ assertOptionEquals(updated, USER_AGENT_PREFIX, "base");
+ }
+
+ @Test
+ public void testBucketConfigurationSkipsUnmodifiable() throws Throwable {
+ Configuration config = new Configuration(false);
+ String impl = "fs.s3a.impl";
+ config.set(impl, "orig");
+ setBucketOption(config, "b", impl, "b");
+ String metastoreImpl = "fs.s3a.metadatastore.impl";
+ String ddb = "org.apache.hadoop.fs.s3a.s3guard.DynamoDBMetadataStore";
+ setBucketOption(config, "b", metastoreImpl, ddb);
+ setBucketOption(config, "b", "impl2", "b2");
+ setBucketOption(config, "b", "bucket.b.loop", "b3");
+ assertOptionEquals(config, "fs.s3a.bucket.b.impl", "b");
+
+ Configuration updated = propagateBucketOptions(config, "b");
+ assertOptionEquals(updated, impl, "orig");
+ assertOptionEquals(updated, "fs.s3a.impl2", "b2");
+ assertOptionEquals(updated, metastoreImpl, ddb);
+ assertOptionEquals(updated, "fs.s3a.bucket.b.loop", null);
+ }
+
+ @Test
+ public void testSecurityCredentialPropagationNoOverride() throws Exception {
+ Configuration config = new Configuration();
+ config.set(CREDENTIAL_PROVIDER_PATH, "base");
+ patchSecurityCredentialProviders(config);
+ assertOptionEquals(config, CREDENTIAL_PROVIDER_PATH,
+ "base");
+ }
+
+ @Test
+ public void testSecurityCredentialPropagationOverrideNoBase()
+ throws Exception {
+ Configuration config = new Configuration();
+ config.unset(CREDENTIAL_PROVIDER_PATH);
+ config.set(S3A_SECURITY_CREDENTIAL_PROVIDER_PATH, "override");
+ patchSecurityCredentialProviders(config);
+ assertOptionEquals(config, CREDENTIAL_PROVIDER_PATH,
+ "override");
+ }
+
+ @Test
+ public void testSecurityCredentialPropagationOverride() throws Exception {
+ Configuration config = new Configuration();
+ config.set(CREDENTIAL_PROVIDER_PATH, "base");
+ config.set(S3A_SECURITY_CREDENTIAL_PROVIDER_PATH, "override");
+ patchSecurityCredentialProviders(config);
+ assertOptionEquals(config, CREDENTIAL_PROVIDER_PATH,
+ "override,base");
+ Collection<String> all = config.getStringCollection(
+ CREDENTIAL_PROVIDER_PATH);
+ assertTrue(all.contains("override"));
+ assertTrue(all.contains("base"));
+ }
+
+ @Test
+ public void testSecurityCredentialPropagationEndToEnd() throws Exception {
+ Configuration config = new Configuration();
+ config.set(CREDENTIAL_PROVIDER_PATH, "base");
+ setBucketOption(config, "b", S3A_SECURITY_CREDENTIAL_PROVIDER_PATH,
+ "override");
+ Configuration updated = propagateBucketOptions(config, "b");
+
+ patchSecurityCredentialProviders(updated);
+ assertOptionEquals(updated, CREDENTIAL_PROVIDER_PATH,
+ "override,base");
+ }
+
+ /**
+ * This test shows that a per-bucket value of the older key takes priority
+ * over a global value of a new key in XML configuration file.
+ */
+ @Test
+ public void testBucketConfigurationDeprecatedEncryptionAlgorithm()
+ throws Throwable {
+ Configuration config = new Configuration(false);
+ config.set(S3_ENCRYPTION_ALGORITHM, NEW_ALGORITHM_KEY_GLOBAL);
+ setBucketOption(config, "b", SERVER_SIDE_ENCRYPTION_ALGORITHM,
+ OLD_ALGORITHM_KEY_BUCKET);
+ Configuration updated = propagateBucketOptions(config, "b");
+
+ // Get the encryption method and verify that the value is per-bucket of
+ // old keys.
+ String value = getEncryptionAlgorithm("b", updated).getMethod();
+ Assertions.assertThat(value)
+ .describedAs("lookupPassword(%s)", S3_ENCRYPTION_ALGORITHM)
+ .isEqualTo(OLD_ALGORITHM_KEY_BUCKET);
+ }
+
+ @Test
+ public void testJceksDeprecatedEncryptionAlgorithm() throws Exception {
+ // set up conf to have a cred provider
+ final Configuration conf = new Configuration(false);
+ final File file = tempDir.newFile("test.jks");
+ final URI jks = ProviderUtils.nestURIForLocalJavaKeyStoreProvider(
+ file.toURI());
+ conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
+ jks.toString());
+
+ // add our creds to the provider
+ final CredentialProvider provider =
+ CredentialProviderFactory.getProviders(conf).get(0);
+ provider.createCredentialEntry(S3_ENCRYPTION_ALGORITHM,
+ NEW_ALGORITHM_KEY_GLOBAL.toCharArray());
+ provider.createCredentialEntry(
+ FS_S3A_BUCKET_PREFIX + "b." + SERVER_SIDE_ENCRYPTION_ALGORITHM,
+ OLD_ALGORITHM_KEY_BUCKET.toCharArray());
+ provider.flush();
+
+ // Get the encryption method and verify that the value is per-bucket of
+ // old keys.
+ final String value = S3AUtils.getEncryptionAlgorithm("b", conf).getMethod();
+ Assertions.assertThat(value)
+ .describedAs("lookupPassword(%s)", S3_ENCRYPTION_ALGORITHM)
+ .isEqualTo(OLD_ALGORITHM_KEY_BUCKET);
+ }
+}
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationInFileystem.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationInFileystem.java
index b3fc5de49290e..6aed9e7e314e9 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationInFileystem.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationInFileystem.java
@@ -66,8 +66,10 @@
import static org.apache.hadoop.fs.s3a.Constants.*;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.assumeSessionTestsEnabled;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.disableFilesystemCaching;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.getTestBucketName;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.unsetHadoopCredentialProviders;
+import static org.apache.hadoop.fs.s3a.S3AUtils.getS3EncryptionKey;
import static org.apache.hadoop.fs.s3a.auth.delegation.DelegationConstants.*;
import static org.apache.hadoop.fs.s3a.auth.delegation.DelegationTokenIOException.TOKEN_MISMATCH;
import static org.apache.hadoop.fs.s3a.auth.delegation.MiniKerberizedHadoopCluster.ALICE;
@@ -146,7 +148,7 @@ protected Configuration createConfiguration() {
String s3EncryptionMethod =
conf.getTrimmed(Constants.S3_ENCRYPTION_ALGORITHM,
S3AEncryptionMethods.SSE_KMS.getMethod());
- String s3EncryptionKey = conf.getTrimmed(Constants.S3_ENCRYPTION_KEY, "");
+ String s3EncryptionKey = getS3EncryptionKey(getTestBucketName(conf), conf);
removeBaseAndBucketOverrides(conf,
DELEGATION_TOKEN_BINDING,
Constants.S3_ENCRYPTION_ALGORITHM,
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesEncryption.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesEncryption.java
index 9325feb784336..93242155c678a 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesEncryption.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesEncryption.java
@@ -20,15 +20,17 @@
import java.io.IOException;
+import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.s3a.Constants;
import org.apache.hadoop.fs.s3a.EncryptionTestUtils;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
-import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.S3AEncryptionMethods.SSE_KMS;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.getTestBucketName;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionNotSet;
+import static org.apache.hadoop.fs.s3a.S3AUtils.getS3EncryptionKey;
/**
* Class to test SSE_KMS encryption settings for huge files.
@@ -58,13 +60,13 @@ protected String getBlockOutputBufferName() {
@Override
protected boolean isEncrypted(S3AFileSystem fileSystem) {
Configuration c = new Configuration();
- return c.get(S3_ENCRYPTION_KEY) != null;
+ return StringUtils.isNotBlank(getS3EncryptionKey(getTestBucketName(c), c));
}
@Override
protected void assertEncrypted(Path hugeFile) throws IOException {
Configuration c = new Configuration();
- String kmsKey = c.get(S3_ENCRYPTION_KEY);
+ String kmsKey = getS3EncryptionKey(getTestBucketName(c), c);
EncryptionTestUtils.assertEncrypted(getFileSystem(), hugeFile,
SSE_KMS, kmsKey);
}