diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
index 2d980593e9e83..1ab9f44010726 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
@@ -2489,7 +2489,7 @@ UploadResult executePut(PutObjectRequest putObjectRequest,
    * Wait for an upload to complete.
    * If the waiting for completion is interrupted, the upload will be
    * aborted before an {@code InterruptedIOException} is thrown.
-   * @param upload upload to wait for
+   * @param uploadInfo upload to wait for
    * @param key destination key
    * @return the upload result
    * @throws InterruptedIOException if the blocking was interrupted.
@@ -2592,7 +2592,7 @@ private void copyFile(String srcKey, String dstKey, long size)
       setOptionalObjectMetadata(dstom);
       CopyObjectRequest copyObjectRequest =
           new CopyObjectRequest(bucket, srcKey, bucket, dstKey);
-      setOptionalCopyObjectRequestParameters(copyObjectRequest);
+      setOptionalCopyObjectRequestParameters(srcom, copyObjectRequest);
       copyObjectRequest.setCannedAccessControlList(cannedACL);
       copyObjectRequest.setNewObjectMetadata(dstom);
       Copy copy = transfers.copy(copyObjectRequest);
@@ -2608,6 +2608,49 @@ private void copyFile(String srcKey, String dstKey, long size)
         });
   }
 
+  /**
+   * Propagate the encryption parameters from the source file if set;
+   * otherwise use the current filesystem encryption settings.
+   * @param srcom source object metadata.
+   * @param copyObjectRequest copy object request body.
+   */
+  private void setOptionalCopyObjectRequestParameters(
+      ObjectMetadata srcom,
+      CopyObjectRequest copyObjectRequest) {
+    String sourceKMSId = srcom.getSSEAwsKmsKeyId();
+    if (isNotEmpty(sourceKMSId)) {
+      // source KMS ID is propagated
+      LOG.debug("Propagating SSE-KMS settings from source {}",
+          sourceKMSId);
+      copyObjectRequest.setSSEAwsKeyManagementParams(
+          new SSEAwsKeyManagementParams(sourceKMSId));
+    }
+    switch (getServerSideEncryptionAlgorithm()) {
+    /*
+     * Overriding with client encryption settings.
+     */
+    case SSE_C:
+      if (isNotBlank(getServerSideEncryptionKey(bucket, getConf()))) {
+        // at the moment, only supports copy using the same key
+        SSECustomerKey customerKey = generateSSECustomerKey();
+        copyObjectRequest.setSourceSSECustomerKey(customerKey);
+        copyObjectRequest.setDestinationSSECustomerKey(customerKey);
+      }
+      break;
+    case SSE_KMS:
+      copyObjectRequest.setSSEAwsKeyManagementParams(
+          generateSSEAwsKeyParams()
+      );
+      break;
+    default:
+    }
+  }
+
+  /**
+   * Set the optional parameters when initiating the request (encryption,
+   * headers, storage, etc).
+   * @param req request to patch.
+   */
   protected void setOptionalMultipartUploadRequestParameters(
       InitiateMultipartUploadRequest req) {
     switch (serverSideEncryptionAlgorithm) {
@@ -2657,26 +2700,6 @@ InitiateMultipartUploadResult initiateMultipartUpload(
     return getAmazonS3Client().initiateMultipartUpload(request);
   }
 
-  protected void setOptionalCopyObjectRequestParameters(
-      CopyObjectRequest copyObjectRequest) throws IOException {
-    switch (serverSideEncryptionAlgorithm) {
-    case SSE_KMS:
-      copyObjectRequest.setSSEAwsKeyManagementParams(
-          generateSSEAwsKeyParams()
-      );
-      break;
-    case SSE_C:
-      if (isNotBlank(getServerSideEncryptionKey(bucket, getConf()))) {
-        //at the moment, only supports copy using the same key
-        SSECustomerKey customerKey = generateSSECustomerKey();
-        copyObjectRequest.setSourceSSECustomerKey(customerKey);
-        copyObjectRequest.setDestinationSSECustomerKey(customerKey);
-      }
-      break;
-    default:
-    }
-  }
-
   private void setOptionalPutRequestParameters(PutObjectRequest request) {
     switch (serverSideEncryptionAlgorithm) {
     case SSE_KMS:
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java
index f22af4963596b..32b540c03d4a4 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java
@@ -39,7 +39,6 @@
  */
 public abstract class AbstractS3ATestBase extends AbstractFSContractTestBase
     implements S3ATestConstants {
-
   protected static final Logger LOG =
       LoggerFactory.getLogger(AbstractS3ATestBase.class);
 
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractTestS3AEncryption.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractTestS3AEncryption.java
index 108f102c67f1d..9012e67d83162 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractTestS3AEncryption.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractTestS3AEncryption.java
@@ -21,15 +21,19 @@
 import com.amazonaws.services.s3.model.ObjectMetadata;
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.net.util.Base64;
+import java.io.IOException;
+
+import org.junit.Test;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.contract.ContractTestUtils;
-import org.junit.Test;
 
-import java.io.IOException;
 
 import static org.apache.hadoop.fs.contract.ContractTestUtils.*;
-import static org.apache.hadoop.fs.s3a.S3ATestUtils.*;
+import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
+import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionTestsDisabled;
 
 /**
  * Test whether or not encryption works by turning it on. Some checks
@@ -42,11 +46,24 @@ public abstract class AbstractTestS3AEncryption extends AbstractS3ATestBase {
   protected Configuration createConfiguration() {
     Configuration conf = super.createConfiguration();
     S3ATestUtils.disableFilesystemCaching(conf);
-    conf.set(Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM,
-        getSSEAlgorithm().getMethod());
+    patchConfigurationEncryptionSettings(conf);
     return conf;
   }
 
+  /**
+   * Patch the configuration's encryption settings;
+   * the base implementation sets the
+   * fs.s3a.server-side-encryption-algorithm value to
+   * be that of {@code getSSEAlgorithm()}.
+   * Called in {@code createConfiguration()}.
+   * @param conf configuration to patch.
+   */
+  protected void patchConfigurationEncryptionSettings(
+      final Configuration conf) {
+    conf.set(SERVER_SIDE_ENCRYPTION_ALGORITHM,
+        getSSEAlgorithm().getMethod());
+  }
+
   private static final int[] SIZES = {
       0, 1, 2, 3, 4, 5, 254, 255, 256, 257, 2 ^ 12 - 1
   };
@@ -66,10 +83,15 @@ public void testEncryptionOverRename() throws Throwable {
     S3AFileSystem fs = getFileSystem();
     writeDataset(fs, src, data, data.length, 1024 * 1024, true);
     ContractTestUtils.verifyFileContents(fs, src, data);
-    Path dest = path(src.getName() + "-copy");
-    fs.rename(src, dest);
-    ContractTestUtils.verifyFileContents(fs, dest, data);
-    assertEncrypted(dest);
+    // this file will be encrypted
+    assertEncrypted(src);
+
+    Path targetDir = path("target");
+    mkdirs(targetDir);
+    fs.rename(src, targetDir);
+    Path renamedFile = new Path(targetDir, src.getName());
+    ContractTestUtils.verifyFileContents(fs, renamedFile, data);
+    assertEncrypted(renamedFile);
   }
 
   protected void validateEncryptionForFilesize(int len) throws IOException {
@@ -95,40 +117,14 @@ protected String createFilename(String name) {
    * @throws IOException on a failure
    */
   protected void assertEncrypted(Path path) throws IOException {
-    ObjectMetadata md = getFileSystem().getObjectMetadata(path);
-    switch(getSSEAlgorithm()) {
-    case SSE_C:
-      assertEquals("AES256", md.getSSECustomerAlgorithm());
-      String md5Key = convertKeyToMd5();
-      assertEquals(md5Key, md.getSSECustomerKeyMd5());
-      break;
-    case SSE_KMS:
-      assertEquals("aws:kms", md.getSSEAlgorithm());
-      //S3 will return full arn of the key, so specify global arn in properties
-      assertEquals(this.getConfiguration().
-          getTrimmed(Constants.SERVER_SIDE_ENCRYPTION_KEY),
-          md.getSSEAwsKmsKeyId());
-      break;
-    default:
-      assertEquals("AES256", md.getSSEAlgorithm());
-    }
-  }
-
-  /**
-   * Decodes the SERVER_SIDE_ENCRYPTION_KEY from base64 into an AES key, then
-   * gets the md5 of it, then encodes it in base64 so it will match the version
-   * that AWS returns to us.
-   *
-   * @return md5'd base64 encoded representation of the server side encryption
-   * key
-   */
-  private String convertKeyToMd5() {
-    String base64Key = getConfiguration().getTrimmed(
-        Constants.SERVER_SIDE_ENCRYPTION_KEY
-    );
-    byte[] key = Base64.decodeBase64(base64Key);
-    byte[] md5 = DigestUtils.md5(key);
-    return Base64.encodeBase64String(md5).trim();
+    //S3 will return full arn of the key, so specify global arn in properties
+    String kmsKeyArn = this.getConfiguration().
+        getTrimmed(SERVER_SIDE_ENCRYPTION_KEY);
+    S3AEncryptionMethods algorithm = getSSEAlgorithm();
+    EncryptionTestUtils.assertEncrypted(getFileSystem(),
+        path,
+        algorithm,
+        kmsKeyArn);
   }
 
   protected abstract S3AEncryptionMethods getSSEAlgorithm();
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/EncryptionTestUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/EncryptionTestUtils.java
new file mode 100644
index 0000000000000..f9cfc0442e828
--- /dev/null
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/EncryptionTestUtils.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import java.io.IOException;
+
+import com.amazonaws.services.s3.model.ObjectMetadata;
+
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.commons.net.util.Base64;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+public final class EncryptionTestUtils {
+
+  /** Private constructor. */
+  private EncryptionTestUtils() {
+  }
+
+  public static final String AWS_KMS_SSE_ALGORITHM = "aws:kms";
+
+  public static final String SSE_C_ALGORITHM = "AES256";
+
+  /**
+   * Decodes the SERVER_SIDE_ENCRYPTION_KEY from base64 into an AES key, then
+   * gets the md5 of it, then encodes it in base64 so it will match the version
+   * that AWS returns to us.
+   * @param fs filesystem whose configuration holds the key.
+   * @return md5'd base64 encoded representation of the server side encryption
+   * key
+   */
+  public static String convertKeyToMd5(FileSystem fs) {
+    String base64Key = fs.getConf().getTrimmed(
+        SERVER_SIDE_ENCRYPTION_KEY
+    );
+    byte[] key = Base64.decodeBase64(base64Key);
+    byte[] md5 = DigestUtils.md5(key);
+    return Base64.encodeBase64String(md5).trim();
+  }
+
+  /**
+   * Assert that a path is encrypted with the right encryption settings.
+   * @param path file path.
+   * @param algorithm encryption algorithm.
+   * @param kmsKeyArn full kms key.
+   */
+  public static void assertEncrypted(S3AFileSystem fs,
+      final Path path,
+      final S3AEncryptionMethods algorithm,
+      final String kmsKeyArn)
+      throws IOException {
+    ObjectMetadata md = fs.getObjectMetadata(path);
+    String details = String.format(
+        "file %s with encryption algorithm %s and key %s",
+        path,
+        md.getSSEAlgorithm(),
+        md.getSSEAwsKmsKeyId());
+    switch(algorithm) {
+    case SSE_C:
+      assertNull("Metadata algorithm should have been null in "
+              + details,
+          md.getSSEAlgorithm());
+      assertEquals("Wrong SSE-C algorithm in "
+              + details,
+          SSE_C_ALGORITHM, md.getSSECustomerAlgorithm());
+      String md5Key = convertKeyToMd5(fs);
+      assertEquals("getSSECustomerKeyMd5() wrong in " + details,
+          md5Key, md.getSSECustomerKeyMd5());
+      break;
+    case SSE_KMS:
+      assertEquals("Wrong algorithm in " + details,
+          AWS_KMS_SSE_ALGORITHM, md.getSSEAlgorithm());
+      assertEquals("Wrong KMS key in " + details,
+          kmsKeyArn,
+          md.getSSEAwsKmsKeyId());
+      break;
+    default:
+      assertEquals("AES256", md.getSSEAlgorithm());
+    }
+  }
+
+}
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSDefaultKey.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSDefaultKey.java
index 8b68fcfb880bd..47a6e5ec562d0 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSDefaultKey.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSDefaultKey.java
@@ -51,7 +51,8 @@ protected S3AEncryptionMethods getSSEAlgorithm() {
   @Override
   protected void assertEncrypted(Path path) throws IOException {
     ObjectMetadata md = getFileSystem().getObjectMetadata(path);
-    assertEquals("aws:kms", md.getSSEAlgorithm());
+    assertEquals("SSE Algorithm", EncryptionTestUtils.AWS_KMS_SSE_ALGORITHM,
+        md.getSSEAlgorithm());
     assertThat(md.getSSEAwsKmsKeyId(), containsString("arn:aws:kms:"));
   }
 }
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionWithDefaultS3Settings.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionWithDefaultS3Settings.java
new file mode 100644
index 0000000000000..b474fef58f9f6
--- /dev/null
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionWithDefaultS3Settings.java
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import java.io.IOException;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+
+import static org.apache.hadoop.fs.contract.ContractTestUtils.dataset;
+import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
+import static org.apache.hadoop.fs.contract.ContractTestUtils.writeDataset;
+import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
+import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
+import static org.apache.hadoop.fs.s3a.S3AEncryptionMethods.SSE_KMS;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
+
+/**
+ * Concrete class that extends {@link AbstractTestS3AEncryption}
+ * and tests bucket-level encryption which has already been configured
+ * through the S3 console. This requires the SERVER_SIDE_ENCRYPTION_KEY
+ * to be set in auth-keys.xml for it to run; the value must match the
+ * KMS key set for the bucket.
+ * See HADOOP-16794.
+ */
+public class ITestS3AEncryptionWithDefaultS3Settings extends
+    AbstractTestS3AEncryption {
+
+  @Override
+  public void setup() throws Exception {
+    super.setup();
+    // get the KMS key for this test.
+    S3AFileSystem fs = getFileSystem();
+    Configuration c = fs.getConf();
+    String kmsKey = c.get(SERVER_SIDE_ENCRYPTION_KEY);
+    if (StringUtils.isBlank(kmsKey)) {
+      skip(SERVER_SIDE_ENCRYPTION_KEY + " is not set for " +
+          SSE_KMS.getMethod());
+    }
+  }
+
+  @Override
+  protected void patchConfigurationEncryptionSettings(
+      final Configuration conf) {
+    conf.set(SERVER_SIDE_ENCRYPTION_ALGORITHM,
+        getSSEAlgorithm().getMethod());
+  }
+
+  /**
+   * Setting this to NONE as we don't want to overwrite
+   * already configured encryption settings.
+   * @return the algorithm
+   */
+  @Override
+  protected S3AEncryptionMethods getSSEAlgorithm() {
+    return S3AEncryptionMethods.NONE;
+  }
+
+  /**
+   * The check here is that the object is encrypted
+   * and that the encryption key is the KMS key
+   * provided, not any default key.
+   * @param path path to check.
+   */
+  @Override
+  protected void assertEncrypted(Path path) throws IOException {
+    S3AFileSystem fs = getFileSystem();
+    Configuration c = fs.getConf();
+    String kmsKey = c.getTrimmed(SERVER_SIDE_ENCRYPTION_KEY);
+    EncryptionTestUtils.assertEncrypted(fs, path, SSE_KMS, kmsKey);
+  }
+
+  @Override
+  @Ignore
+  @Test
+  public void testEncryption() throws Throwable {
+  }
+
+  @Test
+  public void testEncryptionOverRename2() throws Throwable {
+    S3AFileSystem fs = getFileSystem();
+
+    // write the file with the unencrypted FS.
+    // this will pick up whatever defaults we have.
+    Path src = path(createFilename(1024));
+    byte[] data = dataset(1024, 'a', 'z');
+    writeDataset(fs, src, data, data.length, 1024 * 1024, true);
+    ContractTestUtils.verifyFileContents(fs, src, data);
+
+    // fs2 conf will always use SSE-KMS
+    Configuration fs2Conf = new Configuration(fs.getConf());
+    fs2Conf.set(SERVER_SIDE_ENCRYPTION_ALGORITHM,
+        S3AEncryptionMethods.SSE_KMS.getMethod());
+    try (FileSystem kmsFS = FileSystem.newInstance(fs.getUri(), fs2Conf)) {
+      Path targetDir = path("target");
+      kmsFS.mkdirs(targetDir);
+      ContractTestUtils.rename(kmsFS, src, targetDir);
+      Path renamedFile = new Path(targetDir, src.getName());
+      ContractTestUtils.verifyFileContents(fs, renamedFile, data);
+      String kmsKey = fs2Conf.getTrimmed(SERVER_SIDE_ENCRYPTION_KEY);
+      // we assert that the renamed file has picked up the KMS key of our FS
+      EncryptionTestUtils.assertEncrypted(fs, renamedFile, SSE_KMS, kmsKey);
+    }
+  }
+}
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java
index 88a19d574cbb6..315d1fe7285be 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java
@@ -457,6 +457,30 @@ public void test_050_readHugeFile() throws Throwable {
     logFSState();
   }
 
+  /**
+   * Test to verify the source file encryption key.
+   * @throws IOException on a failure.
+   */
+  @Test
+  public void test_090_verifyRenameSourceEncryption() throws IOException {
+    if (isEncrypted(getFileSystem())) {
+      assertEncrypted(getHugefile());
+    }
+  }
+
+  protected void assertEncrypted(Path hugeFile) throws IOException {
+    // Concrete subclasses will provide the implementation.
+  }
+
+  /**
+   * Checks if encryption is enabled for the file system.
+   * @param fileSystem filesystem to probe.
+   * @return true if the filesystem is encrypted.
+   */
+  protected boolean isEncrypted(S3AFileSystem fileSystem) {
+    return false;
+  }
+
   @Test
   public void test_100_renameHugeFile() throws Throwable {
     assumeHugeFileExists();
@@ -485,6 +509,20 @@ public void test_100_renameHugeFile() throws Throwable {
     bandwidth(timer2, size);
   }
 
+  /**
+   * Test to verify the target file encryption key.
+   * @throws IOException on a failure.
+   */
+  @Test
+  public void test_110_verifyRenameDestEncryption() throws IOException {
+    if (isEncrypted(getFileSystem())) {
+      /*
+       * Using hugeFile again as hugeFileRenamed is renamed back
+       * to hugeFile.
+       */
+      assertEncrypted(hugefile);
+    }
+  }
   /**
    * Cleanup: delete the files.
    */
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesEncryption.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesEncryption.java
new file mode 100644
index 0000000000000..9b63430139f10
--- /dev/null
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesEncryption.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a.scale;
+
+import java.io.IOException;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.s3a.Constants;
+import org.apache.hadoop.fs.s3a.EncryptionTestUtils;
+import org.apache.hadoop.fs.s3a.S3AFileSystem;
+
+import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
+import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
+import static org.apache.hadoop.fs.s3a.S3AEncryptionMethods.SSE_KMS;
+
+/**
+ * Class to test SSE_KMS encryption settings for huge files.
+ * Tests will only run if {@link Constants#SERVER_SIDE_ENCRYPTION_KEY}
+ * is set in the configuration. The testing bucket must be configured with
+ * this same key, else the tests may fail.
+ */
+public class ITestS3AHugeFilesEncryption extends AbstractSTestS3AHugeFiles {
+
+  @Override
+  public void setup() throws Exception {
+    Configuration c = new Configuration();
+    String kmsKey = c.get(SERVER_SIDE_ENCRYPTION_KEY);
+    if (StringUtils.isBlank(kmsKey)) {
+      skip(SERVER_SIDE_ENCRYPTION_KEY + " is not set for " +
+          SSE_KMS.getMethod());
+    }
+    super.setup();
+  }
+
+  @Override
+  protected String getBlockOutputBufferName() {
+    return Constants.FAST_UPLOAD_BUFFER_ARRAY;
+  }
+
+  /**
+   * @param fileSystem filesystem (not used here).
+   * @return true if {@link Constants#SERVER_SIDE_ENCRYPTION_KEY} is set
+   * in the config.
+   */
+  @Override
+  protected boolean isEncrypted(S3AFileSystem fileSystem) {
+    Configuration c = new Configuration();
+    return c.get(SERVER_SIDE_ENCRYPTION_KEY) != null;
+  }
+
+  @Override
+  protected void assertEncrypted(Path hugeFile) throws IOException {
+    Configuration c = new Configuration();
+    String kmsKey = c.get(SERVER_SIDE_ENCRYPTION_KEY);
+    EncryptionTestUtils.assertEncrypted(getFileSystem(), hugeFile,
+        SSE_KMS, kmsKey);
+  }
+}