@@ -785,6 +785,13 @@ private Constants() {
public static final String S3_ENCRYPTION_CONTEXT =
"fs.s3a.encryption.context";

/**
* Default S3-SSE encryption context.
* value:{@value}
*/
public static final String DEFAULT_S3_ENCRYPTION_CONTEXT =
"";

/**
* Client side encryption (CSE-CUSTOM) with custom cryptographic material manager class name.
* Custom keyring class name for CSE-KMS.
@@ -31,6 +31,8 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

import static org.apache.hadoop.fs.s3a.Constants.DEFAULT_S3_ENCRYPTION_CONTEXT;

/**
* Encryption options in a form which can be serialized or marshalled as a hadoop
* Writable.
@@ -54,7 +56,10 @@ public class EncryptionSecrets implements Writable, Serializable {

public static final int MAX_SECRET_LENGTH = 2048;

private static final long serialVersionUID = 1208329045511296375L;
/**
* Change this after any change to the payload: {@value}.
*/
private static final long serialVersionUID = 8834417969966697162L;
Contributor:

How did you generate this id? Is there a way to introduce a unit test to validate if new fields were added and the id should be updated?
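A minimal sketch of the kind of guard test being asked for, not part of this PR: it pins the set of non-static, non-transient fields of EncryptionSecrets via reflection, so adding a serialized field fails the test and reminds the author to regenerate serialVersionUID. The test class name, the expected field names and the org.apache.hadoop.fs.s3a.auth.delegation package are assumptions read off this diff.

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.Set;
import java.util.TreeSet;

import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;

import org.apache.hadoop.fs.s3a.auth.delegation.EncryptionSecrets;

public class TestEncryptionSecretsSerialForm {

  @Test
  public void testSerializedFieldSetUnchanged() {
    // collect every field that java serialization would include
    Set<String> serialized = new TreeSet<>();
    for (Field field : EncryptionSecrets.class.getDeclaredFields()) {
      int modifiers = field.getModifiers();
      if (!Modifier.isStatic(modifiers) && !Modifier.isTransient(modifiers)) {
        serialized.add(field.getName());
      }
    }
    // if this fails, the payload has changed: update serialVersionUID,
    // then update this expected set.
    Assertions.assertThat(serialized)
        .describedAs("serialized fields of EncryptionSecrets")
        .containsExactlyInAnyOrder(
            "encryptionAlgorithm", "encryptionKey", "encryptionContext");
  }
}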

Contributor Author:

IntelliJ has a helper.

  • Java serialization: you MUST update the value if the payload (the set of serialized fields: every non-static field not tagged as transient) changes. I think the consensus is you can generate any sufficiently random number and all is good. Key is: change it.
  • Hadoop Writable (which is how this stuff is actually marshalled in delegation tokens): you implement the read/write. This means we can be adaptive here in reading old versions too, which is what I'll do.

I don't worry about the Java serialization so much, as it'll only surface if people are trying to save delegation tokens in odd ways.
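To illustrate the second bullet, a hedged sketch of one way adaptive Writable marshalling can look; this is not the PR's actual implementation, and the VersionedSecrets class and its version constant are hypothetical.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

/**
 * Hypothetical example: marshal a version marker ahead of the secrets so
 * that readFields can adapt to payloads written before the encryption
 * context field existed (assuming those writers already emitted an older
 * version number).
 */
public class VersionedSecrets implements Writable {

  /** Payload version that first carries an encryption context. */
  private static final int VERSION_WITH_CONTEXT = 2;

  private int version = VERSION_WITH_CONTEXT;
  private String encryptionKey = "";
  private String encryptionContext = "";

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeInt(version);
    Text.writeString(out, encryptionKey);
    Text.writeString(out, encryptionContext);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    version = in.readInt();
    encryptionKey = Text.readString(in);
    // older payload versions carry no encryption context field
    encryptionContext = version >= VERSION_WITH_CONTEXT
        ? Text.readString(in)
        : "";
  }
}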


/**
* Encryption algorithm to use: must match one in
@@ -70,7 +75,7 @@ public class EncryptionSecrets implements Writable, Serializable {
/**
* Encryption context: base64-encoded UTF-8 string.
*/
private String encryptionContext = "";
private String encryptionContext = DEFAULT_S3_ENCRYPTION_CONTEXT;
Contributor:

encryptionAlgorithm and encryptionKey in the lines above are assigned to "" instead of a constant. I think we can create a default constant for them too in a future commit.
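For illustration only, a sketch of what those follow-up constants could look like in Constants; the names are assumptions modelled on DEFAULT_S3_ENCRYPTION_CONTEXT and are not part of this PR.

  /**
   * Hypothetical default S3 encryption algorithm ("" means none set).
   */
  public static final String DEFAULT_S3_ENCRYPTION_ALGORITHM = "";

  /**
   * Hypothetical default S3 encryption key ("" means none set).
   */
  public static final String DEFAULT_S3_ENCRYPTION_KEY = "";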


/**
* This field isn't serialized/marshalled; it is rebuilt from the
@@ -86,7 +91,24 @@ public EncryptionSecrets() {
}

/**
* Create a pair of secrets.
* Create a tuple of secrets. The encryption context is set to "".
* This constructor is used in external implementations of S3A delegation
* tokens, so MUST be retained even if there is no use in our own
* production code.
* @param encryptionAlgorithm algorithm enumeration.
* @param encryptionKey key/key reference.
* @throws IOException failure to initialize.
* @deprecated use {@link #EncryptionSecrets(S3AEncryptionMethods, String, String)}
* which takes an encryption context.
*/
public EncryptionSecrets(final S3AEncryptionMethods encryptionAlgorithm,
final String encryptionKey) throws IOException {
this(encryptionAlgorithm.getMethod(), encryptionKey,
DEFAULT_S3_ENCRYPTION_CONTEXT);
}

/**
* Create a 3-tuple of secrets.
* @param encryptionAlgorithm algorithm enumeration.
* @param encryptionKey key/key reference.
* @param encryptionContext base64-encoded string with the encryption context key-value pairs.
@@ -99,7 +121,7 @@ public EncryptionSecrets(final S3AEncryptionMethods encryptionAlgorithm,
}

/**
* Create a pair of secrets.
* Create a 3-tuple of secrets.
* @param encryptionAlgorithm algorithm name
* @param encryptionKey key/key reference.
* @param encryptionContext base64-encoded string with the encryption context key-value pairs.
@@ -31,6 +31,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.s3a.S3AUtils;

import static org.apache.hadoop.fs.s3a.Constants.DEFAULT_S3_ENCRYPTION_CONTEXT;
import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_CONTEXT;

/**
@@ -61,7 +62,7 @@ public static String getS3EncryptionContext(String bucket, Configuration conf)
}
if (encryptionContext == null) {
// no encryption context, return ""
return "";
return DEFAULT_S3_ENCRYPTION_CONTEXT;
}
return encryptionContext;
}
@@ -21,6 +21,7 @@
import java.net.URI;
import java.net.URISyntaxException;

import org.assertj.core.api.Assertions;
import software.amazon.awssdk.auth.credentials.AwsCredentials;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -78,13 +79,33 @@ public void testRoundTripNoSessionData() throws Throwable {

@Test
public void testRoundTripEncryptionData() throws Throwable {
final String context = "encryptionContext";
EncryptionSecrets secrets = new EncryptionSecrets(
S3AEncryptionMethods.SSE_KMS,
"key",
"encryptionContext");
context);
EncryptionSecrets result = S3ATestUtils.roundTrip(secrets,
new Configuration());
assertEquals(secrets, result, "round trip");
Assertions.assertThat(result.getEncryptionContext())
.describedAs("encryptionContext")
.isEqualTo(context);
}

@Test
public void testRoundTripEncryptionSecretsNoContext() throws Throwable {
EncryptionSecrets secrets = new EncryptionSecrets(
S3AEncryptionMethods.SSE_KMS,
"key");
EncryptionSecrets result = S3ATestUtils.roundTrip(secrets,
new Configuration());
assertEquals(secrets, result, "round trip");
// not equal to secrets with a context
Assertions.assertThat(result)
.isNotEqualTo(new EncryptionSecrets(
S3AEncryptionMethods.SSE_KMS,
"key",
"encryptionContext"));
}

@Test
@@ -56,6 +56,7 @@
import org.apache.hadoop.test.AbstractHadoopTestBase;

import static org.apache.hadoop.fs.s3a.Constants.DEFAULT_PART_UPLOAD_TIMEOUT;
import static org.apache.hadoop.fs.s3a.Constants.DEFAULT_S3_ENCRYPTION_CONTEXT;
import static org.apache.hadoop.fs.s3a.impl.PutObjectOptions.defaultOptions;
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
import static org.assertj.core.api.Assertions.assertThat;
@@ -89,7 +90,7 @@ public void testRequestFactoryWithEncryption() throws Throwable {
.withBucket("bucket")
.withEncryptionSecrets(
new EncryptionSecrets(S3AEncryptionMethods.SSE_KMS,
"kms:key", ""))
"kms:key", DEFAULT_S3_ENCRYPTION_CONTEXT))
.build();
createFactoryObjects(factory);
}
@@ -329,7 +330,7 @@ public void testCompleteMultipartUploadRequestWithChecksumAlgorithmAndSSEC() thr
.encodeToString(encryptionKey);
final String encryptionKeyMd5 = Md5Utils.md5AsBase64(encryptionKey);
final EncryptionSecrets encryptionSecrets = new EncryptionSecrets(S3AEncryptionMethods.SSE_C,
encryptionKeyBase64, null);
encryptionKeyBase64);
RequestFactory factory = RequestFactoryImpl.builder()
.withBucket("bucket")
.withChecksumAlgorithm(ChecksumAlgorithm.CRC32_C)