Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/website.yml
Original file line number Diff line number Diff line change
Expand Up @@ -56,4 +56,5 @@ jobs:
publish_dir: ./staging/hadoop-project
user_name: 'github-actions[bot]'
user_email: 'github-actions[bot]@users.noreply.github.com'
force_orphan: true

6 changes: 3 additions & 3 deletions LICENSE-binary
Original file line number Diff line number Diff line change
Expand Up @@ -482,9 +482,9 @@ com.microsoft.azure:azure-cosmosdb-gateway:2.4.5
com.microsoft.azure:azure-data-lake-store-sdk:2.3.3
com.microsoft.azure:azure-keyvault-core:1.0.0
com.microsoft.sqlserver:mssql-jdbc:6.2.1.jre7
org.bouncycastle:bcpkix-jdk18on:1.78.1
org.bouncycastle:bcprov-jdk18on:1.78.1
org.bouncycastle:bcutil-jdk18on:1.78.1
org.checkerframework:checker-qual:2.5.2
org.codehaus.mojo:animal-sniffer-annotations:1.21
org.jruby.jcodings:jcodings:1.0.13
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ Linux kernel 2.6+
- joda-time (version 2.9.9 recommended)
- httpClient (version 4.5.1 or later recommended)
- Jackson: jackson-core, jackson-databind, jackson-annotations (version 2.9.8 or later)
- bcprov-jdk18on (version 1.78.1 recommended)


#### Configure Properties
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -947,11 +947,7 @@ public void flush() throws IOException {
@Override
public void warmUpEncryptedKeys(String... keyNames)
    throws IOException {
  // ValueQueue#initializeQueuesForKeys now performs best-effort
  // initialization across all keys and throws IOException itself on
  // failure, so no ExecutionException-to-IOException translation is
  // needed here anymore.
  encKeyVersionQueue.initializeQueuesForKeys(keyNames);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -269,12 +269,24 @@ public ValueQueue(final int numValues, final float lowWaterMark, long expiry,
* Initializes the Value Queues for the provided keys by calling the
* fill Method with "numInitValues" values
* @param keyNames Array of key Names
* @throws ExecutionException executionException.
* @throws IOException if initialization fails for any provided keys
*/
public void initializeQueuesForKeys(String... keyNames)
throws ExecutionException {
public void initializeQueuesForKeys(String... keyNames) throws IOException {
int successfulInitializations = 0;
ExecutionException lastException = null;

for (String keyName : keyNames) {
keyQueues.get(keyName);
try {
keyQueues.get(keyName);
successfulInitializations++;
} catch (ExecutionException e) {
lastException = e;
}
}

if (keyNames.length > 0 && successfulInitializations != keyNames.length) {
throw new IOException(String.format("Failed to initialize %s queues for the provided keys.",
keyNames.length - successfulInitializations), lastException);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,19 +21,27 @@
import java.util.Arrays;
import java.util.HashSet;
import java.util.Queue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.TimeUnit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.lang3.reflect.FieldUtils;
import org.apache.hadoop.crypto.key.kms.ValueQueue;
import org.apache.hadoop.crypto.key.kms.ValueQueue.QueueRefiller;
import org.apache.hadoop.crypto.key.kms.ValueQueue.SyncGenerationPolicy;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.thirdparty.com.google.common.cache.LoadingCache;
import org.junit.Assert;
import org.junit.Test;

import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.spy;


public class TestValueQueue {
Logger LOG = LoggerFactory.getLogger(TestValueQueue.class);
Expand Down Expand Up @@ -111,6 +119,41 @@ public void testWarmUp() throws Exception {
vq.shutdown();
}

/**
 * Verifies that Queue is initialized (Warmed-up) for partial keys.
 */
@Test(timeout = 30000)
public void testPartialWarmUp() throws Exception {
  MockFiller filler = new MockFiller();
  // numValues=10 with lowWaterMark=0.5 => each successful warm-up fills 5
  // values per key (asserted below against the filler's recorded fills).
  ValueQueue<String> vq =
      new ValueQueue<>(10, 0.5f, 30000, 1,
          SyncGenerationPolicy.ALL, filler);

  // Grab the private "keyQueues" LoadingCache via reflection so it can be
  // wrapped in a Mockito spy.
  @SuppressWarnings("unchecked")
  LoadingCache<String, LinkedBlockingQueue<KeyProviderCryptoExtension.EncryptedKeyVersion>> kq =
      (LoadingCache<String, LinkedBlockingQueue<KeyProviderCryptoExtension.EncryptedKeyVersion>>)
          FieldUtils.getField(ValueQueue.class, "keyQueues", true).get(vq);

  // Spy fails only for "k2"; "k1" and "k3" initialize normally.
  LoadingCache<String, LinkedBlockingQueue<KeyProviderCryptoExtension.EncryptedKeyVersion>>
      kqSpy = spy(kq);
  doThrow(new ExecutionException(new Exception())).when(kqSpy).get("k2");
  FieldUtils.writeField(vq, "keyQueues", kqSpy, true);

  // A partial failure must surface as IOException, and "k2" must still have
  // been attempted exactly once (best-effort across all keys).
  Assert.assertThrows(IOException.class, () -> vq.initializeQueuesForKeys("k1", "k2", "k3"));
  verify(kqSpy, times(1)).get("k2");

  // Only the two successful keys produced fills (5 values each); the third
  // getTop() is null because no fill happened for the failing key.
  FillInfo[] fillInfos =
      {filler.getTop(), filler.getTop(), filler.getTop()};
  Assert.assertEquals(5, fillInfos[0].num);
  Assert.assertEquals(5, fillInfos[1].num);
  Assert.assertNull(fillInfos[2]);

  // Fill order is not guaranteed, so compare as a set of key names.
  Assert.assertEquals(new HashSet<>(Arrays.asList("k1", "k3")),
      new HashSet<>(Arrays.asList(fillInfos[0].key,
          fillInfos[1].key)));
  vq.shutdown();
}

/**
* Verifies that the refill task is executed after "checkInterval" if
* num values below "lowWatermark"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -104,13 +104,8 @@ SyncGenerationPolicy.LOW_WATERMARK, new EncryptedQueueRefiller()
}

@Override
public void warmUpEncryptedKeys(String... keyNames) throws
IOException {
try {
encKeyVersionQueue.initializeQueuesForKeys(keyNames);
} catch (ExecutionException e) {
throw new IOException(e);
}
public void warmUpEncryptedKeys(String... keyNames) throws IOException {
encKeyVersionQueue.initializeQueuesForKeys(keyNames);
}

@Override
Expand Down
5 changes: 2 additions & 3 deletions hadoop-project/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@
<guava.version>27.0-jre</guava.version>
<guice.version>4.2.3</guice.version>

<bouncycastle.version>1.78.1</bouncycastle.version>

<!-- Required for testing LDAP integration -->
<apacheds.version>2.0.0.AM26</apacheds.version>
Expand Down Expand Up @@ -170,7 +170,7 @@

<!-- Plugin versions and config -->
<maven-surefire-plugin.argLine>-Xmx2048m -XX:+HeapDumpOnOutOfMemoryError</maven-surefire-plugin.argLine>
<maven-surefire-plugin.version>3.0.0-M1</maven-surefire-plugin.version>
<maven-surefire-report-plugin.version>${maven-surefire-plugin.version}</maven-surefire-report-plugin.version>
<maven-failsafe-plugin.version>${maven-surefire-plugin.version}</maven-failsafe-plugin.version>

Expand Down Expand Up @@ -2450,7 +2450,6 @@
<DYLD_LIBRARY_PATH>${env.DYLD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib</DYLD_LIBRARY_PATH>
<MALLOC_ARENA_MAX>4</MALLOC_ARENA_MAX>
</environmentVariables>
<failIfNoSpecifiedTests>false</failIfNoSpecifiedTests>
<trimStackTrace>false</trimStackTrace>
<systemPropertyVariables>

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,9 +51,10 @@ public PathOutputCommitter createOutputCommitter(Path outputPath,
throw new PathCommitException(outputPath,
"Filesystem not supported by this committer");
}
LOG.info("Using Committer {} for {}",
LOG.info("Using Committer {} for {} created by {}",
outputCommitter,
outputPath);
outputPath,
this);
return outputCommitter;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -113,11 +113,14 @@ private AbstractS3ACommitterFactory chooseCommitterFactory(
// job/task configurations.
Configuration fsConf = fileSystem.getConf();

String name = fsConf.getTrimmed(FS_S3A_COMMITTER_NAME, COMMITTER_NAME_FILE);
String name = fsConf.getTrimmed(FS_S3A_COMMITTER_NAME, "");
LOG.debug("Committer from filesystems \"{}\"", name);

name = taskConf.getTrimmed(FS_S3A_COMMITTER_NAME, name);
LOG.debug("Committer option is {}", name);
LOG.debug("Committer option is \"{}\"", name);
switch (name) {
case COMMITTER_NAME_FILE:
case "":
factory = null;
break;
case COMMITTER_NAME_DIRECTORY:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,7 @@ protected void assertEncrypted(Path path) throws IOException {
@Test
public void testEncryptionFileAttributes() throws Exception {
describe("Test for correct encryption file attributes for SSE-KMS with user default setting.");
skipIfBucketNotKmsEncrypted();
Path path = path(createFilename(1024));
byte[] data = dataset(1024, 'a', 'z');
S3AFileSystem fs = getFileSystem();
Expand Down
Loading