diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneDeletedBlocksTable.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneDeletedBlocksTable.java
index 880a79938cf4..c4f8b66570b0 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneDeletedBlocksTable.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneDeletedBlocksTable.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.ozone.container.metadata;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.List;
+import java.util.stream.Collectors;
 import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
 import org.apache.hadoop.hdds.utils.db.BatchOperation;
 import org.apache.hadoop.hdds.utils.db.Table;
@@ -145,39 +145,13 @@ private static String unprefix(String key) {
 
   private static List<KeyValue<String, ChunkInfoList>> unprefix(
       List<KeyValue<String, ChunkInfoList>> kvs) {
-    List<KeyValue<String, ChunkInfoList>> processedKVs = new ArrayList<>();
-    kvs.forEach(kv -> processedKVs.add(new UnprefixedKeyValue(kv)));
-
-    return processedKVs;
+    return kvs.stream()
+        .map(kv -> Table.newKeyValue(unprefix(kv.getKey()), kv.getValue()))
+        .collect(Collectors.toList());
   }
 
   private static MetadataKeyFilters.KeyPrefixFilter getDeletedFilter() {
     return (new MetadataKeyFilters.KeyPrefixFilter())
         .addFilter(DELETED_KEY_PREFIX);
   }
-
-  /**
-   * {@link KeyValue} implementation that removes the deleted key prefix from
-   * an existing {@link KeyValue}.
-   */
-  private static class UnprefixedKeyValue implements KeyValue<String, ChunkInfoList> {
-
-    private final KeyValue<String, ChunkInfoList> prefixedKeyValue;
-
-    UnprefixedKeyValue(
-        KeyValue<String, ChunkInfoList> prefixedKeyValue) {
-      this.prefixedKeyValue = prefixedKeyValue;
-    }
-
-    @Override
-    public String getKey() throws IOException {
-      return unprefix(prefixedKeyValue.getKey());
-    }
-
-    @Override
-    public ChunkInfoList getValue() throws IOException {
-      return prefixedKeyValue.getValue();
-    }
-  }
 }
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreByteArrayIterator.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreByteArrayIterator.java
index 62303c8a3bb7..b821fd932a22 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreByteArrayIterator.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RDBStoreByteArrayIterator.java
@@ -40,7 +40,7 @@ byte[] key() {
   @Override
   Table.KeyValue<byte[], byte[]> getKeyValue() {
     final ManagedRocksIterator i = getRocksDBIterator();
-    return RawKeyValue.create(i.get().key(), i.get().value());
+    return Table.newKeyValue(i.get().key(), i.get().value());
   }
 
   @Override
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RawKeyValue.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RawKeyValue.java
deleted file mode 100644
index c7250617b6f9..000000000000
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/RawKeyValue.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hdds.utils.db;
-
-import java.util.Arrays;
-import org.apache.hadoop.hdds.utils.db.Table.KeyValue;
-
-/**
- * {@link KeyValue} for a raw type.
- *
- * @param <RAW> The raw type.
- */
-public abstract class RawKeyValue<RAW> implements KeyValue<RAW, RAW> {
-
-  private final RAW key;
-  private final RAW value;
-
-  /**
-   * Create a KeyValue pair.
-   *
-   * @param key - Key Bytes
-   * @param value - Value bytes
-   * @return KeyValue object.
-   */
-  public static ByteArray create(byte[] key, byte[] value) {
-    return new ByteArray(key, value);
-  }
-
-  /** Implement {@link RawKeyValue} with byte[]. */
-  public static final class ByteArray extends RawKeyValue<byte[]> {
-    static byte[] copy(byte[] bytes) {
-      return Arrays.copyOf(bytes, bytes.length);
-    }
-
-    private ByteArray(byte[] key, byte[] value) {
-      super(key, value);
-    }
-
-    @Override
-    public byte[] getKey() {
-      return copy(super.getKey());
-    }
-
-    @Override
-    public byte[] getValue() {
-      return copy(super.getValue());
-    }
-  }
-
-  private RawKeyValue(RAW key, RAW value) {
-    this.key = key;
-    this.value = value;
-  }
-
-  /**
-   * Return key.
-   */
-  @Override
-  public RAW getKey() {
-    return key;
-  }
-
-  /**
-   * Return value.
-   */
-  @Override
-  public RAW getValue() {
-    return value;
-  }
-}
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java
index 9552f327e2e8..5097cb0efc04 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/Table.java
@@ -325,94 +325,58 @@ void deleteBatchWithPrefix(BatchOperation batch, KEY prefix)
 
   /**
    * Class used to represent the key and value pair of a db entry.
   */
-  interface KeyValue<KEY, VALUE> {
-
-    KEY getKey() throws IOException;
+  final class KeyValue<K, V> {
+    private final K key;
+    private final V value;
+    private final int rawSize;
+
+    private KeyValue(K key, V value, int rawSize) {
+      this.key = key;
+      this.value = value;
+      this.rawSize = rawSize;
+    }
 
-    VALUE getValue() throws IOException;
+    public K getKey() {
+      return key;
+    }
 
-    default int getRawSize() throws IOException {
-      return 0;
+    public V getValue() {
+      return value;
     }
-  }
 
-  static <K, V> KeyValue<K, V> newKeyValue(K key, V value) {
-    return new KeyValue<K, V>() {
-      @Override
-      public K getKey() {
-        return key;
-      }
+    public int getRawSize() {
+      return rawSize;
+    }
 
-      @Override
-      public V getValue() {
-        return value;
-      }
+    @Override
+    public String toString() {
+      return "(key=" + key + ", value=" + value + ")";
+    }
 
-      @Override
-      public String toString() {
-        return "(key=" + key + ", value=" + value + ")";
+    @Override
+    public boolean equals(Object obj) {
+      if (this == obj) {
+        return true;
+      } else if (!(obj instanceof KeyValue)) {
+        return false;
       }
+      final KeyValue<?, ?> that = (KeyValue<?, ?>) obj;
+      return this.getKey().equals(that.getKey())
+          && this.getValue().equals(that.getValue());
+    }
 
-      @Override
-      public boolean equals(Object obj) {
-        if (!(obj instanceof KeyValue)) {
-          return false;
-        }
-        KeyValue<?, ?> kv = (KeyValue<?, ?>) obj;
-        try {
-          return getKey().equals(kv.getKey()) && getValue().equals(kv.getValue());
-        } catch (IOException e) {
-          throw new RuntimeException(e);
-        }
-      }
+    @Override
+    public int hashCode() {
+      return Objects.hash(getKey(), getValue());
+    }
+  }
 
-      @Override
-      public int hashCode() {
-        return Objects.hash(getKey(), getValue());
-      }
-    };
+  static <K, V> KeyValue<K, V> newKeyValue(K key, V value) {
+    return newKeyValue(key, value, 0);
   }
 
   static <K, V> KeyValue<K, V> newKeyValue(K key, V value, int rawSize) {
-    return new KeyValue<K, V>() {
-      @Override
-      public K getKey() {
-        return key;
-      }
-
-      @Override
-      public V getValue() {
-        return value;
-      }
-
-      @Override
-      public int getRawSize() throws IOException {
-        return rawSize;
-      }
-
-      @Override
-      public String toString() {
-        return "(key=" + key + ", value=" + value + ")";
-      }
-
-      @Override
-      public boolean equals(Object obj) {
-        if (!(obj instanceof KeyValue)) {
-          return false;
-        }
-        KeyValue<?, ?> kv = (KeyValue<?, ?>) obj;
-        try {
-          return getKey().equals(kv.getKey()) && getValue().equals(kv.getValue());
-        } catch (IOException e) {
-          throw new RuntimeException(e);
-        }
-      }
-
-      @Override
-      public int hashCode() {
-        return Objects.hash(getKey(), getValue());
-      }
-    };
+    return new KeyValue<>(key, value, rawSize);
   }
 
   /** A {@link TableIterator} to iterate {@link KeyValue}s. */
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
index dcf482aad2ad..c3c39352115f 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
@@ -123,11 +123,11 @@ private byte[] encodeValue(VALUE value) throws IOException {
     return value == null ? null : valueCodec.toPersistedFormat(value);
   }
 
-  private KEY decodeKey(byte[] key) throws IOException {
+  private KEY decodeKey(byte[] key) throws CodecException {
     return key == null ? null : keyCodec.fromPersistedFormat(key);
   }
 
-  private VALUE decodeValue(byte[] value) throws IOException {
+  private VALUE decodeValue(byte[] value) throws CodecException {
     return value == null ? null : valueCodec.fromPersistedFormat(value);
   }
 
@@ -465,7 +465,7 @@ public TableCacheMetrics createCacheMetrics() {
   }
 
   @Override
-  public List<TypedKeyValue> getRangeKVs(
+  public List<KeyValue<KEY, VALUE>> getRangeKVs(
       KEY startKey, int count, KEY prefix,
       MetadataKeyFilters.MetadataKeyFilter... filters)
       throws IOException, IllegalArgumentException {
@@ -477,15 +477,11 @@ public List<TypedKeyValue> getRangeKVs(
 
     List<KeyValue<byte[], byte[]>> rangeKVBytes =
         rawTable.getRangeKVs(startKeyBytes, count, prefixBytes, filters);
-
-    List<TypedKeyValue> rangeKVs = new ArrayList<>();
-    rangeKVBytes.forEach(byteKV -> rangeKVs.add(new TypedKeyValue(byteKV)));
-
-    return rangeKVs;
+    return convert(rangeKVBytes);
   }
 
   @Override
-  public List<TypedKeyValue> getSequentialRangeKVs(
+  public List<KeyValue<KEY, VALUE>> getSequentialRangeKVs(
       KEY startKey, int count, KEY prefix,
       MetadataKeyFilters.MetadataKeyFilter... filters)
       throws IOException, IllegalArgumentException {
@@ -498,10 +494,15 @@ public List<TypedKeyValue> getSequentialRangeKVs(
 
     List<KeyValue<byte[], byte[]>> rangeKVBytes =
         rawTable.getSequentialRangeKVs(startKeyBytes, count,
            prefixBytes, filters);
+    return convert(rangeKVBytes);
+  }
 
-    List<TypedKeyValue> rangeKVs = new ArrayList<>();
-    rangeKVBytes.forEach(byteKV -> rangeKVs.add(new TypedKeyValue(byteKV)));
-
+  private List<KeyValue<KEY, VALUE>> convert(List<KeyValue<byte[], byte[]>> rangeKVBytes)
+      throws CodecException {
+    final List<KeyValue<KEY, VALUE>> rangeKVs = new ArrayList<>();
+    for (KeyValue<byte[], byte[]> kv : rangeKVBytes) {
+      rangeKVs.add(Table.newKeyValue(decodeKey(kv.getKey()), decodeValue(kv.getValue())));
+    }
     return rangeKVs;
   }
 
@@ -532,28 +533,6 @@ TableCache<KEY, VALUE> getCache() {
     return cache;
   }
 
-  /**
-   * Key value implementation for strongly typed tables.
-   */
-  public final class TypedKeyValue implements KeyValue<KEY, VALUE> {
-
-    private final KeyValue<byte[], byte[]> rawKeyValue;
-
-    private TypedKeyValue(KeyValue<byte[], byte[]> rawKeyValue) {
-      this.rawKeyValue = rawKeyValue;
-    }
-
-    @Override
-    public KEY getKey() throws IOException {
-      return decodeKey(rawKeyValue.getKey());
-    }
-
-    @Override
-    public VALUE getValue() throws IOException {
-      return decodeValue(rawKeyValue.getValue());
-    }
-  }
-
   RawIterator<CodecBuffer> newCodecBufferTableIterator(
       TableIterator<CodecBuffer, KeyValue<CodecBuffer, CodecBuffer>> i) {
     return new RawIterator<CodecBuffer>(i) {
@@ -574,8 +553,7 @@ public CodecBuffer get() {
       }
 
       @Override
-      KeyValue<KEY, VALUE> convert(KeyValue<CodecBuffer, CodecBuffer> raw)
-          throws IOException {
+      KeyValue<KEY, VALUE> convert(KeyValue<CodecBuffer, CodecBuffer> raw) throws CodecException {
         final int rawSize = raw.getValue().readableBytes();
         final KEY key = keyCodec.fromCodecBuffer(raw.getKey());
         final VALUE value = valueCodec.fromCodecBuffer(raw.getValue());
@@ -600,8 +578,8 @@ AutoCloseSupplier<byte[]> convert(KEY key) throws IOException {
       }
 
      @Override
-      KeyValue<KEY, VALUE> convert(KeyValue<byte[], byte[]> raw) {
-        return new TypedKeyValue(raw);
+      KeyValue<KEY, VALUE> convert(KeyValue<byte[], byte[]> raw) throws CodecException {
+        return Table.newKeyValue(decodeKey(raw.getKey()), decodeValue(raw.getValue()));
       }
     }
 
@@ -625,8 +603,7 @@ abstract class RawIterator<RAW>
      * Covert the given {@link Table.KeyValue}
      * from ({@link RAW}, {@link RAW}) to ({@link KEY}, {@link VALUE}).
      */
-    abstract KeyValue<KEY, VALUE> convert(KeyValue<RAW, RAW> raw)
-        throws IOException;
+    abstract KeyValue<KEY, VALUE> convert(KeyValue<RAW, RAW> raw) throws CodecException;
 
     @Override
     public void seekToFirst() {
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreByteArrayIterator.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreByteArrayIterator.java
index 7a11f55720cb..0054dde422d7 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreByteArrayIterator.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreByteArrayIterator.java
@@ -95,8 +95,7 @@ public void testForeachRemainingCallsConsumerWithAllElements() {
     RDBStoreByteArrayIterator iter = newIterator();
     iter.forEachRemaining(consumerStub);
 
-    ArgumentCaptor<RawKeyValue.ByteArray> capture =
-        forClass(RawKeyValue.ByteArray.class);
+    final ArgumentCaptor<Table.KeyValue<byte[], byte[]>> capture = forClass(Table.KeyValue.class);
     verify(consumerStub, times(3)).accept(capture.capture());
     assertArrayEquals(
         new byte[]{0x00}, capture.getAllValues().get(0).getKey());
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreCodecBufferIterator.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreCodecBufferIterator.java
index 05a4c946394e..500c465a4dde 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreCodecBufferIterator.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/db/TestRDBStoreCodecBufferIterator.java
@@ -31,7 +31,6 @@
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
-import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
@@ -116,14 +115,8 @@ public void testForEachRemaining() throws Exception {
     List<Table.KeyValue<byte[], byte[]>> remaining = new ArrayList<>();
 
     try (RDBStoreCodecBufferIterator i = newIterator()) {
-      i.forEachRemaining(kv -> {
-        try {
-          remaining.add(RawKeyValue.create(
-              kv.getKey().getArray(), kv.getValue().getArray()));
-        } catch (IOException e) {
-          throw new RuntimeException(e);
-        }
-      });
+      i.forEachRemaining(kv ->
+          remaining.add(Table.newKeyValue(kv.getKey().getArray(), kv.getValue().getArray())));
 
       System.out.println("remaining: " + remaining);
       assertArrayEquals(new byte[]{0x00}, remaining.get(0).getKey());
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLogStateManagerImpl.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLogStateManagerImpl.java
index 50ce31293306..f198a425e7ea 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLogStateManagerImpl.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/DeletedBlockLogStateManagerImpl.java
@@ -84,12 +84,7 @@ private void findNext() {
       while (iter.hasNext()) {
         TypedTable.KeyValue<Long, DeletedBlocksTransaction> next = iter
             .next();
-        long txID;
-        try {
-          txID = next.getKey();
-        } catch (IOException e) {
-          throw new IllegalStateException("");
-        }
+        final long txID = next.getKey();
 
         if ((deletingTxIDs == null || !deletingTxIDs.contains(txID)) && (
             skippingRetryTxIDs == null || !skippingRetryTxIDs
@@ -146,7 +141,7 @@ public TypedTable.KeyValue<Long, DeletedBlocksTransaction> seek(
       }
 
      @Override
-      public void removeFromDB() throws IOException {
+      public void removeFromDB() {
        throw new UnsupportedOperationException("read-only");
      }
    };
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMCertStore.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMCertStore.java
index da5597bf41e6..b3751ab35585 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMCertStore.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMCertStore.java
@@ -28,6 +28,7 @@
 import java.util.List;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
+import java.util.stream.Collectors;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeType;
 import org.apache.hadoop.hdds.protocol.proto.SCMRatisProtocol;
 import org.apache.hadoop.hdds.scm.ha.SCMRatisServer;
@@ -160,24 +161,15 @@ public X509Certificate getCertificateByID(BigInteger serialID)
   public List<X509Certificate> listCertificate(NodeType role,
       BigInteger startSerialID, int count) throws IOException {
-    List<X509Certificate> results = new ArrayList<>();
-    String errorMessage = "Fail to list certificate from SCM metadata store";
     Preconditions.checkNotNull(startSerialID);
 
     if (startSerialID.longValue() == 0) {
       startSerialID = null;
     }
 
-    for (Table.KeyValue<BigInteger, X509Certificate> kv :
-        getValidCertTableList(role, startSerialID, count)) {
-      try {
-        X509Certificate cert = kv.getValue();
-        results.add(cert);
-      } catch (IOException e) {
-        LOG.error(errorMessage, e);
-        throw new SCMSecurityException(errorMessage);
-      }
-    }
-    return results;
+    return getValidCertTableList(role, startSerialID, count).stream()
+        .map(Table.KeyValue::getValue)
+        .collect(Collectors.toList());
   }
 
   private List<Table.KeyValue<BigInteger, X509Certificate>>
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/KeyDeletingService.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/KeyDeletingService.java
index c7078758143b..d875e95e8517 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/KeyDeletingService.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/KeyDeletingService.java
@@ -46,6 +46,7 @@
 import org.apache.hadoop.hdds.utils.BackgroundTaskQueue;
 import org.apache.hadoop.hdds.utils.BackgroundTaskResult;
 import org.apache.hadoop.hdds.utils.BackgroundTaskResult.EmptyTaskResult;
+import org.apache.hadoop.hdds.utils.db.Table;
 import org.apache.hadoop.ozone.ClientVersion;
 import org.apache.hadoop.ozone.common.BlockGroup;
 import org.apache.hadoop.ozone.common.DeleteBlockGroupResult;
@@ -358,13 +359,8 @@ private void processDeletedKeysForStore(SnapshotInfo currentSnapshotInfo, KeyMan
         keyManager, lock)) {
       List<String> renamedTableEntries =
           keyManager.getRenamesKeyEntries(volume, bucket, null, renameEntryFilter, remainNum).stream()
-              .map(entry -> {
-                try {
-                  return entry.getKey();
-                } catch (IOException e) {
-                  throw new UncheckedIOException(e);
-                }
-              }).collect(Collectors.toList());
+              .map(Table.KeyValue::getKey)
+              .collect(Collectors.toList());
       remainNum -= renamedTableEntries.size();
 
       // Get pending keys that can be deleted
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/QuotaRepairTask.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/QuotaRepairTask.java
index 99cebe9a6351..1204ce2ecc22 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/QuotaRepairTask.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/QuotaRepairTask.java
@@ -399,22 +399,18 @@ private static <VALUE> void extractCount(
       Table.KeyValue<String, VALUE> kv,
       Map<String, CountPair> prefixUsageMap,
       boolean haveValue) {
-    try {
-      String prefix = getVolumeBucketPrefix(kv.getKey());
-      CountPair usage = prefixUsageMap.get(prefix);
-      if (null == usage) {
-        return;
-      }
-      usage.incrNamespace(1L);
-      // avoid decode of value
-      if (haveValue) {
-        VALUE value = kv.getValue();
-        if (value instanceof OmKeyInfo) {
-          usage.incrSpace(((OmKeyInfo) value).getReplicatedSize());
-        }
+    String prefix = getVolumeBucketPrefix(kv.getKey());
+    CountPair usage = prefixUsageMap.get(prefix);
+    if (null == usage) {
+      return;
+    }
+    usage.incrNamespace(1L);
+    // avoid decode of value
+    if (haveValue) {
+      VALUE value = kv.getValue();
+      if (value instanceof OmKeyInfo) {
+        usage.incrSpace(((OmKeyInfo) value).getReplicatedSize());
       }
-    } catch (IOException ex) {
-      throw new UncheckedIOException(ex);
     }
   }
diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/TestKeyManagerImpl.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/TestKeyManagerImpl.java
index 645e561a206e..d021cc752507 100644
--- a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/TestKeyManagerImpl.java
+++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/TestKeyManagerImpl.java
@@ -134,15 +134,11 @@ public void testGetDeletedKeyEntries(int numberOfVolumes, int numberOfBucketsPer
         volumeNamePrefix, bucketNamePrefix, keyPrefix, volumeNumber, bucketNumber,
         startVolumeNumber, startBucketNumber, startKeyNumber, filter, numberOfEntries).stream()
         .map(kv -> {
-          try {
-            String key = kv.getKey();
-            RepeatedOmKeyInfo value = kv.getValue();
-            List<OmKeyInfo> omKeyInfos = Collections.singletonList(Mockito.mock(OmKeyInfo.class));
-            when(value.cloneOmKeyInfoList()).thenReturn(omKeyInfos);
-            return Table.newKeyValue(key, omKeyInfos);
-          } catch (IOException e) {
-            throw new RuntimeException(e);
-          }
+          String key = kv.getKey();
+          RepeatedOmKeyInfo value = kv.getValue();
+          List<OmKeyInfo> omKeyInfos = Collections.singletonList(Mockito.mock(OmKeyInfo.class));
+          when(value.cloneOmKeyInfoList()).thenReturn(omKeyInfos);
+          return Table.newKeyValue(key, omKeyInfos);
         }).collect(Collectors.toList());
     String volumeName = volumeNumber == null ? null : (String.format("%s%010d", volumeNamePrefix, volumeNumber));
     String bucketName = bucketNumber == null ? null : (String.format("%s%010d", bucketNamePrefix, bucketNumber));
diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/response/snapshot/TestOMSnapshotMoveTableKeysResponse.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/response/snapshot/TestOMSnapshotMoveTableKeysResponse.java
index aabacd8c503a..3130c42df305 100644
--- a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/response/snapshot/TestOMSnapshotMoveTableKeysResponse.java
+++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/response/snapshot/TestOMSnapshotMoveTableKeysResponse.java
@@ -118,31 +118,19 @@ public void testMoveTableKeysToNextSnapshot(boolean nextSnapshotExists) throws E
     Map<String, String> renameEntries = new HashMap<>();
     snapshot.getMetadataManager().getDeletedTable().iterator()
         .forEachRemaining(entry -> {
-          try {
-            deletedTable.add(OzoneManagerProtocolProtos.SnapshotMoveKeyInfos.newBuilder().setKey(entry.getKey())
-                .addAllKeyInfos(entry.getValue().getOmKeyInfoList().stream().map(omKeyInfo -> omKeyInfo.getProtobuf(
-                    ClientVersion.CURRENT_VERSION)).collect(Collectors.toList())).build());
-          } catch (IOException e) {
-            throw new RuntimeException(e);
-          }
+          deletedTable.add(OzoneManagerProtocolProtos.SnapshotMoveKeyInfos.newBuilder().setKey(entry.getKey())
+              .addAllKeyInfos(entry.getValue().getOmKeyInfoList().stream().map(omKeyInfo -> omKeyInfo.getProtobuf(
+                  ClientVersion.CURRENT_VERSION)).collect(Collectors.toList())).build());
         });
     snapshot.getMetadataManager().getDeletedDirTable().iterator()
         .forEachRemaining(entry -> {
-          try {
-            deletedDirTable.add(OzoneManagerProtocolProtos.SnapshotMoveKeyInfos.newBuilder().setKey(entry.getKey())
-                .addKeyInfos(entry.getValue().getProtobuf(ClientVersion.CURRENT_VERSION)).build());
-          } catch (IOException e) {
-            throw new RuntimeException(e);
-          }
+          deletedDirTable.add(OzoneManagerProtocolProtos.SnapshotMoveKeyInfos.newBuilder().setKey(entry.getKey())
+              .addKeyInfos(entry.getValue().getProtobuf(ClientVersion.CURRENT_VERSION)).build());
         });
     snapshot.getMetadataManager().getSnapshotRenamedTable().iterator().forEachRemaining(entry -> {
-      try {
-        renamedTable.add(HddsProtos.KeyValue.newBuilder().setKey(entry.getKey()).setValue(entry.getValue()).build());
-        renameEntries.put(entry.getKey(), entry.getValue());
-      } catch (IOException e) {
-        throw new RuntimeException(e);
-      }
+      renamedTable.add(HddsProtos.KeyValue.newBuilder().setKey(entry.getKey()).setValue(entry.getValue()).build());
+      renameEntries.put(entry.getKey(), entry.getValue());
     });
     OMSnapshotMoveTableKeysResponse response = new OMSnapshotMoveTableKeysResponse(
         OzoneManagerProtocolProtos.OMResponse.newBuilder().setStatus(OzoneManagerProtocolProtos.Status.OK)
@@ -160,20 +148,16 @@ public void testMoveTableKeysToNextSnapshot(boolean nextSnapshotExists) throws E
     AtomicInteger count = new AtomicInteger();
     nextMetadataManager.getDeletedTable().iterator().forEachRemaining(entry -> {
       count.getAndIncrement();
-      try {
-        int maxCount = count.get() >= 6 && count.get() <= 8 ? 20 : 10;
-        Assertions.assertEquals(maxCount, entry.getValue().getOmKeyInfoList().size());
-        List<Long> versions = entry.getValue().getOmKeyInfoList().stream().map(OmKeyInfo::getKeyLocationVersions)
-            .map(omKeyInfo -> omKeyInfo.get(0).getVersion()).collect(Collectors.toList());
-        List<Long> expectedVersions = new ArrayList<>();
-        if (maxCount == 20) {
-          expectedVersions.addAll(LongStream.range(10, 20).boxed().collect(Collectors.toList()));
-        }
-        expectedVersions.addAll(LongStream.range(0, 10).boxed().collect(Collectors.toList()));
-        Assertions.assertEquals(expectedVersions, versions);
-      } catch (IOException e) {
-        throw new RuntimeException(e);
+      int maxCount = count.get() >= 6 && count.get() <= 8 ? 20 : 10;
+      Assertions.assertEquals(maxCount, entry.getValue().getOmKeyInfoList().size());
+      List<Long> versions = entry.getValue().getOmKeyInfoList().stream().map(OmKeyInfo::getKeyLocationVersions)
+          .map(omKeyInfo -> omKeyInfo.get(0).getVersion()).collect(Collectors.toList());
+      List<Long> expectedVersions = new ArrayList<>();
+      if (maxCount == 20) {
+        expectedVersions.addAll(LongStream.range(10, 20).boxed().collect(Collectors.toList()));
       }
+      expectedVersions.addAll(LongStream.range(0, 10).boxed().collect(Collectors.toList()));
+      Assertions.assertEquals(expectedVersions, versions);
     });
     Assertions.assertEquals(15, count.get());
     count.set(0);
@@ -182,12 +166,8 @@ public void testMoveTableKeysToNextSnapshot(boolean nextSnapshotExists) throws E
     Assertions.assertEquals(15, count.get());
     count.set(0);
     nextMetadataManager.getSnapshotRenamedTable().iterator().forEachRemaining(entry -> {
-      try {
-        String expectedValue = renameEntries.getOrDefault(entry.getKey(), entry.getValue());
-        Assertions.assertEquals(expectedValue, entry.getValue());
-      } catch (IOException e) {
-        throw new UncheckedIOException(e);
-      }
+      String expectedValue = renameEntries.getOrDefault(entry.getKey(), entry.getValue());
+      Assertions.assertEquals(expectedValue, entry.getValue());
       count.getAndIncrement();
     });
     Assertions.assertEquals(15, count.get());
diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/service/TestKeyDeletingService.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/service/TestKeyDeletingService.java
index 3c32a70e047c..b3178580b5c9 100644
--- a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/service/TestKeyDeletingService.java
+++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/service/TestKeyDeletingService.java
@@ -486,12 +486,8 @@ public void testRenamedKeyReclaimation(boolean testForSnapshot)
       assertTableRowCount(snapshotRenamedTable, initialRenamedCount + 1, metadataManager);
       try (TableIterator<String, ? extends Table.KeyValue<String, String>> itr = snapshotRenamedTable.iterator()) {
         itr.forEachRemaining(entry -> {
-          try {
-            String[] val = metadataManager.splitRenameKey(entry.getKey());
-            Assertions.assertEquals(Long.valueOf(val[2]), keyInfo.getObjectID());
-          } catch (IOException e) {
-            throw new UncheckedIOException(e);
-          }
+          String[] val = metadataManager.splitRenameKey(entry.getKey());
+          Assertions.assertEquals(Long.valueOf(val[2]), keyInfo.getObjectID());
         });
       }
     } finally {
diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/snapshot/TestFSODirectoryPathResolver.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/snapshot/TestFSODirectoryPathResolver.java
index 6c3e5fef1552..4fbee30ab02e 100644
--- a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/snapshot/TestFSODirectoryPathResolver.java
+++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/snapshot/TestFSODirectoryPathResolver.java
@@ -57,19 +57,8 @@ private Table<String, OmDirectoryInfo> getMockedDirectoryInfoTable(
     Iterator<Table.KeyValue<String, OmDirectoryInfo>> iterator = dirMap
         .getOrDefault(dirId, Collections.emptyList()).stream()
-        .map(children -> new Table.KeyValue<String, OmDirectoryInfo>() {
-          @Override
-          public String getKey() {
-            return prefix + children + OM_KEY_PREFIX + "dir" + children;
-          }
-
-          @Override
-          public OmDirectoryInfo getValue() {
-            return OmDirectoryInfo.newBuilder()
-                .setName("dir" + children).setObjectID(children)
-                .build();
-          }
-        })
+        .map(children -> Table.newKeyValue(prefix + children + OM_KEY_PREFIX + "dir" + children,
+            OmDirectoryInfo.newBuilder().setName("dir" + children).setObjectID(children).build()))
         .iterator();
     return new TableIterator<String, Table.KeyValue<String, OmDirectoryInfo>>() {
diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestEndpoints.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestEndpoints.java
index e2d3775f369a..d5adaa0aa266 100644
--- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestEndpoints.java
+++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestEndpoints.java
@@ -97,6 +97,7 @@
 import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol;
 import org.apache.hadoop.hdds.scm.server.OzoneStorageContainerManager;
 import org.apache.hadoop.hdds.upgrade.HDDSLayoutVersionManager;
+import org.apache.hadoop.hdds.utils.db.Table;
 import org.apache.hadoop.hdds.utils.db.TypedTable;
 import org.apache.hadoop.hdfs.web.URLConnectionFactory;
 import org.apache.hadoop.ozone.OzoneAcl;
@@ -833,10 +834,8 @@ public void testGetFileCounts() throws Exception {
         .TypedTableIterator.class);
     TypedTable.TypedTableIterator mockKeyIterFso = mock(TypedTable
         .TypedTableIterator.class);
-    TypedTable.TypedKeyValue mockKeyValueLegacy = mock(
-        TypedTable.TypedKeyValue.class);
-    TypedTable.TypedKeyValue mockKeyValueFso = mock(
-        TypedTable.TypedKeyValue.class);
+    final Table.KeyValue mockKeyValueLegacy = mock(Table.KeyValue.class);
+    final Table.KeyValue mockKeyValueFso = mock(Table.KeyValue.class);
     when(keyTableLegacy.iterator()).thenReturn(mockKeyIterLegacy);
     when(keyTableFso.iterator()).thenReturn(mockKeyIterFso);
diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/tasks/TestFileSizeCountTask.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/tasks/TestFileSizeCountTask.java
index 1eaf85499e4e..5e7af1835828 100644
--- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/tasks/TestFileSizeCountTask.java
+++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/tasks/TestFileSizeCountTask.java
@@ -37,6 +37,7 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
+import org.apache.hadoop.hdds.utils.db.Table;
 import org.apache.hadoop.hdds.utils.db.TypedTable;
 import org.apache.hadoop.ozone.om.OMMetadataManager;
 import org.apache.hadoop.ozone.om.OmMetadataManagerImpl;
@@ -112,7 +113,7 @@ public void testReprocess() throws IOException {
     when(keyTableOBS.iterator()).thenReturn(mockIterOBS);
     // Simulate three keys then end.
     when(mockIterOBS.hasNext()).thenReturn(true, true, true, false);
-    TypedTable.TypedKeyValue mockKeyValueOBS = mock(TypedTable.TypedKeyValue.class);
+    final Table.KeyValue mockKeyValueOBS = mock(Table.KeyValue.class);
     when(mockIterOBS.next()).thenReturn(mockKeyValueOBS);
     when(mockKeyValueOBS.getValue()).thenReturn(omKeyInfos[0], omKeyInfos[1], omKeyInfos[2]);
@@ -123,7 +124,7 @@ public void testReprocess() throws IOException {
     TypedTable.TypedTableIterator mockIterFSO = mock(TypedTable.TypedTableIterator.class);
     when(keyTableFSO.iterator()).thenReturn(mockIterFSO);
     when(mockIterFSO.hasNext()).thenReturn(true, true, true, false);
-    TypedTable.TypedKeyValue mockKeyValueFSO = mock(TypedTable.TypedKeyValue.class);
+    final Table.KeyValue mockKeyValueFSO = mock(Table.KeyValue.class);
     when(mockIterFSO.next()).thenReturn(mockKeyValueFSO);
     when(mockKeyValueFSO.getValue()).thenReturn(omKeyInfos[0], omKeyInfos[1], omKeyInfos[2]);
@@ -305,8 +306,8 @@ public void testReprocessAtScale() throws IOException {
     TypedTable.TypedTableIterator mockKeyIterLegacy = mock(TypedTable.TypedTableIterator.class);
     TypedTable.TypedTableIterator mockKeyIterFso = mock(TypedTable.TypedTableIterator.class);
-    TypedTable.TypedKeyValue mockKeyValueLegacy = mock(TypedTable.TypedKeyValue.class);
-    TypedTable.TypedKeyValue mockKeyValueFso = mock(TypedTable.TypedKeyValue.class);
+    final Table.KeyValue mockKeyValueLegacy = mock(Table.KeyValue.class);
+    final Table.KeyValue mockKeyValueFso = mock(Table.KeyValue.class);
     when(keyTableLegacy.iterator()).thenReturn(mockKeyIterLegacy);
     when(keyTableFso.iterator()).thenReturn(mockKeyIterFso);
diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/tasks/TestOmTableInsightTask.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/tasks/TestOmTableInsightTask.java
index ae83bf4f6ed7..40dd0c972e60 100644
--- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/tasks/TestOmTableInsightTask.java
+++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/tasks/TestOmTableInsightTask.java
@@ -350,8 +350,7 @@ public void testReprocessForCount() throws Exception {
     when(omMetadataManager.getTable(tableName)).thenReturn(table);
     when(mockIter.hasNext()).thenReturn(true, true, true, true, true, false);
 
-    TypedTable.TypedKeyValue mockKeyValue =
-        mock(TypedTable.TypedKeyValue.class);
+    final Table.KeyValue mockKeyValue = mock(Table.KeyValue.class);
     if (tableName.equals(DELETED_TABLE)) {
       RepeatedOmKeyInfo keyInfo = mock(RepeatedOmKeyInfo.class);