diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
index 943958c65f89..037b898ba09b 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/utils/db/Proto3Codec.java
@@ -37,19 +37,25 @@ public final class Proto3Codec<M extends MessageLite> implements Codec<M> {
 
   private final Class<M> clazz;
   private final Parser<M> parser;
+  private final boolean allowInvalidProtocolBufferException;
 
   /**
    * @return the {@link Codec} for the given class.
    */
   public static <T extends MessageLite> Codec<T> get(T t) {
+    return get(t, false);
+  }
+
+  public static <T extends MessageLite> Codec<T> get(T t, boolean allowInvalidProtocolBufferException) {
     final Codec<?> codec = CODECS.computeIfAbsent(t.getClass(),
-        key -> new Proto3Codec<>(t));
+        key -> new Proto3Codec<>(t, allowInvalidProtocolBufferException));
     return (Codec<T>) codec;
   }
 
-  private Proto3Codec(M m) {
+  private Proto3Codec(M m, boolean allowInvalidProtocolBufferException) {
     this.clazz = (Class<M>) m.getClass();
     this.parser = (Parser<M>) m.getParserForType();
+    this.allowInvalidProtocolBufferException = allowInvalidProtocolBufferException;
   }
 
   @Override
@@ -85,6 +91,9 @@ public M fromCodecBuffer(@Nonnull CodecBuffer buffer) throws CodecException {
     try {
       return parser.parseFrom(buffer.asReadOnlyByteBuffer());
     } catch (InvalidProtocolBufferException e) {
+      if (allowInvalidProtocolBufferException) {
+        return null;
+      }
       throw new CodecException("Failed to parse " + buffer + " for " + getTypeClass(), e);
     }
   }
@@ -97,7 +106,14 @@ public byte[] toPersistedFormat(M message) {
 
   @Override
   public M fromPersistedFormatImpl(byte[] bytes) throws InvalidProtocolBufferException {
-    return parser.parseFrom(bytes);
+    try {
+      return parser.parseFrom(bytes);
+    } catch (InvalidProtocolBufferException e) {
+      if (allowInvalidProtocolBufferException) {
+        return null;
+      }
+      throw e;
+    }
   }
 
   @Override
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
index c226e0ea8772..7b0fda00e130 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
@@ -38,7 +38,8 @@
  */
 public class BlockData {
   private static final Codec<BlockData> CODEC = new DelegatedCodec<>(
-      Proto3Codec.get(ContainerProtos.BlockData.getDefaultInstance()),
+      // allow InvalidProtocolBufferException for backward compatibility with Schema One
+      Proto3Codec.get(ContainerProtos.BlockData.getDefaultInstance(), true),
       BlockData::getFromProtoBuf,
       BlockData::getProtoBufMessage,
       BlockData.class);
@@ -96,6 +97,9 @@ public void setBlockCommitSequenceId(long blockCommitSequenceId) {
    * @return - BlockData
    */
   public static BlockData getFromProtoBuf(ContainerProtos.BlockData data) throws CodecException {
+    if (data == null) {
+      return null;
+    }
     BlockData blockData = new BlockData(
         BlockID.getFromProtobuf(data.getBlockID()));
     for (int x = 0; x < data.getMetadataCount(); x++) {
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneChunkInfoListCodec.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneChunkInfoListCodec.java
index 59a16cc4d11a..c23c25836e32 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneChunkInfoListCodec.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/metadata/SchemaOneChunkInfoListCodec.java
@@ -17,11 +17,14 @@
 package org.apache.hadoop.ozone.container.metadata;
 
+import java.util.concurrent.atomic.AtomicBoolean;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.utils.db.Codec;
 import org.apache.hadoop.hdds.utils.db.CodecException;
 import org.apache.hadoop.ozone.container.common.helpers.ChunkInfoList;
 import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Codec for parsing {@link org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ChunkInfoList}
@@ -44,6 +47,8 @@
  * always be present.
  */
 public final class SchemaOneChunkInfoListCodec implements Codec<ChunkInfoList> {
+  public static final Logger LOG = LoggerFactory.getLogger(SchemaOneChunkInfoListCodec.class);
+  private static final AtomicBoolean LOGGED = new AtomicBoolean(false);
 
   private static final Codec<ChunkInfoList> INSTANCE = new SchemaOneChunkInfoListCodec();
 
@@ -72,9 +77,12 @@ public ChunkInfoList fromPersistedFormat(byte[] rawData) throws CodecException {
       return ChunkInfoList.getFromProtoBuf(
           ContainerProtos.ChunkInfoList.parseFrom(rawData));
     } catch (InvalidProtocolBufferException ex) {
-      throw new CodecException("Invalid chunk information. " +
+      if (LOGGED.compareAndSet(false, true)) {
+        LOG.warn("Invalid chunk information. " +
           "This data may have been written using datanode " +
           "schema version one, which did not save chunk information.", ex);
+      }
+      return null;
     }
   }
 
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestSchemaOneBackwardsCompatibility.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestSchemaOneBackwardsCompatibility.java
index 54133bd750a8..c24f708f1bfb 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestSchemaOneBackwardsCompatibility.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/TestSchemaOneBackwardsCompatibility.java
@@ -366,8 +366,7 @@ public void testReadDeletedBlockChunkInfo(String schemaVersion)
 
     for (Table.KeyValue<String, ChunkInfoList> chunkListKV: deletedBlocks) {
       preUpgradeBlocks.add(chunkListKV.getKey());
-      assertThrows(IOException.class, () -> chunkListKV.getValue(),
-          "No exception thrown when trying to retrieve old deleted blocks values as chunk lists.");
+      assertNull(chunkListKV.getValue());
     }
 
     assertEquals(TestDB.NUM_DELETED_BLOCKS, preUpgradeBlocks.size());
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
index 1ffbcb42f6e8..12f557b4417f 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/db/TypedTable.java
@@ -637,7 +637,7 @@ public KeyValue<KEY, VALUE> next() {
       try {
         return convert(rawIterator.next());
       } catch (CodecException e) {
-        throw new IllegalStateException("Failed next()", e);
+        throw new IllegalStateException("Failed next() in " + TypedTable.this, e);
      }
    }
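Reviewer note, with a minimal usage sketch (not part of the patch). The new boolean on Proto3Codec.get() opts a codec into returning null instead of surfacing InvalidProtocolBufferException; that null then flows through BlockData.getFromProtoBuf and table iteration, which is what lets readers tolerate values written by datanode Schema One. The class name SchemaOneReadSketch and the corrupt byte literal below are illustrative assumptions, and the sketch assumes Codec#fromPersistedFormat(byte[]) delegates to fromPersistedFormatImpl(byte[]) as the hunks above imply.

import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.utils.db.Codec;
import org.apache.hadoop.hdds.utils.db.Proto3Codec;

// Hypothetical caller sketch; names here are not part of the patch.
public final class SchemaOneReadSketch {
  public static void main(String[] args) throws Exception {
    // true => parse failures decode to null instead of throwing.
    final Codec<ContainerProtos.BlockData> lenient =
        Proto3Codec.get(ContainerProtos.BlockData.getDefaultInstance(), true);

    // 0xFF is a truncated varint tag, so protobuf parsing is guaranteed to fail.
    final byte[] corrupt = {(byte) 0xFF};

    final ContainerProtos.BlockData value = lenient.fromPersistedFormat(corrupt);
    if (value == null) {
      // Schema One rows without chunk information land here; callers now
      // null-check per row instead of catching an exception per row.
      System.out.println("tolerated unparseable value (Schema One data)");
    }
  }
}

One design consequence worth flagging: CODECS caches by message class, so whichever flag value reaches computeIfAbsent first wins for that message type; a later Proto3Codec.get(t, false) for the same type silently returns the previously cached lenient codec, and vice versa.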