diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerCheck.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerCheck.java
index f0713469e61..70539111fb9 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerCheck.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueContainerCheck.java
@@ -20,6 +20,7 @@
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdds.StringUtils;
 import org.apache.hadoop.hdds.conf.ConfigurationSource;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdfs.util.Canceler;
@@ -45,7 +46,6 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
-import java.util.Arrays;
 
 import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
 import org.slf4j.Logger;
@@ -421,8 +421,8 @@ private static ScanResult verifyChecksum(BlockData block,
               " for block %s",
           ChunkInfo.getFromProtoBuf(chunk),
           i,
-          Arrays.toString(expected.toByteArray()),
-          Arrays.toString(actual.toByteArray()),
+          StringUtils.bytes2Hex(expected.asReadOnlyByteBuffer()),
+          StringUtils.bytes2Hex(actual.asReadOnlyByteBuffer()),
           block.getBlockID());
       return ScanResult.unhealthy(
           ScanResult.FailureType.CORRUPT_CHUNK, chunkFile,
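
The scanner log change above swaps `Arrays.toString(byte[])` for a hex dump taken directly from the protobuf `ByteString`'s read-only buffer, so the mismatch message no longer forces a defensive copy of each checksum. A minimal JDK-only sketch of the same idea (the `toHex` helper and class name below are illustrative, not part of Ozone's `StringUtils`):

```java
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public final class HexViewSketch {

  // Formats the remaining bytes of a buffer as lowercase hex. Reading
  // through duplicate() leaves the caller's position untouched and never
  // materializes an intermediate byte[].
  static String toHex(ByteBuffer buffer) {
    ByteBuffer view = buffer.duplicate();
    StringBuilder sb = new StringBuilder(view.remaining() * 2);
    while (view.hasRemaining()) {
      sb.append(String.format("%02x", view.get() & 0xff));
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    ByteBuffer checksum =
        ByteBuffer.wrap("demo".getBytes(StandardCharsets.UTF_8)).asReadOnlyBuffer();
    System.out.println(toHex(checksum)); // prints 64656d6f
  }
}
```
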
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMRatisResponse.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMRatisResponse.java
index 15163bf3e6a..9d65eae06b1 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMRatisResponse.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMRatisResponse.java
@@ -17,12 +17,13 @@
 package org.apache.hadoop.hdds.scm.ha;
 
-import com.google.protobuf.ByteString;
 import com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.hdds.protocol.proto.SCMRatisProtocol.SCMRatisResponseProto;
 import org.apache.hadoop.hdds.scm.ha.io.CodecFactory;
 import org.apache.ratis.protocol.Message;
 import org.apache.ratis.protocol.RaftClientReply;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.UnsafeByteOperations;
 
 /**
  * Represents the response from RatisServer.
@@ -72,13 +73,11 @@ public static Message encode(final Object result)
     }
 
     final Class<?> type = result.getClass();
-    final ByteString value = CodecFactory.getCodec(type).serialize(result);
-
     final SCMRatisResponseProto response = SCMRatisResponseProto.newBuilder()
-        .setType(type.getName()).setValue(value).build();
-    return Message.valueOf(
-        org.apache.ratis.thirdparty.com.google.protobuf.ByteString.copyFrom(
-            response.toByteArray()));
+        .setType(type.getName())
+        .setValue(CodecFactory.getCodec(type).serialize(result))
+        .build();
+    return Message.valueOf(UnsafeByteOperations.unsafeWrap(response.toByteString().asReadOnlyByteBuffer()));
   }
 
   public static SCMRatisResponse decode(RaftClientReply reply)
@@ -87,14 +86,13 @@ public static SCMRatisResponse decode(RaftClientReply reply)
       return new SCMRatisResponse(reply.getException());
     }
 
-    final byte[] response = reply.getMessage().getContent().toByteArray();
+    final ByteString response = reply.getMessage().getContent();
 
-    if (response.length == 0) {
+    if (response.isEmpty()) {
       return new SCMRatisResponse();
     }
 
-    final SCMRatisResponseProto responseProto = SCMRatisResponseProto
-        .parseFrom(response);
+    final SCMRatisResponseProto responseProto = SCMRatisResponseProto.parseFrom(response.toByteArray());
 
     try {
       final Class<?> type = ReflectionUtil.getClass(responseProto.getType());
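
The encode/decode change above replaces `ByteString.copyFrom(response.toByteArray())` (two full copies of the serialized proto) with `UnsafeByteOperations.unsafeWrap(...)` over a read-only view, which shares the underlying bytes instead of copying them. A hedged sketch of the difference using stock protobuf-java (Ratis actually uses its relocated `org.apache.ratis.thirdparty` copy of these classes):

```java
import com.google.protobuf.ByteString;
import com.google.protobuf.UnsafeByteOperations;

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public final class WrapVsCopySketch {
  public static void main(String[] args) {
    byte[] serialized = "serialized-proto-bytes".getBytes(StandardCharsets.UTF_8);

    // copyFrom allocates a fresh backing array for the ByteString.
    ByteString copied = ByteString.copyFrom(serialized);

    // unsafeWrap shares the caller's buffer. That is only safe when the
    // source is never mutated afterwards, which holds for the immutable
    // proto's toByteString() output wrapped in the patch above.
    ByteString wrapped = UnsafeByteOperations.unsafeWrap(ByteBuffer.wrap(serialized));

    System.out.println(copied.size() + " == " + wrapped.size());
  }
}
```
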
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ECBlockChecksumComputer.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ECBlockChecksumComputer.java
index e0b82bebc3a..220bef71491 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ECBlockChecksumComputer.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ECBlockChecksumComputer.java
@@ -25,12 +25,13 @@
 import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.security.MessageDigest;
 import java.util.List;
@@ -42,8 +43,8 @@ public class ECBlockChecksumComputer extends AbstractBlockChecksumComputer {
   private static final Logger LOG =
       LoggerFactory.getLogger(ECBlockChecksumComputer.class);
 
-  private List<ContainerProtos.ChunkInfo> chunkInfoList;
-  private OmKeyInfo keyInfo;
+  private final List<ContainerProtos.ChunkInfo> chunkInfoList;
+  private final OmKeyInfo keyInfo;
 
   public ECBlockChecksumComputer(
@@ -68,7 +69,7 @@ public void compute(OzoneClientConfig.ChecksumCombineMode combineMode)
     }
 
-  private void computeMd5Crc() throws IOException {
+  private void computeMd5Crc() {
     Preconditions.checkArgument(chunkInfoList.size() > 0);
 
     final ContainerProtos.ChunkInfo firstChunkInfo = chunkInfoList.get(0);
@@ -77,32 +78,28 @@ private void computeMd5Crc() {
     // Total parity checksum bytes per stripe to remove
     int parityBytes = getParityBytes(chunkSize, bytesPerCrc);
 
-    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    final MessageDigest digester = MD5Hash.getDigester();
 
     for (ContainerProtos.ChunkInfo chunkInfo : chunkInfoList) {
       ByteString stripeChecksum = chunkInfo.getStripeChecksum();
 
       Preconditions.checkNotNull(stripeChecksum);
-      byte[] checksumBytes = stripeChecksum.toByteArray();
-
-      Preconditions.checkArgument(checksumBytes.length % 4 == 0,
+      final int checksumSize = stripeChecksum.size();
+      Preconditions.checkArgument(checksumSize % 4 == 0,
           "Checksum Bytes size does not match");
 
-      ByteBuffer byteWrap = ByteBuffer
-          .wrap(checksumBytes, 0, checksumBytes.length - parityBytes);
-      byte[] currentChecksum = new byte[4];
-
-      while (byteWrap.hasRemaining()) {
-        byteWrap.get(currentChecksum);
-        out.write(currentChecksum);
-      }
+      final ByteBuffer byteWrap = stripeChecksum.asReadOnlyByteBuffer();
+      byteWrap.limit(checksumSize - parityBytes);
+      digester.update(byteWrap);
     }
 
-    MD5Hash fileMD5 = MD5Hash.digest(out.toByteArray());
-    setOutBytes(fileMD5.getDigest());
+    final byte[] fileMD5 = digester.digest();
+    setOutBytes(fileMD5);
 
-    LOG.debug("Number of chunks={}, md5hash={}",
-        chunkInfoList.size(), fileMD5);
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Number of chunks={}, md5hash={}",
+          chunkInfoList.size(), StringUtils.bytes2HexString(fileMD5));
+    }
   }
 
   private void computeCompositeCrc() throws IOException {
@@ -149,17 +146,15 @@ private void computeCompositeCrc() throws IOException {
      ByteString stripeChecksum = chunkInfo.getStripeChecksum();
 
      Preconditions.checkNotNull(stripeChecksum);
-      byte[] checksumBytes = stripeChecksum.toByteArray();
-
-      Preconditions.checkArgument(checksumBytes.length % 4 == 0,
+      final int checksumSize = stripeChecksum.size();
+      Preconditions.checkArgument(checksumSize % 4 == 0,
          "Checksum Bytes size does not match");
 
      CrcComposer chunkCrcComposer =
          CrcComposer.newCrcComposer(dataChecksumType, bytesPerCrc);
 
      // Limit parity bytes as they do not contribute to fileChecksum
-      ByteBuffer byteWrap = ByteBuffer
-          .wrap(checksumBytes, 0, checksumBytes.length - parityBytes);
-      byte[] currentChecksum = new byte[4];
+      final ByteBuffer byteWrap = stripeChecksum.asReadOnlyByteBuffer();
+      byteWrap.limit(checksumSize - parityBytes);
      long chunkOffsetIndex = 1;
      while (byteWrap.hasRemaining()) {
@@ -177,8 +172,7 @@ private void computeCompositeCrc() throws IOException {
          currentChunkOffset = bytesPerCrcOffset;
        }
 
-        byteWrap.get(currentChecksum);
-        int checksumDataCrc = CrcUtil.readInt(currentChecksum, 0);
+        final int checksumDataCrc = byteWrap.getInt();
        //To handle last chunk when it size is lower than 1524K in the case
        // of rs-3-2-1524k.
        long chunkSizePerChecksum = Math.min(Math.min(keySize, bytesPerCrc),
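
In `computeMd5Crc()` above, the stripe checksums are no longer copied 4 bytes at a time into a `ByteArrayOutputStream`; instead each stripe's read-only buffer has its limit pulled in so the trailing parity CRCs are excluded, and the buffer is fed straight to the MD5 digester. A JDK-only sketch of that buffer-limit trick (the sample sizes are made up for illustration):

```java
import java.nio.ByteBuffer;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public final class StripeDigestSketch {
  public static void main(String[] args) throws NoSuchAlgorithmException {
    // Pretend stripe checksum: three 4-byte data CRCs followed by one
    // 4-byte parity CRC that must not contribute to the file checksum.
    byte[] stripeChecksum = new byte[16];
    int parityBytes = 4;

    MessageDigest md5 = MessageDigest.getInstance("MD5");
    ByteBuffer view = ByteBuffer.wrap(stripeChecksum).asReadOnlyBuffer();
    view.limit(view.capacity() - parityBytes); // drop trailing parity CRCs
    md5.update(view);                          // digest directly, no byte[] copy

    System.out.println(md5.digest().length);   // 16-byte MD5
  }
}
```
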
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ReplicatedBlockChecksumComputer.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ReplicatedBlockChecksumComputer.java
index cf976e3bd39..2c0fc0c0d36 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ReplicatedBlockChecksumComputer.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/checksum/ReplicatedBlockChecksumComputer.java
@@ -26,8 +26,9 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.security.MessageDigest;
 import java.util.List;
 
 /**
@@ -39,7 +40,13 @@ public class ReplicatedBlockChecksumComputer extends
   private static final Logger LOG =
       LoggerFactory.getLogger(ReplicatedBlockChecksumComputer.class);
 
-  private List<ContainerProtos.ChunkInfo> chunkInfoList;
+  static MD5Hash digest(ByteBuffer data) {
+    final MessageDigest digester = MD5Hash.getDigester();
+    digester.update(data);
+    return new MD5Hash(digester.digest());
+  }
+
+  private final List<ContainerProtos.ChunkInfo> chunkInfoList;
 
   public ReplicatedBlockChecksumComputer(
       List<ContainerProtos.ChunkInfo> chunkInfoList) {
@@ -62,20 +69,20 @@ public void compute(OzoneClientConfig.ChecksumCombineMode combineMode)
   }
 
   // compute the block checksum, which is the md5 of chunk checksums
-  private void computeMd5Crc() throws IOException {
-    ByteArrayOutputStream baos = new ByteArrayOutputStream();
-
+  private void computeMd5Crc() {
+    ByteString bytes = ByteString.EMPTY;
     for (ContainerProtos.ChunkInfo chunkInfo : chunkInfoList) {
       ContainerProtos.ChecksumData checksumData =
           chunkInfo.getChecksumData();
       List<ByteString> checksums = checksumData.getChecksumsList();
 
       for (ByteString checksum : checksums) {
-        baos.write(checksum.toByteArray());
+        bytes = bytes.concat(checksum);
       }
     }
 
-    MD5Hash fileMD5 = MD5Hash.digest(baos.toByteArray());
+    final MD5Hash fileMD5 = digest(bytes.asReadOnlyByteBuffer());
+
     setOutBytes(fileMD5.getDigest());
 
     LOG.debug("number of chunks={}, md5out={}",
@@ -121,7 +128,7 @@ private void computeCompositeCrc() throws IOException {
       Preconditions.checkArgument(remainingChunkSize
           <= checksums.size() * chunkSize);
       for (ByteString checksum : checksums) {
-        int checksumDataCrc = CrcUtil.readInt(checksum.toByteArray(), 0);
+        final int checksumDataCrc = checksum.asReadOnlyByteBuffer().getInt();
         chunkCrcComposer.update(checksumDataCrc,
             Math.min(bytesPerCrc, remainingChunkSize));
         remainingChunkSize -= bytesPerCrc;
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/DatanodeChunkValidator.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/DatanodeChunkValidator.java
index b290da2da1f..2bbf8b6d5b2 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/DatanodeChunkValidator.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/DatanodeChunkValidator.java
@@ -193,7 +193,7 @@ private ChecksumData computeChecksum(ContainerCommandResponseProto response)
       throws OzoneChecksumException {
     ContainerProtos.ReadChunkResponseProto readChunk = response.getReadChunk();
     if (readChunk.hasData()) {
-      return checksum.computeChecksum(readChunk.getData().toByteArray());
+      return checksum.computeChecksum(readChunk.getData().asReadOnlyByteBuffer());
     } else {
       return checksum.computeChecksum(
           readChunk.getDataBuffers().getBuffersList());
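
Both checksum computers now read each 4-byte CRC with `ByteBuffer.getInt()` on a read-only view instead of copying it into a `byte[4]` for `CrcUtil.readInt`. This relies on `ByteBuffer`'s default big-endian order matching `CrcUtil.readInt`'s byte order; a small stand-alone sketch of the pattern:

```java
import java.nio.ByteBuffer;

public final class CrcReadSketch {
  public static void main(String[] args) {
    // Four 4-byte CRCs packed back to back, as in a chunk checksum blob.
    ByteBuffer checksums = ByteBuffer.allocate(16);
    for (int i = 1; i <= 4; i++) {
      checksums.putInt(i * 1111);
    }
    checksums.flip();

    // ByteBuffer is big-endian by default, so getInt() decodes the same
    // value CrcUtil.readInt would, without an intermediate byte[4] copy.
    while (checksums.hasRemaining()) {
      System.out.println(Integer.toHexString(checksums.getInt()));
    }
  }
}
```
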