diff --git a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/BlockDataStreamOutput.java b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/BlockDataStreamOutput.java
index d0419fa0c3cc..c69af90a9142 100644
--- a/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/BlockDataStreamOutput.java
+++ b/hadoop-hdds/client/src/main/java/org/apache/hadoop/hdds/scm/storage/BlockDataStreamOutput.java
@@ -485,8 +485,7 @@ private void writeChunkToContainer(ByteBuffer buf)
       throws IOException {
     final int effectiveChunkSize = buf.remaining();
     final long offset = chunkOffset.getAndAdd(effectiveChunkSize);
-    ChecksumData checksumData =
-        checksum.computeChecksum(buf.asReadOnlyBuffer());
+    ChecksumData checksumData = checksum.computeChecksum(buf);
     ChunkInfo chunkInfo = ChunkInfo.newBuilder()
         .setChunkName(blockID.get().getLocalID() + "_chunk_" + ++chunkIndex)
         .setOffset(offset)
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Checksum.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Checksum.java
index 76f84c46ab5e..d300b9ef0e50 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Checksum.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/common/Checksum.java
@@ -139,6 +139,11 @@ public ChecksumData computeChecksum(byte[] data)
    */
   public ChecksumData computeChecksum(ByteBuffer data)
       throws OzoneChecksumException {
+    // If type is set to NONE, we do not need to compute the checksums. We also
+    // need to avoid unnecessary conversions.
+    if (checksumType == ChecksumType.NONE) {
+      return new ChecksumData(checksumType, bytesPerChecksum);
+    }
     if (!data.isReadOnly()) {
       data = data.asReadOnlyBuffer();
     }
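For reviewers, a minimal, self-contained sketch of the pattern this patch applies: short-circuit `computeChecksum` when the checksum type is `NONE` before any buffer conversion, and only take a read-only view of the caller's `ByteBuffer` when a checksum is actually computed. The `ChecksumSketch` class, `ChecksumType` enum, and CRC32 loop below are simplified stand-ins for illustration, not the actual Ozone `Checksum`/`ChecksumData` API.

```java
import java.nio.ByteBuffer;
import java.util.zip.CRC32;

// Simplified stand-in types; the real classes live in
// org.apache.hadoop.ozone.common and have richer APIs.
enum ChecksumType { NONE, CRC32 }

final class ChecksumSketch {
  private final ChecksumType checksumType;
  private final int bytesPerChecksum;

  ChecksumSketch(ChecksumType checksumType, int bytesPerChecksum) {
    this.checksumType = checksumType;
    this.bytesPerChecksum = bytesPerChecksum;
  }

  /**
   * Mirrors the patched behaviour: return early for NONE before any
   * buffer conversion, and only take a read-only view when a checksum
   * is actually computed.
   */
  long[] computeChecksum(ByteBuffer data) {
    if (checksumType == ChecksumType.NONE) {
      return new long[0];             // nothing to compute, no buffer copy
    }
    if (!data.isReadOnly()) {
      data = data.asReadOnlyBuffer();  // consuming the view leaves the
                                       // caller's position/limit untouched
    }
    int chunks = (data.remaining() + bytesPerChecksum - 1) / bytesPerChecksum;
    long[] checksums = new long[chunks];
    for (int i = 0; i < chunks; i++) {
      byte[] block = new byte[Math.min(bytesPerChecksum, data.remaining())];
      data.get(block);
      CRC32 crc = new CRC32();
      crc.update(block);
      checksums[i] = crc.getValue();
    }
    return checksums;
  }
}
```

With this shape, `BlockDataStreamOutput.writeChunkToContainer` no longer needs its own `buf.asReadOnlyBuffer()` call: the read-only conversion happens inside `computeChecksum` only when required, and with `ChecksumType.NONE` both the conversion and the checksum loop are skipped entirely.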