HDDS-8175. getFileChecksum() throws exception in debug mode. (#7611)
chiacyu authored Jan 2, 2025
1 parent 9b5cedd commit c282d91
Showing 3 changed files with 33 additions and 77 deletions.
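
The duplicated populateBlockChecksumBuf() implementations removed below formatted the per-block checksum bytes for debug logging with CrcUtil.toSingleCrcString(), which in Hadoop's org.apache.hadoop.util.CrcUtil throws an IOException for any buffer that is not exactly one 4-byte CRC; the shared implementation added to the base class uses CrcUtil.toMultiCrcString(), which accepts any multiple of four bytes. That difference is consistent with getFileChecksum() failing only when debug logging is enabled. A minimal standalone sketch of the contrast follows; it assumes only Hadoop's CrcUtil on the classpath, and the 8-byte buffer is a hypothetical stand-in for a block checksum that holds more than one CRC.

import java.io.IOException;

import org.apache.hadoop.util.CrcUtil;

/**
 * Illustrative sketch only (not part of the commit): contrasts the two
 * CrcUtil debug-string helpers used by the old and new code paths.
 */
public class CrcDebugStringSketch {
  public static void main(String[] args) throws IOException {
    // Hypothetical raw block checksum holding two 4-byte CRCs.
    byte[] twoCrcs = {1, 2, 3, 4, 5, 6, 7, 8};

    // New shared code path: formats every 4-byte CRC in the buffer.
    System.out.println(CrcUtil.toMultiCrcString(twoCrcs));

    // Old per-subclass code path: accepts exactly 4 bytes only, so it
    // throws IOException for the buffer above.
    try {
      System.out.println(CrcUtil.toSingleCrcString(twoCrcs));
    } catch (IOException e) {
      System.out.println("toSingleCrcString rejected it: " + e.getMessage());
    }
  }
}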
@@ -156,8 +156,6 @@ protected void setChecksumType(ContainerProtos.ChecksumType type) {
protected abstract AbstractBlockChecksumComputer getBlockChecksumComputer(List<ContainerProtos.ChunkInfo> chunkInfos,
long blockLength);

protected abstract String populateBlockChecksumBuf(ByteBuffer blockChecksumByteBuffer) throws IOException;

protected abstract List<ContainerProtos.ChunkInfo> getChunkInfos(
OmKeyLocationInfo keyLocationInfo) throws IOException;

@@ -167,6 +165,39 @@ protected ByteBuffer getBlockChecksumFromChunkChecksums(AbstractBlockChecksumCom
return blockChecksumComputer.getOutByteBuffer();
}

/**
* Parses the raw block checksum bytes out of {@code blockChecksumByteBuffer}
* according to the checksum combine mode and populates the cumulative
* blockChecksumBuf with them.
*
* @return a debug-string representation of the parsed checksum if
* debug is enabled, otherwise null.
*/

protected String populateBlockChecksumBuf(ByteBuffer blockChecksumByteBuffer) throws IOException {
  String blockChecksumForDebug = null;
  switch (getCombineMode()) {
  case MD5MD5CRC:
    final MD5Hash md5 = new MD5Hash(blockChecksumByteBuffer.array());
    md5.write(getBlockChecksumBuf());
    if (LOG.isDebugEnabled()) {
      blockChecksumForDebug = md5.toString();
    }
    break;
  case COMPOSITE_CRC:
    byte[] crcBytes = blockChecksumByteBuffer.array();
    if (LOG.isDebugEnabled()) {
      blockChecksumForDebug = CrcUtil.toMultiCrcString(crcBytes);
    }
    getBlockChecksumBuf().write(crcBytes);
    break;
  default:
    throw new IOException(
        "Unknown combine mode: " + getCombineMode());
  }
  return blockChecksumForDebug;
}

/**
* Compute block checksums block by block and append the raw bytes of the
* block checksums into getBlockChecksumBuf().
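To make the data flow of the consolidated MD5MD5CRC branch concrete, the sketch below mimics how per-block MD5 digests accumulate in the cumulative block checksum buffer. It is a standalone illustration under two assumptions not shown in this diff: the block checksum computer hands back a 16-byte MD5 digest per block, and the helper's getBlockChecksumBuf() behaves like a Hadoop DataOutputBuffer. The final MD5-of-MD5s step at the end is likewise an assumption about what the helper does after populateBlockChecksumBuf() returns.

import java.io.IOException;

import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.MD5Hash;

/**
 * Illustrative sketch only (not part of the commit): how per-block MD5
 * digests would accumulate in a DataOutputBuffer, mirroring the MD5MD5CRC
 * branch of populateBlockChecksumBuf(). Names and the final combine step
 * are assumptions for illustration.
 */
public class Md5CombineSketch {
  public static void main(String[] args) throws IOException {
    DataOutputBuffer blockChecksumBuf = new DataOutputBuffer();

    // Stand-ins for the per-block output of the block checksum computer:
    // a 16-byte MD5 digest computed over each block's chunk checksums.
    byte[][] perBlockDigests = {
        MD5Hash.digest("block-1-chunk-checksums".getBytes()).getDigest(),
        MD5Hash.digest("block-2-chunk-checksums".getBytes()).getDigest()
    };

    for (byte[] digest : perBlockDigests) {
      // MD5MD5CRC branch: wrap the 16-byte digest and append it to the
      // cumulative buffer, as md5.write(getBlockChecksumBuf()) does above.
      MD5Hash blockMd5 = new MD5Hash(digest);
      blockMd5.write(blockChecksumBuf);
      System.out.println("block MD5 (debug string): " + blockMd5);
    }

    // Assumed final step elsewhere in the helper: digest the accumulated
    // block MD5s once more to produce the MD5-of-MD5s file checksum.
    MD5Hash fileMd5 = MD5Hash.digest(
        blockChecksumBuf.getData(), 0, blockChecksumBuf.getLength());
    System.out.println("file MD5-of-MD5s: " + fileMd5);
  }
}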
@@ -28,7 +28,6 @@
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
import org.apache.hadoop.hdds.security.token.OzoneBlockTokenIdentifier;
import org.apache.hadoop.io.MD5Hash;
import org.apache.hadoop.ozone.client.OzoneBucket;
import org.apache.hadoop.ozone.client.OzoneVolume;
import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
@@ -37,7 +36,6 @@
import org.apache.hadoop.security.token.Token;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

@@ -60,33 +58,6 @@ protected AbstractBlockChecksumComputer getBlockChecksumComputer(List<ContainerP
return new ECBlockChecksumComputer(chunkInfos, getKeyInfo(), blockLength);
}

@Override
protected String populateBlockChecksumBuf(
ByteBuffer blockChecksumByteBuffer) throws IOException {
String blockChecksumForDebug = null;
switch (getCombineMode()) {
case MD5MD5CRC:
final MD5Hash md5 = new MD5Hash(blockChecksumByteBuffer.array());
md5.write(getBlockChecksumBuf());
if (LOG.isDebugEnabled()) {
blockChecksumForDebug = md5.toString();
}
break;
case COMPOSITE_CRC:
byte[] crcBytes = blockChecksumByteBuffer.array();
if (LOG.isDebugEnabled()) {
blockChecksumForDebug = CrcUtil.toSingleCrcString(crcBytes);
}
getBlockChecksumBuf().write(crcBytes);
break;
default:
throw new IOException(
"Unknown combine mode: " + getCombineMode());
}

return blockChecksumForDebug;
}

@Override
protected List<ContainerProtos.ChunkInfo> getChunkInfos(OmKeyLocationInfo
keyLocationInfo) throws IOException {
@@ -27,7 +27,6 @@
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import org.apache.hadoop.hdds.scm.storage.ContainerProtocolCalls;
import org.apache.hadoop.hdds.security.token.OzoneBlockTokenIdentifier;
import org.apache.hadoop.io.MD5Hash;
import org.apache.hadoop.ozone.client.OzoneBucket;
import org.apache.hadoop.ozone.client.OzoneVolume;
import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
@@ -36,7 +35,6 @@
import org.apache.hadoop.security.token.Token;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;

/**
@@ -107,48 +105,4 @@ protected List<ContainerProtos.ChunkInfo> getChunkInfos(

return chunks;
}

/**
* Parses out the raw blockChecksum bytes from {@code checksumData} byte
* buffer according to the blockChecksumType and populates the cumulative
* blockChecksumBuf with it.
*
* @return a debug-string representation of the parsed checksum if
* debug is enabled, otherwise null.
*/
@Override
protected String populateBlockChecksumBuf(ByteBuffer checksumData)
throws IOException {
String blockChecksumForDebug = null;
switch (getCombineMode()) {
case MD5MD5CRC:
//read md5
final MD5Hash md5 = new MD5Hash(checksumData.array());
md5.write(getBlockChecksumBuf());
if (LOG.isDebugEnabled()) {
blockChecksumForDebug = md5.toString();
}
break;
case COMPOSITE_CRC:
// TODO: abort if chunk checksum type is not CRC32/CRC32C
//BlockChecksumType returnedType = PBHelperClient.convert(
// checksumData.getBlockChecksumOptions().getBlockChecksumType());
/*if (returnedType != BlockChecksumType.COMPOSITE_CRC) {
throw new IOException(String.format(
"Unexpected blockChecksumType '%s', expecting COMPOSITE_CRC",
returnedType));
}*/
byte[] crcBytes = checksumData.array();
if (LOG.isDebugEnabled()) {
blockChecksumForDebug = CrcUtil.toSingleCrcString(crcBytes);
}
getBlockChecksumBuf().write(crcBytes);
break;
default:
throw new IOException(
"Unknown combine mode: " + getCombineMode());
}

return blockChecksumForDebug;
}
}
