Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -147,16 +147,41 @@ private void fetchBlocks() throws IOException {
.build();
OmKeyInfo keyInfo = ozoneManagerClient.lookupKey(keyArgs);

if (keyInfo.getFileChecksum() != null &&
isFullLength(keyInfo.getDataSize())) {
// if the checksum is cached in OM, and we request the checksum of
// the full length.
fileChecksum = keyInfo.getFileChecksum();
}

// use OmKeyArgs to call Om.lookup() and get OmKeyInfo
keyLocationInfos = keyInfo
.getLatestVersionLocations().getBlocksLatestVersionOnly();
}

/**
 * Return true if the requested length covers the whole file, i.e. the
 * requested length is greater than or equal to the file length (dataSize).
 *
 * @param dataSize file length in bytes
 * @return true if {@code this.length >= dataSize}, meaning the checksum
 *         request spans the full file; false otherwise
 */
private boolean isFullLength(long dataSize) {
return this.length >= dataSize;
}

/**
 * Compute file checksum given the list of chunk checksums requested earlier.
 *
 * Skips computation if the checksum has already been computed, or if the
 * OmKeyInfo of the key in OM carries a pre-computed checksum.
 * @throws IOException
 */
public void compute() throws IOException {
if (fileChecksum != null) {
LOG.debug("Checksum is available. Skip computing it.");
return;
}
/**
* request length is 0 or the file is empty, return one with the
* magic entry that matches the md5 of a 32 byte zero-padded byte array.
Expand All @@ -167,7 +192,7 @@ public void compute() throws IOException {
final int lenOfZeroBytes = 32;
byte[] emptyBlockMd5 = new byte[lenOfZeroBytes];
MD5Hash fileMD5 = MD5Hash.digest(emptyBlockMd5);
fileChecksum = new MD5MD5CRC32GzipFileChecksum(0, 0, fileMD5);
fileChecksum = new MD5MD5CRC32GzipFileChecksum(0, 0, fileMD5);
} else {
checksumBlocks();
fileChecksum = makeFinalResult();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -241,6 +241,36 @@ public XceiverClientReply sendCommandAsync(
FileChecksum fileChecksum = helper.getFileChecksum();
assertTrue(fileChecksum instanceof MD5MD5CRC32GzipFileChecksum);
assertEquals(1, helper.getKeyLocationInfoList().size());

FileChecksum cachedChecksum = new MD5MD5CRC32GzipFileChecksum();
/// test cached checksum
OmKeyInfo omKeyInfoWithChecksum = new OmKeyInfo.Builder()
.setVolumeName(null)
.setBucketName(null)
.setKeyName(null)
.setOmKeyLocationInfos(Collections.singletonList(
new OmKeyLocationInfoGroup(0, omKeyLocationInfoList)))
.setCreationTime(Time.now())
.setModificationTime(Time.now())
.setDataSize(0)
.setReplicationConfig(
RatisReplicationConfig.getInstance(
HddsProtos.ReplicationFactor.ONE))
.setFileEncryptionInfo(null)
.setAcls(null)
.setFileChecksum(cachedChecksum)
.build();
when(om.lookupKey(ArgumentMatchers.any())).
thenReturn(omKeyInfoWithChecksum);

helper = new ReplicatedFileChecksumHelper(
mockVolume, bucket, "dummy", 10, combineMode,
mockRpcClient);

helper.compute();
fileChecksum = helper.getFileChecksum();
assertTrue(fileChecksum instanceof MD5MD5CRC32GzipFileChecksum);
assertEquals(1, helper.getKeyLocationInfoList().size());
}

private XceiverClientReply buildValidResponse() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,13 +26,15 @@
import java.util.Objects;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileEncryptionInfo;
import org.apache.hadoop.hdds.client.BlockID;
import org.apache.hadoop.hdds.client.ContainerBlockID;
import org.apache.hadoop.hdds.client.ECReplicationConfig;
import org.apache.hadoop.hdds.client.ReplicationConfig;
import org.apache.hadoop.ozone.OzoneAcl;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.FileChecksumProto;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.KeyInfo;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.KeyLocationList;
import org.apache.hadoop.ozone.protocolPB.OMPBHelper;
Expand All @@ -58,6 +60,7 @@ public final class OmKeyInfo extends WithParentObjectId {
private long modificationTime;
private ReplicationConfig replicationConfig;
private FileEncryptionInfo encInfo;
private FileChecksum fileChecksum;

/**
* Represents leaf node name. This also will be used when the keyName is
Expand All @@ -78,7 +81,7 @@ public final class OmKeyInfo extends WithParentObjectId {
ReplicationConfig replicationConfig,
Map<String, String> metadata,
FileEncryptionInfo encInfo, List<OzoneAcl> acls,
long objectID, long updateID) {
long objectID, long updateID, FileChecksum fileChecksum) {
this.volumeName = volumeName;
this.bucketName = bucketName;
this.keyName = keyName;
Expand All @@ -92,6 +95,7 @@ public final class OmKeyInfo extends WithParentObjectId {
this.acls = acls;
this.objectID = objectID;
this.updateID = updateID;
this.fileChecksum = fileChecksum;
}

@SuppressWarnings("parameternumber")
Expand All @@ -101,10 +105,11 @@ public final class OmKeyInfo extends WithParentObjectId {
ReplicationConfig replicationConfig,
Map<String, String> metadata,
FileEncryptionInfo encInfo, List<OzoneAcl> acls,
long parentObjectID, long objectID, long updateID) {
long parentObjectID, long objectID, long updateID,
FileChecksum fileChecksum) {
this(volumeName, bucketName, keyName, versions, dataSize,
creationTime, modificationTime, replicationConfig, metadata,
encInfo, acls, objectID, updateID);
encInfo, acls, objectID, updateID, fileChecksum);
this.fileName = fileName;
this.parentObjectID = parentObjectID;
}
Expand Down Expand Up @@ -355,6 +360,10 @@ public void setReplicationConfig(ReplicationConfig repConfig) {
this.replicationConfig = repConfig;
}

/**
 * Returns the checksum cached on this key info, or null when no checksum
 * has been set.
 *
 * @return the cached {@link FileChecksum}, possibly null
 */
public FileChecksum getFileChecksum() {
  return this.fileChecksum;
}

/**
* Builder of OmKeyInfo.
*/
Expand All @@ -376,6 +385,7 @@ public static class Builder {
// not persisted to DB. FileName will be the last element in path keyName.
private String fileName;
private long parentObjectID;
private FileChecksum fileChecksum;

public Builder() {
this.metadata = new HashMap<>();
Expand Down Expand Up @@ -483,12 +493,17 @@ public Builder setParentObjectID(long parentID) {
return this;
}

/**
 * Sets the pre-computed file checksum to store on the built key info.
 *
 * @param checksum the checksum to cache; may be null
 * @return this builder, for call chaining
 */
public Builder setFileChecksum(FileChecksum checksum) {
  this.fileChecksum = checksum;
  return this;
}

public OmKeyInfo build() {
return new OmKeyInfo(
volumeName, bucketName, keyName, fileName,
omKeyLocationInfoGroups, dataSize, creationTime,
modificationTime, replicationConfig, metadata, encInfo, acls,
parentObjectID, objectID, updateID);
parentObjectID, objectID, updateID, fileChecksum);
}
}

Expand Down Expand Up @@ -577,6 +592,11 @@ private KeyInfo getProtobuf(boolean ignorePipeline, String fullKeyName,
.setObjectID(objectID)
.setUpdateID(updateID)
.setParentID(parentObjectID);

FileChecksumProto fileChecksumProto = OMPBHelper.convert(fileChecksum);
if (fileChecksumProto != null) {
kb.setFileChecksum(fileChecksumProto);
}
if (StringUtils.isNotBlank(fullKeyName)) {
kb.setKeyName(fullKeyName);
} else {
Expand All @@ -588,7 +608,7 @@ private KeyInfo getProtobuf(boolean ignorePipeline, String fullKeyName,
return kb.build();
}

public static OmKeyInfo getFromProtobuf(KeyInfo keyInfo) {
public static OmKeyInfo getFromProtobuf(KeyInfo keyInfo) throws IOException {
if (keyInfo == null) {
return null;
}
Expand Down Expand Up @@ -623,6 +643,10 @@ public static OmKeyInfo getFromProtobuf(KeyInfo keyInfo) {
if (keyInfo.hasParentID()) {
builder.setParentObjectID(keyInfo.getParentID());
}
if (keyInfo.hasFileChecksum()) {
FileChecksum fileChecksum = OMPBHelper.convert(keyInfo.getFileChecksum());
builder.setFileChecksum(fileChecksum);
}
// not persisted to DB. FileName will be filtered out from keyName
builder.setFileName(OzoneFSUtils.getFileName(keyInfo.getKeyName()));
return builder.build();
Expand All @@ -638,7 +662,8 @@ public String getObjectInfo() {
", creationTime='" + creationTime + '\'' +
", objectID='" + objectID + '\'' +
", parentID='" + parentObjectID + '\'' +
", replication='" + replicationConfig +
", replication='" + replicationConfig + '\'' +
", fileChecksum='" + fileChecksum +
'}';
}

Expand Down Expand Up @@ -704,6 +729,10 @@ public OmKeyInfo copyObject() {
metadata.forEach((k, v) -> builder.addMetadata(k, v));
}

if (fileChecksum != null) {
builder.setFileChecksum(fileChecksum);
}

return builder.build();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

package org.apache.hadoop.ozone.om.helpers;

import java.io.IOException;
import java.util.Objects;

import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OzoneFileStatusProto;
Expand Down Expand Up @@ -104,7 +105,8 @@ public OzoneFileStatusProto getProtobuf(int clientVersion) {
return builder.build();
}

public static OzoneFileStatus getFromProtobuf(OzoneFileStatusProto status) {
public static OzoneFileStatus getFromProtobuf(OzoneFileStatusProto status)
throws IOException {
return new OzoneFileStatus(
OmKeyInfo.getFromProtobuf(status.getKeyInfo()),
status.getBlockSize(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
*/
package org.apache.hadoop.ozone.om.helpers;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos
Expand Down Expand Up @@ -52,7 +53,7 @@ public List<OmKeyInfo> getOmKeyInfoList() {
}

public static RepeatedOmKeyInfo getFromProto(RepeatedKeyInfo
repeatedKeyInfo) {
repeatedKeyInfo) throws IOException {
List<OmKeyInfo> list = new ArrayList<>();
for (KeyInfo k : repeatedKeyInfo.getKeyInfoList()) {
list.add(OmKeyInfo.getFromProtobuf(k));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@
import org.apache.hadoop.ozone.om.helpers.ServiceInfo;
import org.apache.hadoop.ozone.om.helpers.ServiceInfoEx;
import org.apache.hadoop.ozone.om.protocol.S3Auth;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.AddAclRequest;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.AddAclResponse;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.AllocateBlockRequest;
Expand Down Expand Up @@ -910,10 +911,12 @@ public List<OmKeyInfo> listKeys(String volumeName, String bucketName,

ListKeysResponse resp =
handleError(submitRequest(omRequest)).getListKeysResponse();
keys.addAll(
resp.getKeyInfoList().stream()
.map(OmKeyInfo::getFromProtobuf)
.collect(Collectors.toList()));
List<OmKeyInfo> list = new ArrayList<>();
for (OzoneManagerProtocolProtos.KeyInfo keyInfo : resp.getKeyInfoList()) {
OmKeyInfo fromProtobuf = OmKeyInfo.getFromProtobuf(keyInfo);
list.add(fromProtobuf);
}
keys.addAll(list);
return keys;

}
Expand Down Expand Up @@ -1637,10 +1640,14 @@ public List<RepeatedOmKeyInfo> listTrash(String volumeName,
List<RepeatedOmKeyInfo> deletedKeyList =
new ArrayList<>(trashResponse.getDeletedKeysCount());

deletedKeyList.addAll(
trashResponse.getDeletedKeysList().stream()
.map(RepeatedOmKeyInfo::getFromProto)
.collect(Collectors.toList()));
List<RepeatedOmKeyInfo> list = new ArrayList<>();
for (OzoneManagerProtocolProtos.RepeatedKeyInfo
repeatedKeyInfo : trashResponse.getDeletedKeysList()) {
RepeatedOmKeyInfo fromProto =
RepeatedOmKeyInfo.getFromProto(repeatedKeyInfo);
list.add(fromProto);
}
deletedKeyList.addAll(list);

return deletedKeyList;
}
Expand Down
Loading