Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,10 @@
*/
package org.apache.hadoop.ozone.om.helpers;

import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos
.MultipartKeyInfo;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos
.PartKeyInfo;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.MultipartKeyInfo;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.PartKeyInfo;

import java.util.HashMap;
import java.util.Map;
Expand All @@ -30,15 +30,24 @@
* upload part information of the key.
*/
public class OmMultipartKeyInfo {
private String uploadID;
private final String uploadID;
private final long creationTime;
private final ReplicationType replicationType;
private final ReplicationFactor replicationFactor;
private TreeMap<Integer, PartKeyInfo> partKeyInfoList;

/**
* Construct OmMultipartKeyInfo object which holds multipart upload
* information for a key.
*/
public OmMultipartKeyInfo(String id, Map<Integer, PartKeyInfo> list) {
public OmMultipartKeyInfo(String id, long creationTime,
ReplicationType replicationType,
ReplicationFactor replicationFactor,
Map<Integer, PartKeyInfo> list) {
this.uploadID = id;
this.creationTime = creationTime;
this.replicationType = replicationType;
this.replicationFactor = replicationFactor;
this.partKeyInfoList = new TreeMap<>(list);
}

Expand All @@ -50,6 +59,10 @@ public String getUploadID() {
return uploadID;
}

/**
 * Returns the creation time of this multipart upload, in epoch milliseconds
 * (consumers convert it with {@code Instant.ofEpochMilli}).
 */
public long getCreationTime() {
return creationTime;
}

/**
 * Returns the map of part number to {@link PartKeyInfo}, sorted by part number.
 * NOTE(review): this returns the internal TreeMap directly, so callers can
 * mutate this object's state through it — confirm that is intended.
 */
public TreeMap<Integer, PartKeyInfo> getPartKeyInfoMap() {
return partKeyInfoList;
}
Expand All @@ -62,18 +75,27 @@ public PartKeyInfo getPartKeyInfo(int partNumber) {
return partKeyInfoList.get(partNumber);
}

/** Returns the replication type recorded for this multipart upload. */
public ReplicationType getReplicationType() {
return replicationType;
}

/** Returns the replication factor recorded for this multipart upload. */
public ReplicationFactor getReplicationFactor() {
return replicationFactor;
}

/**
* Construct OmMultipartInfo from MultipartKeyInfo proto object.
* @param multipartKeyInfo
* @return OmMultipartKeyInfo
*/
public static OmMultipartKeyInfo getFromProto(MultipartKeyInfo
multipartKeyInfo) {
public static OmMultipartKeyInfo getFromProto(
MultipartKeyInfo multipartKeyInfo) {
Map<Integer, PartKeyInfo> list = new HashMap<>();
multipartKeyInfo.getPartKeyInfoListList().stream().forEach(partKeyInfo
-> list.put(partKeyInfo.getPartNumber(), partKeyInfo));
return new OmMultipartKeyInfo(multipartKeyInfo.getUploadID(), list);
multipartKeyInfo.getPartKeyInfoListList().forEach(partKeyInfo ->
list.put(partKeyInfo.getPartNumber(), partKeyInfo));
return new OmMultipartKeyInfo(multipartKeyInfo.getUploadID(),
multipartKeyInfo.getCreationTime(), multipartKeyInfo.getType(),
multipartKeyInfo.getFactor(), list);
}

/**
Expand All @@ -82,7 +104,10 @@ public static OmMultipartKeyInfo getFromProto(MultipartKeyInfo
*/
public MultipartKeyInfo getProto() {
MultipartKeyInfo.Builder builder = MultipartKeyInfo.newBuilder()
.setUploadID(uploadID);
.setUploadID(uploadID)
.setCreationTime(creationTime)
.setType(replicationType)
.setFactor(replicationFactor);
partKeyInfoList.forEach((key, value) -> builder.addPartKeyInfoList(value));
return builder.build();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -969,7 +969,10 @@ message MultipartInfoInitiateResponse {
}

message MultipartKeyInfo {
required string uploadID = 4;
required string uploadID = 1;
required uint64 creationTime = 2;
required hadoop.hdds.ReplicationType type = 3;
required hadoop.hdds.ReplicationFactor factor = 4;
repeated PartKeyInfo partKeyInfoList = 5;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,10 @@

package org.apache.hadoop.ozone.om.codec;

import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.ozone.om.helpers.OmMultipartKeyInfo;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time;
import org.junit.Assert;
import org.junit.Test;

Expand All @@ -34,8 +36,10 @@ public class TestOmMultipartKeyInfoCodec {
@Test
public void testOmMultipartKeyInfoCodec() {
OmMultipartKeyInfoCodec codec = new OmMultipartKeyInfoCodec();
OmMultipartKeyInfo omMultipartKeyInfo = new OmMultipartKeyInfo(UUID
.randomUUID().toString(), new HashMap<>());
OmMultipartKeyInfo omMultipartKeyInfo = new OmMultipartKeyInfo(
UUID.randomUUID().toString(), Time.now(),
HddsProtos.ReplicationType.RATIS, HddsProtos.ReplicationFactor.THREE,
new HashMap<>());
byte[] data = new byte[0];
try {
data = codec.toPersistedFormat(omMultipartKeyInfo);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -893,17 +893,18 @@ private OmMultipartInfo createMultipartInfo(OmKeyArgs keyArgs,
// Not checking if there is an already key for this in the keyTable, as
// during final complete multipart upload we take care of this.


long currentTime = Time.now();
Map<Integer, PartKeyInfo> partKeyInfoMap = new HashMap<>();
OmMultipartKeyInfo multipartKeyInfo = new OmMultipartKeyInfo(
multipartUploadID, partKeyInfoMap);
multipartUploadID, currentTime, keyArgs.getType(),
keyArgs.getFactor(), partKeyInfoMap);
List<OmKeyLocationInfo> locations = new ArrayList<>();
OmKeyInfo omKeyInfo = new OmKeyInfo.Builder()
.setVolumeName(keyArgs.getVolumeName())
.setBucketName(keyArgs.getBucketName())
.setKeyName(keyArgs.getKeyName())
.setCreationTime(Time.now())
.setModificationTime(Time.now())
.setCreationTime(currentTime)
.setModificationTime(currentTime)
.setReplicationType(keyArgs.getType())
.setReplicationFactor(keyArgs.getFactor())
.setOmKeyLocationInfos(Collections.singletonList(
Expand Down Expand Up @@ -1323,29 +1324,29 @@ public OmMultipartUploadList listMultipartUploads(String volumeName,

List<OmMultipartUpload> collect = multipartUploadKeys.stream()
.map(OmMultipartUpload::from)
.map(upload -> {
.peek(upload -> {
String dbKey = metadataManager
.getOzoneKey(upload.getVolumeName(),
upload.getBucketName(),
upload.getKeyName());
try {
Table<String, OmKeyInfo> openKeyTable =
metadataManager.getOpenKeyTable();
Table<String, OmMultipartKeyInfo> keyInfoTable =
metadataManager.getMultipartInfoTable();

OmKeyInfo omKeyInfo =
openKeyTable.get(upload.getDbKey());
OmMultipartKeyInfo multipartKeyInfo =
keyInfoTable.get(upload.getDbKey());

upload.setCreationTime(
Instant.ofEpochMilli(omKeyInfo.getCreationTime()));

upload.setReplicationType(omKeyInfo.getType());
upload.setReplicationFactor(omKeyInfo.getFactor());
Instant.ofEpochMilli(multipartKeyInfo.getCreationTime()));
upload.setReplicationType(
multipartKeyInfo.getReplicationType());
upload.setReplicationFactor(
multipartKeyInfo.getReplicationFactor());
} catch (IOException e) {
LOG.warn(
"Open key entry for multipart upload record can be read {}",
dbKey);
}
return upload;
})
.collect(Collectors.toList());

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,8 @@ public OMClientResponse validateAndUpdateCache(OzoneManager ozoneManager,


multipartKeyInfo = new OmMultipartKeyInfo(
keyArgs.getMultipartUploadID(), new HashMap<>());
keyArgs.getMultipartUploadID(), keyArgs.getModificationTime(),
keyArgs.getType(), keyArgs.getFactor(), new HashMap<>());

omKeyInfo = new OmKeyInfo.Builder()
.setVolumeName(keyArgs.getVolumeName())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,9 @@ public S3InitiateMultipartUploadResponse createS3InitiateMPUResponse(
String volumeName, String bucketName, String keyName,
String multipartUploadID) {
OmMultipartKeyInfo multipartKeyInfo = new OmMultipartKeyInfo(
multipartUploadID, new HashMap<>());
multipartUploadID, Time.now(),
HddsProtos.ReplicationType.RATIS, HddsProtos.ReplicationFactor.ONE,
new HashMap<>());

OmKeyInfo omKeyInfo = new OmKeyInfo.Builder()
.setVolumeName(volumeName)
Expand Down