@@ -25,7 +25,9 @@
import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
@@ -34,6 +36,8 @@
public class MockDatanodeStorage {

private final Map<DatanodeBlockID, BlockData> blocks = new HashedMap();
// Reverse index: containerID -> IDs of the blocks stored in that container.
private final Map<Long, List<DatanodeBlockID>>
containerBlocks = new HashMap<>();
private final Map<BlockID, String> fullBlockData = new HashMap<>();

private final Map<String, ChunkInfo> chunks = new HashMap<>();
@@ -48,12 +52,25 @@ public void setStorageFailed() {

public void putBlock(DatanodeBlockID blockID, BlockData blockData) {
blocks.put(blockID, blockData);
containerBlocks
.computeIfAbsent(blockID.getContainerID(), id -> new ArrayList<>())
.add(blockID);
}

public BlockData getBlock(DatanodeBlockID blockID) {
return blocks.get(blockID);
}

public List<BlockData> listBlock(long containerID) {
// Return an empty list for unknown containers instead of throwing an NPE.
List<DatanodeBlockID> datanodeBlockIDs =
containerBlocks.getOrDefault(containerID, new ArrayList<>());
List<BlockData> listBlocksData = new ArrayList<>();
for (DatanodeBlockID dBlock : datanodeBlockIDs) {
listBlocksData.add(blocks.get(dBlock));
}
return listBlocksData;
}
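A minimal usage sketch of the new container index, assuming the standard ContainerProtos builders; the class name and the container/local IDs below are hypothetical test values, not part of this change:

import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.BlockData;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.DatanodeBlockID;

public class ListBlockStorageSketch {
  public static void main(String[] args) {
    MockDatanodeStorage storage = new MockDatanodeStorage();
    // Two blocks in the same container (IDs are made up for illustration).
    DatanodeBlockID id1 = DatanodeBlockID.newBuilder()
        .setContainerID(1L).setLocalID(100L).build();
    DatanodeBlockID id2 = DatanodeBlockID.newBuilder()
        .setContainerID(1L).setLocalID(101L).build();
    storage.putBlock(id1, BlockData.newBuilder().setBlockID(id1).build());
    storage.putBlock(id2, BlockData.newBuilder().setBlockID(id2).build());
    // listBlock returns the BlockData of every block in the container,
    // in insertion order.
    System.out.println(storage.listBlock(1L).size()); // prints 2
  }
}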

public void writeChunk(
DatanodeBlockID blockID,
ChunkInfo chunkInfo, ByteString bytes) throws IOException {
@@ -19,6 +19,7 @@
package org.apache.hadoop.ozone.client;

import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ChunkInfo;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandResponseProto;
@@ -99,6 +100,9 @@ public XceiverClientReply sendCommandAsync(
case GetBlock:
return result(request,
r -> r.setGetBlock(getBlock(request.getGetBlock())));
case ListBlock:
return result(request,
r -> r.setListBlock(listBlock(request.getContainerID())));
default:
throw new IllegalArgumentException(
"Mock version of datanode call " + request.getCmdType()
@@ -122,6 +126,11 @@ private GetBlockResponseProto getBlock(GetBlockRequestProto getBlock) {
.build();
}

private ContainerProtos.ListBlockResponseProto listBlock(long containerID) {
return ContainerProtos.ListBlockResponseProto.newBuilder()
.addAllBlockData(datanodeStorage.listBlock(containerID)).build();
}
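For context, a hedged sketch of how a test might drive this new ListBlock branch through the XceiverClientSpi interface; the helper class, the random UUID, and the count value are assumptions for illustration, not part of this diff:

import java.util.List;
import java.util.UUID;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.BlockData;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandResponseProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ListBlockRequestProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Type;
import org.apache.hadoop.hdds.scm.XceiverClientSpi;

public final class ListBlockClientSketch {
  // Sends a ListBlock command and unwraps the mocked response.
  static List<BlockData> listBlocks(XceiverClientSpi client, long containerID)
      throws Exception {
    ContainerCommandRequestProto request =
        ContainerCommandRequestProto.newBuilder()
            .setCmdType(Type.ListBlock)
            .setContainerID(containerID)
            .setDatanodeUuid(UUID.randomUUID().toString())
            .setListBlock(ListBlockRequestProto.newBuilder()
                .setCount(100).build())
            .build();
    ContainerCommandResponseProto response =
        client.sendCommandAsync(request).getResponse().get();
    // The mock fills blockData from MockDatanodeStorage.listBlock(containerID).
    return response.getListBlock().getBlockDataList();
  }
}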

private PutBlockResponseProto putBlock(PutBlockRequestProto putBlock) {
long length = 0;
for (ChunkInfo chunk : putBlock.getBlockData().getChunksList()) {