@@ -26,6 +26,7 @@
 import java.util.UUID;
 import java.util.concurrent.ConcurrentHashMap;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.DatanodeID;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeOperationalState;
 import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos;
@@ -58,20 +59,20 @@
  */
 public class SimpleMockNodeManager implements NodeManager {

-  private Map<UUID, DatanodeInfo> nodeMap = new ConcurrentHashMap<>();
-  private Map<UUID, Set<PipelineID>> pipelineMap = new ConcurrentHashMap<>();
-  private Map<UUID, Set<ContainerID>> containerMap = new ConcurrentHashMap<>();
+  private Map<DatanodeID, DatanodeInfo> nodeMap = new ConcurrentHashMap<>();
+  private Map<DatanodeID, Set<PipelineID>> pipelineMap = new ConcurrentHashMap<>();
+  private Map<DatanodeID, Set<ContainerID>> containerMap = new ConcurrentHashMap<>();

   public void register(DatanodeDetails dd, NodeStatus status) {
     dd.setPersistedOpState(status.getOperationalState());
     dd.setPersistedOpStateExpiryEpochSec(status.getOpStateExpiryEpochSeconds());
-    nodeMap.put(dd.getUuid(), new DatanodeInfo(dd, status, null));
+    nodeMap.put(dd.getID(), new DatanodeInfo(dd, status, null));
   }

   public void setNodeStatus(DatanodeDetails dd, NodeStatus status) {
     dd.setPersistedOpState(status.getOperationalState());
     dd.setPersistedOpStateExpiryEpochSec(status.getOpStateExpiryEpochSeconds());
-    DatanodeInfo dni = nodeMap.get(dd.getUuid());
+    DatanodeInfo dni = nodeMap.get(dd.getID());
     dni.setNodeStatus(status);
   }

@@ -91,7 +92,7 @@ public void setPipelines(DatanodeDetails dd, int count) {
     for (int i = 0; i < count; i++) {
       pipelines.add(PipelineID.randomId());
     }
-    pipelineMap.put(dd.getUuid(), pipelines);
+    pipelineMap.put(dd.getID(), pipelines);
   }

   /**
@@ -105,7 +106,7 @@ public void setPipelines(DatanodeDetails dd, int count) {
   @Override
   public NodeStatus getNodeStatus(DatanodeDetails datanodeDetails)
       throws NodeNotFoundException {
-    DatanodeInfo dni = nodeMap.get(datanodeDetails.getUuid());
+    DatanodeInfo dni = nodeMap.get(datanodeDetails.getID());
     if (dni != null) {
       return dni.getNodeStatus();
     } else {
@@ -125,7 +126,7 @@ public void setNodeOperationalState(DatanodeDetails dn,
                                       NodeOperationalState newState,
                                       long opStateExpiryEpocSec)
       throws NodeNotFoundException {
-    DatanodeInfo dni = nodeMap.get(dn.getUuid());
+    DatanodeInfo dni = nodeMap.get(dn.getID());
     if (dni == null) {
       throw new NodeNotFoundException(dn.getID());
     }
@@ -145,7 +146,7 @@ public void setNodeOperationalState(DatanodeDetails dn,
    */
   @Override
   public Set<PipelineID> getPipelines(DatanodeDetails datanodeDetails) {
-    Set<PipelineID> p = pipelineMap.get(datanodeDetails.getUuid());
+    Set<PipelineID> p = pipelineMap.get(datanodeDetails.getID());
     if (p == null || p.isEmpty()) {
       return null;
     } else {
@@ -159,7 +160,7 @@ public int getPipelinesCount(DatanodeDetails datanodeDetails) {
   }

   public void setContainers(DatanodeDetails dn, Set<ContainerID> containerIds) {
-    containerMap.put(dn.getUuid(), containerIds);
+    containerMap.put(dn.getID(), containerIds);
   }

   /**
@@ -177,7 +178,7 @@ public Set<ContainerID> getContainers(DatanodeDetails dn)
     // The concrete implementation of this method in SCMNodeManager will return
     // an empty set if there are no containers, and will never return null.
     return containerMap
-        .computeIfAbsent(dn.getUuid(), key -> new HashSet<>());
+        .computeIfAbsent(dn.getID(), key -> new HashSet<>());
   }

   /**
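To make the keying change concrete, here is a minimal test-style sketch: every lookup in the mock now goes through DatanodeDetails#getID() (a DatanodeID) instead of the raw UUID, so any DatanodeDetails instance carrying the same DatanodeID resolves to the same map entry. This is not part of the PR; the helpers MockDatanodeDetails.randomDatanodeDetails() and NodeStatus.inServiceHealthy() are assumed to be available in the surrounding Ozone test utilities.

import java.util.Set;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.MockDatanodeDetails;  // assumed test helper
import org.apache.hadoop.hdds.scm.node.NodeStatus;
import org.apache.hadoop.hdds.scm.pipeline.PipelineID;

public final class SimpleMockNodeManagerSketch {
  public static void main(String[] args) throws Exception {
    SimpleMockNodeManager nodeManager = new SimpleMockNodeManager();

    // Assumed helper: builds a DatanodeDetails with a fresh DatanodeID.
    DatanodeDetails dn = MockDatanodeDetails.randomDatanodeDetails();

    // Register the node and attach two pipelines; both internal maps
    // are keyed by dn.getID() after this change.
    nodeManager.register(dn, NodeStatus.inServiceHealthy());  // assumed factory
    nodeManager.setPipelines(dn, 2);

    // Lookups resolve through DatanodeDetails#getID(), so the same
    // DatanodeID always reaches the same entries.
    NodeStatus status = nodeManager.getNodeStatus(dn);
    Set<PipelineID> pipelines = nodeManager.getPipelines(dn);

    System.out.println(status + " with " + pipelines.size() + " pipelines");
  }
}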