@@ -90,7 +90,7 @@ public void deleteWithBatch(BatchOperation batch, KEY key)
}

@Override
public String getName() throws IOException {
public String getName() {
return table.getName();
}

@@ -26,8 +26,7 @@
/**
* Command for the datanode with the destination address.
*/
public class CommandForDatanode<T extends Message> implements
IdentifiableEventPayload {
public final class CommandForDatanode<T extends Message> implements IdentifiableEventPayload {

private final DatanodeID datanodeId;
private final SCMCommand<T> command;
@@ -54,4 +53,9 @@ public SCMCommand<T> getCommand() {
public long getId() {
return command.getId();
}

@Override
public String toString() {
return "CommandForDatanode{" + datanodeId + ", " + command + '}';
}
}
@@ -127,4 +127,9 @@ public boolean hasExpired(long currentEpochMs) {
public boolean contributesToQueueSize() {
return true;
}

@Override
public String toString() {
return getType() + "(id=" + id + ", term=" + term + ')';
}
}
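
The two toString() additions above are mainly for log readability: SLF4J's {} placeholders render arguments through toString(), so a queued command prints as a compact one-liner instead of the default class@hash form. A self-contained illustration of the pattern (plain Java, not Ozone code; the class name and field values below are invented):

final class Command {
  private final long id;
  private final long term;

  Command(long id, long term) {
    this.id = id;
    this.term = term;
  }

  @Override
  public String toString() {
    // Same shape as the override added above: type plus identifying fields.
    return "command(id=" + id + ", term=" + term + ')';
  }

  public static void main(String[] args) {
    // Prints: Queued command(id=42, term=7)
    System.out.println("Queued " + new Command(42, 7));
  }
}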
@@ -173,9 +173,8 @@ default VALUE getReadCopy(KEY key) throws IOException {
/**
* Returns the Name of this Table.
* @return - Table Name.
* @throws IOException on failure.
*/
String getName() throws IOException;
String getName();

/**
* Returns the key count of this Table. Note the result can be inaccurate.
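With the checked exception removed from Table#getName(), call sites no longer need a try/catch or a throws clause just to read the table name. A hypothetical caller (not part of this PR), assuming the Table interface shown above:

static <K, V> String describe(Table<K, V> table) {
  // Previously this line forced the caller to handle IOException.
  return "table=" + table.getName();
}
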
@@ -48,12 +48,12 @@
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.DatanodeID;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeOperationalState;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
import org.apache.hadoop.hdds.scm.container.ContainerID;
import org.apache.hadoop.hdds.scm.container.ContainerInfo;
import org.apache.hadoop.hdds.scm.container.ContainerNotFoundException;
import org.apache.hadoop.hdds.scm.container.placement.metrics.SCMNodeStat;
import org.apache.hadoop.hdds.scm.node.DatanodeInfo;
import org.apache.hadoop.hdds.scm.node.NodeStatus;
import org.apache.hadoop.hdds.scm.node.states.NodeNotFoundException;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
@@ -115,13 +115,10 @@ public Response getDatanodes() {
} catch (NodeNotFoundException e) {
LOG.warn("Cannot get nodeState for datanode {}", datanode, e);
}
final NodeOperationalState nodeOpState = datanode.getPersistedOpState();
String hostname = datanode.getHostName();
Set<PipelineID> pipelineIDs = nodeManager.getPipelines(datanode);
List<DatanodePipeline> pipelines = new ArrayList<>();
AtomicInteger leaderCount = new AtomicInteger();
AtomicInteger openContainers = new AtomicInteger();
DatanodeMetadata.Builder builder = DatanodeMetadata.newBuilder();

pipelineIDs.forEach(pipelineID -> {
try {
@@ -140,39 +137,32 @@ public Response getDatanodes() {
openContainers.getAndAdd(openContainerPerPipeline);
} catch (PipelineNotFoundException ex) {
LOG.warn("Cannot get pipeline {} for datanode {}, pipeline not found",
pipelineID.getId(), hostname, ex);
pipelineID.getId(), datanode, ex);
} catch (IOException ioEx) {
LOG.warn("Cannot get leader node of pipeline with id {}.",
pipelineID.getId(), ioEx);
}
});

final DatanodeMetadata.Builder builder = DatanodeMetadata.newBuilder()
.setOpenContainers(openContainers.get());

try {
builder.setContainers(nodeManager.getContainerCount(datanode));
builder.setOpenContainers(openContainers.get());
} catch (NodeNotFoundException ex) {
LOG.warn("Cannot get containers, datanode {} not found.",
datanode.getUuid(), ex);
LOG.warn("Failed to getContainerCount for {}", datanode, ex);
}

datanodes.add(builder.setHostname(nodeManager.getHostName(datanode))
datanodes.add(builder.setDatanode(datanode)
.setDatanodeStorageReport(storageReport)
.setLastHeartbeat(nodeManager.getLastHeartbeat(datanode))
.setState(nodeState)
.setOperationalState(nodeOpState)
.setPipelines(pipelines)
.setLeaderCount(leaderCount.get())
.setUuid(datanode.getUuidString())
.setVersion(nodeManager.getVersion(datanode))
.setSetupTime(nodeManager.getSetupTime(datanode))
.setRevision(nodeManager.getRevision(datanode))
.setLayoutVersion(datanode.getLastKnownLayoutVersion().getMetadataLayoutVersion())
.setNetworkLocation(datanode.getNetworkLocation())
.build());
});

DatanodesResponse datanodesResponse =
new DatanodesResponse(datanodes.size(), datanodes);
return Response.ok(datanodesResponse).build();
return Response.ok(new DatanodesResponse(datanodes)).build();
}

/**
@@ -209,34 +199,30 @@ public Response removeDatanodes(List<String> uuids) {
Preconditions.checkArgument(!uuids.isEmpty(), "Datanode list argument should not be empty");
try {
for (String uuid : uuids) {
DatanodeDetails nodeByUuid = nodeManager.getNode(DatanodeID.fromUuidString(uuid));
try {
final DatanodeInfo nodeByUuid = nodeManager.getNode(DatanodeID.fromUuidString(uuid));
if (nodeByUuid != null) {
final DatanodeMetadata metadata = DatanodeMetadata.newBuilder()
.setDatanode(nodeByUuid)
.setState(nodeManager.getNodeStatus(nodeByUuid).getHealth())
.build();

if (preChecksSuccess(nodeByUuid, failedNodeErrorResponseMap)) {
removedDatanodes.add(DatanodeMetadata.newBuilder()
.setHostname(nodeManager.getHostName(nodeByUuid))
.setUuid(uuid)
.setState(nodeManager.getNodeStatus(nodeByUuid).getHealth())
.build());
removedDatanodes.add(metadata);
nodeManager.removeNode(nodeByUuid);
LOG.info("Node {} removed successfully !!!", uuid);
} else {
failedDatanodes.add(DatanodeMetadata.newBuilder()
.setHostname(nodeManager.getHostName(nodeByUuid))
.setUuid(uuid)
.setOperationalState(nodeByUuid.getPersistedOpState())
.setState(nodeManager.getNodeStatus(nodeByUuid).getHealth())
.build());
failedDatanodes.add(metadata);
}
} catch (NodeNotFoundException nnfe) {
LOG.error("Selected node {} not found : {} ", uuid, nnfe);
} else {
LOG.error("Node not found: {}", uuid);
notFoundDatanodes.add(DatanodeMetadata.newBuilder()
.setHostname("")
.setState(NodeState.DEAD)
.setUuid(uuid).build());
}
}
} catch (Exception exp) {
LOG.error("Unexpected Error while removing datanodes : {} ", exp);
LOG.error("Unexpected Error while removing datanodes {}", uuids, exp);
throw new WebApplicationException(exp, Response.Status.INTERNAL_SERVER_ERROR);
}

@@ -250,25 +236,19 @@ public Response removeDatanodes(List<String> uuids) {
}

if (!notFoundDatanodes.isEmpty()) {
DatanodesResponse notFoundNodesResp =
new DatanodesResponse(notFoundDatanodes.size(), notFoundDatanodes);
final DatanodesResponse notFoundNodesResp = new DatanodesResponse(notFoundDatanodes);
removeDataNodesResponseWrapper.getDatanodesResponseMap().put("notFoundDatanodes", notFoundNodesResp);
}

if (!removedDatanodes.isEmpty()) {
DatanodesResponse removedNodesResp =
new DatanodesResponse(removedDatanodes.size(), removedDatanodes);
final DatanodesResponse removedNodesResp = new DatanodesResponse(removedDatanodes);
removeDataNodesResponseWrapper.getDatanodesResponseMap().put("removedDatanodes", removedNodesResp);
}
return Response.ok(removeDataNodesResponseWrapper).build();
}

private boolean preChecksSuccess(DatanodeDetails nodeByUuid, Map<String, String> failedNodeErrorResponseMap)
throws NodeNotFoundException {
if (null == nodeByUuid) {
throw new NodeNotFoundException();
}
NodeStatus nodeStatus = null;
private boolean preChecksSuccess(DatanodeDetails nodeByUuid, Map<String, String> failedNodeErrorResponseMap) {
final NodeStatus nodeStatus;
AtomicBoolean isContainerOrPipeLineOpen = new AtomicBoolean(false);
try {
nodeStatus = nodeManager.getNodeStatus(nodeByUuid);
@@ -300,9 +280,8 @@ private void checkPipelines(DatanodeDetails nodeByUuid, AtomicBoolean isContaine
final Pipeline pipeline = pipelineManager.getPipeline(id);
if (pipeline.isOpen()) {
LOG.warn("Pipeline : {} is still open for datanode: {}, pre-check failed, datanode not eligible " +
"for remove.", id.getId(), nodeByUuid.getUuid());
"for remove.", id.getId(), nodeByUuid);
isContainerOrPipeLineOpen.set(true);
return;
}
} catch (PipelineNotFoundException pipelineNotFoundException) {
LOG.warn("Pipeline {} is not managed by PipelineManager.", id, pipelineNotFoundException);
@@ -317,10 +296,8 @@ private void checkContainers(DatanodeDetails nodeByUuid, AtomicBoolean isContain
try {
final ContainerInfo container = reconContainerManager.getContainer(id);
if (container.getState() == HddsProtos.LifeCycleState.OPEN) {
LOG.warn("Container : {} is still open for datanode: {}, pre-check failed, datanode not eligible " +
"for remove.", container.getContainerID(), nodeByUuid.getUuid());
LOG.warn("Failed to remove datanode {} due to OPEN container: {}", nodeByUuid, container);
isContainerOrPipeLineOpen.set(true);
return;
}
} catch (ContainerNotFoundException cnfe) {
LOG.warn("Container {} is not managed by ContainerManager.",
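The reworked log statements in this file follow the documented SLF4J convention: the exception is passed as the trailing argument with no {} placeholder of its own, so its stack trace is preserved, while the other arguments are rendered through toString(). A standalone sketch of the idiom (hypothetical names, not Ozone code):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

final class LoggingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(LoggingSketch.class);

  static void remove(String uuid) {
    try {
      throw new IllegalStateException("node busy");
    } catch (Exception e) {
      // uuid fills the placeholder; e has no placeholder, so SLF4J treats it
      // as the throwable and logs its stack trace. The older style that gave
      // the exception its own "{}" is not guaranteed to keep the stack trace.
      LOG.error("Unexpected Error while removing datanodes {}", uuid, e);
    }
  }
}
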
@@ -25,6 +25,7 @@
import javax.xml.bind.annotation.XmlElement;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeOperationalState;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState;
import org.apache.hadoop.hdds.scm.node.DatanodeInfo;

/**
* Metadata object that represents a Datanode.
@@ -214,11 +215,6 @@ public Builder setState(NodeState state) {
return this;
}

public Builder setOperationalState(NodeOperationalState operationalState) {
this.opState = operationalState;
return this;
}

public Builder setLastHeartbeat(long lastHeartbeat) {
this.lastHeartbeat = lastHeartbeat;
return this;
@@ -255,28 +251,18 @@ public Builder setUuid(String uuid) {
return this;
}

public Builder setVersion(String version) {
this.version = version;
return this;
}

public Builder setSetupTime(long setupTime) {
this.setupTime = setupTime;
return this;
}
public Builder setDatanode(DatanodeInfo datanode) {
this.uuid = datanode.getUuidString();
this.hostname = datanode.getHostName();
this.networkLocation = datanode.getNetworkLocation();

public Builder setRevision(String revision) {
this.revision = revision;
return this;
}
this.opState = datanode.getPersistedOpState();

public Builder setLayoutVersion(int layoutVersion) {
this.layoutVersion = layoutVersion;
return this;
}
this.version = datanode.getVersion();
this.revision = datanode.getRevision();
this.layoutVersion = datanode.getLastKnownLayoutVersion().getMetadataLayoutVersion();

public Builder setNetworkLocation(String networkLocation) {
this.networkLocation = networkLocation;
this.setupTime = datanode.getSetupTime();
return this;
}
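
For comparison, a hedged sketch of the consolidated call site enabled by setDatanode(DatanodeInfo), mirroring the NodesEndpoint change earlier in this diff (the local variable names are assumptions, not code from the PR):

// Before: uuid, hostname, network location, op state, version, revision,
// layout version and setup time were each set through individual setters.
// After: one call copies all of them from the DatanodeInfo.
DatanodeMetadata metadata = DatanodeMetadata.newBuilder()
    .setDatanode(datanodeInfo)
    .setState(nodeState)
    .build();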
