diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java
index 342a3e7bd8d9..b1bf4200c233 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/client/ScmClient.java
@@ -20,6 +20,7 @@ import org.apache.commons.lang3.tuple.Pair;
 
 import org.apache.hadoop.hdds.annotation.InterfaceStability;
 import org.apache.hadoop.hdds.scm.DatanodeAdminError;
+import org.apache.hadoop.hdds.scm.container.ContainerReplicaInfo;
 import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline;
 import org.apache.hadoop.hdds.scm.container.ContainerInfo;
 import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
@@ -70,6 +71,16 @@ public interface ScmClient extends Closeable {
   ContainerWithPipeline getContainerWithPipeline(long containerId)
       throws IOException;
 
+  /**
+   * Gets the list of ReplicaInfo known by SCM for a given container.
+   * @param containerId - The Container ID
+   * @return List of ContainerReplicaInfo for the container or an empty list
+   * if none.
+   * @throws IOException
+   */
+  List<ContainerReplicaInfo> getContainerReplicas(
+      long containerId) throws IOException;
+
   /**
    * Close a container.
    *
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerReplicaInfo.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerReplicaInfo.java
new file mode 100644
index 000000000000..b30dff716d2b
--- /dev/null
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/container/ContainerReplicaInfo.java
@@ -0,0 +1,129 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdds.scm.container;
+
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
+import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
+
+import java.util.UUID;
+
+/**
+ * Class which stores ContainerReplica details on the client.
+ */
+public final class ContainerReplicaInfo {
+
+  private long containerID;
+  private String state;
+  private DatanodeDetails datanodeDetails;
+  private UUID placeOfBirth;
+  private long sequenceId;
+  private long keyCount;
+  private long bytesUsed;
+
+  public static ContainerReplicaInfo fromProto(
+      HddsProtos.SCMContainerReplicaProto proto) {
+    ContainerReplicaInfo.Builder builder = new ContainerReplicaInfo.Builder();
+    builder.setContainerID(proto.getContainerID())
+        .setState(proto.getState())
+        .setDatanodeDetails(DatanodeDetails
+            .getFromProtoBuf(proto.getDatanodeDetails()))
+        .setPlaceOfBirth(UUID.fromString(proto.getPlaceOfBirth()))
+        .setSequenceId(proto.getSequenceID())
+        .setKeyCount(proto.getKeyCount())
+        .setBytesUsed(proto.getBytesUsed());
+    return builder.build();
+  }
+
+  private ContainerReplicaInfo() {
+  }
+
+  public long getContainerID() {
+    return containerID;
+  }
+
+  public String getState() {
+    return state;
+  }
+
+  public DatanodeDetails getDatanodeDetails() {
+    return datanodeDetails;
+  }
+
+  public UUID getPlaceOfBirth() {
+    return placeOfBirth;
+  }
+
+  public long getSequenceId() {
+    return sequenceId;
+  }
+
+  public long getKeyCount() {
+    return keyCount;
+  }
+
+  public long getBytesUsed() {
+    return bytesUsed;
+  }
+
+  /**
+   * Builder for ContainerReplicaInfo class.
+   */
+  public static class Builder {
+
+    private final ContainerReplicaInfo subject = new ContainerReplicaInfo();
+
+    public Builder setContainerID(long containerID) {
+      subject.containerID = containerID;
+      return this;
+    }
+
+    public Builder setState(String state) {
+      subject.state = state;
+      return this;
+    }
+
+    public Builder setDatanodeDetails(DatanodeDetails datanodeDetails) {
+      subject.datanodeDetails = datanodeDetails;
+      return this;
+    }
+
+    public Builder setPlaceOfBirth(UUID placeOfBirth) {
+      subject.placeOfBirth = placeOfBirth;
+      return this;
+    }
+
+    public Builder setSequenceId(long sequenceId) {
+      subject.sequenceId = sequenceId;
+      return this;
+    }
+
+    public Builder setKeyCount(long keyCount) {
+      subject.keyCount = keyCount;
+      return this;
+    }
+
+    public Builder setBytesUsed(long bytesUsed) {
+      subject.bytesUsed = bytesUsed;
+      return this;
+    }
+
+    public ContainerReplicaInfo build() {
+      return subject;
+    }
+  }
+}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
index 264a5991b773..579f35131f35 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java
@@ -94,6 +94,15 @@ ContainerWithPipeline allocateContainer(
   ContainerWithPipeline getContainerWithPipeline(long containerID)
       throws IOException;
 
+  /**
+   * Gets the list of ReplicaInfo known by SCM for a given container.
+   * @param containerId ID of the container
+   * @return List of ReplicaInfo for the container or an empty list if none.
+   * @throws IOException
+   */
+  List<HddsProtos.SCMContainerReplicaProto>
+      getContainerReplicas(long containerId) throws IOException;
+
   /**
    * Ask SCM the location of a batch of containers. SCM responds with a group of
    * nodes where these containers and their replicas are located.
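Illustrative usage sketch (not part of the patch): how a caller might consume the new client-facing API above. It assumes "scmClient" is an already-constructed ScmClient implementation (for example the ContainerOperationClient updated later in this patch), plus the usual java.util.List, java.io.IOException, ScmClient and ContainerReplicaInfo imports; the helper name printReplicas is hypothetical.

    // Sketch only: print the replica details SCM knows for one container.
    static void printReplicas(ScmClient scmClient, long containerID)
        throws IOException {
      List<ContainerReplicaInfo> replicas =
          scmClient.getContainerReplicas(containerID);
      for (ContainerReplicaInfo replica : replicas) {
        // Getters provided by the new ContainerReplicaInfo class above.
        System.out.println("Container " + replica.getContainerID()
            + " replica on " + replica.getDatanodeDetails().getHostName()
            + "; state=" + replica.getState()
            + "; origin=" + replica.getPlaceOfBirth()
            + "; keyCount=" + replica.getKeyCount()
            + "; bytesUsed=" + replica.getBytesUsed());
      }
    }
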
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerReplicaInfo.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerReplicaInfo.java new file mode 100644 index 000000000000..195baca2db0c --- /dev/null +++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerReplicaInfo.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hdds.scm.container; + +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.MockDatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.junit.Assert; +import org.junit.Test; + +import java.util.UUID; + +/** + * Test for the ContainerReplicaInfo class. + */ +public class TestContainerReplicaInfo { + + @Test + public void testObjectCreatedFromProto() { + HddsProtos.SCMContainerReplicaProto proto = + HddsProtos.SCMContainerReplicaProto.newBuilder() + .setKeyCount(10) + .setBytesUsed(12345) + .setContainerID(567) + .setPlaceOfBirth(UUID.randomUUID().toString()) + .setSequenceID(5) + .setDatanodeDetails(MockDatanodeDetails.randomDatanodeDetails() + .getProtoBufMessage()) + .setState("OPEN") + .build(); + + ContainerReplicaInfo info = ContainerReplicaInfo.fromProto(proto); + + Assert.assertEquals(proto.getContainerID(), info.getContainerID()); + Assert.assertEquals(proto.getBytesUsed(), info.getBytesUsed()); + Assert.assertEquals(proto.getKeyCount(), info.getKeyCount()); + Assert.assertEquals(proto.getPlaceOfBirth(), + info.getPlaceOfBirth().toString()); + Assert.assertEquals(DatanodeDetails.getFromProtoBuf( + proto.getDatanodeDetails()), info.getDatanodeDetails()); + Assert.assertEquals(proto.getSequenceID(), info.getSequenceId()); + Assert.assertEquals(proto.getState(), info.getState()); + } +} \ No newline at end of file diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/container/package-info.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/container/package-info.java new file mode 100644 index 000000000000..2f459fbcba67 --- /dev/null +++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/container/package-info.java @@ -0,0 +1,21 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + Test cases for SCM container client classes. + */ +package org.apache.hadoop.hdds.scm.container; diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java index b7458222c819..62139eb492e7 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolClientSideTranslatorPB.java @@ -45,6 +45,7 @@ import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.ForceExitSafeModeRequestProto; import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.ForceExitSafeModeResponseProto; import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerRequestProto; +import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerReplicasRequestProto; import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerTokenRequestProto; import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerTokenResponseProto; import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerWithPipelineBatchRequestProto; @@ -251,6 +252,26 @@ public ContainerWithPipeline getContainerWithPipeline(long containerID) } + /** + * {@inheritDoc} + */ + @Override + public List + getContainerReplicas(long containerID) throws IOException { + Preconditions.checkState(containerID >= 0, + "Container ID cannot be negative"); + + GetContainerReplicasRequestProto request = + GetContainerReplicasRequestProto.newBuilder() + .setTraceID(TracingUtil.exportCurrentSpan()) + .setContainerID(containerID).build(); + + ScmContainerLocationResponse response = + submitRequest(Type.GetContainerReplicas, + (builder) -> builder.setGetContainerReplicasRequest(request)); + return response.getGetContainerReplicasResponse().getContainerReplicaList(); + } + /** * {@inheritDoc} */ diff --git a/hadoop-hdds/interface-admin/src/main/proto/ScmAdminProtocol.proto b/hadoop-hdds/interface-admin/src/main/proto/ScmAdminProtocol.proto index f8c6aa764886..313a7c8b6d58 100644 --- a/hadoop-hdds/interface-admin/src/main/proto/ScmAdminProtocol.proto +++ b/hadoop-hdds/interface-admin/src/main/proto/ScmAdminProtocol.proto @@ -75,6 +75,7 @@ message ScmContainerLocationRequest { optional FinalizeScmUpgradeRequestProto finalizeScmUpgradeRequest = 36; optional QueryUpgradeFinalizationProgressRequestProto queryUpgradeFinalizationProgressRequest = 37; optional GetContainerCountRequestProto getContainerCountRequest = 38; + optional GetContainerReplicasRequestProto getContainerReplicasRequest = 39; } message ScmContainerLocationResponse { @@ -121,6 +122,7 @@ message ScmContainerLocationResponse { optional FinalizeScmUpgradeResponseProto 
finalizeScmUpgradeResponse = 36; optional QueryUpgradeFinalizationProgressResponseProto queryUpgradeFinalizationProgressResponse = 37; optional GetContainerCountResponseProto getContainerCountResponse = 38; + optional GetContainerReplicasResponseProto getContainerReplicasResponse = 39; enum Status { OK = 1; @@ -165,6 +167,7 @@ enum Type { FinalizeScmUpgrade = 31; QueryUpgradeFinalizationProgress = 32; GetContainerCount = 33; + GetContainerReplicas = 34; } /** @@ -213,6 +216,15 @@ message GetContainerWithPipelineResponseProto { required ContainerWithPipeline containerWithPipeline = 1; } +message GetContainerReplicasRequestProto { + required int64 containerID = 1; + optional string traceID = 2; +} + +message GetContainerReplicasResponseProto { + repeated SCMContainerReplicaProto containerReplica = 1; +} + message GetContainerWithPipelineBatchRequestProto { repeated int64 containerIDs = 1; optional string traceID = 2; diff --git a/hadoop-hdds/interface-client/src/main/proto/hdds.proto b/hadoop-hdds/interface-client/src/main/proto/hdds.proto index 133f4c694fd9..bc1b35a259c6 100644 --- a/hadoop-hdds/interface-client/src/main/proto/hdds.proto +++ b/hadoop-hdds/interface-client/src/main/proto/hdds.proto @@ -379,3 +379,12 @@ message ContainerReplicaHistoryProto { required int64 bcsId = 4; } +message SCMContainerReplicaProto { + required int64 containerID = 1; + required string state = 2; + required DatanodeDetailsProto datanodeDetails = 3; + required string placeOfBirth = 4; + required int64 sequenceID = 5; + required int64 keyCount = 6; + required int64 bytesUsed = 7; +} diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java index b839e991da16..17da7764fcca 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocolServerSideTranslatorPB.java @@ -43,6 +43,8 @@ import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.FinalizeScmUpgradeResponseProto; import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.ForceExitSafeModeRequestProto; import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.ForceExitSafeModeResponseProto; +import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerReplicasRequestProto; +import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerReplicasResponseProto; import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerRequestProto; import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerResponseProto; import org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.GetContainerTokenRequestProto; @@ -405,6 +407,13 @@ public ScmContainerLocationResponse processRequest( .setGetContainerCountResponse(getContainerCount( request.getGetContainerCountRequest())) .build(); + case GetContainerReplicas: + return ScmContainerLocationResponse.newBuilder() + .setCmdType(request.getCmdType()) + .setStatus(Status.OK) + .setGetContainerReplicasResponse(getContainerReplicas( + request.getGetContainerReplicasRequest())) + 
.build(); default: throw new IllegalArgumentException( "Unknown command type: " + request.getCmdType()); @@ -416,6 +425,14 @@ public ScmContainerLocationResponse processRequest( } } + public GetContainerReplicasResponseProto getContainerReplicas( + GetContainerReplicasRequestProto request) throws IOException { + List replicas + = impl.getContainerReplicas(request.getContainerID()); + return GetContainerReplicasResponseProto.newBuilder() + .addAllContainerReplica(replicas).build(); + } + public ContainerResponseProto allocateContainer(ContainerRequestProto request, int clientVersion) throws IOException { ContainerWithPipeline cp = impl diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java index fa1d316d99db..0cdb5712523e 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java @@ -289,6 +289,28 @@ public ContainerWithPipeline getContainerWithPipeline(long containerID) } } + @Override + public List + getContainerReplicas(long containerId) throws IOException { + List results = new ArrayList<>(); + + Set replicas = getScm().getContainerManager() + .getContainerReplicas(ContainerID.valueOf(containerId)); + for (ContainerReplica r : replicas) { + results.add( + HddsProtos.SCMContainerReplicaProto.newBuilder() + .setContainerID(containerId) + .setState(r.getState().toString()) + .setDatanodeDetails(r.getDatanodeDetails().getProtoBufMessage()) + .setBytesUsed(r.getBytesUsed()) + .setPlaceOfBirth(r.getOriginDatanodeId().toString()) + .setKeyCount(r.getKeyCount()) + .setSequenceID(r.getSequenceId()).build() + ); + } + return results; + } + @Override public List getContainerWithPipelineBatch( List containerIDs) throws IOException { diff --git a/hadoop-hdds/tools/pom.xml b/hadoop-hdds/tools/pom.xml index 9a3bce46d70c..277bff615955 100644 --- a/hadoop-hdds/tools/pom.xml +++ b/hadoop-hdds/tools/pom.xml @@ -83,5 +83,14 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd"> mockito-core test + + org.slf4j + slf4j-api + + + org.slf4j + slf4j-log4j12 + ${slf4j.version} + diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java index 123a74d70920..ca4e41bd9d4c 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ContainerOperationClient.java @@ -33,6 +33,7 @@ import org.apache.hadoop.hdds.scm.client.ScmClient; import org.apache.hadoop.hdds.scm.container.ContainerID; import org.apache.hadoop.hdds.scm.container.ContainerInfo; +import org.apache.hadoop.hdds.scm.container.ContainerReplicaInfo; import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline; import org.apache.hadoop.hdds.scm.pipeline.Pipeline; import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol; @@ -45,6 +46,7 @@ import java.io.IOException; import java.security.cert.X509Certificate; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Optional; @@ -480,6 +482,25 @@ public ContainerWithPipeline getContainerWithPipeline(long containerId) return 
storageContainerLocationClient.getContainerWithPipeline(containerId); } + /** + * Gets the list of ReplicaInfo known by SCM for a given container. + * @param containerId - The Container ID + * @return List of ContainerReplicaInfo for the container or an empty list + * if none. + * @throws IOException + */ + @Override + public List + getContainerReplicas(long containerId) throws IOException { + List protos + = storageContainerLocationClient.getContainerReplicas(containerId); + List replicas = new ArrayList<>(); + for (HddsProtos.SCMContainerReplicaProto p : protos) { + replicas.add(ContainerReplicaInfo.fromProto(p)); + } + return replicas; + } + /** * Close a container. * diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java index 65884d24a4db..7b0daa6de752 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hdds.scm.cli.container; import java.io.IOException; +import java.util.List; import java.util.stream.Collectors; import org.apache.hadoop.hdds.cli.GenericParentCommand; @@ -25,10 +26,13 @@ import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.hdds.scm.cli.ScmSubcommand; import org.apache.hadoop.hdds.scm.client.ScmClient; +import org.apache.hadoop.hdds.scm.container.ContainerInfo; +import org.apache.hadoop.hdds.scm.container.ContainerReplicaInfo; import org.apache.hadoop.hdds.scm.container.common.helpers .ContainerWithPipeline; import com.google.common.base.Preconditions; +import org.apache.hadoop.hdds.scm.pipeline.Pipeline; import org.apache.hadoop.hdds.server.JsonUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -67,13 +71,23 @@ public void execute(ScmClient scmClient) throws IOException { final ContainerWithPipeline container = scmClient. getContainerWithPipeline(containerID); Preconditions.checkNotNull(container, "Container cannot be null"); + List replicas = null; + try { + replicas = scmClient.getContainerReplicas(containerID); + } catch (IOException e) { + LOG.error("Unable to retrieve the replica details", e); + } if (json) { - LOG.info(JsonUtils.toJsonStringWithDefaultPrettyPrinter(container)); + ContainerWithPipelineAndReplicas wrapper = + new ContainerWithPipelineAndReplicas(container.getContainerInfo(), + container.getPipeline(), replicas); + LOG.info(JsonUtils.toJsonStringWithDefaultPrettyPrinter(wrapper)); } else { // Print container report info. 
LOG.info("Container id: {}", containerID); - boolean verbose = spec.root().userObject() instanceof GenericParentCommand + boolean verbose = spec != null + && spec.root().userObject() instanceof GenericParentCommand && ((GenericParentCommand) spec.root().userObject()).isVerbose(); if (verbose) { LOG.info("Pipeline Info: {}", container.getPipeline()); @@ -87,10 +101,53 @@ public void execute(ScmClient scmClient) throws IOException { InfoSubcommand::buildDatanodeDetails) .collect(Collectors.joining(",\n")); LOG.info("Datanodes: [{}]", machinesStr); + + // Print the replica details if available + if (replicas != null) { + String replicaStr = replicas.stream().map( + InfoSubcommand::buildReplicaDetails) + .collect(Collectors.joining(",\n")); + LOG.info("Replicas: [{}]", replicaStr); + } } } private static String buildDatanodeDetails(DatanodeDetails details) { return details.getUuidString() + "/" + details.getHostName(); } + + private static String buildReplicaDetails(ContainerReplicaInfo replica) { + StringBuilder sb = new StringBuilder(); + sb.append("State: " + replica.getState() + ";"); + sb.append(" Origin: " + replica.getPlaceOfBirth().toString() + ";"); + sb.append(" Location: " + + buildDatanodeDetails(replica.getDatanodeDetails())); + return sb.toString(); + } + + private static class ContainerWithPipelineAndReplicas { + + private ContainerInfo containerInfo; + private Pipeline pipeline; + private List replicas; + + ContainerWithPipelineAndReplicas(ContainerInfo container, Pipeline pipeline, + List replicas) { + this.containerInfo = container; + this.pipeline = pipeline; + this.replicas = replicas; + } + + public ContainerInfo getContainerInfo() { + return containerInfo; + } + + public Pipeline getPipeline() { + return pipeline; + } + + public List getReplicas() { + return replicas; + } + } } diff --git a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestInfoSubCommand.java b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestInfoSubCommand.java new file mode 100644 index 000000000000..ad43f9e0c200 --- /dev/null +++ b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestInfoSubCommand.java @@ -0,0 +1,249 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hdds.scm.cli.container; + +import org.apache.hadoop.hdds.client.RatisReplicationConfig; +import org.apache.hadoop.hdds.protocol.DatanodeDetails; +import org.apache.hadoop.hdds.protocol.proto.HddsProtos; +import org.apache.hadoop.hdds.scm.client.ScmClient; +import org.apache.hadoop.hdds.scm.container.ContainerInfo; +import org.apache.hadoop.hdds.scm.container.ContainerReplicaInfo; +import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline; +import org.apache.hadoop.hdds.scm.pipeline.Pipeline; +import org.apache.hadoop.hdds.scm.pipeline.PipelineID; +import org.apache.log4j.AppenderSkeleton; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.apache.log4j.spi.LoggingEvent; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; +import picocli.CommandLine; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState.CLOSED; +import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor.THREE; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.Mockito.mock; + +/** + * Tests for InfoSubCommand class. + */ +public class TestInfoSubCommand { + + private ScmClient scmClient; + private InfoSubcommand cmd; + private List datanodes; + private Logger logger; + private TestAppender appender; + + @Before + public void setup() throws IOException { + scmClient = mock(ScmClient.class); + datanodes = createDatanodeDetails(3); + Mockito.when(scmClient.getContainerWithPipeline(anyLong())) + .thenReturn(getContainerWithPipeline()); + + appender = new TestAppender(); + logger = Logger.getLogger( + org.apache.hadoop.hdds.scm.cli.container.InfoSubcommand.class); + logger.addAppender(appender); + } + + @After + public void after() { + logger.removeAppender(appender); + } + + @Test + public void testReplicasIncludedInOutput() throws Exception { + Mockito.when(scmClient.getContainerReplicas(anyLong())) + .thenReturn(getReplicas()); + cmd = new InfoSubcommand(); + CommandLine c = new CommandLine(cmd); + c.parseArgs("1"); + cmd.execute(scmClient); + + // Ensure we have a line for Replicas: + List logs = appender.getLog(); + List replica = logs.stream() + .filter(m -> m.getRenderedMessage().matches("(?s)^Replicas:.*")) + .collect(Collectors.toList()); + Assert.assertEquals(1, replica.size()); + + // Ensure each DN UUID is mentioned in the message: + for (DatanodeDetails dn : datanodes) { + Pattern pattern = Pattern.compile(".*" + dn.getUuid().toString() + ".*", + Pattern.DOTALL); + Matcher matcher = pattern.matcher(replica.get(0).getRenderedMessage()); + Assert.assertTrue(matcher.matches()); + } + } + + @Test + public void testReplicasNotOutputIfError() throws IOException { + Mockito.when(scmClient.getContainerReplicas(anyLong())) + .thenThrow(new IOException("Error getting Replicas")); + cmd = new InfoSubcommand(); + CommandLine c = new CommandLine(cmd); + c.parseArgs("1"); 
+ cmd.execute(scmClient); + + // Ensure we have no lines for Replicas: + List logs = appender.getLog(); + List replica = logs.stream() + .filter(m -> m.getRenderedMessage().matches("(?s)^Replicas:.*")) + .collect(Collectors.toList()); + Assert.assertEquals(0, replica.size()); + + // Ensure we have an error logged: + List error = logs.stream() + .filter(m -> m.getLevel() == Level.ERROR) + .collect(Collectors.toList()); + Assert.assertEquals(1, error.size()); + Assert.assertTrue(error.get(0).getRenderedMessage() + .matches("(?s)^Unable to retrieve the replica details.*")); + } + + @Test + public void testReplicasNotOutputIfErrorWithJson() throws IOException { + Mockito.when(scmClient.getContainerReplicas(anyLong())) + .thenThrow(new IOException("Error getting Replicas")); + cmd = new InfoSubcommand(); + CommandLine c = new CommandLine(cmd); + c.parseArgs("1", "--json"); + cmd.execute(scmClient); + + List logs = appender.getLog(); + Assert.assertEquals(2, logs.size()); + String error = logs.get(0).getRenderedMessage(); + String json = logs.get(1).getRenderedMessage(); + + Assert.assertTrue(error + .matches("(?s)^Unable to retrieve the replica details.*")); + Assert.assertFalse(json.matches("(?s).*replicas.*")); + } + + @Test + public void testReplicasOutputWithJson() throws IOException { + Mockito.when(scmClient.getContainerReplicas(anyLong())) + .thenReturn(getReplicas()); + cmd = new InfoSubcommand(); + CommandLine c = new CommandLine(cmd); + c.parseArgs("1", "--json"); + cmd.execute(scmClient); + + List logs = appender.getLog(); + Assert.assertEquals(1, logs.size()); + + // Ensure each DN UUID is mentioned in the message after replicas: + String json = logs.get(0).getRenderedMessage(); + Assert.assertTrue(json.matches("(?s).*replicas.*")); + for (DatanodeDetails dn : datanodes) { + Pattern pattern = Pattern.compile( + ".*replicas.*" + dn.getUuid().toString() + ".*", Pattern.DOTALL); + Matcher matcher = pattern.matcher(json); + Assert.assertTrue(matcher.matches()); + } + } + + private List getReplicas() { + List replicas = new ArrayList<>(); + for (DatanodeDetails dn : datanodes) { + ContainerReplicaInfo container = new ContainerReplicaInfo.Builder() + .setContainerID(1) + .setBytesUsed(1234) + .setState("CLOSED") + .setPlaceOfBirth(dn.getUuid()) + .setDatanodeDetails(dn) + .setKeyCount(1) + .setSequenceId(1).build(); + replicas.add(container); + } + return replicas; + } + + private ContainerWithPipeline getContainerWithPipeline() { + Pipeline pipeline = new Pipeline.Builder() + .setState(Pipeline.PipelineState.CLOSED) + .setReplicationConfig(new RatisReplicationConfig(THREE)) + .setId(PipelineID.randomId()) + .setNodes(datanodes) + .build(); + + ContainerInfo container = new ContainerInfo.Builder() + .setSequenceId(1) + .setPipelineID(pipeline.getId()) + .setUsedBytes(1234) + .setReplicationConfig(new RatisReplicationConfig(THREE)) + .setNumberOfKeys(1) + .setState(CLOSED) + .build(); + + return new ContainerWithPipeline(container, pipeline); + } + + private List createDatanodeDetails(int count) { + List dns = new ArrayList<>(); + for (int i = 0; i < count; i++) { + HddsProtos.DatanodeDetailsProto dnd = + HddsProtos.DatanodeDetailsProto.newBuilder() + .setHostName("host" + i) + .setIpAddress("1.2.3." 
+ i + 1) + .setNetworkLocation("/default") + .setNetworkName("host" + i) + .addPorts(HddsProtos.Port.newBuilder() + .setName("ratis").setValue(5678).build()) + .setUuid(UUID.randomUUID().toString()) + .build(); + dns.add(DatanodeDetails.getFromProtoBuf(dnd)); + } + return dns; + } + + private static class TestAppender extends AppenderSkeleton { + private final List log = new ArrayList<>(); + + @Override + public boolean requiresLayout() { + return false; + } + + @Override + protected void append(final LoggingEvent loggingEvent) { + log.add(loggingEvent); + } + + @Override + public void close() { + } + + public List getLog() { + return new ArrayList<>(log); + } + } +}
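
End-to-end, the patch wires ContainerManager#getContainerReplicas through the new SCMContainerReplicaProto message into the "ozone admin container info" command, which prints one State/Origin/Location entry per replica. The following sketch (illustrative only, not part of the patch; the class name ReplicaFormatSketch is hypothetical) builds a ContainerReplicaInfo with the new Builder and formats it the same way InfoSubcommand#buildReplicaDetails does; MockDatanodeDetails is the test-only helper already used in the tests above.

    import org.apache.hadoop.hdds.protocol.DatanodeDetails;
    import org.apache.hadoop.hdds.protocol.MockDatanodeDetails;
    import org.apache.hadoop.hdds.scm.container.ContainerReplicaInfo;

    import java.util.UUID;

    public final class ReplicaFormatSketch {
      public static void main(String[] args) {
        // A replica built by hand, mirroring TestInfoSubCommand#getReplicas().
        DatanodeDetails dn = MockDatanodeDetails.randomDatanodeDetails();
        ContainerReplicaInfo replica = new ContainerReplicaInfo.Builder()
            .setContainerID(1)
            .setState("CLOSED")
            .setDatanodeDetails(dn)
            .setPlaceOfBirth(UUID.randomUUID())
            .setSequenceId(1)
            .setKeyCount(10)
            .setBytesUsed(12345)
            .build();
        // Same layout as InfoSubcommand#buildReplicaDetails, e.g.
        // "State: CLOSED; Origin: <uuid>; Location: <uuid>/<hostname>"
        System.out.println("State: " + replica.getState()
            + "; Origin: " + replica.getPlaceOfBirth()
            + "; Location: " + replica.getDatanodeDetails().getUuidString()
            + "/" + replica.getDatanodeDetails().getHostName());
      }
    }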