diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/OzoneAdmin.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/OzoneAdmin.java
index 093dd93430b9..cc496a28e777 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/OzoneAdmin.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/cli/OzoneAdmin.java
@@ -22,13 +22,7 @@
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.tracing.TracingUtil;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.NativeCodeLoader;
-import org.apache.log4j.ConsoleAppender;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.Logger;
-import org.apache.log4j.PatternLayout;
 import picocli.CommandLine;
 
 /**
@@ -75,12 +69,6 @@ public UserGroupInformation getUser() throws IOException {
    * @param argv - System Args Strings[]
    */
   public static void main(String[] argv) {
-    LogManager.resetConfiguration();
-    Logger.getRootLogger().setLevel(Level.INFO);
-    Logger.getRootLogger()
-        .addAppender(new ConsoleAppender(new PatternLayout("%m%n")));
-    Logger.getLogger(NativeCodeLoader.class).setLevel(Level.ERROR);
-
     new OzoneAdmin().run(argv);
   }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStartSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStartSubcommand.java
index ff82b82ec87a..29f2f3d45727 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStartSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStartSubcommand.java
@@ -19,8 +19,6 @@
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import picocli.CommandLine.Command;
 
 import java.io.IOException;
@@ -35,12 +33,9 @@
     versionProvider = HddsVersionProvider.class)
 public class ReplicationManagerStartSubcommand extends ScmSubcommand {
 
-  private static final Logger LOG =
-      LoggerFactory.getLogger(ReplicationManagerStartSubcommand.class);
-
   @Override
   public void execute(ScmClient scmClient) throws IOException {
     scmClient.startReplicationManager();
-    LOG.info("Starting ReplicationManager...");
+    System.out.println("Starting ReplicationManager...");
   }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStatusSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStatusSubcommand.java
index 9bc3649dd9f0..b2e308e14227 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStatusSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStatusSubcommand.java
@@ -19,8 +19,6 @@
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import picocli.CommandLine.Command;
 
 import java.io.IOException;
@@ -35,18 +33,15 @@
     versionProvider = HddsVersionProvider.class)
 public class ReplicationManagerStatusSubcommand extends ScmSubcommand {
 
-  private static final Logger LOG =
-      LoggerFactory.getLogger(ReplicationManagerStatusSubcommand.class);
-
   @Override
   public void execute(ScmClient scmClient) throws IOException {
     boolean execReturn = scmClient.getReplicationManagerStatus();
 
     // Output data list
     if (execReturn) {
-      LOG.info("ReplicationManager is Running.");
+      System.out.println("ReplicationManager is Running.");
     } else {
-      LOG.info("ReplicationManager is Not Running.");
+      System.out.println("ReplicationManager is Not Running.");
     }
   }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStopSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStopSubcommand.java
index 7d3063a7636c..12de13c07d26 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStopSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/ReplicationManagerStopSubcommand.java
@@ -19,8 +19,6 @@
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import picocli.CommandLine.Command;
 
 import java.io.IOException;
@@ -35,14 +33,11 @@
     versionProvider = HddsVersionProvider.class)
 public class ReplicationManagerStopSubcommand extends ScmSubcommand {
 
-  private static final Logger LOG =
-      LoggerFactory.getLogger(ReplicationManagerStopSubcommand.class);
-
   @Override
   public void execute(ScmClient scmClient) throws IOException {
     scmClient.stopReplicationManager();
-    LOG.info("Stopping ReplicationManager...");
-    LOG.info("Requested SCM to stop ReplicationManager, " +
+    System.out.println("Stopping ReplicationManager...");
+    System.out.println("Requested SCM to stop ReplicationManager, " +
         "it might take sometime for the ReplicationManager to stop.");
   }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeCheckSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeCheckSubcommand.java
index db2f02c5e125..747215dcac71 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeCheckSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeCheckSubcommand.java
@@ -24,8 +24,6 @@
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import picocli.CommandLine;
 import picocli.CommandLine.Command;
 
@@ -39,9 +37,6 @@
     versionProvider = HddsVersionProvider.class)
 public class SafeModeCheckSubcommand extends ScmSubcommand {
 
-  private static final Logger LOG =
-      LoggerFactory.getLogger(SafeModeCheckSubcommand.class);
-
   @CommandLine.Option(names = {"--verbose"},
       description = "Show detailed status of rules.")
   private boolean verbose;
@@ -52,17 +47,17 @@ public void execute(ScmClient scmClient) throws IOException {
 
     // Output data list
     if (execReturn) {
-      LOG.info("SCM is in safe mode.");
+      System.out.println("SCM is in safe mode.");
       if (verbose) {
         for (Map.Entry<String, Pair<Boolean, String>> entry :
             scmClient.getSafeModeRuleStatuses().entrySet()) {
           Pair<Boolean, String> value = entry.getValue();
-          LOG.info("validated:{}, {}, {}",
+          System.out.printf("validated:%s, %s, %s%n",
               value.getLeft(), entry.getKey(), value.getRight());
         }
       }
     } else {
-      LOG.info("SCM is out of safe mode.");
+      System.out.println("SCM is out of safe mode.");
     }
   }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeExitSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeExitSubcommand.java
index bcf64deb85e2..e4173c9767e3 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeExitSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeExitSubcommand.java
@@ -22,8 +22,6 @@
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import picocli.CommandLine.Command;
 
 /**
@@ -36,14 +34,11 @@
     versionProvider = HddsVersionProvider.class)
 public class SafeModeExitSubcommand extends ScmSubcommand {
 
-  private static final Logger LOG =
-      LoggerFactory.getLogger(SafeModeExitSubcommand.class);
-
   @Override
   public void execute(ScmClient scmClient) throws IOException {
     boolean execReturn = scmClient.forceExitSafeMode();
     if (execReturn) {
-      LOG.info("SCM exit safe mode successfully.");
+      System.out.println("SCM exit safe mode successfully.");
     }
   }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeWaitSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeWaitSubcommand.java
index abaca08cfbb9..ad94d4fffd0d 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeWaitSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/SafeModeWaitSubcommand.java
@@ -23,8 +23,6 @@
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import picocli.CommandLine.Command;
 import picocli.CommandLine.Option;
 import picocli.CommandLine.Mixin;
@@ -39,9 +37,6 @@
     versionProvider = HddsVersionProvider.class)
 public class SafeModeWaitSubcommand implements Callable<Void> {
 
-  private static final Logger LOG =
-      LoggerFactory.getLogger(SafeModeWaitSubcommand.class);
-
   @Option(description = "Define timeout (in second) to wait until (exit code 1) "
       + "or until safemode is ended (exit code 0).", defaultValue = "30",
@@ -62,26 +57,26 @@ public Void call() throws Exception {
       long remainingTime;
       do {
         if (!scmClient.inSafeMode()) {
-          LOG.info("SCM is out of safe mode.");
+          System.out.println("SCM is out of safe mode.");
           return null;
         }
 
         remainingTime = getRemainingTimeInSec();
         if (remainingTime > 0) {
-          LOG.info(
+          System.out.printf(
               "SCM is in safe mode. Will retry in 1 sec. Remaining time "
-                  + "(sec): {}",
+                  + "(sec): %s%n",
               remainingTime);
           Thread.sleep(1000);
         } else {
-          LOG.info("SCM is in safe mode. No more retries.");
+          System.out.println("SCM is in safe mode. No more retries.");
         }
       } while (remainingTime > 0);
     } catch (InterruptedException ex) {
-      LOG.info(
-          "SCM is not available (yet?). Error is {}. Will retry in 1 sec. "
-              + "Remaining time (sec): {}",
+      System.out.printf(
+          "SCM is not available (yet?). Error is %s. Will retry in 1 sec. "
+              + "Remaining time (sec): %s%n",
           ex.getMessage(), getRemainingTimeInSec());
       Thread.sleep(1000);
       Thread.currentThread().interrupt();
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpiredCertsSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpiredCertsSubcommand.java
index cab7a29a4ea6..09caf8147ad4 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpiredCertsSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/CleanExpiredCertsSubcommand.java
@@ -19,8 +19,6 @@
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.protocol.SCMSecurityProtocol;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import picocli.CommandLine;
 
 import java.io.IOException;
@@ -36,13 +34,10 @@
     versionProvider = HddsVersionProvider.class)
 public class CleanExpiredCertsSubcommand extends ScmCertSubcommand {
 
-  private static final Logger LOG =
-      LoggerFactory.getLogger(CleanExpiredCertsSubcommand.class);
-
   @Override
   protected void execute(SCMSecurityProtocol client) throws IOException {
     List<String> pemEncodedCerts = client.removeExpiredCertificates();
-    LOG.info("List of removed expired certificates:");
-    printCertList(LOG, pemEncodedCerts);
+    System.out.println("List of removed expired certificates:");
+    printCertList(pemEncodedCerts);
   }
 }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/InfoSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/InfoSubcommand.java
index 6177c8f7ff4e..c708d424d9c9 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/InfoSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/InfoSubcommand.java
@@ -26,12 +26,8 @@
 import org.apache.hadoop.hdds.protocol.SCMSecurityProtocol;
 import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import picocli.CommandLine.Command;
-import picocli.CommandLine.Model.CommandSpec;
 import picocli.CommandLine.Parameters;
-import picocli.CommandLine.Spec;
 
 /**
  * This is the handler that process certificate info command.
@@ -44,12 +40,6 @@
 class InfoSubcommand extends ScmCertSubcommand {
 
-  private static final Logger LOG =
-      LoggerFactory.getLogger(InfoSubcommand.class);
-
-  @Spec
-  private CommandSpec spec;
-
   @Parameters(description = "Serial id of the certificate in decimal.")
   private String serialId;
 
@@ -61,12 +51,12 @@ public void execute(SCMSecurityProtocol client) throws IOException {
         "Certificate can't be found");
 
     // Print container report info.
- LOG.info("Certificate id: {}", serialId); + System.out.printf("Certificate id: %s%n", serialId); try { X509Certificate cert = CertificateCodec.getX509Certificate(certPemStr); - LOG.info(cert.toString()); + System.out.println(cert); } catch (CertificateException ex) { - LOG.error("Failed to get certificate id " + serialId); + System.err.println("Failed to get certificate id " + serialId); throw new IOException("Fail to get certificate id " + serialId, ex); } } diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/ListSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/ListSubcommand.java index c2e0bd7fadff..ea0898381478 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/ListSubcommand.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/ListSubcommand.java @@ -36,8 +36,6 @@ import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec; import org.apache.hadoop.hdds.server.JsonUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import picocli.CommandLine.Command; import picocli.CommandLine.Help.Visibility; import picocli.CommandLine.Option; @@ -54,9 +52,6 @@ versionProvider = HddsVersionProvider.class) public class ListSubcommand extends ScmCertSubcommand { - private static final Logger LOG = - LoggerFactory.getLogger(ListSubcommand.class); - @Option(names = {"-s", "--start"}, description = "Certificate serial id to start the iteration", defaultValue = "0", showDefaultValue = Visibility.ALWAYS) @@ -114,7 +109,7 @@ protected void execute(SCMSecurityProtocol client) throws IOException { CertificateCodec.getX509Certificate(certPemStr); certList.add(new Certificate(cert)); } catch (CertificateException ex) { - LOG.error("Failed to parse certificate."); + err.println("Failed to parse certificate."); } } System.out.println( @@ -122,9 +117,9 @@ protected void execute(SCMSecurityProtocol client) throws IOException { return; } - LOG.info("Certificate list:(Type={}, BatchSize={}, CertCount={})", + System.out.printf("Certificate list:(Type=%s, BatchSize=%s, CertCount=%s)%n", type.toUpperCase(), count, certPemList.size()); - printCertList(LOG, certPemList); + printCertList(certPemList); } private static class BigIntJsonSerializer extends JsonSerializer { diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/ScmCertSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/ScmCertSubcommand.java index d7ebb44e0ffc..354adbb5d6ba 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/ScmCertSubcommand.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/cert/ScmCertSubcommand.java @@ -20,7 +20,6 @@ import org.apache.hadoop.hdds.protocol.SCMSecurityProtocol; import org.apache.hadoop.hdds.scm.cli.ScmOption; import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec; -import org.slf4j.Logger; import picocli.CommandLine; import java.io.IOException; @@ -37,29 +36,29 @@ public abstract class ScmCertSubcommand implements Callable { @CommandLine.Mixin private ScmOption scmOption; - private static final String OUTPUT_FORMAT = "%-17s %-30s %-30s %-110s %-110s"; + private static final String OUTPUT_FORMAT = "%-17s %-30s %-30s %-110s %-110s%n"; - protected void printCertList(Logger log, List pemEncodedCerts) { + protected void printCertList(List pemEncodedCerts) { if (pemEncodedCerts.isEmpty()) { 
- log.info("No certificates to list"); + System.out.println("No certificates to list"); return; } - log.info(String.format(OUTPUT_FORMAT, "SerialNumber", "Valid From", - "Expiry", "Subject", "Issuer")); + System.out.printf(OUTPUT_FORMAT, "SerialNumber", "Valid From", + "Expiry", "Subject", "Issuer"); for (String certPemStr : pemEncodedCerts) { try { X509Certificate cert = CertificateCodec.getX509Certificate(certPemStr); - printCert(cert, log); + printCert(cert); } catch (CertificateException e) { - log.error("Failed to parse certificate.", e); + System.err.println("Failed to parse certificate: " + e.getMessage()); } } } - protected void printCert(X509Certificate cert, Logger log) { - log.info(String.format(OUTPUT_FORMAT, cert.getSerialNumber(), + protected void printCert(X509Certificate cert) { + System.out.printf(OUTPUT_FORMAT, cert.getSerialNumber(), cert.getNotBefore(), cert.getNotAfter(), cert.getSubjectDN(), - cert.getIssuerDN())); + cert.getIssuerDN()); } protected abstract void execute(SCMSecurityProtocol client) diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateSubcommand.java index 9eedbf858958..313dc64c9fc9 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateSubcommand.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/CreateSubcommand.java @@ -25,8 +25,6 @@ import org.apache.hadoop.hdds.scm.container.common.helpers .ContainerWithPipeline; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import picocli.CommandLine.Command; import picocli.CommandLine.Option; @@ -40,9 +38,6 @@ versionProvider = HddsVersionProvider.class) public class CreateSubcommand extends ScmSubcommand { - private static final Logger LOG = - LoggerFactory.getLogger(CreateSubcommand.class); - @Option(description = "Owner of the new container", defaultValue = "OZONE", names = { "-o", "--owner"}) private String owner; @@ -50,7 +45,7 @@ public class CreateSubcommand extends ScmSubcommand { @Override public void execute(ScmClient scmClient) throws IOException { ContainerWithPipeline container = scmClient.createContainer(owner); - LOG.info("Container {} is created.", + System.out.printf("Container %s is created.%n", container.getContainerInfo().getContainerID()); } } diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java index 8ed9f520b29d..0e67661bba1d 100644 --- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java +++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java @@ -45,8 +45,6 @@ import org.apache.hadoop.hdds.scm.pipeline.PipelineID; import org.apache.hadoop.hdds.scm.pipeline.PipelineNotFoundException; import org.apache.hadoop.hdds.server.JsonUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import picocli.CommandLine; import picocli.CommandLine.Command; import picocli.CommandLine.Model.CommandSpec; @@ -63,9 +61,6 @@ versionProvider = HddsVersionProvider.class) public class InfoSubcommand extends ScmSubcommand { - private static final Logger LOG = - LoggerFactory.getLogger(InfoSubcommand.class); - @Spec private CommandSpec spec; @@ -126,13 +121,13 @@ private void printOutput(ScmClient scmClient, String id, boolean first) private void printHeader() { 
     if (json && multiContainer) {
-      LOG.info("[");
+      System.out.println("[");
     }
   }
 
   private void printFooter() {
     if (json && multiContainer) {
-      LOG.info("]");
+      System.out.println("]");
     }
   }
 
@@ -142,9 +137,9 @@ private void printError(String error) {
 
   private void printBreak() {
     if (json) {
-      LOG.info(",");
+      System.out.println(",");
     } else {
-      LOG.info("");
+      System.out.println("");
     }
   }
 
@@ -175,47 +170,47 @@ private void printDetails(ScmClient scmClient, long containerID,
           new ContainerWithPipelineAndReplicas(container.getContainerInfo(),
               container.getPipeline(), replicas,
               container.getContainerInfo().getPipelineID());
-      LOG.info(JsonUtils.toJsonStringWithDefaultPrettyPrinter(wrapper));
+      System.out.println(JsonUtils.toJsonStringWithDefaultPrettyPrinter(wrapper));
       } else {
         ContainerWithoutDatanodes wrapper =
             new ContainerWithoutDatanodes(container.getContainerInfo(),
                 container.getPipeline(), replicas,
                 container.getContainerInfo().getPipelineID());
-        LOG.info(JsonUtils.toJsonStringWithDefaultPrettyPrinter(wrapper));
+        System.out.println(JsonUtils.toJsonStringWithDefaultPrettyPrinter(wrapper));
       }
     } else {
       // Print container report info.
-      LOG.info("Container id: {}", containerID);
+      System.out.printf("Container id: %s%n", containerID);
       boolean verbose = spec != null
           && spec.root().userObject() instanceof GenericParentCommand
           && ((GenericParentCommand) spec.root().userObject()).isVerbose();
       if (verbose) {
-        LOG.info("Pipeline Info: {}", container.getPipeline());
+        System.out.printf("Pipeline Info: %s%n", container.getPipeline());
       } else {
-        LOG.info("Pipeline id: {}", container.getPipeline().getId().getId());
+        System.out.printf("Pipeline id: %s%n", container.getPipeline().getId().getId());
       }
-      LOG.info("Write PipelineId: {}",
+      System.out.printf("Write PipelineId: %s%n",
          container.getContainerInfo().getPipelineID().getId());
       try {
         String pipelineState = scmClient.getPipeline(
             container.getContainerInfo().getPipelineID().getProtobuf())
             .getPipelineState().toString();
-        LOG.info("Write Pipeline State: {}", pipelineState);
+        System.out.printf("Write Pipeline State: %s%n", pipelineState);
       } catch (IOException ioe) {
         if (SCMHAUtils.unwrapException(
             ioe) instanceof PipelineNotFoundException) {
-          LOG.info("Write Pipeline State: CLOSED");
+          System.out.println("Write Pipeline State: CLOSED");
        } else {
          printError("Failed to retrieve pipeline info");
        }
      }
-      LOG.info("Container State: {}", container.getContainerInfo().getState());
+      System.out.printf("Container State: %s%n", container.getContainerInfo().getState());
 
       // Print pipeline of an existing container.
       String machinesStr = container.getPipeline().getNodes().stream().map(
           InfoSubcommand::buildDatanodeDetails)
           .collect(Collectors.joining(",\n"));
-      LOG.info("Datanodes: [{}]", machinesStr);
+      System.out.printf("Datanodes: [%s]%n", machinesStr);
 
       // Print the replica details if available
       if (replicas != null) {
@@ -223,7 +218,7 @@ private void printDetails(ScmClient scmClient, long containerID,
             .sorted(Comparator.comparing(ContainerReplicaInfo::getReplicaIndex))
             .map(InfoSubcommand::buildReplicaDetails)
             .collect(Collectors.joining(",\n"));
-        LOG.info("Replicas: [{}]", replicaStr);
+        System.out.printf("Replicas: [%s]%n", replicaStr);
       }
     }
   }
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListSubcommand.java
index b120fe4169da..ecc43d04087a 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/ListSubcommand.java
@@ -36,8 +36,6 @@
 import com.fasterxml.jackson.databind.ObjectWriter;
 import com.fasterxml.jackson.databind.SerializationFeature;
 import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import picocli.CommandLine.Command;
 import picocli.CommandLine.Help.Visibility;
 import picocli.CommandLine.Option;
@@ -52,9 +50,6 @@
     versionProvider = HddsVersionProvider.class)
 public class ListSubcommand extends ScmSubcommand {
 
-  private static final Logger LOG =
-      LoggerFactory.getLogger(ListSubcommand.class);
-
   @Option(names = {"-s", "--start"},
       description = "Container id to start the iteration")
   private long startId;
@@ -94,7 +89,7 @@ public class ListSubcommand extends ScmSubcommand {
   private void outputContainerInfo(ContainerInfo containerInfo)
       throws IOException {
     // Print container report info.
- LOG.info("{}", WRITER.writeValueAsString(containerInfo)); + System.out.println(WRITER.writeValueAsString(containerInfo)); } @Override diff --git a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestInfoSubCommand.java b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestInfoSubCommand.java index d8c1addb78e0..efc11d550f55 100644 --- a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestInfoSubCommand.java +++ b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestInfoSubCommand.java @@ -28,9 +28,6 @@ import org.apache.hadoop.hdds.scm.pipeline.Pipeline; import org.apache.hadoop.hdds.scm.pipeline.PipelineID; import org.apache.hadoop.hdds.scm.pipeline.PipelineNotFoundException; -import org.apache.log4j.AppenderSkeleton; -import org.apache.log4j.Logger; -import org.apache.log4j.spi.LoggingEvent; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -44,6 +41,7 @@ import java.io.UnsupportedEncodingException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.regex.Matcher; @@ -52,6 +50,7 @@ import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.LifeCycleState.CLOSED; import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor.THREE; +import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -69,8 +68,6 @@ public class TestInfoSubCommand { private ScmClient scmClient; private InfoSubcommand cmd; private List datanodes; - private Logger logger; - private TestAppender appender; private final ByteArrayOutputStream outContent = new ByteArrayOutputStream(); private final ByteArrayOutputStream errContent = new ByteArrayOutputStream(); @@ -88,18 +85,12 @@ public void setup() throws IOException { when(scmClient.getContainerWithPipeline(anyLong())).then(i -> getContainerWithPipeline(i.getArgument(0))); when(scmClient.getPipeline(any())).thenThrow(new PipelineNotFoundException("Pipeline not found.")); - appender = new TestAppender(); - logger = Logger.getLogger( - org.apache.hadoop.hdds.scm.cli.container.InfoSubcommand.class); - logger.addAppender(appender); - System.setOut(new PrintStream(outContent, false, DEFAULT_ENCODING)); System.setErr(new PrintStream(errContent, false, DEFAULT_ENCODING)); } @AfterEach public void after() { - logger.removeAppender(appender); System.setOut(originalOut); System.setErr(originalErr); System.setIn(originalIn); @@ -150,10 +141,8 @@ public void testContainersCanBeReadFromStdin() throws IOException { private void validateMultiOutput() throws UnsupportedEncodingException { // Ensure we have a log line for each containerID - List logs = appender.getLog(); - List replica = logs.stream() - .filter(m -> m.getRenderedMessage() - .matches("(?s)^Container id: (1|123|456|789).*")) + List replica = Arrays.stream(outContent.toString(DEFAULT_ENCODING).split("\n")) + .filter(m -> m.matches("(?s)^Container id: (1|123|456|789).*")) .collect(Collectors.toList()); assertEquals(4, replica.size()); @@ -191,10 +180,8 @@ public void testMultipleContainersCanBePassedJson() throws Exception { private void validateJsonMultiOutput() throws UnsupportedEncodingException { // Ensure we have a log line for each 
containerID - List logs = appender.getLog(); - List replica = logs.stream() - .filter(m -> m.getRenderedMessage() - .matches("(?s)^.*\"containerInfo\".*")) + List replica = Arrays.stream(outContent.toString(DEFAULT_ENCODING).split("\n")) + .filter(m -> m.matches("(?s)^.*\"containerInfo\".*")) .collect(Collectors.toList()); assertEquals(4, replica.size()); @@ -213,34 +200,33 @@ private void testReplicaIncludedInOutput(boolean includeIndex) cmd.execute(scmClient); // Ensure we have a line for Replicas: - List logs = appender.getLog(); - List replica = logs.stream() - .filter(m -> m.getRenderedMessage().matches("(?s)^Replicas:.*")) - .collect(Collectors.toList()); - assertEquals(1, replica.size()); + String output = outContent.toString(DEFAULT_ENCODING); + Pattern pattern = Pattern.compile("Replicas: \\[.*\\]", Pattern.DOTALL); + Matcher matcher = pattern.matcher(output); + assertTrue(matcher.find()); + String replica = matcher.group(); // Ensure each DN UUID is mentioned in the message: for (DatanodeDetails dn : datanodes) { - Pattern pattern = Pattern.compile(".*" + dn.getUuid().toString() + ".*", + Pattern uuidPattern = Pattern.compile(".*" + dn.getUuid().toString() + ".*", Pattern.DOTALL); - Matcher matcher = pattern.matcher(replica.get(0).getRenderedMessage()); - assertTrue(matcher.matches()); + assertThat(replica).matches(uuidPattern); } // Ensure the replicaIndex output is in order if (includeIndex) { List indexList = new ArrayList<>(); for (int i = 1; i < datanodes.size() + 1; i++) { String temp = "ReplicaIndex: " + i; - indexList.add(replica.get(0).getRenderedMessage().indexOf(temp)); + indexList.add(replica.indexOf(temp)); } assertEquals(datanodes.size(), indexList.size()); assertTrue(inSort(indexList)); } // Ensure ReplicaIndex is not mentioned as it was not passed in the proto: - Pattern pattern = Pattern.compile(".*ReplicaIndex.*", - Pattern.DOTALL); - Matcher matcher = pattern.matcher(replica.get(0).getRenderedMessage()); - assertEquals(includeIndex, matcher.matches()); + assertEquals(includeIndex, + Pattern.compile(".*ReplicaIndex.*", Pattern.DOTALL) + .matcher(replica) + .matches()); } @Test @@ -253,9 +239,8 @@ public void testReplicasNotOutputIfError() throws IOException { cmd.execute(scmClient); // Ensure we have no lines for Replicas: - List logs = appender.getLog(); - List replica = logs.stream() - .filter(m -> m.getRenderedMessage().matches("(?s)^Replicas:.*")) + List replica = Arrays.stream(outContent.toString(DEFAULT_ENCODING).split("\n")) + .filter(m -> m.matches("(?s)^Replicas:.*")) .collect(Collectors.toList()); assertEquals(0, replica.size()); @@ -274,9 +259,7 @@ public void testReplicasNotOutputIfErrorWithJson() throws IOException { c.parseArgs("1", "--json"); cmd.execute(scmClient); - List logs = appender.getLog(); - assertEquals(1, logs.size()); - String json = logs.get(0).getRenderedMessage(); + String json = outContent.toString(DEFAULT_ENCODING); assertFalse(json.matches("(?s).*replicas.*")); } @@ -310,11 +293,8 @@ private void testJsonOutput() throws IOException { c.parseArgs("1", "--json"); cmd.execute(scmClient); - List logs = appender.getLog(); - assertEquals(1, logs.size()); - // Ensure each DN UUID is mentioned in the message after replicas: - String json = logs.get(0).getRenderedMessage(); + String json = outContent.toString(DEFAULT_ENCODING); assertTrue(json.matches("(?s).*replicas.*")); for (DatanodeDetails dn : datanodes) { Pattern pattern = Pattern.compile( @@ -409,25 +389,4 @@ private List createDatanodeDetails(int count) { return dns; } - private 
static class TestAppender extends AppenderSkeleton { - private final List log = new ArrayList<>(); - - @Override - public boolean requiresLayout() { - return false; - } - - @Override - protected void append(final LoggingEvent loggingEvent) { - log.add(loggingEvent); - } - - @Override - public void close() { - } - - public List getLog() { - return new ArrayList<>(log); - } - } }
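Note on the test changes above: they all follow one capture-and-assert pattern — point System.out at a buffer-backed PrintStream before the command runs, restore the original stream afterwards, then match on the captured text instead of on log4j events. Below is a minimal, self-contained sketch of that pattern; the StdoutCaptureExample class and its runCommand() method are illustrative stand-ins, not part of this patch:

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;

public final class StdoutCaptureExample {

  // Stand-in for a CLI subcommand that now writes directly to stdout.
  private static void runCommand() {
    System.out.println("Starting ReplicationManager...");
  }

  public static void main(String[] args) throws Exception {
    PrintStream originalOut = System.out;
    ByteArrayOutputStream captured = new ByteArrayOutputStream();
    try {
      // Mirrors the @BeforeEach setup: redirect stdout into the buffer.
      System.setOut(new PrintStream(captured, false, StandardCharsets.UTF_8.name()));
      runCommand();
    } finally {
      // Mirrors the @AfterEach teardown: always restore the real stream.
      System.setOut(originalOut);
    }
    String output = captured.toString(StandardCharsets.UTF_8.name());
    if (!output.contains("Starting ReplicationManager")) {
      throw new AssertionError("Expected message not printed, got: " + output);
    }
    originalOut.println("Captured: " + output.trim());
  }
}

This is the same wiring TestInfoSubCommand keeps in its setup()/after() methods, and it is what makes the deleted log4j TestAppender unnecessary.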