diff --git a/dev-support/pmd/pmd-ruleset.xml b/dev-support/pmd/pmd-ruleset.xml
index 1bdb2be45a51..e3af967ff3f1 100644
--- a/dev-support/pmd/pmd-ruleset.xml
+++ b/dev-support/pmd/pmd-ruleset.xml
@@ -35,6 +35,7 @@
+
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
index c450e46e8ffb..690631ca3149 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java
@@ -845,7 +845,7 @@ public static HddsProtos.UUID toProtobuf(UUID uuid) {
if (elements != null && elements.length > startIndex) {
final StringBuilder sb = new StringBuilder();
for (int line = startIndex; line < elements.length; line++) {
- sb.append(elements[line]).append("\n");
+ sb.append(elements[line]).append('\n');
}
return sb.toString();
}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/NodeDetails.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/NodeDetails.java
index 05e8ecb49850..0984048bd513 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/NodeDetails.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/NodeDetails.java
@@ -105,7 +105,7 @@ public String getHostAddress() {
public String getRatisHostPortStr() {
StringBuilder hostPort = new StringBuilder();
hostPort.append(getHostName())
- .append(":")
+ .append(':')
.append(ratisPort);
return hostPort.toString();
}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMNodeInfo.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMNodeInfo.java
index f3626bfeaf5d..d0806b34d581 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMNodeInfo.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMNodeInfo.java
@@ -182,7 +182,7 @@ public static List<SCMNodeInfo> buildNodeInfo(ConfigurationSource conf) {
}
private static String buildAddress(String address, int port) {
- return new StringBuilder().append(address).append(":")
+ return new StringBuilder().append(address).append(':')
.append(port).toString();
}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/net/NetworkTopologyImpl.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/net/NetworkTopologyImpl.java
index 99e718438d69..1e5c57266cbe 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/net/NetworkTopologyImpl.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/net/NetworkTopologyImpl.java
@@ -893,18 +893,18 @@ public String toString() {
StringBuilder tree = new StringBuilder();
tree.append("Level: ");
tree.append(maxLevel);
- tree.append("\n");
+ tree.append('\n');
netlock.readLock().lock();
try {
// print the number of leaves
int numOfLeaves = clusterTree.getNumOfLeaves();
tree.append("Number of leaves:");
tree.append(numOfLeaves);
- tree.append("\n");
+ tree.append('\n');
// print all nodes
for (int i = 0; i < numOfLeaves; i++) {
tree.append(clusterTree.getLeaf(i).getNetworkFullPath());
- tree.append("\n");
+ tree.append('\n');
}
} finally {
netlock.readLock().unlock();
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java
index 605c3bc254c4..1549f4f7826f 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/pipeline/Pipeline.java
@@ -521,20 +521,20 @@ public int hashCode() {
@Override
public String toString() {
final StringBuilder b =
- new StringBuilder(getClass().getSimpleName()).append("{");
+ new StringBuilder(getClass().getSimpleName()).append('{');
b.append(" Id: ").append(id.getId());
b.append(", Nodes: [");
for (DatanodeDetails datanodeDetails : nodeStatus.keySet()) {
b.append(" {").append(datanodeDetails);
b.append(", ReplicaIndex: ").append(this.getReplicaIndex(datanodeDetails)).append("},");
}
- b.append("]");
+ b.append(']');
b.append(", ReplicationConfig: ").append(replicationConfig);
b.append(", State:").append(getPipelineState());
b.append(", leaderId:").append(leaderId != null ? leaderId.toString() : "");
b.append(", CreationTimestamp").append(getCreationTimestamp()
.atZone(ZoneId.systemDefault()));
- b.append("}");
+ b.append('}');
return b.toString();
}
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/StringCodec.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/StringCodec.java
index 18f968a9cfb9..f22393a50d3c 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/StringCodec.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/tracing/StringCodec.java
@@ -70,9 +70,9 @@ public JaegerSpanContext extract(StringBuilder s) {
public void inject(JaegerSpanContext context, StringBuilder string) {
int intFlag = context.getFlags() & 255;
string.append(context.getTraceId())
- .append(":").append(Long.toHexString(context.getSpanId()))
- .append(":").append(Long.toHexString(context.getParentId()))
- .append(":").append(Integer.toHexString(intFlag));
+ .append(':').append(Long.toHexString(context.getSpanId()))
+ .append(':').append(Long.toHexString(context.getParentId()))
+ .append(':').append(Integer.toHexString(intFlag));
}
private static long high(String hexString) {
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
index b718e80689af..eb1a436b67e1 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockData.java
@@ -279,7 +279,7 @@ public void appendTo(StringBuilder sb) {
sb.append("[blockId=");
blockID.appendTo(sb);
sb.append(", size=").append(size);
- sb.append("]");
+ sb.append(']');
}
public long getBlockGroupLength() {
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockDeletingServiceMetrics.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockDeletingServiceMetrics.java
index 03f0fec1835b..6e4d638e6ac1 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockDeletingServiceMetrics.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/BlockDeletingServiceMetrics.java
@@ -206,22 +206,22 @@ public long getProcessedTransactionFailCount() {
@Override
public String toString() {
StringBuilder buffer = new StringBuilder()
- .append("successCount = ").append(successCount.value()).append("\t")
- .append("successBytes = ").append(successBytes.value()).append("\t")
- .append("failureCount = ").append(failureCount.value()).append("\t")
+ .append("successCount = ").append(successCount.value()).append('\t')
+ .append("successBytes = ").append(successBytes.value()).append('\t')
+ .append("failureCount = ").append(failureCount.value()).append('\t')
.append("outOfOrderDeleteBlockTransactionCount = ")
- .append(outOfOrderDeleteBlockTransactionCount.value()).append("\t")
- .append("totalPendingBlockCount = ").append(totalPendingBlockCount.value()).append("\t")
- .append("totalBlockChosenCount = ").append(totalBlockChosenCount.value()).append("\t")
- .append("totalContainerChosenCount = ").append(totalContainerChosenCount.value()).append("\t")
- .append("receivedTransactionCount = ").append(receivedTransactionCount.value()).append("\t")
- .append("receivedRetryTransactionCount = ").append(receivedRetryTransactionCount.value()).append("\t")
- .append("processedTransactionSuccessCount = ").append(processedTransactionSuccessCount.value()).append("\t")
- .append("processedTransactionFailCount = ").append(processedTransactionFailCount.value()).append("\t")
- .append("receivedContainerCount = ").append(receivedContainerCount.value()).append("\t")
- .append("receivedBlockCount = ").append(receivedBlockCount.value()).append("\t")
- .append("markedBlockCount = ").append(markedBlockCount.value()).append("\t")
- .append("totalLockTimeoutTransactionCount = ").append(totalLockTimeoutTransactionCount.value()).append("\t");
+ .append(outOfOrderDeleteBlockTransactionCount.value()).append('\t')
+ .append("totalPendingBlockCount = ").append(totalPendingBlockCount.value()).append('\t')
+ .append("totalBlockChosenCount = ").append(totalBlockChosenCount.value()).append('\t')
+ .append("totalContainerChosenCount = ").append(totalContainerChosenCount.value()).append('\t')
+ .append("receivedTransactionCount = ").append(receivedTransactionCount.value()).append('\t')
+ .append("receivedRetryTransactionCount = ").append(receivedRetryTransactionCount.value()).append('\t')
+ .append("processedTransactionSuccessCount = ").append(processedTransactionSuccessCount.value()).append('\t')
+ .append("processedTransactionFailCount = ").append(processedTransactionFailCount.value()).append('\t')
+ .append("receivedContainerCount = ").append(receivedContainerCount.value()).append('\t')
+ .append("receivedBlockCount = ").append(receivedBlockCount.value()).append('\t')
+ .append("markedBlockCount = ").append(markedBlockCount.value()).append('\t')
+ .append("totalLockTimeoutTransactionCount = ").append(totalLockTimeoutTransactionCount.value()).append('\t');
return buffer.toString();
}
}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java
index 638e1137568d..610e54025b05 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/DeletedContainerBlocksSummary.java
@@ -99,7 +99,7 @@ public String getTxIDSummary() {
@Override
public String toString() {
StringBuffer sb = new StringBuffer();
for (DeletedBlocksTransaction blks : blocks) {
- sb.append(" ")
+ sb.append(' ')
.append("TXID=")
.append(blks.getTxID())
.append(", ")
@@ -108,8 +108,8 @@ public String getTxIDSummary() {
.append(", ")
.append(blks.getContainerID())
.append(" : [")
- .append(StringUtils.join(',', blks.getLocalIDList())).append("]")
- .append("\n");
+ .append(StringUtils.join(',', blks.getLocalIDList())).append(']')
+ .append('\n');
}
return sb.toString();
}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/StorageLocationReport.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/StorageLocationReport.java
index 5f539e121b39..d3148df8c8f2 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/StorageLocationReport.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/impl/StorageLocationReport.java
@@ -235,7 +235,7 @@ public static StorageLocationReport getFromProtobuf(StorageReportProto report)
@Override
public String toString() {
StringBuilder sb = new StringBuilder(128)
- .append("{")
+ .append('{')
.append(" id=").append(id)
.append(" dir=").append(storageLocation)
.append(" type=").append(storageType);
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ec/reconstruction/ECReconstructionCoordinator.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ec/reconstruction/ECReconstructionCoordinator.java
index 4694850b936c..d8856f081d7a 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ec/reconstruction/ECReconstructionCoordinator.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/ec/reconstruction/ECReconstructionCoordinator.java
@@ -373,7 +373,7 @@ private void logBlockGroupDetails(BlockLocationInfo blockLocationInfo,
int cnt = 0;
for (ContainerProtos.ChunkInfo chunkInfo : data.getChunks()) {
if (cnt > 0) {
- sb.append("\n");
+ sb.append('\n');
}
sb.append(" chunkNum: ")
.append(++cnt)
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueHandler.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueHandler.java
index 408897c6bb61..8e1080dc48d1 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueHandler.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/KeyValueHandler.java
@@ -1427,7 +1427,7 @@ private boolean logBlocksFoundOnDisk(Container container) throws IOException {
StringBuilder stringBuilder = new StringBuilder();
for (Path block : dir) {
if (notEmpty) {
- stringBuilder.append(",");
+ stringBuilder.append(',');
}
stringBuilder.append(block);
notEmpty = true;
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/AbstractReplicationTask.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/AbstractReplicationTask.java
index 2ba2b676a913..05932e6edf79 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/AbstractReplicationTask.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/AbstractReplicationTask.java
@@ -144,7 +144,7 @@ protected Object getCommandForDebug() {
@Override
public String toString() {
StringBuilder sb = new StringBuilder()
- .append(getStatus()).append(" ")
+ .append(getStatus()).append(' ')
.append(getCommandForDebug());
if (getStatus() == Status.QUEUED) {
sb.append(", queued at ").append(getQueued());
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java
index 8d4b43d1cc76..6bd5adb688b5 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CloseContainerCommand.java
@@ -90,7 +90,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline())
.append(", containerID: ").append(getContainerID())
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ClosePipelineCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ClosePipelineCommand.java
index d1c8e9444b2e..0c1caa3dd68d 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ClosePipelineCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ClosePipelineCommand.java
@@ -75,7 +75,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline())
.append(", pipelineID: ").append(getPipelineID());
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CreatePipelineCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CreatePipelineCommand.java
index b6340e86c6ee..ebd5bcfdcc3a 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CreatePipelineCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/CreatePipelineCommand.java
@@ -159,7 +159,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline())
.append(", pipelineID: ").append(getPipelineID())
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java
index e955cabdfa47..e6457d4d25db 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteBlocksCommand.java
@@ -69,7 +69,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline())
.append(", deletedBlocksTransaction: [");
@@ -83,7 +83,7 @@ public String toString() {
if (!blocksTobeDeleted.isEmpty()) {
sb.delete(sb.length() - 2, sb.length());
}
- sb.append("]");
+ sb.append(']');
return sb.toString();
}
}
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteContainerCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteContainerCommand.java
index d142e23c237b..193c67576c2d 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteContainerCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/DeleteContainerCommand.java
@@ -108,7 +108,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline())
.append(", containerID: ").append(getContainerID())
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/FinalizeNewLayoutVersionCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/FinalizeNewLayoutVersionCommand.java
index b686db5ef0f7..0d5c02202ddc 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/FinalizeNewLayoutVersionCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/FinalizeNewLayoutVersionCommand.java
@@ -78,7 +78,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline())
.append(", finalizeUpgrade: ").append(finalizeUpgrade)
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReconstructECContainersCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReconstructECContainersCommand.java
index 93bbfd8202a4..3628c2c9e6ab 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReconstructECContainersCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReconstructECContainersCommand.java
@@ -131,7 +131,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline())
.append(", containerID: ").append(containerID)
@@ -139,7 +139,7 @@ public String toString() {
.append(", sources: [").append(getSources().stream()
.map(a -> a.dnDetails
+ " replicaIndex: " + a.getReplicaIndex())
- .collect(Collectors.joining(", "))).append("]")
+ .collect(Collectors.joining(", "))).append(']')
.append(", targets: ").append(getTargetDatanodes())
.append(", missingIndexes: ").append(
Arrays.toString(missingContainerIndexes.toByteArray()));
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RefreshVolumeUsageCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RefreshVolumeUsageCommand.java
index f685a8282b32..6872b152e15e 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RefreshVolumeUsageCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/RefreshVolumeUsageCommand.java
@@ -60,7 +60,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline());
return sb.toString();
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReplicateContainerCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReplicateContainerCommand.java
index d4046bd989d7..826af4fdd362 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReplicateContainerCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReplicateContainerCommand.java
@@ -159,7 +159,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType());
sb.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline());
sb.append(", containerId=").append(getContainerID());
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java
index adb87a3898e4..82961d994a85 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/ReregisterCommand.java
@@ -59,7 +59,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline());
return sb.toString();
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SetNodeOperationalStateCommand.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SetNodeOperationalStateCommand.java
index 45e51d00e7ff..51aaf87ae347 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SetNodeOperationalStateCommand.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/commands/SetNodeOperationalStateCommand.java
@@ -91,7 +91,7 @@ public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getType())
.append(": cmdID: ").append(getId())
- .append(", encodedToken: \"").append(getEncodedToken()).append("\"")
+ .append(", encodedToken: \"").append(getEncodedToken()).append('"')
.append(", term: ").append(getTerm())
.append(", deadlineMsSinceEpoch: ").append(getDeadline())
.append(", opState: ").append(opState)
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/utils/TestHddsVolumeUtil.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/utils/TestHddsVolumeUtil.java
index 9ec248e38319..9cbf95f3ea31 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/utils/TestHddsVolumeUtil.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/utils/TestHddsVolumeUtil.java
@@ -73,7 +73,7 @@ public void setup() throws Exception {
for (int i = 0; i < VOLUMNE_NUM; i++) {
hddsVolumeDirs[i] =
Files.createDirectory(tempDir.resolve("volumeDir" + i)).toFile();
- hddsDirs.append(hddsVolumeDirs[i]).append(",");
+ hddsDirs.append(hddsVolumeDirs[i]).append(',');
}
conf.set(ScmConfigKeys.HDDS_DATANODE_DIR_KEY, hddsDirs.toString());
hddsVolumeSet = new MutableVolumeSet(datanodeId, clusterId, conf, null,
@@ -85,7 +85,7 @@ public void setup() throws Exception {
for (int i = 0; i < VOLUMNE_NUM; i++) {
dbVolumeDirs[i] =
Files.createDirectory(tempDir.resolve("dbVolumeDir" + i)).toFile();
- dbDirs.append(dbVolumeDirs[i]).append(",");
+ dbDirs.append(dbVolumeDirs[i]).append(',');
}
conf.set(OzoneConfigKeys.HDDS_DATANODE_CONTAINER_DB_DIR,
dbDirs.toString());
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/volume/TestDbVolume.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/volume/TestDbVolume.java
index 683c2b12b1f0..280f8597d71c 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/volume/TestDbVolume.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/volume/TestDbVolume.java
@@ -164,7 +164,7 @@ private MutableVolumeSet createHddsVolumeSet(int volumeNum)
for (int i = 0; i < volumeNum; i++) {
hddsVolumeDirs[i] =
Files.createDirectory(folder.resolve("volumeDir" + i)).toFile();
- hddsDirs.append(hddsVolumeDirs[i]).append(",");
+ hddsDirs.append(hddsVolumeDirs[i]).append(',');
}
CONF.set(ScmConfigKeys.HDDS_DATANODE_DIR_KEY, hddsDirs.toString());
MutableVolumeSet hddsVolumeSet = new MutableVolumeSet(DATANODE_UUID,
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestContainerReader.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestContainerReader.java
index dbe6df0c7b28..6a48765c1a91 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestContainerReader.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestContainerReader.java
@@ -375,7 +375,7 @@ public void testMultipleContainerReader(ContainerTestVersionInfo versionInfo)
for (int i = 0; i < volumeNum; i++) {
volumeDirs[i] =
Files.createDirectory(tempDir.resolve("volumeDir" + i)).toFile();
- datanodeDirs = datanodeDirs.append(volumeDirs[i]).append(",");
+ datanodeDirs = datanodeDirs.append(volumeDirs[i]).append(',');
}
BlockUtils.shutdownCache(conf);
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
index 34619c19183f..de5b2331f12d 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
@@ -189,7 +189,7 @@ public void testBuildNodeReport(ContainerTestVersionInfo versionInfo)
for (int i = 0; i < 3; i++) {
dbPaths[i] =
Files.createDirectory(folder.resolve(Integer.toString(i))).toFile();
- dbDirString.append(dbPaths[i]).append(",");
+ dbDirString.append(dbPaths[i]).append(',');
}
conf.set(OzoneConfigKeys.HDDS_DATANODE_CONTAINER_DB_DIR,
dbDirString.toString());
diff --git a/hadoop-hdds/erasurecode/src/test/java/org/apache/ozone/erasurecode/TestCoderBase.java b/hadoop-hdds/erasurecode/src/test/java/org/apache/ozone/erasurecode/TestCoderBase.java
index 3f5d66bc1468..a6f6dba51753 100644
--- a/hadoop-hdds/erasurecode/src/test/java/org/apache/ozone/erasurecode/TestCoderBase.java
+++ b/hadoop-hdds/erasurecode/src/test/java/org/apache/ozone/erasurecode/TestCoderBase.java
@@ -482,7 +482,7 @@ protected void dumpSetting() {
StringBuilder sb = new StringBuilder("Erasure coder test settings:\n");
sb.append(" numDataUnits=").append(numDataUnits);
sb.append(" numParityUnits=").append(numParityUnits);
- sb.append(" chunkSize=").append(chunkSize).append("\n");
+ sb.append(" chunkSize=").append(chunkSize).append('\n');
sb.append(" erasedDataIndexes=").
append(Arrays.toString(erasedDataIndexes));
@@ -490,7 +490,7 @@ protected void dumpSetting() {
append(Arrays.toString(erasedParityIndexes));
sb.append(" usingDirectBuffer=").append(usingDirectBuffer);
sb.append(" allowVerboseDump=").append(allowDump);
- sb.append("\n");
+ sb.append('\n');
System.out.println(sb.toString());
}
diff --git a/hadoop-hdds/erasurecode/src/test/java/org/apache/ozone/erasurecode/rawcoder/RawErasureCoderBenchmark.java b/hadoop-hdds/erasurecode/src/test/java/org/apache/ozone/erasurecode/rawcoder/RawErasureCoderBenchmark.java
index ef9b59325dde..89c15316ef11 100644
--- a/hadoop-hdds/erasurecode/src/test/java/org/apache/ozone/erasurecode/rawcoder/RawErasureCoderBenchmark.java
+++ b/hadoop-hdds/erasurecode/src/test/java/org/apache/ozone/erasurecode/rawcoder/RawErasureCoderBenchmark.java
@@ -85,7 +85,7 @@ private static void printAvailableCoders() {
StringBuilder sb = new StringBuilder(
"Available coders with coderIndex:\n");
for (CODER coder : CODER.values()) {
- sb.append(coder.ordinal()).append(":").append(coder).append("\n");
+ sb.append(coder.ordinal()).append(':').append(coder).append('\n');
}
System.out.println(sb);
}
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
index aa7eb407ae50..0e505633dd47 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/HttpServer2.java
@@ -1410,7 +1410,7 @@ public String toString() {
: STATE_DESCRIPTION_NOT_LIVE)
.append("), listening at:");
for (ServerConnector l : listeners) {
- sb.append(l.getHost()).append(":").append(l.getPort()).append("/,");
+ sb.append(l.getHost()).append(':').append(l.getPort()).append("/,");
}
return sb.toString();
}
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/PrometheusMetricsSink.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/PrometheusMetricsSink.java
index 6f431bfe8945..89cf9adbb6a4 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/PrometheusMetricsSink.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/server/http/PrometheusMetricsSink.java
@@ -90,7 +90,7 @@ private String getPrometheusMetricKeyAsString(MetricsRecord metricsRecord,
String key, String username) {
StringBuilder prometheusMetricKey = new StringBuilder();
prometheusMetricKey.append(key)
- .append("{");
+ .append('{');
String sep = "";
List<MetricsTag> metricsTags =
@@ -110,10 +110,10 @@ private String getPrometheusMetricKeyAsString(MetricsRecord metricsRecord,
.append(tagName)
.append("=\"")
.append(tag.value())
- .append("\"");
+ .append('"');
sep = ",";
}
- prometheusMetricKey.append("}");
+ prometheusMetricKey.append('}');
return prometheusMetricKey.toString();
}
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
index b3e21db17f34..9a780be8b3d4 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/HddsServerUtil.java
@@ -707,7 +707,7 @@ public static String toStartupShutdownString(String prefix, String... msg) {
StringBuilder b = new StringBuilder(prefix);
b.append("\n/************************************************************");
for (String s : msg) {
- b.append("\n").append(prefix).append(s);
+ b.append('\n').append(prefix).append(s);
}
b.append("\n************************************************************/");
return b.toString();
diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/SignalLogger.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/SignalLogger.java
index 9e5832c9e6b4..02dcc136c7d1 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/SignalLogger.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/utils/SignalLogger.java
@@ -94,7 +94,7 @@ public void register(final Logger log) {
log.info("Error installing UNIX signal handler for {}", signal, e);
}
}
- bld.append("]");
+ bld.append(']');
if (log.isInfoEnabled()) {
log.info(bld.toString());
}
diff --git a/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdiff/TestRocksDBCheckpointDiffer.java b/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdiff/TestRocksDBCheckpointDiffer.java
index b975ea26dc34..2bafef981818 100644
--- a/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdiff/TestRocksDBCheckpointDiffer.java
+++ b/hadoop-hdds/rocksdb-checkpoint-differ/src/test/java/org/apache/ozone/rocksdiff/TestRocksDBCheckpointDiffer.java
@@ -996,7 +996,7 @@ private void printMutableGraphFromAGivenNode(
for (CompactionNode current : currentLevel) {
Set<CompactionNode> successors = mutableGraph.successors(current);
for (CompactionNode succNode : successors) {
- sb.append(succNode.getFileName()).append(" ");
+ sb.append(succNode.getFileName()).append(' ');
nextLevel.add(succNode);
}
}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/ScmBlockDeletingServiceMetrics.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/ScmBlockDeletingServiceMetrics.java
index 2f298a5ebc5c..20293bbd4aa2 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/ScmBlockDeletingServiceMetrics.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/block/ScmBlockDeletingServiceMetrics.java
@@ -334,15 +334,15 @@ public long getNumCommandsDatanodeFailed() {
@Override
public String toString() {
StringBuilder buffer = new StringBuilder()
- .append("numBlockDeletionTransactionCreated = ").append(numBlockDeletionTransactionCreated.value()).append("\t")
+ .append("numBlockDeletionTransactionCreated = ").append(numBlockDeletionTransactionCreated.value()).append('\t')
.append("numBlockDeletionTransactionCompleted = ")
- .append(numBlockDeletionTransactionCompleted.value()).append("\t")
- .append("numBlockDeletionCommandSent = ").append(numBlockDeletionCommandSent.value()).append("\t")
- .append("numBlockDeletionCommandSuccess = ").append(numBlockDeletionCommandSuccess.value()).append("\t")
- .append("numBlockDeletionCommandFailure = ").append(numBlockDeletionCommandFailure.value()).append("\t")
- .append("numBlockDeletionTransactionSent = ").append(numBlockDeletionTransactionSent.value()).append("\t")
- .append("numBlockDeletionTransactionSuccess = ").append(numBlockDeletionTransactionSuccess.value()).append("\t")
- .append("numBlockDeletionTransactionFailure = ").append(numBlockDeletionTransactionFailure.value()).append("\t")
+ .append(numBlockDeletionTransactionCompleted.value()).append('\t')
+ .append("numBlockDeletionCommandSent = ").append(numBlockDeletionCommandSent.value()).append('\t')
+ .append("numBlockDeletionCommandSuccess = ").append(numBlockDeletionCommandSuccess.value()).append('\t')
+ .append("numBlockDeletionCommandFailure = ").append(numBlockDeletionCommandFailure.value()).append('\t')
+ .append("numBlockDeletionTransactionSent = ").append(numBlockDeletionTransactionSent.value()).append('\t')
+ .append("numBlockDeletionTransactionSuccess = ").append(numBlockDeletionTransactionSuccess.value()).append('\t')
+ .append("numBlockDeletionTransactionFailure = ").append(numBlockDeletionTransactionFailure.value()).append('\t')
.append("numDeletionCommandsPerDatanode = ").append(numCommandsDatanode);
return buffer.toString();
}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ContainerHealthResult.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ContainerHealthResult.java
index 34e8b9d323fc..eeb6b75130ca 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ContainerHealthResult.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ContainerHealthResult.java
@@ -305,7 +305,7 @@ public String toString() {
if (requeueCount > 0) {
sb.append(" requeued:").append(requeueCount);
}
- return sb.append("}").toString();
+ return sb.append('}').toString();
}
}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ECContainerReplicaCount.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ECContainerReplicaCount.java
index 36be55c41b07..13ad1d37b472 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ECContainerReplicaCount.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/container/replication/ECContainerReplicaCount.java
@@ -569,7 +569,7 @@ public String toString() {
if (!pendingDelete.isEmpty()) {
sb.append(", PendingDelete: ").append(pendingDelete.size());
}
- sb.append(")")
+ sb.append(')')
.append(", ReplicationConfig: ").append(repConfig)
.append(", RemainingMaintenanceRedundancy: ")
.append(remainingMaintenanceRedundancy);
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/DatanodeAdminMonitorImpl.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/DatanodeAdminMonitorImpl.java
index 5fb0c2188c9c..2bcdedce5e31 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/DatanodeAdminMonitorImpl.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/node/DatanodeAdminMonitorImpl.java
@@ -499,7 +499,7 @@ private String replicaDetails(Collection<ContainerReplica> replicas) {
sb.append(replicas.stream()
.map(Object::toString)
.collect(Collectors.joining(",")));
- sb.append("}");
+ sb.append('}');
return sb.toString();
}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java
index 7bc437627aa5..b945786ac00d 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMClientProtocolServer.java
@@ -379,7 +379,7 @@ public List<ContainerWithPipeline> getContainerWithPipelineBatch(
ContainerWithPipeline cp = getContainerWithPipelineCommon(containerID);
cpList.add(cp);
strContainerIDs.append(ContainerID.valueOf(containerID).toString());
- strContainerIDs.append(",");
+ strContainerIDs.append(',');
} catch (IOException ex) {
AUDIT.logReadFailure(buildAuditMessageForFailure(
SCMAction.GET_CONTAINER_WITH_PIPELINE_BATCH,
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java
index df615eec3bc1..eb4feec2fc5c 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/SCMDatanodeProtocolServer.java
@@ -276,14 +276,14 @@ public static SCMRegisteredResponseProto getRegisteredResponse(
private String constructCommandAuditMap(List<SCMCommandProto> cmds) {
StringBuilder auditMap = new StringBuilder();
- auditMap.append("[");
+ auditMap.append('[');
for (SCMCommandProto cmd : cmds) {
if (cmd.getCommandType().equals(deleteBlocksCommand)) {
auditMap.append("commandType: ").append(cmd.getCommandType());
auditMap.append(" deleteTransactionsCount: ")
.append(cmd.getDeleteBlocksCommandProto().getDeletedBlocksTransactionsCount());
auditMap.append(" cmdID: ").append(cmd.getDeleteBlocksCommandProto().getCmdId());
- auditMap.append(" encodedToken: \"").append(cmd.getEncodedToken()).append("\"");
+ auditMap.append(" encodedToken: \"").append(cmd.getEncodedToken()).append('"');
auditMap.append(" deadlineMsSinceEpoch: ").append(cmd.getDeadlineMsSinceEpoch());
} else {
auditMap.append(TextFormat.shortDebugString(cmd));
@@ -294,7 +294,7 @@ private String constructCommandAuditMap(List<SCMCommandProto> cmds) {
if (len > 2) {
auditMap.delete(len - 2, len);
}
- auditMap.append("]");
+ auditMap.append(']');
return auditMap.toString();
}
diff --git a/hadoop-hdds/test-utils/src/test/java/org/apache/ozone/test/TimedOutTestsListener.java b/hadoop-hdds/test-utils/src/test/java/org/apache/ozone/test/TimedOutTestsListener.java
index c2d07137a511..c7a8873c695b 100644
--- a/hadoop-hdds/test-utils/src/test/java/org/apache/ozone/test/TimedOutTestsListener.java
+++ b/hadoop-hdds/test-utils/src/test/java/org/apache/ozone/test/TimedOutTestsListener.java
@@ -96,7 +96,7 @@ private static String buildThreadDump() {
dump.append("\n at ");
dump.append(stackTraceElement);
}
- dump.append("\n");
+ dump.append('\n');
}
return dump.toString();
}
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/TopologySubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/TopologySubcommand.java
index b2f7ce1384b4..5a850551c2b5 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/TopologySubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/TopologySubcommand.java
@@ -181,9 +181,9 @@ private String formatPortOutput(List<HddsProtos.Port> ports) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < ports.size(); i++) {
HddsProtos.Port port = ports.get(i);
- sb.append(port.getName()).append("=").append(port.getValue());
+ sb.append(port.getName()).append('=').append(port.getValue());
if (i < ports.size() - 1) {
- sb.append(",");
+ sb.append(',');
}
}
return sb.toString();
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java
index 38df56453690..2c3ad44c9798 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/container/InfoSubcommand.java
@@ -195,11 +195,11 @@ private static String buildDatanodeDetails(DatanodeDetails details) {
private static String buildReplicaDetails(ContainerReplicaInfo replica) {
StringBuilder sb = new StringBuilder()
- .append("State: ").append(replica.getState()).append(";");
+ .append("State: ").append(replica.getState()).append(';');
if (replica.getReplicaIndex() != -1) {
- sb.append(" ReplicaIndex: ").append(replica.getReplicaIndex()).append(";");
+ sb.append(" ReplicaIndex: ").append(replica.getReplicaIndex()).append(';');
}
- sb.append(" Origin: ").append(replica.getPlaceOfBirth().toString()).append(";")
+ sb.append(" Origin: ").append(replica.getPlaceOfBirth().toString()).append(';')
.append(" Location: ").append(buildDatanodeDetails(replica.getDatanodeDetails()));
return sb.toString();
}
diff --git a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java
index 929873c65803..90e678f28155 100644
--- a/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java
+++ b/hadoop-hdds/tools/src/main/java/org/apache/hadoop/hdds/scm/cli/datanode/ListInfoSubcommand.java
@@ -145,9 +145,9 @@ private void printDatanodeInfo(DatanodeWithAttributes dna) {
relatedPipelineNum = relatedPipelines.size();
relatedPipelines.forEach(
p -> pipelineListInfo.append(p.getId().getId().toString())
- .append("/").append(p.getReplicationConfig().toString())
- .append("/").append(p.getType().toString())
- .append("/").append(p.getPipelineState().toString()).append("/")
+ .append('/').append(p.getReplicationConfig().toString())
+ .append('/').append(p.getType().toString())
+ .append('/').append(p.getPipelineState().toString()).append('/')
.append(datanode.getUuid().equals(p.getLeaderId()) ?
"Leader" : "Follower")
.append(System.getProperty("line.separator")));
diff --git a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestReportSubCommand.java b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestReportSubCommand.java
index f74850e6c4aa..ad661ea3c8ee 100644
--- a/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestReportSubCommand.java
+++ b/hadoop-hdds/tools/src/test/java/org/apache/hadoop/hdds/scm/cli/container/TestReportSubCommand.java
@@ -177,7 +177,7 @@ private String containerList(int start, int end) {
if (i != start) {
sb.append(", ");
}
- sb.append("#").append(i);
+ sb.append('#').append(i);
}
return sb.toString();
}
diff --git a/hadoop-ozone/client/src/test/java/org/apache/hadoop/ozone/client/MockDatanodeStorage.java b/hadoop-ozone/client/src/test/java/org/apache/hadoop/ozone/client/MockDatanodeStorage.java
index 92f0b6ea4c4c..a526cebe2ced 100644
--- a/hadoop-ozone/client/src/test/java/org/apache/hadoop/ozone/client/MockDatanodeStorage.java
+++ b/hadoop-ozone/client/src/test/java/org/apache/hadoop/ozone/client/MockDatanodeStorage.java
@@ -133,7 +133,7 @@ public BlockData getBlock(DatanodeBlockID blockID) {
if (!blocks.containsKey(id)) {
StringBuilder sb = new StringBuilder();
for (BlockID bid : blocks.keySet()) {
- sb.append(bid).append("\n");
+ sb.append(bid).append('\n');
}
throw new AssertionError("blockID " + id +
" not found in blocks. Available block ID: \n" + sb);
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/OmUtils.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/OmUtils.java
index 789d26c8880b..9ca59b9c38c6 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/OmUtils.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/OmUtils.java
@@ -919,10 +919,10 @@ public static String getOMAddressListPrintString(List<OMNodeDetails> omList) {
}
printString.append(omList.get(0).getOMPrintInfo());
for (int i = 1; i < omList.size(); i++) {
- printString.append(",")
+ printString.append(',')
.append(omList.get(i).getOMPrintInfo());
}
- printString.append("]");
+ printString.append(']');
return printString.toString();
}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyLocationInfoGroup.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyLocationInfoGroup.java
index 01d5fbaa7084..ddf62a45ed8f 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyLocationInfoGroup.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OmKeyLocationInfoGroup.java
@@ -185,14 +185,14 @@ void addAll(long versionToAdd, List<OmKeyLocationInfo> locationInfoList) {
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
- sb.append("version:").append(version).append(" ");
- sb.append("isMultipartKey:").append(isMultipartKey).append(" ");
+ sb.append("version:").append(version).append(' ');
+ sb.append("isMultipartKey:").append(isMultipartKey).append(' ');
for (List<OmKeyLocationInfo> kliList : locationVersionMap.values()) {
for (OmKeyLocationInfo kli: kliList) {
sb.append("conID ").append(kli.getContainerID());
- sb.append(" ");
+ sb.append(' ');
sb.append("locID ").append(kli.getLocalID());
- sb.append(" ");
+ sb.append(' ');
sb.append("bcsID ").append(kli.getBlockCommitSequenceId());
sb.append(" || ");
}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OzoneFileStatus.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OzoneFileStatus.java
index cafffb4809e3..99ea12b1cd67 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OzoneFileStatus.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OzoneFileStatus.java
@@ -134,7 +134,7 @@ public int hashCode() {
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName());
- sb.append("{");
+ sb.append('{');
if (keyInfo == null) {
sb.append("");
} else {
@@ -143,7 +143,7 @@ public String toString() {
sb.append(" (dir)");
}
}
- sb.append("}");
+ sb.append('}');
return sb.toString();
}
diff --git a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OzoneFileStatusLight.java b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OzoneFileStatusLight.java
index 213431188ba0..5d07f331086b 100644
--- a/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OzoneFileStatusLight.java
+++ b/hadoop-ozone/common/src/main/java/org/apache/hadoop/ozone/om/helpers/OzoneFileStatusLight.java
@@ -132,7 +132,7 @@ public int hashCode() {
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName());
- sb.append("{");
+ sb.append('{');
if (keyInfo == null) {
sb.append("");
} else {
@@ -141,7 +141,7 @@ public String toString() {
sb.append(" (dir)");
}
}
- sb.append("}");
+ sb.append('}');
return sb.toString();
}
diff --git a/hadoop-ozone/httpfsgateway/src/main/java/org/apache/ozone/lib/lang/XException.java b/hadoop-ozone/httpfsgateway/src/main/java/org/apache/ozone/lib/lang/XException.java
index b773d2b4db91..d9e014798799 100644
--- a/hadoop-ozone/httpfsgateway/src/main/java/org/apache/ozone/lib/lang/XException.java
+++ b/hadoop-ozone/httpfsgateway/src/main/java/org/apache/ozone/lib/lang/XException.java
@@ -99,7 +99,7 @@ private static String format(ERROR error, Object... args) {
if (template == null) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < args.length; i++) {
- sb.append(" {").append(i).append("}");
+ sb.append(" {").append(i).append('}');
}
template = sb.deleteCharAt(0).toString();
}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
index 8a1fcb5b551f..198874fac8da 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
@@ -435,7 +435,7 @@ public void testCompareConfigurationClassAgainstXml() {
StringBuilder xmlErrorMsg = new StringBuilder();
for (Class c : configurationClasses) {
xmlErrorMsg.append(c);
- xmlErrorMsg.append(" ");
+ xmlErrorMsg.append(' ');
}
xmlErrorMsg.append("has ");
xmlErrorMsg.append(missingXmlSize);
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/contract/AbstractContractSeekTest.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/contract/AbstractContractSeekTest.java
index ca1450382d45..39ecfdda09a4 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/contract/AbstractContractSeekTest.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/contract/AbstractContractSeekTest.java
@@ -361,7 +361,7 @@ public void testRandomSeeks() throws Throwable {
sb.append("Sequence of actions:\n");
for (int j = 0; j < seeks.length; j++) {
sb.append("seek @ ").append(seeks[j]).append(" ")
- .append("read ").append(reads[j]).append("\n");
+ .append("read ").append(reads[j]).append('\n');
}
LOG.error(sb.toString());
throw afe;
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java
index 1d3fcf4ff501..1ddaa079bb95 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java
@@ -1456,9 +1456,9 @@ public static String pathsToString(Collection<Path> paths) {
builder.append(nl);
for (Path path : paths) {
builder.append(" \"").append(path.toString())
- .append("\"").append(nl);
+ .append('"').append(nl);
}
- builder.append("]");
+ builder.append(']');
return builder.toString();
}
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneClientMultipartUploadWithFSO.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneClientMultipartUploadWithFSO.java
index f69869c286b0..1055087d8264 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneClientMultipartUploadWithFSO.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/client/rpc/TestOzoneClientMultipartUploadWithFSO.java
@@ -870,7 +870,7 @@ public void testListMultipartUploadsPagination() throws Exception {
StringBuilder key = new StringBuilder();
int depth = 1 + i % 3; // Creates varying depth (1-3 levels)
for (int j = 0; j < depth; j++) {
- key.append("dir").append(j + 1).append("/");
+ key.append("dir").append(j + 1).append('/');
}
key.append("file").append(i);
keys.add(key.toString());
diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestKeyManagerImpl.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestKeyManagerImpl.java
index f1568407a74d..0679fcf24f35 100644
--- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestKeyManagerImpl.java
+++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/om/TestKeyManagerImpl.java
@@ -363,7 +363,7 @@ public void testCreateDirectory() throws IOException {
.setKeyName(keyNameBuf.toString())
.build();
for (int i = 0; i < 5; i++) {
- keyNameBuf.append("/").append(RandomStringUtils.secure().nextAlphabetic(5));
+ keyNameBuf.append('/').append(RandomStringUtils.secure().nextAlphabetic(5));
}
String keyName = keyNameBuf.toString();
writeClient.createDirectory(keyArgs);
@@ -430,7 +430,7 @@ public void testOpenFile() throws IOException {
StringBuffer keyNameBuf = new StringBuffer();
keyNameBuf.append(RandomStringUtils.secure().nextAlphabetic(5));
for (int i = 0; i < 5; i++) {
- keyNameBuf.append("/").append(RandomStringUtils.secure().nextAlphabetic(5));
+ keyNameBuf.append('/').append(RandomStringUtils.secure().nextAlphabetic(5));
}
keyName = keyNameBuf.toString();
keyArgs = createBuilder()
diff --git a/hadoop-ozone/mini-cluster/src/main/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java b/hadoop-ozone/mini-cluster/src/main/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java
index cef2724de2a5..ef097ab57995 100644
--- a/hadoop-ozone/mini-cluster/src/main/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java
+++ b/hadoop-ozone/mini-cluster/src/main/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java
@@ -712,10 +712,10 @@ private String getSCMAddresses(List<StorageContainerManager> scms) {
while (iter.hasNext()) {
StorageContainerManager scm = iter.next();
stringBuilder.append(scm.getDatanodeRpcAddress().getHostString())
- .append(":")
+ .append(':')
.append(scm.getDatanodeRpcAddress().getPort());
if (iter.hasNext()) {
- stringBuilder.append(",");
+ stringBuilder.append(',');
}
}
diff --git a/hadoop-ozone/mini-cluster/src/main/java/org/apache/hadoop/ozone/MiniOzoneHAClusterImpl.java b/hadoop-ozone/mini-cluster/src/main/java/org/apache/hadoop/ozone/MiniOzoneHAClusterImpl.java
index e22152033313..92e136073206 100644
--- a/hadoop-ozone/mini-cluster/src/main/java/org/apache/hadoop/ozone/MiniOzoneHAClusterImpl.java
+++ b/hadoop-ozone/mini-cluster/src/main/java/org/apache/hadoop/ozone/MiniOzoneHAClusterImpl.java
@@ -634,7 +634,7 @@ private void initSCMHAConfig() {
if (i == 1) {
conf.set(ScmConfigKeys.OZONE_SCM_PRIMORDIAL_NODE_ID_KEY, scmNodeId);
}
- scmNodesKeyValue.append(",").append(scmNodeId);
+ scmNodesKeyValue.append(',').append(scmNodeId);
String scmAddrKey = ConfUtils.addKeySuffixes(
ScmConfigKeys.OZONE_SCM_ADDRESS_KEY, scmServiceId, scmNodeId);
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java
index 0624f0ee0e92..df3cef96a54b 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java
@@ -905,7 +905,7 @@ private void logVersionMismatch(OzoneConfiguration conf, ScmInfo scmInfo) {
StringBuilder scmBlockAddressBuilder = new StringBuilder();
for (SCMNodeInfo scmNodeInfo : scmNodeInfoList) {
scmBlockAddressBuilder.append(scmNodeInfo.getBlockClientAddress())
- .append(",");
+ .append(',');
}
String scmBlockAddress = scmBlockAddressBuilder.toString();
if (!StringUtils.isBlank(scmBlockAddress)) {
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ratis/OzoneManagerStateMachine.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ratis/OzoneManagerStateMachine.java
index 71e810d78eb8..8eadd5bff06a 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ratis/OzoneManagerStateMachine.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ratis/OzoneManagerStateMachine.java
@@ -225,7 +225,7 @@ public void notifyConfigurationChanged(long term, long index,
.append(", index=").append(index)
.append(", New Peer list: ");
newPeers.forEach(peer -> logBuilder.append(peer.getId().toStringUtf8())
- .append("(")
+ .append('(')
.append(peer.getAddress())
.append("), "));
LOG.info(logBuilder.substring(0, logBuilder.length() - 2));
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/request/key/OMKeysDeleteRequest.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/request/key/OMKeysDeleteRequest.java
index b9a09cbc7524..36b61f83e779 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/request/key/OMKeysDeleteRequest.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/request/key/OMKeysDeleteRequest.java
@@ -354,9 +354,9 @@ protected static void addDeletedKeys(Map<String, String> auditMap,
StringBuilder keys = new StringBuilder();
for (int i = 0; i < deletedKeyInfos.size(); i++) {
OmKeyInfo key = deletedKeyInfos.get(i);
- keys.append("{").append(KEY).append("=").append(key.getKeyName()).append(", ");
- keys.append(DATA_SIZE).append("=").append(key.getDataSize()).append(", ");
- keys.append(REPLICATION_CONFIG).append("=").append(key.getReplicationConfig()).append("}");
+ keys.append('{').append(KEY).append('=').append(key.getKeyName()).append(", ");
+ keys.append(DATA_SIZE).append('=').append(key.getDataSize()).append(", ");
+ keys.append(REPLICATION_CONFIG).append('=').append(key.getReplicationConfig()).append('}');
if (i < deletedKeyInfos.size() - 1) {
keys.append(", ");
}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/OpenKeyCleanupService.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/OpenKeyCleanupService.java
index 1583f3fef7fc..d4f97b23eeb3 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/OpenKeyCleanupService.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/service/OpenKeyCleanupService.java
@@ -208,7 +208,7 @@ public BackgroundTaskResult call() throws Exception {
.append(": ")
.append(openKey.getKeysList().stream().map(OzoneManagerProtocolProtos.OpenKey::getName)
.collect(Collectors.toList()))
- .append("\n");
+ .append('\n');
}
LOG.debug("Non-hsync'ed openKeys being deleted in current iteration: \n" + sb);
}
diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OmSnapshotUtils.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OmSnapshotUtils.java
index 8fd47c998e98..f5805044b7f4 100644
--- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OmSnapshotUtils.java
+++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/snapshot/OmSnapshotUtils.java
@@ -92,8 +92,8 @@ public static Path createHardLinkList(int truncateLength,
fixedFile = f.toString();
}
}
- sb.append(truncateFileName(truncateLength, entry.getKey())).append("\t")
- .append(fixedFile).append("\n");
+ sb.append(truncateFileName(truncateLength, entry.getKey())).append('\t')
+ .append(fixedFile).append('\n');
}
Files.write(data, sb.toString().getBytes(StandardCharsets.UTF_8));
return data;
diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/request/file/TestOMDirectoryCreateRequest.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/request/file/TestOMDirectoryCreateRequest.java
index 31fb48c57dcb..65cc1c0d7e51 100644
--- a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/request/file/TestOMDirectoryCreateRequest.java
+++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/request/file/TestOMDirectoryCreateRequest.java
@@ -539,7 +539,7 @@ private String genRandomKeyName() {
StringBuilder keyNameBuilder = new StringBuilder();
keyNameBuilder.append(RandomStringUtils.secure().nextAlphabetic(5));
for (int i = 0; i < 3; i++) {
- keyNameBuilder.append("/").append(RandomStringUtils.secure().nextAlphabetic(5));
+ keyNameBuilder.append('/').append(RandomStringUtils.secure().nextAlphabetic(5));
}
return keyNameBuilder.toString();
}
diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/request/validation/TestRequestFeatureValidatorProcessor.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/request/validation/TestRequestFeatureValidatorProcessor.java
index d55cefc2e729..cb30adaa425f 100644
--- a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/request/validation/TestRequestFeatureValidatorProcessor.java
+++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/request/validation/TestRequestFeatureValidatorProcessor.java
@@ -487,9 +487,9 @@ private StringBuilder buildMethodSignature(
StringBuilder signature = new StringBuilder();
signature.append(" ");
for (String modifier : modifiers) {
- signature.append(modifier).append(" ");
+ signature.append(modifier).append(' ');
}
- signature.append(returnType).append(" ");
+ signature.append(returnType).append(' ');
signature.append("validatorMethod(");
signature.append(createParameterList(paramspecs));
signature.append(") ");
diff --git a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/FileStatusAdapter.java b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/FileStatusAdapter.java
index cf858bb335bb..eba24f88bb5c 100644
--- a/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/FileStatusAdapter.java
+++ b/hadoop-ozone/ozonefs-common/src/main/java/org/apache/hadoop/fs/ozone/FileStatusAdapter.java
@@ -145,7 +145,7 @@ public BlockLocation[] getBlockLocations() {
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName())
- .append("{")
+ .append('{')
.append("path=").append(path)
.append("; isDirectory=").append(isdir);
if (isFile()) {
@@ -159,7 +159,7 @@ public String toString() {
.append("; group=").append(group)
.append("; permission=").append(permission)
.append("; isSymlink=").append(getSymlink())
- .append("}");
+ .append('}');
return sb.toString();
}
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
index a2cb0c699371..5a474a024e29 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
@@ -177,7 +177,7 @@ public static String buildCanonicalRequest(
for (String header : StringUtils.split(signedHeaders, ';')) {
canonicalHeaders.append(header.toLowerCase());
- canonicalHeaders.append(":");
+ canonicalHeaders.append(':');
if (headers.containsKey(header)) {
String headerValue = headers.get(header);
canonicalHeaders.append(headerValue);
@@ -291,7 +291,7 @@ private static String getQueryParamString(
for (String p : params) {
if (!p.equals("X-Amz-Signature")) {
if (result.length() > 0) {
- result.append("&");
+ result.append('&');
}
result.append(urlEncode(p));
result.append('=');
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java
index 4e8b9b6bad53..4f22bd418447 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java
@@ -312,7 +312,7 @@ public void testPutObjectWithTooManyTags() throws Exception {
for (int i = 0; i < TAG_NUM_LIMIT + 1; i++) {
sb.append(String.format("tag%d=value%d", i, i));
if (i < TAG_NUM_LIMIT) {
- sb.append("&");
+ sb.append('&');
}
}
when(headersWithTooManyTags.getHeaderString(TAG_HEADER)).thenReturn(sb.toString());
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/audit/parser/common/DatabaseHelper.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/audit/parser/common/DatabaseHelper.java
index 8b68f396f69e..7d6271aca3c8 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/audit/parser/common/DatabaseHelper.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/audit/parser/common/DatabaseHelper.java
@@ -209,9 +209,9 @@ private static String executeStatement(String dbName, String sql)
while (rs.next()) {
for (int index = 1; index <= cols; index++) {
result.append(rs.getObject(index));
- result.append("\t");
+ result.append('\t');
}
- result.append("\n");
+ result.append('\n');
}
}
}
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/OmMetadataGenerator.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/OmMetadataGenerator.java
index 834eb60b9900..e89c71ed111a 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/OmMetadataGenerator.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/OmMetadataGenerator.java
@@ -308,7 +308,7 @@ public Supplier<String> realTimeStatusSupplier() {
maxValueRecorder.put(name, maxValue);
valueRecorder.put(name, curValue);
instantsRecorder.put(name, now);
- sb.append(" ")
+ sb.append(' ')
.append(name)
.append(": rate ")
.append(rate)
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/ProgressBar.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/ProgressBar.java
index b578a3430cfc..4fddb9054be7 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/ProgressBar.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/freon/ProgressBar.java
@@ -177,7 +177,7 @@ private void printProgressBar(PrintStream stream, long value) {
if (!realTimeMessage.isEmpty()) {
shrinkTimes = 3;
}
- sb.append(" ").append(String.format("%.2f", percent)).append("% |");
+ sb.append(' ').append(String.format("%.2f", percent)).append("% |");
for (int i = 0; i <= percent / shrinkTimes; i++) {
sb.append('█');
}
@@ -185,7 +185,7 @@ private void printProgressBar(PrintStream stream, long value) {
sb.append(' ');
}
sb.append("| ");
- sb.append(value).append("/").append(maxValue);
+ sb.append(value).append('/').append(maxValue);
long timeInSec = TimeUnit.SECONDS.convert(
System.nanoTime() - startTime, TimeUnit.NANOSECONDS);
String timeToPrint = String.format("%d:%02d:%02d", timeInSec / 3600,
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/datanode/schemaupgrade/ContainerUpgradeResult.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/datanode/schemaupgrade/ContainerUpgradeResult.java
index aa7b3dbb266e..83f6c4d55267 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/datanode/schemaupgrade/ContainerUpgradeResult.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/datanode/schemaupgrade/ContainerUpgradeResult.java
@@ -113,7 +113,7 @@ public String toString() {
stringBuilder.append(getCostMs());
stringBuilder.append(", status=");
stringBuilder.append(status);
- stringBuilder.append("}");
+ stringBuilder.append('}');
return stringBuilder.toString();
}
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/datanode/schemaupgrade/VolumeUpgradeResult.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/datanode/schemaupgrade/VolumeUpgradeResult.java
index d7dce31e8746..067de25b3ba0 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/datanode/schemaupgrade/VolumeUpgradeResult.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/repair/datanode/schemaupgrade/VolumeUpgradeResult.java
@@ -94,7 +94,7 @@ public String toString() {
if (resultMap != null) {
resultMap.forEach((k, r) -> {
stringBuilder.append(r.toString());
- stringBuilder.append("\n");
+ stringBuilder.append('\n');
total.addAndGet(r.getTotalRow());
});
}
diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/utils/FormattingCLIUtils.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/utils/FormattingCLIUtils.java
index df5844ca0851..6bc22842015f 100644
--- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/utils/FormattingCLIUtils.java
+++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/utils/FormattingCLIUtils.java
@@ -133,12 +133,12 @@ private void buildTitle() {
if (this.title.length() > maxTitleSize) {
this.title = this.title.substring(0, maxTitleSize);
}
- this.join.append("+");
+ this.join.append('+');
for (int i = 0; i < maxTitleSize + 2; i++) {
- this.join.append("-");
+ this.join.append('-');
}
this.join.append("+\n")
- .append("|")
+ .append('|')
.append(StrUtils.center(this.title, maxTitleSize + 2, ' '))
.append("|\n");
this.lastTableRowType = TableRowType.TITLE;
@@ -177,14 +177,14 @@ private void buildTable() {
* @param data dataLine
*/
private void buildRowBorder(List<String> data) {
- this.join.append("+");
+ this.join.append('+');
for (int i = 0, len = data.size(); i < len; i++) {
for (int j = 0; j < this.maxColMap.get(i) + 2; j++) {
- this.join.append("-");
+ this.join.append('-');
}
- this.join.append("+");
+ this.join.append('+');
}
- this.join.append("\n");
+ this.join.append('\n');
}
/**
@@ -192,12 +192,12 @@ private void buildRowBorder(List data) {
* @param data dataLine
*/
private void buildRowLine(List<String> data) {
- this.join.append("|");
+ this.join.append('|');
for (int i = 0, len = data.size(); i < len; i++) {
this.join.append(StrUtils.center(data.get(i), this.maxColMap.get(i) + 2, ' '))
- .append("|");
+ .append('|');
}
- this.join.append("\n");
+ this.join.append('\n');
}
/**