HBASE-29026 Replace some deprecated calls #6585

Merged: merged 1 commit on Jan 27, 2025
@@ -135,4 +135,16 @@ public static Map<String, String> parseURIQueries(URI uri) {
public static void applyURIQueriesToConf(URI uri, Configuration conf) {
parseURIQueries(uri).forEach(conf::set);
}

/**
* Note: This method was taken from org.apache.hadoop.util.StringUtils.humanReadableInt(long).
* Reason: that method was deprecated, and this method provides an easy-to-understand use of
* StringUtils.TraditionalBinaryPrefix.long2String. Given an integer, return a string that is in
* an approximate, but human-readable format.
* @param number the number to format
* @return a human readable form of the integer
*/
public static String humanReadableInt(long number) {
return org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.long2String(number, "", 1);
}
}
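
As a quick illustration of the new helper, here is a minimal, hypothetical sketch (the demo class name is invented and not part of the patch; the exact spacing and precision of the formatted output come from Hadoop's TraditionalBinaryPrefix.long2String, so the values in the comment are approximate):

import org.apache.hadoop.hbase.util.Strings;

// Hypothetical demo class, not part of this change.
public class HumanReadableIntDemo {
  public static void main(String[] args) {
    // Values below 1K print as plain integers; larger values are scaled to a
    // binary prefix (K, M, G, ...) with one decimal place, e.g. roughly "1.0 K" for 1024.
    long[] samples = { 512L, 1024L, 10L * 1024 * 1024, 3_000_000_000L };
    for (long n : samples) {
      System.out.println(n + " -> " + Strings.humanReadableInt(n));
    }
  }
}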
@@ -36,11 +36,11 @@
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;

@@ -125,8 +125,8 @@ protected void testHdfsStreaming(Path filename) throws IOException {
.println("total time to open: " + fileOpenTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
System.out.println("total time to read: " + streamTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
System.out.println(
"total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
"total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")");
System.out.println("throughput : " + Strings.humanReadableInt((long) throughput) + "B/s");
}

private Scan getScan() {
@@ -189,14 +189,14 @@ public void testScan() throws IOException {
System.out.println("Scan metrics:\n" + metrics.getMetricsMap());

System.out.println(
"total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
"total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")");
System.out.println("throughput : " + Strings.humanReadableInt((long) throughput) + "B/s");
System.out.println("total rows : " + numRows);
System.out
.println("throughput : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s");
.println("throughput : " + Strings.humanReadableInt((long) throughputRows) + " rows/s");
System.out.println("total cells : " + numCells);
System.out.println(
"throughput : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s");
System.out
.println("throughput : " + Strings.humanReadableInt((long) throughputCells) + " cells/s");
}

public void testSnapshotScan() throws IOException {
@@ -246,14 +246,14 @@ public void testSnapshotScan() throws IOException {
System.out.println("Scan metrics:\n" + metrics.getMetricsMap());

System.out.println(
"total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
"total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")");
System.out.println("throughput : " + Strings.humanReadableInt((long) throughput) + "B/s");
System.out.println("total rows : " + numRows);
System.out
.println("throughput : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s");
.println("throughput : " + Strings.humanReadableInt((long) throughputRows) + " rows/s");
System.out.println("total cells : " + numCells);
System.out.println(
"throughput : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s");
System.out
.println("throughput : " + Strings.humanReadableInt((long) throughputCells) + " cells/s");

}

@@ -311,14 +311,14 @@ public void testScanMapReduce() throws IOException, InterruptedException, ClassN
System.out.println("total time to scan: " + scanTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");

System.out.println(
"total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
"total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")");
System.out.println("throughput : " + Strings.humanReadableInt((long) throughput) + "B/s");
System.out.println("total rows : " + numRows);
System.out
.println("throughput : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s");
.println("throughput : " + Strings.humanReadableInt((long) throughputRows) + " rows/s");
System.out.println("total cells : " + numCells);
System.out.println(
"throughput : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s");
System.out
.println("throughput : " + Strings.humanReadableInt((long) throughputCells) + " cells/s");
}

public void testSnapshotScanMapReduce()
@@ -362,14 +362,14 @@ public void testSnapshotScanMapReduce()
System.out.println("total time to scan: " + scanTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");

System.out.println(
"total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
"total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")");
System.out.println("throughput : " + Strings.humanReadableInt((long) throughput) + "B/s");
System.out.println("total rows : " + numRows);
System.out
.println("throughput : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s");
.println("throughput : " + Strings.humanReadableInt((long) throughputRows) + " rows/s");
System.out.println("total cells : " + numCells);
System.out.println(
"throughput : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s");
System.out
.println("throughput : " + Strings.humanReadableInt((long) throughputCells) + " cells/s");
}

@Override
@@ -40,7 +40,6 @@
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -206,7 +205,7 @@ public void run() {
double averageKeysPerSecond = (time > 0) ? (numKeys * 1000.0 / time) : 0;

LOG.info(threadsLeft + "Keys=" + numKeys + ", cols="
+ StringUtils.humanReadableInt(numCols.get()) + ", time=" + formatTime(time)
+ Strings.humanReadableInt(numCols.get()) + ", time=" + formatTime(time)
+ ((numKeys > 0 && time > 0)
? (" Overall: [" + "keys/s= " + (numKeys * 1000.0 / time) + ", latency="
+ String.format("%.2f", (double) totalOpTime / (double) numKeys) + " ms]")
@@ -37,8 +37,8 @@
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;

/**
@@ -122,7 +122,7 @@ protected void reduce(ImmutableBytesWritable row, Iterable<Put> puts,
}
}
context.setStatus("Read " + map.size() + " entries of " + map.getClass() + "("
+ StringUtils.humanReadableInt(curSize) + ")");
+ Strings.humanReadableInt(curSize) + ")");
int index = 0;
for (KeyValue kv : map) {
context.write(row, kv);
@@ -46,7 +46,6 @@
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.net.DNS;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -171,7 +170,7 @@ public RecordReader<ImmutableBytesWritable, Result> createRecordReader(InputSpli
throw new IOException(INITIALIZATION_ERROR, exception);
}
TableSplit tSplit = (TableSplit) split;
LOG.info("Input split length: " + StringUtils.humanReadableInt(tSplit.getLength()) + " bytes.");
LOG.info("Input split length: " + Strings.humanReadableInt(tSplit.getLength()) + " bytes.");
final TableRecordReader trr =
this.tableRecordReader != null ? this.tableRecordReader : new TableRecordReader();
Scan sc = new Scan(this.scan);
@@ -35,10 +35,10 @@
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.security.visibility.InvalidLabelException;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;

/**
@@ -187,7 +187,7 @@ protected void reduce(ImmutableBytesWritable rowKey, java.lang.Iterable<Text> li
}
}
context.setStatus("Read " + kvs.size() + " entries of " + kvs.getClass() + "("
+ StringUtils.humanReadableInt(curSize) + ")");
+ Strings.humanReadableInt(curSize) + ")");
int index = 0;
for (KeyValue kv : kvs) {
context.write(rowKey, kv);
@@ -60,6 +60,7 @@
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
@@ -236,7 +237,7 @@ public void setup(Context context) throws IOException {
// Use the default block size of the outputFs if bigger
int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);
bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);
LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));
LOG.info("Using bufferSize=" + Strings.humanReadableInt(bufferSize));
reportSize = conf.getInt(CONF_REPORT_SIZE, REPORT_SIZE);

for (Counter c : Counter.values()) {
@@ -338,10 +339,9 @@ private void copyFile(final Context context, final SnapshotFileInfo inputInfo,

long etime = EnvironmentEdgeManager.currentTime();
LOG.info("copy completed for input=" + inputPath + " output=" + outputPath);
LOG
.info("size=" + totalBytesWritten + " (" + StringUtils.humanReadableInt(totalBytesWritten)
+ ")" + " time=" + StringUtils.formatTimeDiff(etime, stime) + String
.format(" %.3fM/sec", (totalBytesWritten / ((etime - stime) / 1000.0)) / 1048576.0));
LOG.info("size=" + totalBytesWritten + " (" + Strings.humanReadableInt(totalBytesWritten)
+ ")" + " time=" + StringUtils.formatTimeDiff(etime, stime) + String.format(" %.3fM/sec",
(totalBytesWritten / ((etime - stime) / 1000.0)) / 1048576.0));
context.getCounter(Counter.FILES_COPIED).increment(1);

// Try to Preserve attributes
@@ -433,7 +433,7 @@ private long copyData(final Context context, final Path inputPath, final InputSt
final Path outputPath, final FSDataOutputStream out, final long inputFileSize)
throws IOException {
final String statusMessage =
"copied %s/" + StringUtils.humanReadableInt(inputFileSize) + " (%.1f%%)";
"copied %s/" + Strings.humanReadableInt(inputFileSize) + " (%.1f%%)";

try {
byte[] buffer = new byte[bufferSize];
@@ -448,19 +448,18 @@ private long copyData(final Context context, final Path inputPath, final InputSt

if (reportBytes >= reportSize) {
context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);
context.setStatus(
String.format(statusMessage, StringUtils.humanReadableInt(totalBytesWritten),
context
.setStatus(String.format(statusMessage, Strings.humanReadableInt(totalBytesWritten),
(totalBytesWritten / (float) inputFileSize) * 100.0f) + " from " + inputPath
+ " to " + outputPath);
reportBytes = 0;
}
}

context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);
context
.setStatus(String.format(statusMessage, StringUtils.humanReadableInt(totalBytesWritten),
(totalBytesWritten / (float) inputFileSize) * 100.0f) + " from " + inputPath + " to "
+ outputPath);
context.setStatus(String.format(statusMessage, Strings.humanReadableInt(totalBytesWritten),
(totalBytesWritten / (float) inputFileSize) * 100.0f) + " from " + inputPath + " to "
+ outputPath);

return totalBytesWritten;
} finally {
@@ -760,7 +759,7 @@ public int compare(Pair<SnapshotFileInfo, Long> a, Pair<SnapshotFileInfo, Long>

if (LOG.isDebugEnabled()) {
for (int i = 0; i < sizeGroups.length; ++i) {
LOG.debug("export split=" + i + " size=" + StringUtils.humanReadableInt(sizeGroups[i]));
LOG.debug("export split=" + i + " size=" + Strings.humanReadableInt(sizeGroups[i]));
}
}

@@ -63,6 +63,7 @@ org.apache.hadoop.hbase.util.CommonFSUtils;
org.apache.hadoop.hbase.util.JvmVersion;
org.apache.hadoop.hbase.util.PrettyPrinter;
org.apache.hadoop.util.StringUtils;
org.apache.hadoop.hbase.util.Strings;
</%import>

<%if format.equals("json") %>
@@ -789,7 +790,7 @@ AssignmentManager assignmentManager = master.getAssignmentManager();
<td><% peerConfig.isSerial() %></td>
<td><% peerConfig.getRemoteWALDir() == null ? "" : peerConfig.getRemoteWALDir() %>
<td><% peer.getSyncReplicationState() %>
<td><% peerConfig.getBandwidth() == 0? "UNLIMITED" : StringUtils.humanReadableInt(peerConfig.getBandwidth()) %></td>
<td><% peerConfig.getBandwidth() == 0? "UNLIMITED" : Strings.humanReadableInt(peerConfig.getBandwidth()) %></td>
<td><% peerConfig.replicateAllUserTables() %></td>
<td>
<% peerConfig.getNamespaces() == null ? "" : ReplicationPeerConfigUtil.convertToString(peerConfig.getNamespaces()).replaceAll(";", "; ") %>
@@ -46,8 +46,8 @@
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.ObjectIntPair;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -1091,8 +1091,8 @@ public long writeIndexBlocks(FSDataOutputStream out) throws IOException {
LOG.trace("Wrote a " + numLevels + "-level index with root level at pos "
+ rootLevelIndexPos + ", " + rootChunk.getNumEntries() + " root-level entries, "
+ totalNumEntries + " total entries, "
+ StringUtils.humanReadableInt(this.totalBlockOnDiskSize) + " on-disk size, "
+ StringUtils.humanReadableInt(totalBlockUncompressedSize) + " total uncompressed size.");
+ Strings.humanReadableInt(this.totalBlockOnDiskSize) + " on-disk size, "
+ Strings.humanReadableInt(totalBlockUncompressedSize) + " total uncompressed size.");
}
return rootLevelIndexPos;
}
@@ -21,9 +21,9 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -63,7 +63,7 @@ public void postAppend(final long size, final long time, final WALKey logkey,
if (time > 1000) {
source.incrementSlowAppendCount();
LOG.warn(String.format("%s took %d ms appending an edit to wal; len~=%s",
Thread.currentThread().getName(), time, StringUtils.humanReadableInt(size)));
Thread.currentThread().getName(), time, Strings.humanReadableInt(size)));
}
}

@@ -42,7 +42,7 @@
import org.apache.hadoop.hbase.io.WALLink;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -521,7 +521,7 @@ public void storeFile(final RegionInfo regionInfo, final String family,
}

private String fileSizeToString(long size) {
return printSizeInBytes ? Long.toString(size) : StringUtils.humanReadableInt(size);
return printSizeInBytes ? Long.toString(size) : Strings.humanReadableInt(size);
}

@Override
@@ -21,29 +21,17 @@
import="static org.apache.commons.lang3.StringEscapeUtils.escapeXml"
import="java.util.Collections"
import="java.util.Comparator"
import="java.util.ArrayList"
import="java.util.Date"
import="java.util.List"
import="java.util.Set"
import="org.apache.hadoop.hbase.master.HMaster"
import="org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv"
import="org.apache.hadoop.hbase.procedure2.LockedResource"
import="org.apache.hadoop.hbase.procedure2.Procedure"
import="org.apache.hadoop.hbase.procedure2.ProcedureExecutor"
import="org.apache.hadoop.hbase.procedure2.util.StringUtils"
import="org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix"
%>
<%@ page import="org.apache.hadoop.hbase.master.procedure.ServerCrashProcedure" %>
<%@ page import="org.apache.hadoop.hbase.master.assignment.TransitRegionStateProcedure" %>
<%@ page import="org.apache.hadoop.hbase.master.assignment.OpenRegionProcedure" %>
<%@ page import="org.apache.hadoop.hbase.master.assignment.CloseRegionProcedure" %>
<%@ page import="org.apache.hadoop.hbase.metrics.OperationMetrics" %>
<%@ page import="java.util.Map" %>
<%@ page import="java.util.HashMap" %>
<%@ page import="org.apache.hadoop.hbase.master.MetricsAssignmentManagerSource" %>
<%@ page import="org.apache.hadoop.hbase.master.MetricsAssignmentManager" %>
<%@ page import="org.apache.hadoop.hbase.procedure2.ProcedureMetrics" %>
<%@ page import="org.apache.hadoop.hbase.metrics.Snapshot" %>
<%@ page import="org.apache.hadoop.hbase.metrics.Histogram" %>
<%@ page import="java.util.TreeMap" %>
<%@ page import="org.apache.hadoop.hbase.metrics.impl.HistogramImpl" %>
@@ -90,7 +78,7 @@
<h1>Procedure Time Statistics</h1>
</div>
</div>
<p>We list proceduces completed successfully of the following types only: ServerCrashProcedure, TransitRegionStateProcedure,
<p>We list procedures completed successfully of the following types only: ServerCrashProcedure, TransitRegionStateProcedure,
OpenRegionProcedure, CloseRegionProcedure.</p>
<table class="table table-striped" width="90%" >
<tr>