From cc52e795a2689aa6153c07fb37891b4bae7e0536 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?D=C3=A1vid=20Paksy?= Date: Wed, 8 Jan 2025 13:11:16 +0100 Subject: [PATCH] HBASE-29026 Replace some deprecated calls - Replaced the usage of the following deprecated methods: - java.net.URLEncoder.encode(String) -> java.net.URLEncoder.encode(String, Charset) - StringUtils.humanReadableInt(long) -> StringUtils.TraditionalBinaryPrefix.long2String(long, "", 1): For this a new static util method is introduced: org.apache.hadoop.hbase.util.Strings.humanReadableInt - org.apache.hadoop.fs.FileSystem.getLength(Path) -> getFileStatus(Path).getLen() - org.apache.hadoop.hbase.ServerName.getStartcode() -> org.apache.hadoop.hbase.ServerName.getStartCode() - Also removed unused imports in the touched JSP files. --- .../org/apache/hadoop/hbase/util/Strings.java | 12 +++++ .../hbase/ScanPerformanceEvaluation.java | 46 +++++++++---------- .../hbase/util/MultiThreadedAction.java | 3 +- .../hbase/mapreduce/PutSortReducer.java | 4 +- .../hbase/mapreduce/TableInputFormatBase.java | 3 +- .../hbase/mapreduce/TextSortReducer.java | 4 +- .../hadoop/hbase/snapshot/ExportSnapshot.java | 25 +++++----- .../hbase/tmpl/master/MasterStatusTmpl.jamon | 3 +- .../hbase/io/hfile/HFileBlockIndex.java | 6 +-- .../hbase/regionserver/wal/MetricsWAL.java | 4 +- .../hadoop/hbase/snapshot/SnapshotInfo.java | 4 +- .../hbase-webapps/master/procedures.jsp | 14 +----- .../resources/hbase-webapps/master/quotas.jsp | 1 - .../hbase-webapps/master/rsgroup.jsp | 4 +- .../hbase-webapps/master/snapshot.jsp | 8 ++-- .../hbase-webapps/master/snapshotsStats.jsp | 20 ++++---- .../resources/hbase-webapps/master/table.jsp | 19 ++++---- .../hbase-webapps/master/tablesDetailed.jsp | 2 - .../hbase-webapps/regionserver/region.jsp | 6 +-- .../regionserver/TestHRegionReplayEvents.java | 6 +-- .../compactions/MockStoreFileGenerator.java | 4 +- 21 files changed, 95 insertions(+), 103 deletions(-) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Strings.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Strings.java index b5d760bf0d7e..6759603f3aa5 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Strings.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Strings.java @@ -135,4 +135,16 @@ public static Map parseURIQueries(URI uri) { public static void applyURIQueriesToConf(URI uri, Configuration conf) { parseURIQueries(uri).forEach(conf::set); } + + /** + * Note: This method was taken from org.apache.hadoop.util.StringUtils.humanReadableInt(long). + * Reason: that method got deprecated and this method provides an easy-to-understand usage of + * StringUtils.TraditionalBinaryPrefix.long2String. Given an integer, return a string that is in + * an approximate, but human readable format. 
+ * @param number the number to format + * @return a human readable form of the integer + */ + public static String humanReadableInt(long number) { + return org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.long2String(number, "", 1); + } } diff --git a/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java index 983af877b98c..889d769dbc41 100644 --- a/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java +++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java @@ -36,11 +36,11 @@ import org.apache.hadoop.hbase.mapreduce.TableMapper; import org.apache.hadoop.hbase.util.AbstractHBaseTool; import org.apache.hadoop.hbase.util.CommonFSUtils; +import org.apache.hadoop.hbase.util.Strings; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; -import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.ToolRunner; import org.apache.yetus.audience.InterfaceAudience; @@ -125,8 +125,8 @@ protected void testHdfsStreaming(Path filename) throws IOException { .println("total time to open: " + fileOpenTimer.elapsed(TimeUnit.MILLISECONDS) + " ms"); System.out.println("total time to read: " + streamTimer.elapsed(TimeUnit.MILLISECONDS) + " ms"); System.out.println( - "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")"); - System.out.println("throghput : " + StringUtils.humanReadableInt((long) throughput) + "B/s"); + "total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")"); + System.out.println("throghput : " + Strings.humanReadableInt((long) throughput) + "B/s"); } private Scan getScan() { @@ -189,14 +189,14 @@ public void testScan() throws IOException { System.out.println("Scan metrics:\n" + metrics.getMetricsMap()); System.out.println( - "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")"); - System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughput) + "B/s"); + "total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")"); + System.out.println("throughput : " + Strings.humanReadableInt((long) throughput) + "B/s"); System.out.println("total rows : " + numRows); System.out - .println("throughput : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s"); + .println("throughput : " + Strings.humanReadableInt((long) throughputRows) + " rows/s"); System.out.println("total cells : " + numCells); - System.out.println( - "throughput : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s"); + System.out + .println("throughput : " + Strings.humanReadableInt((long) throughputCells) + " cells/s"); } public void testSnapshotScan() throws IOException { @@ -246,14 +246,14 @@ public void testSnapshotScan() throws IOException { System.out.println("Scan metrics:\n" + metrics.getMetricsMap()); System.out.println( - "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")"); - System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughput) + "B/s"); + "total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")"); + System.out.println("throughput : " + Strings.humanReadableInt((long) throughput) + "B/s"); 
System.out.println("total rows : " + numRows); System.out - .println("throughput : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s"); + .println("throughput : " + Strings.humanReadableInt((long) throughputRows) + " rows/s"); System.out.println("total cells : " + numCells); - System.out.println( - "throughput : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s"); + System.out + .println("throughput : " + Strings.humanReadableInt((long) throughputCells) + " cells/s"); } @@ -311,14 +311,14 @@ public void testScanMapReduce() throws IOException, InterruptedException, ClassN System.out.println("total time to scan: " + scanTimer.elapsed(TimeUnit.MILLISECONDS) + " ms"); System.out.println( - "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")"); - System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughput) + "B/s"); + "total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")"); + System.out.println("throughput : " + Strings.humanReadableInt((long) throughput) + "B/s"); System.out.println("total rows : " + numRows); System.out - .println("throughput : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s"); + .println("throughput : " + Strings.humanReadableInt((long) throughputRows) + " rows/s"); System.out.println("total cells : " + numCells); - System.out.println( - "throughput : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s"); + System.out + .println("throughput : " + Strings.humanReadableInt((long) throughputCells) + " cells/s"); } public void testSnapshotScanMapReduce() @@ -362,14 +362,14 @@ public void testSnapshotScanMapReduce() System.out.println("total time to scan: " + scanTimer.elapsed(TimeUnit.MILLISECONDS) + " ms"); System.out.println( - "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")"); - System.out.println("throughput : " + StringUtils.humanReadableInt((long) throughput) + "B/s"); + "total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")"); + System.out.println("throughput : " + Strings.humanReadableInt((long) throughput) + "B/s"); System.out.println("total rows : " + numRows); System.out - .println("throughput : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s"); + .println("throughput : " + Strings.humanReadableInt((long) throughputRows) + " rows/s"); System.out.println("total cells : " + numCells); - System.out.println( - "throughput : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s"); + System.out + .println("throughput : " + Strings.humanReadableInt((long) throughputCells) + " cells/s"); } @Override diff --git a/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java index 6eb27c5be8fb..dbe22dc05498 100644 --- a/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java +++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java @@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.client.RegionLocator; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator; -import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -206,7 +205,7 @@ public void run() { double averageKeysPerSecond = (time > 
0) ? (numKeys * 1000.0 / time) : 0; LOG.info(threadsLeft + "Keys=" + numKeys + ", cols=" - + StringUtils.humanReadableInt(numCols.get()) + ", time=" + formatTime(time) + + Strings.humanReadableInt(numCols.get()) + ", time=" + formatTime(time) + ((numKeys > 0 && time > 0) ? (" Overall: [" + "keys/s= " + (numKeys * 1000.0 / time) + ", latency=" + String.format("%.2f", (double) totalOpTime / (double) numKeys) + " ms]") diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java index 4cf7bcd9ff22..cd24e01f1346 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java @@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.security.visibility.CellVisibility; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Strings; import org.apache.hadoop.mapreduce.Reducer; -import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; /** @@ -122,7 +122,7 @@ protected void reduce(ImmutableBytesWritable row, Iterable puts, } } context.setStatus("Read " + map.size() + " entries of " + map.getClass() + "(" - + StringUtils.humanReadableInt(curSize) + ")"); + + Strings.humanReadableInt(curSize) + ")"); int index = 0; for (KeyValue kv : map) { context.write(row, kv); diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java index 7d172375c10c..11ea6e58770d 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java @@ -46,7 +46,6 @@ import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.net.DNS; -import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -171,7 +170,7 @@ public RecordReader createRecordReader(InputSpli throw new IOException(INITIALIZATION_ERROR, exception); } TableSplit tSplit = (TableSplit) split; - LOG.info("Input split length: " + StringUtils.humanReadableInt(tSplit.getLength()) + " bytes."); + LOG.info("Input split length: " + Strings.humanReadableInt(tSplit.getLength()) + " bytes."); final TableRecordReader trr = this.tableRecordReader != null ? 
this.tableRecordReader : new TableRecordReader(); Scan sc = new Scan(this.scan); diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java index b374aa86c018..0c5e220b2b94 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java @@ -35,10 +35,10 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.security.visibility.InvalidLabelException; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Strings; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Counter; import org.apache.hadoop.mapreduce.Reducer; -import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; /** @@ -187,7 +187,7 @@ protected void reduce(ImmutableBytesWritable rowKey, java.lang.Iterable li } } context.setStatus("Read " + kvs.size() + " entries of " + kvs.getClass() + "(" - + StringUtils.humanReadableInt(curSize) + ")"); + + Strings.humanReadableInt(curSize) + ")"); int index = 0; for (KeyValue kv : kvs) { context.write(rowKey, kv); diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java index 6f70eefd3b74..42d10ca8d46e 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java @@ -60,6 +60,7 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.HFileArchiveUtil; import org.apache.hadoop.hbase.util.Pair; +import org.apache.hadoop.hbase.util.Strings; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Writable; @@ -236,7 +237,7 @@ public void setup(Context context) throws IOException { // Use the default block size of the outputFs if bigger int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE); bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize); - LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize)); + LOG.info("Using bufferSize=" + Strings.humanReadableInt(bufferSize)); reportSize = conf.getInt(CONF_REPORT_SIZE, REPORT_SIZE); for (Counter c : Counter.values()) { @@ -338,10 +339,9 @@ private void copyFile(final Context context, final SnapshotFileInfo inputInfo, long etime = EnvironmentEdgeManager.currentTime(); LOG.info("copy completed for input=" + inputPath + " output=" + outputPath); - LOG - .info("size=" + totalBytesWritten + " (" + StringUtils.humanReadableInt(totalBytesWritten) - + ")" + " time=" + StringUtils.formatTimeDiff(etime, stime) + String - .format(" %.3fM/sec", (totalBytesWritten / ((etime - stime) / 1000.0)) / 1048576.0)); + LOG.info("size=" + totalBytesWritten + " (" + Strings.humanReadableInt(totalBytesWritten) + + ")" + " time=" + StringUtils.formatTimeDiff(etime, stime) + String.format(" %.3fM/sec", + (totalBytesWritten / ((etime - stime) / 1000.0)) / 1048576.0)); context.getCounter(Counter.FILES_COPIED).increment(1); // Try to Preserve attributes @@ -433,7 +433,7 @@ private long copyData(final Context context, final Path inputPath, final InputSt final Path outputPath, final FSDataOutputStream out, final long inputFileSize) 
throws IOException { final String statusMessage = - "copied %s/" + StringUtils.humanReadableInt(inputFileSize) + " (%.1f%%)"; + "copied %s/" + Strings.humanReadableInt(inputFileSize) + " (%.1f%%)"; try { byte[] buffer = new byte[bufferSize]; @@ -448,8 +448,8 @@ private long copyData(final Context context, final Path inputPath, final InputSt if (reportBytes >= reportSize) { context.getCounter(Counter.BYTES_COPIED).increment(reportBytes); - context.setStatus( - String.format(statusMessage, StringUtils.humanReadableInt(totalBytesWritten), + context + .setStatus(String.format(statusMessage, Strings.humanReadableInt(totalBytesWritten), (totalBytesWritten / (float) inputFileSize) * 100.0f) + " from " + inputPath + " to " + outputPath); reportBytes = 0; @@ -457,10 +457,9 @@ private long copyData(final Context context, final Path inputPath, final InputSt } context.getCounter(Counter.BYTES_COPIED).increment(reportBytes); - context - .setStatus(String.format(statusMessage, StringUtils.humanReadableInt(totalBytesWritten), - (totalBytesWritten / (float) inputFileSize) * 100.0f) + " from " + inputPath + " to " - + outputPath); + context.setStatus(String.format(statusMessage, Strings.humanReadableInt(totalBytesWritten), + (totalBytesWritten / (float) inputFileSize) * 100.0f) + " from " + inputPath + " to " + + outputPath); return totalBytesWritten; } finally { @@ -760,7 +759,7 @@ public int compare(Pair a, Pair if (LOG.isDebugEnabled()) { for (int i = 0; i < sizeGroups.length; ++i) { - LOG.debug("export split=" + i + " size=" + StringUtils.humanReadableInt(sizeGroups[i])); + LOG.debug("export split=" + i + " size=" + Strings.humanReadableInt(sizeGroups[i])); } } diff --git a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon index d66682325945..04387e12d54d 100644 --- a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon +++ b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon @@ -63,6 +63,7 @@ org.apache.hadoop.hbase.util.CommonFSUtils; org.apache.hadoop.hbase.util.JvmVersion; org.apache.hadoop.hbase.util.PrettyPrinter; org.apache.hadoop.util.StringUtils; +org.apache.hadoop.hbase.util.Strings; <%if format.equals("json") %> @@ -789,7 +790,7 @@ AssignmentManager assignmentManager = master.getAssignmentManager(); <% peerConfig.isSerial() %> <% peerConfig.getRemoteWALDir() == null ? "" : peerConfig.getRemoteWALDir() %> <% peer.getSyncReplicationState() %> - <% peerConfig.getBandwidth() == 0? "UNLIMITED" : StringUtils.humanReadableInt(peerConfig.getBandwidth()) %> + <% peerConfig.getBandwidth() == 0? "UNLIMITED" : Strings.humanReadableInt(peerConfig.getBandwidth()) %> <% peerConfig.replicateAllUserTables() %> <% peerConfig.getNamespaces() == null ? 
"" : ReplicationPeerConfigUtil.convertToString(peerConfig.getNamespaces()).replaceAll(";", "; ") %> diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java index 6f610af19721..00a14134c774 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java @@ -46,8 +46,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.ObjectIntPair; +import org.apache.hadoop.hbase.util.Strings; import org.apache.hadoop.io.WritableUtils; -import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -1091,8 +1091,8 @@ public long writeIndexBlocks(FSDataOutputStream out) throws IOException { LOG.trace("Wrote a " + numLevels + "-level index with root level at pos " + rootLevelIndexPos + ", " + rootChunk.getNumEntries() + " root-level entries, " + totalNumEntries + " total entries, " - + StringUtils.humanReadableInt(this.totalBlockOnDiskSize) + " on-disk size, " - + StringUtils.humanReadableInt(totalBlockUncompressedSize) + " total uncompressed size."); + + Strings.humanReadableInt(this.totalBlockOnDiskSize) + " on-disk size, " + + Strings.humanReadableInt(totalBlockUncompressedSize) + " total uncompressed size."); } return rootLevelIndexPos; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java index 89481161f4a6..44241d928956 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java @@ -21,9 +21,9 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CompatibilitySingletonFactory; import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.util.Strings; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.wal.WALKey; -import org.apache.hadoop.util.StringUtils; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -63,7 +63,7 @@ public void postAppend(final long size, final long time, final WALKey logkey, if (time > 1000) { source.incrementSlowAppendCount(); LOG.warn(String.format("%s took %d ms appending an edit to wal; len~=%s", - Thread.currentThread().getName(), time, StringUtils.humanReadableInt(size))); + Thread.currentThread().getName(), time, Strings.humanReadableInt(size))); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java index 25e7f3ebd33f..7a7d44241766 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java @@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.io.WALLink; import org.apache.hadoop.hbase.util.AbstractHBaseTool; import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.apache.hadoop.util.StringUtils; +import org.apache.hadoop.hbase.util.Strings; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -521,7 +521,7 @@ public 
void storeFile(final RegionInfo regionInfo, final String family, } private String fileSizeToString(long size) { - return printSizeInBytes ? Long.toString(size) : StringUtils.humanReadableInt(size); + return printSizeInBytes ? Long.toString(size) : Strings.humanReadableInt(size); } @Override diff --git a/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp b/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp index c25c5c3886ba..accc9043e802 100644 --- a/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp +++ b/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp @@ -21,29 +21,17 @@ import="static org.apache.commons.lang3.StringEscapeUtils.escapeXml" import="java.util.Collections" import="java.util.Comparator" - import="java.util.ArrayList" import="java.util.Date" import="java.util.List" - import="java.util.Set" import="org.apache.hadoop.hbase.master.HMaster" import="org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv" import="org.apache.hadoop.hbase.procedure2.LockedResource" import="org.apache.hadoop.hbase.procedure2.Procedure" import="org.apache.hadoop.hbase.procedure2.ProcedureExecutor" - import="org.apache.hadoop.hbase.procedure2.util.StringUtils" - import="org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix" %> -<%@ page import="org.apache.hadoop.hbase.master.procedure.ServerCrashProcedure" %> -<%@ page import="org.apache.hadoop.hbase.master.assignment.TransitRegionStateProcedure" %> -<%@ page import="org.apache.hadoop.hbase.master.assignment.OpenRegionProcedure" %> -<%@ page import="org.apache.hadoop.hbase.master.assignment.CloseRegionProcedure" %> -<%@ page import="org.apache.hadoop.hbase.metrics.OperationMetrics" %> <%@ page import="java.util.Map" %> -<%@ page import="java.util.HashMap" %> -<%@ page import="org.apache.hadoop.hbase.master.MetricsAssignmentManagerSource" %> <%@ page import="org.apache.hadoop.hbase.master.MetricsAssignmentManager" %> <%@ page import="org.apache.hadoop.hbase.procedure2.ProcedureMetrics" %> -<%@ page import="org.apache.hadoop.hbase.metrics.Snapshot" %> <%@ page import="org.apache.hadoop.hbase.metrics.Histogram" %> <%@ page import="java.util.TreeMap" %> <%@ page import="org.apache.hadoop.hbase.metrics.impl.HistogramImpl" %> @@ -90,7 +78,7 @@

Procedure Time Statistics

-      We list proceduces completed successfully of the following types only: ServerCrashProcedure, TransitRegionStateProcedure,
+      We list procedures completed successfully of the following types only: ServerCrashProcedure, TransitRegionStateProcedure,
       OpenRegionProcedure, CloseRegionProcedure.

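Note: the snippet below is illustrative only and not part of the patch. It sketches how the new org.apache.hadoop.hbase.util.Strings.humanReadableInt helper (added in Strings.java above) behaves; the demo class name and sample values are invented for the example.

    import org.apache.hadoop.hbase.util.Strings;

    public class HumanReadableIntDemo {
      public static void main(String[] args) {
        // Delegates to TraditionalBinaryPrefix.long2String(number, "", 1):
        // binary (1024-based) prefixes, one decimal place.
        System.out.println(Strings.humanReadableInt(512L));         // roughly "512"
        System.out.println(Strings.humanReadableInt(1024L));        // roughly "1.0 K"
        System.out.println(Strings.humanReadableInt(1024L * 1024)); // roughly "1.0 M"
        // The deprecated StringUtils.humanReadableInt printed e.g. "1.0m",
        // so prefix case and spacing may differ slightly at call sites.
      }
    }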
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/quotas.jsp b/hbase-server/src/main/resources/hbase-webapps/master/quotas.jsp index 52a92552432c..1c5bfb6647b6 100644 --- a/hbase-server/src/main/resources/hbase-webapps/master/quotas.jsp +++ b/hbase-server/src/main/resources/hbase-webapps/master/quotas.jsp @@ -21,7 +21,6 @@ import="java.util.concurrent.TimeUnit" import="java.util.ArrayList" import="java.util.List" - import="org.apache.hadoop.conf.Configuration" import="org.apache.hadoop.hbase.master.HMaster" import="org.apache.hadoop.hbase.quotas.MasterQuotaManager" import="org.apache.hadoop.hbase.quotas.QuotaRetriever" diff --git a/hbase-server/src/main/resources/hbase-webapps/master/rsgroup.jsp b/hbase-server/src/main/resources/hbase-webapps/master/rsgroup.jsp index 02fc1bfc39e8..c86c9902ea1b 100644 --- a/hbase-server/src/main/resources/hbase-webapps/master/rsgroup.jsp +++ b/hbase-server/src/main/resources/hbase-webapps/master/rsgroup.jsp @@ -24,8 +24,6 @@ import="java.util.List" import="java.util.Map" import="java.util.function.Function" - import="java.util.regex.Pattern" - import="java.util.stream.Stream" import="java.util.stream.Collectors" import="org.apache.hadoop.hbase.ServerName" import="org.apache.hadoop.hbase.TableName" @@ -169,7 +167,7 @@ totalRequestsPerSecond += sl.getRequestCountPerSecond(); lastContact = (System.currentTimeMillis() - sl.getReportTimestamp())/1000; } - long startcode = serverName.getStartcode(); + long startcode = serverName.getStartCode(); int infoPort = master.getRegionServerInfoPort(serverName); String url = "//" + serverName.getHostname() + ":" + infoPort + "/rs-status";%> diff --git a/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp b/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp index e85cab95d7e5..35b2967fc1c4 100644 --- a/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp +++ b/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp @@ -26,7 +26,7 @@ import="org.apache.hadoop.hbase.master.HMaster" import="org.apache.hadoop.hbase.snapshot.SnapshotInfo" import="org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils" - import="org.apache.hadoop.util.StringUtils" + import="org.apache.hadoop.hbase.util.Strings" import="org.apache.hadoop.hbase.TableName" %> <% @@ -134,14 +134,14 @@
         <%= stats.getStoreFilesCount() %> HFiles (<%= stats.getArchivedStoreFilesCount() %> in archive),
-        total size <%= StringUtils.humanReadableInt(stats.getStoreFilesSize()) %>
+        total size <%= Strings.humanReadableInt(stats.getStoreFilesSize()) %>
         (<%= stats.getSharedStoreFilePercentage() %>%
-        <%= StringUtils.humanReadableInt(stats.getSharedStoreFilesSize()) %> shared with the source
+        <%= Strings.humanReadableInt(stats.getSharedStoreFilesSize()) %> shared with the source
         table)
         <%= stats.getLogsCount() %> Logs, total size
-        <%= StringUtils.humanReadableInt(stats.getLogsSize()) %>
+        <%= Strings.humanReadableInt(stats.getLogsSize()) %>
<% if (stats.isSnapshotCorrupted()) { %> diff --git a/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp b/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp index 6202d7409b5c..bc913188282a 100644 --- a/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp +++ b/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp @@ -28,7 +28,7 @@ import="org.apache.hadoop.hbase.snapshot.SnapshotInfo" import="org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils" import="org.apache.hadoop.hbase.TableName" - import="org.apache.hadoop.util.StringUtils" + import="org.apache.hadoop.hbase.util.Strings" import="org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription" %> <%@ page import="org.apache.hadoop.hbase.util.PrettyPrinter" %> @@ -99,18 +99,18 @@ - - - + + + <% } %>

     <%= snapshots.size() %> snapshot(s) in set.

-    Total Storefile Size: <%= StringUtils.humanReadableInt(totalSize) %>
-    Total Shared Storefile Size: <%= StringUtils.humanReadableInt(totalSharedSize.get()) %>,
-      Total Mob Storefile Size: <%= StringUtils.humanReadableInt(totalMobSize.get()) %>,
-      Total Archived Storefile Size: <%= StringUtils.humanReadableInt(totalArchivedSize.get()) %>
-      (<%= StringUtils.humanReadableInt(totalUnsharedArchivedSize) %>)
+    Total Storefile Size: <%= Strings.humanReadableInt(totalSize) %>
+    Total Shared Storefile Size: <%= Strings.humanReadableInt(totalSharedSize.get()) %>,
+      Total Mob Storefile Size: <%= Strings.humanReadableInt(totalMobSize.get()) %>,
+      Total Archived Storefile Size: <%= Strings.humanReadableInt(totalArchivedSize.get()) %>
+      (<%= Strings.humanReadableInt(totalUnsharedArchivedSize) %>)

Shared Storefile Size is the Storefile size shared between snapshots and active tables. Mob Storefile Size is the Mob Storefile size shared between snapshots and active tables. Archived Storefile Size is the Storefile size in Archive. diff --git a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp index c4c0e6204504..9d1bf2039610 100644 --- a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp +++ b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp @@ -71,6 +71,7 @@ <%@ page import="org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas" %> <%@ page import="org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota" %> <%@ page import="java.util.stream.Collectors" %> +<%@ page import="java.nio.charset.StandardCharsets" %> <%! /** * @return An empty region load stamped with the passed in regionInfo @@ -110,7 +111,7 @@ * @return an

- diff --git a/hbase-server/src/main/resources/hbase-webapps/master/tablesDetailed.jsp b/hbase-server/src/main/resources/hbase-webapps/master/tablesDetailed.jsp index 7e0d623e6867..b2f3938f56ff 100644 --- a/hbase-server/src/main/resources/hbase-webapps/master/tablesDetailed.jsp +++ b/hbase-server/src/main/resources/hbase-webapps/master/tablesDetailed.jsp @@ -19,10 +19,8 @@ --%> <%@ page contentType="text/html;charset=UTF-8" import="static org.apache.commons.lang3.StringEscapeUtils.escapeXml" - import="java.io.IOException" import="java.util.ArrayList" import="java.util.List" - import="java.util.Map" %> <%@ page import="org.apache.hadoop.hbase.client.TableDescriptor" %> <%@ page import="org.apache.hadoop.hbase.master.HMaster" %> diff --git a/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp b/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp index c6084f74e904..a251d335b798 100644 --- a/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp +++ b/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp @@ -25,7 +25,6 @@ import="org.apache.hadoop.fs.FileSystem" import="org.apache.hadoop.fs.FileStatus" import="org.apache.hadoop.fs.Path" - import="org.apache.hadoop.hbase.HConstants" import="org.apache.hadoop.hbase.client.RegionInfo" import="org.apache.hadoop.hbase.client.RegionInfoDisplay" import="org.apache.hadoop.hbase.mob.MobUtils" @@ -35,6 +34,7 @@ import="org.apache.hadoop.hbase.regionserver.HRegion" import="org.apache.hadoop.hbase.regionserver.HStore" %> +<%@ page import="java.nio.charset.StandardCharsets" %> <% String regionName = request.getParameter("name"); HRegionServer rs = (HRegionServer) getServletContext().getAttribute(HRegionServer.REGIONSERVER); @@ -95,7 +95,7 @@ count ++; %> - + @@ -130,7 +130,7 @@ mobCnt ++; FileStatus status = rs.getFileSystem().getFileStatus(mobPath); String mobPathStr = mobPath.toString(); - String encodedStr = URLEncoder.encode(mobPathStr, HConstants.UTF8_ENCODING); %> + String encodedStr = URLEncoder.encode(mobPathStr, StandardCharsets.UTF_8); %> diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java index dd09a82ad806..3647a4e47ad6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java @@ -73,6 +73,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.Pair; +import org.apache.hadoop.hbase.util.Strings; import org.apache.hadoop.hbase.wal.AbstractFSWALProvider; import org.apache.hadoop.hbase.wal.NoEOFWALStreamReader; import org.apache.hadoop.hbase.wal.WAL; @@ -81,7 +82,6 @@ import org.apache.hadoop.hbase.wal.WALKeyImpl; import org.apache.hadoop.hbase.wal.WALSplitUtil.MutationReplay; import org.apache.hadoop.hbase.wal.WALStreamReader; -import org.apache.hadoop.util.StringUtils; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -390,7 +390,7 @@ public void testReplayFlushesAndCompactions() throws IOException { // assert that the store memstore is smaller now long newStoreMemstoreSize = store.getMemStoreSize().getHeapSize(); LOG.info("Memstore size reduced by:" - + StringUtils.humanReadableInt(newStoreMemstoreSize - storeMemstoreSize)); + + 
Strings.humanReadableInt(newStoreMemstoreSize - storeMemstoreSize)); assertTrue(storeMemstoreSize > newStoreMemstoreSize); } else if (flushDesc.getAction() == FlushAction.COMMIT_FLUSH) { @@ -490,7 +490,7 @@ public void testReplayFlushStartMarkers() throws IOException { // assert that the store memstore is smaller now long newStoreMemstoreSize = store.getMemStoreSize().getHeapSize(); LOG.info("Memstore size reduced by:" - + StringUtils.humanReadableInt(newStoreMemstoreSize - storeMemstoreSize)); + + Strings.humanReadableInt(newStoreMemstoreSize - storeMemstoreSize)); assertTrue(storeMemstoreSize > newStoreMemstoreSize); verifyData(secondaryRegion, 0, lastReplayed + 1, cq, families); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java index 1638331c2880..9bc2f6a5773b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java @@ -27,7 +27,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.regionserver.HStoreFile; import org.apache.hadoop.hbase.regionserver.StoreFileReader; -import org.apache.hadoop.util.StringUtils; +import org.apache.hadoop.hbase.util.Strings; import org.apache.hbase.thirdparty.com.google.common.base.MoreObjects; @@ -71,7 +71,7 @@ protected HStoreFile createMockStoreFile(final long sizeInBytes, final long seqI // this when selection takes this into account when(mockSf.getReader()).thenReturn(reader); String toString = MoreObjects.toStringHelper("MockStoreFile").add("isReference", false) - .add("fileSize", StringUtils.humanReadableInt(sizeInBytes)).add("seqId", seqId) + .add("fileSize", Strings.humanReadableInt(sizeInBytes)).add("seqId", seqId) .add("path", stringPath).toString(); when(mockSf.toString()).thenReturn(toString);
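For reference, a minimal sketch of the URLEncoder migration applied in the JSP and template changes above, assuming Java 10+ (where the Charset overload was introduced); the host name is a made-up example.

    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;

    public class UrlEncodeDemo {
      public static void main(String[] args) {
        String hostName = "rs1.example.com"; // hypothetical value
        // Deprecated: URLEncoder.encode(hostName) -- uses the platform default encoding.
        // Replacement: explicit UTF-8; unlike the encode(String, String) overload,
        // the Charset overload cannot throw UnsupportedEncodingException.
        String encoded = URLEncoder.encode(hostName, StandardCharsets.UTF_8);
        System.out.println(encoded);
      }
    }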
   <%= SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDesc.getTtl(), snapshotDesc.getCreationTime(), System.currentTimeMillis()) ? "Yes" : "No" %>
-  <%= StringUtils.humanReadableInt(stats.getSharedStoreFilesSize()) %>
-  <%= StringUtils.humanReadableInt(stats.getMobStoreFilesSize()) %>
-  <%= StringUtils.humanReadableInt(stats.getArchivedStoreFileSize()) %>
-    (<%= StringUtils.humanReadableInt(stats.getNonSharedArchivedStoreFilesSize()) %>)
+  <%= Strings.humanReadableInt(stats.getSharedStoreFilesSize()) %>
+  <%= Strings.humanReadableInt(stats.getMobStoreFilesSize()) %>
+  <%= Strings.humanReadableInt(stats.getArchivedStoreFileSize()) %>
+    (<%= Strings.humanReadableInt(stats.getNonSharedArchivedStoreFilesSize()) %>)
tag contents server name links to server rs-status page. */ private static String buildRegionDeployedServerTag(RegionInfo regionInfo, HMaster master, - Map regionsToServer) { + Map regionsToServer) { ServerName serverName = regionsToServer.get(regionInfo); if (serverName == null) { @@ -118,7 +119,7 @@ } String hostName = serverName.getHostname(); - String hostNameEncoded = URLEncoder.encode(hostName); + String hostNameEncoded = URLEncoder.encode(hostName, StandardCharsets.UTF_8); // This port might be wrong if RS actually ended up using something else. int serverInfoPort = master.getRegionServerInfoPort(serverName); String urlRegionServer = "//" + hostNameEncoded + ":" + serverInfoPort + "/rs-status"; @@ -132,7 +133,7 @@ */ private static String moreRegionsToRender(int numRegionsRendered, int numRegions, String fqtn) { if (numRegions > numRegionsRendered) { - String allRegionsUrl = "?name=" + URLEncoder.encode(fqtn) + "&numRegions=all"; + String allRegionsUrl = "?name=" + URLEncoder.encode(fqtn, StandardCharsets.UTF_8) + "&numRegions=all"; return "This table has " + numRegions + " regions in total, in order to improve the page load time, only " @@ -345,7 +346,7 @@ if (metaLocation != null) { ServerMetrics sl = master.getServerManager().getLoad(metaLocation); // The host name portion should be safe, but I don't know how we handle IDNs so err on the side of failing safely. - hostAndPort = URLEncoder.encode(metaLocation.getHostname()) + ":" + master.getRegionServerInfoPort(metaLocation); + hostAndPort = URLEncoder.encode(metaLocation.getHostname(), StandardCharsets.UTF_8) + ":" + master.getRegionServerInfoPort(metaLocation); if (sl != null) { Map map = sl.getRegionMetrics(); if (map.containsKey(meta.getRegionName())) { @@ -415,7 +416,7 @@ if (metaLocation != null) { ServerMetrics sl = master.getServerManager().getLoad(metaLocation); - hostAndPort = URLEncoder.encode(metaLocation.getHostname()) + ":" + master.getRegionServerInfoPort(metaLocation); + hostAndPort = URLEncoder.encode(metaLocation.getHostname(), StandardCharsets.UTF_8) + ":" + master.getRegionServerInfoPort(metaLocation); if (sl != null) { Map map = sl.getRegionMetrics(); if (map.containsKey(meta.getRegionName())) { @@ -468,7 +469,7 @@ if (metaLocation != null) { ServerMetrics sl = master.getServerManager().getLoad(metaLocation); - hostAndPort = URLEncoder.encode(metaLocation.getHostname()) + ":" + master.getRegionServerInfoPort(metaLocation); + hostAndPort = URLEncoder.encode(metaLocation.getHostname(), StandardCharsets.UTF_8) + ":" + master.getRegionServerInfoPort(metaLocation); if (sl != null) { Map map = sl.getRegionMetrics(); if (map.containsKey(meta.getRegionName())) { @@ -1054,11 +1055,9 @@ numRegionsRendered = 0; for (Map.Entry hriEntry : entryList) { RegionInfo regionInfo = hriEntry.getKey(); - ServerName addr = regionsToServer.get(regionInfo); RegionMetrics load = hriEntry.getValue(); float locality = 0.0f; float localityForSsd = 0.0f; - String state = "N/A"; if (load != null) { locality = load.getDataLocality(); localityForSsd = load.getDataLocalityForSsd(); @@ -1146,11 +1145,11 @@ <% for (Map.Entry rdEntry : regDistribution.entrySet()) { ServerName addr = rdEntry.getKey(); - String url = "//" + URLEncoder.encode(addr.getHostname()) + ":" + String url = "//" + URLEncoder.encode(addr.getHostname(), StandardCharsets.UTF_8) + ":" + master.getRegionServerInfoPort(addr) + "/rs-status"; %>
-  <%= StringEscapeUtils.escapeHtml4(addr.getHostname().toString())
+  <%= StringEscapeUtils.escapeHtml4(addr.getHostname())
     + ":" + master.getRegionServerInfoPort(addr) %>
   <%= rdEntry.getValue()%>
   <%= primaryRegDistribution.get(addr) == null ? 0 : primaryRegDistribution.get(addr)%>
   <%= sf.getPath() %>
-  <%= (int) (fs.getLength(sf.getPath()) / 1024 / 1024) %>
+  <%= (int) (fs.getFileStatus(sf.getPath()).getLen() / 1024 / 1024) %>
   <%= new Date(sf.getModificationTimestamp()) %>
   <%= String.format("%,1d", sf.getFileInfo().getHFileInfo().getLenOfBiggestCell()) %>
   <%= sf.getFileInfo().getHFileInfo().getKeyOfBiggestCell() %>
<%= mobPathStr%>
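Similarly, a sketch of the remaining two replacements (FileSystem.getLength and ServerName.getStartcode); the path and server name below are hypothetical.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.ServerName;

    public class DeprecatedCallsDemo {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path sfPath = new Path("/hbase/data/default/t/r/cf/storefile"); // hypothetical
        // Deprecated: fs.getLength(sfPath)
        long len = fs.getFileStatus(sfPath).getLen();

        ServerName sn = ServerName.valueOf("rs1.example.com", 16020, 1736300000000L); // hypothetical
        // Deprecated: sn.getStartcode()
        long startCode = sn.getStartCode();

        System.out.println(len + " bytes, server started at " + startCode);
      }
    }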