From 4ef791eba65043fee6fbe975ea15f3ac4482e003 Mon Sep 17 00:00:00 2001 From: Jan Hentschel Date: Thu, 2 Jan 2020 00:39:54 +0100 Subject: [PATCH 1/3] HBASE-23623 Reduced the number of Checkstyle violations in hbase-rest --- .../hbase/rest/PerformanceEvaluation.java | 268 ++++++++---------- .../hbase/rest/TestGetAndPutResource.java | 40 ++- .../rest/TestNamespacesInstanceResource.java | 44 +-- .../hbase/rest/TestNamespacesResource.java | 14 +- .../hbase/rest/TestScannersWithLabels.java | 11 +- .../hadoop/hbase/rest/TestSchemaResource.java | 44 ++- .../hadoop/hbase/rest/TestTableScan.java | 61 ++-- .../rest/client/TestRemoteHTableRetries.java | 27 +- .../hbase/rest/client/TestRemoteTable.java | 71 +++-- .../hbase/rest/model/TestScannerModel.java | 5 +- .../model/TestStorageClusterStatusModel.java | 9 +- 11 files changed, 276 insertions(+), 318 deletions(-) diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java index 9a3ec2481dd6..710ed2016b64 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java @@ -18,6 +18,22 @@ */ package org.apache.hadoop.hbase.rest; +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.io.PrintStream; +import java.lang.reflect.Constructor; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.TreeMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -76,22 +92,6 @@ import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; -import java.io.PrintStream; -import java.lang.reflect.Constructor; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.TreeMap; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - /** * Script used evaluating Stargate performance and scalability. 
Runs a SG * client that steps through one of a set of hardcoded tests or 'experiments' @@ -118,12 +118,12 @@ public class PerformanceEvaluation extends Configured implements Tool { private static final int ROWS_PER_GB = ONE_GB / ROW_LENGTH; public static final TableName TABLE_NAME = TableName.valueOf("TestTable"); - public static final byte [] FAMILY_NAME = Bytes.toBytes("info"); - public static final byte [] QUALIFIER_NAME = Bytes.toBytes("data"); + public static final byte[] FAMILY_NAME = Bytes.toBytes("info"); + public static final byte[] QUALIFIER_NAME = Bytes.toBytes("data"); private TableName tableName = TABLE_NAME; protected HTableDescriptor TABLE_DESCRIPTOR; - protected Map commands = new TreeMap(); + protected Map commands = new TreeMap<>(); protected static Cluster cluster = new Cluster(); volatile Configuration conf; @@ -141,6 +141,7 @@ public class PerformanceEvaluation extends Configured implements Tool { private Connection connection; private static final Path PERF_EVAL_DIR = new Path("performance_evaluation"); + /** * Regex to parse lines in input file passed to mapreduce task. */ @@ -159,11 +160,12 @@ public class PerformanceEvaluation extends Configured implements Tool { * Enum for map metrics. Keep it out here rather than inside in the Map * inner-class so we can find associated properties. */ - protected static enum Counter { + protected enum Counter { /** elapsed time */ ELAPSED_TIME, /** number of rows */ - ROWS} + ROWS + } /** * Constructor @@ -211,30 +213,27 @@ interface Status { /** * Sets status * @param msg status message - * @throws IOException + * @throws IOException if setting the status fails */ void setStatus(final String msg) throws IOException; } /** * This class works as the InputSplit of Performance Evaluation - * MapReduce InputFormat, and the Record Value of RecordReader. - * Each map task will only read one record from a PeInputSplit, + * MapReduce InputFormat, and the Record Value of RecordReader. + * Each map task will only read one record from a PeInputSplit, * the record value is the PeInputSplit itself. */ public static class PeInputSplit extends InputSplit implements Writable { - private TableName tableName = TABLE_NAME; - private int startRow = 0; - private int rows = 0; - private int totalRows = 0; - private int clients = 0; - private boolean flushCommits = false; - private boolean writeToWAL = true; - private boolean useTags = false; - private int noOfTags = 0; - - public PeInputSplit() { - } + private TableName tableName; + private int startRow; + private int rows; + private int totalRows; + private int clients; + private boolean flushCommits; + private boolean writeToWAL; + private boolean useTags; + private int noOfTags; public PeInputSplit(TableName tableName, int startRow, int rows, int totalRows, int clients, boolean flushCommits, boolean writeToWAL, boolean useTags, int noOfTags) { @@ -281,12 +280,12 @@ public void write(DataOutput out) throws IOException { } @Override - public long getLength() throws IOException, InterruptedException { + public long getLength() { return 0; } @Override - public String[] getLocations() throws IOException, InterruptedException { + public String[] getLocations() { return new String[0]; } @@ -306,10 +305,6 @@ public int getTotalRows() { return totalRows; } - public int getClients() { - return clients; - } - public boolean isFlushCommits() { return flushCommits; } @@ -332,11 +327,10 @@ public int getNoOfTags() { * It extends from FileInputFormat, want to use it's methods such as setInputPaths(). 
*/ public static class PeInputFormat extends FileInputFormat { - @Override public List getSplits(JobContext job) throws IOException { // generate splits - List splitList = new ArrayList(); + List splitList = new ArrayList<>(); for (FileStatus file: listStatus(job)) { if (file.isDirectory()) { @@ -346,15 +340,15 @@ public List getSplits(JobContext job) throws IOException { FileSystem fs = path.getFileSystem(job.getConfiguration()); FSDataInputStream fileIn = fs.open(path); LineReader in = new LineReader(fileIn, job.getConfiguration()); - int lineLen = 0; + int lineLen; while(true) { Text lineText = new Text(); lineLen = in.readLine(lineText); if(lineLen <= 0) { - break; + break; } Matcher m = LINE_PATTERN.matcher(lineText.toString()); - if((m != null) && m.matches()) { + if ((m != null) && m.matches()) { TableName tableName = TableName.valueOf(m.group(1)); int startRow = Integer.parseInt(m.group(2)); int rows = Integer.parseInt(m.group(3)); @@ -391,7 +385,7 @@ public List getSplits(JobContext job) throws IOException { @Override public RecordReader createRecordReader(InputSplit split, - TaskAttemptContext context) { + TaskAttemptContext context) { return new PeRecordReader(); } @@ -402,37 +396,36 @@ public static class PeRecordReader extends RecordReader Class forName(String className, Class type) { - Class clazz = null; + Class clazz; try { clazz = Class.forName(className).asSubclass(type); } catch (ClassNotFoundException e) { @@ -489,7 +482,7 @@ private Class forName(String className, Class type) @Override protected void map(NullWritable key, PeInputSplit value, final Context context) - throws IOException, InterruptedException { + throws IOException, InterruptedException { Status status = new Status() { @Override public void setStatus(String msg) { @@ -513,11 +506,11 @@ public void setStatus(String msg) { } } - /* + /** * If table does not already exist, create. - * @param c Client to use checking. + * @param admin Client to use checking. * @return True if we created the table. - * @throws IOException + * @throws IOException if an operation on the table fails */ private boolean checkTable(RemoteAdmin admin) throws IOException { HTableDescriptor tableDescriptor = getTableDescriptor(); @@ -532,7 +525,7 @@ private boolean checkTable(RemoteAdmin admin) throws IOException { LOG.debug(" split " + i + ": " + Bytes.toStringBinary(splits[i])); } admin.createTable(tableDescriptor); - LOG.info ("Table created with " + this.presplitRegions + " splits"); + LOG.info("Table created with " + this.presplitRegions + " splits"); } else { boolean tableExists = admin.isTableAvailable(tableDescriptor.getTableName().getName()); if (!tableExists) { @@ -540,8 +533,8 @@ private boolean checkTable(RemoteAdmin admin) throws IOException { LOG.info("Table " + tableDescriptor + " created"); } } - boolean tableExists = admin.isTableAvailable(tableDescriptor.getTableName().getName()); - return tableExists; + + return admin.isTableAvailable(tableDescriptor.getTableName().getName()); } protected HTableDescriptor getTableDescriptor() { @@ -564,8 +557,9 @@ protected HTableDescriptor getTableDescriptor() { * @return splits : array of byte [] */ protected byte[][] getSplits() { - if (this.presplitRegions == 0) - return new byte [0][]; + if (this.presplitRegions == 0) { + return new byte[0][]; + } int numSplitPoints = presplitRegions - 1; byte[][] splits = new byte[numSplitPoints][]; @@ -577,14 +571,13 @@ protected byte[][] getSplits() { return splits; } - /* + /** * We're to run multiple clients concurrently. 
Setup a mapreduce job. Run * one map per client. Then run a single reduce to sum the elapsed times. * @param cmd Command to run. - * @throws IOException */ private void runNIsMoreThanOne(final Class cmd) - throws IOException, InterruptedException, ClassNotFoundException { + throws IOException, InterruptedException, ClassNotFoundException { RemoteAdmin remoteAdmin = new RemoteAdmin(new Client(cluster), getConf()); checkTable(remoteAdmin); if (nomapred) { @@ -594,13 +587,13 @@ private void runNIsMoreThanOne(final Class cmd) } } - /* + /** * Run all clients in this vm each to its own thread. - * @param cmd Command to run. - * @throws IOException + * @param cmd Command to run + * @throws IOException if creating a connection fails */ private void doMultipleClients(final Class cmd) throws IOException { - final List threads = new ArrayList(this.N); + final List threads = new ArrayList<>(this.N); final long[] timings = new long[this.N]; final int perClientRows = R/N; final TableName tableName = this.tableName; @@ -614,7 +607,7 @@ private void doMultipleClients(final Class cmd) throws IOExcepti final Connection connection = ConnectionFactory.createConnection(getConf()); for (int i = 0; i < this.N; i++) { final int index = i; - Thread t = new Thread ("TestClient-" + i) { + Thread t = new Thread("TestClient-" + i) { @Override public void run() { super.run(); @@ -634,7 +627,7 @@ public void run() { perClientRows, R, flushCommits, writeToWAL, useTags, noOfTags, connection, new Status() { @Override - public void setStatus(final String msg) throws IOException { + public void setStatus(final String msg) { LOG.info("client-" + getName() + " " + msg); } }); @@ -674,15 +667,14 @@ public void setStatus(final String msg) throws IOException { + "\tAvg: " + (total / this.N) + "ms"); } - /* + /** * Run a mapreduce job. Run as many maps as asked-for clients. * Before we start up the job, write out an input file with instruction * per client regards which row they are to start on. * @param cmd Command to run. - * @throws IOException */ - private void doMapReduce(final Class cmd) throws IOException, - InterruptedException, ClassNotFoundException { + private void doMapReduce(final Class cmd) + throws IOException, InterruptedException, ClassNotFoundException { Configuration conf = getConf(); Path inputDir = writeInputFile(conf); conf.set(EvaluationMapTask.CMD_KEY, cmd.getName()); @@ -708,11 +700,11 @@ private void doMapReduce(final Class cmd) throws IOException, job.waitForCompletion(true); } - /* + /** * Write input file of offsets-per-client for the mapreduce job. * @param c Configuration * @return Directory that contains file written. - * @throws IOException + * @throws IOException if creating the directory or the file fails */ private Path writeInputFile(final Configuration c) throws IOException { SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMddHHmmss"); @@ -724,21 +716,21 @@ private Path writeInputFile(final Configuration c) throws IOException { Path inputFile = new Path(inputDir, "input.txt"); PrintStream out = new PrintStream(fs.create(inputFile)); // Make input random. 
- Map m = new TreeMap(); + Map m = new TreeMap<>(); Hash h = MurmurHash.getInstance(); int perClientRows = (this.R / this.N); try { for (int i = 0; i < 10; i++) { for (int j = 0; j < N; j++) { String s = "tableName=" + this.tableName + - ", startRow=" + ((j * perClientRows) + (i * (perClientRows/10))) + - ", perClientRunRows=" + (perClientRows / 10) + - ", totalRows=" + this.R + - ", clients=" + this.N + - ", flushCommits=" + this.flushCommits + - ", writeToWAL=" + this.writeToWAL + - ", useTags=" + this.useTags + - ", noOfTags=" + this.noOfTags; + ", startRow=" + ((j * perClientRows) + (i * (perClientRows/10))) + + ", perClientRunRows=" + (perClientRows / 10) + + ", totalRows=" + this.R + + ", clients=" + this.N + + ", flushCommits=" + this.flushCommits + + ", writeToWAL=" + this.writeToWAL + + ", useTags=" + this.useTags + + ", noOfTags=" + this.noOfTags; int hash = h.hash(Bytes.toBytes(s)); m.put(hash, s); } @@ -780,31 +772,26 @@ public String getDescription() { } /** - * Wraps up options passed to {@link org.apache.hadoop.hbase.PerformanceEvaluation.Test - * tests}. This makes the reflection logic a little easier to understand... + * Wraps up options passed to {@link org.apache.hadoop.hbase.PerformanceEvaluation} tests + * This makes the reflection logic a little easier to understand... */ static class TestOptions { private int startRow; private int perClientRunRows; private int totalRows; - private int numClientThreads; private TableName tableName; private boolean flushCommits; - private boolean writeToWAL = true; - private boolean useTags = false; - private int noOfTags = 0; + private boolean writeToWAL; + private boolean useTags; + private int noOfTags; private Connection connection; - TestOptions() { - } - - TestOptions(int startRow, int perClientRunRows, int totalRows, int numClientThreads, - TableName tableName, boolean flushCommits, boolean writeToWAL, boolean useTags, + TestOptions(int startRow, int perClientRunRows, int totalRows, TableName tableName, + boolean flushCommits, boolean writeToWAL, boolean useTags, int noOfTags, Connection connection) { this.startRow = startRow; this.perClientRunRows = perClientRunRows; this.totalRows = totalRows; - this.numClientThreads = numClientThreads; this.tableName = tableName; this.flushCommits = flushCommits; this.writeToWAL = writeToWAL; @@ -825,10 +812,6 @@ public int getTotalRows() { return totalRows; } - public int getNumClientThreads() { - return numClientThreads; - } - public TableName getTableName() { return tableName; } @@ -907,10 +890,11 @@ protected int getReportingPeriod() { } abstract void testTakedown() throws IOException; - /* + + /** * Run test * @return Elapsed time. - * @throws IOException + * @throws IOException if something in the test fails */ long test() throws IOException { testSetup(); @@ -940,16 +924,16 @@ void testTimed() throws IOException { } } - /* - * Test for individual row. - * @param i Row index. - */ + /** + * Test for individual row. + * @param i Row index. + */ abstract void testRow(final int i) throws IOException; } static abstract class TableTest extends Test { protected Table table; - + public TableTest(Configuration conf, TestOptions options, Status status) { super(conf, options, status); } @@ -1007,7 +991,6 @@ protected int getReportingPeriod() { int period = this.perClientRunRows / 100; return period == 0? 
this.perClientRunRows: period; } - } @SuppressWarnings("unused") @@ -1036,12 +1019,12 @@ void testRow(final int i) throws IOException { s.close(); } - protected abstract Pair getStartAndStopRow(); + protected abstract Pair getStartAndStopRow(); protected Pair generateStartAndStopRows(int maxRange) { int start = this.rand.nextInt(Integer.MAX_VALUE) % totalRows; int stop = start + maxRange; - return new Pair(format(start), format(stop)); + return new Pair<>(format(start), format(stop)); } @Override @@ -1112,7 +1095,6 @@ protected int getReportingPeriod() { int period = this.perClientRunRows / 100; return period == 0? this.perClientRunRows: period; } - } static class RandomWriteTest extends BufferedMutatorTest { @@ -1158,7 +1140,6 @@ void testTakedown() throws IOException { super.testTakedown(); } - @Override void testRow(final int i) throws IOException { if (this.testScanner == null) { @@ -1168,7 +1149,6 @@ void testRow(final int i) throws IOException { } testScanner.next(); } - } static class SequentialReadTest extends TableTest { @@ -1182,11 +1162,9 @@ void testRow(final int i) throws IOException { get.addColumn(FAMILY_NAME, QUALIFIER_NAME); table.get(get); } - } static class SequentialWriteTest extends BufferedMutatorTest { - SequentialWriteTest(Configuration conf, TestOptions options, Status status) { super(conf, options, status); } @@ -1231,11 +1209,13 @@ void testRow(int i) throws IOException { while (scanner.next() != null) { } } finally { - if (scanner != null) scanner.close(); + if (scanner != null) { + scanner.close(); + } } } - protected Scan constructScan(byte[] valuePrefix) throws IOException { + protected Scan constructScan(byte[] valuePrefix) { Filter filter = new SingleColumnValueFilter( FAMILY_NAME, QUALIFIER_NAME, CompareFilter.CompareOp.EQUAL, new BinaryComparator(valuePrefix) @@ -1247,14 +1227,14 @@ protected Scan constructScan(byte[] valuePrefix) throws IOException { } } - /* + /** * Format passed integer. - * @param number - * @return Returns zero-prefixed 10-byte wide decimal version of passed - * number (Does absolute in case number is negative). + * @param number the integer to format + * @return Returns zero-prefixed 10-byte wide decimal version of passed number (Does absolute in + * case number is negative). 
*/ public static byte [] format(final int number) { - byte [] b = new byte[DEFAULT_ROW_PREFIX_LENGTH + 10]; + byte[] b = new byte[DEFAULT_ROW_PREFIX_LENGTH + 10]; int d = Math.abs(number); for (int i = b.length - 1; i >= 0; i--) { b[i] = (byte)((d % 10) + '0'); @@ -1264,10 +1244,10 @@ protected Scan constructScan(byte[] valuePrefix) throws IOException { } public static byte[] generateData(final Random r, int length) { - byte [] b = new byte [length]; - int i = 0; + byte[] b = new byte [length]; + int i; - for(i = 0; i < (length-8); i += 8) { + for (i = 0; i < (length-8); i += 8) { b[i] = (byte) (65 + r.nextInt(26)); b[i+1] = b[i]; b[i+2] = b[i]; @@ -1279,7 +1259,7 @@ public static byte[] generateData(final Random r, int length) { } byte a = (byte) (65 + r.nextInt(26)); - for(; i < length; i++) { + for (; i < length; i++) { b[i] = a; } return b; @@ -1291,21 +1271,20 @@ public static byte[] generateValue(final Random r) { return b; } - static byte [] getRandomRow(final Random random, final int totalRows) { + static byte[] getRandomRow(final Random random, final int totalRows) { return format(random.nextInt(Integer.MAX_VALUE) % totalRows); } long runOneClient(final Class cmd, final int startRow, final int perClientRunRows, final int totalRows, boolean flushCommits, boolean writeToWAL, boolean useTags, int noOfTags, - Connection connection, final Status status) - throws IOException { + Connection connection, final Status status) throws IOException { status.setStatus("Start " + cmd + " at offset " + startRow + " for " + perClientRunRows + " rows"); - long totalElapsedTime = 0; + long totalElapsedTime; TestOptions options = new TestOptions(startRow, perClientRunRows, - totalRows, N, tableName, flushCommits, writeToWAL, useTags, noOfTags, connection); + totalRows, tableName, flushCommits, writeToWAL, useTags, noOfTags, connection); final Test t; try { Constructor constructor = cmd.getDeclaredConstructor( @@ -1329,12 +1308,12 @@ long runOneClient(final Class cmd, final int startRow, private void runNIsOne(final Class cmd) { Status status = new Status() { @Override - public void setStatus(String msg) throws IOException { + public void setStatus(String msg) { LOG.info(msg); } }; - RemoteAdmin admin = null; + RemoteAdmin admin; try { Client client = new Client(cluster); admin = new RemoteAdmin(client, getConf()); @@ -1346,8 +1325,8 @@ public void setStatus(String msg) throws IOException { } } - private void runTest(final Class cmd) throws IOException, - InterruptedException, ClassNotFoundException { + private void runTest(final Class cmd) + throws IOException, InterruptedException, ClassNotFoundException { if (N == 1) { // If there is only one client and one HRegionServer, we assume nothing // has been set up at all. @@ -1410,7 +1389,7 @@ protected void printUsage(final String message) { } private void getArgs(final int start, final String[] args) { - if(start + 1 > args.length) { + if (start + 1 > args.length) { throw new IllegalArgumentException("must supply the number of clients"); } N = Integer.parseInt(args[start]); @@ -1545,9 +1524,6 @@ private Class determineCommandClass(String cmd) { return descriptor != null ? 
descriptor.getCmdClass() : null; } - /** - * @param args - */ public static void main(final String[] args) throws Exception { int res = ToolRunner.run(new PerformanceEvaluation(HBaseConfiguration.create()), args); System.exit(res); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGetAndPutResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGetAndPutResource.java index ba0f9288dc4a..22523146c555 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGetAndPutResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGetAndPutResource.java @@ -15,7 +15,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.apache.hadoop.hbase.rest; import static org.junit.Assert.assertEquals; @@ -46,7 +45,6 @@ @Category(MediumTests.class) public class TestGetAndPutResource extends RowResourceBase { - private static final MetricsAssertHelper METRICS_ASSERT = CompatibilityFactory.getInstance(MetricsAssertHelper.class); @@ -110,7 +108,7 @@ public void testSingleCellGetPutXML() throws IOException, JAXBException { public void testSingleCellGetPutPB() throws IOException, JAXBException { Response response = getValuePB(TABLE, ROW_1, COLUMN_1); assertEquals(404, response.getCode()); - + response = putValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1); assertEquals(200, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1); @@ -130,7 +128,7 @@ public void testSingleCellGetPutPB() throws IOException, JAXBException { } @Test - public void testMultipleCellCheckPutPB() throws IOException, JAXBException { + public void testMultipleCellCheckPutPB() throws IOException { Response response = getValuePB(TABLE, ROW_1, COLUMN_1); assertEquals(404, response.getCode()); @@ -143,7 +141,7 @@ public void testMultipleCellCheckPutPB() throws IOException, JAXBException { assertEquals(200, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_2, VALUE_2); - HashMap otherCells = new HashMap(); + HashMap otherCells = new HashMap<>(); otherCells.put(COLUMN_2,VALUE_3); // On Success update both the cells @@ -176,7 +174,7 @@ public void testMultipleCellCheckPutXML() throws IOException, JAXBException { assertEquals(200, response.getCode()); checkValueXML(TABLE, ROW_1, COLUMN_2, VALUE_2); - HashMap otherCells = new HashMap(); + HashMap otherCells = new HashMap<>(); otherCells.put(COLUMN_2,VALUE_3); // On Success update both the cells @@ -196,7 +194,7 @@ public void testMultipleCellCheckPutXML() throws IOException, JAXBException { } @Test - public void testMultipleCellCheckDeletePB() throws IOException, JAXBException { + public void testMultipleCellCheckDeletePB() throws IOException { Response response = getValuePB(TABLE, ROW_1, COLUMN_1); assertEquals(404, response.getCode()); @@ -214,7 +212,7 @@ public void testMultipleCellCheckDeletePB() throws IOException, JAXBException { checkValuePB(TABLE, ROW_1, COLUMN_3, VALUE_3); // Deletes the following columns based on Column1 check - HashMap cellsToDelete = new HashMap(); + HashMap cellsToDelete = new HashMap<>(); cellsToDelete.put(COLUMN_2,VALUE_2); // Value does not matter cellsToDelete.put(COLUMN_3,VALUE_3); // Value does not matter @@ -249,7 +247,7 @@ public void testMultipleCellCheckDeletePB() throws IOException, JAXBException { assertEquals(200, response.getCode()); } - @Test + @Test public void testSingleCellGetPutBinary() throws IOException { final String path = "/" + TABLE + "/" + ROW_3 + "/" + COLUMN_1; final byte[] body = 
Bytes.toBytes(VALUE_3); @@ -275,7 +273,7 @@ public void testSingleCellGetPutBinary() throws IOException { } @Test - public void testSingleCellGetJSON() throws IOException, JAXBException { + public void testSingleCellGetJSON() throws IOException { final String path = "/" + TABLE + "/" + ROW_4 + "/" + COLUMN_1; Response response = client.put(path, Constants.MIMETYPE_BINARY, Bytes.toBytes(VALUE_4)); @@ -289,7 +287,7 @@ public void testSingleCellGetJSON() throws IOException, JAXBException { } @Test - public void testLatestCellGetJSON() throws IOException, JAXBException { + public void testLatestCellGetJSON() throws IOException { final String path = "/" + TABLE + "/" + ROW_4 + "/" + COLUMN_1; CellSetModel cellSetModel = new CellSetModel(); RowModel rowModel = new RowModel(ROW_4); @@ -336,7 +334,7 @@ public void testURLEncodedKey() throws IOException, JAXBException { } @Test - public void testNoSuchCF() throws IOException, JAXBException { + public void testNoSuchCF() throws IOException { final String goodPath = "/" + TABLE + "/" + ROW_1 + "/" + CFA+":"; final String badPath = "/" + TABLE + "/" + ROW_1 + "/" + "BAD"; Response response = client.post(goodPath, Constants.MIMETYPE_BINARY, @@ -524,9 +522,9 @@ public void testMultiCellGetJson() throws IOException, JAXBException { response = deleteRow(TABLE, ROW_2); assertEquals(200, response.getCode()); } - + @Test - public void testMetrics() throws IOException, JAXBException { + public void testMetrics() throws IOException { final String path = "/" + TABLE + "/" + ROW_4 + "/" + COLUMN_1; Response response = client.put(path, Constants.MIMETYPE_BINARY, Bytes.toBytes(VALUE_4)); @@ -539,19 +537,19 @@ public void testMetrics() throws IOException, JAXBException { assertEquals(200, response.getCode()); UserProvider userProvider = UserProvider.instantiate(conf); - METRICS_ASSERT.assertCounterGt("requests", 2l, + METRICS_ASSERT.assertCounterGt("requests", 2L, RESTServlet.getInstance(conf, userProvider).getMetrics().getSource()); - METRICS_ASSERT.assertCounterGt("successfulGet", 0l, + METRICS_ASSERT.assertCounterGt("successfulGet", 0L, RESTServlet.getInstance(conf, userProvider).getMetrics().getSource()); - METRICS_ASSERT.assertCounterGt("successfulPut", 0l, + METRICS_ASSERT.assertCounterGt("successfulPut", 0L, RESTServlet.getInstance(conf, userProvider).getMetrics().getSource()); - METRICS_ASSERT.assertCounterGt("successfulDelete", 0l, + METRICS_ASSERT.assertCounterGt("successfulDelete", 0L, RESTServlet.getInstance(conf, userProvider).getMetrics().getSource()); } - + @Test public void testMultiColumnGetXML() throws Exception { String path = "/" + TABLE + "/fakerow"; @@ -575,8 +573,8 @@ public void testMultiColumnGetXML() throws Exception { path = "/" + TABLE + "/" + ROW_1 + "/" + COLUMN_1 + "," + COLUMN_2 + "," + COLUMN_3; response = client.get(path, Constants.MIMETYPE_XML); assertEquals(200, response.getCode()); - CellSetModel cellSet = (CellSetModel) xmlUnmarshaller.unmarshal(new ByteArrayInputStream(response - .getBody())); + CellSetModel cellSet = + (CellSetModel) xmlUnmarshaller.unmarshal(new ByteArrayInputStream(response.getBody())); assertTrue(cellSet.getRows().size() == 1); assertTrue(cellSet.getRows().get(0).getCells().size() == 3); List cells = cellSet.getRows().get(0).getCells(); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java index cf7d3a312366..6c20457ae445 100644 --- 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java @@ -1,5 +1,4 @@ -/* - * +/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -18,6 +17,11 @@ */ package org.apache.hadoop.hbase.rest; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + import com.fasterxml.jackson.databind.ObjectMapper; import java.io.ByteArrayInputStream; @@ -51,8 +55,6 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import static org.junit.Assert.*; - import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -61,13 +63,13 @@ @Category(MediumTests.class) public class TestNamespacesInstanceResource { private static String NAMESPACE1 = "TestNamespacesInstanceResource1"; - private static Map NAMESPACE1_PROPS = new HashMap(); + private static Map NAMESPACE1_PROPS = new HashMap<>(); private static String NAMESPACE2 = "TestNamespacesInstanceResource2"; - private static Map NAMESPACE2_PROPS = new HashMap(); + private static Map NAMESPACE2_PROPS = new HashMap<>(); private static String NAMESPACE3 = "TestNamespacesInstanceResource3"; - private static Map NAMESPACE3_PROPS = new HashMap(); + private static Map NAMESPACE3_PROPS = new HashMap<>(); private static String NAMESPACE4 = "TestNamespacesInstanceResource4"; - private static Map NAMESPACE4_PROPS = new HashMap(); + private static Map NAMESPACE4_PROPS = new HashMap<>(); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final HBaseRESTTestingUtility REST_TEST_UTIL = @@ -87,8 +89,8 @@ public static void setUpBeforeClass() throws Exception { REST_TEST_UTIL.getServletPort())); testNamespacesInstanceModel = new TestNamespacesInstanceModel(); context = JAXBContext.newInstance(NamespacesInstanceModel.class, TableListModel.class); - jsonMapper = new JacksonProvider() - .locateMapper(NamespacesInstanceModel.class, MediaType.APPLICATION_JSON_TYPE); + jsonMapper = new JacksonProvider().locateMapper(NamespacesInstanceModel.class, + MediaType.APPLICATION_JSON_TYPE); NAMESPACE1_PROPS.put("key1", "value1"); NAMESPACE2_PROPS.put("key2a", "value2a"); NAMESPACE2_PROPS.put("key2b", "value2b"); @@ -117,9 +119,9 @@ private static T fromXML(byte[] content) private NamespaceDescriptor findNamespace(Admin admin, String namespaceName) throws IOException{ NamespaceDescriptor[] nd = admin.listNamespaceDescriptors(); - for(int i = 0; i < nd.length; i++){ - if(nd[i].getName().equals(namespaceName)){ - return nd[i]; + for (NamespaceDescriptor namespaceDescriptor : nd) { + if (namespaceDescriptor.getName().equals(namespaceName)) { + return namespaceDescriptor; } } return null; @@ -129,18 +131,18 @@ private void checkNamespaceProperties(NamespaceDescriptor nd, Map checkNamespaceProperties(nd.getConfiguration(), testProps); } - private void checkNamespaceProperties(Map namespaceProps, + private void checkNamespaceProperties(Map namespaceProps, Map testProps){ assertTrue(namespaceProps.size() == testProps.size()); - for(String key: testProps.keySet()){ + for (String key: testProps.keySet()) { assertEquals(testProps.get(key), namespaceProps.get(key)); } } private void 
checkNamespaceTables(List namespaceTables, List testTables){ assertEquals(namespaceTables.size(), testTables.size()); - for(int i = 0 ; i < namespaceTables.size() ; i++){ - String tableName = ((TableModel) namespaceTables.get(i)).getName(); + for (TableModel namespaceTable : namespaceTables) { + String tableName = namespaceTable.getName(); assertTrue(testTables.contains(tableName)); } } @@ -174,7 +176,7 @@ public void testGetNamespaceTablesAndCannotDeleteNamespace() throws IOException, // Create namespace via admin. NamespaceDescriptor.Builder nsBuilder = NamespaceDescriptor.create(nsName); - NamespaceDescriptor nsd = nsBuilder.build(); + NamespaceDescriptor nsd = nsBuilder.build(); nsd.setConfiguration("key1", "value1"); admin.createNamespace(nsd); @@ -189,7 +191,7 @@ public void testGetNamespaceTablesAndCannotDeleteNamespace() throws IOException, table.addFamily(colDesc); admin.createTable(table); - Map nsProperties = new HashMap(); + Map nsProperties = new HashMap<>(); nsProperties.put("key1", "value1"); List nsTables = Arrays.asList("table1", "table2"); @@ -357,7 +359,7 @@ public void testNamespaceCreateAndDeleteXMLAndJSON() throws IOException, JAXBExc } @Test - public void testNamespaceCreateAndDeletePBAndNoBody() throws IOException, JAXBException { + public void testNamespaceCreateAndDeletePBAndNoBody() throws IOException { String namespacePath3 = "/namespaces/" + NAMESPACE3; String namespacePath4 = "/namespaces/" + NAMESPACE4; NamespacesInstanceModel model3; @@ -412,7 +414,7 @@ public void testNamespaceCreateAndDeletePBAndNoBody() throws IOException, JAXBEx // Check cannot post tables that already exist. response = client.post(namespacePath3, Constants.MIMETYPE_BINARY, new byte[]{}); assertEquals(403, response.getCode()); - response = client.post(namespacePath4, Constants.MIMETYPE_PROTOBUF, + response = client.post(namespacePath4, Constants.MIMETYPE_PROTOBUF, model4.createProtobufOutput()); assertEquals(403, response.getCode()); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesResource.java index a70835fc067d..ea07314fe383 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesResource.java @@ -1,5 +1,4 @@ -/* - * +/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. 
See the NOTICE file * distributed with this work for additional information @@ -18,6 +17,9 @@ */ package org.apache.hadoop.hbase.rest; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + import java.io.ByteArrayInputStream; import java.io.IOException; @@ -36,8 +38,6 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import static org.junit.Assert.*; - import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -79,8 +79,8 @@ private static NamespacesModel fromXML(byte[] content) throws JAXBException { private boolean doesNamespaceExist(Admin admin, String namespaceName) throws IOException { NamespaceDescriptor[] nd = admin.listNamespaceDescriptors(); - for(int i = 0; i < nd.length; i++) { - if(nd[i].getName().equals(namespaceName)) { + for (NamespaceDescriptor namespaceDescriptor : nd) { + if (namespaceDescriptor.getName().equals(namespaceName)) { return true; } } @@ -152,7 +152,7 @@ public void testNamespaceListXMLandJSON() throws IOException, JAXBException { } @Test - public void testNamespaceListPBandDefault() throws IOException, JAXBException { + public void testNamespaceListPBandDefault() throws IOException { String schemaPath = "/namespaces/"; NamespacesModel model; Response response; diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java index 356889a7fb42..7ecae05c6400 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java @@ -1,4 +1,4 @@ -/* +/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -14,7 +14,8 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- */package org.apache.hadoop.hbase.rest; + */ +package org.apache.hadoop.hbase.rest; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; @@ -91,7 +92,8 @@ public class TestScannersWithLabels { private static Unmarshaller unmarshaller; private static Configuration conf; - private static int insertData(TableName tableName, String column, double prob) throws IOException { + private static int insertData(TableName tableName, String column, double prob) + throws IOException { byte[] k = new byte[3]; byte[][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(column)); @@ -177,6 +179,7 @@ public VisibilityLabelsResponse run() throws Exception { }; SUPERUSER.runAs(action); } + private static void setAuths() throws Exception { String[] labels = { SECRET, CONFIDENTIAL, PRIVATE, PUBLIC, TOPSECRET }; try (Connection conn = ConnectionFactory.createConnection(conf)) { @@ -185,6 +188,7 @@ private static void setAuths() throws Exception { throw new IOException(t); } } + @Test public void testSimpleScannerXMLWithLabelsThatReceivesNoData() throws IOException, JAXBException { final int BATCH_SIZE = 5; @@ -237,5 +241,4 @@ public void testSimpleScannerXMLWithLabelsThatReceivesData() throws IOException, .getBody())); assertEquals(5, countCellSet(cellSet)); } - } diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java index 7ca56f363f56..a1b0885be0a8 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java @@ -1,5 +1,4 @@ -/* - * +/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -16,9 +15,12 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package org.apache.hadoop.hbase.rest; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; + import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.StringWriter; @@ -44,8 +46,6 @@ import org.apache.hadoop.hbase.rest.model.TestTableSchemaModel; import org.apache.hadoop.hbase.util.Bytes; -import static org.junit.Assert.*; - import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -67,7 +67,7 @@ public class TestSchemaResource { private static Configuration conf; private static TestTableSchemaModel testTableSchemaModel; private static Header extraHdr = null; - + private static boolean csrfEnabled = true; @Parameterized.Parameters @@ -130,27 +130,27 @@ public void testTableCreateAndDeleteXML() throws IOException, JAXBException { if (csrfEnabled) { // test put operation is forbidden without custom header response = client.put(schemaPath, Constants.MIMETYPE_XML, toXML(model)); - assertEquals(response.getCode(), 400); + assertEquals(400, response.getCode()); } response = client.put(schemaPath, Constants.MIMETYPE_XML, toXML(model), extraHdr); - assertEquals(response.getCode(), 201); + assertEquals(201, response.getCode()); // recall the same put operation but in read-only mode conf.set("hbase.rest.readonly", "true"); response = client.put(schemaPath, Constants.MIMETYPE_XML, toXML(model), extraHdr); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); // retrieve the schema and validate it response = client.get(schemaPath, Constants.MIMETYPE_XML); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); model = fromXML(response.getBody()); testTableSchemaModel.checkModel(model, TABLE1); // with json retrieve the schema and validate it response = client.get(schemaPath, Constants.MIMETYPE_JSON); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type")); model = testTableSchemaModel.fromJSON(Bytes.toString(response.getBody())); testTableSchemaModel.checkModel(model, TABLE1); @@ -163,19 +163,19 @@ public void testTableCreateAndDeleteXML() throws IOException, JAXBException { // test delete schema operation is forbidden in read-only mode response = client.delete(schemaPath, extraHdr); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); // return read-only setting back to default conf.set("hbase.rest.readonly", "false"); // delete the table and make sure HBase concurs response = client.delete(schemaPath, extraHdr); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertFalse(admin.tableExists(TableName.valueOf(TABLE1))); } @Test - public void testTableCreateAndDeletePB() throws IOException, JAXBException { + public void testTableCreateAndDeletePB() throws IOException { String schemaPath = "/" + TABLE2 + "/schema"; TableSchemaModel model; Response response; @@ -190,22 +190,22 @@ public void testTableCreateAndDeletePB() throws IOException, JAXBException { if (csrfEnabled) { // test put operation is forbidden without custom header response = client.put(schemaPath, Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput()); - assertEquals(response.getCode(), 400); + assertEquals(400, response.getCode()); } response = client.put(schemaPath, Constants.MIMETYPE_PROTOBUF, 
model.createProtobufOutput(), extraHdr); - assertEquals(response.getCode(), 201); + assertEquals(201, response.getCode()); // recall the same put operation but in read-only mode conf.set("hbase.rest.readonly", "true"); response = client.put(schemaPath, Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput(), extraHdr); assertNotNull(extraHdr); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); // retrieve the schema and validate it response = client.get(schemaPath, Constants.MIMETYPE_PROTOBUF); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_PROTOBUF, response.getHeader("content-type")); model = new TableSchemaModel(); model.getObjectFromMessage(response.getBody()); @@ -213,7 +213,7 @@ public void testTableCreateAndDeletePB() throws IOException, JAXBException { // retrieve the schema and validate it with alternate pbuf type response = client.get(schemaPath, Constants.MIMETYPE_PROTOBUF_IETF); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_PROTOBUF_IETF, response.getHeader("content-type")); model = new TableSchemaModel(); model.getObjectFromMessage(response.getBody()); @@ -227,16 +227,14 @@ public void testTableCreateAndDeletePB() throws IOException, JAXBException { // test delete schema operation is forbidden in read-only mode response = client.delete(schemaPath, extraHdr); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); // return read-only setting back to default conf.set("hbase.rest.readonly", "false"); // delete the table and make sure HBase concurs response = client.delete(schemaPath, extraHdr); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertFalse(admin.tableExists(TableName.valueOf(TABLE2))); } - } - diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java index 03d4360a92b3..08ffe10030b9 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java @@ -76,7 +76,6 @@ @Category(MediumTests.class) public class TestTableScan { - private static final TableName TABLE = TableName.valueOf("TestScanResource"); private static final String CFA = "a"; private static final String CFB = "b"; @@ -96,7 +95,7 @@ public class TestTableScan { @BeforeClass public static void setUpBeforeClass() throws Exception { conf = TEST_UTIL.getConfiguration(); - conf.set(Constants.CUSTOM_FILTERS, "CustomFilter:" + CustomFilter.class.getName()); + conf.set(Constants.CUSTOM_FILTERS, "CustomFilter:" + CustomFilter.class.getName()); TEST_UTIL.startMiniCluster(); REST_TEST_UTIL.startServletContainer(conf); client = new Client(new Cluster().add("localhost", @@ -122,7 +121,7 @@ public static void tearDownAfterClass() throws Exception { } @Test - public void testSimpleScannerXML() throws IOException, JAXBException, XMLStreamException { + public void testSimpleScannerXML() throws IOException, JAXBException { // Test scanning particular columns StringBuilder builder = new StringBuilder(); builder.append("/*"); @@ -149,7 +148,7 @@ public void testSimpleScannerXML() throws IOException, JAXBException, XMLStreamE response = client.get("/" + TABLE + builder.toString(), Constants.MIMETYPE_XML); assertEquals(200, response.getCode()); - assertEquals(Constants.MIMETYPE_XML, 
response.getHeader("content-type")); + assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); model = (CellSetModel) ush.unmarshal(response.getStream()); count = TestScannerResource.countCellSet(model); assertEquals(expectedRows1, count); @@ -198,7 +197,7 @@ public void testSimpleScannerXML() throws IOException, JAXBException, XMLStreamE } @Test - public void testSimpleScannerJson() throws IOException, JAXBException { + public void testSimpleScannerJson() throws IOException { // Test scanning particular columns with limit. StringBuilder builder = new StringBuilder(); builder.append("/*"); @@ -285,16 +284,16 @@ public void handleRowModel(ClientSideCellSetModel helper, RowModel row) { unmarshaller.setListener(new Unmarshaller.Listener() { @Override public void beforeUnmarshal(Object target, Object parent) { - if (target instanceof ClientSideCellSetModel) { - ((ClientSideCellSetModel) target).setCellSetModelListener(listener); - } + if (target instanceof ClientSideCellSetModel) { + ((ClientSideCellSetModel) target).setCellSetModelListener(listener); + } } @Override public void afterUnmarshal(Object target, Object parent) { - if (target instanceof ClientSideCellSetModel) { - ((ClientSideCellSetModel) target).setCellSetModelListener(null); - } + if (target instanceof ClientSideCellSetModel) { + ((ClientSideCellSetModel) target).setCellSetModelListener(null); + } } }); @@ -396,7 +395,7 @@ public void testSimpleScannerProtobuf() throws Exception { int rowCount = readProtobufStream(response.getStream()); assertEquals(15, rowCount); - //Test with start row and end row. + //Test with start row and end row. builder = new StringBuilder(); builder.append("/*"); builder.append("?"); @@ -455,7 +454,7 @@ public int readProtobufStream(InputStream inputStream) throws IOException{ } @Test - public void testScanningUnknownColumnJson() throws IOException, JAXBException { + public void testScanningUnknownColumnJson() throws IOException { // Test scanning particular columns with limit. 
StringBuilder builder = new StringBuilder(); builder.append("/*"); @@ -471,7 +470,7 @@ public void testScanningUnknownColumnJson() throws IOException, JAXBException { int count = TestScannerResource.countCellSet(model); assertEquals(0, count); } - + @Test public void testSimpleFilter() throws IOException, JAXBException { StringBuilder builder = new StringBuilder(); @@ -503,7 +502,7 @@ public void testQualifierAndPrefixFilters() throws IOException, JAXBException { builder.append("?"); builder.append(Constants.SCAN_FILTER + "=" + URLEncoder.encode("QualifierFilter(=,'binary:1')", "UTF-8")); - Response response = + Response response = client.get("/" + TABLE + builder.toString(), Constants.MIMETYPE_XML); assertEquals(200, response.getCode()); JAXBContext ctx = JAXBContext.newInstance(CellSetModel.class); @@ -515,7 +514,6 @@ public void testQualifierAndPrefixFilters() throws IOException, JAXBException { StandardCharsets.UTF_8)); } - @Test public void testCompoundFilter() throws IOException, JAXBException { StringBuilder builder = new StringBuilder(); @@ -554,7 +552,7 @@ public void testCustomFilter() throws IOException, JAXBException { assertEquals("abc", new String(model.getRows().get(0).getCells().get(0).getValue(), StandardCharsets.UTF_8)); } - + @Test public void testNegativeCustomFilter() throws IOException, JAXBException { StringBuilder builder = new StringBuilder(); @@ -625,7 +623,7 @@ public void testReversed() throws IOException, JAXBException { } @Test - public void testColumnWithEmptyQualifier() throws IOException, JAXBException { + public void testColumnWithEmptyQualifier() throws IOException { // Test scanning with empty qualifier StringBuilder builder = new StringBuilder(); builder.append("/*"); @@ -670,7 +668,7 @@ public static class CustomFilter extends PrefixFilter { public CustomFilter(byte[] key) { super(key); } - + @Override public boolean filterRowKey(byte[] buffer, int offset, int length) { int cmp = Bytes.compareTo(buffer, offset, length, this.key, 0, this.key.length); @@ -690,7 +688,6 @@ public static Filter createFilterFromArguments(ArrayList filterArguments @XmlRootElement(name = "CellSet") @XmlAccessorType(XmlAccessType.FIELD) public static class ClientSideCellSetModel implements Serializable { - private static final long serialVersionUID = 1L; /** @@ -707,25 +704,23 @@ public static class ClientSideCellSetModel implements Serializable { * is removed again. */ public void setCellSetModelListener(final Listener l) { - row = (l == null) ? null : new ArrayList() { + row = (l == null) ? null : new ArrayList() { private static final long serialVersionUID = 1L; - @Override - public boolean add(RowModel o) { - l.handleRowModel(ClientSideCellSetModel.this, o); - listenerInvoked = true; - return false; - } - }; + + @Override + public boolean add(RowModel o) { + l.handleRowModel(ClientSideCellSetModel.this, o); + listenerInvoked = true; + return false; + } + }; } /** * This listener is invoked every time a new row model is unmarshalled. 
*/ - public static interface Listener { - void handleRowModel(ClientSideCellSetModel helper, RowModel rowModel); + public interface Listener { + void handleRowModel(ClientSideCellSetModel helper, RowModel rowModel); } } } - - - diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java index adfeafe1e5df..391977d41e18 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java @@ -1,4 +1,4 @@ -/* +/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -48,13 +48,12 @@ */ @Category(SmallTests.class) public class TestRemoteHTableRetries { - private static final int SLEEP_TIME = 50; private static final int RETRIES = 3; private static final long MAX_TIME = SLEEP_TIME * (RETRIES - 1); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - + private static final byte[] ROW_1 = Bytes.toBytes("testrow1"); private static final byte[] COLUMN_1 = Bytes.toBytes("a"); private static final byte[] QUALIFIER_1 = Bytes.toBytes("1"); @@ -62,7 +61,7 @@ public class TestRemoteHTableRetries { private Client client; private RemoteHTable remoteTable; - + @Before public void setup() throws Exception { client = mock(Client.class); @@ -86,7 +85,7 @@ public void setup() throws Exception { public void tearDownAfterClass() throws Exception { remoteTable.close(); } - + @Test public void testDelete() throws Exception { testTimedOutCall(new CallExecutor() { @@ -98,7 +97,7 @@ public void run() throws Exception { }); verify(client, times(RETRIES)).delete(anyString()); } - + @Test public void testGet() throws Exception { testTimedOutGetCall(new CallExecutor() { @@ -119,14 +118,13 @@ public void run() throws Exception { }); verify(client, times(RETRIES)).put(anyString(), anyString(), any(byte[].class)); } - + @Test public void testMultiRowPut() throws Exception { testTimedOutCall(new CallExecutor() { @Override public void run() throws Exception { - Put[] puts = { new Put(Bytes.toBytes("Row1")), - new Put(Bytes.toBytes("Row2")) }; + Put[] puts = { new Put(Bytes.toBytes("Row1")), new Put(Bytes.toBytes("Row2")) }; remoteTable.put(Arrays.asList(puts)); } }); @@ -143,7 +141,7 @@ public void run() throws Exception { }); verify(client, times(RETRIES)).post(anyString(), anyString(), any(byte[].class)); } - + @Test public void testCheckAndPut() throws Exception { testTimedOutCall(new CallExecutor() { @@ -165,16 +163,16 @@ public void run() throws Exception { Put put = new Put(ROW_1); put.add(COLUMN_1, QUALIFIER_1, VALUE_1); Delete delete= new Delete(ROW_1); - remoteTable.checkAndDelete(ROW_1, COLUMN_1, QUALIFIER_1, VALUE_1, delete ); + remoteTable.checkAndDelete(ROW_1, COLUMN_1, QUALIFIER_1, VALUE_1, delete); } }); } - + private void testTimedOutGetCall(CallExecutor callExecutor) throws Exception { testTimedOutCall(callExecutor); verify(client, times(RETRIES)).get(anyString(), anyString()); } - + private void testTimedOutCall(CallExecutor callExecutor) throws Exception { long start = System.currentTimeMillis(); try { @@ -186,8 +184,7 @@ private void testTimedOutCall(CallExecutor callExecutor) throws Exception { assertTrue((System.currentTimeMillis() - start) > MAX_TIME); } - private static interface 
CallExecutor { + private interface CallExecutor { void run() throws Exception; } - } diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java index 9613e9fb73e3..c65772db8501 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java @@ -1,5 +1,4 @@ -/* - * +/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -16,7 +15,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.apache.hadoop.hbase.rest.client; import static org.junit.Assert.assertEquals; @@ -60,7 +58,6 @@ @Category(MediumTests.class) public class TestRemoteTable { - // Verify that invalid URL characters and arbitrary bytes are escaped when // constructing REST URLs per HBASE-7621. RemoteHTable should support row keys // and qualifiers containing any byte for all table operations. @@ -97,7 +94,7 @@ public class TestRemoteTable { private static final long TS_1 = TS_2 - ONE_HOUR; private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - private static final HBaseRESTTestingUtility REST_TEST_UTIL = + private static final HBaseRESTTestingUtility REST_TEST_UTIL = new HBaseRESTTestingUtility(); private RemoteHTable remoteTable; @@ -111,7 +108,10 @@ public static void setUpBeforeClass() throws Exception { public void before() throws Exception { Admin admin = TEST_UTIL.getHBaseAdmin(); if (admin.tableExists(TABLE)) { - if (admin.isTableEnabled(TABLE)) admin.disableTable(TABLE); + if (admin.isTableEnabled(TABLE)) { + admin.disableTable(TABLE); + } + admin.deleteTable(TABLE); } HTableDescriptor htd = new HTableDescriptor(TABLE); @@ -130,16 +130,16 @@ public void before() throws Exception { table.put(put); } remoteTable = new RemoteHTable( - new Client(new Cluster().add("localhost", + new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort())), TEST_UTIL.getConfiguration(), TABLE.toBytes()); } - + @After public void after() throws Exception { remoteTable.close(); } - + @AfterClass public static void tearDownAfterClass() throws Exception { REST_TEST_UTIL.shutdownServletContainer(); @@ -216,7 +216,6 @@ public void testGet() throws IOException { assertTrue(Bytes.equals(VALUE_2, value2)); // test timestamp - get = new Get(ROW_2); get.addFamily(COLUMN_1); get.addFamily(COLUMN_2); @@ -229,7 +228,6 @@ public void testGet() throws IOException { assertNull(value2); // test timerange - get = new Get(ROW_2); get.addFamily(COLUMN_1); get.addFamily(COLUMN_2); @@ -242,7 +240,6 @@ public void testGet() throws IOException { assertNull(value2); // test maxVersions - get = new Get(ROW_2); get.addFamily(COLUMN_1); get.setMaxVersions(2); @@ -263,7 +260,7 @@ public void testGet() throws IOException { @Test public void testMultiGet() throws Exception { - ArrayList gets = new ArrayList(); + ArrayList gets = new ArrayList<>(); gets.add(new Get(ROW_1)); gets.add(new Get(ROW_2)); Result[] results = remoteTable.get(gets); @@ -273,7 +270,7 @@ public void testMultiGet() throws Exception { assertEquals(2, results[1].size()); //Test Versions - gets = new ArrayList(); + gets = new ArrayList<>(); Get g = new Get(ROW_1); g.setMaxVersions(3); gets.add(g); @@ -285,13 +282,13 @@ public void 
     assertEquals(3, results[1].size());

     //404
-    gets = new ArrayList();
+    gets = new ArrayList<>();
     gets.add(new Get(Bytes.toBytes("RESALLYREALLYNOTTHERE")));
     results = remoteTable.get(gets);
     assertNotNull(results);
     assertEquals(0, results.length);

-    gets = new ArrayList();
+    gets = new ArrayList<>();
     gets.add(new Get(Bytes.toBytes("RESALLYREALLYNOTTHERE")));
     gets.add(new Get(ROW_1));
     gets.add(new Get(ROW_2));
@@ -314,8 +311,7 @@ public void testPut() throws IOException {
     assertTrue(Bytes.equals(VALUE_1, value));

     // multiput
-
-    List puts = new ArrayList();
+    List puts = new ArrayList<>();
     put = new Put(ROW_3);
     put.add(COLUMN_2, QUALIFIER_2, VALUE_2);
     puts.add(put);
@@ -341,8 +337,9 @@ public void testPut() throws IOException {
     value = result.getValue(COLUMN_2, QUALIFIER_2);
     assertNotNull(value);
     assertTrue(Bytes.equals(VALUE_2, value));
-    
-    assertTrue(Bytes.equals(Bytes.toBytes("TestRemoteTable" + VALID_TABLE_NAME_CHARS), remoteTable.getTableName()));
+
+    assertTrue(Bytes.equals(Bytes.toBytes("TestRemoteTable" + VALID_TABLE_NAME_CHARS),
+      remoteTable.getTableName()));
   }

   @Test
@@ -425,13 +422,13 @@ public void testDelete() throws IOException {
     assertNull(value1);
     assertNull(value2);
   }
-  
+
   /**
-   * Test RemoteHTable.Scanner 
+   * Test RemoteHTable.Scanner
    */
   @Test
   public void testScanner() throws IOException {
-    List puts = new ArrayList();
+    List puts = new ArrayList<>();
     Put put = new Put(ROW_1);
     put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
     puts.add(put);
@@ -466,7 +463,7 @@ public void testScanner() throws IOException {
     results = scanner.next(1);
     assertNull(results);
     scanner.close();
-    
+
     scanner = remoteTable.getScanner(COLUMN_1);
     results = scanner.next(4);
     assertNotNull(results);
@@ -477,7 +474,7 @@
     assertTrue(Bytes.equals(ROW_4, results[3].getRow()));

     scanner.close();
-    
+
     scanner = remoteTable.getScanner(COLUMN_1,QUALIFIER_1);
     results = scanner.next(4);
     assertNotNull(results);
@@ -488,9 +485,8 @@
     assertTrue(Bytes.equals(ROW_4, results[3].getRow()));
     scanner.close();
     assertTrue(remoteTable.isAutoFlush());
-
   }
-  
+
   @Test
   public void testCheckAndDelete() throws IOException {
     Get get = new Get(ROW_1);
@@ -511,18 +507,16 @@ public void testCheckAndDelete() throws IOException {
     put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
     remoteTable.put(put);

-    assertTrue(remoteTable.checkAndPut(ROW_1, COLUMN_1, QUALIFIER_1, VALUE_1,
-        put));
-    assertFalse(remoteTable.checkAndPut(ROW_1, COLUMN_1, QUALIFIER_1, VALUE_2,
-        put));
+    assertTrue(remoteTable.checkAndPut(ROW_1, COLUMN_1, QUALIFIER_1, VALUE_1, put));
+    assertFalse(remoteTable.checkAndPut(ROW_1, COLUMN_1, QUALIFIER_1, VALUE_2, put));
   }
-  
+
   /**
-   * Test RemoteHable.Scanner.iterator method 
+   * Test RemoteHable.Scanner.iterator method
    */
   @Test
   public void testIteratorScaner() throws IOException {
-    List puts = new ArrayList();
+    List puts = new ArrayList<>();
     Put put = new Put(ROW_1);
     put.add(COLUMN_1, QUALIFIER_1, VALUE_1);
     puts.add(put);
@@ -547,7 +541,7 @@ public void testIteratorScaner() throws IOException {
     }
     assertEquals(4, counter);
   }
-  
+
   /**
    * Test a some methods of class Response.
    */
@@ -569,13 +563,14 @@ public void testResponse(){
     response.setHeaders(headers);
     assertEquals("value1.1", response.getHeader("header1"));
     response.setBody(Bytes.toBytes("body"));
-    assertTrue(response.hasBody());    
+    assertTrue(response.hasBody());
   }

   /**
    * Tests keeping a HBase scanner alive for long periods of time. Each call to next() should reset
-   * the ConnectionCache timeout for the scanner's connection
-   * @throws Exception
+   * the ConnectionCache timeout for the scanner's connection.
+   *
+   * @throws Exception if starting the servlet container or disabling or truncating the table fails
    */
   @Test
   public void testLongLivedScan() throws Exception {
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java
index 705a552867c5..4590a7f9f180 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java
@@ -1,5 +1,4 @@
-/*
- *
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
@@ -16,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.rest.model;

 import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -106,5 +104,4 @@ protected void checkModel(ScannerModel model) {
       assertTrue(foundLabel2);
     }
   }
-
 }
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterStatusModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterStatusModel.java
index 78afd2f769c5..6aadef8858c5 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterStatusModel.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterStatusModel.java
@@ -1,5 +1,4 @@
-/*
- *
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
@@ -16,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.rest.model;

 import java.util.Iterator;
@@ -29,7 +27,6 @@

 @Category(SmallTests.class)
 public class TestStorageClusterStatusModel extends TestModelBase {
-
   public TestStorageClusterStatusModel() throws Exception {
     super(StorageClusterStatusModel.class);

@@ -99,7 +96,7 @@ protected void checkModel(StorageClusterStatusModel model) {
     assertEquals(1245219839331L, node.getStartCode());
     assertEquals(128, node.getHeapSizeMB());
     assertEquals(1024, node.getMaxHeapSizeMB());
-    Iterator regions = 
+    Iterator regions =
       node.getRegions().iterator();
     StorageClusterStatusModel.Node.Region region = regions.next();
     assertTrue(Bytes.toString(region.getName()).equals(
@@ -138,7 +135,7 @@ protected void checkModel(StorageClusterStatusModel model) {
     assertEquals(1, region.getTotalStaticBloomSizeKB());
     assertEquals(1, region.getTotalCompactingKVs());
     assertEquals(1, region.getCurrentCompactedKVs());
-    
+
     assertFalse(regions.hasNext());
     assertFalse(nodes.hasNext());
   }

From 53d977261f5b93b659c906073853537cf5ffe077 Mon Sep 17 00:00:00 2001
From: Jan Hentschel
Date: Wed, 15 Jan 2020 19:50:27 +0100
Subject: [PATCH 2/3] HBASE-23623 Fixed Checkstyle violation from build

---
 .../test/java/org/apache/hadoop/hbase/rest/TestTableScan.java | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
index 08ffe10030b9..f4810be9d5c6 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java
@@ -47,13 +47,11 @@
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.parsers.SAXParserFactory;
-import javax.xml.stream.XMLStreamException;

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.filter.Filter;
@@ -66,6 +64,7 @@
 import org.apache.hadoop.hbase.rest.model.CellSetModel;
 import org.apache.hadoop.hbase.rest.model.RowModel;
 import org.apache.hadoop.hbase.rest.provider.JacksonProvider;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;

From df8b99ac0c1ac4cf4a83a426275371b6102c0ef3 Mon Sep 17 00:00:00 2001
From: Jan Hentschel
Date: Thu, 16 Jan 2020 10:18:50 +0100
Subject: [PATCH 3/3] HBASE-23623 Fixed review comments

---
 .../hadoop/hbase/rest/TestNamespacesInstanceResource.java      | 2 +-
 .../org/apache/hadoop/hbase/rest/TestNamespacesResource.java   | 2 +-
 .../org/apache/hadoop/hbase/rest/TestScannersWithLabels.java   | 2 +-
 .../java/org/apache/hadoop/hbase/rest/TestSchemaResource.java  | 2 +-
 .../hadoop/hbase/rest/client/TestRemoteHTableRetries.java      | 2 +-
 .../org/apache/hadoop/hbase/rest/client/TestRemoteTable.java   | 2 +-
 .../org/apache/hadoop/hbase/rest/model/TestScannerModel.java   | 2 +-
 .../hadoop/hbase/rest/model/TestStorageClusterStatusModel.java | 2 +-
 8 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
index 6c20457ae445..fb8ddcd519de 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesInstanceResource.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesResource.java
index ea07314fe383..ee72db0ff5fa 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestNamespacesResource.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
index 7ecae05c6400..89fe3d4ac024 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
index a1b0885be0a8..a25ce265c204 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java
index 391977d41e18..5133427e1ac6 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteHTableRetries.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
index c65772db8501..469e1afae743 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java
index 4590a7f9f180..30d6ddd76989 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterStatusModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterStatusModel.java
index 6aadef8858c5..eb54db7ba8d5 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterStatusModel.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterStatusModel.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information