
Commit a552add

madrob and ndimiduk committed

HBASE-26622 Update error-prone to 2.10

Author: Mike Drob <[email protected]>
Co-authored-by: Nick Dimiduk <[email protected]>
Signed-off-by: Andrew Purtell <[email protected]>

1 parent 87f8d9a commit a552add

File tree: 10 files changed, +34 -54 lines changed


hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java

Lines changed: 2 additions & 6 deletions
@@ -132,17 +132,13 @@ protected void decode(
     try {
       getParserForTypeMethod = protobufMessageLiteClass.getDeclaredMethod("getParserForType");
       newBuilderForTypeMethod = protobufMessageLiteClass.getDeclaredMethod("newBuilderForType");
+      // TODO: If this is false then the class will fail to load? Can refactor it out?
+      hasParser = true;
     } catch (NoSuchMethodException e) {
       // If the method is not found, we are in trouble. Abort.
       throw new RuntimeException(e);
     }
 
-    try {
-      protobufMessageLiteClass.getDeclaredMethod("getParserForType");
-      hasParser = true;
-    } catch (Throwable var2) {
-    }
-
     HAS_PARSER = hasParser;
   }
 }
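This hunk targets error-prone's ReturnValueIgnored check: the deleted second `try` block looked up `getParserForType` a second time purely as a probe, discarded the `Method` it returned, and swallowed every `Throwable`. Since the first block already aborts when the method is missing, `hasParser = true` can simply move there. A minimal sketch of the probe-by-reflection pattern that keeps the return value (the class name is real protobuf API, but the surrounding class is hypothetical):

```java
import java.lang.reflect.Method;

// Hypothetical sketch: detect a method's presence without tripping
// error-prone's ReturnValueIgnored check.
class ReflectionProbe {
  static final boolean HAS_PARSER;

  static {
    boolean hasParser = false;
    try {
      // Keeping the Method result makes the intent explicit; calling
      // getDeclaredMethod and discarding it is what the check flags.
      Method parser = Class.forName("com.google.protobuf.MessageLite")
          .getDeclaredMethod("getParserForType");
      hasParser = (parser != null); // always true if no exception was thrown
    } catch (ClassNotFoundException | NoSuchMethodException e) {
      // Leave hasParser false: this protobuf runtime predates getParserForType.
    }
    HAS_PARSER = hasParser;
  }
}
```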

hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java

Lines changed: 8 additions & 9 deletions
@@ -1102,10 +1102,10 @@ private int calculateHashForKey(Cell cell) {
    */
   @Override
   public KeyValue clone() throws CloneNotSupportedException {
-    super.clone();
-    byte [] b = new byte[this.length];
-    System.arraycopy(this.bytes, this.offset, b, 0, this.length);
-    KeyValue ret = new KeyValue(b, 0, b.length);
+    KeyValue ret = (KeyValue) super.clone();
+    ret.bytes = Arrays.copyOf(this.bytes, this.bytes.length);
+    ret.offset = 0;
+    ret.length = ret.bytes.length;
     // Important to clone the memstoreTS as well - otherwise memstore's
     // update-in-place methods (eg increment) will end up creating
     // new entries
@@ -1720,8 +1720,8 @@ public String getLegacyKeyComparatorName() {
   }
 
   @Override
-  protected Object clone() throws CloneNotSupportedException {
-    return new MetaComparator();
+  protected MetaComparator clone() throws CloneNotSupportedException {
+    return (MetaComparator) super.clone();
   }
 
   /**
@@ -2248,9 +2248,8 @@ public byte[] getShortMidpointKey(final byte[] leftKey, final byte[] rightKey) {
   }
 
   @Override
-  protected Object clone() throws CloneNotSupportedException {
-    super.clone();
-    return new KVComparator();
+  protected KVComparator clone() throws CloneNotSupportedException {
+    return (KVComparator) super.clone();
   }
 
 }
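All three rewrites follow the idiom error-prone expects from `Cloneable` classes: call `super.clone()` so the runtime type is preserved for subclasses, narrow the declared return type covariantly, and only then fix up mutable state. The old bodies either called `super.clone()` and discarded the result, or returned a `new` instance, which breaks subclassing. A minimal sketch of the idiom on a hypothetical class:

```java
import java.util.Arrays;

// Hypothetical example of the covariant-clone idiom applied in this commit.
class ByteHolder implements Cloneable {
  byte[] bytes;

  ByteHolder(byte[] bytes) {
    this.bytes = bytes;
  }

  @Override
  public ByteHolder clone() throws CloneNotSupportedException {
    // super.clone() preserves the runtime class; `return new ByteHolder(...)`
    // would silently return the wrong type for any subclass.
    ByteHolder ret = (ByteHolder) super.clone();
    // Deep-copy mutable fields so the clone cannot alias our state.
    ret.bytes = Arrays.copyOf(this.bytes, this.bytes.length);
    return ret;
  }
}
```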

hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java

Lines changed: 5 additions & 5 deletions
@@ -53,11 +53,11 @@
  * {@link ColumnInterpreter} is used to interpret column value. This class is
  * parameterized with the following (these are the types with which the {@link ColumnInterpreter}
  * is parameterized, and for more description on these, refer to {@link ColumnInterpreter}):
- * @param T Cell value data type
- * @param S Promoted data type
- * @param P PB message that is used to transport initializer specific bytes
- * @param Q PB message that is used to transport Cell (&lt;T&gt;) instance
- * @param R PB message that is used to transport Promoted (&lt;S&gt;) instance
+ * @param <T> Cell value data type
+ * @param <S> Promoted data type
+ * @param <P> PB message that is used to transport initializer specific bytes
+ * @param <Q> PB message that is used to transport Cell (&lt;T&gt;) instance
+ * @param <R> PB message that is used to transport Promoted (&lt;S&gt;) instance
 */
 @InterfaceAudience.Private
 public class AggregateImplementation<T, S, P extends Message, Q extends Message, R extends Message>
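The distinction matters to javadoc: `@param T` claims to document a value parameter literally named "T", while `@param <T>` attaches the description to the generic type parameter; newer error-prone releases flag the bare form as an invalid tag. A short sketch of the convention on a hypothetical generic interface:

```java
/**
 * Maps an input value to an output value (hypothetical illustration).
 *
 * @param <I> input type; the angle brackets mark a type parameter
 * @param <O> output type
 */
interface Mapper<I, O> {
  /**
   * @param input the value to convert (no brackets: an ordinary parameter)
   * @return the converted value
   */
  O map(I input);
}
```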

hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java

Lines changed: 1 addition & 1 deletion
@@ -95,7 +95,7 @@
 /**
  * Writes HFiles. Passed Cells must arrive in order.
  * Writes current time as the sequence id for the file. Sets the major compacted
- * attribute on created @{link {@link HFile}s. Calling write(null,null) will forcibly roll
+ * attribute on created {@link HFile}s. Calling write(null,null) will forcibly roll
  * all HFiles being written.
  * <p>
  * Using this class as part of a MapReduce job is best done

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java

Lines changed: 1 addition & 0 deletions
@@ -256,6 +256,7 @@ public Configuration getConf() {
    * @param rsServices interface to available region server functionality
    * @param conf the configuration
    */
+  @SuppressWarnings("ReturnValueIgnored") // Checking method exists as CPU optimization
   public RegionCoprocessorHost(final HRegion region,
       final RegionServerServices rsServices, final Configuration conf) {
     super(rsServices);
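Here the ignored return value is intentional (per the trailing comment, the constructor only checks that a method exists), so rather than restructure hot-path code the commit suppresses the check by name at the member level. A sketch of that pattern on a hypothetical class:

```java
// Hypothetical sketch: suppress error-prone's ReturnValueIgnored check when a
// call is made only for its side effect of throwing.
class MethodPresenceCheck {
  @SuppressWarnings("ReturnValueIgnored") // Checking method exists as CPU optimization
  MethodPresenceCheck(Class<?> clazz) throws NoSuchMethodException {
    // The Method object is discarded; we only care whether the lookup throws.
    clazz.getDeclaredMethod("toString");
  }
}
```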

hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java

Lines changed: 5 additions & 7 deletions
@@ -61,7 +61,7 @@ public void testPutIteration() throws IOException {
     for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
     assertEquals(COUNT, index);
   }
@@ -74,15 +74,13 @@ public void testPutConcurrentModificationOnIteration() throws IOException {
       p.addColumn(bytes, bytes, TIMESTAMP, bytes);
     }
     int index = 0;
-    int trigger = 3;
     for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
       // When we hit the trigger, try inserting a new KV; should trigger exception
-      if (trigger == 3) p.addColumn(bytes, bytes, TIMESTAMP, bytes);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      p.addColumn(bytes, bytes, TIMESTAMP, bytes);
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
-    assertEquals(COUNT, index);
   }
 
   @Test
@@ -96,7 +94,7 @@ public void testDeleteIteration() throws IOException {
     for (CellScanner cellScanner = d.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, KeyValue.Type.DeleteColumn));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, KeyValue.Type.Delete), cell);
     }
     assertEquals(COUNT, index);
   }
@@ -151,7 +149,7 @@ public void testResultIteration() throws IOException {
     for (CellScanner cellScanner = r.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
     assertEquals(COUNT, index);
   }
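The original assertions were no-ops: `cell.equals(...)` computes a boolean and throws it away, so these tests passed whether or not the cells matched, which is exactly what ReturnValueIgnored catches. Notably, once the comparisons became real `assertEquals` calls, the expected type in the delete test had to be corrected from `DeleteColumn` to `Delete`. A minimal illustration of the difference (hypothetical JUnit 4 test class):

```java
import static org.junit.Assert.assertEquals;

import org.junit.Test;

// Hypothetical test contrasting a discarded equals() with a real assertion.
public class DiscardedEqualsTest {
  @Test
  public void discardedEqualsNeverFails() {
    "a".equals("b"); // result ignored: this "check" can never fail the test
  }

  @Test
  public void assertionActuallyChecks() {
    assertEquals("a", "a"); // fails the test if the values differ
  }
}
```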

hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java

Lines changed: 2 additions & 9 deletions
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.codec;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertTrue;
 
 import java.io.ByteArrayInputStream;
@@ -30,10 +30,6 @@
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.codec.CellCodec;
-import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.codec.KeyValueCodec;
-import org.apache.hadoop.hbase.codec.MessageCodec;
 import org.apache.hadoop.hbase.io.CellOutputStream;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -96,10 +92,7 @@ static int getRoughSize(final Cell [] cells) {
   }
 
   static void verifyCells(final Cell [] input, final Cell [] output) {
-    assertEquals(input.length, output.length);
-    for (int i = 0; i < input.length; i ++) {
-      input[i].equals(output[i]);
-    }
+    assertArrayEquals(input, output);
   }
 
   static void doCodec(final Codec codec, final Cell [] cells, final int cycles, final int count,
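Same ReturnValueIgnored defect in helper form: the loop invoked `equals` without asserting the result, so only the array lengths were ever verified. `assertArrayEquals` checks both length and element equality and reports the first mismatching index on failure. A self-contained sketch under those assumptions (hypothetical class and data):

```java
import static org.junit.Assert.assertArrayEquals;

// Hypothetical illustration: assertArrayEquals replaces a hand-rolled loop.
public class ArrayCheckExample {
  static void verify(String[] input, String[] output) {
    // Checks length and element-by-element equality; on failure the message
    // names the index of the first mismatch.
    assertArrayEquals(input, output);
  }

  public static void main(String[] args) {
    verify(new String[] { "a", "b" }, new String[] { "a", "b" });
  }
}
```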

hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMergesSplitsAddToTracker.java

Lines changed: 9 additions & 15 deletions
@@ -18,15 +18,18 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.TRACKER_IMPL;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.everyItem;
+import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.hasProperty;
+import static org.hamcrest.Matchers.not;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.UUID;
 import java.util.concurrent.TimeUnit;
-import org.apache.commons.lang3.mutable.MutableBoolean;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -222,21 +225,12 @@ private Pair<StoreFileInfo,String> copyFileInTheStoreDir(HRegion region) throws
     return new Pair<>(fileInfo, copyName);
   }
 
-  private void validateDaughterRegionsFiles(HRegion region, String orignalFileName,
+  private void validateDaughterRegionsFiles(HRegion region, String originalFileName,
       String untrackedFile) throws IOException {
     //verify there's no link for the untracked, copied file in first region
     List<StoreFileInfo> infos = region.getRegionFileSystem().getStoreFiles("info");
-    final MutableBoolean foundLink = new MutableBoolean(false);
-    infos.stream().forEach(i -> {
-      i.getActiveFileName().contains(orignalFileName);
-      if(i.getActiveFileName().contains(untrackedFile)){
-        fail();
-      }
-      if(i.getActiveFileName().contains(orignalFileName)){
-        foundLink.setTrue();
-      }
-    });
-    assertTrue(foundLink.booleanValue());
+    assertThat(infos, everyItem(hasProperty("activeFileName", not(containsString(untrackedFile)))));
+    assertThat(infos, hasItem(hasProperty("activeFileName", containsString(originalFileName))));
   }
 
   private void verifyFilesAreTracked(Path regionDir, FileSystem fs) throws Exception {
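The rewritten helper replaces a mutable flag plus `fail()` (and a stray no-op `contains` call) with declarative hamcrest matchers: `everyItem` asserts a condition over the whole list, `hasItem` asserts at least one element matches, and `hasProperty` reads a JavaBean getter, here `getActiveFileName()`, reflectively. A self-contained sketch of the same matcher combination over a hypothetical bean:

```java
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.everyItem;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasProperty;
import static org.hamcrest.Matchers.not;

import java.util.Arrays;
import java.util.List;

// Hypothetical bean: hasProperty("activeFileName", ...) resolves getActiveFileName().
public class MatcherExample {
  public static class Info {
    private final String activeFileName;

    public Info(String activeFileName) {
      this.activeFileName = activeFileName;
    }

    public String getActiveFileName() {
      return activeFileName;
    }
  }

  public static void main(String[] args) {
    List<Info> infos = Arrays.asList(new Info("hfile-link-abc"), new Info("hfile-def"));
    // No element may mention the untracked file...
    assertThat(infos, everyItem(hasProperty("activeFileName", not(containsString("untracked")))));
    // ...and at least one element must reference the original file.
    assertThat(infos, hasItem(hasProperty("activeFileName", containsString("abc"))));
  }
}
```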

hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java

Lines changed: 0 additions & 1 deletion
@@ -155,7 +155,6 @@ private synchronized ResultScannerWrapper getScanner(int id) {
    * id-&gt;scanner hash-map.
    *
    * @param id the ID of the scanner to remove
-   * @return a Scanner, or null if ID was invalid.
    */
   private synchronized void removeScanner(int id) {
     scannerMap.invalidate(id);

pom.xml

Lines changed: 1 addition & 1 deletion
@@ -1816,7 +1816,7 @@
     -->
     <checkstyle.version>8.28</checkstyle.version>
     <exec.maven.version>1.6.0</exec.maven.version>
-    <error-prone.version>2.4.0</error-prone.version>
+    <error-prone.version>2.10.0</error-prone.version>
     <jamon.plugin.version>2.4.2</jamon.plugin.version>
     <lifecycle.mapping.version>1.0.0</lifecycle.mapping.version>
     <maven.antrun.version>1.8</maven.antrun.version>
