From 05c01575855fdac4fe69b3cb73b8dfce881455b2 Mon Sep 17 00:00:00 2001
From: Mike Drob
Date: Thu, 23 Dec 2021 11:00:54 -0800
Subject: [PATCH] HBASE-26622 Update error-prone to 2.10

Author: Mike Drob
Co-authored-by: Nick Dimiduk

Signed-off-by: Andrew Purtell
---
 .../hbase/io/asyncfs/ProtobufDecoder.java      |  8 ++-----
 .../org/apache/hadoop/hbase/KeyValue.java      | 17 +++++++------
 .../coprocessor/AggregateImplementation.java   | 10 ++++----
 .../hbase/mapreduce/HFileOutputFormat2.java    |  2 +-
 .../regionserver/RegionCoprocessorHost.java    |  1 +
 .../hbase/security/token/TokenProvider.java    |  2 +-
 .../client/TestPutDeleteEtcCellIteration.java  | 12 ++++------
 .../hadoop/hbase/codec/CodecPerformance.java   | 11 ++-------
 .../TestMergesSplitsAddToTracker.java          | 24 +++++++------------
 .../thrift/ThriftHBaseServiceHandler.java      |  1 -
 pom.xml                                        |  2 +-
 11 files changed, 35 insertions(+), 55 deletions(-)

diff --git a/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java b/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java
index d6e68f30542b..3be9a2e49c1b 100644
--- a/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java
+++ b/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java
@@ -132,17 +132,13 @@ protected void decode(
     try {
       getParserForTypeMethod = protobufMessageLiteClass.getDeclaredMethod("getParserForType");
       newBuilderForTypeMethod = protobufMessageLiteClass.getDeclaredMethod("newBuilderForType");
+      // TODO: If this is false then the class will fail to load? Can refactor it out?
+      hasParser = true;
     } catch (NoSuchMethodException e) {
       // If the method is not found, we are in trouble. Abort.
       throw new RuntimeException(e);
     }

-    try {
-      protobufMessageLiteClass.getDeclaredMethod("getParserForType");
-      hasParser = true;
-    } catch (Throwable var2) {
-    }
-
     HAS_PARSER = hasParser;
   }
 }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index f4046e4af7b0..c05d0be3e566 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -1102,10 +1102,10 @@ private int calculateHashForKey(Cell cell) {
    */
   @Override
   public KeyValue clone() throws CloneNotSupportedException {
-    super.clone();
-    byte [] b = new byte[this.length];
-    System.arraycopy(this.bytes, this.offset, b, 0, this.length);
-    KeyValue ret = new KeyValue(b, 0, b.length);
+    KeyValue ret = (KeyValue) super.clone();
+    ret.bytes = Arrays.copyOf(this.bytes, this.bytes.length);
+    ret.offset = 0;
+    ret.length = ret.bytes.length;
     // Important to clone the memstoreTS as well - otherwise memstore's
     // update-in-place methods (eg increment) will end up creating
     // new entries
@@ -1720,8 +1720,8 @@ public String getLegacyKeyComparatorName() {
   }

   @Override
-  protected Object clone() throws CloneNotSupportedException {
-    return new MetaComparator();
+  protected MetaComparator clone() throws CloneNotSupportedException {
+    return (MetaComparator) super.clone();
   }

   /**
@@ -2248,9 +2248,8 @@ public byte[] getShortMidpointKey(final byte[] leftKey, final byte[] rightKey) {
   }

   @Override
-  protected Object clone() throws CloneNotSupportedException {
-    super.clone();
-    return new KVComparator();
+  protected KVComparator clone() throws CloneNotSupportedException {
+    return (KVComparator) super.clone();
   }
 }
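The KeyValue changes above address error-prone's clone() checks: the old code called super.clone() and discarded the result, then returned a freshly constructed object, which breaks subclassing. A minimal sketch of the corrected pattern on a hypothetical Box class (not part of HBase), using a covariant return type like the KeyValue and comparator fixes:

```java
import java.util.Arrays;

public class Box implements Cloneable {
  private byte[] payload;

  public Box(byte[] payload) {
    this.payload = payload;
  }

  @Override
  public Box clone() throws CloneNotSupportedException {
    // Keep the result of super.clone(): it preserves the runtime type,
    // so subclasses of Box remain cloneable for free.
    Box copy = (Box) super.clone();
    // Deep-copy mutable state so the clone does not share the array.
    copy.payload = Arrays.copyOf(payload, payload.length);
    return copy;
  }
}
```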
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
index a7181f962cd5..5571e1b14cb6 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
@@ -53,11 +53,11 @@
  * {@link ColumnInterpreter} is used to interpret column value. This class is
  * parameterized with the following (these are the types with which the {@link ColumnInterpreter}
  * is parameterized, and for more description on these, refer to {@link ColumnInterpreter}):
- * @param T Cell value data type
- * @param S Promoted data type
- * @param P PB message that is used to transport initializer specific bytes
- * @param Q PB message that is used to transport Cell (<T>) instance
- * @param R PB message that is used to transport Promoted (<S>) instance
+ * @param <T> Cell value data type
+ * @param <S> Promoted data type
+ * @param <P> PB message that is used to transport initializer specific bytes
+ * @param <Q> PB message that is used to transport Cell (<T>) instance
+ * @param <R> PB message that is used to transport Promoted (<S>) instance
  */
 @InterfaceAudience.Private
 public class AggregateImplementation<T, S, P extends Message, Q extends Message, R extends Message>
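The change above fixes javadoc tags for class type parameters, which error-prone's javadoc validation (the InvalidParam check, apparently surfaced by the 2.10 upgrade) rejects when the name does not match a declared parameter: a type parameter must be written with angle brackets. A hypothetical illustration:

```java
/**
 * An immutable pair.
 *
 * @param <K> the key type; written as {@literal <K>} so the tag binds to the
 *        class's declared type parameter instead of being flagged as unknown
 * @param <V> the value type
 */
public class PairOf<K, V> {
  private final K key;
  private final V value;

  public PairOf(K key, V value) {
    this.key = key;
    this.value = value;
  }

  public K key() { return key; }
  public V value() { return value; }
}
```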

diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index ca7c9a39f325..a3c3f11c5aa8 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -95,7 +95,7 @@
 /**
  * Writes HFiles. Passed Cells must arrive in order.
  * Writes current time as the sequence id for the file. Sets the major compacted
- * attribute on created @{link {@link HFile}s. Calling write(null,null) will forcibly roll
+ * attribute on created {@link HFile}s. Calling write(null,null) will forcibly roll
  * all HFiles being written.
  * <p>
  * Using this class as part of a MapReduce job is best done
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index 6961bfdaf1a6..78565c129663 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -260,6 +260,7 @@ public Configuration getConf() {
    * @param rsServices interface to available region server functionality
    * @param conf the configuration
    */
+  @SuppressWarnings("ReturnValueIgnored") // Checking method exists as CPU optimization
   public RegionCoprocessorHost(final HRegion region,
       final RegionServerServices rsServices, final Configuration conf) {
     super(rsServices);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
index 92bd0dbe3096..28fef37f5b00 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
@@ -129,7 +129,7 @@ public void getAuthenticationToken(RpcController controller,
         Token<AuthenticationTokenIdentifier> token =
             secretManager.generateToken(currentUser.getName());
-        response.setToken(ClientTokenUtil.toToken(token)).build();
+        response.setToken(ClientTokenUtil.toToken(token));
       } catch (IOException ioe) {
         CoprocessorRpcUtils.setControllerException(controller, ioe);
       }
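The two changes above are both about error-prone's ReturnValueIgnored check. In TokenProvider the trailing .build() constructed a message that was immediately thrown away (the protobuf setter already mutates the builder), so the call is dropped; in RegionCoprocessorHost the ignored return is intentional, so the check is suppressed with an explanatory comment. A small sketch of both sides of the pattern, with hypothetical names:

```java
public final class ReturnValueExamples {
  private ReturnValueExamples() {}

  // Bug: Strings are immutable, so the result of trim() must be used.
  static String normalizeBroken(String s) {
    s.trim(); // error-prone flags this as ReturnValueIgnored
    return s;
  }

  // Fix: use the returned value.
  static String normalizeFixed(String s) {
    return s.trim();
  }

  // Deliberate ignore: we only care whether the method exists, mirroring the
  // RegionCoprocessorHost constructor. Suppress narrowly and say why.
  @SuppressWarnings("ReturnValueIgnored") // probing for NoSuchMethodException only
  static boolean hasToString(Class<?> clazz) {
    try {
      clazz.getDeclaredMethod("toString");
      return true;
    } catch (NoSuchMethodException e) {
      return false;
    }
  }
}
```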
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
index 9f2cc0114649..b5e1178cca89 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
@@ -61,7 +61,7 @@ public void testPutIteration() throws IOException {
     for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
     assertEquals(COUNT, index);
   }
@@ -74,15 +74,13 @@ public void testPutConcurrentModificationOnIteration() throws IOException {
       p.addColumn(bytes, bytes, TIMESTAMP, bytes);
     }
     int index = 0;
-    int trigger = 3;
     for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
       // When we hit the trigger, try inserting a new KV; should trigger exception
-      if (trigger == 3) p.addColumn(bytes, bytes, TIMESTAMP, bytes);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      p.addColumn(bytes, bytes, TIMESTAMP, bytes);
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
-    assertEquals(COUNT, index);
   }

   @Test
@@ -96,7 +94,7 @@ public void testDeleteIteration() throws IOException {
     for (CellScanner cellScanner = d.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, KeyValue.Type.DeleteColumn));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, KeyValue.Type.Delete), cell);
     }
     assertEquals(COUNT, index);
   }
@@ -151,7 +149,7 @@ public void testResultIteration() throws IOException {
     for (CellScanner cellScanner = r.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
     assertEquals(COUNT, index);
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
index 73f5ca0959fe..e801b5b4beca 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.codec;

-import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertTrue;

 import java.io.ByteArrayInputStream;
@@ -30,10 +30,6 @@
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.codec.CellCodec;
-import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.codec.KeyValueCodec;
-import org.apache.hadoop.hbase.codec.MessageCodec;
 import org.apache.hadoop.hbase.io.CellOutputStream;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -96,10 +92,7 @@ static int getRoughSize(final Cell [] cells) {
   }

   static void verifyCells(final Cell [] input, final Cell [] output) {
-    assertEquals(input.length, output.length);
-    for (int i = 0; i < input.length; i ++) {
-      input[i].equals(output[i]);
-    }
+    assertArrayEquals(input, output);
   }

   static void doCodec(final Codec codec, final Cell [] cells, final int cycles, final int count,
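The repeated test fix above is the heart of the ReturnValueIgnored cleanups: cell.equals(expected) computed a boolean and threw it away, so these "assertions" could never fail; assertEquals (and assertArrayEquals in CodecPerformance) makes them real checks. A minimal JUnit 4 sketch of the before/after:

```java
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;

import org.junit.Test;

public class AssertionFixTest {
  @Test
  public void assertInsteadOfEquals() {
    String expected = "value";
    String actual = "va" + "lue";

    // Before: compiles and always "passes" -- the boolean result is discarded.
    // actual.equals(expected);

    // After: a mismatch now fails the test with a useful message.
    assertEquals(expected, actual);

    // assertArrayEquals replaces a hand-rolled loop of ignored equals() calls
    // and reports the first differing index on failure.
    assertArrayEquals(new byte[] {1, 2, 3}, new byte[] {1, 2, 3});
  }
}
```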
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMergesSplitsAddToTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMergesSplitsAddToTracker.java
index 703d6193e5e5..85e7380ee3b0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMergesSplitsAddToTracker.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMergesSplitsAddToTracker.java
@@ -18,15 +18,18 @@
 package org.apache.hadoop.hbase.regionserver;

 import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.TRACKER_IMPL;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.everyItem;
+import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.hasProperty;
+import static org.hamcrest.Matchers.not;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.UUID;
 import java.util.concurrent.TimeUnit;
-import org.apache.commons.lang3.mutable.MutableBoolean;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -228,21 +231,12 @@ private Pair<StoreFileInfo, String> copyFileInTheStoreDir(HRegion region) throws
     return new Pair<>(fileInfo, copyName);
   }

-  private void validateDaughterRegionsFiles(HRegion region, String orignalFileName,
+  private void validateDaughterRegionsFiles(HRegion region, String originalFileName,
       String untrackedFile) throws IOException {
     //verify there's no link for the untracked, copied file in first region
     List<StoreFileInfo> infos = region.getRegionFileSystem().getStoreFiles("info");
-    final MutableBoolean foundLink = new MutableBoolean(false);
-    infos.stream().forEach(i -> {
-      i.getActiveFileName().contains(orignalFileName);
-      if(i.getActiveFileName().contains(untrackedFile)){
-        fail();
-      }
-      if(i.getActiveFileName().contains(orignalFileName)){
-        foundLink.setTrue();
-      }
-    });
-    assertTrue(foundLink.booleanValue());
+    assertThat(infos, everyItem(hasProperty("activeFileName", not(containsString(untrackedFile)))));
+    assertThat(infos, hasItem(hasProperty("activeFileName", containsString(originalFileName))));
   }

   private void verifyFilesAreTracked(Path regionDir, FileSystem fs) throws Exception {
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
index c942977a8e5e..369b2beed41f 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
@@ -154,7 +154,6 @@ private synchronized ResultScannerWrapper getScanner(int id) {
    * id->scanner hash-map.
    *
    * @param id the ID of the scanner to remove
-   * @return a Scanner, or null if ID was invalid.
    */
   private synchronized void removeScanner(int id) {
     scannerMap.invalidate(id);
diff --git a/pom.xml b/pom.xml
index 896dc99b3554..f3f956e0cec9 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1547,7 +1547,7 @@
     -->
     <checkstyle.version>8.28</checkstyle.version>
     <exec.maven.version>1.6.0</exec.maven.version>
-    <error-prone.version>2.4.0</error-prone.version>
+    <error-prone.version>2.10.0</error-prone.version>
     <jamon.plugin.version>2.4.2</jamon.plugin.version>
     <lifecycle.mapping.version>1.0.0</lifecycle.mapping.version>
     <maven.antrun.version>1.8</maven.antrun.version>
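The TestMergesSplitsAddToTracker rewrite above replaces a MutableBoolean-and-forEach loop (whose first contains() call was itself an ignored return value) with declarative Hamcrest matchers. Note that hasProperty matches against JavaBean getters, so "activeFileName" resolves to getActiveFileName(). A self-contained sketch with a hypothetical bean standing in for StoreFileInfo:

```java
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.everyItem;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasProperty;
import static org.hamcrest.Matchers.not;

import java.util.Arrays;
import java.util.List;

public class MatcherSketch {

  /** Hypothetical bean; not the HBase StoreFileInfo class. */
  public static class FileInfo {
    private final String activeFileName;

    public FileInfo(String activeFileName) {
      this.activeFileName = activeFileName;
    }

    // hasProperty("activeFileName", ...) finds this getter reflectively.
    public String getActiveFileName() {
      return activeFileName;
    }
  }

  public static void main(String[] args) {
    List<FileInfo> infos =
        Arrays.asList(new FileInfo("original-a"), new FileInfo("original-b"));

    // No element may reference the untracked file name...
    assertThat(infos,
        everyItem(hasProperty("activeFileName", not(containsString("untracked")))));
    // ...and at least one element must reference the original file name.
    assertThat(infos, hasItem(hasProperty("activeFileName", containsString("original"))));
  }
}
```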