diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
index ba6d8a29fd79..e16303be22a5 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase;
 
 import java.io.File;
@@ -53,24 +52,32 @@ public class ClassFinder {
 
   public interface ResourcePathFilter {
     boolean isCandidatePath(String resourcePath, boolean isJar);
-  };
+  }
 
   public interface FileNameFilter {
     boolean isCandidateFile(String fileName, String absFilePath);
-  };
+  }
 
   public interface ClassFilter {
     boolean isCandidateClass(Class<?> c);
-  };
+  }
 
   public static class Not implements ResourcePathFilter, FileNameFilter, ClassFilter {
     private ResourcePathFilter resourcePathFilter;
     private FileNameFilter fileNameFilter;
     private ClassFilter classFilter;
-    public Not(ResourcePathFilter resourcePathFilter){this.resourcePathFilter = resourcePathFilter;}
-    public Not(FileNameFilter fileNameFilter){this.fileNameFilter = fileNameFilter;}
-    public Not(ClassFilter classFilter){this.classFilter = classFilter;}
+
+    public Not(ResourcePathFilter resourcePathFilter) {
+      this.resourcePathFilter = resourcePathFilter;
+    }
+
+    public Not(FileNameFilter fileNameFilter) {
+      this.fileNameFilter = fileNameFilter;
+    }
+
+    public Not(ClassFilter classFilter) {
+      this.classFilter = classFilter;
+    }
 
     @Override
     public boolean isCandidatePath(String resourcePath, boolean isJar) {
@@ -90,7 +97,10 @@ public static class And implements ClassFilter, ResourcePathFilter {
     ClassFilter[] classFilters;
     ResourcePathFilter[] resourcePathFilters;
 
-    public And(ClassFilter...classFilters) { this.classFilters = classFilters; }
+    public And(ClassFilter...classFilters) {
+      this.classFilters = classFilters;
+    }
+
     public And(ResourcePathFilter... resourcePathFilters) {
       this.resourcePathFilters = resourcePathFilters;
     }
@@ -120,10 +130,6 @@ public ClassFinder(ClassLoader classLoader) {
     this(null, null, null, classLoader);
   }
 
-  public ClassFinder() {
-    this(ClassLoader.getSystemClassLoader());
-  }
-
   public ClassFinder(ResourcePathFilter resourcePathFilter,
       FileNameFilter fileNameFilter, ClassFilter classFilter) {
     this(resourcePathFilter, fileNameFilter, classFilter, ClassLoader.getSystemClassLoader());
@@ -180,7 +186,7 @@ public Set<Class<?>> findClasses(String packageName, boolean proceedOnExceptions
       }
     }
 
-    Set<Class<?>> classes = new HashSet<Class<?>>();
+    Set<Class<?>> classes = new HashSet<>();
     for (File directory : dirs) {
       classes.addAll(findClassesFromFiles(directory, packageName, proceedOnExceptions));
     }
@@ -193,7 +199,7 @@ public Set<Class<?>> findClasses(String packageName, boolean proceedOnExceptions
   private Set<Class<?>> findClassesFromJar(String jarFileName,
       String packageName, boolean proceedOnExceptions)
     throws IOException, ClassNotFoundException, LinkageError {
-    JarInputStream jarFile = null;
+    JarInputStream jarFile;
     try {
       jarFile = new JarInputStream(new FileInputStream(jarFileName));
     } catch (IOException ioEx) {
@@ -201,8 +207,8 @@ private Set<Class<?>> findClassesFromJar(String jarFileName,
       throw ioEx;
     }
 
-    Set<Class<?>> classes = new HashSet<Class<?>>();
-    JarEntry entry = null;
+    Set<Class<?>> classes = new HashSet<>();
+    JarEntry entry;
     try {
       while (true) {
         try {
@@ -248,7 +254,7 @@ private Set<Class<?>> findClassesFromJar(String jarFileName,
 
   private Set<Class<?>> findClassesFromFiles(File baseDirectory, String packageName,
       boolean proceedOnExceptions) throws ClassNotFoundException, LinkageError {
-    Set<Class<?>> classes = new HashSet<Class<?>>();
+    Set<Class<?>> classes = new HashSet<>();
     if (!baseDirectory.exists()) {
       LOG.warn("Failed to find " + baseDirectory.getAbsolutePath());
       return classes;
@@ -285,16 +291,11 @@ private Class<?> makeClass(String className, boolean proceedOnExceptions)
       Class<?> c = Class.forName(className, false, classLoader);
       boolean isCandidateClass = null == classFilter || classFilter.isCandidateClass(c);
       return isCandidateClass ? c : null;
-    } catch (ClassNotFoundException classNotFoundEx) {
-      if (!proceedOnExceptions) {
-        throw classNotFoundEx;
-      }
-      LOG.debug("Failed to instantiate or check " + className + ": " + classNotFoundEx);
-    } catch (LinkageError linkageEx) {
+    } catch (ClassNotFoundException | LinkageError exception) {
       if (!proceedOnExceptions) {
-        throw linkageEx;
+        throw exception;
       }
-      LOG.debug("Failed to instantiate or check " + className + ": " + linkageEx);
+      LOG.debug("Failed to instantiate or check " + className + ": " + exception);
     }
     return null;
   }
@@ -313,5 +314,5 @@ public boolean accept(File file) {
         && (null == nameFilter
           || nameFilter.isCandidateFile(file.getName(), file.getAbsolutePath())));
     }
-  };
-};
+  }
+}
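Editor's note on the `makeClass` hunk above: the two structurally identical catch blocks collapse into one Java 7 multi-catch, and the rethrow keeps its precise type. A minimal, self-contained sketch of that pattern (the names here are illustrative, not the HBase API):

```java
// Multi-catch with precise rethrow: 'e' is implicitly final, and the compiler
// tracks that it can only be ClassNotFoundException or LinkageError, so the
// method can keep a narrow throws clause instead of declaring Throwable.
public class MultiCatchSketch {
  static Class<?> tryLoad(String name, boolean proceedOnExceptions)
      throws ClassNotFoundException, LinkageError {
    try {
      return Class.forName(name);
    } catch (ClassNotFoundException | LinkageError e) {
      if (!proceedOnExceptions) {
        throw e; // rethrown with its original static type preserved
      }
      System.out.println("Failed to instantiate or check " + name + ": " + e);
      return null;
    }
  }
}
```

This precise-rethrow rule is why the consolidated catch in `ClassFinder.makeClass` compiles without widening the enclosing method's declared exceptions.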
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
index 02f27995f454..19a9ac290b5f 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
@@ -76,9 +76,8 @@ public Configuration getConfiguration() {
   private File dataTestDir = null;
 
   /**
-   * @return Where to write test data on local filesystem, specific to
-   *  the test. Useful for tests that do not use a cluster.
-   * Creates it if it does not exist already.
+   * @return Where to write test data on local filesystem, specific to the test. Useful for tests
+   *         that do not use a cluster. Creates it if it does not exist already.
    */
   public Path getDataTestDir() {
     if (this.dataTestDir == null) {
@@ -88,10 +87,9 @@ public Path getDataTestDir() {
   }
 
   /**
-   * @param subdirName
-   * @return Path to a subdirectory named <code>subdirName</code> under
-   * {@link #getDataTestDir()}.
-   * Does *NOT* create it if it does not exist.
+   * @param subdirName the name of the subdirectory in the test data directory
+   * @return Path to a subdirectory named {@code subdirName} under
+   *         {@link #getDataTestDir()}. Does *NOT* create it if it does not exist.
    */
   public Path getDataTestDir(final String subdirName) {
     return new Path(getDataTestDir(), subdirName);
@@ -115,7 +113,10 @@ protected Path setupDataTestDir() {
     this.dataTestDir = new File(testPath.toString()).getAbsoluteFile();
     // Set this property so if mapreduce jobs run, they will use this as their home dir.
     System.setProperty("test.build.dir", this.dataTestDir.toString());
-    if (deleteOnExit()) this.dataTestDir.deleteOnExit();
+
+    if (deleteOnExit()) {
+      this.dataTestDir.deleteOnExit();
+    }
 
     createSubDir("hbase.local.dir", testPath, "hbase-local-dir");
 
@@ -125,7 +126,11 @@ protected Path setupDataTestDir() {
   protected void createSubDir(String propertyName, Path parent, String subDirName) {
     Path newPath = new Path(parent, subDirName);
     File newDir = new File(newPath.toString()).getAbsoluteFile();
-    if (deleteOnExit()) newDir.deleteOnExit();
+
+    if (deleteOnExit()) {
+      newDir.deleteOnExit();
+    }
+
     conf.set(propertyName, newDir.getAbsolutePath());
   }
 
@@ -140,9 +145,8 @@ boolean deleteOnExit() {
 
   /**
    * @return True if we removed the test dirs
-   * @throws IOException
    */
-  public boolean cleanupTestDir() throws IOException {
+  public boolean cleanupTestDir() {
     if (deleteDir(this.dataTestDir)) {
       this.dataTestDir = null;
       return true;
@@ -153,9 +157,8 @@ public boolean cleanupTestDir() {
   /**
    * @param subdir Test subdir name.
    * @return True if we removed the test dir
-   * @throws IOException
   */
-  boolean cleanupTestDir(final String subdir) throws IOException {
+  boolean cleanupTestDir(final String subdir) {
     if (this.dataTestDir == null) {
       return false;
     }
@@ -164,9 +167,9 @@ boolean cleanupTestDir(final String subdir) throws IOException {
 
   /**
    * @return Where to write test data on local filesystem; usually
-   * {@link #DEFAULT_BASE_TEST_DIRECTORY}
-   * Should not be used by the unit tests, hence its's private.
-   * Unit test will use a subdirectory of this directory.
+   *         {@link #DEFAULT_BASE_TEST_DIRECTORY}
+   *         Should not be used by the unit tests, hence it's private.
+   *         Unit test will use a subdirectory of this directory.
    * @see #setupDataTestDir()
    */
   private Path getBaseTestDir() {
@@ -185,9 +188,8 @@ public Path getRandomDir() {
   /**
    * @param dir Directory to delete
    * @return True if we deleted it.
-   * @throws IOException
   */
-  boolean deleteDir(final File dir) throws IOException {
+  boolean deleteDir(final File dir) {
     if (dir == null || !dir.exists()) {
       return true;
     }
@@ -195,7 +197,10 @@ boolean deleteDir(final File dir) throws IOException {
     do {
       ntries += 1;
       try {
-        if (deleteOnExit()) FileUtils.deleteDirectory(dir);
+        if (deleteOnExit()) {
+          FileUtils.deleteDirectory(dir);
+        }
+
         return true;
       } catch (IOException ex) {
         LOG.warn("Failed to delete " + dir.getAbsolutePath());
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java
index e56dea8669d0..3f8727b8dbfe 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java
@@ -16,7 +16,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase;
 
 import org.apache.commons.logging.Log;
@@ -46,7 +45,6 @@ public ResourceChecker(final String tagLine) {
     this.tagLine = tagLine;
   }
 
-
   /**
    * Class to implement for each type of resource.
    */
@@ -83,21 +81,22 @@ public String getName() {
 
     /**
     * The value for the resource.
-     * @param phase
+     * @param phase the {@link Phase} to get the value for
     */
    abstract public int getVal(Phase phase);
 
    /*
     * Retrieves List of Strings which would be logged in logEndings()
     */
-    public List<String> getStringsToLog() { return null; }
+    public List<String> getStringsToLog() {
+      return null;
+    }
  }
 
   private List<ResourceAnalyzer> ras = new ArrayList<ResourceAnalyzer>();
   private int[] initialValues;
   private int[] endingValues;
 
-
   private void fillInit() {
     initialValues = new int[ras.size()];
     fill(Phase.INITIAL, initialValues);
@@ -141,7 +140,11 @@ private void logInit() {
     StringBuilder sb = new StringBuilder();
     for (ResourceAnalyzer ra : ras) {
       int cur = initialValues[i++];
-      if (sb.length() > 0) sb.append(", ");
+
+      if (sb.length() > 0) {
+        sb.append(", ");
+      }
+
       sb.append(ra.getName()).append("=").append(cur);
     }
     LOG.info("before: " + tagLine + " " + sb);
@@ -156,7 +159,11 @@ private void logEndings() {
     for (ResourceAnalyzer ra : ras) {
       int curP = initialValues[i];
       int curN = endingValues[i++];
-      if (sb.length() > 0) sb.append(", ");
+
+      if (sb.length() > 0) {
+        sb.append(", ");
+      }
+
       sb.append(ra.getName()).append("=").append(curN).append(" (was ").append(curP).append(")");
       if (curN > curP) {
         List<String> strings = ra.getStringsToLog();
@@ -171,7 +178,6 @@ private void logEndings() {
     LOG.info("after: " + tagLine + " " + sb);
   }
 
-
   /**
    * To be called as the beginning of a test method:
    * - measure the resources
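Editor's note for readers skimming the `ResourceChecker` cleanup: each registered `ResourceAnalyzer` reports one integer per `Phase`, and the checker compares the INITIAL values against the END values and logs any growth. A hedged sketch of wiring one up — it assumes the `addResourceAnalyzer`, `start()`, and `end()` entry points this utility exposes in the HBase test tree, and the thread-count metric is purely illustrative:

```java
// Sketch: track the JVM live-thread count across a test. Assumes the
// ResourceChecker surface shown in the diff (getVal(Phase), start(), end()).
void checkThreadLeaks() {
  ResourceChecker checker = new ResourceChecker("threadLeakDemo");
  checker.addResourceAnalyzer(new ResourceChecker.ResourceAnalyzer() {
    @Override
    public int getVal(ResourceChecker.Phase phase) {
      // The same probe runs at Phase.INITIAL and Phase.END; the checker
      // logs the delta in logEndings().
      return Thread.activeCount();
    }
  });
  checker.start(); // records INITIAL values
  // ... exercise the code under test ...
  checker.end();   // records END values and logs "after: ..." with deltas
}
```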
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
index bdda4947dda9..7c9b6b19ad50 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase;
 
 import static org.junit.Assert.assertEquals;
@@ -38,7 +37,7 @@ public class TestCellUtil {
   /**
    * CellScannable used in test. Returns a {@link TestCellScanner}
    */
-  private class TestCellScannable implements CellScannable {
+  private static class TestCellScannable implements CellScannable {
     private final int cellsCount;
     TestCellScannable(final int cellsCount) {
       this.cellsCount = cellsCount;
@@ -47,7 +46,7 @@ private class TestCellScannable implements CellScannable {
     public CellScanner cellScanner() {
       return new TestCellScanner(this.cellsCount);
     }
-  };
+  }
 
   /**
    * CellScanner used in test.
@@ -67,7 +66,7 @@ public Cell current() {
     }
 
     @Override
-    public boolean advance() throws IOException {
+    public boolean advance() {
       if (this.count < cellsCount) {
         this.current = new TestCell(this.count);
         this.count++;
@@ -221,34 +220,35 @@ public int getTagsLength() {
       // TODO Auto-generated method stub
       return 0;
     }
-  };
+  }
 
   /**
    * Was overflowing if 100k or so lists of cellscanners to return.
-   * @throws IOException
   */
   @Test
   public void testCreateCellScannerOverflow() throws IOException {
-    consume(doCreateCellScanner(1, 1), 1 * 1);
-    consume(doCreateCellScanner(3, 0), 3 * 0);
+    consume(doCreateCellScanner(1, 1), 1);
+    consume(doCreateCellScanner(3, 0), 0);
     consume(doCreateCellScanner(3, 3), 3 * 3);
-    consume(doCreateCellScanner(0, 1), 0 * 1);
+    consume(doCreateCellScanner(0, 1), 0);
     // Do big number. See HBASE-11813 for why.
     final int hundredK = 100000;
-    consume(doCreateCellScanner(hundredK, 0), hundredK * 0);
+    consume(doCreateCellScanner(hundredK, 0), 0);
     consume(doCreateCellArray(1), 1);
     consume(doCreateCellArray(0), 0);
     consume(doCreateCellArray(3), 3);
-    List<CellScannable> cells = new ArrayList<CellScannable>(hundredK);
+    List<CellScannable> cells = new ArrayList<>(hundredK);
     for (int i = 0; i < hundredK; i++) {
       cells.add(new TestCellScannable(1));
     }
-    consume(CellUtil.createCellScanner(cells), hundredK * 1);
-    NavigableMap<byte [], List<Cell>> m = new TreeMap<byte [], List<Cell>>(Bytes.BYTES_COMPARATOR);
-    List<Cell> cellArray = new ArrayList<Cell>(hundredK);
-    for (int i = 0; i < hundredK; i++) cellArray.add(new TestCell(i));
+    consume(CellUtil.createCellScanner(cells), hundredK);
+    NavigableMap<byte[], List<Cell>> m = new TreeMap<>(Bytes.BYTES_COMPARATOR);
+    List<Cell> cellArray = new ArrayList<>(hundredK);
+    for (int i = 0; i < hundredK; i++) {
+      cellArray.add(new TestCell(i));
+    }
     m.put(new byte [] {'f'}, cellArray);
-    consume(CellUtil.createCellScanner(m), hundredK * 1);
+    consume(CellUtil.createCellScanner(m), hundredK);
   }
 
@@ -259,9 +259,8 @@ private CellScanner doCreateCellArray(final int itemsPerList) {
     return CellUtil.createCellScanner(cells);
   }
 
-  private CellScanner doCreateCellScanner(final int listsCount, final int itemsPerList)
-      throws IOException {
-    List<CellScannable> cells = new ArrayList<CellScannable>(listsCount);
+  private CellScanner doCreateCellScanner(final int listsCount, final int itemsPerList) {
+    List<CellScannable> cells = new ArrayList<>(listsCount);
     for (int i = 0; i < listsCount; i++) {
       CellScannable cs = new CellScannable() {
         @Override
@@ -276,7 +275,9 @@ public CellScanner cellScanner() {
 
   private void consume(final CellScanner scanner, final int expected) throws IOException {
     int count = 0;
-    while (scanner.advance()) count++;
+    while (scanner.advance()) {
+      count++;
+    }
     Assert.assertEquals(expected, count);
   }
 
@@ -384,7 +385,7 @@ public void testFindCommonPrefixInFlatKey() {
   @Test
   public void testToString() {
     byte [] row = Bytes.toBytes("row");
-    long ts = 123l;
+    long ts = 123L;
     // Make a KeyValue and a Cell and see if same toString result.
     KeyValue kv = new KeyValue(row, HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY,
       ts, KeyValue.Type.Minimum, HConstants.EMPTY_BYTE_ARRAY);
@@ -400,7 +401,6 @@ public void testToString() {
         HConstants.EMPTY_BYTE_ARRAY);
     cellToString = CellUtil.getCellKeyAsString(cell);
     assertEquals(kv.toString(), cellToString);
-
   }
 
   @Test
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
index 24a4c0fad2e9..a20fffb49ec9 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
@@ -53,7 +52,6 @@
 
 @Category(SmallTests.class)
 public class TestClassFinder {
-
   private static final Log LOG = LogFactory.getLog(TestClassFinder.class);
 
   @Rule public TestName name = new TestName();
@@ -89,7 +87,7 @@ public static void createTestDir() throws IOException {
   }
 
   @AfterClass
-  public static void deleteTestDir() throws IOException {
+  public static void deleteTestDir() {
     testUtil.cleanupTestDir(TestClassFinder.class.getSimpleName());
   }
 
@@ -185,8 +183,7 @@ public boolean isCandidateClass(Class<?> c) {
   }
 
   private static String createAndLoadJar(final String packageNameSuffix,
-      final String classNamePrefix, final long counter)
-      throws Exception {
+      final String classNamePrefix, final long counter) throws Exception {
     FileAndPath c1 = compileTestClass(counter, packageNameSuffix, classNamePrefix);
     FileAndPath c2 = compileTestClass(counter, packageNameSuffix, PREFIX + "1");
     FileAndPath c3 = compileTestClass(counter, packageNameSuffix, PREFIX + classNamePrefix + "2");
@@ -241,7 +238,9 @@ public void testClassFinderCanFindClassesInDirs() throws Exception {
 
   private static boolean contains(final Set<Class<?>> classes, final String simpleName) {
     for (Class<?> c: classes) {
-      if (c.getSimpleName().equals(simpleName)) return true;
+      if (c.getSimpleName().equals(simpleName)) {
+        return true;
+      }
     }
     return false;
   }
@@ -299,8 +298,7 @@ public boolean isCandidateClass(Class<?> c) {
   @Test
   public void testClassFinderFiltersByPathInDirs() throws Exception {
     final String hardcodedThisSubdir = "hbase-common";
-    final ClassFinder.ResourcePathFilter notExcJarFilter =
-        new ClassFinder.ResourcePathFilter() {
+    final ClassFinder.ResourcePathFilter notExcJarFilter = new ClassFinder.ResourcePathFilter() {
       @Override
       public boolean isCandidatePath(String resourcePath, boolean isJar) {
        return isJar || !resourcePath.contains(hardcodedThisSubdir);
@@ -383,7 +381,7 @@ private static String packageAndLoadJar(FileAndPath... filesInJar) throws Except
     // Directory entries for all packages have to be added explicitly for
     // resources to be findable via ClassLoader. Directory entries must end
     // with "/"; the initial one is expected to, also.
-    Set<String> pathsInJar = new HashSet<String>();
+    Set<String> pathsInJar = new HashSet<>();
     for (FileAndPath fileAndPath : filesInJar) {
       String pathToAdd = fileAndPath.path;
       while (pathsInJar.add(pathToAdd)) {
@@ -421,7 +419,6 @@ private static String packageAndLoadJar(FileAndPath... filesInJar) throws Except
 
   // Java 11 workaround - Custom class loader to expose addUrl method of URLClassLoader
   private static class CustomClassloader extends URLClassLoader {
-
     public CustomClassloader(URL[] urls, ClassLoader parentLoader) {
       super(urls, parentLoader);
     }
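Editor's note: the comment preserved in `packageAndLoadJar` above is the subtle requirement this test works around — class loaders only enumerate a package inside a jar when the jar carries explicit directory entries, each ending in "/". A standalone sketch of producing such a jar with the JDK's `JarOutputStream` (the jar, package, and file names are hypothetical):

```java
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;

public class JarWithDirEntries {
  public static void main(String[] args) throws IOException {
    try (JarOutputStream jos = new JarOutputStream(new FileOutputStream("demo.jar"))) {
      // Directory entries must end with "/" or class loaders will not
      // enumerate the package when scanning the jar for resources.
      jos.putNextEntry(new JarEntry("org/"));
      jos.closeEntry();
      jos.putNextEntry(new JarEntry("org/example/"));
      jos.closeEntry();
      // A regular file entry inside the package.
      jos.putNextEntry(new JarEntry("org/example/Hello.txt"));
      jos.write("hello".getBytes("UTF-8"));
      jos.closeEntry();
    }
  }
}
```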
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
index d8aed0445685..cfc676f082dd 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase;
 
 import static org.junit.Assert.assertEquals;
@@ -40,7 +39,6 @@
 
 @Category(SmallTests.class)
 public class TestHBaseConfiguration {
-
   private static final Log LOG = LogFactory.getLog(TestHBaseConfiguration.class);
 
   private static HBaseCommonTestingUtility UTIL = new HBaseCommonTestingUtility();
@@ -51,7 +49,7 @@ public static void tearDown() throws IOException {
   }
 
   @Test
-  public void testSubset() throws Exception {
+  public void testSubset() {
     Configuration conf = HBaseConfiguration.create();
     // subset is used in TableMapReduceUtil#initCredentials to support different security
     // configurations between source and destination clusters, so we'll use that as an example
@@ -126,7 +124,6 @@ private static class ReflectiveCredentialProviderClient {
 
     private static Object hadoopCredProviderFactory = null;
     private static Method getProvidersMethod = null;
-    private static Method getAliasesMethod = null;
     private static Method getCredentialEntryMethod = null;
     private static Method getCredentialMethod = null;
     private static Method createCredentialEntryMethod = null;
@@ -157,7 +154,7 @@ private boolean isHadoopCredentialProviderAvailable() {
      hadoopClassesAvailable = false;
 
      // Load Hadoop CredentialProviderFactory
-      Class<?> hadoopCredProviderFactoryClz = null;
+      Class<?> hadoopCredProviderFactoryClz;
      try {
        hadoopCredProviderFactoryClz = Class
            .forName(HADOOP_CRED_PROVIDER_FACTORY_CLASS_NAME);
@@ -177,13 +174,13 @@ private boolean isHadoopCredentialProviderAvailable() {
          HADOOP_CRED_PROVIDER_FACTORY_GET_PROVIDERS_METHOD_NAME, Configuration.class);
 
      // Load Hadoop CredentialProvider
-      Class<?> hadoopCredProviderClz = null;
+      Class<?> hadoopCredProviderClz;
      hadoopCredProviderClz = Class.forName(HADOOP_CRED_PROVIDER_CLASS_NAME);
      getCredentialEntryMethod = loadMethod(hadoopCredProviderClz,
          HADOOP_CRED_PROVIDER_GET_CREDENTIAL_ENTRY_METHOD_NAME, String.class);
 
-      getAliasesMethod = loadMethod(hadoopCredProviderClz,
-          HADOOP_CRED_PROVIDER_GET_ALIASES_METHOD_NAME);
+      Method getAliasesMethod =
+          loadMethod(hadoopCredProviderClz, HADOOP_CRED_PROVIDER_GET_ALIASES_METHOD_NAME);
 
      createCredentialEntryMethod = loadMethod(hadoopCredProviderClz,
          HADOOP_CRED_PROVIDER_CREATE_CREDENTIAL_ENTRY_METHOD_NAME,
@@ -193,7 +190,7 @@ private boolean isHadoopCredentialProviderAvailable() {
          HADOOP_CRED_PROVIDER_FLUSH_METHOD_NAME);
 
      // Load Hadoop CredentialEntry
-      Class<?> hadoopCredentialEntryClz = null;
+      Class<?> hadoopCredentialEntryClz;
      try {
        hadoopCredentialEntryClz = Class
            .forName(HADOOP_CRED_ENTRY_CLASS_NAME);
@@ -212,17 +209,15 @@ private boolean isHadoopCredentialProviderAvailable() {
      LOG.info("Credential provider classes have been" +
          " loaded and initialized successfully through reflection.");
      return true;
-
    }
 
    private Method loadMethod(Class<?> clz, String name, Class<?>... classes)
        throws Exception {
-      Method method = null;
+      Method method;
      try {
        method = clz.getMethod(name, classes);
      } catch (SecurityException e) {
-        fail("security exception caught for: " + name + " in " +
-            clz.getCanonicalName());
+        fail("security exception caught for: " + name + " in " + clz.getCanonicalName());
        throw e;
      } catch (NoSuchMethodException e) {
        LOG.error("Failed to load the " + name + ": " + e);
@@ -242,19 +237,11 @@ private Method loadMethod(Class<?> clz, String name, Class<?>... classes)
    @SuppressWarnings("unchecked")
    protected List<Object> getCredentialProviders(Configuration conf) {
      // Call CredentialProviderFactory.getProviders(Configuration)
-      Object providersObj = null;
+      Object providersObj;
      try {
        providersObj = getProvidersMethod.invoke(hadoopCredProviderFactory,
            conf);
-      } catch (IllegalArgumentException e) {
-        LOG.error("Failed to invoke: " + getProvidersMethod.getName() +
-            ": " + e);
-        return null;
-      } catch (IllegalAccessException e) {
-        LOG.error("Failed to invoke: " + getProvidersMethod.getName() +
-            ": " + e);
-        return null;
-      } catch (InvocationTargetException e) {
+      } catch (IllegalArgumentException | InvocationTargetException | IllegalAccessException e) {
        LOG.error("Failed to invoke: " + getProvidersMethod.getName() +
            ": " + e);
        return null;
@@ -281,7 +268,6 @@ protected List<Object> getCredentialProviders(Configuration conf) {
     */
    public void createEntry(Configuration conf, String name, char[] credential)
        throws Exception {
-
      if (!isHadoopCredentialProviderAvailable()) {
        return;
      }
@@ -311,30 +297,17 @@ public void createEntry(Configuration conf, String name, char[] credential)
     */
    private void createEntryInProvider(Object credentialProvider, String name,
        char[] credential) throws Exception {
-
      if (!isHadoopCredentialProviderAvailable()) {
        return;
      }
 
      try {
        createCredentialEntryMethod.invoke(credentialProvider, name, credential);
-      } catch (IllegalArgumentException e) {
-        return;
-      } catch (IllegalAccessException e) {
-        return;
-      } catch (InvocationTargetException e) {
+      } catch (IllegalArgumentException | InvocationTargetException | IllegalAccessException e) {
        return;
      }
 
-      try {
-        flushMethod.invoke(credentialProvider);
-      } catch (IllegalArgumentException e) {
-        throw e;
-      } catch (IllegalAccessException e) {
-        throw e;
-      } catch (InvocationTargetException e) {
-        throw e;
-      }
+      flushMethod.invoke(credentialProvider);
    }
  }
 }
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java
index 2a1569b7b68f..86658d54c3fa 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java
@@ -43,7 +43,6 @@
 
 @Category(SmallTests.class)
 public class TestCellCodecWithTags {
-
   @Test
   public void testCellWithTag() throws IOException {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -110,4 +109,4 @@ public void testCellWithTag() throws IOException {
     dis.close();
     assertEquals(offset, cis.getCount());
   }
-}
\ No newline at end of file
+}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java
index c217cfa0d02a..f41dffca4741 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java
@@ -43,7 +43,6 @@
 
 @Category(SmallTests.class)
 public class TestKeyValueCodecWithTags {
-
   @Test
   public void testKeyValueWithTag() throws IOException {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -110,4 +109,4 @@ public void testKeyValueWithTag() throws IOException {
     dis.close();
     assertEquals(offset, cis.getCount());
   }
-}
\ No newline at end of file
+}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java
index e31ab49a10b7..78a21fb2df30 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java
@@ -1,22 +1,24 @@
 /*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ *     http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
 package org.apache.hadoop.hbase.io.crypto;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -36,7 +38,6 @@
 
 @Category(SmallTests.class)
 public class TestEncryption {
-
   private static final Log LOG = LogFactory.getLog(TestEncryption.class);
 
   @Test
@@ -45,7 +46,7 @@ public void testSmallBlocks() throws Exception {
     Bytes.random(key);
     byte[] iv = new byte[16];
     Bytes.random(iv);
-    for (int size: new int[] { 4, 8, 16, 32, 64, 128, 256, 512 } ) {
+    for (int size: new int[] { 4, 8, 16, 32, 64, 128, 256, 512 }) {
       checkTransformSymmetry(key, iv, getRandomBlock(size));
     }
   }
@@ -56,7 +57,7 @@ public void testLargeBlocks() throws Exception {
     Bytes.random(key);
     byte[] iv = new byte[16];
     Bytes.random(iv);
-    for (int size: new int[] { 256 * 1024, 512 * 1024, 1024 * 1024 } ) {
+    for (int size: new int[] { 256 * 1024, 512 * 1024, 1024 * 1024 }) {
       checkTransformSymmetry(key, iv, getRandomBlock(size));
     }
   }
@@ -67,7 +68,7 @@ public void testOddSizedBlocks() throws Exception {
     Bytes.random(key);
     byte[] iv = new byte[16];
     Bytes.random(iv);
-    for (int size: new int[] { 3, 7, 11, 23, 47, 79, 119, 175 } ) {
+    for (int size: new int[] { 3, 7, 11, 23, 47, 79, 119, 175 }) {
       checkTransformSymmetry(key, iv, getRandomBlock(size));
     }
   }
@@ -78,7 +79,7 @@ public void testTypicalHFileBlocks() throws Exception {
     Bytes.random(key);
     byte[] iv = new byte[16];
     Bytes.random(iv);
-    for (int size: new int[] { 4 * 1024, 8 * 1024, 64 * 1024, 128 * 1024 } ) {
+    for (int size: new int[] { 4 * 1024, 8 * 1024, 64 * 1024, 128 * 1024 }) {
       checkTransformSymmetry(key, iv, getRandomBlock(size));
     }
   }
@@ -121,5 +122,4 @@ private byte[] getRandomBlock(int size) {
     Bytes.random(b);
     return b;
   }
-
 }
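Editor's note: `checkTransformSymmetry` above asserts that decrypting an encrypted block recovers the original bytes for every size tested, including odd sizes. A hedged, JDK-only sketch of the same roundtrip check, written against `javax.crypto` directly rather than HBase's `Encryption` facade — AES/CTR is chosen here because, as a stream mode, it needs no padding; the key, IV, and block sizes are illustrative:

```java
import java.security.SecureRandom;
import java.util.Arrays;
import javax.crypto.Cipher;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;

public class SymmetryCheck {
  public static void main(String[] args) throws Exception {
    SecureRandom rng = new SecureRandom();
    byte[] key = new byte[16];   // AES-128 key
    byte[] iv = new byte[16];    // CTR mode uses a 16-byte IV/counter block
    byte[] block = new byte[1024];
    rng.nextBytes(key);
    rng.nextBytes(iv);
    rng.nextBytes(block);

    Cipher c = Cipher.getInstance("AES/CTR/NoPadding");
    c.init(Cipher.ENCRYPT_MODE, new SecretKeySpec(key, "AES"), new IvParameterSpec(iv));
    byte[] encrypted = c.doFinal(block);

    c.init(Cipher.DECRYPT_MODE, new SecretKeySpec(key, "AES"), new IvParameterSpec(iv));
    byte[] decrypted = c.doFinal(encrypted);

    // Symmetry: decrypt(encrypt(x)) == x for any block size, since a stream
    // mode never pads the plaintext.
    if (!Arrays.equals(block, decrypted)) {
      throw new AssertionError("transform was not symmetric");
    }
  }
}
```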
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java
index 919001134878..df377291778a 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java
@@ -18,6 +18,7 @@
 
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -88,7 +89,6 @@ public RedundantKVGenerator() {
     );
   }
 
-
   /**
    * Various configuration options for generating key values
    * @param randomizer pick things by random
@@ -111,8 +111,7 @@ public RedundantKVGenerator(Random randomizer,
      float chanceForZeroValue,
 
      int baseTimestampDivide,
-      int timestampDiffSize
-      ) {
+      int timestampDiffSize) {
    this.randomizer = randomizer;
 
    this.commonPrefix = DEFAULT_COMMON_PREFIX;
@@ -140,33 +139,33 @@ public RedundantKVGenerator(Random randomizer,
   private Random randomizer;
 
   // row settings
-  private byte[] commonPrefix;//global prefix before rowPrefixes
+  private byte[] commonPrefix; //global prefix before rowPrefixes
   private int numberOfRowPrefixes;
-  private int averagePrefixLength = 6;
-  private int prefixLengthVariance = 3;
-  private int averageSuffixLength = 3;
-  private int suffixLengthVariance = 3;
-  private int numberOfRows = 500;
+  private int averagePrefixLength;
+  private int prefixLengthVariance;
+  private int averageSuffixLength;
+  private int suffixLengthVariance;
+  private int numberOfRows;
 
-  //family
+  // family
   private byte[] family;
 
   // qualifier
-  private float chanceForSameQualifier = 0.5f;
-  private float chanceForSimilarQualifier = 0.4f;
-  private int averageQualifierLength = 9;
-  private int qualifierLengthVariance = 3;
+  private float chanceForSameQualifier;
+  private float chanceForSimilarQualifier;
+  private int averageQualifierLength;
+  private int qualifierLengthVariance;
 
-  private int columnFamilyLength = 9;
-  private int valueLength = 8;
-  private float chanceForZeroValue = 0.5f;
+  private int columnFamilyLength;
+  private int valueLength;
+  private float chanceForZeroValue;
 
-  private int baseTimestampDivide = 1000000;
-  private int timestampDiffSize = 100000000;
+  private int baseTimestampDivide;
+  private int timestampDiffSize;
 
   private List<byte[]> generateRows() {
     // generate prefixes
-    List<byte[]> prefixes = new ArrayList<byte[]>();
+    List<byte[]> prefixes = new ArrayList<>();
     prefixes.add(new byte[0]);
     for (int i = 1; i < numberOfRowPrefixes; ++i) {
       int prefixLength = averagePrefixLength;
@@ -174,12 +173,11 @@ private List<byte[]> generateRows() {
          prefixLengthVariance;
       byte[] newPrefix = new byte[prefixLength];
       randomizer.nextBytes(newPrefix);
-      byte[] newPrefixWithCommon = newPrefix;
-      prefixes.add(newPrefixWithCommon);
+      prefixes.add(newPrefix);
     }
 
     // generate rest of the row
-    List<byte[]> rows = new ArrayList<byte[]>();
+    List<byte[]> rows = new ArrayList<>();
     for (int i = 0; i < numberOfRows; ++i) {
       int suffixLength = averageSuffixLength;
       suffixLength += randomizer.nextInt(2 * suffixLengthVariance + 1) -
@@ -202,16 +200,17 @@ public List<KeyValue> generateTestKeyValues(int howMany) {
     return generateTestKeyValues(howMany, false);
   }
+
   /**
    * Generate test data useful to test encoders.
    * @param howMany How many Key values should be generated.
    * @return sorted list of key values
    */
   public List<KeyValue> generateTestKeyValues(int howMany, boolean useTags) {
-    List<KeyValue> result = new ArrayList<KeyValue>();
+    List<KeyValue> result = new ArrayList<>();
     List<byte[]> rows = generateRows();
-    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<Integer, List<byte[]>>();
+    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<>();
 
     if(family==null){
       family = new byte[columnFamilyLength];
@@ -268,16 +267,14 @@ public List<KeyValue> generateTestKeyValues(int howMany, boolean useTags) {
       }
 
       if (randomizer.nextFloat() < chanceForZeroValue) {
-        for (int j = 0; j < value.length; ++j) {
-          value[j] = (byte) 0;
-        }
+        Arrays.fill(value, (byte) 0);
       } else {
         randomizer.nextBytes(value);
       }
 
       if (useTags) {
-        result.add(new KeyValue(row, family, qualifier, timestamp, value, new Tag[] { new Tag(
-            (byte) 1, "value1") }));
+        result.add(new KeyValue(row, family, qualifier, timestamp, value, new Tag[] {
+          new Tag((byte) 1, "value1") }));
       } else {
         result.add(new KeyValue(row, family, qualifier, timestamp, value));
       }
@@ -313,97 +310,9 @@ public static ByteBuffer convertKvToByteBuffer(List<KeyValue> keyValues,
     return result;
   }
 
-  /************************ get/set ***********************************/
-
-  public RedundantKVGenerator setCommonPrefix(byte[] prefix){
-    this.commonPrefix = prefix;
-    return this;
-  }
-
-  public RedundantKVGenerator setRandomizer(Random randomizer) {
-    this.randomizer = randomizer;
-    return this;
-  }
-
-  public RedundantKVGenerator setNumberOfRowPrefixes(int numberOfRowPrefixes) {
-    this.numberOfRowPrefixes = numberOfRowPrefixes;
-    return this;
-  }
-
-  public RedundantKVGenerator setAveragePrefixLength(int averagePrefixLength) {
-    this.averagePrefixLength = averagePrefixLength;
-    return this;
-  }
-
-  public RedundantKVGenerator setPrefixLengthVariance(int prefixLengthVariance) {
-    this.prefixLengthVariance = prefixLengthVariance;
-    return this;
-  }
-
-  public RedundantKVGenerator setAverageSuffixLength(int averageSuffixLength) {
-    this.averageSuffixLength = averageSuffixLength;
-    return this;
-  }
-
-  public RedundantKVGenerator setSuffixLengthVariance(int suffixLengthVariance) {
-    this.suffixLengthVariance = suffixLengthVariance;
-    return this;
-  }
-
-  public RedundantKVGenerator setNumberOfRows(int numberOfRows) {
-    this.numberOfRows = numberOfRows;
-    return this;
-  }
-
-  public RedundantKVGenerator setChanceForSameQualifier(float chanceForSameQualifier) {
-    this.chanceForSameQualifier = chanceForSameQualifier;
-    return this;
-  }
-
-  public RedundantKVGenerator setChanceForSimilarQualifier(float chanceForSimiliarQualifier) {
-    this.chanceForSimilarQualifier = chanceForSimiliarQualifier;
-    return this;
-  }
-
-  public RedundantKVGenerator setAverageQualifierLength(int averageQualifierLength) {
-    this.averageQualifierLength = averageQualifierLength;
-    return this;
-  }
-
-  public RedundantKVGenerator setQualifierLengthVariance(int qualifierLengthVariance) {
-    this.qualifierLengthVariance = qualifierLengthVariance;
-    return this;
-  }
-
-  public RedundantKVGenerator setColumnFamilyLength(int columnFamilyLength) {
-    this.columnFamilyLength = columnFamilyLength;
-    return this;
-  }
-
   public RedundantKVGenerator setFamily(byte[] family) {
     this.family = family;
     this.columnFamilyLength = family.length;
     return this;
   }
-
-  public RedundantKVGenerator setValueLength(int valueLength) {
-    this.valueLength = valueLength;
-    return this;
-  }
-
-  public RedundantKVGenerator setChanceForZeroValue(float chanceForZeroValue) {
-    this.chanceForZeroValue = chanceForZeroValue;
-    return this;
-  }
-
-  public RedundantKVGenerator setBaseTimestampDivide(int baseTimestampDivide) {
-    this.baseTimestampDivide = baseTimestampDivide;
-    return this;
-  }
-
-  public RedundantKVGenerator setTimestampDiffSize(int timestampDiffSize) {
-    this.timestampDiffSize = timestampDiffSize;
-    return this;
-  }
-
 }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 9a8a90258d9d..90ed49ccb4fa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -1187,10 +1187,9 @@ public void shutdownMiniCluster() throws Exception {
 
   /**
    * @return True if we removed the test dirs
-   * @throws IOException
    */
   @Override
-  public boolean cleanupTestDir() throws IOException {
+  public boolean cleanupTestDir() {
     boolean ret = super.cleanupTestDir();
     if (deleteDir(this.clusterTestDir)) {
       this.clusterTestDir = null;
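Editor's note on the `cleanupTestDir` and `deleteDir` signature changes across this patch: Java permits an overriding method to declare fewer checked exceptions than the method it overrides (only adding new checked exceptions is rejected), so the base-class methods in `HBaseCommonTestingUtility` and the `HBaseTestingUtility` override can both drop `throws IOException` without forcing changes on callers. A small self-contained illustration (class names are hypothetical):

```java
import java.io.IOException;

class BaseUtil {
  // Wider contract: callers must handle or propagate IOException.
  public boolean cleanup() throws IOException {
    return true;
  }
}

class ServerUtil extends BaseUtil {
  // Legal: an override may narrow the throws clause, or drop it entirely.
  @Override
  public boolean cleanup() {
    return true;
  }
}

public class NarrowThrowsDemo {
  public static void main(String[] args) {
    // Through the subclass's static type, no try/catch is required.
    System.out.println(new ServerUtil().cleanup());
  }
}
```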