diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java index b8ec0ddc7418..b299fab47e2e 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java @@ -15,7 +15,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.apache.hadoop.hbase; import java.io.File; @@ -68,9 +67,17 @@ public static class Not implements ResourcePathFilter, FileNameFilter, ClassFilt private FileNameFilter fileNameFilter; private ClassFilter classFilter; - public Not(ResourcePathFilter resourcePathFilter){this.resourcePathFilter = resourcePathFilter;} - public Not(FileNameFilter fileNameFilter){this.fileNameFilter = fileNameFilter;} - public Not(ClassFilter classFilter){this.classFilter = classFilter;} + public Not(ResourcePathFilter resourcePathFilter) { + this.resourcePathFilter = resourcePathFilter; + } + + public Not(FileNameFilter fileNameFilter) { + this.fileNameFilter = fileNameFilter; + } + + public Not(ClassFilter classFilter) { + this.classFilter = classFilter; + } @Override public boolean isCandidatePath(String resourcePath, boolean isJar) { @@ -90,7 +97,10 @@ public static class And implements ClassFilter, ResourcePathFilter { ClassFilter[] classFilters; ResourcePathFilter[] resourcePathFilters; - public And(ClassFilter...classFilters) { this.classFilters = classFilters; } + public And(ClassFilter...classFilters) { + this.classFilters = classFilters; + } + public And(ResourcePathFilter... resourcePathFilters) { this.resourcePathFilters = resourcePathFilters; } @@ -120,10 +130,6 @@ public ClassFinder(ClassLoader classLoader) { this(null, null, null, classLoader); } - public ClassFinder() { - this(ClassLoader.getSystemClassLoader()); - } - public ClassFinder(ResourcePathFilter resourcePathFilter, FileNameFilter fileNameFilter, ClassFilter classFilter) { this(resourcePathFilter, fileNameFilter, classFilter, ClassLoader.getSystemClassLoader()); @@ -193,7 +199,7 @@ public Set> findClasses(String packageName, boolean proceedOnExceptions private Set> findClassesFromJar(String jarFileName, String packageName, boolean proceedOnExceptions) throws IOException, ClassNotFoundException, LinkageError { - JarInputStream jarFile = null; + JarInputStream jarFile; try { jarFile = new JarInputStream(new FileInputStream(jarFileName)); } catch (IOException ioEx) { @@ -202,7 +208,7 @@ private Set> findClassesFromJar(String jarFileName, } Set> classes = new HashSet<>(); - JarEntry entry = null; + JarEntry entry; try { while (true) { try { @@ -285,16 +291,11 @@ private Class makeClass(String className, boolean proceedOnExceptions) Class c = Class.forName(className, false, classLoader); boolean isCandidateClass = null == classFilter || classFilter.isCandidateClass(c); return isCandidateClass ? 
c : null; - } catch (NoClassDefFoundError|ClassNotFoundException classNotFoundEx) { - if (!proceedOnExceptions) { - throw classNotFoundEx; - } - LOG.debug("Failed to instantiate or check " + className + ": " + classNotFoundEx); - } catch (LinkageError linkageEx) { + } catch (ClassNotFoundException | LinkageError exception) { if (!proceedOnExceptions) { - throw linkageEx; + throw exception; } - LOG.debug("Failed to instantiate or check " + className + ": " + linkageEx); + LOG.debug("Failed to instantiate or check " + className + ": " + exception); } return null; } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java index a7ae0a5c44e9..122dd2a198e3 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java @@ -37,29 +37,34 @@ /** * Common helpers for testing HBase that do not depend on specific server/etc. things. * @see org.apache.hadoop.hbase.HBaseCommonTestingUtility - * */ @InterfaceAudience.Public public class HBaseCommonTestingUtility { protected static final Logger LOG = LoggerFactory.getLogger(HBaseCommonTestingUtility.class); - /** Compression algorithms to use in parameterized JUnit 4 tests */ + /** + * Compression algorithms to use in parameterized JUnit 4 tests + */ public static final List COMPRESSION_ALGORITHMS_PARAMETERIZED = Arrays.asList(new Object[][] { { Compression.Algorithm.NONE }, { Compression.Algorithm.GZ } }); - /** This is for unit tests parameterized with a two booleans. */ + /** + * This is for unit tests parameterized with two booleans. + */ public static final List BOOLEAN_PARAMETERIZED = Arrays.asList(new Object[][] { {false}, {true} }); - /** Compression algorithms to use in testing */ + /** + * Compression algorithms to use in testing + */ public static final Compression.Algorithm[] COMPRESSION_ALGORITHMS = { - Compression.Algorithm.NONE, Compression.Algorithm.GZ + Compression.Algorithm.NONE, Compression.Algorithm.GZ }; protected Configuration conf; @@ -98,9 +103,8 @@ public Configuration getConfiguration() { private File dataTestDir = null; /** - * @return Where to write test data on local filesystem, specific to - * the test. Useful for tests that do not use a cluster. - * Creates it if it does not exist already. + * @return Where to write test data on local filesystem, specific to the test. Useful for tests + * that do not use a cluster. Creates it if it does not exist already. */ public Path getDataTestDir() { if (this.dataTestDir == null) { @@ -110,10 +114,9 @@ public Path getDataTestDir() { } /** - * @param subdirName - * @return Path to a subdirectory named subdirName under - * {@link #getDataTestDir()}. - * Does *NOT* create it if it does not exist. + * @param subdirName the name of the subdirectory in the test data directory + * @return Path to a subdirectory named {@code subdirName} under + * {@link #getDataTestDir()}. Does *NOT* create it if it does not exist. */ public Path getDataTestDir(final String subdirName) { return new Path(getDataTestDir(), subdirName); @@ -134,7 +137,10 @@ protected Path setupDataTestDir() { this.dataTestDir = new File(testPath.toString()).getAbsoluteFile(); // Set this property so if mapreduce jobs run, they will use this as their home dir.
System.setProperty("test.build.dir", this.dataTestDir.toString()); - if (deleteOnExit()) this.dataTestDir.deleteOnExit(); + + if (deleteOnExit()) { + this.dataTestDir.deleteOnExit(); + } createSubDir("hbase.local.dir", testPath, "hbase-local-dir"); @@ -154,11 +160,14 @@ public UUID getRandomUUID() { ThreadLocalRandom.current().nextLong()); } - protected void createSubDir(String propertyName, Path parent, String subDirName) { Path newPath = new Path(parent, subDirName); File newDir = new File(newPath.toString()).getAbsoluteFile(); - if (deleteOnExit()) newDir.deleteOnExit(); + + if (deleteOnExit()) { + newDir.deleteOnExit(); + } + conf.set(propertyName, newDir.getAbsolutePath()); } @@ -173,9 +182,8 @@ boolean deleteOnExit() { /** * @return True if we removed the test dirs - * @throws IOException */ - public boolean cleanupTestDir() throws IOException { + public boolean cleanupTestDir() { if (deleteDir(this.dataTestDir)) { this.dataTestDir = null; return true; @@ -186,9 +194,8 @@ public boolean cleanupTestDir() { /** * @param subdir Test subdir name. * @return True if we removed the test dir - * @throws IOException */ - boolean cleanupTestDir(final String subdir) throws IOException { + boolean cleanupTestDir(final String subdir) { if (this.dataTestDir == null) { return false; } @@ -197,9 +204,9 @@ boolean cleanupTestDir(final String subdir) throws IOException { /** * @return Where to write test data on local filesystem; usually - * {@link #DEFAULT_BASE_TEST_DIRECTORY} - * Should not be used by the unit tests, hence its's private. - * Unit test will use a subdirectory of this directory. + * {@link #DEFAULT_BASE_TEST_DIRECTORY} + * Should not be used by the unit tests, hence it's private. + * Unit tests will use a subdirectory of this directory. * @see #setupDataTestDir() */ private Path getBaseTestDir() { @@ -212,9 +219,8 @@ private Path getBaseTestDir() { /** * @param dir Directory to delete * @return True if we deleted it. - * @throws IOException */ - boolean deleteDir(final File dir) throws IOException { + boolean deleteDir(final File dir) { if (dir == null || !dir.exists()) { return true; } @@ -222,7 +228,10 @@ boolean deleteDir(final File dir) throws IOException { do { ntries += 1; try { - if (deleteOnExit()) FileUtils.deleteDirectory(dir); + if (deleteOnExit()) { + FileUtils.deleteDirectory(dir); + } + return true; } catch (IOException ex) { LOG.warn("Failed to delete " + dir.getAbsolutePath()); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java index b42db95cfcdd..d1513f1d91b6 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ResourceChecker.java @@ -16,7 +16,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.apache.hadoop.hbase; import java.util.ArrayList; @@ -47,7 +46,6 @@ public ResourceChecker(final String tagLine) { this.tagLine = tagLine; } - /** * Class to implement for each type of resource. */ @@ -84,21 +82,22 @@ public String getName() { /** * The value for the resource.
- * @param phase + * @param phase the {@link Phase} to get the value for */ abstract public int getVal(Phase phase); /* * Retrieves List of Strings which would be logged in logEndings() */ - public List getStringsToLog() { return null; } + public List getStringsToLog() { + return null; + } } private List ras = new ArrayList<>(); private int[] initialValues; private int[] endingValues; - private void fillInit() { initialValues = new int[ras.size()]; fill(Phase.INITIAL, initialValues); @@ -142,7 +141,11 @@ private void logInit() { StringBuilder sb = new StringBuilder(); for (ResourceAnalyzer ra : ras) { int cur = initialValues[i++]; - if (sb.length() > 0) sb.append(", "); + + if (sb.length() > 0) { + sb.append(", "); + } + sb.append(ra.getName()).append("=").append(cur); } LOG.info("before: " + tagLine + " " + sb); @@ -157,7 +160,11 @@ private void logEndings() { for (ResourceAnalyzer ra : ras) { int curP = initialValues[i]; int curN = endingValues[i++]; - if (sb.length() > 0) sb.append(", "); + + if (sb.length() > 0) { + sb.append(", "); + } + sb.append(ra.getName()).append("=").append(curN).append(" (was ").append(curP).append(")"); if (curN > curP) { List strings = ra.getStringsToLog(); @@ -172,7 +179,6 @@ private void logEndings() { LOG.info("after: " + tagLine + " " + sb); } - /** * To be called as the beginning of a test method: * - measure the resources diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java index 0ec62c1876f2..fc162fd34ec4 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java @@ -41,7 +41,6 @@ @Category({MiscTests.class, SmallTests.class}) public class TestCellUtil { - @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestCellUtil.class); @@ -78,7 +77,7 @@ public Cell current() { } @Override - public boolean advance() throws IOException { + public boolean advance() { if (this.count < cellsCount) { this.current = new TestCell(this.count); this.count++; @@ -219,13 +218,13 @@ public long heapSize() { */ @Test public void testCreateCellScannerOverflow() throws IOException { - consume(doCreateCellScanner(1, 1), 1 * 1); - consume(doCreateCellScanner(3, 0), 3 * 0); + consume(doCreateCellScanner(1, 1), 1); + consume(doCreateCellScanner(3, 0), 0); consume(doCreateCellScanner(3, 3), 3 * 3); - consume(doCreateCellScanner(0, 1), 0 * 1); + consume(doCreateCellScanner(0, 1), 0); // Do big number. See HBASE-11813 for why. 
final int hundredK = 100000; - consume(doCreateCellScanner(hundredK, 0), hundredK * 0); + consume(doCreateCellScanner(hundredK, 0), 0); consume(doCreateCellArray(1), 1); consume(doCreateCellArray(0), 0); consume(doCreateCellArray(3), 3); @@ -233,14 +232,14 @@ public void testCreateCellScannerOverflow() throws IOException { for (int i = 0; i < hundredK; i++) { cells.add(new TestCellScannable(1)); } - consume(CellUtil.createCellScanner(cells), hundredK * 1); + consume(CellUtil.createCellScanner(cells), hundredK); NavigableMap> m = new TreeMap<>(Bytes.BYTES_COMPARATOR); List cellArray = new ArrayList<>(hundredK); for (int i = 0; i < hundredK; i++) { cellArray.add(new TestCell(i)); } m.put(new byte [] {'f'}, cellArray); - consume(CellUtil.createCellScanner(m), hundredK * 1); + consume(CellUtil.createCellScanner(m), hundredK); } private CellScanner doCreateCellArray(final int itemsPerList) { @@ -251,8 +250,7 @@ private CellScanner doCreateCellArray(final int itemsPerList) { return CellUtil.createCellScanner(cells); } - private CellScanner doCreateCellScanner(final int listsCount, final int itemsPerList) - throws IOException { + private CellScanner doCreateCellScanner(final int listsCount, final int itemsPerList) { List cells = new ArrayList<>(listsCount); for (int i = 0; i < listsCount; i++) { CellScannable cs = new CellScannable() { @@ -554,11 +552,10 @@ public void testWriteCell() throws IOException { // Workaround for jdk 11 - reflective access to interface default methods for testGetType private abstract class CellForMockito implements Cell { - } @Test - public void testGetType() throws IOException { + public void testGetType() { CellForMockito c = Mockito.mock(CellForMockito.class); Mockito.when(c.getType()).thenCallRealMethod(); for (CellForMockito.Type type : CellForMockito.Type.values()) { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java index 21a092b06651..b1c090322479 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java @@ -52,7 +52,6 @@ @Category({MiscTests.class, SmallTests.class}) public class TestClassFinder { - @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestClassFinder.class); @@ -92,7 +91,7 @@ public static void createTestDir() throws IOException { } @AfterClass - public static void deleteTestDir() throws IOException { + public static void deleteTestDir() { testUtil.cleanupTestDir(TestClassFinder.class.getSimpleName()); } @@ -153,12 +152,8 @@ public void testClassFinderFiltersByNameInJar() throws Exception { final String classNamePrefix = name.getMethodName(); LOG.info("Created jar " + createAndLoadJar("", classNamePrefix, counter)); - ClassFinder.FileNameFilter notExcNameFilter = new ClassFinder.FileNameFilter() { - @Override - public boolean isCandidateFile(String fileName, String absFilePath) { - return !fileName.startsWith(PREFIX); - } - }; + ClassFinder.FileNameFilter notExcNameFilter = + (fileName, absFilePath) -> !fileName.startsWith(PREFIX); ClassFinder incClassesFinder = new ClassFinder(null, notExcNameFilter, null, classLoader); Set> incClasses = incClassesFinder.findClasses( makePackageName("", counter), false); @@ -173,12 +168,7 @@ public void testClassFinderFiltersByClassInJar() throws Exception { final String classNamePrefix = name.getMethodName(); LOG.info("Created jar " + createAndLoadJar("", 
classNamePrefix, counter)); - final ClassFinder.ClassFilter notExcClassFilter = new ClassFinder.ClassFilter() { - @Override - public boolean isCandidateClass(Class c) { - return !c.getSimpleName().startsWith(PREFIX); - } - }; + final ClassFinder.ClassFilter notExcClassFilter = c -> !c.getSimpleName().startsWith(PREFIX); ClassFinder incClassesFinder = new ClassFinder(null, null, notExcClassFilter, classLoader); Set> incClasses = incClassesFinder.findClasses( makePackageName("", counter), false); @@ -188,8 +178,7 @@ public boolean isCandidateClass(Class c) { } private static String createAndLoadJar(final String packageNameSuffix, - final String classNamePrefix, final long counter) - throws Exception { + final String classNamePrefix, final long counter) throws Exception { FileAndPath c1 = compileTestClass(counter, packageNameSuffix, classNamePrefix); FileAndPath c2 = compileTestClass(counter, packageNameSuffix, PREFIX + "1"); FileAndPath c3 = compileTestClass(counter, packageNameSuffix, PREFIX + classNamePrefix + "2"); @@ -212,12 +201,7 @@ public void testClassFinderFiltersByPathInJar() throws Exception { new File(excludedJar).toURI().getRawSchemeSpecificPart(); final ClassFinder.ResourcePathFilter notExcJarFilter = - new ClassFinder.ResourcePathFilter() { - @Override - public boolean isCandidatePath(String resourcePath, boolean isJar) { - return !isJar || !resourcePath.equals(excludedJarResource); - } - }; + (resourcePath, isJar) -> !isJar || !resourcePath.equals(excludedJarResource); ClassFinder incClassesFinder = new ClassFinder(notExcJarFilter, null, null, classLoader); Set> incClasses = incClassesFinder.findClasses( makePackageName("", counter), false); @@ -244,7 +228,9 @@ public void testClassFinderCanFindClassesInDirs() throws Exception { private static boolean contains(final Set> classes, final String simpleName) { for (Class c: classes) { - if (c.getSimpleName().equals(simpleName)) return true; + if (c.getSimpleName().equals(simpleName)) { + return true; + } } return false; } @@ -258,12 +244,8 @@ public void testClassFinderFiltersByNameInDirs() throws Exception { String pkgNameSuffix = name.getMethodName(); LOG.info("Created jar " + createAndLoadJar(pkgNameSuffix, classNamePrefix, counter)); final String classNameToFilterOut = classNamePrefix + counter; - final ClassFinder.FileNameFilter notThisFilter = new ClassFinder.FileNameFilter() { - @Override - public boolean isCandidateFile(String fileName, String absFilePath) { - return !fileName.equals(classNameToFilterOut + ".class"); - } - }; + final ClassFinder.FileNameFilter notThisFilter = + (fileName, absFilePath) -> !fileName.equals(classNameToFilterOut + ".class"); String pkgName = makePackageName(pkgNameSuffix, counter); ClassFinder allClassesFinder = new ClassFinder(classLoader); Set> allClasses = allClassesFinder.findClasses(pkgName, false); @@ -283,12 +265,7 @@ public void testClassFinderFiltersByClassInDirs() throws Exception { String pkgNameSuffix = name.getMethodName(); LOG.info("Created jar " + createAndLoadJar(pkgNameSuffix, classNamePrefix, counter)); final Class clazz = makeClass(pkgNameSuffix, classNamePrefix, counter); - final ClassFinder.ClassFilter notThisFilter = new ClassFinder.ClassFilter() { - @Override - public boolean isCandidateClass(Class c) { - return c != clazz; - } - }; + final ClassFinder.ClassFilter notThisFilter = c -> c != clazz; String pkgName = makePackageName(pkgNameSuffix, counter); ClassFinder allClassesFinder = new ClassFinder(classLoader); Set> allClasses = allClassesFinder.findClasses(pkgName, 
false); @@ -303,12 +280,7 @@ public boolean isCandidateClass(Class c) { public void testClassFinderFiltersByPathInDirs() throws Exception { final String hardcodedThisSubdir = "hbase-common"; final ClassFinder.ResourcePathFilter notExcJarFilter = - new ClassFinder.ResourcePathFilter() { - @Override - public boolean isCandidatePath(String resourcePath, boolean isJar) { - return isJar || !resourcePath.contains(hardcodedThisSubdir); - } - }; + (resourcePath, isJar) -> isJar || !resourcePath.contains(hardcodedThisSubdir); String thisPackage = this.getClass().getPackage().getName(); ClassFinder notThisClassFinder = new ClassFinder(notExcJarFilter, null, null, classLoader); Set> notAllClasses = notThisClassFinder.findClasses(thisPackage, false); @@ -424,7 +396,6 @@ private static String packageAndLoadJar(FileAndPath... filesInJar) throws Except // Java 11 workaround - Custom class loader to expose addUrl method of URLClassLoader private static class CustomClassloader extends URLClassLoader { - public CustomClassloader(URL[] urls, ClassLoader parentLoader) { super(urls, parentLoader); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java index 249db515912a..184cdf639f16 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java @@ -40,7 +40,6 @@ @Category({MiscTests.class, SmallTests.class}) public class TestHBaseConfiguration { - @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestHBaseConfiguration.class); @@ -55,7 +54,7 @@ public static void tearDown() throws IOException { } @Test - public void testSubset() throws Exception { + public void testSubset() { Configuration conf = HBaseConfiguration.create(); // subset is used in TableMapReduceUtil#initCredentials to support different security // configurations between source and destination clusters, so we'll use that as an example @@ -130,7 +129,6 @@ private static class ReflectiveCredentialProviderClient { private static Object hadoopCredProviderFactory = null; private static Method getProvidersMethod = null; - private static Method getAliasesMethod = null; private static Method getCredentialEntryMethod = null; private static Method getCredentialMethod = null; private static Method createCredentialEntryMethod = null; @@ -161,7 +159,7 @@ private boolean isHadoopCredentialProviderAvailable() { hadoopClassesAvailable = false; // Load Hadoop CredentialProviderFactory - Class hadoopCredProviderFactoryClz = null; + Class hadoopCredProviderFactoryClz; try { hadoopCredProviderFactoryClz = Class .forName(HADOOP_CRED_PROVIDER_FACTORY_CLASS_NAME); @@ -181,13 +179,13 @@ private boolean isHadoopCredentialProviderAvailable() { HADOOP_CRED_PROVIDER_FACTORY_GET_PROVIDERS_METHOD_NAME, Configuration.class); // Load Hadoop CredentialProvider - Class hadoopCredProviderClz = null; + Class hadoopCredProviderClz; hadoopCredProviderClz = Class.forName(HADOOP_CRED_PROVIDER_CLASS_NAME); getCredentialEntryMethod = loadMethod(hadoopCredProviderClz, HADOOP_CRED_PROVIDER_GET_CREDENTIAL_ENTRY_METHOD_NAME, String.class); - getAliasesMethod = loadMethod(hadoopCredProviderClz, - HADOOP_CRED_PROVIDER_GET_ALIASES_METHOD_NAME); + Method getAliasesMethod = + loadMethod(hadoopCredProviderClz, HADOOP_CRED_PROVIDER_GET_ALIASES_METHOD_NAME); createCredentialEntryMethod = loadMethod(hadoopCredProviderClz, 
HADOOP_CRED_PROVIDER_CREATE_CREDENTIAL_ENTRY_METHOD_NAME, @@ -197,7 +195,7 @@ private boolean isHadoopCredentialProviderAvailable() { HADOOP_CRED_PROVIDER_FLUSH_METHOD_NAME); // Load Hadoop CredentialEntry - Class hadoopCredentialEntryClz = null; + Class hadoopCredentialEntryClz; try { hadoopCredentialEntryClz = Class .forName(HADOOP_CRED_ENTRY_CLASS_NAME); @@ -216,17 +214,15 @@ private boolean isHadoopCredentialProviderAvailable() { LOG.info("Credential provider classes have been" + " loaded and initialized successfully through reflection."); return true; - } private Method loadMethod(Class clz, String name, Class... classes) throws Exception { - Method method = null; + Method method; try { method = clz.getMethod(name, classes); } catch (SecurityException e) { - fail("security exception caught for: " + name + " in " + - clz.getCanonicalName()); + fail("security exception caught for: " + name + " in " + clz.getCanonicalName()); throw e; } catch (NoSuchMethodException e) { LOG.error("Failed to load the " + name + ": " + e); @@ -246,19 +242,11 @@ private Method loadMethod(Class clz, String name, Class... classes) @SuppressWarnings("unchecked") protected List getCredentialProviders(Configuration conf) { // Call CredentialProviderFactory.getProviders(Configuration) - Object providersObj = null; + Object providersObj; try { providersObj = getProvidersMethod.invoke(hadoopCredProviderFactory, conf); - } catch (IllegalArgumentException e) { - LOG.error("Failed to invoke: " + getProvidersMethod.getName() + - ": " + e); - return null; - } catch (IllegalAccessException e) { - LOG.error("Failed to invoke: " + getProvidersMethod.getName() + - ": " + e); - return null; - } catch (InvocationTargetException e) { + } catch (IllegalArgumentException | InvocationTargetException | IllegalAccessException e) { LOG.error("Failed to invoke: " + getProvidersMethod.getName() + ": " + e); return null; @@ -285,7 +273,6 @@ protected List getCredentialProviders(Configuration conf) { */ public void createEntry(Configuration conf, String name, char[] credential) throws Exception { - if (!isHadoopCredentialProviderAvailable()) { return; } @@ -315,30 +302,17 @@ public void createEntry(Configuration conf, String name, char[] credential) */ private void createEntryInProvider(Object credentialProvider, String name, char[] credential) throws Exception { - if (!isHadoopCredentialProviderAvailable()) { return; } try { createCredentialEntryMethod.invoke(credentialProvider, name, credential); - } catch (IllegalArgumentException e) { - return; - } catch (IllegalAccessException e) { - return; - } catch (InvocationTargetException e) { + } catch (IllegalArgumentException | InvocationTargetException | IllegalAccessException e) { return; } - try { - flushMethod.invoke(credentialProvider); - } catch (IllegalArgumentException e) { - throw e; - } catch (IllegalAccessException e) { - throw e; - } catch (InvocationTargetException e) { - throw e; - } + flushMethod.invoke(credentialProvider); } } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java index 5687d23d8289..d1afc6884ee7 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java @@ -47,7 +47,6 @@ @Category({MiscTests.class, SmallTests.class}) public class TestCellCodecWithTags { - @ClassRule public static final 
HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestCellCodecWithTags.class); @@ -61,16 +60,16 @@ public void testCellWithTag() throws IOException { Codec.Encoder encoder = codec.getEncoder(dos); final Cell cell1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("1"), new Tag[] { - new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring1")), - new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring2")) }); + new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring1")), + new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring2")) }); final Cell cell2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"), new Tag[] { new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring3")), }); final Cell cell3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("3"), new Tag[] { - new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring4")), - new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring5")), - new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring6")) }); + new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring4")), + new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring5")), + new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring6")) }); encoder.write(cell1); encoder.write(cell2); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java index 37336d1602bc..e541c69a2dd8 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java @@ -47,7 +47,6 @@ @Category({MiscTests.class, SmallTests.class}) public class TestKeyValueCodecWithTags { - @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestKeyValueCodecWithTags.class); @@ -61,16 +60,16 @@ public void testKeyValueWithTag() throws IOException { Codec.Encoder encoder = codec.getEncoder(dos); final KeyValue kv1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("1"), new Tag[] { - new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring1")), - new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring2")) }); + new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring1")), + new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring2")) }); final KeyValue kv2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"), new Tag[] { new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring3")), }); final KeyValue kv3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("3"), new Tag[] { - new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring4")), - new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring5")), - new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring6")) }); + new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring4")), + new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring5")), + new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring6")) }); encoder.write(kv1); encoder.write(kv2); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java index 2a468897f2e3..829be39f6120 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestEncryption.java @@ -39,7 +39,6 @@ @Category({MiscTests.class, SmallTests.class}) public class TestEncryption { - @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestEncryption.class); @@ -52,7 +51,7 @@ public void testSmallBlocks() throws Exception { Bytes.random(key); byte[] iv = new byte[16]; Bytes.random(iv); - for (int size: new int[] { 4, 8, 16, 32, 64, 128, 256, 512 } ) { + for (int size: new int[] { 4, 8, 16, 32, 64, 128, 256, 512 }) { checkTransformSymmetry(key, iv, getRandomBlock(size)); } } @@ -63,7 +62,7 @@ public void testLargeBlocks() throws Exception { Bytes.random(key); byte[] iv = new byte[16]; Bytes.random(iv); - for (int size: new int[] { 256 * 1024, 512 * 1024, 1024 * 1024 } ) { + for (int size: new int[] { 256 * 1024, 512 * 1024, 1024 * 1024 }) { checkTransformSymmetry(key, iv, getRandomBlock(size)); } } @@ -74,7 +73,7 @@ public void testOddSizedBlocks() throws Exception { Bytes.random(key); byte[] iv = new byte[16]; Bytes.random(iv); - for (int size: new int[] { 3, 7, 11, 23, 47, 79, 119, 175 } ) { + for (int size: new int[] { 3, 7, 11, 23, 47, 79, 119, 175 }) { checkTransformSymmetry(key, iv, getRandomBlock(size)); } } @@ -85,7 +84,7 @@ public void testTypicalHFileBlocks() throws Exception { Bytes.random(key); byte[] iv = new byte[16]; Bytes.random(iv); - for (int size: new int[] { 4 * 1024, 8 * 1024, 64 * 1024, 128 * 1024 } ) { + for (int size: new int[] { 4 * 1024, 8 * 1024, 64 * 1024, 128 * 1024 }) { checkTransformSymmetry(key, iv, getRandomBlock(size)); } } @@ -128,5 +127,4 @@ private byte[] getRandomBlock(int size) { Bytes.random(b); return b; } - } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java index 61ecb4f832b0..d7f486e1635d 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RedundantKVGenerator.java @@ -18,7 +18,7 @@ import java.nio.ByteBuffer; import java.util.ArrayList; -import java.util.Collections; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -92,7 +92,6 @@ public RedundantKVGenerator() { ); } - /** * Various configuration options for generating key values * @param randomizer pick things by random @@ -115,8 +114,7 @@ public RedundantKVGenerator(Random randomizer, float chanceForZeroValue, int baseTimestampDivide, - int timestampDiffSize - ) { + int timestampDiffSize) { this.randomizer = randomizer; this.commonPrefix = DEFAULT_COMMON_PREFIX; @@ -144,29 +142,29 @@ public RedundantKVGenerator(Random randomizer, private Random randomizer; // row settings - private byte[] commonPrefix;//global prefix before rowPrefixes + private byte[] commonPrefix; //global prefix before rowPrefixes private int numberOfRowPrefixes; - private int averagePrefixLength = 6; - private int prefixLengthVariance = 3; - private int averageSuffixLength = 3; - private int suffixLengthVariance = 3; - private int numberOfRows = 500; + private int averagePrefixLength; + private int prefixLengthVariance; + private int averageSuffixLength; + private int suffixLengthVariance; + private 
int numberOfRows; - //family + // family private byte[] family; // qualifier - private float chanceForSameQualifier = 0.5f; - private float chanceForSimilarQualifier = 0.4f; - private int averageQualifierLength = 9; - private int qualifierLengthVariance = 3; + private float chanceForSameQualifier; + private float chanceForSimilarQualifier; + private int averageQualifierLength; + private int qualifierLengthVariance; - private int columnFamilyLength = 9; - private int valueLength = 8; - private float chanceForZeroValue = 0.5f; + private int columnFamilyLength; + private int valueLength; + private float chanceForZeroValue; - private int baseTimestampDivide = 1000000; - private int timestampDiffSize = 100000000; + private int baseTimestampDivide; + private int timestampDiffSize; private List generateRows() { // generate prefixes @@ -178,8 +176,7 @@ private List generateRows() { prefixLengthVariance; byte[] newPrefix = new byte[prefixLength]; randomizer.nextBytes(newPrefix); - byte[] newPrefixWithCommon = newPrefix; - prefixes.add(newPrefixWithCommon); + prefixes.add(newPrefix); } // generate rest of the row @@ -206,6 +203,7 @@ private List generateRows() { public List generateTestKeyValues(int howMany) { return generateTestKeyValues(howMany, false); } + /** * Generate test data useful to test encoders. * @param howMany How many Key values should be generated. @@ -272,9 +270,7 @@ public List generateTestKeyValues(int howMany, boolean useTags) { } if (randomizer.nextFloat() < chanceForZeroValue) { - for (int j = 0; j < value.length; ++j) { - value[j] = (byte) 0; - } + Arrays.fill(value, (byte) 0); } else { randomizer.nextBytes(value); } @@ -287,7 +283,7 @@ public List generateTestKeyValues(int howMany, boolean useTags) { } } - Collections.sort(result, CellComparator.getInstance()); + result.sort(CellComparator.getInstance()); return result; } @@ -357,9 +353,7 @@ public List generateTestExtendedOffheapKeyValues(int howMany, boolean useT } if (randomizer.nextFloat() < chanceForZeroValue) { - for (int j = 0; j < value.length; ++j) { - value[j] = (byte) 0; - } + Arrays.fill(value, (byte) 0); } else { randomizer.nextBytes(value); } @@ -383,7 +377,7 @@ public List generateTestExtendedOffheapKeyValues(int howMany, boolean useT } } - Collections.sort(result, CellComparator.getInstance()); + result.sort(CellComparator.getInstance()); return result; } @@ -469,95 +463,9 @@ public static ByteBuffer convertKvToByteBuffer(List keyValues, return result; } - /************************ get/set ***********************************/ - public RedundantKVGenerator setCommonPrefix(byte[] prefix){ - this.commonPrefix = prefix; - return this; - } - - public RedundantKVGenerator setRandomizer(Random randomizer) { - this.randomizer = randomizer; - return this; - } - - public RedundantKVGenerator setNumberOfRowPrefixes(int numberOfRowPrefixes) { - this.numberOfRowPrefixes = numberOfRowPrefixes; - return this; - } - - public RedundantKVGenerator setAveragePrefixLength(int averagePrefixLength) { - this.averagePrefixLength = averagePrefixLength; - return this; - } - - public RedundantKVGenerator setPrefixLengthVariance(int prefixLengthVariance) { - this.prefixLengthVariance = prefixLengthVariance; - return this; - } - - public RedundantKVGenerator setAverageSuffixLength(int averageSuffixLength) { - this.averageSuffixLength = averageSuffixLength; - return this; - } - - public RedundantKVGenerator setSuffixLengthVariance(int suffixLengthVariance) { - this.suffixLengthVariance = suffixLengthVariance; - return this; - } - - public 
RedundantKVGenerator setNumberOfRows(int numberOfRows) { - this.numberOfRows = numberOfRows; - return this; - } - - public RedundantKVGenerator setChanceForSameQualifier(float chanceForSameQualifier) { - this.chanceForSameQualifier = chanceForSameQualifier; - return this; - } - - public RedundantKVGenerator setChanceForSimilarQualifier(float chanceForSimiliarQualifier) { - this.chanceForSimilarQualifier = chanceForSimiliarQualifier; - return this; - } - - public RedundantKVGenerator setAverageQualifierLength(int averageQualifierLength) { - this.averageQualifierLength = averageQualifierLength; - return this; - } - - public RedundantKVGenerator setQualifierLengthVariance(int qualifierLengthVariance) { - this.qualifierLengthVariance = qualifierLengthVariance; - return this; - } - - public RedundantKVGenerator setColumnFamilyLength(int columnFamilyLength) { - this.columnFamilyLength = columnFamilyLength; - return this; - } - public RedundantKVGenerator setFamily(byte[] family) { this.family = family; this.columnFamilyLength = family.length; return this; } - - public RedundantKVGenerator setValueLength(int valueLength) { - this.valueLength = valueLength; - return this; - } - - public RedundantKVGenerator setChanceForZeroValue(float chanceForZeroValue) { - this.chanceForZeroValue = chanceForZeroValue; - return this; - } - - public RedundantKVGenerator setBaseTimestampDivide(int baseTimestampDivide) { - this.baseTimestampDivide = baseTimestampDivide; - return this; - } - - public RedundantKVGenerator setTimestampDiffSize(int timestampDiffSize) { - this.timestampDiffSize = timestampDiffSize; - return this; - } } diff --git a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java index d3c27df89b0f..dd8b3909d6d2 100644 --- a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java +++ b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java @@ -199,7 +199,7 @@ public boolean isAborted() { * @return True if we removed the test dirs */ @Override - public boolean cleanupTestDir() throws IOException { + public boolean cleanupTestDir() { boolean ret = super.cleanupTestDir(); if (deleteDir(this.clusterTestDir)) { this.clusterTestDir = null;