diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml
index 13142036f077..5b51b88f9313 100644
--- a/hbase-assembly/pom.xml
+++ b/hbase-assembly/pom.xml
@@ -188,6 +188,10 @@
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-compression-zstd</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-diagnostics</artifactId>
+    </dependency>
     <dependency>
       <groupId>jline</groupId>
       <artifactId>jline</artifactId>
diff --git a/hbase-assembly/src/main/assembly/hadoop-three-compat.xml b/hbase-assembly/src/main/assembly/hadoop-three-compat.xml
index e3100bb88fcb..4ba995409f08 100644
--- a/hbase-assembly/src/main/assembly/hadoop-three-compat.xml
+++ b/hbase-assembly/src/main/assembly/hadoop-three-compat.xml
@@ -46,6 +46,7 @@
         <include>org.apache.hbase:hbase-it</include>
         <include>org.apache.hbase:hbase-logging</include>
         <include>org.apache.hbase:hbase-mapreduce</include>
+        <include>org.apache.hbase:hbase-diagnostics</include>
         <include>org.apache.hbase:hbase-metrics</include>
         <include>org.apache.hbase:hbase-metrics-api</include>
         <include>org.apache.hbase:hbase-procedure</include>
diff --git a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java
index f45b949079bb..ca0d5a7ff66a 100644
--- a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java
+++ b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hbase.security;
import java.io.File;
-import java.io.IOException;
-import java.net.InetAddress;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hbase.AuthUtil;
@@ -172,23 +170,6 @@ public static void setSSLConfiguration(HBaseCommonTestingUtil utility, Class<?> clazz)
KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, false);
}
- public static UserGroupInformation loginAndReturnUGI(Configuration conf, String username)
- throws IOException {
- String hostname = InetAddress.getLocalHost().getHostName();
- String keyTabFileConfKey = "hbase." + username + ".keytab.file";
- String keyTabFileLocation = conf.get(keyTabFileConfKey);
- String principalConfKey = "hbase." + username + ".kerberos.principal";
- String principal = org.apache.hadoop.security.SecurityUtil
- .getServerPrincipal(conf.get(principalConfKey), hostname);
- if (keyTabFileLocation == null || principal == null) {
- LOG.warn(
- "Principal or key tab file null for : " + principalConfKey + ", " + keyTabFileConfKey);
- }
- UserGroupInformation ugi =
- UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keyTabFileLocation);
- return ugi;
- }
-
public static UserGroupInformation loginKerberosPrincipal(String krbKeytab, String krbPrincipal)
throws Exception {
Configuration conf = new Configuration();
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTesting.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/MockAesKeyProvider.java
similarity index 91%
rename from hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTesting.java
rename to hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/MockAesKeyProvider.java
index b795356d8bbf..0bb2aef7d99b 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/KeyProviderForTesting.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/MockAesKeyProvider.java
@@ -19,11 +19,13 @@
import java.security.Key;
import javax.crypto.spec.SecretKeySpec;
+import org.apache.yetus.audience.InterfaceAudience;
/**
* Return a fixed secret key for AES for testing.
*/
-public class KeyProviderForTesting implements KeyProvider {
+@InterfaceAudience.Private
+public class MockAesKeyProvider implements KeyProvider {
@Override
public void init(String parameters) {
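The rename keeps the provider's contract intact: callers still register it under HConstants.CRYPTO_KEYPROVIDER_CONF_KEY and resolve a fixed AES key by alias. A minimal sketch of that wiring, assuming the "hbase" alias used with CRYPTO_MASTERKEY_NAME_CONF_KEY elsewhere in this patch:

    import java.security.Key;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.io.crypto.Encryption;
    import org.apache.hadoop.hbase.io.crypto.KeyProvider;
    import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider;

    public class MockAesKeyProviderSketch {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
        // Resolve the provider and a fixed AES key by alias, as the updated tests do.
        KeyProvider provider = Encryption.getKeyProvider(conf);
        Key key = provider.getKey("hbase");
        System.out.println(key.getAlgorithm() + " key, " + key.getEncoded().length + " bytes");
      }
    }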
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RandomDistribution.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RandomDistribution.java
similarity index 98%
rename from hbase-common/src/test/java/org/apache/hadoop/hbase/util/RandomDistribution.java
rename to hbase-common/src/main/java/org/apache/hadoop/hbase/util/RandomDistribution.java
index 6635accedbb0..e5d2a66ebe5b 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/RandomDistribution.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/RandomDistribution.java
@@ -21,6 +21,7 @@
import java.util.Arrays;
import java.util.Collections;
import java.util.Random;
+import org.apache.yetus.audience.InterfaceAudience;
/**
* A class that generates random numbers that follow some distribution.
@@ -29,6 +30,7 @@
* Remove after tfile is committed and use the tfile version of this class instead.
*
*/
+@InterfaceAudience.Private
public class RandomDistribution {
/**
* Interface for discrete (integer) random distributions.
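Promoting RandomDistribution to src/main makes it reachable from the relocated PerformanceEvaluation. A short sketch of the intended use, assuming the Zipf inner class keeps its (Random, min, max, sigma) constructor and the DiscreteRNG#nextInt() interface declared in the full file:

    import java.util.Random;
    import org.apache.hadoop.hbase.util.RandomDistribution;

    public class RandomDistributionSketch {
      public static void main(String[] args) {
        // Zipf-distributed ints in [0, 100); 1.2 is the skew (sigma) parameter.
        RandomDistribution.DiscreteRNG rng =
          new RandomDistribution.Zipf(new Random(42), 0, 100, 1.2);
        for (int i = 0; i < 5; i++) {
          System.out.println(rng.nextInt());
        }
      }
    }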
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
index 3d549ed29137..741cf05744d8 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
@@ -43,10 +43,10 @@ public class TestKeyProvider {
@Test
public void testTestProvider() {
Configuration conf = HBaseConfiguration.create();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
KeyProvider provider = Encryption.getKeyProvider(conf);
assertNotNull("Null returned for provider", provider);
- assertTrue("Provider is not the expected type", provider instanceof KeyProviderForTesting);
+ assertTrue("Provider is not the expected type", provider instanceof MockAesKeyProvider);
Key key = provider.getKey("foo");
assertNotNull("Test provider did not return a key as expected", key);
diff --git a/hbase-diagnostics/pom.xml b/hbase-diagnostics/pom.xml
new file mode 100644
index 000000000000..dd750dec0be2
--- /dev/null
+++ b/hbase-diagnostics/pom.xml
@@ -0,0 +1,190 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hbase</groupId>
+    <artifactId>hbase-build-configuration</artifactId>
+    <version>${revision}</version>
+    <relativePath>../hbase-build-configuration</relativePath>
+  </parent>
+  <artifactId>hbase-diagnostics</artifactId>
+  <name>Apache HBase - Diagnostics Tools</name>
+  <description>Diagnostics Tools for HBase</description>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-mapreduce</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-logging</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase.thirdparty</groupId>
+      <artifactId>hbase-shaded-miscellaneous</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-protocol-shaded</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-zookeeper</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-balancer</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase.thirdparty</groupId>
+      <artifactId>hbase-shaded-gson</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>io.opentelemetry</groupId>
+      <artifactId>opentelemetry-context</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>io.opentelemetry</groupId>
+      <artifactId>opentelemetry-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>io.dropwizard.metrics</groupId>
+      <artifactId>metrics-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-math3</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop-three.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop-three.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-annotations</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-zookeeper</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-hadoop-compat</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop-three.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minicluster</artifactId>
+      <version>${hadoop-three.version}</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.xml.bind</groupId>
+          <artifactId>jaxb-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.ws.rs</groupId>
+          <artifactId>jsr311-api</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>jul-to-slf4j</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
similarity index 98%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
index 60ba0f85c006..4744a078111f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
@@ -28,7 +28,7 @@
import org.apache.hadoop.hbase.io.crypto.CryptoCipherProvider;
import org.apache.hadoop.hbase.io.crypto.DefaultCipherProvider;
import org.apache.hadoop.hbase.io.crypto.Encryption;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
@@ -121,7 +121,7 @@ private void runBenchmarks() throws Exception {
// Add configuration for AES cipher
final Configuration aesconf = new Configuration();
- aesconf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ aesconf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
aesconf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
aesconf.setInt("hfile.format.version", 3);
final FileSystem aesfs = FileSystem.get(aesconf);
@@ -137,7 +137,7 @@ private void runBenchmarks() throws Exception {
// Add configuration for Commons cipher
final Configuration cryptoconf = new Configuration();
- cryptoconf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ cryptoconf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
cryptoconf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
cryptoconf.setInt("hfile.format.version", 3);
cryptoconf.set(HConstants.CRYPTO_CIPHERPROVIDER_CONF_KEY, CryptoCipherProvider.class.getName());
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
similarity index 100%
rename from hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java
similarity index 97%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java
index 8a6347ce6056..4d278c639467 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java
@@ -21,12 +21,14 @@
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Code shared by PE tests.
*/
+@InterfaceAudience.Private
public class PerformanceEvaluationCommons {
private static final Logger LOG =
LoggerFactory.getLogger(PerformanceEvaluationCommons.class.getName());
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
similarity index 100%
rename from hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
diff --git a/hbase-balancer/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java
similarity index 97%
rename from hbase-balancer/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java
index 3a435e140989..0e3977dc31ab 100644
--- a/hbase-balancer/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java
@@ -24,7 +24,7 @@
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
-import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
+import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
@@ -54,8 +54,6 @@ public class LoadBalancerPerformanceEvaluation extends AbstractHBaseTool {
private static final Logger LOG =
LoggerFactory.getLogger(LoadBalancerPerformanceEvaluation.class.getName());
- protected static final HBaseCommonTestingUtil UTIL = new HBaseCommonTestingUtil();
-
private static final int DEFAULT_NUM_REGIONS = 1000000;
private static Option NUM_REGIONS_OPT = new Option("regions", true,
"Number of regions to consider by load balancer. Default: " + DEFAULT_NUM_REGIONS);
@@ -177,7 +175,7 @@ protected int doWork() throws Exception {
public static void main(String[] args) throws IOException {
LoadBalancerPerformanceEvaluation tool = new LoadBalancerPerformanceEvaluation();
- tool.setConf(UTIL.getConfiguration());
+ tool.setConf(HBaseConfiguration.create());
tool.run(args);
}
}
diff --git a/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/KerberosUtils.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/KerberosUtils.java
new file mode 100644
index 000000000000..06398b761b3d
--- /dev/null
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/KerberosUtils.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Utility class for Kerberos authentication.
+ */
+@InterfaceAudience.Private
+public class KerberosUtils {
+ private static final Logger LOG = LoggerFactory.getLogger(KerberosUtils.class);
+
+ /**
+ * Logs in a user using Kerberos keytab and returns the UserGroupInformation (UGI) instance.
+ * @param conf the configuration object
+ * @param username the username for which the keytab file and principal are configured.
+ * @return the UserGroupInformation instance for the logged-in user.
+ * @throws IOException If an I/O error occurs during login.
+ */
+ public static UserGroupInformation loginAndReturnUGI(Configuration conf, String username)
+ throws IOException {
+ String hostname = InetAddress.getLocalHost().getHostName();
+ String keyTabFileConfKey = "hbase." + username + ".keytab.file";
+ String keyTabFileLocation = conf.get(keyTabFileConfKey);
+ String principalConfKey = "hbase." + username + ".kerberos.principal";
+ String principal = org.apache.hadoop.security.SecurityUtil
+ .getServerPrincipal(conf.get(principalConfKey), hostname);
+ if (keyTabFileLocation == null || principal == null) {
+ LOG.warn(
+ "Principal or key tab file null for : " + principalConfKey + ", " + keyTabFileConfKey);
+ }
+ UserGroupInformation ugi =
+ UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keyTabFileLocation);
+ return ugi;
+ }
+}
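How the relocated helper is driven (LoadTestTool and the ACL readers/updaters below call it the same way). The keytab path, principal, and username here are hypothetical; only the hbase.<username>.keytab.file / hbase.<username>.kerberos.principal key pattern is dictated by the method:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.security.User;
    import org.apache.hadoop.hbase.util.KerberosUtils;
    import org.apache.hadoop.security.UserGroupInformation;

    public class KerberosUtilsSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Hypothetical values; loginAndReturnUGI derives these keys from the username.
        conf.set("hbase.loadtest.keytab.file", "/etc/security/keytabs/loadtest.keytab");
        conf.set("hbase.loadtest.kerberos.principal", "loadtest/_HOST@EXAMPLE.COM");
        // Log in via the keytab and wrap the UGI in an HBase User, as LoadTestTool does.
        UserGroupInformation ugi = KerberosUtils.loginAndReturnUGI(conf, "loadtest");
        User user = User.create(ugi);
        System.out.println("Logged in as " + user.getName());
      }
    }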
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithMOB.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithMOB.java
similarity index 96%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithMOB.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithMOB.java
index 77397f116ca0..c02eed483d33 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithMOB.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithMOB.java
@@ -18,10 +18,12 @@
package org.apache.hadoop.hbase.util;
import java.util.Arrays;
+import org.apache.yetus.audience.InterfaceAudience;
/**
* A load test data generator for MOB
*/
+@InterfaceAudience.Private
public class LoadTestDataGeneratorWithMOB extends MultiThreadedAction.DefaultDataGenerator {
private byte[] mobColumnFamily;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithTags.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithTags.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithTags.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestDataGeneratorWithTags.java
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestTool.java
similarity index 98%
rename from hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestTool.java
index 72a73eab8311..1e0ece6309fb 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestTool.java
@@ -29,7 +29,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
@@ -47,7 +46,6 @@
import org.apache.hadoop.hbase.logging.Log4jUtils;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.security.EncryptionUtil;
-import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.access.AccessControlClient;
import org.apache.hadoop.hbase.security.access.Permission;
@@ -83,6 +81,10 @@ public class LoadTestTool extends AbstractHBaseTool {
/** Column families for the test */
private byte[][] families;
+ /** Column family used by the test */
+ private static final byte[] DEFAULT_COLUMN_FAMILY = Bytes.toBytes("test_cf");
+ /** Column families used by the test */
+ private static final byte[][] DEFAULT_COLUMN_FAMILIES = { DEFAULT_COLUMN_FAMILY };
/** Table name to use if not overridden on the command line */
protected static final String DEFAULT_TABLE_NAME = "cluster_test";
@@ -322,8 +324,7 @@ protected void addOptions() {
addOptWithArg(OPT_BLOOM, OPT_USAGE_BLOOM);
addOptWithArg(OPT_BLOOM_PARAM, "the parameter of bloom filter type");
addOptWithArg(OPT_COMPRESSION, OPT_USAGE_COMPRESSION);
- addOptWithArg(HFileTestUtil.OPT_DATA_BLOCK_ENCODING,
- HFileTestUtil.OPT_DATA_BLOCK_ENCODING_USAGE);
+ addOptWithArg(LoadTestUtil.OPT_DATA_BLOCK_ENCODING, LoadTestUtil.OPT_DATA_BLOCK_ENCODING_USAGE);
addOptWithArg(OPT_MAX_READ_ERRORS,
"The maximum number of read errors "
+ "to tolerate before terminating all reader threads. The default is "
@@ -409,7 +410,7 @@ protected void processOptions(CommandLine cmd) {
families[i] = Bytes.toBytes(list[i]);
}
} else {
- families = HFileTestUtil.DEFAULT_COLUMN_FAMILIES;
+ families = DEFAULT_COLUMN_FAMILIES;
}
isVerbose = cmd.hasOption(OPT_VERBOSE);
@@ -522,7 +523,7 @@ protected void processOptions(CommandLine cmd) {
}
private void parseColumnFamilyOptions(CommandLine cmd) {
- String dataBlockEncodingStr = cmd.getOptionValue(HFileTestUtil.OPT_DATA_BLOCK_ENCODING);
+ String dataBlockEncodingStr = cmd.getOptionValue(LoadTestUtil.OPT_DATA_BLOCK_ENCODING);
dataBlockEncodingAlgo =
dataBlockEncodingStr == null ? null : DataBlockEncoding.valueOf(dataBlockEncodingStr);
@@ -554,7 +555,7 @@ public void initTestTable() throws IOException {
durability = Durability.ASYNC_WAL;
}
- HBaseTestingUtil.createPreSplitLoadTestTable(conf, tableName, getColumnFamilies(), compressAlgo,
+ LoadTestUtil.createPreSplitLoadTestTable(conf, tableName, getColumnFamilies(), compressAlgo,
dataBlockEncodingAlgo, numRegionsPerServer, regionReplication, durability);
applyColumnFamilyOptions(tableName, getColumnFamilies());
}
@@ -609,7 +610,7 @@ protected int loadTable() throws IOException {
LOG.error(exp.toString(), exp);
return EXIT_FAILURE;
}
- userOwner = User.create(HBaseKerberosUtils.loginAndReturnUGI(conf, superUser));
+ userOwner = User.create(KerberosUtils.loginAndReturnUGI(conf, superUser));
} else {
superUser = clazzAndArgs[1];
userNames = clazzAndArgs[2];
@@ -648,7 +649,7 @@ protected int loadTable() throws IOException {
User user = null;
for (String userStr : users) {
if (User.isHBaseSecurityEnabled(conf)) {
- user = User.create(HBaseKerberosUtils.loginAndReturnUGI(conf, userStr));
+ user = User.create(KerberosUtils.loginAndReturnUGI(conf, userStr));
} else {
user = User.createUserForTesting(conf, userStr, new String[0]);
}
diff --git a/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestUtil.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestUtil.java
new file mode 100644
index 000000000000..b9c55e9e02f9
--- /dev/null
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/LoadTestUtil.java
@@ -0,0 +1,194 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Locale;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.TableExistsException;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.util.RegionSplitter.SplitAlgorithm;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This class was created by moving all load-test-related code from HFileTestUtil and
+ * HBaseTestingUtil as part of the refactoring for the hbase-diagnostics module creation in
+ * HBASE-28432.
+ */
+@InterfaceAudience.Private
+public class LoadTestUtil {
+ private static final Logger LOG = LoggerFactory.getLogger(LoadTestUtil.class);
+
+ public static final String OPT_DATA_BLOCK_ENCODING_USAGE = "Encoding algorithm (e.g. prefix "
+ + "compression) to use for data blocks in the test column family, " + "one of "
+ + Arrays.toString(DataBlockEncoding.values()) + ".";
+ public static final String OPT_DATA_BLOCK_ENCODING =
+ ColumnFamilyDescriptorBuilder.DATA_BLOCK_ENCODING.toLowerCase(Locale.ROOT);
+
+ /**
+ * The default number of regions per regionserver when creating a pre-split table.
+ */
+ private static final int DEFAULT_REGIONS_PER_SERVER = 3;
+
+ /**
+ * Creates a pre-split table for load testing. If the table already exists, logs a warning and
+ * continues.
+ * @return the number of regions the table was split into
+ */
+ public static int createPreSplitLoadTestTable(Configuration conf, TableName tableName,
+ byte[] columnFamily, Algorithm compression, DataBlockEncoding dataBlockEncoding)
+ throws IOException {
+ return createPreSplitLoadTestTable(conf, tableName, columnFamily, compression,
+ dataBlockEncoding, DEFAULT_REGIONS_PER_SERVER, 1, Durability.USE_DEFAULT);
+ }
+
+ /**
+ * Creates a pre-split table for load testing. If the table already exists, logs a warning and
+ * continues.
+ * @return the number of regions the table was split into
+ */
+ public static int createPreSplitLoadTestTable(Configuration conf, TableName tableName,
+ byte[] columnFamily, Algorithm compression, DataBlockEncoding dataBlockEncoding,
+ int numRegionsPerServer, int regionReplication, Durability durability) throws IOException {
+ TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
+ builder.setDurability(durability);
+ builder.setRegionReplication(regionReplication);
+ ColumnFamilyDescriptorBuilder cfBuilder =
+ ColumnFamilyDescriptorBuilder.newBuilder(columnFamily);
+ cfBuilder.setDataBlockEncoding(dataBlockEncoding);
+ cfBuilder.setCompressionType(compression);
+ return createPreSplitLoadTestTable(conf, builder.build(), cfBuilder.build(),
+ numRegionsPerServer);
+ }
+
+ /**
+ * Creates a pre-split table for load testing. If the table already exists, logs a warning and
+ * continues.
+ * @return the number of regions the table was split into
+ */
+ public static int createPreSplitLoadTestTable(Configuration conf, TableName tableName,
+ byte[][] columnFamilies, Algorithm compression, DataBlockEncoding dataBlockEncoding,
+ int numRegionsPerServer, int regionReplication, Durability durability) throws IOException {
+ TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
+ builder.setDurability(durability);
+ builder.setRegionReplication(regionReplication);
+ ColumnFamilyDescriptor[] hcds = new ColumnFamilyDescriptor[columnFamilies.length];
+ for (int i = 0; i < columnFamilies.length; i++) {
+ ColumnFamilyDescriptorBuilder cfBuilder =
+ ColumnFamilyDescriptorBuilder.newBuilder(columnFamilies[i]);
+ cfBuilder.setDataBlockEncoding(dataBlockEncoding);
+ cfBuilder.setCompressionType(compression);
+ hcds[i] = cfBuilder.build();
+ }
+ return createPreSplitLoadTestTable(conf, builder.build(), hcds, numRegionsPerServer);
+ }
+
+ /**
+ * Creates a pre-split table for load testing. If the table already exists, logs a warning and
+ * continues.
+ * @return the number of regions the table was split into
+ */
+ public static int createPreSplitLoadTestTable(Configuration conf, TableDescriptor desc,
+ ColumnFamilyDescriptor hcd) throws IOException {
+ return createPreSplitLoadTestTable(conf, desc, hcd, DEFAULT_REGIONS_PER_SERVER);
+ }
+
+ /**
+ * Creates a pre-split table for load testing. If the table already exists, logs a warning and
+ * continues.
+ * @return the number of regions the table was split into
+ */
+ public static int createPreSplitLoadTestTable(Configuration conf, TableDescriptor desc,
+ ColumnFamilyDescriptor hcd, int numRegionsPerServer) throws IOException {
+ return createPreSplitLoadTestTable(conf, desc, new ColumnFamilyDescriptor[] { hcd },
+ numRegionsPerServer);
+ }
+
+ /**
+ * Creates a pre-split table for load testing. If the table already exists, logs a warning and
+ * continues.
+ * @return the number of regions the table was split into
+ */
+ public static int createPreSplitLoadTestTable(Configuration conf, TableDescriptor desc,
+ ColumnFamilyDescriptor[] hcds, int numRegionsPerServer) throws IOException {
+ return createPreSplitLoadTestTable(conf, desc, hcds, new RegionSplitter.HexStringSplit(),
+ numRegionsPerServer);
+ }
+
+ /**
+ * Creates a pre-split table for load testing. If the table already exists, logs a warning and
+ * continues.
+ * @return the number of regions the table was split into
+ */
+ public static int createPreSplitLoadTestTable(Configuration conf, TableDescriptor td,
+ ColumnFamilyDescriptor[] cds, SplitAlgorithm splitter, int numRegionsPerServer)
+ throws IOException {
+ TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(td);
+ for (ColumnFamilyDescriptor cd : cds) {
+ if (!td.hasColumnFamily(cd.getName())) {
+ builder.setColumnFamily(cd);
+ }
+ }
+ td = builder.build();
+ int totalNumberOfRegions = 0;
+ Connection unmanagedConnection = ConnectionFactory.createConnection(conf);
+ Admin admin = unmanagedConnection.getAdmin();
+
+ try {
+ // Create a table with pre-split regions. The number of splits is
+ // (number of live region servers) * (regions per region server).
+ int numberOfServers = admin.getRegionServers().size();
+ if (numberOfServers == 0) {
+ throw new IllegalStateException("No live regionservers");
+ }
+
+ totalNumberOfRegions = numberOfServers * numRegionsPerServer;
+ LOG.info("Number of live regionservers: " + numberOfServers + ", "
+ + "pre-splitting table into " + totalNumberOfRegions + " regions " + "(regions per server: "
+ + numRegionsPerServer + ")");
+
+ byte[][] splits = splitter.split(totalNumberOfRegions);
+
+ admin.createTable(td, splits);
+ } catch (MasterNotRunningException e) {
+ LOG.error("Master not running", e);
+ throw new IOException(e);
+ } catch (TableExistsException e) {
+ LOG.warn("Table " + td.getTableName() + " already exists, continuing");
+ } finally {
+ admin.close();
+ unmanagedConnection.close();
+ }
+ return totalNumberOfRegions;
+ }
+
+}
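A sketch of the simplest entry point above, assuming a running cluster with at least one live region server (the method throws IllegalStateException otherwise); the table and family names are illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.io.compress.Compression;
    import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.util.LoadTestUtil;

    public class LoadTestUtilSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Pre-splits DEFAULT_REGIONS_PER_SERVER regions per live region server.
        int regions = LoadTestUtil.createPreSplitLoadTestTable(conf,
          TableName.valueOf("cluster_test"), Bytes.toBytes("test_cf"),
          Compression.Algorithm.NONE, DataBlockEncoding.NONE);
        System.out.println("Table pre-split into " + regions + " regions");
      }
    }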
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
similarity index 98%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
index 2476fb388084..3914884fec95 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
@@ -41,6 +41,7 @@
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.util.StringUtils;
+import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -51,6 +52,7 @@
/**
* Common base class for reader and writer parts of multi-thread HBase load test (See LoadTestTool).
*/
+@InterfaceAudience.Private
public abstract class MultiThreadedAction {
private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedAction.class);
@@ -78,7 +80,7 @@ public abstract class MultiThreadedAction {
* Default implementation of LoadTestDataGenerator that uses LoadTestKVGenerator, fixed set of
* column families, and random number of columns in range. The table for it can be created
* manually or, for example, via
- * {@link org.apache.hadoop.hbase.HBaseTestingUtil#createPreSplitLoadTestTable(Configuration, TableName, byte[], org.apache.hadoop.hbase.io.compress.Compression.Algorithm, org.apache.hadoop.hbase.io.encoding.DataBlockEncoding)}
+ * {@link org.apache.hadoop.hbase.util.LoadTestUtil#createPreSplitLoadTestTable(Configuration, TableName, byte[], org.apache.hadoop.hbase.io.compress.Compression.Algorithm, org.apache.hadoop.hbase.io.encoding.DataBlockEncoding)}
*/
public static class DefaultDataGenerator extends LoadTestDataGenerator {
private byte[][] columnFamilies = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
similarity index 99%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
index b69f67e2f184..e8e3bea66742 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
@@ -33,10 +33,12 @@
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
+import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Creates multiple threads that read and verify previously written data */
+@InterfaceAudience.Private
public class MultiThreadedReader extends MultiThreadedAction {
private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedReader.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
similarity index 97%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
index 23087ae3c0a7..9286b58ce6b0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
@@ -26,16 +26,17 @@
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A MultiThreadReader that helps to work with ACL
*/
+@InterfaceAudience.Private
public class MultiThreadedReaderWithACL extends MultiThreadedReader {
private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedReaderWithACL.class);
@@ -123,7 +124,7 @@ public Object run() throws Exception {
UserGroupInformation realUserUgi;
if (!users.containsKey(userNames[mod])) {
if (User.isHBaseSecurityEnabled(conf)) {
- realUserUgi = HBaseKerberosUtils.loginAndReturnUGI(conf, userNames[mod]);
+ realUserUgi = KerberosUtils.loginAndReturnUGI(conf, userNames[mod]);
} else {
realUserUgi = UserGroupInformation.createRemoteUser(userNames[mod]);
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
similarity index 99%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
index ff70cef91f8a..99ce506c0c9e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
@@ -45,6 +45,7 @@
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.util.StringUtils;
+import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -53,6 +54,7 @@
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
/** Creates multiple threads that write key/values into the table */
+@InterfaceAudience.Private
public class MultiThreadedUpdater extends MultiThreadedWriterBase {
private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedUpdater.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
similarity index 98%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
index a3a2c4946572..1ad93a54cdd7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
@@ -34,17 +34,18 @@
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
+import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A MultiThreadUpdater that helps to work with ACL
*/
+@InterfaceAudience.Private
public class MultiThreadedUpdaterWithACL extends MultiThreadedUpdater {
private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedUpdaterWithACL.class);
private final static String COMMA = ",";
@@ -138,7 +139,7 @@ public Object run() throws Exception {
try {
if (!users.containsKey(userNames[mod])) {
if (User.isHBaseSecurityEnabled(conf)) {
- realUserUgi = HBaseKerberosUtils.loginAndReturnUGI(conf, userNames[mod]);
+ realUserUgi = KerberosUtils.loginAndReturnUGI(conf, userNames[mod]);
} else {
realUserUgi = UserGroupInformation.createRemoteUser(userNames[mod]);
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
similarity index 98%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
index 3a83446cddfe..f3b8b2033dcb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedWriterBase.java
@@ -32,10 +32,12 @@
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
+import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Creates multiple threads that write key/values into the table */
+@InterfaceAudience.Private
public abstract class MultiThreadedWriterBase extends MultiThreadedAction {
private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedWriterBase.class);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
similarity index 98%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
index 96e2748012ea..a127b0230477 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
@@ -29,12 +29,14 @@
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
import org.apache.hadoop.util.StringUtils;
+import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* MultiThreadedWriter that helps in testing ACL
*/
+@InterfaceAudience.Private
public class MultiThreadedWriterWithACL extends MultiThreadedWriter {
private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedWriterWithACL.class);
diff --git a/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/WALPerformanceEvaluationUtil.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/WALPerformanceEvaluationUtil.java
new file mode 100644
index 000000000000..dc53c4bb2bc3
--- /dev/null
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/WALPerformanceEvaluationUtil.java
@@ -0,0 +1,272 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.UUID;
+import java.util.concurrent.ThreadLocalRandom;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.fs.HFileSystem;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This class contains code copied from HBaseTestingUtil and its superclasses that is required by
+ * WALPerformanceEvaluation. This was done as part of the refactoring for the hbase-diagnostics
+ * module creation in HBASE-28432, to break a cyclic dependency.
+ */
+@InterfaceAudience.Private
+public class WALPerformanceEvaluationUtil {
+ private static final Logger LOG = LoggerFactory.getLogger(WALPerformanceEvaluationUtil.class);
+
+ /**
+ * Directory on test filesystem where we put the data for this instance of
+ * WALPerformanceEvaluationUtil
+ */
+ private Path dataTestDirOnTestFS = null;
+ /**
+ * Directory where we put the data for this instance of WALPerformanceEvaluationUtil
+ */
+ private File dataTestDir = null;
+ /**
+ * System property key to get base test directory value
+ */
+ private static final String BASE_TEST_DIRECTORY_KEY = "test.build.data.basedirectory";
+
+ /**
+ * Default base directory for test output.
+ */
+ private static final String DEFAULT_BASE_TEST_DIRECTORY = "target/test-data";
+
+ private Configuration conf;
+
+ public WALPerformanceEvaluationUtil(Configuration conf) {
+ this.conf = conf;
+ }
+
+ /**
+ * @return Where to write test data on the local filesystem; usually
+ * {@link #DEFAULT_BASE_TEST_DIRECTORY}. Should not be used by unit tests directly, hence it's
+ * private. Unit tests will use a subdirectory of this directory.
+ * @see #setupDataTestDir()
+ */
+ private Path getBaseTestDir() {
+ String pathName = System.getProperty(BASE_TEST_DIRECTORY_KEY, DEFAULT_BASE_TEST_DIRECTORY);
+
+ return new Path(pathName);
+ }
+
+ private static UUID getRandomUUID() {
+ return new UUID(ThreadLocalRandom.current().nextLong(), ThreadLocalRandom.current().nextLong());
+ }
+
+ /**
+ * @return A dir with a random (uuid) name under the test dir
+ * @see #getBaseTestDir()
+ */
+ private Path getRandomDir() {
+ return new Path(getBaseTestDir(), getRandomUUID().toString());
+ }
+
+ private void createSubDir(String propertyName, Path parent, String subDirName) {
+ Path newPath = new Path(parent, subDirName);
+ File newDir = new File(newPath.toString()).getAbsoluteFile();
+
+ if (deleteOnExit()) {
+ newDir.deleteOnExit();
+ }
+
+ conf.set(propertyName, newDir.getAbsolutePath());
+ }
+
+ /**
+ * Home our data in a dir under {@link #DEFAULT_BASE_TEST_DIRECTORY}. Give it a random name so
+ * that many concurrent tests can run if we need to. Modifying a System property is not the way
+ * to do concurrent instances -- another instance could grab the temporary value unintentionally
+ * -- but there is nothing we can do about that at the moment; single instance only is how the
+ * minidfscluster works.
+ * We also create the underlying directory names for hadoop.log.dir, mapreduce.cluster.local.dir
+ * and hadoop.tmp.dir, and set the values in the conf, and as a system property for hadoop.tmp.dir
+ * (We do not create them!).
+ * @return The calculated data test build directory, if newly-created.
+ */
+ protected Path setupDataTestDir() {
+ Path testPath = setupDataTestDirInternal();
+ if (null == testPath) {
+ return null;
+ }
+
+ createSubDirAndSystemProperty("hadoop.log.dir", testPath, "hadoop-log-dir");
+
+ // This is defaulted in core-default.xml to /tmp/hadoop-${user.name}, but
+ // we want our own value to ensure uniqueness on the same machine
+ createSubDirAndSystemProperty("hadoop.tmp.dir", testPath, "hadoop-tmp-dir");
+
+ // Read and modified in org.apache.hadoop.mapred.MiniMRCluster
+ createSubDir("mapreduce.cluster.local.dir", testPath, "mapred-local-dir");
+ return testPath;
+ }
+
+ /**
+ * Sets up a directory for a test to use.
+ * @return New directory path, if created.
+ */
+ private Path setupDataTestDirInternal() {
+ if (this.dataTestDir != null) {
+ LOG.warn("Data test dir already setup in " + dataTestDir.getAbsolutePath());
+ return null;
+ }
+ Path testPath = getRandomDir();
+ this.dataTestDir = new File(testPath.toString()).getAbsoluteFile();
+ // Set this property so if mapreduce jobs run, they will use this as their home dir.
+ System.setProperty("test.build.dir", this.dataTestDir.toString());
+
+ if (deleteOnExit()) {
+ this.dataTestDir.deleteOnExit();
+ }
+
+ createSubDir("hbase.local.dir", testPath, "hbase-local-dir");
+
+ return testPath;
+ }
+
+ private void createSubDirAndSystemProperty(String propertyName, Path parent, String subDirName) {
+
+ String sysValue = System.getProperty(propertyName);
+
+ if (sysValue != null) {
+ // There is already a value set. So we do nothing but hope
+ // that there will be no conflicts
+ LOG.info("System.getProperty(\"" + propertyName + "\") already set to: " + sysValue
+ + " so I do NOT create it in " + parent);
+ String confValue = conf.get(propertyName);
+ if (confValue != null && !confValue.endsWith(sysValue)) {
+ LOG.warn(propertyName + " property value differs in configuration and system: "
+ + "Configuration=" + confValue + " while System=" + sysValue
+ + ". Overriding the configuration value with the system value.");
+ }
+ conf.set(propertyName, sysValue);
+ } else {
+ // Ok, it's not set, so we create it as a subdirectory
+ createSubDir(propertyName, parent, subDirName);
+ System.setProperty(propertyName, conf.get(propertyName));
+ }
+ }
+
+ private FileSystem getTestFileSystem() throws IOException {
+ return HFileSystem.get(conf);
+ }
+
+ /**
+ * @return Where to write test data on the test filesystem; returns the working directory of the
+ * test filesystem by default
+ * @see #setupDataTestDirOnTestFS()
+ * @see #getTestFileSystem()
+ */
+ private Path getBaseTestDirOnTestFS() throws IOException {
+ FileSystem fs = getTestFileSystem();
+ return new Path(fs.getWorkingDirectory(), "test-data");
+ }
+
+ /**
+ * Returns True if we should delete testing dirs on exit.
+ */
+ private boolean deleteOnExit() {
+ String v = System.getProperty("hbase.testing.preserve.testdir");
+ // Let default be true, to delete on exit.
+ return v == null ? true : !Boolean.parseBoolean(v);
+ }
+
+ /**
+ * @return Where to write test data on local filesystem, specific to the test. Useful for tests
+ * that do not use a cluster. Creates it if it does not exist already.
+ */
+ private Path getDataTestDir() {
+ if (this.dataTestDir == null) {
+ setupDataTestDir();
+ }
+ return new Path(this.dataTestDir.getAbsolutePath());
+ }
+
+ /**
+ * Sets up a new path in test filesystem to be used by tests.
+ */
+ private Path getNewDataTestDirOnTestFS() throws IOException {
+ // The file system can be either local, mini dfs, or if the configuration
+ // is supplied externally, it can be an external cluster FS. If it is a local
+ // file system, the tests should use getBaseTestDir, otherwise, we can use
+ // the working directory, and create a unique sub dir there
+ FileSystem fs = getTestFileSystem();
+ Path newDataTestDir;
+ String randomStr = getRandomUUID().toString();
+ if (fs.getUri().getScheme().equals(FileSystem.getLocal(conf).getUri().getScheme())) {
+ newDataTestDir = new Path(getDataTestDir(), randomStr);
+ File dataTestDir = new File(newDataTestDir.toString());
+ if (deleteOnExit()) {
+ dataTestDir.deleteOnExit();
+ }
+ } else {
+ Path base = getBaseTestDirOnTestFS();
+ newDataTestDir = new Path(base, randomStr);
+ if (deleteOnExit()) {
+ fs.deleteOnExit(newDataTestDir);
+ }
+ }
+ return newDataTestDir;
+ }
+
+ /**
+ * Sets up a path in the test filesystem to be used by tests. Creates a new directory if not
+ * already set up.
+ */
+ private void setupDataTestDirOnTestFS() throws IOException {
+ if (dataTestDirOnTestFS != null) {
+ LOG.warn("Data test on test fs dir already setup in " + dataTestDirOnTestFS.toString());
+ return;
+ }
+ dataTestDirOnTestFS = getNewDataTestDirOnTestFS();
+ }
+
+ /**
+ * Returns a Path in the test filesystem, obtained from {@link #getTestFileSystem()} to write
+ * temporary test data. Call this method after setting up the mini dfs cluster if the test relies
+ * on it.
+ * @return a unique path in the test filesystem
+ */
+ private Path getDataTestDirOnTestFS() throws IOException {
+ if (dataTestDirOnTestFS == null) {
+ setupDataTestDirOnTestFS();
+ }
+
+ return dataTestDirOnTestFS;
+ }
+
+ /**
+ * Returns a Path in the test filesystem, obtained from {@link #getTestFileSystem()} to write
+ * temporary test data. Call this method after setting up the mini dfs cluster if the test relies
+ * on it.
+ * @param subdirName name of the subdir to create under the base test dir
+ * @return a unique path in the test filesystem
+ */
+ public Path getDataTestDirOnTestFS(final String subdirName) throws IOException {
+ return new Path(getDataTestDirOnTestFS(), subdirName);
+ }
+}
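This is the piece that lets WALPerformanceEvaluation drop HBaseTestingUtil (see the hunk below). A sketch of the replacement pattern:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.util.WALPerformanceEvaluationUtil;

    public class WALPerfDirSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // A unique, delete-on-exit data dir on the configured test filesystem.
        Path rootRegionDir =
          new WALPerformanceEvaluationUtil(conf).getDataTestDirOnTestFS("WALPerformanceEvaluation");
        System.out.println("Writing WALs under " + rootRegionDir);
      }
    }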
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestDataGenerator.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGeneratorWithACL.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestDataGeneratorWithACL.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/test/LoadTestDataGeneratorWithACL.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestDataGeneratorWithACL.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
similarity index 97%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
rename to hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
index 97ad666c1b18..b296296e9d46 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
@@ -43,7 +43,7 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HBaseTestingUtil;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MockRegionServerServices;
import org.apache.hadoop.hbase.TableName;
@@ -54,7 +54,7 @@
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.LogRoller;
import org.apache.hadoop.hbase.regionserver.MemStoreLAB;
@@ -65,6 +65,7 @@
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Threads;
+import org.apache.hadoop.hbase.util.WALPerformanceEvaluationUtil;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
@@ -77,7 +78,7 @@
* This class runs performance benchmarks for {@link WAL}. See usage for this tool by running:
* $ hbase org.apache.hadoop.hbase.wal.WALPerformanceEvaluation -h
*/
-@InterfaceAudience.Private
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
public final class WALPerformanceEvaluation extends Configured implements Tool {
private static final Logger LOG = LoggerFactory.getLogger(WALPerformanceEvaluation.class);
@@ -96,8 +97,6 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
private final MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl();
- private HBaseTestingUtil TEST_UTIL;
-
static final String TABLE_NAME = "WALPerformanceEvaluation";
static final String QUALIFIER_PREFIX = "q";
static final String FAMILY_PREFIX = "cf";
@@ -255,7 +254,7 @@ public int run(String[] args) throws Exception {
if (cipher != null) {
// Set up WAL for encryption
Configuration conf = getConf();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
conf.setBoolean(HConstants.ENABLE_WAL_ENCRYPTION, true);
conf.set(HConstants.CRYPTO_WAL_ALGORITHM_CONF_KEY, cipher);
@@ -273,8 +272,8 @@ public int run(String[] args) throws Exception {
getConf().setBoolean(MemStoreLAB.USEMSLAB_KEY, false);
if (rootRegionDir == null) {
- TEST_UTIL = new HBaseTestingUtil(getConf());
- rootRegionDir = TEST_UTIL.getDataTestDirOnTestFS("WALPerformanceEvaluation");
+ rootRegionDir = new WALPerformanceEvaluationUtil(getConf())
+ .getDataTestDirOnTestFS("WALPerformanceEvaluation");
}
// Run WAL Performance Evaluation
// First set the fs from configs. In case we are on hadoop1
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestClientClusterMetrics.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/TestClientClusterMetrics.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/TestClientClusterMetrics.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/TestClientClusterMetrics.java
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
similarity index 100%
rename from hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestEncryptionUtil.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/security/TestEncryptionUtil.java
similarity index 95%
rename from hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestEncryptionUtil.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/security/TestEncryptionUtil.java
index 6f7be8315853..8c6ae1ec50ab 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestEncryptionUtil.java
+++ b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/security/TestEncryptionUtil.java
@@ -28,7 +28,7 @@
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.crypto.Encryption;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -105,7 +105,7 @@ public void testWALKeyWrappingWithInvalidHashAlg() throws Exception {
public void testWALKeyWrappingWithIncorrectKey() throws Exception {
// set up the key provider for testing to resolve a key for our test subject
Configuration conf = new Configuration(); // we don't need HBaseConfiguration for this
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
// generate a test key
byte[] keyBytes = new byte[AES.KEY_LENGTH];
@@ -144,7 +144,7 @@ public void testHashAlgorithmMismatchShouldNotFailWithDefaultConfig() throws Exc
private void testKeyWrapping(String hashAlgorithm) throws Exception {
// set up the key provider for testing to resolve a key for our test subject
Configuration conf = new Configuration(); // we don't need HBaseConfiguration for this
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
if (!hashAlgorithm.equals(DEFAULT_HASH_ALGORITHM)) {
conf.set(Encryption.CRYPTO_KEY_HASH_ALGORITHM_CONF_KEY, hashAlgorithm);
}
@@ -180,7 +180,7 @@ private void testKeyWrapping(String hashAlgorithm) throws Exception {
private void testWALKeyWrapping(String hashAlgorithm) throws Exception {
// set up the key provider for testing to resolve a key for our test subject
Configuration conf = new Configuration(); // we don't need HBaseConfiguration for this
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
if (!hashAlgorithm.equals(DEFAULT_HASH_ALGORITHM)) {
conf.set(Encryption.CRYPTO_KEY_HASH_ALGORITHM_CONF_KEY, hashAlgorithm);
}
@@ -207,7 +207,7 @@ private void testWALKeyWrapping(String hashAlgorithm) throws Exception {
private void testKeyWrappingWithMismatchingAlgorithms(Configuration conf) throws Exception {
// we use MD5 to hash the encryption key during wrapping
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
conf.set(Encryption.CRYPTO_KEY_HASH_ALGORITHM_CONF_KEY, "MD5");
// generate a test key
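Across all of these call sites the wiring is unchanged apart from the class name: tests point the key-provider config at MockAesKeyProvider and the crypto layer resolves keys through it. A minimal sketch, assuming the standard Encryption.getKeyProvider lookup; the "hbase" alias matches the master-key name the tests configure:

```java
import java.security.Key;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.crypto.KeyProvider;
import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider;

public class MockKeyProviderDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
    conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
    // MockAesKeyProvider returns a fixed AES key derived from the alias,
    // so tests never need a real keystore.
    KeyProvider provider = Encryption.getKeyProvider(conf);
    Key key = provider.getKey("hbase");
    System.out.println(key.getAlgorithm() + " key, " + key.getEncoded().length + " bytes");
  }
}
```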
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
similarity index 90%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
index cdf9f6101e47..144ea6503b06 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
+++ b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
@@ -43,6 +43,11 @@ public class RestartMetaTest extends AbstractHBaseTool {
private static final Logger LOG = LoggerFactory.getLogger(RestartMetaTest.class);
+ /** Column family used by the test */
+ private static final byte[] DEFAULT_COLUMN_FAMILY = Bytes.toBytes("test_cf");
+ /** Column families used by the test */
+ private static final byte[][] DEFAULT_COLUMN_FAMILIES = { DEFAULT_COLUMN_FAMILY };
+
/** The number of region servers used if not specified */
private static final int DEFAULT_NUM_RS = 2;
@@ -77,7 +82,7 @@ private void loadData() throws IOException {
// start the writers
LoadTestDataGenerator dataGen = new MultiThreadedAction.DefaultDataGenerator(minColDataSize,
- maxColDataSize, minColsPerKey, maxColsPerKey, HFileTestUtil.DEFAULT_COLUMN_FAMILY);
+ maxColDataSize, minColsPerKey, maxColsPerKey, DEFAULT_COLUMN_FAMILY);
MultiThreadedWriter writer = new MultiThreadedWriter(dataGen, conf, TABLE_NAME);
writer.setMultiPut(true);
writer.start(startKey, endKey, numThreads);
@@ -96,8 +101,8 @@ protected int doWork() throws Exception {
hbaseCluster.startHBase();
// create tables if needed
- HBaseTestingUtil.createPreSplitLoadTestTable(conf, TABLE_NAME,
- HFileTestUtil.DEFAULT_COLUMN_FAMILY, Compression.Algorithm.NONE, DataBlockEncoding.NONE);
+ LoadTestUtil.createPreSplitLoadTestTable(conf, TABLE_NAME, DEFAULT_COLUMN_FAMILY,
+ Compression.Algorithm.NONE, DataBlockEncoding.NONE);
LOG.debug("Loading data....\n\n");
loadData();
@@ -135,8 +140,7 @@ protected int doWork() throws Exception {
@Override
protected void addOptions() {
addOptWithArg(OPT_NUM_RS, "Number of Region Servers");
- addOptWithArg(HFileTestUtil.OPT_DATA_BLOCK_ENCODING,
- HFileTestUtil.OPT_DATA_BLOCK_ENCODING_USAGE);
+ addOptWithArg(LoadTestUtil.OPT_DATA_BLOCK_ENCODING, LoadTestUtil.OPT_DATA_BLOCK_ENCODING_USAGE);
}
@Override
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
similarity index 100%
rename from hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
similarity index 98%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
rename to hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
index 7d0666886128..4980b292c08d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
+++ b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
@@ -148,7 +148,7 @@ protected void runLoadTestOnExistingTable() throws IOException {
protected void createPreSplitLoadTestTable(TableDescriptor tableDescriptor,
ColumnFamilyDescriptor familyDescriptor) throws IOException {
- HBaseTestingUtil.createPreSplitLoadTestTable(conf, tableDescriptor, familyDescriptor);
+ LoadTestUtil.createPreSplitLoadTestTable(conf, tableDescriptor, familyDescriptor);
TEST_UTIL.waitUntilAllRegionsAssigned(tableDescriptor.getTableName());
}
diff --git a/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategyUsingWPETool.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategyUsingWPETool.java
new file mode 100644
index 000000000000..57f29d31a18e
--- /dev/null
+++ b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategyUsingWPETool.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.wal;
+
+import static org.junit.Assert.assertEquals;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Tests for TestBoundedRegionGroupingStrategy that use WALPerformanceEvaluation for WAL data
+ * creation. This class was created while carving the hbase-diagnostics module out in HBASE-28432
+ * to break a cyclic dependency.
+ */
+@RunWith(Parameterized.class)
+@Category({ RegionServerTests.class, MediumTests.class })
+public class TestBoundedRegionGroupingStrategyUsingWPETool
+ extends TestBoundedRegionGroupingStrategy {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestBoundedRegionGroupingStrategyUsingWPETool.class);
+
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestBoundedRegionGroupingStrategyUsingWPETool.class);
+
+ /**
+ * Write to a log file with three concurrent threads and verify all data is written.
+ */
+ @Test
+ public void testConcurrentWrites() throws Exception {
+ // Run the WPE tool with three threads writing 3000 edits each concurrently.
+ // When done, verify that all edits were written.
+ int errCode = WALPerformanceEvaluation.innerMain(new Configuration(CONF),
+ new String[] { "-threads", "3", "-verify", "-noclosefs", "-iterations", "3000" });
+ assertEquals(0, errCode);
+ }
+
+ /**
+ * Make sure we can successfully run with more regions than our bound.
+ */
+ @Test
+ public void testMoreRegionsThanBound() throws Exception {
+ final String parallelism =
+ Integer.toString(BoundedGroupingStrategy.DEFAULT_NUM_REGION_GROUPS * 2);
+ int errCode =
+ WALPerformanceEvaluation.innerMain(new Configuration(CONF), new String[] { "-threads",
+ parallelism, "-verify", "-noclosefs", "-iterations", "3000", "-regions", parallelism });
+ assertEquals(0, errCode);
+ }
+
+ @Test
+ public void testBoundsGreaterThanDefault() throws Exception {
+ final int temp = CONF.getInt(BoundedGroupingStrategy.NUM_REGION_GROUPS,
+ BoundedGroupingStrategy.DEFAULT_NUM_REGION_GROUPS);
+ try {
+ CONF.setInt(BoundedGroupingStrategy.NUM_REGION_GROUPS, temp * 4);
+ final String parallelism = Integer.toString(temp * 4);
+ int errCode =
+ WALPerformanceEvaluation.innerMain(new Configuration(CONF), new String[] { "-threads",
+ parallelism, "-verify", "-noclosefs", "-iterations", "3000", "-regions", parallelism });
+ assertEquals(0, errCode);
+ } finally {
+ CONF.setInt(BoundedGroupingStrategy.NUM_REGION_GROUPS, temp);
+ }
+ }
+
+ @Test
+ public void testMoreRegionsThanBoundWithBoundsGreaterThanDefault() throws Exception {
+ final int temp = CONF.getInt(BoundedGroupingStrategy.NUM_REGION_GROUPS,
+ BoundedGroupingStrategy.DEFAULT_NUM_REGION_GROUPS);
+ try {
+ CONF.setInt(BoundedGroupingStrategy.NUM_REGION_GROUPS, temp * 4);
+ final String parallelism = Integer.toString(temp * 4 * 2);
+ int errCode =
+ WALPerformanceEvaluation.innerMain(new Configuration(CONF), new String[] { "-threads",
+ parallelism, "-verify", "-noclosefs", "-iterations", "3000", "-regions", parallelism });
+ assertEquals(0, errCode);
+ } finally {
+ CONF.setInt(BoundedGroupingStrategy.NUM_REGION_GROUPS, temp);
+ }
+ }
+}
diff --git a/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProviderWithConcurrentWrites.java b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProviderWithConcurrentWrites.java
new file mode 100644
index 000000000000..37efaa9f9dd6
--- /dev/null
+++ b/hbase-diagnostics/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProviderWithConcurrentWrites.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.wal;
+
+import static org.junit.Assert.assertEquals;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtil;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Tests for TestFSHLogProvider that use WALPerformanceEvaluation for WAL data creation. This
+ * class was created while carving the hbase-diagnostics module out in HBASE-28432 to break a
+ * cyclic dependency.
+ */
+@Category({ RegionServerTests.class, MediumTests.class })
+public class TestFSHLogProviderWithConcurrentWrites {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestFSHLogProviderWithConcurrentWrites.class);
+
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestFSHLogProviderWithConcurrentWrites.class);
+
+ private static FileSystem fs;
+ private final static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
+
+ @Rule
+ public final TestName currentTest = new TestName();
+
+ @Before
+ public void setUp() throws Exception {
+ FileStatus[] entries = fs.listStatus(new Path("/"));
+ for (FileStatus dir : entries) {
+ fs.delete(dir.getPath(), true);
+ }
+ }
+
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception {
+ // Make block sizes small.
+ TEST_UTIL.getConfiguration().setInt("dfs.blocksize", 1024 * 1024);
+ // quicker heartbeat interval for faster DN death notification
+ TEST_UTIL.getConfiguration().setInt("dfs.namenode.heartbeat.recheck-interval", 5000);
+ TEST_UTIL.getConfiguration().setInt("dfs.heartbeat.interval", 1);
+ TEST_UTIL.getConfiguration().setInt("dfs.client.socket-timeout", 5000);
+
+ // faster failover with cluster.shutdown();fs.close() idiom
+ TEST_UTIL.getConfiguration().setInt("hbase.ipc.client.connect.max.retries", 1);
+ TEST_UTIL.getConfiguration().setInt("dfs.client.block.recovery.retries", 1);
+ TEST_UTIL.getConfiguration().setInt("hbase.ipc.client.connection.maxidletime", 500);
+ TEST_UTIL.startMiniDFSCluster(3);
+
+ // Set up a working space for our tests.
+ TEST_UTIL.createRootDir();
+ fs = TEST_UTIL.getDFSCluster().getFileSystem();
+ }
+
+ @AfterClass
+ public static void tearDownAfterClass() throws Exception {
+ TEST_UTIL.shutdownMiniCluster();
+ }
+
+ /**
+ * Write to a log file with three concurrent threads and verify all data is written.
+ */
+ @Test
+ public void testConcurrentWrites() throws Exception {
+ // Run the WPE tool with three threads writing 3000 edits each concurrently.
+ // When done, verify that all edits were written.
+ int errCode =
+ WALPerformanceEvaluation.innerMain(new Configuration(TEST_UTIL.getConfiguration()),
+ new String[] { "-threads", "3", "-verify", "-noclosefs", "-iterations", "3000" });
+ assertEquals(0, errCode);
+ }
+}
diff --git a/hbase-mapreduce/src/test/resources/org/apache/hadoop/hbase/PerformanceEvaluation_Counter.properties b/hbase-diagnostics/src/test/resources/org/apache/hadoop/hbase/mapreduce/PerformanceEvaluation_Counter.properties
similarity index 100%
rename from hbase-mapreduce/src/test/resources/org/apache/hadoop/hbase/PerformanceEvaluation_Counter.properties
rename to hbase-diagnostics/src/test/resources/org/apache/hadoop/hbase/mapreduce/PerformanceEvaluation_Counter.properties
diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml
index 96033b151299..d36f753b283b 100644
--- a/hbase-it/pom.xml
+++ b/hbase-it/pom.xml
@@ -106,6 +106,12 @@
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-diagnostics</artifactId>
+      <scope>test</scope>
+    </dependency>
+
org.bouncycastle
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
index 1a0446381aed..85ad99bdcf8a 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase;
+import static org.apache.hadoop.hbase.IntegrationTestingUtility.createPreSplitLoadTestTable;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
@@ -212,8 +213,7 @@ private void createTable(TableName tableName) throws Exception {
LOG.info("Creating table {} with {} splits.", tableName,
regionsCountPerServer * regionServerCount);
startTime = EnvironmentEdgeManager.currentTime();
- HBaseTestingUtil.createPreSplitLoadTestTable(util.getConfiguration(), desc, columns,
- regionsCountPerServer);
+ createPreSplitLoadTestTable(util.getConfiguration(), desc, columns, regionsCountPerServer);
util.waitTableAvailable(tableName);
endTime = EnvironmentEdgeManager.currentTime();
LOG.info("Pre-split table created successfully in {}ms.", (endTime - startTime));
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestStripeCompactions.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestStripeCompactions.java
index 7bcb017cbb1b..e39bb2c90f44 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestStripeCompactions.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestStripeCompactions.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hbase;
+import static org.apache.hadoop.hbase.IntegrationTestingUtility.createPreSplitLoadTestTable;
+
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
@@ -45,8 +47,9 @@ protected void initTable() throws IOException {
.setValue(HStore.BLOCKING_STOREFILES_KEY, "100").build();
ColumnFamilyDescriptor familyDescriptor =
ColumnFamilyDescriptorBuilder.of(HFileTestUtil.DEFAULT_COLUMN_FAMILY);
- HBaseTestingUtil.createPreSplitLoadTestTable(util.getConfiguration(), tableDescriptor,
- familyDescriptor);
+ ColumnFamilyDescriptor[] columns = new ColumnFamilyDescriptor[] { familyDescriptor };
+ createPreSplitLoadTestTable(util.getConfiguration(), tableDescriptor, columns,
+ IntegrationTestingUtility.DEFAULT_REGIONS_PER_SERVER);
}
public static void main(String[] args) throws Exception {
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java
index e0dcb0c48582..e5aa4308651a 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java
@@ -24,7 +24,7 @@
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.EncryptionTest;
@@ -48,7 +48,7 @@ public void setUpCluster() throws Exception {
if (!util.isDistributedCluster()) {
// Inject required configuration if we are not running in distributed mode
conf.setInt(HFile.FORMAT_VERSION_KEY, 3);
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
conf.setBoolean(HConstants.ENABLE_WAL_ENCRYPTION, true);
}
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestingUtility.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestingUtility.java
index 729110c04a00..c61ab324575b 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestingUtility.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestingUtility.java
@@ -19,7 +19,16 @@
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.util.RegionSplitter;
import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Facility for integration/system tests. This extends {@link HBaseTestingUtil} and
@@ -39,6 +48,7 @@
* {@link #setUseDistributedCluster(Configuration)}.
*/
public class IntegrationTestingUtility extends HBaseTestingUtil {
+ private static final Logger LOG = LoggerFactory.getLogger(IntegrationTestingUtility.class);
public IntegrationTestingUtility() {
this(HBaseConfiguration.create());
@@ -64,6 +74,15 @@ public IntegrationTestingUtility(Configuration conf) {
private static final Class<? extends ClusterManager> DEFAULT_HBASE_CLUSTER_MANAGER_CLASS =
HBaseClusterManager.class;
+ public static final String REGIONS_PER_SERVER_KEY = "hbase.test.regions-per-server";
+ /**
+ * The default number of regions per regionserver when creating a pre-split table.
+ */
+ public static final int DEFAULT_REGIONS_PER_SERVER = 3;
+
+ public static final String PRESPLIT_TEST_TABLE_KEY = "hbase.test.pre-split-table";
+ public static final boolean PRESPLIT_TEST_TABLE = true;
+
/**
* Initializes the state of the cluster. It starts a new in-process mini cluster, OR if we are
* given an already deployed distributed cluster it initializes the state.
@@ -152,4 +171,51 @@ public void createDistributedHBaseCluster() throws IOException {
setHBaseCluster(new DistributedHBaseCluster(conf, clusterManager));
getAdmin();
}
+
+ /**
+ * Creates a pre-split table for load testing. If the table already exists, logs a warning and
+ * continues.
+ * @return the number of regions the table was split into
+ */
+ public static int createPreSplitLoadTestTable(Configuration conf, TableDescriptor td,
+ ColumnFamilyDescriptor[] cds, int numRegionsPerServer) throws IOException {
+ TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(td);
+ for (ColumnFamilyDescriptor cd : cds) {
+ if (!td.hasColumnFamily(cd.getName())) {
+ builder.setColumnFamily(cd);
+ }
+ }
+ td = builder.build();
+ int totalNumberOfRegions = 0;
+ Connection unmanagedConnection = ConnectionFactory.createConnection(conf);
+ Admin admin = unmanagedConnection.getAdmin();
+
+ try {
+ // Create a table with pre-split regions. The number of splits is computed as:
+ // (number of live region servers) * (regions per region server).
+ int numberOfServers = admin.getRegionServers().size();
+ if (numberOfServers == 0) {
+ throw new IllegalStateException("No live regionservers");
+ }
+
+ totalNumberOfRegions = numberOfServers * numRegionsPerServer;
+ LOG.info("Number of live regionservers: " + numberOfServers + ", "
+ + "pre-splitting table into " + totalNumberOfRegions + " regions " + "(regions per server: "
+ + numRegionsPerServer + ")");
+
+ byte[][] splits = new RegionSplitter.HexStringSplit().split(totalNumberOfRegions);
+
+ admin.createTable(td, splits);
+ } catch (MasterNotRunningException e) {
+ LOG.error("Master not running", e);
+ throw new IOException(e);
+ } catch (TableExistsException e) {
+ LOG.warn("Table " + td.getTableName() + " already exists, continuing");
+ } finally {
+ admin.close();
+ unmanagedConnection.close();
+ }
+ return totalNumberOfRegions;
+ }
}
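For callers, the relocated helper behaves like the removed HBaseTestingUtil overloads except that the split algorithm is now fixed to HexStringSplit. A minimal caller sketch; the table and family names are illustrative:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class PreSplitTableDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    TableDescriptor desc =
        TableDescriptorBuilder.newBuilder(TableName.valueOf("loadtest")).build();
    ColumnFamilyDescriptor[] families =
        new ColumnFamilyDescriptor[] { ColumnFamilyDescriptorBuilder.of("cf") };
    // Splits into (live regionservers) * numRegionsPerServer regions via
    // HexStringSplit; if the table already exists, it only logs a warning.
    int regions = IntegrationTestingUtility.createPreSplitLoadTestTable(conf, desc, families,
        IntegrationTestingUtility.DEFAULT_REGIONS_PER_SERVER);
    System.out.println("pre-split into " + regions + " regions");
  }
}
```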
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/LoadTestDataGeneratorWithVisibilityLabels.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/security/visibility/LoadTestDataGeneratorWithVisibilityLabels.java
similarity index 100%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/LoadTestDataGeneratorWithVisibilityLabels.java
rename to hbase-it/src/test/java/org/apache/hadoop/hbase/security/visibility/LoadTestDataGeneratorWithVisibilityLabels.java
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index 0e259f5072ae..58c329c0cd76 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -17,6 +17,11 @@
*/
package org.apache.hadoop.hbase.test;
+import static org.apache.hadoop.hbase.IntegrationTestingUtility.DEFAULT_REGIONS_PER_SERVER;
+import static org.apache.hadoop.hbase.IntegrationTestingUtility.PRESPLIT_TEST_TABLE;
+import static org.apache.hadoop.hbase.IntegrationTestingUtility.PRESPLIT_TEST_TABLE_KEY;
+import static org.apache.hadoop.hbase.IntegrationTestingUtility.REGIONS_PER_SERVER_KEY;
+
import java.io.DataInput;
import java.io.DataOutput;
import java.io.FileNotFoundException;
@@ -40,7 +45,6 @@
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.IntegrationTestBase;
@@ -751,16 +755,12 @@ protected void createSchema() throws IOException {
.build();
// If we want to pre-split compute how many splits.
- if (
- conf.getBoolean(HBaseTestingUtil.PRESPLIT_TEST_TABLE_KEY,
- HBaseTestingUtil.PRESPLIT_TEST_TABLE)
- ) {
+ if (conf.getBoolean(PRESPLIT_TEST_TABLE_KEY, PRESPLIT_TEST_TABLE)) {
int numberOfServers = admin.getRegionServers().size();
if (numberOfServers == 0) {
throw new IllegalStateException("No live regionservers");
}
- int regionsPerServer = conf.getInt(HBaseTestingUtil.REGIONS_PER_SERVER_KEY,
- HBaseTestingUtil.DEFAULT_REGIONS_PER_SERVER);
+ int regionsPerServer = conf.getInt(REGIONS_PER_SERVER_KEY, DEFAULT_REGIONS_PER_SERVER);
int totalNumberOfRegions = numberOfServers * regionsPerServer;
LOG.info("Number of live regionservers: " + numberOfServers + ", "
+ "pre-splitting table into " + totalNumberOfRegions + " regions "
@@ -1899,9 +1899,9 @@ private void printCommands() {
System.err.println(" -D" + TABLE_NAME_KEY + "=<tableName>");
System.err.println(
" Run using the <tableName> as the tablename. Defaults to " + DEFAULT_TABLE_NAME);
- System.err.println(" -D" + HBaseTestingUtil.REGIONS_PER_SERVER_KEY + "=<# regions>");
+ System.err.println(" -D" + REGIONS_PER_SERVER_KEY + "=<# regions>");
System.err.println(" Create table with presplit regions per server. Defaults to "
- + HBaseTestingUtil.DEFAULT_REGIONS_PER_SERVER);
+ + DEFAULT_REGIONS_PER_SERVER);
System.err.println(" -DuseMob=<true|false>");
System.err.println(
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadCommonCrawl.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadCommonCrawl.java
index fab08dbb2ec3..6d1ad065055b 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadCommonCrawl.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadCommonCrawl.java
@@ -17,6 +17,10 @@
*/
package org.apache.hadoop.hbase.test;
+import static org.apache.hadoop.hbase.IntegrationTestingUtility.DEFAULT_REGIONS_PER_SERVER;
+import static org.apache.hadoop.hbase.IntegrationTestingUtility.PRESPLIT_TEST_TABLE;
+import static org.apache.hadoop.hbase.IntegrationTestingUtility.PRESPLIT_TEST_TABLE_KEY;
+import static org.apache.hadoop.hbase.IntegrationTestingUtility.REGIONS_PER_SERVER_KEY;
import static org.apache.hadoop.hbase.util.FutureUtils.addListener;
import java.io.BufferedReader;
@@ -49,7 +53,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.IntegrationTestBase;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
@@ -478,16 +481,13 @@ void createSchema(final TableName tableName) throws IOException {
TableDescriptor tableDescriptor =
TableDescriptorBuilder.newBuilder(tableName).setColumnFamilies(families).build();
- if (
- getConf().getBoolean(HBaseTestingUtil.PRESPLIT_TEST_TABLE_KEY,
- HBaseTestingUtil.PRESPLIT_TEST_TABLE)
- ) {
+ if (getConf().getBoolean(PRESPLIT_TEST_TABLE_KEY, PRESPLIT_TEST_TABLE)) {
int numberOfServers = admin.getRegionServers().size();
if (numberOfServers == 0) {
throw new IllegalStateException("No live regionservers");
}
- int regionsPerServer = getConf().getInt(HBaseTestingUtil.REGIONS_PER_SERVER_KEY,
- HBaseTestingUtil.DEFAULT_REGIONS_PER_SERVER);
+ int regionsPerServer =
+ getConf().getInt(REGIONS_PER_SERVER_KEY, DEFAULT_REGIONS_PER_SERVER);
int totalNumberOfRegions = numberOfServers * regionsPerServer;
LOG.info("Creating test table: " + tableDescriptor);
LOG.info("Number of live regionservers: " + numberOfServers + ", "
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index 0714f27e64d9..0ba5834e075d 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -64,7 +64,6 @@
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HadoopShims;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.PerformanceEvaluation;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.StartTestingClusterOption;
@@ -137,7 +136,7 @@
/**
* Simple test for {@link HFileOutputFormat2}. Sets up and runs a mapreduce job that writes hfile
* output. Creates a few inner classes to implement splits and an inputformat that emits keys and
- * values like those of {@link PerformanceEvaluation}.
+ * values.
*/
@Category({ VerySlowMapReduceTests.class, LargeTests.class })
public class TestHFileOutputFormat2 {
@@ -147,6 +146,7 @@ public class TestHFileOutputFormat2 {
HBaseClassTestRule.forClass(TestHFileOutputFormat2.class);
private final static int ROWSPERSPLIT = 1024;
+ private static final int DEFAULT_VALUE_LENGTH = 1000;
public static final byte[] FAMILY_NAME = TestHRegionFileSystem.FAMILY_NAME;
private static final byte[][] FAMILIES =
@@ -554,18 +554,43 @@ private byte[][] generateRandomStartKeys(int numKeys) {
// first region start key is always empty
ret[0] = HConstants.EMPTY_BYTE_ARRAY;
for (int i = 1; i < numKeys; i++) {
- ret[i] =
- PerformanceEvaluation.generateData(random, PerformanceEvaluation.DEFAULT_VALUE_LENGTH);
+ ret[i] = generateData(random, DEFAULT_VALUE_LENGTH);
}
return ret;
}
+ /**
+ * This method takes some time and is run inline with the uploading of data; for example, in the
+ * mapfile test, generating the key and value consumes about 30% of CPU time.
+ * @return a generated random value to insert into a table cell
+ */
+ public static byte[] generateData(final Random r, int length) {
+ byte[] b = new byte[length];
+ int i;
+
+ for (i = 0; i < (length - 8); i += 8) {
+ b[i] = (byte) (65 + r.nextInt(26));
+ b[i + 1] = b[i];
+ b[i + 2] = b[i];
+ b[i + 3] = b[i];
+ b[i + 4] = b[i];
+ b[i + 5] = b[i];
+ b[i + 6] = b[i];
+ b[i + 7] = b[i];
+ }
+
+ byte a = (byte) (65 + r.nextInt(26));
+ for (; i < length; i++) {
+ b[i] = a;
+ }
+ return b;
+ }
+
private byte[][] generateRandomSplitKeys(int numKeys) {
Random random = ThreadLocalRandom.current();
byte[][] ret = new byte[numKeys][];
for (int i = 0; i < numKeys; i++) {
- ret[i] =
- PerformanceEvaluation.generateData(random, PerformanceEvaluation.DEFAULT_VALUE_LENGTH);
+ ret[i] = generateData(random, DEFAULT_VALUE_LENGTH);
}
return ret;
}
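Since generateData is now inlined here, a quick usage note: each value is a sequence of 8-byte runs, each run filled with one random uppercase ASCII letter, and any trailing remainder is padded with a single letter. A small demo, assuming the public static method is visible on the test classpath; the seed, length, and sample output are illustrative:

```java
import java.nio.charset.StandardCharsets;
import java.util.Random;
import org.apache.hadoop.hbase.mapreduce.TestHFileOutputFormat2;

public class GenerateDataDemo {
  public static void main(String[] args) {
    // 20 bytes: two 8-byte runs plus a 4-byte tail, e.g. "QQQQQQQQHHHHHHHHTTTT".
    byte[] v = TestHFileOutputFormat2.generateData(new Random(42), 20);
    System.out.println(new String(v, StandardCharsets.US_ASCII));
  }
}
```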
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java
similarity index 99%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java
rename to hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java
index 5f6297c8dc4a..32ad587ad96d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/MockRegionServerServices.java
@@ -60,6 +60,7 @@
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
+import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -70,6 +71,7 @@
/**
* Basic mock region server services. Should only be instantiated by HBaseTestingUtility.
*/
+@InterfaceAudience.Private
public class MockRegionServerServices implements RegionServerServices {
protected static final Logger LOG = LoggerFactory.getLogger(MockRegionServerServices.class);
private final Map<String, Region> regions = new HashMap<>();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java
similarity index 95%
rename from hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java
rename to hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java
index a5650adad914..a9ae79ffb6ca 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterAllFilter.java
@@ -20,7 +20,9 @@
import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.yetus.audience.InterfaceAudience;
+@InterfaceAudience.Private
public class FilterAllFilter extends FilterBase {
public FilterAllFilter() {
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java
similarity index 100%
rename from hbase-common/src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java
rename to hbase-server/src/main/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java
index 4d3f1f1ec085..6b638cdda7ff 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java
@@ -207,7 +207,7 @@ static WALProvider createProvider(Class<? extends WALProvider> clazz) throws IOE
* Create a WALFactory.
*/
@RestrictedApi(explanation = "Should only be called in tests", link = "",
- allowedOnPath = ".*/src/test/.*|.*/HBaseTestingUtility.java")
+ allowedOnPath = ".*/src/test/.*|.*/HBaseTestingUtility.java|.*/WALPerformanceEvaluation.java")
public WALFactory(Configuration conf, String factoryId) throws IOException {
// default enableSyncReplicationWALProvider is true, only disable SyncReplicationWALProvider
// for HMaster or HRegionServer which take system table only. See HBASE-19999
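The widened allowedOnPath pattern is what lets the relocated tool, which now lives under src/main in hbase-diagnostics, keep calling this constructor. A quick self-check of the regex; the paths are illustrative:

```java
import java.util.regex.Pattern;

public class AllowedPathCheck {
  public static void main(String[] args) {
    Pattern allowed = Pattern.compile(
        ".*/src/test/.*|.*/HBaseTestingUtility.java|.*/WALPerformanceEvaluation.java");
    // Matches the relocated tool even though it is no longer under src/test.
    System.out.println(allowed.matcher("hbase-diagnostics/src/main/java/"
        + "org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java").matches()); // true
    // Does not match arbitrary production code.
    System.out.println(allowed.matcher("hbase-server/src/main/java/"
        + "org/apache/hadoop/hbase/regionserver/HRegion.java").matches()); // false
  }
}
```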
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtil.java
index fd5b7dd729e0..6b659161d1cc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtil.java
@@ -130,8 +130,6 @@
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.ReflectionUtils;
-import org.apache.hadoop.hbase.util.RegionSplitter;
-import org.apache.hadoop.hbase.util.RegionSplitter.SplitAlgorithm;
import org.apache.hadoop.hbase.util.RetryCounter;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.wal.WAL;
@@ -183,15 +181,8 @@
@InterfaceStability.Evolving
public class HBaseTestingUtil extends HBaseZKTestingUtil {
- public static final String REGIONS_PER_SERVER_KEY = "hbase.test.regions-per-server";
- /**
- * The default number of regions per regionserver when creating a pre-split table.
- */
public static final int DEFAULT_REGIONS_PER_SERVER = 3;
- public static final String PRESPLIT_TEST_TABLE_KEY = "hbase.test.pre-split-table";
- public static final boolean PRESPLIT_TEST_TABLE = true;
-
private MiniDFSCluster dfsCluster = null;
private FsDatasetAsyncDiskServiceFixer dfsClusterFixer = null;
@@ -3351,139 +3342,6 @@ public static void waitForHostPort(String host, int port) throws IOException {
}
}
- /**
- * Creates a pre-split table for load testing. If the table already exists, logs a warning and
- * continues.
- * @return the number of regions the table was split into
- */
- public static int createPreSplitLoadTestTable(Configuration conf, TableName tableName,
- byte[] columnFamily, Algorithm compression, DataBlockEncoding dataBlockEncoding)
- throws IOException {
- return createPreSplitLoadTestTable(conf, tableName, columnFamily, compression,
- dataBlockEncoding, DEFAULT_REGIONS_PER_SERVER, 1, Durability.USE_DEFAULT);
- }
-
- /**
- * Creates a pre-split table for load testing. If the table already exists, logs a warning and
- * continues.
- * @return the number of regions the table was split into
- */
- public static int createPreSplitLoadTestTable(Configuration conf, TableName tableName,
- byte[] columnFamily, Algorithm compression, DataBlockEncoding dataBlockEncoding,
- int numRegionsPerServer, int regionReplication, Durability durability) throws IOException {
- TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
- builder.setDurability(durability);
- builder.setRegionReplication(regionReplication);
- ColumnFamilyDescriptorBuilder cfBuilder =
- ColumnFamilyDescriptorBuilder.newBuilder(columnFamily);
- cfBuilder.setDataBlockEncoding(dataBlockEncoding);
- cfBuilder.setCompressionType(compression);
- return createPreSplitLoadTestTable(conf, builder.build(), cfBuilder.build(),
- numRegionsPerServer);
- }
-
- /**
- * Creates a pre-split table for load testing. If the table already exists, logs a warning and
- * continues.
- * @return the number of regions the table was split into
- */
- public static int createPreSplitLoadTestTable(Configuration conf, TableName tableName,
- byte[][] columnFamilies, Algorithm compression, DataBlockEncoding dataBlockEncoding,
- int numRegionsPerServer, int regionReplication, Durability durability) throws IOException {
- TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
- builder.setDurability(durability);
- builder.setRegionReplication(regionReplication);
- ColumnFamilyDescriptor[] hcds = new ColumnFamilyDescriptor[columnFamilies.length];
- for (int i = 0; i < columnFamilies.length; i++) {
- ColumnFamilyDescriptorBuilder cfBuilder =
- ColumnFamilyDescriptorBuilder.newBuilder(columnFamilies[i]);
- cfBuilder.setDataBlockEncoding(dataBlockEncoding);
- cfBuilder.setCompressionType(compression);
- hcds[i] = cfBuilder.build();
- }
- return createPreSplitLoadTestTable(conf, builder.build(), hcds, numRegionsPerServer);
- }
-
- /**
- * Creates a pre-split table for load testing. If the table already exists, logs a warning and
- * continues.
- * @return the number of regions the table was split into
- */
- public static int createPreSplitLoadTestTable(Configuration conf, TableDescriptor desc,
- ColumnFamilyDescriptor hcd) throws IOException {
- return createPreSplitLoadTestTable(conf, desc, hcd, DEFAULT_REGIONS_PER_SERVER);
- }
-
- /**
- * Creates a pre-split table for load testing. If the table already exists, logs a warning and
- * continues.
- * @return the number of regions the table was split into
- */
- public static int createPreSplitLoadTestTable(Configuration conf, TableDescriptor desc,
- ColumnFamilyDescriptor hcd, int numRegionsPerServer) throws IOException {
- return createPreSplitLoadTestTable(conf, desc, new ColumnFamilyDescriptor[] { hcd },
- numRegionsPerServer);
- }
-
- /**
- * Creates a pre-split table for load testing. If the table already exists, logs a warning and
- * continues.
- * @return the number of regions the table was split into
- */
- public static int createPreSplitLoadTestTable(Configuration conf, TableDescriptor desc,
- ColumnFamilyDescriptor[] hcds, int numRegionsPerServer) throws IOException {
- return createPreSplitLoadTestTable(conf, desc, hcds, new RegionSplitter.HexStringSplit(),
- numRegionsPerServer);
- }
-
- /**
- * Creates a pre-split table for load testing. If the table already exists, logs a warning and
- * continues.
- * @return the number of regions the table was split into
- */
- public static int createPreSplitLoadTestTable(Configuration conf, TableDescriptor td,
- ColumnFamilyDescriptor[] cds, SplitAlgorithm splitter, int numRegionsPerServer)
- throws IOException {
- TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(td);
- for (ColumnFamilyDescriptor cd : cds) {
- if (!td.hasColumnFamily(cd.getName())) {
- builder.setColumnFamily(cd);
- }
- }
- td = builder.build();
- int totalNumberOfRegions = 0;
- Connection unmanagedConnection = ConnectionFactory.createConnection(conf);
- Admin admin = unmanagedConnection.getAdmin();
-
- try {
- // create a table a pre-splits regions.
- // The number of splits is set as:
- // region servers * regions per region server).
- int numberOfServers = admin.getRegionServers().size();
- if (numberOfServers == 0) {
- throw new IllegalStateException("No live regionservers");
- }
-
- totalNumberOfRegions = numberOfServers * numRegionsPerServer;
- LOG.info("Number of live regionservers: " + numberOfServers + ", "
- + "pre-splitting table into " + totalNumberOfRegions + " regions " + "(regions per server: "
- + numRegionsPerServer + ")");
-
- byte[][] splits = splitter.split(totalNumberOfRegions);
-
- admin.createTable(td, splits);
- } catch (MasterNotRunningException e) {
- LOG.error("Master not running", e);
- throw new IOException(e);
- } catch (TableExistsException e) {
- LOG.warn("Table " + td.getTableName() + " already exists, continuing");
- } finally {
- admin.close();
- unmanagedConnection.close();
- }
- return totalNumberOfRegions;
- }
-
public static int getMetaRSPort(Connection connection) throws IOException {
try (RegionLocator locator = connection.getRegionLocator(TableName.META_TABLE_NAME)) {
return locator.getRegionLocation(Bytes.toBytes("")).getPort();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
index 663c0d540499..bbcdceccdd6b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
@@ -46,7 +46,7 @@
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.crypto.Cipher;
import org.apache.hadoop.hbase.io.crypto.Encryption;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -77,7 +77,7 @@ public static void setUp() throws Exception {
Configuration conf = TEST_UTIL.getConfiguration();
// Disable block cache in this test.
conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
conf.setInt("hfile.format.version", 3);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionDisabled.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionDisabled.java
index 0506bd020726..6ab78e5974c4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionDisabled.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionDisabled.java
@@ -26,7 +26,7 @@
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.crypto.Encryption;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
@@ -56,7 +56,7 @@ public class TestEncryptionDisabled {
@BeforeClass
public static void setUp() throws Exception {
conf.setInt("hfile.format.version", 3);
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
conf.set(Encryption.CRYPTO_ENABLED_CONF_KEY, "false");
conf.set(TableDescriptorChecker.TABLE_SANITY_CHECKS, "true");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
index 6040084ee4dc..2b2da6125b2c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
@@ -41,7 +41,7 @@
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.crypto.Encryption;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
@@ -86,7 +86,7 @@ public class TestEncryptionKeyRotation {
@BeforeClass
public static void setUp() throws Exception {
conf.setInt("hfile.format.version", 3);
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
// Start the minicluster
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
index a71415a16a61..bfdd6211ec43 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
@@ -34,7 +34,7 @@
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.crypto.Encryption;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -89,7 +89,7 @@ private static byte[] extractHFileKey(Path path) throws Exception {
@BeforeClass
public static void setUp() throws Exception {
conf.setInt("hfile.format.version", 3);
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
// Create the table schema
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java
index 0ec1f75e2690..9ff116ee53f9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHMobStore.java
@@ -55,7 +55,7 @@
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.mob.MobConstants;
@@ -469,7 +469,7 @@ private static void flushStore(HMobStore store, long id) throws IOException {
public void testMOBStoreEncryption() throws Exception {
final Configuration conf = TEST_UTIL.getConfiguration();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
byte[] keyBytes = new byte[AES.KEY_LENGTH];
Bytes.secureRandom(keyBytes);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureAsyncWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureAsyncWALReplay.java
index b702ef394d52..c123fb2dcf16 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureAsyncWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureAsyncWALReplay.java
@@ -20,7 +20,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.junit.BeforeClass;
@@ -37,7 +37,7 @@ public class TestSecureAsyncWALReplay extends TestAsyncWALReplay {
@BeforeClass
public static void setUpBeforeClass() throws Exception {
Configuration conf = AbstractTestWALReplay.TEST_UTIL.getConfiguration();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
conf.setBoolean(HConstants.ENABLE_WAL_ENCRYPTION, true);
TestAsyncWALReplay.setUpBeforeClass();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureWALReplay.java
index 796996ce180a..e6a001b054b9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureWALReplay.java
@@ -20,7 +20,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.junit.BeforeClass;
@@ -37,7 +37,7 @@ public class TestSecureWALReplay extends TestWALReplay {
@BeforeClass
public static void setUpBeforeClass() throws Exception {
Configuration conf = AbstractTestWALReplay.TEST_UTIL.getConfiguration();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
conf.setBoolean(HConstants.ENABLE_WAL_ENCRYPTION, true);
AbstractTestWALReplay.setUpBeforeClass();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
index d017db6eb9bd..8f5e660f1f7f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileTestUtil.java
@@ -22,7 +22,6 @@
import static org.junit.Assert.fail;
import java.io.IOException;
-import java.util.Arrays;
import java.util.Locale;
import java.util.Optional;
import org.apache.hadoop.conf.Configuration;
@@ -51,10 +50,6 @@
* Utility class for HFile-related testing.
*/
public class HFileTestUtil {
-
-  public static final String OPT_DATA_BLOCK_ENCODING_USAGE = "Encoding algorithm (e.g. prefix "
-    + "compression) to use for data blocks in the test column family, " + "one of "
-    + Arrays.toString(DataBlockEncoding.values()) + ".";
public static final String OPT_DATA_BLOCK_ENCODING =
ColumnFamilyDescriptorBuilder.DATA_BLOCK_ENCODING.toLowerCase(Locale.ROOT);
/** Column family used by the test */
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java
index f9459bc5cdd1..f0cc2febd6e8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestEncryptionTest.java
@@ -30,7 +30,7 @@
import org.apache.hadoop.hbase.io.crypto.DefaultCipherProvider;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.crypto.KeyProvider;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.ClassRule;
@@ -47,7 +47,7 @@ public class TestEncryptionTest {
@Test
public void testTestKeyProvider() throws Exception {
Configuration conf = HBaseConfiguration.create();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
EncryptionTest.testKeyProvider(conf);
}
@@ -77,7 +77,7 @@ public void testBadCipherProvider() throws Exception {
@Test
public void testAESCipher() {
Configuration conf = HBaseConfiguration.create();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
try {
EncryptionTest.testEncryption(conf, algorithm, null);
@@ -89,7 +89,7 @@ public void testAESCipher() {
@Test(expected = IOException.class)
public void testUnknownCipher() throws Exception {
Configuration conf = HBaseConfiguration.create();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
EncryptionTest.testEncryption(conf, "foobar", null);
fail("Test for bogus cipher should have failed");
}
@@ -97,7 +97,7 @@ public void testUnknownCipher() throws Exception {
@Test
public void testTestEnabledWithDefaultConfig() {
Configuration conf = HBaseConfiguration.create();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
try {
EncryptionTest.testEncryption(conf, algorithm, null);
@@ -110,7 +110,7 @@ public void testTestEnabledWithDefaultConfig() {
@Test
public void testTestEnabledWhenCryptoIsExplicitlyEnabled() {
Configuration conf = HBaseConfiguration.create();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
conf.setBoolean(Encryption.CRYPTO_ENABLED_CONF_KEY, true);
try {
@@ -124,7 +124,7 @@ public void testTestEnabledWhenCryptoIsExplicitlyEnabled() {
@Test(expected = IOException.class)
public void testTestEnabledWhenCryptoIsExplicitlyDisabled() throws Exception {
Configuration conf = HBaseConfiguration.create();
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
conf.setBoolean(Encryption.CRYPTO_ENABLED_CONF_KEY, false);
EncryptionTest.testEncryption(conf, algorithm, null);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
index aeed1a9a4837..560ebc70c065 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
@@ -38,7 +38,7 @@
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.crypto.Encryption;
-import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.io.crypto.MockAesKeyProvider;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
@@ -78,7 +78,7 @@ public class TestHBaseFsckEncryption {
public void setUp() throws Exception {
conf = TEST_UTIL.getConfiguration();
conf.setInt("hfile.format.version", 3);
- conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
+ conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, MockAesKeyProvider.class.getName());
conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
// Create the test encryption key
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java
index 87287879ce20..f2571851f71f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java
@@ -17,11 +17,6 @@
*/
package org.apache.hadoop.hbase.wal;
-import static org.apache.hadoop.hbase.wal.BoundedGroupingStrategy.DEFAULT_NUM_REGION_GROUPS;
-import static org.apache.hadoop.hbase.wal.BoundedGroupingStrategy.NUM_REGION_GROUPS;
-import static org.apache.hadoop.hbase.wal.RegionGroupingProvider.DELEGATE_PROVIDER;
-import static org.apache.hadoop.hbase.wal.RegionGroupingProvider.REGION_GROUPING_STRATEGY;
-import static org.apache.hadoop.hbase.wal.WALFactory.WAL_PROVIDER;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
@@ -65,10 +60,10 @@ public class TestBoundedRegionGroupingStrategy {
private static final Logger LOG =
LoggerFactory.getLogger(TestBoundedRegionGroupingStrategy.class);
- private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
+ protected static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
- private static Configuration CONF;
- private static DistributedFileSystem FS;
+ protected static Configuration CONF;
+ protected static DistributedFileSystem FS;
@Parameter
public String walProvider;
@@ -80,7 +75,7 @@ public static Iterable<Object[]> data() {
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-diagnostics</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-endpoint</artifactId>
@@ -1361,6 +1369,11 @@
      <artifactId>hbase-openssl</artifactId>
      <version>${project.version}</version>
    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-diagnostics</artifactId>
+      <version>${project.version}</version>
+    </dependency>
    <dependency>
      <groupId>com.github.stephenc.findbugs</groupId>