Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -169,6 +169,7 @@
import org.apache.hadoop.hbase.util.NonceKey;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.hadoop.hbase.util.TableDescriptorChecker;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALEdit;
Expand Down Expand Up @@ -7371,14 +7372,14 @@ protected HRegion openHRegion(final CancelableProgressable reporter)
throws IOException {
try {
// Refuse to open the region if we are missing local compression support
checkCompressionCodecs();
TableDescriptorChecker.checkCompression(htableDescriptor);
// Refuse to open the region if encryption configuration is incorrect or
// codec support is missing
LOG.debug("checking encryption for " + this.getRegionInfo().getEncodedName());
checkEncryption();
TableDescriptorChecker.checkEncryption(conf, htableDescriptor);
// Refuse to open the region if a required class cannot be loaded
LOG.debug("checking classloading for " + this.getRegionInfo().getEncodedName());
checkClassLoading();
TableDescriptorChecker.checkClassLoading(conf, htableDescriptor);
this.openSeqNum = initialize(reporter);
this.mvcc.advanceTo(openSeqNum);
// The openSeqNum must be increased every time when a region is assigned, as we rely on it to
Expand Down Expand Up @@ -7448,25 +7449,6 @@ public static void warmupHRegion(final RegionInfo info,
r.initializeWarmup(reporter);
}


private void checkCompressionCodecs() throws IOException {
  // Fail fast: verify that the codec configured for every column family —
  // both the regular and the compaction-time compression — is loadable on
  // this server before the region is allowed to open.
  for (ColumnFamilyDescriptor family : this.htableDescriptor.getColumnFamilies()) {
    CompressionTest.testCompression(family.getCompressionType());
    CompressionTest.testCompression(family.getCompactionCompressionType());
  }
}

private void checkEncryption() throws IOException {
  // Reject the region early if any family declares an encryption type or key
  // that cannot be used with the current server configuration.
  for (ColumnFamilyDescriptor family : this.htableDescriptor.getColumnFamilies()) {
    EncryptionTest.testEncryption(conf, family.getEncryptionType(), family.getEncryptionKey());
  }
}

// Resolves the table's split-policy class and validates its coprocessor
// attributes so that a missing or unloadable class fails region open here
// rather than surfacing later during region operation.
// Throws IOException when a required class cannot be loaded.
private void checkClassLoading() throws IOException {
RegionSplitPolicy.getSplitPolicyClass(this.htableDescriptor, conf);
RegionCoprocessorHost.testTableCoprocessorAttrs(conf, this.htableDescriptor);
}

/**
* Computes the Path of the HRegion
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,8 +63,12 @@ private TableDescriptorChecker() {
* Checks whether the table conforms to some sane limits, and configured
* values (compression, etc) work. Throws an exception if something is wrong.
*/
public static void sanityCheck(final Configuration conf, final TableDescriptor td)
public static void sanityCheck(final Configuration c, final TableDescriptor td)
throws IOException {
CompoundConfiguration conf = new CompoundConfiguration()
.add(c)
.addBytesMap(td.getValues());

// Setting this to true logs the warning instead of throwing exception
boolean logWarn = false;
if (!conf.getBoolean(TABLE_SANITY_CHECKS, DEFAULT_TABLE_SANITY_CHECKS)) {
Expand Down Expand Up @@ -276,21 +280,21 @@ private static void checkBloomFilterType(ColumnFamilyDescriptor cfd) throws IOEx
}
}

private static void checkCompression(final TableDescriptor td) throws IOException {
public static void checkCompression(final TableDescriptor td) throws IOException {
  // Ensure both the main and the compaction compression codec of every
  // column family in the descriptor can be instantiated locally; throws
  // IOException when a codec is unavailable.
  for (ColumnFamilyDescriptor family : td.getColumnFamilies()) {
    CompressionTest.testCompression(family.getCompressionType());
    CompressionTest.testCompression(family.getCompactionCompressionType());
  }
}

private static void checkEncryption(final Configuration conf, final TableDescriptor td)
public static void checkEncryption(final Configuration conf, final TableDescriptor td)
    throws IOException {
  // Verify that each family's encryption cipher and key material are usable
  // under the supplied configuration; throws IOException on a bad setup.
  for (ColumnFamilyDescriptor family : td.getColumnFamilies()) {
    EncryptionTest.testEncryption(conf, family.getEncryptionType(), family.getEncryptionKey());
  }
}

private static void checkClassLoading(final Configuration conf, final TableDescriptor td)
public static void checkClassLoading(final Configuration conf, final TableDescriptor td)
throws IOException {
RegionSplitPolicy.getSplitPolicyClass(td, conf);
RegionCoprocessorHost.testTableCoprocessorAttrs(conf, td);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.testclassification.ClientTests;
Expand Down Expand Up @@ -112,6 +113,11 @@ public void testIllegalTableDescriptor() throws Exception {
htd.setRegionSplitPolicyClassName(null);
checkTableIsLegal(htd);

htd.setValue(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, "nonexisting.foo.class");
checkTableIsIllegal(htd);
htd.remove(HConstants.HBASE_REGION_SPLIT_POLICY_KEY);
checkTableIsLegal(htd);

hcd.setBlocksize(0);
checkTableIsIllegal(htd);
hcd.setBlocksize(1024 * 1024 * 128); // 128M
Expand Down