HBASE-23581 Creating table gets stuck when specifying an invalid split policy as METADATA (apache#942)

Signed-off-by: Lijin Bin <[email protected]>
Signed-off-by: Anoop Sam John <[email protected]>
Signed-off-by: Xu Cang <[email protected]>
(cherry picked from commit e18c99f)

Change-Id: Iee5c19809916e646bf4ff5697f2496ba741e5abe
brfrn169 authored and Jenkins committed Dec 24, 2019
1 parent c05bdae commit ff9cd68
Showing 3 changed files with 18 additions and 26 deletions.
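In short: with this change, an invalid split policy class supplied as table metadata is rejected when the descriptor is validated, instead of leaving createTable stuck. The per-descriptor checks in TableDescriptorChecker become public so HRegion can reuse them, and sanityCheck() now also honors configuration overrides stored in the descriptor itself. A minimal reproduction sketch (hypothetical table and family names; an Admin handle from an existing Connection is assumed):

import java.io.IOException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

TableDescriptor td = TableDescriptorBuilder
    .newBuilder(TableName.valueOf("test_table"))
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf"))
    // Split policy set as table metadata, as in the bug report
    .setValue(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, "nonexisting.foo.class")
    .build();
try {
  admin.createTable(td);  // with the fix: fails fast instead of hanging
} catch (IOException e) {
  // expected: the sanity check cannot load "nonexisting.foo.class"
}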
@@ -168,6 +168,7 @@
 import org.apache.hadoop.hbase.util.NonceKey;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
+import org.apache.hadoop.hbase.util.TableDescriptorChecker;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.wal.WALEdit;
@@ -7329,12 +7330,12 @@ protected HRegion openHRegion(final CancelableProgressable reporter)
       throws IOException {
     try {
       // Refuse to open the region if we are missing local compression support
-      checkCompressionCodecs();
+      TableDescriptorChecker.checkCompression(htableDescriptor);
       // Refuse to open the region if encryption configuration is incorrect or
       // codec support is missing
-      checkEncryption();
+      TableDescriptorChecker.checkEncryption(conf, htableDescriptor);
       // Refuse to open the region if a required class cannot be loaded
-      checkClassLoading();
+      TableDescriptorChecker.checkClassLoading(conf, htableDescriptor);
       this.openSeqNum = initialize(reporter);
       this.mvcc.advanceTo(openSeqNum);
       // The openSeqNum must be increased every time when a region is assigned, as we rely on it to
@@ -7405,25 +7406,6 @@ public static void warmupHRegion(final RegionInfo info,
     r.initializeWarmup(reporter);
   }

-
-  private void checkCompressionCodecs() throws IOException {
-    for (ColumnFamilyDescriptor fam: this.htableDescriptor.getColumnFamilies()) {
-      CompressionTest.testCompression(fam.getCompressionType());
-      CompressionTest.testCompression(fam.getCompactionCompressionType());
-    }
-  }
-
-  private void checkEncryption() throws IOException {
-    for (ColumnFamilyDescriptor fam: this.htableDescriptor.getColumnFamilies()) {
-      EncryptionTest.testEncryption(conf, fam.getEncryptionType(), fam.getEncryptionKey());
-    }
-  }
-
-  private void checkClassLoading() throws IOException {
-    RegionSplitPolicy.getSplitPolicyClass(this.htableDescriptor, conf);
-    RegionCoprocessorHost.testTableCoprocessorAttrs(conf, this.htableDescriptor);
-  }
-
   /**
    * Computes the Path of the HRegion
    *
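The HRegion side of the change above is pure deduplication: the three private helpers are removed and openHRegion() delegates to the now-public TableDescriptorChecker methods, so region open and the master's descriptor validation share one implementation. A sketch of the shared entry points (illustrative fragment, not part of the patch):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.TableDescriptorChecker;

Configuration conf = HBaseConfiguration.create();
TableDescriptor td = TableDescriptorBuilder
    .newBuilder(TableName.valueOf("t1"))
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f"))
    .build();
TableDescriptorChecker.checkCompression(td);        // per-family codec support
TableDescriptorChecker.checkEncryption(conf, td);   // per-family cipher and key material
TableDescriptorChecker.checkClassLoading(conf, td); // split policy and coprocessor classes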
@@ -63,8 +63,12 @@ private TableDescriptorChecker() {
    * Checks whether the table conforms to some sane limits, and configured
    * values (compression, etc) work. Throws an exception if something is wrong.
    */
-  public static void sanityCheck(final Configuration conf, final TableDescriptor td)
+  public static void sanityCheck(final Configuration c, final TableDescriptor td)
       throws IOException {
+    CompoundConfiguration conf = new CompoundConfiguration()
+      .add(c)
+      .addBytesMap(td.getValues());
+
     // Setting this to true logs the warning instead of throwing exception
     boolean logWarn = false;
     if (!conf.getBoolean(TABLE_SANITY_CHECKS, DEFAULT_TABLE_SANITY_CHECKS)) {
@@ -276,21 +280,21 @@ private static void checkBloomFilterType(ColumnFamilyDescriptor cfd) throws IOException {
     }
   }

-  private static void checkCompression(final TableDescriptor td) throws IOException {
+  public static void checkCompression(final TableDescriptor td) throws IOException {
     for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {
       CompressionTest.testCompression(cfd.getCompressionType());
       CompressionTest.testCompression(cfd.getCompactionCompressionType());
     }
   }

-  private static void checkEncryption(final Configuration conf, final TableDescriptor td)
+  public static void checkEncryption(final Configuration conf, final TableDescriptor td)
       throws IOException {
     for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {
       EncryptionTest.testEncryption(conf, cfd.getEncryptionType(), cfd.getEncryptionKey());
     }
   }

-  private static void checkClassLoading(final Configuration conf, final TableDescriptor td)
+  public static void checkClassLoading(final Configuration conf, final TableDescriptor td)
       throws IOException {
     RegionSplitPolicy.getSplitPolicyClass(td, conf);
     RegionCoprocessorHost.testTableCoprocessorAttrs(conf, td);
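The CompoundConfiguration added to sanityCheck() layers the descriptor's values over the site configuration, and the most recently added source takes precedence; as a result, a setting such as hbase.table.sanity.checks stored in the table descriptor wins over the cluster-wide value. A small sketch of that layering (illustrative fragment; the per-table override shown is an assumption about intended use):

Configuration site = HBaseConfiguration.create();   // hbase.table.sanity.checks defaults to true
TableDescriptor td = TableDescriptorBuilder
    .newBuilder(TableName.valueOf("t"))
    .setValue("hbase.table.sanity.checks", "false") // table-level override stored as metadata
    .build();
CompoundConfiguration conf = new CompoundConfiguration()
    .add(site)                     // cluster configuration, lower precedence
    .addBytesMap(td.getValues());  // descriptor values, higher precedence
// The descriptor value wins: checksEnabled is false here.
boolean checksEnabled = conf.getBoolean("hbase.table.sanity.checks", true);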
@@ -31,6 +31,7 @@
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
@@ -112,6 +113,11 @@ public void testIllegalTableDescriptor() throws Exception {
     htd.setRegionSplitPolicyClassName(null);
     checkTableIsLegal(htd);

+    htd.setValue(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, "nonexisting.foo.class");
+    checkTableIsIllegal(htd);
+    htd.remove(HConstants.HBASE_REGION_SPLIT_POLICY_KEY);
+    checkTableIsLegal(htd);
+
     hcd.setBlocksize(0);
     checkTableIsIllegal(htd);
     hcd.setBlocksize(1024 * 1024 * 128); // 128M
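The new test lines exercise exactly the reported scenario: the split policy is set as metadata via HConstants.HBASE_REGION_SPLIT_POLICY_KEY, the descriptor must be rejected, and once the key is removed it must pass again. The checkTableIsLegal/checkTableIsIllegal helpers are defined elsewhere in this test class; a rough sketch of their assumed shape:

private void checkTableIsIllegal(HTableDescriptor htd) throws IOException {
  Admin admin = TEST_UTIL.getAdmin();  // TEST_UTIL: the shared HBaseTestingUtility
  try {
    admin.createTable(htd);
    fail("table creation should have been rejected");
  } catch (Exception expected) {
    // e.g. a DoNotRetryIOException raised by the master's descriptor checks
  }
  assertFalse(admin.tableExists(htd.getTableName()));
}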
