Posted to commits@hbase.apache.org by bi...@apache.org on 2019/12/24 11:29:25 UTC

[hbase] branch branch-2 updated: HBASE-23581 Creating table gets stuck when specifying an invalid split policy as METADATA (#942)

This is an automated email from the ASF dual-hosted git repository.

binlijin pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2 by this push:
     new 0936bc2  HBASE-23581 Creating table gets stuck when specifying an invalid split policy as METADATA (#942)
0936bc2 is described below

commit 0936bc23c83105d6f65c7f6a4dc2da98231e55ec
Author: Toshihiro Suzuki <br...@gmail.com>
AuthorDate: Tue Dec 24 20:16:31 2019 +0900

    HBASE-23581 Creating table gets stuck when specifying an invalid split policy as METADATA (#942)
    
    Signed-off-by: Lijin Bin <bi...@apache.org>
    Signed-off-by: Anoop Sam John <an...@apache.org>
    Signed-off-by: Xu Cang <xu...@apache.org>
---
 .../apache/hadoop/hbase/regionserver/HRegion.java  | 26 ++++------------------
 .../hadoop/hbase/util/TableDescriptorChecker.java  | 12 ++++++----
 .../hbase/client/TestIllegalTableDescriptor.java   |  6 +++++
 3 files changed, 18 insertions(+), 26 deletions(-)
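
The root cause: a split policy supplied as table METADATA lands in the descriptor's value map under the config key hbase.regionserver.region.split.policy. The master's sanityCheck read only the site Configuration, so it never saw that value; the region open did, failed to load the class, and the create procedure retried the open indefinitely. A minimal reproduction sketch against a running cluster (the table and family names are illustrative, and the surrounding method must declare IOException):

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Admin admin = conn.getAdmin()) {
      admin.createTable(TableDescriptorBuilder
          .newBuilder(TableName.valueOf("t1"))
          .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f"))
          // METADATA-style config value; note this is NOT setRegionSplitPolicyClassName(),
          // which the old sanityCheck did validate
          .setValue(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, "nonexisting.foo.class")
          .build());
      // Before this patch: createTable blocks while the region open is retried.
      // After: the master rejects the descriptor up front.
    }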

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index d1490c5..c85fb59 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -168,6 +168,7 @@ import org.apache.hadoop.hbase.util.HashedBytes;
 import org.apache.hadoop.hbase.util.NonceKey;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
+import org.apache.hadoop.hbase.util.TableDescriptorChecker;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.wal.WALEdit;
@@ -7334,12 +7335,12 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
   throws IOException {
     try {
       // Refuse to open the region if we are missing local compression support
-      checkCompressionCodecs();
+      TableDescriptorChecker.checkCompression(htableDescriptor);
       // Refuse to open the region if encryption configuration is incorrect or
       // codec support is missing
-      checkEncryption();
+      TableDescriptorChecker.checkEncryption(conf, htableDescriptor);
       // Refuse to open the region if a required class cannot be loaded
-      checkClassLoading();
+      TableDescriptorChecker.checkClassLoading(conf, htableDescriptor);
       this.openSeqNum = initialize(reporter);
       this.mvcc.advanceTo(openSeqNum);
       // The openSeqNum must be increased every time when a region is assigned, as we rely on it to
@@ -7410,25 +7411,6 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
     r.initializeWarmup(reporter);
   }
 
-
-  private void checkCompressionCodecs() throws IOException {
-    for (ColumnFamilyDescriptor fam: this.htableDescriptor.getColumnFamilies()) {
-      CompressionTest.testCompression(fam.getCompressionType());
-      CompressionTest.testCompression(fam.getCompactionCompressionType());
-    }
-  }
-
-  private void checkEncryption() throws IOException {
-    for (ColumnFamilyDescriptor fam: this.htableDescriptor.getColumnFamilies()) {
-      EncryptionTest.testEncryption(conf, fam.getEncryptionType(), fam.getEncryptionKey());
-    }
-  }
-
-  private void checkClassLoading() throws IOException {
-    RegionSplitPolicy.getSplitPolicyClass(this.htableDescriptor, conf);
-    RegionCoprocessorHost.testTableCoprocessorAttrs(conf, this.htableDescriptor);
-  }
-
   /**
    * Computes the Path of the HRegion
    *
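
The three private helpers removed above were near-duplicates of checks that already lived in TableDescriptorChecker; the open path now calls the shared static versions. As a standalone illustration (no cluster needed; the class name is made up), the class-loading check resolves the split policy against the supplied Configuration, which is what lets it catch the bad value once that Configuration includes the table's METADATA:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
    import org.apache.hadoop.hbase.util.TableDescriptorChecker;

    Configuration conf = HBaseConfiguration.create();
    conf.set(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, "nonexisting.foo.class");
    TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("t1")).build();
    try {
      // Resolves the split policy class and validates coprocessor attributes.
      TableDescriptorChecker.checkClassLoading(conf, td);
    } catch (IOException e) {
      // getSplitPolicyClass() wraps the failed class lookup in an IOException.
    }
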
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/TableDescriptorChecker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/TableDescriptorChecker.java
index 3852a41..906ae45 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/TableDescriptorChecker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/TableDescriptorChecker.java
@@ -63,8 +63,12 @@ public final class TableDescriptorChecker {
    * Checks whether the table conforms to some sane limits, and configured
    * values (compression, etc) work. Throws an exception if something is wrong.
    */
-  public static void sanityCheck(final Configuration conf, final TableDescriptor td)
+  public static void sanityCheck(final Configuration c, final TableDescriptor td)
       throws IOException {
+    CompoundConfiguration conf = new CompoundConfiguration()
+      .add(c)
+      .addBytesMap(td.getValues());
+
     // Setting this to true logs the warning instead of throwing exception
     boolean logWarn = false;
     if (!conf.getBoolean(TABLE_SANITY_CHECKS, DEFAULT_TABLE_SANITY_CHECKS)) {
@@ -276,21 +280,21 @@ public final class TableDescriptorChecker {
     }
   }
 
-  private static void checkCompression(final TableDescriptor td) throws IOException {
+  public static void checkCompression(final TableDescriptor td) throws IOException {
     for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {
       CompressionTest.testCompression(cfd.getCompressionType());
       CompressionTest.testCompression(cfd.getCompactionCompressionType());
     }
   }
 
-  private static void checkEncryption(final Configuration conf, final TableDescriptor td)
+  public static void checkEncryption(final Configuration conf, final TableDescriptor td)
       throws IOException {
     for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {
       EncryptionTest.testEncryption(conf, cfd.getEncryptionType(), cfd.getEncryptionKey());
     }
   }
 
-  private static void checkClassLoading(final Configuration conf, final TableDescriptor td)
+  public static void checkClassLoading(final Configuration conf, final TableDescriptor td)
       throws IOException {
     RegionSplitPolicy.getSplitPolicyClass(td, conf);
     RegionCoprocessorHost.testTableCoprocessorAttrs(conf, td);
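
This CompoundConfiguration overlay is the heart of the fix: sanityCheck now evaluates every check against the same effective configuration a region would construct for itself, i.e. the site config with the descriptor's value map layered on top. A small sketch of the lookup semantics, assuming (as the class is used elsewhere in HBase) that later-added sources take precedence:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.CompoundConfiguration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

    Configuration site = HBaseConfiguration.create();
    TableDescriptor td = TableDescriptorBuilder
        .newBuilder(TableName.valueOf("t1"))
        .setValue(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, "nonexisting.foo.class")
        .build();
    CompoundConfiguration conf = new CompoundConfiguration()
        .add(site)                     // base: hbase-site.xml and defaults
        .addBytesMap(td.getValues());  // overlay: per-table METADATA wins
    // The checks now see the table-level value the region open would see:
    conf.get(HConstants.HBASE_REGION_SPLIT_POLICY_KEY); // "nonexisting.foo.class"
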
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIllegalTableDescriptor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIllegalTableDescriptor.java
index e55f3ff..dc56cf7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIllegalTableDescriptor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIllegalTableDescriptor.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
@@ -112,6 +113,11 @@ public class TestIllegalTableDescriptor {
     htd.setRegionSplitPolicyClassName(null);
     checkTableIsLegal(htd);
 
+    htd.setValue(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, "nonexisting.foo.class");
+    checkTableIsIllegal(htd);
+    htd.remove(HConstants.HBASE_REGION_SPLIT_POLICY_KEY);
+    checkTableIsLegal(htd);
+
     hcd.setBlocksize(0);
     checkTableIsIllegal(htd);
     hcd.setBlocksize(1024 * 1024 * 128); // 128M
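
The new assertions mirror the reported scenario: the config-key form of the split policy must now be rejected, and removing it must make the descriptor legal again. The helpers are not shown in this hunk; a plausible shape for checkTableIsIllegal, assuming the usual pattern in this test class (TEST_UTIL and the exact assertions are my guesses, not the committed code):

    import static org.junit.Assert.fail;

    private void checkTableIsIllegal(HTableDescriptor htd) throws Exception {
      Admin admin = TEST_UTIL.getAdmin();
      try {
        admin.createTable(htd);
        fail("Expected the master's sanity check to reject this descriptor");
      } catch (Exception e) {
        // expected: creation fails fast instead of getting stuck
      }
    }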