You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by zh...@apache.org on 2017/12/20 01:30:15 UTC

[08/24] hbase git commit: HBASE-18440 ITs and Actions modify immutable TableDescriptors

HBASE-18440 ITs and Actions modify immutable TableDescriptors

Signed-off-by: Guanghao Zhang <zg...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/74beb5a3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/74beb5a3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/74beb5a3

Branch: refs/heads/HBASE-19397
Commit: 74beb5a3b95c64abd8cdceb67db0233b89da0746
Parents: e343b0c
Author: Mike Drob <md...@apache.org>
Authored: Sun Jul 23 12:57:23 2017 -0500
Committer: Guanghao Zhang <zg...@apache.org>
Committed: Tue Dec 19 09:59:07 2017 +0800

----------------------------------------------------------------------
 .../IntegrationTestIngestWithEncryption.java    | 19 ++++++++------
 .../hbase/chaos/actions/AddColumnAction.java    | 21 ++++++++-------
 .../actions/DecreaseMaxHFileSizeAction.java     | 27 ++++++++++++--------
 3 files changed, 40 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/74beb5a3/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java
index 1f85a51..e730239 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngestWithEncryption.java
@@ -24,6 +24,9 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Waiter.Predicate;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileReaderImpl;
@@ -73,7 +76,7 @@ public class IntegrationTestIngestWithEncryption extends IntegrationTestIngest {
     try {
       EncryptionTest.testEncryption(conf, "AES", null);
     } catch (Exception e) {
-      LOG.warn("Encryption configuration test did not pass, skipping test");
+      LOG.warn("Encryption configuration test did not pass, skipping test", e);
       return;
     }
     super.setUpCluster();
@@ -94,14 +97,14 @@ public class IntegrationTestIngestWithEncryption extends IntegrationTestIngest {
     // Update the test table schema so HFiles from this point will be written with
     // encryption features enabled.
     final Admin admin = util.getAdmin();
-    HTableDescriptor tableDescriptor =
-        new HTableDescriptor(admin.getTableDescriptor(getTablename()));
-    for (HColumnDescriptor columnDescriptor: tableDescriptor.getColumnFamilies()) {
-      columnDescriptor.setEncryptionType("AES");
-      LOG.info("Updating CF schema for " + getTablename() + "." +
-        columnDescriptor.getNameAsString());
+    TableDescriptor tableDescriptor = admin.getDescriptor(getTablename());
+    for (ColumnFamilyDescriptor columnDescriptor : tableDescriptor.getColumnFamilies()) {
+      ColumnFamilyDescriptor updatedColumn = ColumnFamilyDescriptorBuilder
+          .newBuilder(columnDescriptor).setEncryptionType("AES").build();
+      LOG.info(
+        "Updating CF schema for " + getTablename() + "." + columnDescriptor.getNameAsString());
       admin.disableTable(getTablename());
-      admin.modifyColumnFamily(getTablename(), columnDescriptor);
+      admin.modifyColumnFamily(getTablename(), updatedColumn);
       admin.enableTable(getTablename());
       util.waitFor(30000, 1000, true, new Predicate<IOException>() {
         @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/74beb5a3/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/AddColumnAction.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/AddColumnAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/AddColumnAction.java
index 0ef8cfd..6c8554a 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/AddColumnAction.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/AddColumnAction.java
@@ -21,10 +21,12 @@ package org.apache.hadoop.hbase.chaos.actions;
 import java.io.IOException;
 
 import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 
 /**
 * Action that adds a column family to a table.
@@ -45,12 +47,12 @@ public class AddColumnAction extends Action {
 
   @Override
   public void perform() throws Exception {
-    HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);
-    HColumnDescriptor columnDescriptor = null;
+    TableDescriptor tableDescriptor = admin.getDescriptor(tableName);
+    ColumnFamilyDescriptor columnDescriptor = null;
 
-    while(columnDescriptor == null ||
-        tableDescriptor.getFamily(columnDescriptor.getName()) != null) {
-      columnDescriptor = new HColumnDescriptor(RandomStringUtils.randomAlphabetic(5));
+    while (columnDescriptor == null
+        || tableDescriptor.getColumnFamily(columnDescriptor.getName()) != null) {
+      columnDescriptor = ColumnFamilyDescriptorBuilder.of(RandomStringUtils.randomAlphabetic(5));
     }
 
     // Don't try the modify if we're stopping
@@ -60,7 +62,8 @@ public class AddColumnAction extends Action {
 
     LOG.debug("Performing action: Adding " + columnDescriptor + " to " + tableName);
 
-    tableDescriptor.addFamily(columnDescriptor);
-    admin.modifyTable(tableName, tableDescriptor);
+    TableDescriptor modifiedTable = TableDescriptorBuilder.newBuilder(tableDescriptor)
+        .addColumnFamily(columnDescriptor).build();
+    admin.modifyTable(modifiedTable);
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/74beb5a3/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/DecreaseMaxHFileSizeAction.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/DecreaseMaxHFileSizeAction.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/DecreaseMaxHFileSizeAction.java
index 98babeb..4610ef0 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/DecreaseMaxHFileSizeAction.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/DecreaseMaxHFileSizeAction.java
@@ -18,13 +18,14 @@
 
 package org.apache.hadoop.hbase.chaos.actions;
 
-import org.apache.hadoop.hbase.HBaseTestingUtility;
+import java.io.IOException;
+import java.util.Random;
+
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
-
-import java.util.Random;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 
 public class DecreaseMaxHFileSizeAction extends Action {
 
@@ -33,6 +34,7 @@ public class DecreaseMaxHFileSizeAction extends Action {
   private final long sleepTime;
   private final TableName tableName;
   private final Random random;
+  private Admin admin;
 
   public DecreaseMaxHFileSizeAction(long sleepTime, TableName tableName) {
     this.sleepTime = sleepTime;
@@ -41,13 +43,17 @@ public class DecreaseMaxHFileSizeAction extends Action {
   }
 
   @Override
+  public void init(ActionContext context) throws IOException {
+    super.init(context);
+    this.admin = context.getHBaseIntegrationTestingUtility().getAdmin();
+  }
+
+  @Override
   public void perform() throws Exception {
-    HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
-    Admin admin = util.getAdmin();
-    HTableDescriptor htd = admin.getTableDescriptor(tableName);
+    TableDescriptor td = admin.getDescriptor(tableName);
 
     // Try and get the current value.
-    long currentValue = htd.getMaxFileSize();
+    long currentValue = td.getMaxFileSize();
 
     // If the current value is not set use the default for the cluster.
     // If configs are really weird this might not work.
@@ -66,7 +72,8 @@ public class DecreaseMaxHFileSizeAction extends Action {
     newValue = Math.max(minFileSize, newValue) - (512 - random.nextInt(1024));
 
     // Change the table descriptor.
-    htd.setMaxFileSize(newValue);
+    TableDescriptor modifiedTable =
+        TableDescriptorBuilder.newBuilder(td).setMaxFileSize(newValue).build();
 
     // Don't try the modify if we're stopping
     if (context.isStopping()) {
@@ -74,7 +81,7 @@ public class DecreaseMaxHFileSizeAction extends Action {
     }
 
     // modify the table.
-    admin.modifyTable(tableName, htd);
+    admin.modifyTable(modifiedTable);
 
     // Sleep some time.
     if (sleepTime > 0) {