Posted to commits@hbase.apache.org by ec...@apache.org on 2013/08/09 20:21:08 UTC

svn commit: r1512429 - in /hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase: ./ test/ util/

Author: eclark
Date: Fri Aug  9 18:21:07 2013
New Revision: 1512429

URL: http://svn.apache.org/r1512429
Log:
HBASE-8726 Create an Integration Test for online schema change

Modified:
    hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDataIngestSlowDeterministic.java
    hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDataIngestWithChaosMonkey.java
    hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithChaosMonkey.java
    hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/util/ChaosMonkey.java

Modified: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDataIngestSlowDeterministic.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDataIngestSlowDeterministic.java?rev=1512429&r1=1512428&r2=1512429&view=diff
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDataIngestSlowDeterministic.java (original)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDataIngestSlowDeterministic.java Fri Aug  9 18:21:07 2013
@@ -76,7 +76,11 @@ public class IntegrationTestDataIngestSl
     Action[] actions2 = new Action[] {
       new SplitRandomRegionOfTable(tableName),
       new MergeRandomAdjacentRegionsOfTable(tableName),
-      new SnapshotTable(tableName)
+      new SnapshotTable(tableName),
+      new ChaosMonkey.AddColumnAction(tableName),
+      new ChaosMonkey.RemoveColumnAction(tableName),
+      new ChaosMonkey.ChangeEncodingAction(tableName),
+      new ChaosMonkey.ChangeVersionsAction(tableName)
     };
 
     // Destructive actions to mess things around.

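For context, the expanded actions2 array above is wired into the monkey the same way the
existing actions are: it is wrapped in a policy that fires one randomly chosen action per
period. A minimal sketch, assuming the PeriodicRandomActionPolicy(periodMs, Action...) and
ChaosMonkey(util, Policy...) signatures these tests rely on; the 90-second period and the
helper method name are illustrative and not part of this commit:

private ChaosMonkey startSchemaChangeMonkey(IntegrationTestingUtility util,
    ChaosMonkey.Action[] actions2) throws Exception {
  // One randomly chosen action from actions2 runs each period.
  ChaosMonkey.Policy policy =
      new ChaosMonkey.PeriodicRandomActionPolicy(90 * 1000, actions2);
  ChaosMonkey monkey = new ChaosMonkey(util, policy);
  monkey.start();
  return monkey;
}
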
Modified: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDataIngestWithChaosMonkey.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDataIngestWithChaosMonkey.java?rev=1512429&r1=1512428&r2=1512429&view=diff
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDataIngestWithChaosMonkey.java (original)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestDataIngestWithChaosMonkey.java Fri Aug  9 18:21:07 2013
@@ -20,17 +20,7 @@ package org.apache.hadoop.hbase;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.util.ChaosMonkey;
-import org.apache.hadoop.hbase.util.ChaosMonkey.Action;
-import org.apache.hadoop.hbase.util.ChaosMonkey.CompactRandomRegionOfTable;
-import org.apache.hadoop.hbase.util.ChaosMonkey.CompactTable;
-import org.apache.hadoop.hbase.util.ChaosMonkey.FlushRandomRegionOfTable;
-import org.apache.hadoop.hbase.util.ChaosMonkey.FlushTable;
-import org.apache.hadoop.hbase.util.ChaosMonkey.MergeRandomAdjacentRegionsOfTable;
-import org.apache.hadoop.hbase.util.ChaosMonkey.MoveRandomRegionOfTable;
-import org.apache.hadoop.hbase.util.ChaosMonkey.MoveRegionsOfTable;
-import org.apache.hadoop.hbase.util.ChaosMonkey.PeriodicRandomActionPolicy;
-import org.apache.hadoop.hbase.util.ChaosMonkey.SnapshotTable;
-import org.apache.hadoop.hbase.util.ChaosMonkey.SplitRandomRegionOfTable;
+import org.apache.hadoop.hbase.util.ChaosMonkey.*;
 import org.apache.hadoop.hbase.util.LoadTestTool;
 import org.junit.After;
 import org.junit.Before;
@@ -88,7 +78,11 @@ public class IntegrationTestDataIngestWi
       new SplitRandomRegionOfTable(tableName),
       new MergeRandomAdjacentRegionsOfTable(tableName),
       new SnapshotTable(tableName),
-      new MoveRegionsOfTable(tableName)
+      new MoveRegionsOfTable(tableName),
+      new AddColumnAction(tableName),
+      new RemoveColumnAction(tableName),
+      new ChangeEncodingAction(tableName),
+      new ChangeVersionsAction(tableName)
     };
 
     monkey = new ChaosMonkey(util,

Modified: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithChaosMonkey.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithChaosMonkey.java?rev=1512429&r1=1512428&r2=1512429&view=diff
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithChaosMonkey.java (original)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithChaosMonkey.java Fri Aug  9 18:21:07 2013
@@ -25,17 +25,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
 import org.apache.hadoop.hbase.IntegrationTests;
 import org.apache.hadoop.hbase.util.ChaosMonkey;
-import org.apache.hadoop.hbase.util.ChaosMonkey.Action;
-import org.apache.hadoop.hbase.util.ChaosMonkey.CompactRandomRegionOfTable;
-import org.apache.hadoop.hbase.util.ChaosMonkey.CompactTable;
-import org.apache.hadoop.hbase.util.ChaosMonkey.FlushRandomRegionOfTable;
-import org.apache.hadoop.hbase.util.ChaosMonkey.FlushTable;
-import org.apache.hadoop.hbase.util.ChaosMonkey.MergeRandomAdjacentRegionsOfTable;
-import org.apache.hadoop.hbase.util.ChaosMonkey.MoveRandomRegionOfTable;
-import org.apache.hadoop.hbase.util.ChaosMonkey.MoveRegionsOfTable;
-import org.apache.hadoop.hbase.util.ChaosMonkey.PeriodicRandomActionPolicy;
-import org.apache.hadoop.hbase.util.ChaosMonkey.SnapshotTable;
-import org.apache.hadoop.hbase.util.ChaosMonkey.SplitRandomRegionOfTable;
+import org.apache.hadoop.hbase.util.ChaosMonkey.*;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.After;
 import org.junit.Before;
@@ -93,7 +83,11 @@ public class IntegrationTestBigLinkedLis
       new SplitRandomRegionOfTable(tableName),
       new MergeRandomAdjacentRegionsOfTable(tableName),
       new SnapshotTable(tableName),
-      new MoveRegionsOfTable(tableName)
+      new MoveRegionsOfTable(tableName),
+      new AddColumnAction(tableName),
+      new RemoveColumnAction(tableName),
+      new ChangeEncodingAction(tableName),
+      new ChangeVersionsAction(tableName)
     };
 
     monkey = new ChaosMonkey(util,

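Both chaos tests above follow the same lifecycle: build the action arrays, hand them to the
monkey, start it before the workload, and stop it afterwards; the new schema-change actions
simply join that rotation. A sketch of launching the linked-list test against an
already-running cluster, assuming the class implements Tool via its parent (ToolRunner is
imported above) and that IntegrationTestingUtility.setUseDistributedCluster is available;
the configuration handling shown is illustrative:

public static void main(String[] args) throws Exception {
  Configuration conf = HBaseConfiguration.create();
  // Run against a real (distributed) cluster instead of a mini cluster.
  IntegrationTestingUtility.setUseDistributedCluster(conf);
  int status = ToolRunner.run(conf,
      new IntegrationTestBigLinkedListWithChaosMonkey(), args);
  System.exit(status);
}
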
Modified: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/util/ChaosMonkey.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/util/ChaosMonkey.java?rev=1512429&r1=1512428&r2=1512429&view=diff
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/util/ChaosMonkey.java (original)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/util/ChaosMonkey.java Fri Aug  9 18:21:07 2013
@@ -26,8 +26,10 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Queue;
+import java.util.Random;
 
 import org.apache.commons.cli.CommandLine;
+import org.apache.commons.lang.RandomStringUtils;
 import org.apache.commons.lang.math.RandomUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -36,7 +38,9 @@ import org.apache.hadoop.hbase.ClusterSt
 import org.apache.hadoop.hbase.HBaseCluster;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.IntegrationTestDataIngestWithChaosMonkey;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
 import org.apache.hadoop.hbase.ServerLoad;
@@ -44,6 +48,7 @@ import org.apache.hadoop.hbase.ServerNam
 import org.apache.hadoop.hbase.Stoppable;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -761,6 +766,138 @@ public class ChaosMonkey extends Abstrac
     }
   }
 
+  public static class AddColumnAction extends ChaosMonkey.Action {
+
+    private byte[] tableName;
+    private HBaseAdmin admin;
+
+    public AddColumnAction(String tableName) {
+      this.tableName = Bytes.toBytes(tableName);
+    }
+
+    @Override
+    public void init(ActionContext context) throws IOException {
+      super.init(context);
+      this.admin = context.getHaseIntegrationTestingUtility().getHBaseAdmin();
+    }
+
+    @Override
+    public void perform() throws Exception {
+      HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);
+      HColumnDescriptor columnDescriptor = null;
+
+      while (columnDescriptor == null ||
+          tableDescriptor.getFamily(columnDescriptor.getName()) != null) {
+        columnDescriptor = new HColumnDescriptor(RandomStringUtils.randomAlphabetic(5));
+      }
+
+      tableDescriptor.addFamily(columnDescriptor);
+      admin.modifyTable(tableName, tableDescriptor);
+    }
+  }
+
+  public static class RemoveColumnAction extends ChaosMonkey.Action {
+    private byte[] tableName;
+    private HBaseAdmin admin;
+    private Random random;
+
+    public RemoveColumnAction(String tableName) {
+      this.tableName = Bytes.toBytes(tableName);
+      random = new Random();
+    }
+
+    @Override
+    public void init(ActionContext context) throws IOException {
+      super.init(context);
+      this.admin = context.getHaseIntegrationTestingUtility().getHBaseAdmin();
+    }
+
+    @Override
+    public void perform() throws Exception {
+      HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);
+      HColumnDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
+
+      if (columnDescriptors.length <= 1) {
+        return;
+      }
+
+      int index = random.nextInt(columnDescriptors.length);
+      while (columnDescriptors[index].getNameAsString().equals(
+          Bytes.toString(LoadTestTool.COLUMN_FAMILY))) {
+        index = random.nextInt(columnDescriptors.length);
+      }
+
+      tableDescriptor.removeFamily(columnDescriptors[index].getName());
+
+      admin.modifyTable(tableName, tableDescriptor);
+    }
+  }
+
+  public static class ChangeVersionsAction extends ChaosMonkey.Action {
+    private byte[] tableName;
+    private HBaseAdmin admin;
+    private Random random;
+
+    public ChangeVersionsAction(String tableName) {
+      this.tableName = Bytes.toBytes(tableName);
+      random = new Random();
+    }
+
+    @Override
+    public void init(ActionContext context) throws IOException {
+      super.init(context);
+      this.admin = context.getHaseIntegrationTestingUtility().getHBaseAdmin();
+    }
+
+    @Override
+    public void perform() throws Exception {
+      HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);
+      HColumnDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
+
+      if (columnDescriptors == null || columnDescriptors.length == 0) {
+        return;
+      }
+
+      int versions = random.nextInt(3) + 1;
+      for (HColumnDescriptor descriptor : columnDescriptors) {
+        descriptor.setMaxVersions(versions);
+        descriptor.setMinVersions(versions);
+      }
+
+      admin.modifyTable(tableName, tableDescriptor);
+    }
+  }
+
+  public static class ChangeEncodingAction extends ChaosMonkey.Action {
+    private byte[] tableName;
+    private HBaseAdmin admin;
+    private Random random;
+
+    public ChangeEncodingAction(String tableName) {
+      this.tableName = Bytes.toBytes(tableName);
+      random = new Random();
+    }
+
+    @Override
+    public void init(ActionContext context) throws IOException {
+      super.init(context);
+      this.admin = context.getHaseIntegrationTestingUtility().getHBaseAdmin();
+    }
+
+    @Override
+    public void perform() throws Exception {
+      HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);
+      HColumnDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
+
+      if (columnDescriptors == null || columnDescriptors.length == 0) {
+        return;
+      }
+
+      // valid DataBlockEncoding ids: 0 = NONE, 2 = PREFIX, 3 = DIFF, 4 = FAST_DIFF, 6 = PREFIX_TREE
+      int[] possibleIds = {0, 2, 3, 4, 6};
+      for (HColumnDescriptor descriptor : columnDescriptors) {
+        short id = (short) possibleIds[random.nextInt(possibleIds.length)];
+        descriptor.setDataBlockEncoding(DataBlockEncoding.getEncodingById(id));
+      }
+
+      admin.modifyTable(tableName, tableDescriptor);
+    }
+  }
+
   /**
    * A context for a Policy
    */
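
The four new actions share one pattern: fetch the table descriptor through HBaseAdmin,
mutate it, and push it back with modifyTable, which is the online schema change path this
test targets. A standalone sketch of the same steps ChangeEncodingAction.perform() takes,
driven from a plain Configuration instead of the monkey's ActionContext; the class name and
table name are illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.Bytes;

public class ChangeEncodingSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HBaseAdmin admin = new HBaseAdmin(conf);
    try {
      byte[] table = Bytes.toBytes("usertable");   // illustrative table name
      HTableDescriptor htd = admin.getTableDescriptor(table);
      // Valid DataBlockEncoding ids: 0 = NONE, 2 = PREFIX, 3 = DIFF,
      // 4 = FAST_DIFF, 6 = PREFIX_TREE.
      for (HColumnDescriptor hcd : htd.getColumnFamilies()) {
        hcd.setDataBlockEncoding(DataBlockEncoding.getEncodingById((short) 4));
      }
      // Push the modified schema without disabling the table.
      admin.modifyTable(table, htd);
    } finally {
      admin.close();
    }
  }
}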