Posted to commits@hbase.apache.org by ap...@apache.org on 2014/06/20 02:18:53 UTC

[2/2] git commit: HBASE-11362 Minor improvements to LoadTestTool and PerformanceEvaluation (Vandana Ayyalasomayajula)

HBASE-11362 Minor improvements to LoadTestTool and PerformanceEvaluation (Vandana Ayyalasomayajula)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0cce7d16
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0cce7d16
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0cce7d16

Branch: refs/heads/0.98
Commit: 0cce7d16a4b6995388d7e5cb15846e5e8ee93c6e
Parents: 23cd02a
Author: Andrew Purtell <ap...@apache.org>
Authored: Thu Jun 19 17:11:16 2014 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Thu Jun 19 17:16:13 2014 -0700

----------------------------------------------------------------------
 .../hadoop/hbase/PerformanceEvaluation.java     | 15 ++++++++++++--
 .../apache/hadoop/hbase/util/LoadTestTool.java  | 21 ++++++++++++++++----
 2 files changed, 30 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
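For context, the PerformanceEvaluation side of this change threads a bloom filter type through to the column family descriptor used for the test table, next to the existing compression and block encoding settings. A minimal sketch of those column-family calls follows; the class name and the family name "info" are illustrative only and not part of the patch.

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.regionserver.BloomType;

public class BloomOptionSketch {
  // Builds a column family descriptor the way the patched PerformanceEvaluation
  // builds its test family: the bloom filter type is now applied alongside
  // compression and data block encoding. The family name "info" is illustrative.
  static HColumnDescriptor testFamily(BloomType bloomType) {
    HColumnDescriptor family = new HColumnDescriptor("info");
    family.setBloomFilterType(bloomType);                    // new with this patch; ROW by default
    family.setCompressionType(Compression.Algorithm.NONE);
    family.setDataBlockEncoding(DataBlockEncoding.NONE);
    return family;
  }
}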


http://git-wip-us.apache.org/repos/asf/hbase/blob/0cce7d16/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index fc7b512..d6b6046 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -68,6 +68,7 @@ import org.apache.hadoop.hbase.filter.WhileMatchFilter;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
+import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Hash;
 import org.apache.hadoop.hbase.util.MurmurHash;
@@ -289,6 +290,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
     HColumnDescriptor family = new HColumnDescriptor(FAMILY_NAME);
     family.setDataBlockEncoding(opts.blockEncoding);
     family.setCompressionType(opts.compression);
+    family.setBloomFilterType(opts.bloomType);
     if (opts.inMemoryCF) {
       family.setInMemory(true);
     }
@@ -503,6 +505,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
       this.compression = that.compression;
       this.blockEncoding = that.blockEncoding;
       this.filterAll = that.filterAll;
+      this.bloomType = that.bloomType;
     }
 
     public boolean nomapred = false;
@@ -522,6 +525,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
     boolean inMemoryCF = false;
     int presplitRegions = 0;
     public Compression.Algorithm compression = Compression.Algorithm.NONE;
+    public BloomType bloomType = BloomType.ROW;
     public DataBlockEncoding blockEncoding = DataBlockEncoding.NONE;
   }
 
@@ -1115,6 +1119,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
         + " performance.  Uses FilterAllFilter internally. ");
     System.err.println(" latency         Set to report operation latencies. " +
       "Currently only supported by randomRead test. Default: False");
+    System.err.println(" bloomFilter      Bloom filter type, one of " + Arrays.toString(BloomType.values()));
     System.err.println();
     System.err.println(" Note: -D properties will be applied to the conf used. ");
     System.err.println("  For example: ");
@@ -1226,7 +1231,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
           opts.presplitRegions = Integer.parseInt(cmd.substring(presplit.length()));
           continue;
         }
-        
+
         final String inMemory = "--inmemory=";
         if (cmd.startsWith(inMemory)) {
           opts.inMemoryCF = Boolean.parseBoolean(cmd.substring(inMemory.length()));
@@ -1262,7 +1267,13 @@ public class PerformanceEvaluation extends Configured implements Tool {
           opts.filterAll = true;
           continue;
         }
-        
+
+        final String bloomFilter = "--bloomFilter";
+        if (cmd.startsWith(bloomFilter)) {
+          opts.bloomType = BloomType.valueOf(cmd.substring(bloomFilter.length()));
+          continue;
+        }
+
         Class<? extends Test> cmdClass = determineCommandClass(cmd);
         if (cmdClass != null) {
           opts.numClientThreads = getNumClients(i + 1, args);
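The new --bloomFilter option takes one of the BloomType constants; the usage text above lists the accepted values with Arrays.toString(BloomType.values()), and the value string is resolved with BloomType.valueOf. A small standalone sketch of that mapping (the class name and the sample value "ROW" are illustrative only):

import java.util.Arrays;
import org.apache.hadoop.hbase.regionserver.BloomType;

public class BloomTypeValueSketch {
  public static void main(String[] args) {
    // Same call the updated usage message uses to list the accepted values.
    System.out.println("Accepted bloom filter types: " + Arrays.toString(BloomType.values()));

    // Same call the option parsing uses to turn the value string into an enum constant.
    BloomType parsed = BloomType.valueOf("ROW");
    System.out.println("Parsed bloom filter type: " + parsed);
  }
}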

http://git-wip-us.apache.org/repos/asf/hbase/blob/0cce7d16/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
index 2406efe..b96f265 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
@@ -39,8 +39,8 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.PerformanceEvaluation;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.crypto.Cipher;
@@ -112,6 +112,8 @@ public class LoadTestTool extends AbstractHBaseTool {
 
   private static final String OPT_BLOOM = "bloom";
   private static final String OPT_COMPRESSION = "compression";
+  private static final String OPT_DEFERRED_LOG_FLUSH = "deferredlogflush";
+  public static final String OPT_DEFERRED_LOG_FLUSH_USAGE = "Enable deferred log flush.";
   public static final String OPT_DATA_BLOCK_ENCODING =
       HColumnDescriptor.DATA_BLOCK_ENCODING.toLowerCase();
 
@@ -157,6 +159,7 @@ public class LoadTestTool extends AbstractHBaseTool {
   protected long startKey, endKey;
 
   protected boolean isWrite, isRead, isUpdate;
+  protected boolean deferredLogFlush;
 
   // Column family options
   protected DataBlockEncoding dataBlockEncodingAlgo;
@@ -260,6 +263,7 @@ public class LoadTestTool extends AbstractHBaseTool {
     }
     LOG.info("Enabling table " + tableName);
     admin.enableTable(tableName);
+    admin.close();
   }
 
   @Override
@@ -306,6 +310,7 @@ public class LoadTestTool extends AbstractHBaseTool {
           + "tool will create the test table with n regions per server");
 
     addOptWithArg(OPT_ENCRYPTION, OPT_ENCRYPTION_USAGE);
+    addOptNoArg(OPT_DEFERRED_LOG_FLUSH, OPT_DEFERRED_LOG_FLUSH_USAGE);
   }
 
   @Override
@@ -319,6 +324,7 @@ public class LoadTestTool extends AbstractHBaseTool {
     isRead = cmd.hasOption(OPT_READ);
     isUpdate = cmd.hasOption(OPT_UPDATE);
     isInitOnly = cmd.hasOption(OPT_INIT_ONLY);
+    deferredLogFlush = cmd.hasOption(OPT_DEFERRED_LOG_FLUSH);
 
     if (!isWrite && !isRead && !isUpdate && !isInitOnly) {
       throw new IllegalArgumentException("Either -" + OPT_WRITE + " or " +
@@ -434,18 +440,25 @@ public class LoadTestTool extends AbstractHBaseTool {
         Compression.Algorithm.valueOf(compressStr);
 
     String bloomStr = cmd.getOptionValue(OPT_BLOOM);
-    bloomType = bloomStr == null ? null :
+    bloomType = bloomStr == null ? BloomType.ROW :
         BloomType.valueOf(bloomStr);
 
     inMemoryCF = cmd.hasOption(OPT_INMEMORY);
     if (cmd.hasOption(OPT_ENCRYPTION)) {
       cipher = Encryption.getCipher(conf, cmd.getOptionValue(OPT_ENCRYPTION));
     }
+
   }
 
   public void initTestTable() throws IOException {
-    HBaseTestingUtility.createPreSplitLoadTestTable(conf, tableName,
-        COLUMN_FAMILY, compressAlgo, dataBlockEncodingAlgo);
+    HTableDescriptor desc = new HTableDescriptor(tableName);
+    if (deferredLogFlush) {
+      desc.setDurability(Durability.ASYNC_WAL);
+    }
+    HColumnDescriptor hcd = new HColumnDescriptor(COLUMN_FAMILY);
+    hcd.setDataBlockEncoding(dataBlockEncodingAlgo);
+    hcd.setCompressionType(compressAlgo);
+    HBaseTestingUtility.createPreSplitLoadTestTable(conf, desc, hcd);
     applyColumnFamilyOptions(tableName, COLUMN_FAMILIES);
   }
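For reference, a standalone sketch of the table setup path the reworked initTestTable follows: the table descriptor opts into ASYNC_WAL durability when deferred log flush is requested, the column family descriptor carries the data block encoding and compression settings, and the pre-split helper receives both descriptors. The class name, the "loadtest" and "test_cf" names, and the TableName.valueOf lookup are illustrative stand-ins for the tool's own fields; the descriptor and helper calls are the ones used in the diff above.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

public class InitTestTableSketch {
  // Creates a pre-split load test table the way the patched initTestTable does.
  // "loadtest" and "test_cf" are illustrative names only.
  static void createLoadTestTable(Configuration conf, boolean deferredLogFlush)
      throws IOException {
    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("loadtest"));
    if (deferredLogFlush) {
      // Deferred log flush is expressed as ASYNC_WAL durability on the table.
      desc.setDurability(Durability.ASYNC_WAL);
    }
    HColumnDescriptor hcd = new HColumnDescriptor("test_cf");
    hcd.setDataBlockEncoding(DataBlockEncoding.NONE);
    hcd.setCompressionType(Compression.Algorithm.NONE);
    HBaseTestingUtility.createPreSplitLoadTestTable(conf, desc, hcd);
  }
}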