You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by ns...@apache.org on 2011/10/11 04:18:12 UTC
svn commit: r1181541 -
/hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/manual/HBaseTest.java
Author: nspiegelberg
Date: Tue Oct 11 02:18:08 2011
New Revision: 1181541
URL: http://svn.apache.org/viewvc?rev=1181541&view=rev
Log:
Adding command-line options to HBaseTest to specify Bloom filter and compression types
Summary:
Adding new options -bloom and -compression to HBaseTest to automatically
configure the test column family with the specified Bloom filter type and
compression type.
Test Plan:
Run HBaseTest.
Reviewed By: kannan
Reviewers: kannan, kranganathan, gqchen, aaiyer
CC: hbase@lists, mbautin, kannan
Revert Plan:
OK
Differential Revision: 250836
Modified:
hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/manual/HBaseTest.java
Modified: hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/manual/HBaseTest.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/manual/HBaseTest.java?rev=1181541&r1=1181540&r2=1181541&view=diff
==============================================================================
--- hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/manual/HBaseTest.java (original)
+++ hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/manual/HBaseTest.java Tue Oct 11 02:18:08 2011
@@ -19,7 +19,9 @@
*/
package org.apache.hadoop.hbase.manual;
+import java.io.IOException;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.List;
import java.util.Random;
@@ -32,11 +34,16 @@ import org.apache.commons.cli.ParseExcep
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.io.hfile.Compression;
import org.apache.hadoop.hbase.manual.utils.HBaseUtils;
import org.apache.hadoop.hbase.manual.utils.HdfsAppender;
import org.apache.hadoop.hbase.manual.utils.KillProcessesAndVerify;
import org.apache.hadoop.hbase.manual.utils.MultiThreadedReader;
import org.apache.hadoop.hbase.manual.utils.MultiThreadedWriter;
+import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
@@ -173,6 +180,12 @@ public class HBaseTest
System.out.printf("Started append test...");
}
+ // Help text for the -bloom option; lists every valid StoreFile.BloomType name.
+ private static final String OPT_USAGE_BLOOM = " Bloom filter type, one of " +
+ Arrays.toString(StoreFile.BloomType.values());
+
+ // Help text for the -compression option; lists every valid Compression.Algorithm name.
+ private static final String OPT_USAGE_COMPRESSION = " Compression type, " +
+ "one of " + Arrays.toString(Compression.Algorithm.values());
+
public static void main(String[] args) {
try {
// parse the command line args
@@ -193,6 +206,7 @@ public class HBaseTest
// create tables if needed
for(HBaseConfiguration conf : configList_) {
HBaseUtils.createTableIfNotExists(conf, tableName_, columnFamilies_);
+ applyBloomFilterAndCompression(conf, tableName_, columnFamilies_);
}
// write some test data in an infinite loop if needed
@@ -217,6 +231,39 @@ public class HBaseTest
}
}
+ /**
+ * Apply the Bloom filter type and/or compression algorithm given on the
+ * command line (-bloom / -compression) to all of the given column families
+ * of the test table. A no-op when neither option was specified. The table
+ * is disabled while the column descriptors are modified and re-enabled
+ * afterwards.
+ *
+ * @param conf cluster configuration used to create the HBaseAdmin
+ * @param tableName name of the (already created) test table
+ * @param columnFamilies the column families to reconfigure
+ * @throws IOException if an admin operation against the cluster fails
+ * @throws IllegalArgumentException if an option value is not a valid enum
+ * constant name (valueOf runs before the null check below)
+ */
+ private static void applyBloomFilterAndCompression(HBaseConfiguration conf,
+ byte[] tableName, byte[][] columnFamilies)
+ throws IOException {
+ // Parse both option values up front; each is null when the option is absent.
+ String bloomStr = cmd_.getOptionValue(OPT_BLOOM);
+ StoreFile.BloomType bloomType = bloomStr == null ? null :
+ StoreFile.BloomType.valueOf(bloomStr);
+
+ String compressStr = cmd_.getOptionValue(OPT_COMPRESSION);
+ Compression.Algorithm compressAlgo = compressStr == null ? null :
+ Compression.Algorithm.valueOf(compressStr);
+
+ // Neither option given: leave the table untouched.
+ if (bloomStr == null && compressStr == null)
+ return;
+
+ HBaseAdmin admin = new HBaseAdmin(conf);
+ HTableDescriptor tableDesc = admin.getTableDescriptor(tableName);
+ LOG.info("Disabling table " + Bytes.toString(tableName));
+ admin.disableTable(tableName);
+ // Update each family's descriptor; modifyColumn is issued per family.
+ for (byte[] cf : columnFamilies) {
+ HColumnDescriptor columnDesc = tableDesc.getFamily(cf);
+ if (bloomStr != null)
+ columnDesc.setBloomFilterType(bloomType);
+ if (compressStr != null)
+ columnDesc.setCompressionType(compressAlgo);
+ admin.modifyColumn(tableName, columnDesc.getName(), columnDesc);
+ }
+ LOG.info("Enabling table " + Bytes.toString(tableName));
+ admin.enableTable(tableName);
+ }
+
+
private static String USAGE;
private static final String HEADER = "HBaseTest";
private static final String FOOTER = "";
@@ -225,6 +272,8 @@ public class HBaseTest
private static final String OPT_READ = "read";
private static final String OPT_KILL = "kill";
private static final String OPT_APPEND = "append";
+ private static final String OPT_BLOOM = "bloom";
+ private static final String OPT_COMPRESSION = "compression";
private static final String OPT_TABLE_NAME = "tn";
static void initAndParseArgs(String[] args) throws ParseException {
// set the usage object
@@ -241,6 +290,8 @@ public class HBaseTest
options_.addOption(OPT_READ , true, OPT_USAGE_READ);
options_.addOption(OPT_KILL , true, OPT_USAGE_KILL);
options_.addOption(OPT_APPEND , true, OPT_USAGE_APPEND);
+ options_.addOption(OPT_BLOOM , true, OPT_USAGE_BLOOM);
+ options_.addOption(OPT_COMPRESSION, true, OPT_USAGE_COMPRESSION);
// parse the passed in options
CommandLineParser parser = new BasicParser();
cmd_ = parser.parse(options_, args);