You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by li...@apache.org on 2014/05/14 02:27:04 UTC
svn commit: r1594423 [14/17] - in /hbase/branches/0.89-fb: ./ bin/
src/main/java/org/apache/hadoop/hbase/rest/
src/main/java/org/apache/hadoop/hbase/rest/client/
src/main/java/org/apache/hadoop/hbase/rest/metrics/
src/main/java/org/apache/hadoop/hbase/...
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java Wed May 14 00:26:57 2014
@@ -42,6 +42,8 @@ import org.apache.hadoop.hbase.HBaseTest
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
@@ -50,6 +52,7 @@ import org.apache.hadoop.hbase.util.Byte
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
@@ -58,6 +61,7 @@ import org.junit.runners.Parameterized.P
* Test various seek optimizations for correctness and check if they are
* actually saving I/O operations.
*/
+@Category(MediumTests.class)
@RunWith(Parameterized.class)
public class TestSeekOptimizations {
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java Wed May 14 00:26:57 2014
@@ -29,6 +29,8 @@ import java.util.concurrent.atomic.Atomi
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.CancelableProgressable;
import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
import org.apache.hadoop.hbase.zookeeper.ZKSplitLog.TaskState;
@@ -42,9 +44,9 @@ import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
-
-
+@Category(MediumTests.class)
public class TestSplitLogWorker {
private static final Log LOG = LogFactory.getLog(TestSplitLogWorker.class);
static {
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java Wed May 14 00:26:57 2014
@@ -50,6 +50,7 @@ import org.apache.hadoop.hbase.HConstant
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
@@ -57,6 +58,7 @@ import org.apache.hadoop.hbase.regionser
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.util.Progressable;
+import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import com.google.common.base.Joiner;
@@ -64,6 +66,7 @@ import com.google.common.base.Joiner;
/**
* Test class for the Store
*/
+@Category(SmallTests.class)
public class TestStore extends TestCase {
public static final Log LOG = LogFactory.getLog(TestStore.class);
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java Wed May 14 00:26:57 2014
@@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.HConstant
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
@@ -64,6 +65,7 @@ import org.apache.hadoop.hbase.util.Bloo
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.Assert;
+import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import com.google.common.base.Joiner;
@@ -73,6 +75,7 @@ import com.google.common.collect.Lists;
/**
* Test HStoreFile
*/
+@Category(MediumTests.class)
@SuppressWarnings("deprecation")
public class TestStoreFile extends HBaseTestCase {
static final Log LOG = LogFactory.getLog(TestStoreFile.class);
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileBlockCacheSummary.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileBlockCacheSummary.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileBlockCacheSummary.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileBlockCacheSummary.java Wed May 14 00:26:57 2014
@@ -28,6 +28,7 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
@@ -47,6 +48,7 @@ import org.junit.experimental.categories
* which contains the BlockCache
*
*/
+@Category(MediumTests.class)
public class TestStoreFileBlockCacheSummary {
final Log LOG = LogFactory.getLog(getClass());
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java Wed May 14 00:26:57 2014
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.HBaseConf
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueTestUtil;
+import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.kvaggregator.DefaultKeyValueAggregator;
@@ -49,7 +50,9 @@ import org.apache.hadoop.hbase.util.Byte
import org.apache.hadoop.hbase.util.InjectionEvent;
import org.apache.hadoop.hbase.util.InjectionHandler;
import org.apache.hadoop.hbase.util.Threads;
+import org.junit.experimental.categories.Category;
+@Category(SmallTests.class)
public class TestStoreScanner extends TestCase {
private static final String CF_STR = "cf";
final byte [] CF = Bytes.toBytes(CF_STR);
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestSyncFileRangeThrottling.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestSyncFileRangeThrottling.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestSyncFileRangeThrottling.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestSyncFileRangeThrottling.java Wed May 14 00:26:57 2014
@@ -3,6 +3,7 @@ package org.apache.hadoop.hbase.regionse
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.util.InjectionEventCore;
import org.apache.hadoop.util.InjectionEventI;
import org.apache.hadoop.util.InjectionHandler;
@@ -16,7 +17,9 @@ import org.junit.AfterClass;
import static org.junit.Assert.*;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
+@Category(MediumTests.class)
public class TestSyncFileRangeThrottling {
private static final Log LOG = LogFactory
.getLog(TestSyncFileRangeThrottling.class);
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestTierCompactSelection.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestTierCompactSelection.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestTierCompactSelection.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestTierCompactSelection.java Wed May 14 00:26:57 2014
@@ -27,7 +27,10 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.MediumTests;
+import org.junit.experimental.categories.Category;
+@Category(MediumTests.class)
public class TestTierCompactSelection extends TestDefaultCompactSelection {
private final static Log LOG = LogFactory.getLog(TestTierCompactSelection.class);
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java Wed May 14 00:26:57 2014
@@ -34,12 +34,15 @@ import org.apache.hadoop.hbase.HConstant
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.hfile.Compression;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.junit.experimental.categories.Category;
+@Category(MediumTests.class)
public class TestWideScanner extends HBaseTestCase {
private final Log LOG = LogFactory.getLog(this.getClass());
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/compactionhook/TestLowerToUpperCompactionHook.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/compactionhook/TestLowerToUpperCompactionHook.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/compactionhook/TestLowerToUpperCompactionHook.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/compactionhook/TestLowerToUpperCompactionHook.java Wed May 14 00:26:57 2014
@@ -25,6 +25,7 @@ import org.apache.hadoop.hbase.HColumnDe
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.hfile.Compression;
@@ -36,7 +37,9 @@ import org.apache.hadoop.hbase.util.Byte
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
+@Category(MediumTests.class)
@SuppressWarnings("deprecation")
public class TestLowerToUpperCompactionHook {
private static String TABLE_STRING = "TestCompactionHook";
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/compactionhook/TestSkipCompactionHook.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/compactionhook/TestSkipCompactionHook.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/compactionhook/TestSkipCompactionHook.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/compactionhook/TestSkipCompactionHook.java Wed May 14 00:26:57 2014
@@ -25,6 +25,7 @@ import org.apache.hadoop.hbase.HColumnDe
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.hfile.Compression;
@@ -35,7 +36,9 @@ import org.apache.hadoop.hbase.util.Byte
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
+@Category(MediumTests.class)
@SuppressWarnings("deprecation")
public class TestSkipCompactionHook {
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/kvaggregator/TestKeyValueAggregator.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/kvaggregator/TestKeyValueAggregator.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/kvaggregator/TestKeyValueAggregator.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/kvaggregator/TestKeyValueAggregator.java Wed May 14 00:26:57 2014
@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.HColumnDe
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.hfile.Compression;
@@ -36,7 +37,9 @@ import org.apache.hadoop.hbase.util.Byte
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
+@Category(MediumTests.class)
@SuppressWarnings("deprecation")
public class TestKeyValueAggregator {
private static byte[] TABLE = Bytes.toBytes("TestKeyValueAggregator");
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestRpcMetricWrapper.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestRpcMetricWrapper.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestRpcMetricWrapper.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestRpcMetricWrapper.java Wed May 14 00:26:57 2014
@@ -19,8 +19,10 @@
package org.apache.hadoop.hbase.regionserver.metrics;
import junit.framework.TestCase;
+import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.util.MetricsRegistry;
+import org.junit.experimental.categories.Category;
import java.util.HashMap;
@@ -94,6 +96,7 @@ class DummyMetricsRecord implements Metr
}
}
+@Category(SmallTests.class)
public class TestRpcMetricWrapper extends TestCase {
private MetricsRegistry registry = new MetricsRegistry();
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestSchemaMetrics.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestSchemaMetrics.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestSchemaMetrics.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestSchemaMetrics.java Wed May 14 00:26:57 2014
@@ -35,6 +35,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.io.hfile.BlockType;
import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics.BlockMetricType;
@@ -42,10 +43,12 @@ import org.apache.hadoop.hbase.util.Byte
import org.apache.hadoop.hbase.util.ClassSize;
import org.junit.Before;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
+@Category(SmallTests.class)
@RunWith(Parameterized.class)
public class TestSchemaMetrics {
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestThriftMetrics.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestThriftMetrics.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestThriftMetrics.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestThriftMetrics.java Wed May 14 00:26:57 2014
@@ -26,21 +26,21 @@ import junit.framework.TestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.UnstableTests;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.TagRunner;
-import org.apache.hadoop.hbase.util.TestTag;
import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.experimental.categories.Category;
/**
* Test to verify that the thrift metrics are calculated and propagated in the
* HBaseRpcMetrics.
*/
-@RunWith(TagRunner.class)
+@Category(MediumTests.class)
public class TestThriftMetrics extends TestCase {
private final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final int SLAVES = 1;
@@ -53,7 +53,7 @@ public class TestThriftMetrics extends T
* @throws IOException
*/
// Marked as unstable and recorded in #3921380
- @TestTag({ "unstable" })
+ @Category(UnstableTests.class)
@Test
public void testThriftMetricsArePopulated() throws IOException,
InterruptedException {
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java Wed May 14 00:26:57 2014
@@ -43,12 +43,12 @@ import org.apache.hadoop.hbase.HConstant
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.UnstableTests;
import org.apache.hadoop.hbase.regionserver.wal.HLog.Reader;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HasThread;
-import org.apache.hadoop.hbase.util.TagRunner;
-import org.apache.hadoop.hbase.util.TestTag;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -62,10 +62,10 @@ import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.experimental.categories.Category;
/** JUnit test case for HLog */
-@RunWith(TagRunner.class)
+@Category(MediumTests.class)
public class TestHLog {
private static final Log LOG = LogFactory.getLog(TestHLog.class);
{
@@ -188,7 +188,7 @@ public class TestHLog {
* @throws Exception
*/
// Marked as unstable and recorded in 3297526
- @TestTag({ "unstable" })
+ @Category(UnstableTests.class)
@Test
public void testSync() throws Exception {
byte [] bytes = Bytes.toBytes(getName());
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogFiltering.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogFiltering.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogFiltering.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogFiltering.java Wed May 14 00:26:57 2014
@@ -30,6 +30,7 @@ import java.util.TreeMap;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
@@ -41,7 +42,9 @@ import org.junit.Before;
import org.junit.Test;
import com.google.common.collect.Lists;
+import org.junit.experimental.categories.Category;
+@Category(MediumTests.class)
public class TestHLogFiltering {
private static final int NUM_MASTERS = 1;
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogMethods.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogMethods.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogMethods.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogMethods.java Wed May 14 00:26:57 2014
@@ -28,11 +28,14 @@ import org.apache.hadoop.fs.FSDataOutput
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.MediumTests;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
/**
* Simple testing of a few HLog methods.
*/
+@Category(MediumTests.class)
public class TestHLogMethods {
private final HBaseTestingUtility util = new HBaseTestingUtility();
@@ -78,4 +81,4 @@ public class TestHLogMethods {
FSDataOutputStream fdos = fs.create(new Path(testdir, name), true);
fdos.close();
}
-}
\ No newline at end of file
+}
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java Wed May 14 00:26:57 2014
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.HConstant
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CancelableProgressable;
@@ -44,6 +45,7 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
import java.io.FileNotFoundException;
import java.io.IOException;
@@ -64,6 +66,7 @@ import static org.junit.Assert.assertTru
/**
* Testing {@link HLog} splitting code.
*/
+@Category(MediumTests.class)
public class TestHLogSplit {
private static final Log LOG = LogFactory.getLog(TestHLogSplit.class);
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogActionsListener.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogActionsListener.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogActionsListener.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogActionsListener.java Wed May 14 00:26:57 2014
@@ -29,16 +29,20 @@ import org.apache.hadoop.hbase.HConstant
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
import static org.junit.Assert.*;
/**
* Test that the actions are called while playing with an HLog
*/
+@Category(MediumTests.class)
public class TestLogActionsListener {
protected static final Log LOG =
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java Wed May 14 00:26:57 2014
@@ -31,6 +31,8 @@ import org.apache.hadoop.hbase.HBaseClus
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.UnstableTests;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
@@ -38,8 +40,6 @@ import org.apache.hadoop.hbase.regionser
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.hadoop.hbase.util.TagRunner;
-import org.apache.hadoop.hbase.util.TestTag;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.MiniDFSCluster.DataNodeProperties;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -48,12 +48,12 @@ import org.apache.hadoop.hdfs.server.nam
import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
import org.apache.log4j.Level;
import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.experimental.categories.Category;
/**
* Test log deletion as logs are rolled.
*/
-@RunWith(TagRunner.class)
+@Category(MediumTests.class)
public class TestLogRolling extends HBaseClusterTestCase {
private static final Log LOG = LogFactory.getLog(TestLogRolling.class);
private HRegionServer server;
@@ -169,7 +169,7 @@ public class TestLogRolling extends HBas
* @throws Exception
*/
* Marked as unstable and recorded in #3896573
- @TestTag({ "unstable" })
+ @Category(UnstableTests.class)
@Test
public void testLogRolling() throws Exception {
this.tableName = getName();
@@ -218,7 +218,7 @@ public class TestLogRolling extends HBas
* @throws Exception
*/
// Marked as unstable and recorded in #3344583
- @TestTag({ "unstable" })
+ @Category(org.apache.hadoop.hbase.UnstableTests.class)
@Test
public void testLogRollOnDatanodeDeath() throws Exception {
assertTrue("This test requires HLog file replication.",
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestMultipleHLogs.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestMultipleHLogs.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestMultipleHLogs.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestMultipleHLogs.java Wed May 14 00:26:57 2014
@@ -9,6 +9,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -20,10 +21,12 @@ import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
+@Category(MediumTests.class)
public class TestMultipleHLogs {
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private final static int USER_REGION_NUM = 3;
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java Wed May 14 00:26:57 2014
@@ -39,7 +39,9 @@ import org.apache.hadoop.hbase.HRegionIn
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.UnstableTests;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
@@ -55,20 +57,18 @@ import org.apache.hadoop.hbase.regionser
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdge;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.TagRunner;
-import org.apache.hadoop.hbase.util.TestTag;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
/**
* Test replay of edits out of a WAL split.
*/
-@RunWith(TagRunner.class)
+@Category(MediumTests.class)
public class TestWALReplay {
public static final Log LOG = LogFactory.getLog(TestWALReplay.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@@ -232,7 +232,7 @@ public class TestWALReplay {
* @throws IllegalArgumentException
* @throws SecurityException
*/
- @TestTag({ "unstable" })
+ @Category(UnstableTests.class)
@Test
public void testReplayEditsWrittenViaHRegion()
throws IOException, SecurityException, IllegalArgumentException,
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java Wed May 14 00:26:57 2014
@@ -30,6 +30,8 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.LargeTests;
+import org.apache.hadoop.hbase.UnstableTests;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
@@ -39,15 +41,13 @@ import org.apache.hadoop.hbase.regionser
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.TagRunner;
-import org.apache.hadoop.hbase.util.TestTag;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.experimental.categories.Category;
-@RunWith(TagRunner.class)
+@Category(LargeTests.class)
public class TestReplicationSink {
private static final Log LOG =
@@ -175,7 +175,7 @@ public class TestReplicationSink {
* Insert then do different types of deletes
* @throws Exception
*/
- @TestTag({ "unstable" })
+ @Category(UnstableTests.class)
@Test
public void testMixedDeletes() throws Exception {
HLog.Entry[] entries = new HLog.Entry[3];
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTClusterTestBase.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTClusterTestBase.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTClusterTestBase.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTClusterTestBase.java Wed May 14 00:26:57 2014
@@ -1,94 +0,0 @@
-/**
- * Copyright 2010 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.rest;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.HBaseClusterTestCase;
-import org.apache.hadoop.hbase.zookeeper.ZooKeeperWrapper;
-import org.apache.hadoop.util.StringUtils;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.servlet.Context;
-import org.mortbay.jetty.servlet.ServletHolder;
-
-import com.sun.jersey.spi.container.servlet.ServletContainer;
-
-public class HBaseRESTClusterTestBase extends HBaseClusterTestCase
- implements Constants {
-
- static final Log LOG =
- LogFactory.getLog(HBaseRESTClusterTestBase.class);
-
- protected int testServletPort;
- Server server;
-
- protected void setUp() throws Exception {
- super.setUp();
- ZooKeeperWrapper.copyClientPort(conf,
- RESTServlet.getInstance().getConfiguration());
- startServletContainer();
- }
-
- protected void tearDown() throws Exception {
- stopServletContainer();
- super.tearDown();
- }
-
- private void startServletContainer() throws Exception {
- if (server != null) {
- LOG.error("ServletContainer already running");
- return;
- }
-
- // set up the Jersey servlet container for Jetty
- ServletHolder sh = new ServletHolder(ServletContainer.class);
- sh.setInitParameter(
- "com.sun.jersey.config.property.resourceConfigClass",
- ResourceConfig.class.getCanonicalName());
- sh.setInitParameter("com.sun.jersey.config.property.packages",
- "jetty");
-
- LOG.info("configured " + ServletContainer.class.getName());
-
- // set up Jetty and run the embedded server
- server = new Server(0);
- server.setSendServerVersion(false);
- server.setSendDateHeader(false);
- // set up context
- Context context = new Context(server, "/", Context.SESSIONS);
- context.addServlet(sh, "/*");
- // start the server
- server.start();
- // get the port
- testServletPort = server.getConnectors()[0].getLocalPort();
-
- LOG.info("started " + server.getClass().getName() + " on port " +
- testServletPort);
- }
-
- private void stopServletContainer() {
- if (server != null) try {
- server.stop();
- server = null;
- } catch (Exception e) {
- LOG.warn(StringUtils.stringifyException(e));
- }
- }
-}
Modified: hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java?rev=1594423&r1=1594422&r2=1594423&view=diff
==============================================================================
--- hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java (original)
+++ hbase/branches/0.89-fb/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java Wed May 14 00:26:57 2014
@@ -1,1255 +0,0 @@
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.rest;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.TreeMap;
-import java.util.Arrays;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-import java.lang.reflect.Constructor;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.filter.PageFilter;
-import org.apache.hadoop.hbase.filter.WhileMatchFilter;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
-import org.apache.hadoop.hbase.filter.CompareFilter;
-import org.apache.hadoop.hbase.filter.BinaryComparator;
-import org.apache.hadoop.hbase.rest.client.Client;
-import org.apache.hadoop.hbase.rest.client.Cluster;
-import org.apache.hadoop.hbase.rest.client.RemoteAdmin;
-import org.apache.hadoop.hbase.rest.client.RemoteHTable;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.Hash;
-import org.apache.hadoop.hbase.util.MurmurHash;
-import org.apache.hadoop.hbase.util.Pair;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
-import org.apache.hadoop.util.LineReader;
-
-/**
- * Script used evaluating Stargate performance and scalability. Runs a SG
- * client that steps through one of a set of hardcoded tests or 'experiments'
- * (e.g. a random reads test, a random writes test, etc.). Pass on the
- * command-line which test to run and how many clients are participating in
- * this experiment. Run <code>java PerformanceEvaluation --help</code> to
- * obtain usage.
- *
- * <p>This class sets up and runs the evaluation programs described in
- * Section 7, <i>Performance Evaluation</i>, of the <a
- * href="http://labs.google.com/papers/bigtable.html">Bigtable</a>
- * paper, pages 8-10.
- *
- * <p>If number of clients > 1, we start up a MapReduce job. Each map task
- * runs an individual client. Each client does about 1GB of data.
- */
-public class PerformanceEvaluation {
- protected static final Log LOG = LogFactory.getLog(PerformanceEvaluation.class.getName());
-
- private static final int ROW_LENGTH = 1000;
- private static final int ONE_GB = 1024 * 1024 * 1000;
- private static final int ROWS_PER_GB = ONE_GB / ROW_LENGTH;
-
- public static final byte [] TABLE_NAME = Bytes.toBytes("TestTable");
- public static final byte [] FAMILY_NAME = Bytes.toBytes("info");
- public static final byte [] QUALIFIER_NAME = Bytes.toBytes("data");
-
- protected static final HTableDescriptor TABLE_DESCRIPTOR;
- static {
- TABLE_DESCRIPTOR = new HTableDescriptor(TABLE_NAME);
- TABLE_DESCRIPTOR.addFamily(new HColumnDescriptor(FAMILY_NAME));
- }
-
- protected Map<String, CmdDescriptor> commands = new TreeMap<String, CmdDescriptor>();
- protected static Cluster cluster = new Cluster();
- protected static String accessToken = null;
-
- volatile Configuration conf;
- private boolean nomapred = false;
- private int N = 1;
- private int R = ROWS_PER_GB;
- private int B = 100;
-
- private static final Path PERF_EVAL_DIR = new Path("performance_evaluation");
- /**
- * Regex to parse lines in input file passed to mapreduce task.
- */
- public static final Pattern LINE_PATTERN =
- Pattern.compile("startRow=(\\d+),\\s+" +
- "perClientRunRows=(\\d+),\\s+" +
- "totalRows=(\\d+),\\s+" +
- "clients=(\\d+),\\s+" +
- "rowsPerPut=(\\d+)");
-
- /**
- * Enum for map metrics. Keep it out here rather than inside in the Map
- * inner-class so we can find associated properties.
- */
- protected static enum Counter {
- /** elapsed time */
- ELAPSED_TIME,
- /** number of rows */
- ROWS}
-
- /**
- * Constructor
- * @param c Configuration object
- */
- public PerformanceEvaluation(final Configuration c) {
- this.conf = c;
-
- addCommandDescriptor(RandomReadTest.class, "randomRead",
- "Run random read test");
- addCommandDescriptor(RandomSeekScanTest.class, "randomSeekScan",
- "Run random seek and scan 100 test");
- addCommandDescriptor(RandomScanWithRange10Test.class, "scanRange10",
- "Run random seek scan with both start and stop row (max 10 rows)");
- addCommandDescriptor(RandomScanWithRange100Test.class, "scanRange100",
- "Run random seek scan with both start and stop row (max 100 rows)");
- addCommandDescriptor(RandomScanWithRange1000Test.class, "scanRange1000",
- "Run random seek scan with both start and stop row (max 1000 rows)");
- addCommandDescriptor(RandomScanWithRange10000Test.class, "scanRange10000",
- "Run random seek scan with both start and stop row (max 10000 rows)");
- addCommandDescriptor(RandomWriteTest.class, "randomWrite",
- "Run random write test");
- addCommandDescriptor(SequentialReadTest.class, "sequentialRead",
- "Run sequential read test");
- addCommandDescriptor(SequentialWriteTest.class, "sequentialWrite",
- "Run sequential write test");
- addCommandDescriptor(ScanTest.class, "scan",
- "Run scan test (read every row)");
- addCommandDescriptor(FilteredScanTest.class, "filterScan",
- "Run scan test using a filter to find a specific row based on it's value (make sure to use --rows=20)");
- }
-
- protected void addCommandDescriptor(Class<? extends Test> cmdClass,
- String name, String description) {
- CmdDescriptor cmdDescriptor =
- new CmdDescriptor(cmdClass, name, description);
- commands.put(name, cmdDescriptor);
- }
-
- /**
- * Implementations can have their status set.
- */
- static interface Status {
- /**
- * Sets status
- * @param msg status message
- * @throws IOException
- */
- void setStatus(final String msg) throws IOException;
- }
-
- /**
- * This class works as the InputSplit of Performance Evaluation
- * MapReduce InputFormat, and the Record Value of RecordReader.
- * Each map task will only read one record from a PeInputSplit,
- * the record value is the PeInputSplit itself.
- */
- public static class PeInputSplit extends InputSplit implements Writable {
- private int startRow = 0;
- private int rows = 0;
- private int totalRows = 0;
- private int clients = 0;
- private int rowsPerPut = 1;
-
- public PeInputSplit() {
- this.startRow = 0;
- this.rows = 0;
- this.totalRows = 0;
- this.clients = 0;
- this.rowsPerPut = 1;
- }
-
- public PeInputSplit(int startRow, int rows, int totalRows, int clients,
- int rowsPerPut) {
- this.startRow = startRow;
- this.rows = rows;
- this.totalRows = totalRows;
- this.clients = clients;
- this.rowsPerPut = 1;
- }
-
- @Override
- public void readFields(DataInput in) throws IOException {
- this.startRow = in.readInt();
- this.rows = in.readInt();
- this.totalRows = in.readInt();
- this.clients = in.readInt();
- this.rowsPerPut = in.readInt();
- }
-
- @Override
- public void write(DataOutput out) throws IOException {
- out.writeInt(startRow);
- out.writeInt(rows);
- out.writeInt(totalRows);
- out.writeInt(clients);
- out.writeInt(rowsPerPut);
- }
-
- @Override
- public long getLength() throws IOException, InterruptedException {
- return 0;
- }
-
- @Override
- public String[] getLocations() throws IOException, InterruptedException {
- return new String[0];
- }
-
- public int getStartRow() {
- return startRow;
- }
-
- public int getRows() {
- return rows;
- }
-
- public int getTotalRows() {
- return totalRows;
- }
-
- public int getClients() {
- return clients;
- }
-
- public int getRowsPerPut() {
- return rowsPerPut;
- }
- }
-
- /**
- * InputFormat of Performance Evaluation MapReduce job.
- * It extends from FileInputFormat, want to use it's methods such as setInputPaths().
- */
- public static class PeInputFormat extends FileInputFormat<NullWritable, PeInputSplit> {
-
- @Override
- public List<InputSplit> getSplits(JobContext job) throws IOException {
- // generate splits
- List<InputSplit> splitList = new ArrayList<InputSplit>();
-
- for (FileStatus file: listStatus(job)) {
- Path path = file.getPath();
- FileSystem fs = path.getFileSystem(job.getConfiguration());
- FSDataInputStream fileIn = fs.open(path);
- LineReader in = new LineReader(fileIn, job.getConfiguration());
- int lineLen = 0;
- while(true) {
- Text lineText = new Text();
- lineLen = in.readLine(lineText);
- if(lineLen <= 0) {
- break;
- }
- Matcher m = LINE_PATTERN.matcher(lineText.toString());
- if((m != null) && m.matches()) {
- int startRow = Integer.parseInt(m.group(1));
- int rows = Integer.parseInt(m.group(2));
- int totalRows = Integer.parseInt(m.group(3));
- int clients = Integer.parseInt(m.group(4));
- int rowsPerPut = Integer.parseInt(m.group(5));
-
- LOG.debug("split["+ splitList.size() + "] " +
- " startRow=" + startRow +
- " rows=" + rows +
- " totalRows=" + totalRows +
- " clients=" + clients +
- " rowsPerPut=" + rowsPerPut);
-
- PeInputSplit newSplit =
- new PeInputSplit(startRow, rows, totalRows, clients, rowsPerPut);
- splitList.add(newSplit);
- }
- }
- in.close();
- }
-
- LOG.info("Total # of splits: " + splitList.size());
- return splitList;
- }
-
- @Override
- public RecordReader<NullWritable, PeInputSplit> createRecordReader(InputSplit split,
- TaskAttemptContext context) {
- return new PeRecordReader();
- }
-
- public static class PeRecordReader extends RecordReader<NullWritable, PeInputSplit> {
- private boolean readOver = false;
- private PeInputSplit split = null;
- private NullWritable key = null;
- private PeInputSplit value = null;
-
- @Override
- public void initialize(InputSplit split, TaskAttemptContext context)
- throws IOException, InterruptedException {
- this.readOver = false;
- this.split = (PeInputSplit)split;
- }
-
- @Override
- public boolean nextKeyValue() throws IOException, InterruptedException {
- if(readOver) {
- return false;
- }
-
- key = NullWritable.get();
- value = (PeInputSplit)split;
-
- readOver = true;
- return true;
- }
-
- @Override
- public NullWritable getCurrentKey() throws IOException, InterruptedException {
- return key;
- }
-
- @Override
- public PeInputSplit getCurrentValue() throws IOException, InterruptedException {
- return value;
- }
-
- @Override
- public float getProgress() throws IOException, InterruptedException {
- if(readOver) {
- return 1.0f;
- } else {
- return 0.0f;
- }
- }
-
- @Override
- public void close() throws IOException {
- // do nothing
- }
- }
- }
-
- /**
- * MapReduce job that runs a performance evaluation client in each map task.
- */
- public static class EvaluationMapTask
- extends Mapper<NullWritable, PeInputSplit, LongWritable, LongWritable> {
-
- /** configuration parameter name that contains the command */
- public final static String CMD_KEY = "EvaluationMapTask.command";
- /** configuration parameter name that contains the PE impl */
- public static final String PE_KEY = "EvaluationMapTask.performanceEvalImpl";
-
- private Class<? extends Test> cmd;
- private PerformanceEvaluation pe;
-
- @Override
- protected void setup(Context context) throws IOException, InterruptedException {
- this.cmd = forName(context.getConfiguration().get(CMD_KEY), Test.class);
-
- // this is required so that extensions of PE are instantiated within the
- // map reduce task...
- Class<? extends PerformanceEvaluation> peClass =
- forName(context.getConfiguration().get(PE_KEY), PerformanceEvaluation.class);
- try {
- this.pe = peClass.getConstructor(Configuration.class)
- .newInstance(context.getConfiguration());
- } catch (Exception e) {
- throw new IllegalStateException("Could not instantiate PE instance", e);
- }
- }
-
- private <Type> Class<? extends Type> forName(String className, Class<Type> type) {
- Class<? extends Type> clazz = null;
- try {
- clazz = Class.forName(className).asSubclass(type);
- } catch (ClassNotFoundException e) {
- throw new IllegalStateException("Could not find class for name: " + className, e);
- }
- return clazz;
- }
-
- protected void map(NullWritable key, PeInputSplit value, final Context context)
- throws IOException, InterruptedException {
-
- Status status = new Status() {
- public void setStatus(String msg) {
- context.setStatus(msg);
- }
- };
-
- // Evaluation task
- long elapsedTime = this.pe.runOneClient(this.cmd, value.getStartRow(),
- value.getRows(), value.getTotalRows(), value.getRowsPerPut(), status);
- // Collect how much time the thing took. Report as map output and
- // to the ELAPSED_TIME counter.
- context.getCounter(Counter.ELAPSED_TIME).increment(elapsedTime);
- context.getCounter(Counter.ROWS).increment(value.rows);
- context.write(new LongWritable(value.startRow), new LongWritable(elapsedTime));
- context.progress();
- }
- }
-
- /*
- * If table does not already exist, create.
- * @param c Client to use checking.
- * @return True if we created the table.
- * @throws IOException
- */
- private boolean checkTable() throws IOException {
- HTableDescriptor tableDescriptor = getTableDescriptor();
- RemoteAdmin admin =
- new RemoteAdmin(new Client(cluster), conf, accessToken);
- if (!admin.isTableAvailable(tableDescriptor.getName())) {
- admin.createTable(tableDescriptor);
- return true;
- }
- return false;
- }
-
- protected HTableDescriptor getTableDescriptor() {
- return TABLE_DESCRIPTOR;
- }
-
- /*
- * We're to run multiple clients concurrently. Setup a mapreduce job. Run
- * one map per client. Then run a single reduce to sum the elapsed times.
- * @param cmd Command to run.
- * @throws IOException
- */
- private void runNIsMoreThanOne(final Class<? extends Test> cmd)
- throws IOException, InterruptedException, ClassNotFoundException {
- checkTable();
- if (nomapred) {
- doMultipleClients(cmd);
- } else {
- doMapReduce(cmd);
- }
- }
-
- /*
- * Run all clients in this vm each to its own thread.
- * @param cmd Command to run.
- * @throws IOException
- */
- private void doMultipleClients(final Class<? extends Test> cmd) throws IOException {
- final List<Thread> threads = new ArrayList<Thread>(N);
- final int perClientRows = R/N;
- for (int i = 0; i < N; i++) {
- Thread t = new Thread (Integer.toString(i)) {
- @Override
- public void run() {
- super.run();
- PerformanceEvaluation pe = new PerformanceEvaluation(conf);
- int index = Integer.parseInt(getName());
- try {
- long elapsedTime = pe.runOneClient(cmd, index * perClientRows,
- perClientRows, R, B, new Status() {
- public void setStatus(final String msg) throws IOException {
- LOG.info("client-" + getName() + " " + msg);
- }
- });
- LOG.info("Finished " + getName() + " in " + elapsedTime +
- "ms writing " + perClientRows + " rows");
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- }
- };
- threads.add(t);
- }
- for (Thread t: threads) {
- t.start();
- }
- for (Thread t: threads) {
- while(t.isAlive()) {
- try {
- t.join();
- } catch (InterruptedException e) {
- LOG.debug("Interrupted, continuing" + e.toString());
- }
- }
- }
- }
-
- /*
- * Run a mapreduce job. Run as many maps as asked-for clients.
- * Before we start up the job, write out an input file with instruction
- * per client regards which row they are to start on.
- * @param cmd Command to run.
- * @throws IOException
- */
- private void doMapReduce(final Class<? extends Test> cmd) throws IOException,
- InterruptedException, ClassNotFoundException {
- Path inputDir = writeInputFile(this.conf);
- this.conf.set(EvaluationMapTask.CMD_KEY, cmd.getName());
- this.conf.set(EvaluationMapTask.PE_KEY, getClass().getName());
- Job job = new Job(this.conf);
- job.setJarByClass(PerformanceEvaluation.class);
- job.setJobName("HBase Performance Evaluation");
-
- job.setInputFormatClass(PeInputFormat.class);
- PeInputFormat.setInputPaths(job, inputDir);
-
- job.setOutputKeyClass(LongWritable.class);
- job.setOutputValueClass(LongWritable.class);
-
- job.setMapperClass(EvaluationMapTask.class);
- job.setReducerClass(LongSumReducer.class);
-
- job.setNumReduceTasks(1);
-
- job.setOutputFormatClass(TextOutputFormat.class);
- TextOutputFormat.setOutputPath(job, new Path(inputDir,"outputs"));
-
- job.waitForCompletion(true);
- }
-
- /*
- * Write input file of offsets-per-client for the mapreduce job.
- * @param c Configuration
- * @return Directory that contains file written.
- * @throws IOException
- */
- private Path writeInputFile(final Configuration c) throws IOException {
- FileSystem fs = FileSystem.get(c);
- if (!fs.exists(PERF_EVAL_DIR)) {
- fs.mkdirs(PERF_EVAL_DIR);
- }
- SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMddHHmmss");
- Path subdir = new Path(PERF_EVAL_DIR, formatter.format(new Date()));
- fs.mkdirs(subdir);
- Path inputFile = new Path(subdir, "input.txt");
- PrintStream out = new PrintStream(fs.create(inputFile));
- // Make input random.
- Map<Integer, String> m = new TreeMap<Integer, String>();
- Hash h = MurmurHash.getInstance();
- int perClientRows = (R / N);
- try {
- for (int i = 0; i < 10; i++) {
- for (int j = 0; j < N; j++) {
- String s = "startRow=" + ((j * perClientRows) + (i * (perClientRows/10))) +
- ", perClientRunRows=" + (perClientRows / 10) +
- ", totalRows=" + R +
- ", clients=" + N +
- ", rowsPerPut=" + B;
- int hash = h.hash(Bytes.toBytes(s));
- m.put(hash, s);
- }
- }
- for (Map.Entry<Integer, String> e: m.entrySet()) {
- out.println(e.getValue());
- }
- } finally {
- out.close();
- }
- return subdir;
- }
-
- /**
- * Describes a command.
- */
- static class CmdDescriptor {
- private Class<? extends Test> cmdClass;
- private String name;
- private String description;
-
- CmdDescriptor(Class<? extends Test> cmdClass, String name, String description) {
- this.cmdClass = cmdClass;
- this.name = name;
- this.description = description;
- }
-
- public Class<? extends Test> getCmdClass() {
- return cmdClass;
- }
-
- public String getName() {
- return name;
- }
-
- public String getDescription() {
- return description;
- }
- }
-
- /**
- * Wraps up options passed to {@link org.apache.hadoop.hbase.PerformanceEvaluation.Test
- * tests}. This makes the reflection logic a little easier to understand...
- */
- static class TestOptions {
- private int startRow;
- private int perClientRunRows;
- private int totalRows;
- private byte[] tableName;
- private int rowsPerPut;
-
- TestOptions() {
- }
-
- TestOptions(int startRow, int perClientRunRows, int totalRows, byte[] tableName, int rowsPerPut) {
- this.startRow = startRow;
- this.perClientRunRows = perClientRunRows;
- this.totalRows = totalRows;
- this.tableName = tableName;
- this.rowsPerPut = rowsPerPut;
- }
-
- public int getStartRow() {
- return startRow;
- }
-
- public int getPerClientRunRows() {
- return perClientRunRows;
- }
-
- public int getTotalRows() {
- return totalRows;
- }
-
- public byte[] getTableName() {
- return tableName;
- }
-
- public int getRowsPerPut() {
- return rowsPerPut;
- }
- }
-
- /*
- * A test.
- * Subclass to particularize what happens per row.
- */
- static abstract class Test {
- // Below is make it so when Tests are all running in the one
- // jvm, that they each have a differently seeded Random.
- private static final Random randomSeed =
- new Random(System.currentTimeMillis());
- private static long nextRandomSeed() {
- return randomSeed.nextLong();
- }
- protected final Random rand = new Random(nextRandomSeed());
-
- protected final int startRow;
- protected final int perClientRunRows;
- protected final int totalRows;
- protected final Status status;
- protected byte[] tableName;
- protected RemoteHTable table;
- protected volatile Configuration conf;
-
- /**
- * Note that all subclasses of this class must provide a public contructor
- * that has the exact same list of arguments.
- */
- Test(final Configuration conf, final TestOptions options, final Status status) {
- super();
- this.startRow = options.getStartRow();
- this.perClientRunRows = options.getPerClientRunRows();
- this.totalRows = options.getTotalRows();
- this.status = status;
- this.tableName = options.getTableName();
- this.table = null;
- this.conf = conf;
- }
-
- protected String generateStatus(final int sr, final int i, final int lr) {
- return sr + "/" + i + "/" + lr;
- }
-
- protected int getReportingPeriod() {
- int period = this.perClientRunRows / 10;
- return period == 0? this.perClientRunRows: period;
- }
-
- void testSetup() throws IOException {
- this.table = new RemoteHTable(new Client(cluster), conf, tableName,
- accessToken);
- }
-
- void testTakedown() throws IOException {
- this.table.close();
- }
-
- /*
- * Run test
- * @return Elapsed time.
- * @throws IOException
- */
- long test() throws IOException {
- long elapsedTime;
- testSetup();
- long startTime = System.currentTimeMillis();
- try {
- testTimed();
- elapsedTime = System.currentTimeMillis() - startTime;
- } finally {
- testTakedown();
- }
- return elapsedTime;
- }
-
- /**
- * Provides an extension point for tests that don't want a per row invocation.
- */
- void testTimed() throws IOException {
- int lastRow = this.startRow + this.perClientRunRows;
- // Report on completion of 1/10th of total.
- for (int i = this.startRow; i < lastRow; i++) {
- testRow(i);
- if (status != null && i > 0 && (i % getReportingPeriod()) == 0) {
- status.setStatus(generateStatus(this.startRow, i, lastRow));
- }
- }
- }
-
- /*
- * Test for individual row.
- * @param i Row index.
- */
- void testRow(final int i) throws IOException {
- }
- }
-
- @SuppressWarnings("unused")
- static class RandomSeekScanTest extends Test {
- RandomSeekScanTest(Configuration conf, TestOptions options, Status status) {
- super(conf, options, status);
- }
-
- @Override
- void testRow(final int i) throws IOException {
- Scan scan = new Scan(getRandomRow(this.rand, this.totalRows));
- scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
- scan.setFilter(new WhileMatchFilter(new PageFilter(120)));
- ResultScanner s = this.table.getScanner(scan);
- //int count = 0;
- for (Result rr = null; (rr = s.next()) != null;) {
- // LOG.info("" + count++ + " " + rr.toString());
- }
- s.close();
- }
-
- @Override
- protected int getReportingPeriod() {
- int period = this.perClientRunRows / 100;
- return period == 0? this.perClientRunRows: period;
- }
-
- }
-
- @SuppressWarnings("unused")
- static abstract class RandomScanWithRangeTest extends Test {
- RandomScanWithRangeTest(Configuration conf, TestOptions options, Status status) {
- super(conf, options, status);
- }
-
- @Override
- void testRow(final int i) throws IOException {
- Pair<byte[], byte[]> startAndStopRow = getStartAndStopRow();
- Scan scan = new Scan(startAndStopRow.getFirst(), startAndStopRow.getSecond());
- scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
- ResultScanner s = this.table.getScanner(scan);
- int count = 0;
- for (Result rr = null; (rr = s.next()) != null;) {
- count++;
- }
-
- if (i % 100 == 0) {
- LOG.info(String.format("Scan for key range %s - %s returned %s rows",
- Bytes.toString(startAndStopRow.getFirst()),
- Bytes.toString(startAndStopRow.getSecond()), count));
- }
-
- s.close();
- }
-
- protected abstract Pair<byte[],byte[]> getStartAndStopRow();
-
- protected Pair<byte[], byte[]> generateStartAndStopRows(int maxRange) {
- int start = this.rand.nextInt(Integer.MAX_VALUE) % totalRows;
- int stop = start + maxRange;
- return new Pair<byte[],byte[]>(format(start), format(stop));
- }
-
- @Override
- protected int getReportingPeriod() {
- int period = this.perClientRunRows / 100;
- return period == 0? this.perClientRunRows: period;
- }
- }
-
- static class RandomScanWithRange10Test extends RandomScanWithRangeTest {
- RandomScanWithRange10Test(Configuration conf, TestOptions options, Status status) {
- super(conf, options, status);
- }
-
- @Override
- protected Pair<byte[], byte[]> getStartAndStopRow() {
- return generateStartAndStopRows(10);
- }
- }
-
- static class RandomScanWithRange100Test extends RandomScanWithRangeTest {
- RandomScanWithRange100Test(Configuration conf, TestOptions options, Status status) {
- super(conf, options, status);
- }
-
- @Override
- protected Pair<byte[], byte[]> getStartAndStopRow() {
- return generateStartAndStopRows(100);
- }
- }
-
- static class RandomScanWithRange1000Test extends RandomScanWithRangeTest {
- RandomScanWithRange1000Test(Configuration conf, TestOptions options, Status status) {
- super(conf, options, status);
- }
-
- @Override
- protected Pair<byte[], byte[]> getStartAndStopRow() {
- return generateStartAndStopRows(1000);
- }
- }
-
- static class RandomScanWithRange10000Test extends RandomScanWithRangeTest {
- RandomScanWithRange10000Test(Configuration conf, TestOptions options, Status status) {
- super(conf, options, status);
- }
-
- @Override
- protected Pair<byte[], byte[]> getStartAndStopRow() {
- return generateStartAndStopRows(10000);
- }
- }
-
- static class RandomReadTest extends Test {
- RandomReadTest(Configuration conf, TestOptions options, Status status) {
- super(conf, options, status);
- }
-
- @Override
- void testRow(final int i) throws IOException {
- Get get = new Get(getRandomRow(this.rand, this.totalRows));
- get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
- this.table.get(get);
- }
-
- @Override
- protected int getReportingPeriod() {
- int period = this.perClientRunRows / 100;
- return period == 0? this.perClientRunRows: period;
- }
-
- }
-
- static class RandomWriteTest extends Test {
- int rowsPerPut;
-
- RandomWriteTest(Configuration conf, TestOptions options, Status status) {
- super(conf, options, status);
- rowsPerPut = options.getRowsPerPut();
- }
-
- @Override
- void testTimed() throws IOException {
- int lastRow = this.startRow + this.perClientRunRows;
- // Report on completion of 1/10th of total.
- List<Put> puts = new ArrayList<Put>();
- for (int i = this.startRow; i < lastRow; i += rowsPerPut) {
- for (int j = 0; j < rowsPerPut; j++) {
- byte [] row = getRandomRow(this.rand, this.totalRows);
- Put put = new Put(row);
- byte[] value = generateValue(this.rand);
- put.add(FAMILY_NAME, QUALIFIER_NAME, value);
- puts.add(put);
- if (status != null && i > 0 && (i % getReportingPeriod()) == 0) {
- status.setStatus(generateStatus(this.startRow, i, lastRow));
- }
- }
- table.put(puts);
- }
- }
- }
-
- static class ScanTest extends Test {
- private ResultScanner testScanner;
-
- ScanTest(Configuration conf, TestOptions options, Status status) {
- super(conf, options, status);
- }
-
- @Override
- void testSetup() throws IOException {
- super.testSetup();
- }
-
- @Override
- void testTakedown() throws IOException {
- if (this.testScanner != null) {
- this.testScanner.close();
- }
- super.testTakedown();
- }
-
-
- @Override
- void testRow(final int i) throws IOException {
- if (this.testScanner == null) {
- Scan scan = new Scan(format(this.startRow));
- scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
- this.testScanner = table.getScanner(scan);
- }
- testScanner.next();
- }
-
- }
-
- static class SequentialReadTest extends Test {
- SequentialReadTest(Configuration conf, TestOptions options, Status status) {
- super(conf, options, status);
- }
-
- @Override
- void testRow(final int i) throws IOException {
- Get get = new Get(format(i));
- get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
- table.get(get);
- }
-
- }
-
- static class SequentialWriteTest extends Test {
- int rowsPerPut;
-
- SequentialWriteTest(Configuration conf, TestOptions options, Status status) {
- super(conf, options, status);
- rowsPerPut = options.getRowsPerPut();
- }
-
- @Override
- void testTimed() throws IOException {
- int lastRow = this.startRow + this.perClientRunRows;
- // Report on completion of 1/10th of total.
- List<Put> puts = new ArrayList<Put>();
- for (int i = this.startRow; i < lastRow; i += rowsPerPut) {
- for (int j = 0; j < rowsPerPut; j++) {
- Put put = new Put(format(i + j));
- byte[] value = generateValue(this.rand);
- put.add(FAMILY_NAME, QUALIFIER_NAME, value);
- puts.add(put);
- if (status != null && i > 0 && (i % getReportingPeriod()) == 0) {
- status.setStatus(generateStatus(this.startRow, i, lastRow));
- }
- }
- table.put(puts);
- }
- }
- }
-
- static class FilteredScanTest extends Test {
- protected static final Log LOG = LogFactory.getLog(FilteredScanTest.class.getName());
-
- FilteredScanTest(Configuration conf, TestOptions options, Status status) {
- super(conf, options, status);
- }
-
- @Override
- void testRow(int i) throws IOException {
- byte[] value = generateValue(this.rand);
- Scan scan = constructScan(value);
- ResultScanner scanner = null;
- try {
- scanner = this.table.getScanner(scan);
- while (scanner.next() != null) {
- }
- } finally {
- if (scanner != null) scanner.close();
- }
- }
-
- protected Scan constructScan(byte[] valuePrefix) throws IOException {
- Filter filter = new SingleColumnValueFilter(
- FAMILY_NAME, QUALIFIER_NAME, CompareFilter.CompareOp.EQUAL,
- new BinaryComparator(valuePrefix)
- );
- Scan scan = new Scan();
- scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
- scan.setFilter(filter);
- return scan;
- }
- }
-
- /*
- * Format passed integer.
- * @param number
- * @return Returns zero-prefixed 10-byte wide decimal version of passed
- * number (Does absolute in case number is negative).
- */
- public static byte [] format(final int number) {
- byte [] b = new byte[10];
- int d = Math.abs(number);
- for (int i = b.length - 1; i >= 0; i--) {
- b[i] = (byte)((d % 10) + '0');
- d /= 10;
- }
- return b;
- }
-
- /*
- * This method takes some time and is done inline uploading data. For
- * example, doing the mapfile test, generation of the key and value
- * consumes about 30% of CPU time.
- * @return Generated random value to insert into a table cell.
- */
- public static byte[] generateValue(final Random r) {
- byte [] b = new byte [ROW_LENGTH];
- r.nextBytes(b);
- return b;
- }
-
- static byte [] getRandomRow(final Random random, final int totalRows) {
- return format(random.nextInt(Integer.MAX_VALUE) % totalRows);
- }
-
- long runOneClient(final Class<? extends Test> cmd, final int startRow,
- final int perClientRunRows, final int totalRows,
- final int rowsPerPut, final Status status)
- throws IOException {
- status.setStatus("Start " + cmd + " at offset " + startRow + " for " +
- perClientRunRows + " rows");
- long totalElapsedTime = 0;
-
- Test t = null;
- TestOptions options = new TestOptions(startRow, perClientRunRows,
- totalRows, getTableDescriptor().getName(), rowsPerPut);
- try {
- Constructor<? extends Test> constructor = cmd.getDeclaredConstructor(
- Configuration.class, TestOptions.class, Status.class);
- t = constructor.newInstance(this.conf, options, status);
- } catch (NoSuchMethodException e) {
- throw new IllegalArgumentException("Invalid command class: " +
- cmd.getName() + ". It does not provide a constructor as described by" +
- "the javadoc comment. Available constructors are: " +
- Arrays.toString(cmd.getConstructors()));
- } catch (Exception e) {
- throw new IllegalStateException("Failed to construct command class", e);
- }
- totalElapsedTime = t.test();
-
- status.setStatus("Finished " + cmd + " in " + totalElapsedTime +
- "ms at offset " + startRow + " for " + perClientRunRows + " rows");
- return totalElapsedTime;
- }
-
- private void runNIsOne(final Class<? extends Test> cmd) {
- Status status = new Status() {
- public void setStatus(String msg) throws IOException {
- LOG.info(msg);
- }
- };
-
- try {
- checkTable();
- runOneClient(cmd, 0, R, R, B, status);
- } catch (Exception e) {
- LOG.error("Failed", e);
- }
- }
-
- private void runTest(final Class<? extends Test> cmd) throws IOException,
- InterruptedException, ClassNotFoundException {
- if (N == 1) {
- // If there is only one client and one HRegionServer, we assume nothing
- // has been set up at all.
- runNIsOne(cmd);
- } else {
- // Else, run
- runNIsMoreThanOne(cmd);
- }
- }
-
- protected void printUsage() {
- printUsage(null);
- }
-
- protected void printUsage(final String message) {
- if (message != null && message.length() > 0) {
- System.err.println(message);
- }
- System.err.println("Usage: java " + this.getClass().getName() + " \\");
- System.err.println(" [--option] [--option=value] <command> <nclients>");
- System.err.println();
- System.err.println("Options:");
- System.err.println(" host String. Specify Stargate endpoint.");
- System.err.println(" token String. API access token.");
- System.err.println(" rows Integer. Rows each client runs. Default: One million");
- System.err.println(" rowsPerPut Integer. Rows each Stargate (multi)Put. Default: 100");
- System.err.println(" nomapred (Flag) Run multiple clients using threads " +
- "(rather than use mapreduce)");
- System.err.println();
- System.err.println("Command:");
- for (CmdDescriptor command : commands.values()) {
- System.err.println(String.format(" %-15s %s", command.getName(), command.getDescription()));
- }
- System.err.println();
- System.err.println("Args:");
- System.err.println(" nclients Integer. Required. Total number of " +
- "clients (and HRegionServers)");
- System.err.println(" running: 1 <= value <= 500");
- System.err.println("Examples:");
- System.err.println(" To run a single evaluation client:");
- System.err.println(" $ bin/hbase " + this.getClass().getName()
- + " sequentialWrite 1");
- }
-
- private void getArgs(final int start, final String[] args) {
- if(start + 1 > args.length) {
- throw new IllegalArgumentException("must supply the number of clients");
- }
- N = Integer.parseInt(args[start]);
- if (N < 1) {
- throw new IllegalArgumentException("Number of clients must be > 1");
- }
- // Set total number of rows to write.
- R = R * N;
- }
-
- public int doCommandLine(final String[] args) {
- // Process command-line args. TODO: Better cmd-line processing
- // (but hopefully something not as painful as cli options).
- int errCode = -1;
- if (args.length < 1) {
- printUsage();
- return errCode;
- }
-
- try {
- for (int i = 0; i < args.length; i++) {
- String cmd = args[i];
- if (cmd.equals("-h")) {
- printUsage();
- errCode = 0;
- break;
- }
-
- final String nmr = "--nomapred";
- if (cmd.startsWith(nmr)) {
- nomapred = true;
- continue;
- }
-
- final String rows = "--rows=";
- if (cmd.startsWith(rows)) {
- R = Integer.parseInt(cmd.substring(rows.length()));
- continue;
- }
-
- final String rowsPerPut = "--rowsPerPut=";
- if (cmd.startsWith(rowsPerPut)) {
- this.B = Integer.parseInt(cmd.substring(rowsPerPut.length()));
- continue;
- }
-
- final String host = "--host=";
- if (cmd.startsWith(host)) {
- cluster.add(cmd.substring(host.length()));
- continue;
- }
-
- final String token = "--token=";
- if (cmd.startsWith(token)) {
- accessToken = cmd.substring(token.length());
- continue;
- }
-
- Class<? extends Test> cmdClass = determineCommandClass(cmd);
- if (cmdClass != null) {
- getArgs(i + 1, args);
- if (cluster.isEmpty()) {
- String s = conf.get("stargate.hostname", "localhost");
- if (s.contains(":")) {
- cluster.add(s);
- } else {
- cluster.add(s, conf.getInt("stargate.port", 8080));
- }
- }
- runTest(cmdClass);
- errCode = 0;
- break;
- }
-
- printUsage();
- break;
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
-
- return errCode;
- }
-
- private Class<? extends Test> determineCommandClass(String cmd) {
- CmdDescriptor descriptor = commands.get(cmd);
- return descriptor != null ? descriptor.getCmdClass() : null;
- }
-
- /**
- * @param args
- */
- public static void main(final String[] args) {
- Configuration c = HBaseConfiguration.create();
- System.exit(new PerformanceEvaluation(c).doCommandLine(args));
- }
-}