You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by bu...@apache.org on 2017/09/11 07:42:49 UTC
[22/50] [abbrv] hbase git commit: HBASE-18106 Redo ProcedureInfo and
LockInfo
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java
index d85146a..e2e97dc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java
@@ -18,6 +18,12 @@
package org.apache.hadoop.hbase.master.locking;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.util.List;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -26,10 +32,9 @@ import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.master.MasterServices;
-import org.apache.hadoop.hbase.master.locking.LockProcedure;
-import org.apache.hadoop.hbase.master.locking.TestLockProcedure;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureConstants;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.procedure2.LockType;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
@@ -43,12 +48,6 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-import java.util.List;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
@Category({MasterTests.class, SmallTests.class})
public class TestLockManager {
@Rule
@@ -94,7 +93,7 @@ public class TestLockManager {
@After
public void tearDown() throws Exception {
- for (Procedure<?> proc : getMasterProcedureExecutor().listProcedures()) {
+ for (Procedure<?> proc : getMasterProcedureExecutor().getProcedures()) {
if (proc instanceof LockProcedure) {
((LockProcedure) proc).unlock(getMasterProcedureExecutor().getEnvironment());
ProcedureTestingUtility.waitProcedure(getMasterProcedureExecutor(), proc);
@@ -113,7 +112,7 @@ public class TestLockManager {
@Test
public void testMasterLockAcquire() throws Exception {
LockManager.MasterLock lock = masterServices.getLockManager().createMasterLock(namespace,
- LockProcedure.LockType.EXCLUSIVE, "desc");
+ LockType.EXCLUSIVE, "desc");
assertTrue(lock.tryAcquire(2000));
assertTrue(lock.getProc().isLocked());
lock.release();
@@ -126,9 +125,9 @@ public class TestLockManager {
@Test
public void testMasterLockAcquireTimeout() throws Exception {
LockManager.MasterLock lock = masterServices.getLockManager().createMasterLock(
- tableName, LockProcedure.LockType.EXCLUSIVE, "desc");
+ tableName, LockType.EXCLUSIVE, "desc");
LockManager.MasterLock lock2 = masterServices.getLockManager().createMasterLock(
- tableName, LockProcedure.LockType.EXCLUSIVE, "desc");
+ tableName, LockType.EXCLUSIVE, "desc");
assertTrue(lock.tryAcquire(2000));
assertFalse(lock2.tryAcquire(LOCAL_LOCKS_TIMEOUT/2)); // wait less than other lock's timeout
assertEquals(null, lock2.getProc());
@@ -146,7 +145,7 @@ public class TestLockManager {
LockManager.MasterLock lock = masterServices.getLockManager().createMasterLock(
tableRegions, "desc");
LockManager.MasterLock lock2 = masterServices.getLockManager().createMasterLock(
- tableName, LockProcedure.LockType.EXCLUSIVE, "desc");
+ tableName, LockType.EXCLUSIVE, "desc");
assertTrue(lock.tryAcquire(2000));
assertFalse(lock2.tryAcquire(LOCAL_LOCKS_TIMEOUT/2)); // wait less than other lock's timeout
assertEquals(null, lock2.getProc());
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java
index adaebf4..e338849 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockProcedure.java
@@ -18,9 +18,20 @@
package org.apache.hadoop.hbase.master.locking;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -28,40 +39,33 @@ import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.locking.LockServiceClient;
+import org.apache.hadoop.hbase.master.MasterRpcServices;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureConstants;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
-import org.apache.hadoop.hbase.master.MasterRpcServices;
+import org.apache.hadoop.hbase.procedure2.LockType;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.*;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.hamcrest.core.IsInstanceOf;
import org.hamcrest.core.StringStartsWith;
-import org.junit.rules.TestRule;
-import org.junit.experimental.categories.Category;
-
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.junit.rules.TestName;
-import org.apache.hadoop.hbase.CategoryBasedTimeout;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import org.junit.rules.TestRule;
@Category({MasterTests.class, SmallTests.class})
public class TestLockProcedure {
@@ -130,7 +134,7 @@ public class TestLockProcedure {
public void tearDown() throws Exception {
ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, false);
// Kill all running procedures.
- for (Procedure<?> proc : procExec.listProcedures()) {
+ for (Procedure<?> proc : procExec.getProcedures()) {
procExec.abort(proc.getProcId());
ProcedureTestingUtility.waitProcedure(procExec, proc);
}
@@ -138,17 +142,17 @@ public class TestLockProcedure {
}
private LockRequest getNamespaceLock(String namespace, String description) {
- return LockServiceClient.buildLockRequest(LockType.EXCLUSIVE,
+ return LockServiceClient.buildLockRequest(LockServiceProtos.LockType.EXCLUSIVE,
namespace, null, null, description, HConstants.NO_NONCE, HConstants.NO_NONCE);
}
private LockRequest getTableExclusiveLock(TableName tableName, String description) {
- return LockServiceClient.buildLockRequest(LockType.EXCLUSIVE,
+ return LockServiceClient.buildLockRequest(LockServiceProtos.LockType.EXCLUSIVE,
null, tableName, null, description, HConstants.NO_NONCE, HConstants.NO_NONCE);
}
private LockRequest getRegionLock(List<HRegionInfo> regionInfos, String description) {
- return LockServiceClient.buildLockRequest(LockType.EXCLUSIVE,
+ return LockServiceClient.buildLockRequest(LockServiceProtos.LockType.EXCLUSIVE,
null, null, regionInfos, description, HConstants.NO_NONCE, HConstants.NO_NONCE);
}
@@ -345,7 +349,7 @@ public class TestLockProcedure {
CountDownLatch latch = new CountDownLatch(1);
// MasterRpcServices don't set latch with LockProcedure, so create one and submit it directly.
LockProcedure lockProc = new LockProcedure(UTIL.getConfiguration(),
- TableName.valueOf("table"), LockProcedure.LockType.EXCLUSIVE, "desc", latch);
+ TableName.valueOf("table"), org.apache.hadoop.hbase.procedure2.LockType.EXCLUSIVE, "desc", latch);
procExec.submitProcedure(lockProc);
assertTrue(latch.await(2000, TimeUnit.MILLISECONDS));
releaseLock(lockProc.getProcId());
@@ -359,7 +363,7 @@ public class TestLockProcedure {
CountDownLatch latch = new CountDownLatch(1);
// MasterRpcServices don't set latch with LockProcedure, so create one and submit it directly.
LockProcedure lockProc = new LockProcedure(UTIL.getConfiguration(),
- TableName.valueOf("table"), LockProcedure.LockType.EXCLUSIVE, "desc", latch);
+ TableName.valueOf("table"), LockType.EXCLUSIVE, "desc", latch);
procExec.submitProcedure(lockProc);
assertTrue(awaitForLocked(lockProc.getProcId(), 2000));
Thread.sleep(LOCAL_LOCKS_TIMEOUT / 2);
@@ -421,7 +425,7 @@ public class TestLockProcedure {
ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);
CountDownLatch latch = new CountDownLatch(1);
LockProcedure lockProc = new LockProcedure(UTIL.getConfiguration(),
- TableName.valueOf("table"), LockProcedure.LockType.EXCLUSIVE, "desc", latch);
+ TableName.valueOf("table"), LockType.EXCLUSIVE, "desc", latch);
procExec.submitProcedure(lockProc);
assertTrue(latch.await(2000, TimeUnit.MILLISECONDS));
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
index 5f20c7f..e2d6b0c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
@@ -32,10 +32,11 @@ import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.master.locking.LockProcedure;
-import org.apache.hadoop.hbase.procedure2.LockInfo;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureEvent;
-import org.apache.hadoop.hbase.procedure2.LockInfo.WaitingProcedure;
+import org.apache.hadoop.hbase.procedure2.LockType;
+import org.apache.hadoop.hbase.procedure2.LockedResource;
+import org.apache.hadoop.hbase.procedure2.LockedResourceType;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.TestProcedure;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -903,7 +904,7 @@ public class TestMasterProcedureScheduler {
}
}
- private static LockProcedure createLockProcedure(LockProcedure.LockType lockType, long procId) throws Exception {
+ private static LockProcedure createLockProcedure(LockType lockType, long procId) throws Exception {
LockProcedure procedure = new LockProcedure();
Field typeField = LockProcedure.class.getDeclaredField("type");
@@ -918,31 +919,31 @@ public class TestMasterProcedureScheduler {
}
private static LockProcedure createExclusiveLockProcedure(long procId) throws Exception {
- return createLockProcedure(LockProcedure.LockType.EXCLUSIVE, procId);
+ return createLockProcedure(LockType.EXCLUSIVE, procId);
}
private static LockProcedure createSharedLockProcedure(long procId) throws Exception {
- return createLockProcedure(LockProcedure.LockType.SHARED, procId);
+ return createLockProcedure(LockType.SHARED, procId);
}
- private static void assertLockResource(LockInfo lock,
- LockInfo.ResourceType resourceType, String resourceName)
+ private static void assertLockResource(LockedResource resource,
+ LockedResourceType resourceType, String resourceName)
{
- assertEquals(resourceType, lock.getResourceType());
- assertEquals(resourceName, lock.getResourceName());
+ assertEquals(resourceType, resource.getResourceType());
+ assertEquals(resourceName, resource.getResourceName());
}
- private static void assertExclusiveLock(LockInfo lock, long procId)
+ private static void assertExclusiveLock(LockedResource resource, Procedure<?> procedure)
{
- assertEquals(LockInfo.LockType.EXCLUSIVE, lock.getLockType());
- assertEquals(procId, lock.getExclusiveLockOwnerProcedure().getProcId());
- assertEquals(0, lock.getSharedLockCount());
+ assertEquals(LockType.EXCLUSIVE, resource.getLockType());
+ assertEquals(procedure, resource.getExclusiveLockOwnerProcedure());
+ assertEquals(0, resource.getSharedLockCount());
}
- private static void assertSharedLock(LockInfo lock, int lockCount)
+ private static void assertSharedLock(LockedResource resource, int lockCount)
{
- assertEquals(LockInfo.LockType.SHARED, lock.getLockType());
- assertEquals(lockCount, lock.getSharedLockCount());
+ assertEquals(LockType.SHARED, resource.getLockType());
+ assertEquals(lockCount, resource.getSharedLockCount());
}
@Test
@@ -950,13 +951,13 @@ public class TestMasterProcedureScheduler {
LockProcedure procedure = createExclusiveLockProcedure(0);
queue.waitServerExclusiveLock(procedure, ServerName.valueOf("server1,1234,0"));
- List<LockInfo> locks = queue.listLocks();
- assertEquals(1, locks.size());
+ List<LockedResource> resources = queue.getLocks();
+ assertEquals(1, resources.size());
- LockInfo serverLock = locks.get(0);
- assertLockResource(serverLock, LockInfo.ResourceType.SERVER, "server1,1234,0");
- assertExclusiveLock(serverLock, 0);
- assertTrue(serverLock.getWaitingProcedures().isEmpty());
+ LockedResource serverResource = resources.get(0);
+ assertLockResource(serverResource, LockedResourceType.SERVER, "server1,1234,0");
+ assertExclusiveLock(serverResource, procedure);
+ assertTrue(serverResource.getWaitingProcedures().isEmpty());
}
@Test
@@ -964,19 +965,19 @@ public class TestMasterProcedureScheduler {
LockProcedure procedure = createExclusiveLockProcedure(1);
queue.waitNamespaceExclusiveLock(procedure, "ns1");
- List<LockInfo> locks = queue.listLocks();
+ List<LockedResource> locks = queue.getLocks();
assertEquals(2, locks.size());
- LockInfo namespaceLock = locks.get(0);
- assertLockResource(namespaceLock, LockInfo.ResourceType.NAMESPACE, "ns1");
- assertExclusiveLock(namespaceLock, 1);
- assertTrue(namespaceLock.getWaitingProcedures().isEmpty());
+ LockedResource namespaceResource = locks.get(0);
+ assertLockResource(namespaceResource, LockedResourceType.NAMESPACE, "ns1");
+ assertExclusiveLock(namespaceResource, procedure);
+ assertTrue(namespaceResource.getWaitingProcedures().isEmpty());
- LockInfo tableLock = locks.get(1);
- assertLockResource(tableLock, LockInfo.ResourceType.TABLE,
+ LockedResource tableResource = locks.get(1);
+ assertLockResource(tableResource, LockedResourceType.TABLE,
TableName.NAMESPACE_TABLE_NAME.getNameAsString());
- assertSharedLock(tableLock, 1);
- assertTrue(tableLock.getWaitingProcedures().isEmpty());
+ assertSharedLock(tableResource, 1);
+ assertTrue(tableResource.getWaitingProcedures().isEmpty());
}
@Test
@@ -984,18 +985,18 @@ public class TestMasterProcedureScheduler {
LockProcedure procedure = createExclusiveLockProcedure(2);
queue.waitTableExclusiveLock(procedure, TableName.valueOf("ns2", "table2"));
- List<LockInfo> locks = queue.listLocks();
+ List<LockedResource> locks = queue.getLocks();
assertEquals(2, locks.size());
- LockInfo namespaceLock = locks.get(0);
- assertLockResource(namespaceLock, LockInfo.ResourceType.NAMESPACE, "ns2");
- assertSharedLock(namespaceLock, 1);
- assertTrue(namespaceLock.getWaitingProcedures().isEmpty());
+ LockedResource namespaceResource = locks.get(0);
+ assertLockResource(namespaceResource, LockedResourceType.NAMESPACE, "ns2");
+ assertSharedLock(namespaceResource, 1);
+ assertTrue(namespaceResource.getWaitingProcedures().isEmpty());
- LockInfo tableLock = locks.get(1);
- assertLockResource(tableLock, LockInfo.ResourceType.TABLE, "ns2:table2");
- assertExclusiveLock(tableLock, 2);
- assertTrue(tableLock.getWaitingProcedures().isEmpty());
+ LockedResource tableResource = locks.get(1);
+ assertLockResource(tableResource, LockedResourceType.TABLE, "ns2:table2");
+ assertExclusiveLock(tableResource, procedure);
+ assertTrue(tableResource.getWaitingProcedures().isEmpty());
}
@Test
@@ -1005,23 +1006,23 @@ public class TestMasterProcedureScheduler {
queue.waitRegion(procedure, regionInfo);
- List<LockInfo> locks = queue.listLocks();
- assertEquals(3, locks.size());
+ List<LockedResource> resources = queue.getLocks();
+ assertEquals(3, resources.size());
- LockInfo namespaceLock = locks.get(0);
- assertLockResource(namespaceLock, LockInfo.ResourceType.NAMESPACE, "ns3");
- assertSharedLock(namespaceLock, 1);
- assertTrue(namespaceLock.getWaitingProcedures().isEmpty());
+ LockedResource namespaceResource = resources.get(0);
+ assertLockResource(namespaceResource, LockedResourceType.NAMESPACE, "ns3");
+ assertSharedLock(namespaceResource, 1);
+ assertTrue(namespaceResource.getWaitingProcedures().isEmpty());
- LockInfo tableLock = locks.get(1);
- assertLockResource(tableLock, LockInfo.ResourceType.TABLE, "ns3:table3");
- assertSharedLock(tableLock, 1);
- assertTrue(tableLock.getWaitingProcedures().isEmpty());
+ LockedResource tableResource = resources.get(1);
+ assertLockResource(tableResource, LockedResourceType.TABLE, "ns3:table3");
+ assertSharedLock(tableResource, 1);
+ assertTrue(tableResource.getWaitingProcedures().isEmpty());
- LockInfo regionLock = locks.get(2);
- assertLockResource(regionLock, LockInfo.ResourceType.REGION, regionInfo.getEncodedName());
- assertExclusiveLock(regionLock, 3);
- assertTrue(regionLock.getWaitingProcedures().isEmpty());
+ LockedResource regionResource = resources.get(2);
+ assertLockResource(regionResource, LockedResourceType.REGION, regionInfo.getEncodedName());
+ assertExclusiveLock(regionResource, procedure);
+ assertTrue(regionResource.getWaitingProcedures().isEmpty());
}
@Test
@@ -1035,28 +1036,28 @@ public class TestMasterProcedureScheduler {
LockProcedure procedure3 = createExclusiveLockProcedure(3);
queue.waitTableExclusiveLock(procedure3, TableName.valueOf("ns4", "table4"));
- List<LockInfo> locks = queue.listLocks();
- assertEquals(2, locks.size());
+ List<LockedResource> resources = queue.getLocks();
+ assertEquals(2, resources.size());
- LockInfo namespaceLock = locks.get(0);
- assertLockResource(namespaceLock, LockInfo.ResourceType.NAMESPACE, "ns4");
- assertSharedLock(namespaceLock, 1);
- assertTrue(namespaceLock.getWaitingProcedures().isEmpty());
+ LockedResource namespaceResource = resources.get(0);
+ assertLockResource(namespaceResource, LockedResourceType.NAMESPACE, "ns4");
+ assertSharedLock(namespaceResource, 1);
+ assertTrue(namespaceResource.getWaitingProcedures().isEmpty());
- LockInfo tableLock = locks.get(1);
- assertLockResource(tableLock, LockInfo.ResourceType.TABLE, "ns4:table4");
- assertExclusiveLock(tableLock, 1);
+ LockedResource tableLock = resources.get(1);
+ assertLockResource(tableLock, LockedResourceType.TABLE, "ns4:table4");
+ assertExclusiveLock(tableLock, procedure1);
- List<WaitingProcedure> waitingProcedures = tableLock.getWaitingProcedures();
+ List<Procedure<?>> waitingProcedures = tableLock.getWaitingProcedures();
assertEquals(2, waitingProcedures.size());
- WaitingProcedure waitingProcedure1 = waitingProcedures.get(0);
- assertEquals(LockInfo.LockType.SHARED, waitingProcedure1.getLockType());
- assertEquals(2, waitingProcedure1.getProcedure().getProcId());
+ LockProcedure waitingProcedure2 = (LockProcedure) waitingProcedures.get(0);
+ assertEquals(LockType.SHARED, waitingProcedure2.getType());
+ assertEquals(procedure2, waitingProcedure2);
- WaitingProcedure waitingProcedure2 = waitingProcedures.get(1);
- assertEquals(LockInfo.LockType.EXCLUSIVE, waitingProcedure2.getLockType());
- assertEquals(3, waitingProcedure2.getProcedure().getProcId());
+ LockProcedure waitingProcedure3 = (LockProcedure) waitingProcedures.get(1);
+ assertEquals(LockType.EXCLUSIVE, waitingProcedure3.getType());
+ assertEquals(procedure3, waitingProcedure3);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
index 692815f..38a12e8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
@@ -193,7 +193,7 @@ public class TestProcedureAdmin {
}
@Test(timeout=60000)
- public void testListProcedure() throws Exception {
+ public void testGetProcedure() throws Exception {
final TableName tableName = TableName.valueOf(name.getMethodName());
final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();
@@ -206,10 +206,10 @@ public class TestProcedureAdmin {
// Wait for one step to complete
ProcedureTestingUtility.waitProcedure(procExec, procId);
- List<Procedure> listProcedures = procExec.listProcedures();
- assertTrue(listProcedures.size() >= 1);
+ List<Procedure<?>> procedures = procExec.getProcedures();
+ assertTrue(procedures.size() >= 1);
boolean found = false;
- for (Procedure proc: listProcedures) {
+ for (Procedure<?> proc: procedures) {
if (proc.getProcId() == procId) {
assertTrue(proc.isRunnable());
found = true;
@@ -223,8 +223,8 @@ public class TestProcedureAdmin {
ProcedureTestingUtility.restart(procExec);
ProcedureTestingUtility.waitNoProcedureRunning(procExec);
ProcedureTestingUtility.assertProcNotFailed(procExec, procId);
- listProcedures = procExec.listProcedures();
- for (Procedure proc: listProcedures) {
+ procedures = procExec.getProcedures();
+ for (Procedure proc: procedures) {
assertTrue(proc.isSuccess());
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureDescriber.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureDescriber.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureDescriber.java
new file mode 100644
index 0000000..1cf33c4
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureDescriber.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.procedure;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+import java.util.Date;
+
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.master.procedure.ProcedureDescriber;
+import org.apache.hadoop.hbase.procedure2.Procedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
+import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
+import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue;
+import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({MasterTests.class, SmallTests.class})
+public class TestProcedureDescriber {
+ public static class TestProcedure extends Procedure {
+ @Override
+ protected Procedure[] execute(Object env) throws ProcedureYieldException,
+ ProcedureSuspendedException, InterruptedException {
+ return null;
+ }
+
+ @Override
+ protected void rollback(Object env)
+ throws IOException, InterruptedException {
+ }
+
+ @Override
+ protected boolean abort(Object env) {
+ return false;
+ }
+
+ @Override
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ ByteString byteString = ByteString.copyFrom(new byte[] { 'A' });
+ BytesValue state = BytesValue.newBuilder().setValue(byteString).build();
+ serializer.serialize(state);
+ }
+
+ @Override
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ }
+ }
+
+ @Test
+ public void test() {
+ TestProcedure procedure = new TestProcedure();
+ String result = ProcedureDescriber.describe(procedure);
+
+ Date epoch = new Date(0);
+
+ assertEquals("{ ID => '-1', PARENT_ID => '-1', STATE => 'INITIALIZING', OWNER => '', "
+ + "TYPE => 'org.apache.hadoop.hbase.procedure.TestProcedureDescriber$TestProcedure', "
+ + "START_TIME => '" + epoch + "', LAST_UPDATE => '" + epoch + "', PARAMETERS => [ "
+ + "{ value => 'QQ==' } ] }", result);
+ }
+}
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
index 153babf..364055c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.protobuf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
import java.io.IOException;
import java.nio.ByteBuffer;
@@ -30,15 +28,12 @@ import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.ProcedureInfo;
-import org.apache.hadoop.hbase.ProcedureState;
import org.apache.hadoop.hbase.ByteBufferKeyValue;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.procedure2.LockInfo;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
@@ -341,40 +336,4 @@ public class TestProtobufUtil {
Cell newOffheapKV = ProtobufUtil.toCell(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), cell);
assertTrue(CellComparator.COMPARATOR.compare(offheapKV, newOffheapKV) == 0);
}
-
- private static ProcedureInfo createProcedureInfo(long procId)
- {
- return new ProcedureInfo(procId, "java.lang.Object", null,
- ProcedureState.RUNNABLE, -1, null, null, 0, 0, null);
- }
-
- private static void assertProcedureInfoEquals(ProcedureInfo expected,
- ProcedureInfo result)
- {
- if (expected == result) {
- return;
- } else if (expected == null || result == null) {
- fail();
- }
-
- assertEquals(expected.getProcId(), result.getProcId());
- }
-
- private static void assertLockInfoEquals(LockInfo expected, LockInfo result)
- {
- assertEquals(expected.getResourceType(), result.getResourceType());
- assertEquals(expected.getResourceName(), result.getResourceName());
- assertEquals(expected.getLockType(), result.getLockType());
- assertProcedureInfoEquals(expected.getExclusiveLockOwnerProcedure(),
- result.getExclusiveLockOwnerProcedure());
- assertEquals(expected.getSharedLockCount(), result.getSharedLockCount());
- }
-
- private static void assertWaitingProcedureEquals(
- LockInfo.WaitingProcedure expected, LockInfo.WaitingProcedure result)
- {
- assertEquals(expected.getLockType(), result.getLockType());
- assertProcedureInfoEquals(expected.getProcedure(),
- result.getProcedure());
- }
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index 1e38179..97b1633 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -32,8 +32,6 @@ import com.google.protobuf.Service;
import com.google.protobuf.ServiceException;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Arrays;
@@ -58,7 +56,6 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.NamespaceDescriptor;
-import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
@@ -102,12 +99,12 @@ import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.master.locking.LockProcedure;
-import org.apache.hadoop.hbase.master.locking.LockProcedure.LockType;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.master.procedure.TableProcedureInterface;
+import org.apache.hadoop.hbase.procedure2.LockType;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureUtil;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
@@ -578,17 +575,19 @@ public class TestAccessController extends SecureTestUtil {
}
@Override
- protected void serializeStateData(OutputStream stream) throws IOException {
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
TestProcedureProtos.TestTableDDLStateData.Builder testTableDDLMsg =
TestProcedureProtos.TestTableDDLStateData.newBuilder()
.setTableName(tableName.getNameAsString());
- testTableDDLMsg.build().writeDelimitedTo(stream);
+ serializer.serialize(testTableDDLMsg.build());
}
@Override
- protected void deserializeStateData(InputStream stream) throws IOException {
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
TestProcedureProtos.TestTableDDLStateData testTableDDLMsg =
- TestProcedureProtos.TestTableDDLStateData.parseDelimitedFrom(stream);
+ serializer.deserialize(TestProcedureProtos.TestTableDDLStateData.class);
tableName = TableName.valueOf(testTableDDLMsg.getTableName());
}
@@ -630,32 +629,43 @@ public class TestAccessController extends SecureTestUtil {
}
@Test
- public void testListProcedures() throws Exception {
+ public void testGetProcedures() throws Exception {
final TableName tableName = TableName.valueOf(name.getMethodName());
final ProcedureExecutor<MasterProcedureEnv> procExec =
TEST_UTIL.getHBaseCluster().getMaster().getMasterProcedureExecutor();
Procedure proc = new TestTableDDLProcedure(procExec.getEnvironment(), tableName);
proc.setOwner(USER_OWNER);
procExec.submitProcedure(proc);
- final List<Procedure> procList = procExec.listProcedures();
+ final List<Procedure<?>> procList = procExec.getProcedures();
- AccessTestAction listProceduresAction = new AccessTestAction() {
+ AccessTestAction getProceduresAction = new AccessTestAction() {
@Override
public Object run() throws Exception {
- List<ProcedureInfo> procInfoList = new ArrayList<>(procList.size());
- for(Procedure p : procList) {
- procInfoList.add(ProcedureUtil.convertToProcedureInfo(p));
- }
ACCESS_CONTROLLER
- .postListProcedures(ObserverContext.createAndPrepare(CP_ENV, null), procInfoList);
+ .postGetProcedures(ObserverContext.createAndPrepare(CP_ENV, null), procList);
return null;
}
};
- verifyAllowed(listProceduresAction, SUPERUSER, USER_ADMIN, USER_GROUP_ADMIN);
- verifyAllowed(listProceduresAction, USER_OWNER);
+ verifyAllowed(getProceduresAction, SUPERUSER, USER_ADMIN, USER_GROUP_ADMIN);
+ verifyAllowed(getProceduresAction, USER_OWNER);
verifyIfNull(
- listProceduresAction, USER_RW, USER_RO, USER_NONE, USER_GROUP_READ, USER_GROUP_WRITE);
+ getProceduresAction, USER_RW, USER_RO, USER_NONE, USER_GROUP_READ, USER_GROUP_WRITE);
+ }
+
+ @Test (timeout=180000)
+ public void testGetLocks() throws Exception {
+ AccessTestAction action = new AccessTestAction() {
+ @Override
+ public Object run() throws Exception {
+ ACCESS_CONTROLLER.preGetLocks(ObserverContext.createAndPrepare(CP_ENV, null));
+ return null;
+ }
+ };
+
+ verifyAllowed(action, SUPERUSER, USER_ADMIN, USER_GROUP_ADMIN);
+ verifyDenied(action, USER_CREATE, USER_OWNER, USER_RW, USER_RO, USER_NONE,
+ USER_GROUP_READ, USER_GROUP_WRITE, USER_GROUP_CREATE);
}
@Test (timeout=180000)
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-server/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java
deleted file mode 100644
index c5ad1cc..0000000
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java
+++ /dev/null
@@ -1,460 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.shaded.protobuf;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellBuilderFactory;
-import org.apache.hadoop.hbase.CellBuilderType;
-import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.ProcedureInfo;
-import org.apache.hadoop.hbase.ProcedureState;
-import org.apache.hadoop.hbase.ByteBufferKeyValue;
-import org.apache.hadoop.hbase.client.Append;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Increment;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.procedure2.LockInfo;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos;
-
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-
-@Category(SmallTests.class)
-public class TestProtobufUtil {
- @Test
- public void testException() throws IOException {
- NameBytesPair.Builder builder = NameBytesPair.newBuilder();
- final String omg = "OMG!!!";
- builder.setName("java.io.IOException");
- builder.setValue(ByteString.copyFrom(Bytes.toBytes(omg)));
- Throwable t = ProtobufUtil.toException(builder.build());
- assertEquals(omg, t.getMessage());
- builder.clear();
- builder.setName("org.apache.hadoop.ipc.RemoteException");
- builder.setValue(ByteString.copyFrom(Bytes.toBytes(omg)));
- t = ProtobufUtil.toException(builder.build());
- assertEquals(omg, t.getMessage());
- }
-
- /**
- * Test basic Get conversions.
- *
- * @throws IOException
- */
- @Test
- public void testGet() throws IOException {
- ClientProtos.Get.Builder getBuilder = ClientProtos.Get.newBuilder();
- getBuilder.setRow(ByteString.copyFromUtf8("row"));
- Column.Builder columnBuilder = Column.newBuilder();
- columnBuilder.setFamily(ByteString.copyFromUtf8("f1"));
- columnBuilder.addQualifier(ByteString.copyFromUtf8("c1"));
- columnBuilder.addQualifier(ByteString.copyFromUtf8("c2"));
- getBuilder.addColumn(columnBuilder.build());
-
- columnBuilder.clear();
- columnBuilder.setFamily(ByteString.copyFromUtf8("f2"));
- getBuilder.addColumn(columnBuilder.build());
- getBuilder.setLoadColumnFamiliesOnDemand(true);
- ClientProtos.Get proto = getBuilder.build();
- // default fields
- assertEquals(1, proto.getMaxVersions());
- assertEquals(true, proto.getCacheBlocks());
-
- // set the default value for equal comparison
- getBuilder = ClientProtos.Get.newBuilder(proto);
- getBuilder.setMaxVersions(1);
- getBuilder.setCacheBlocks(true);
-
- Get get = ProtobufUtil.toGet(proto);
- assertEquals(getBuilder.build(), ProtobufUtil.toGet(get));
- }
-
- /**
- * Test Delete Mutate conversions.
- *
- * @throws IOException
- */
- @Test
- public void testDelete() throws IOException {
- MutationProto.Builder mutateBuilder = MutationProto.newBuilder();
- mutateBuilder.setRow(ByteString.copyFromUtf8("row"));
- mutateBuilder.setMutateType(MutationType.DELETE);
- mutateBuilder.setTimestamp(111111);
- ColumnValue.Builder valueBuilder = ColumnValue.newBuilder();
- valueBuilder.setFamily(ByteString.copyFromUtf8("f1"));
- QualifierValue.Builder qualifierBuilder = QualifierValue.newBuilder();
- qualifierBuilder.setQualifier(ByteString.copyFromUtf8("c1"));
- qualifierBuilder.setDeleteType(DeleteType.DELETE_ONE_VERSION);
- qualifierBuilder.setTimestamp(111222);
- valueBuilder.addQualifierValue(qualifierBuilder.build());
- qualifierBuilder.setQualifier(ByteString.copyFromUtf8("c2"));
- qualifierBuilder.setDeleteType(DeleteType.DELETE_MULTIPLE_VERSIONS);
- qualifierBuilder.setTimestamp(111333);
- valueBuilder.addQualifierValue(qualifierBuilder.build());
- mutateBuilder.addColumnValue(valueBuilder.build());
-
- MutationProto proto = mutateBuilder.build();
- // default fields
- assertEquals(MutationProto.Durability.USE_DEFAULT, proto.getDurability());
-
- // set the default value for equal comparison
- mutateBuilder = MutationProto.newBuilder(proto);
- mutateBuilder.setDurability(MutationProto.Durability.USE_DEFAULT);
-
- Delete delete = ProtobufUtil.toDelete(proto);
-
- // delete always have empty value,
- // add empty value to the original mutate
- for (ColumnValue.Builder column:
- mutateBuilder.getColumnValueBuilderList()) {
- for (QualifierValue.Builder qualifier:
- column.getQualifierValueBuilderList()) {
- qualifier.setValue(ByteString.EMPTY);
- }
- }
- assertEquals(mutateBuilder.build(),
- ProtobufUtil.toMutation(MutationType.DELETE, delete));
- }
-
- /**
- * Test Put Mutate conversions.
- *
- * @throws IOException
- */
- @Test
- public void testPut() throws IOException {
- MutationProto.Builder mutateBuilder = MutationProto.newBuilder();
- mutateBuilder.setRow(ByteString.copyFromUtf8("row"));
- mutateBuilder.setMutateType(MutationType.PUT);
- mutateBuilder.setTimestamp(111111);
- ColumnValue.Builder valueBuilder = ColumnValue.newBuilder();
- valueBuilder.setFamily(ByteString.copyFromUtf8("f1"));
- QualifierValue.Builder qualifierBuilder = QualifierValue.newBuilder();
- qualifierBuilder.setQualifier(ByteString.copyFromUtf8("c1"));
- qualifierBuilder.setValue(ByteString.copyFromUtf8("v1"));
- valueBuilder.addQualifierValue(qualifierBuilder.build());
- qualifierBuilder.setQualifier(ByteString.copyFromUtf8("c2"));
- qualifierBuilder.setValue(ByteString.copyFromUtf8("v2"));
- qualifierBuilder.setTimestamp(222222);
- valueBuilder.addQualifierValue(qualifierBuilder.build());
- mutateBuilder.addColumnValue(valueBuilder.build());
-
- MutationProto proto = mutateBuilder.build();
- // default fields
- assertEquals(MutationProto.Durability.USE_DEFAULT, proto.getDurability());
-
- // set the default value for equal comparison
- mutateBuilder = MutationProto.newBuilder(proto);
- mutateBuilder.setDurability(MutationProto.Durability.USE_DEFAULT);
-
- Put put = ProtobufUtil.toPut(proto);
-
- // put value always use the default timestamp if no
- // value level timestamp specified,
- // add the timestamp to the original mutate
- long timestamp = put.getTimeStamp();
- for (ColumnValue.Builder column:
- mutateBuilder.getColumnValueBuilderList()) {
- for (QualifierValue.Builder qualifier:
- column.getQualifierValueBuilderList()) {
- if (!qualifier.hasTimestamp()) {
- qualifier.setTimestamp(timestamp);
- }
- }
- }
- assertEquals(mutateBuilder.build(),
- ProtobufUtil.toMutation(MutationType.PUT, put));
- }
-
- /**
- * Test basic Scan conversions.
- *
- * @throws IOException
- */
- @Test
- public void testScan() throws IOException {
- ClientProtos.Scan.Builder scanBuilder = ClientProtos.Scan.newBuilder();
- scanBuilder.setStartRow(ByteString.copyFromUtf8("row1"));
- scanBuilder.setStopRow(ByteString.copyFromUtf8("row2"));
- Column.Builder columnBuilder = Column.newBuilder();
- columnBuilder.setFamily(ByteString.copyFromUtf8("f1"));
- columnBuilder.addQualifier(ByteString.copyFromUtf8("c1"));
- columnBuilder.addQualifier(ByteString.copyFromUtf8("c2"));
- scanBuilder.addColumn(columnBuilder.build());
-
- columnBuilder.clear();
- columnBuilder.setFamily(ByteString.copyFromUtf8("f2"));
- scanBuilder.addColumn(columnBuilder.build());
-
- ClientProtos.Scan proto = scanBuilder.build();
-
- // Verify default values
- assertEquals(1, proto.getMaxVersions());
- assertEquals(true, proto.getCacheBlocks());
-
- // Verify fields survive ClientProtos.Scan -> Scan -> ClientProtos.Scan
- // conversion
- scanBuilder = ClientProtos.Scan.newBuilder(proto);
- scanBuilder.setMaxVersions(2);
- scanBuilder.setCacheBlocks(false);
- scanBuilder.setCaching(1024);
- ClientProtos.Scan expectedProto = scanBuilder.build();
-
- ClientProtos.Scan actualProto = ProtobufUtil.toScan(
- ProtobufUtil.toScan(expectedProto));
- assertEquals(expectedProto, actualProto);
- }
-
- @Test
- public void testToCell() throws Exception {
- KeyValue kv1 =
- new KeyValue(Bytes.toBytes("aaa"), Bytes.toBytes("f1"), Bytes.toBytes("q1"), new byte[30]);
- KeyValue kv2 =
- new KeyValue(Bytes.toBytes("bbb"), Bytes.toBytes("f1"), Bytes.toBytes("q1"), new byte[30]);
- KeyValue kv3 =
- new KeyValue(Bytes.toBytes("ccc"), Bytes.toBytes("f1"), Bytes.toBytes("q1"), new byte[30]);
- byte[] arr = new byte[kv1.getLength() + kv2.getLength() + kv3.getLength()];
- System.arraycopy(kv1.getBuffer(), kv1.getOffset(), arr, 0, kv1.getLength());
- System.arraycopy(kv2.getBuffer(), kv2.getOffset(), arr, kv1.getLength(), kv2.getLength());
- System.arraycopy(kv3.getBuffer(), kv3.getOffset(), arr, kv1.getLength() + kv2.getLength(),
- kv3.getLength());
- ByteBuffer dbb = ByteBuffer.allocateDirect(arr.length);
- dbb.put(arr);
- ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(dbb, kv1.getLength(), kv2.getLength());
- CellProtos.Cell cell = ProtobufUtil.toCell(offheapKV);
- Cell newOffheapKV = ProtobufUtil.toCell(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), cell);
- assertTrue(CellComparator.COMPARATOR.compare(offheapKV, newOffheapKV) == 0);
- }
-
- public TestProtobufUtil() {
- }
-
- private static ProcedureInfo createProcedureInfo(long procId)
- {
- return new ProcedureInfo(procId, "java.lang.Object", null,
- ProcedureState.RUNNABLE, -1, null, null, 0, 0, null);
- }
-
- private static void assertProcedureInfoEquals(ProcedureInfo expected,
- ProcedureInfo result)
- {
- if (expected == result) {
- return;
- } else if (expected == null || result == null) {
- fail();
- }
-
- assertEquals(expected.getProcId(), result.getProcId());
- }
-
- private static void assertLockInfoEquals(LockInfo expected, LockInfo result)
- {
- assertEquals(expected.getResourceType(), result.getResourceType());
- assertEquals(expected.getResourceName(), result.getResourceName());
- assertEquals(expected.getLockType(), result.getLockType());
- assertProcedureInfoEquals(expected.getExclusiveLockOwnerProcedure(),
- result.getExclusiveLockOwnerProcedure());
- assertEquals(expected.getSharedLockCount(), result.getSharedLockCount());
- }
-
- private static void assertWaitingProcedureEquals(
- LockInfo.WaitingProcedure expected, LockInfo.WaitingProcedure result)
- {
- assertEquals(expected.getLockType(), result.getLockType());
- assertProcedureInfoEquals(expected.getProcedure(),
- result.getProcedure());
- }
-
- @Test
- public void testServerLockInfo() {
- LockInfo lock = new LockInfo();
- lock.setResourceType(LockInfo.ResourceType.SERVER);
- lock.setResourceName("server");
- lock.setLockType(LockInfo.LockType.SHARED);
- lock.setSharedLockCount(2);
-
- LockServiceProtos.LockInfo proto = ProtobufUtil.toProtoLockInfo(lock);
- LockInfo lock2 = ProtobufUtil.toLockInfo(proto);
-
- assertLockInfoEquals(lock, lock2);
- }
-
- @Test
- public void testNamespaceLockInfo() {
- LockInfo lock = new LockInfo();
- lock.setResourceType(LockInfo.ResourceType.NAMESPACE);
- lock.setResourceName("ns");
- lock.setLockType(LockInfo.LockType.EXCLUSIVE);
- lock.setExclusiveLockOwnerProcedure(createProcedureInfo(2));
-
- LockServiceProtos.LockInfo proto = ProtobufUtil.toProtoLockInfo(lock);
- LockInfo lock2 = ProtobufUtil.toLockInfo(proto);
-
- assertLockInfoEquals(lock, lock2);
- }
-
- @Test
- public void testTableLockInfo() {
- LockInfo lock = new LockInfo();
- lock.setResourceType(LockInfo.ResourceType.TABLE);
- lock.setResourceName("table");
- lock.setLockType(LockInfo.LockType.SHARED);
- lock.setSharedLockCount(2);
-
- LockServiceProtos.LockInfo proto = ProtobufUtil.toProtoLockInfo(lock);
- LockInfo lock2 = ProtobufUtil.toLockInfo(proto);
-
- assertLockInfoEquals(lock, lock2);
- }
-
- @Test
- public void testRegionLockInfo() {
- LockInfo lock = new LockInfo();
- lock.setResourceType(LockInfo.ResourceType.REGION);
- lock.setResourceName("region");
- lock.setLockType(LockInfo.LockType.EXCLUSIVE);
- lock.setExclusiveLockOwnerProcedure(createProcedureInfo(2));
-
- LockServiceProtos.LockInfo proto = ProtobufUtil.toProtoLockInfo(lock);
- LockInfo lock2 = ProtobufUtil.toLockInfo(proto);
-
- assertLockInfoEquals(lock, lock2);
- }
-
- @Test
- public void testExclusiveWaitingLockInfo() {
- LockInfo.WaitingProcedure waitingProcedure = new LockInfo.WaitingProcedure();
- waitingProcedure.setLockType(LockInfo.LockType.EXCLUSIVE);
- waitingProcedure.setProcedure(createProcedureInfo(1));
-
- LockServiceProtos.WaitingProcedure proto = ProtobufUtil.toProtoWaitingProcedure(waitingProcedure);
- LockInfo.WaitingProcedure waitingProcedure2 = ProtobufUtil.toWaitingProcedure(proto);
-
- assertWaitingProcedureEquals(waitingProcedure, waitingProcedure2);
- }
-
- @Test
- public void testSharedWaitingLockInfo() {
- LockInfo.WaitingProcedure waitingProcedure = new LockInfo.WaitingProcedure();
- waitingProcedure.setLockType(LockInfo.LockType.SHARED);
- waitingProcedure.setProcedure(createProcedureInfo(2));
-
- LockServiceProtos.WaitingProcedure proto = ProtobufUtil.toProtoWaitingProcedure(waitingProcedure);
- LockInfo.WaitingProcedure waitingProcedure2 = ProtobufUtil.toWaitingProcedure(proto);
-
- assertWaitingProcedureEquals(waitingProcedure, waitingProcedure2);
- }
-
- /**
- * Test Increment Mutate conversions.
- *
- * @throws IOException
- */
- @Test
- public void testIncrement() throws IOException {
- long timeStamp = 111111;
- MutationProto.Builder mutateBuilder = MutationProto.newBuilder();
- mutateBuilder.setRow(ByteString.copyFromUtf8("row"));
- mutateBuilder.setMutateType(MutationProto.MutationType.INCREMENT);
- ColumnValue.Builder valueBuilder = ColumnValue.newBuilder();
- valueBuilder.setFamily(ByteString.copyFromUtf8("f1"));
- QualifierValue.Builder qualifierBuilder = QualifierValue.newBuilder();
- qualifierBuilder.setQualifier(ByteString.copyFromUtf8("c1"));
- qualifierBuilder.setValue(ByteString.copyFrom(Bytes.toBytes(11L)));
- qualifierBuilder.setTimestamp(timeStamp);
- valueBuilder.addQualifierValue(qualifierBuilder.build());
- qualifierBuilder.setQualifier(ByteString.copyFromUtf8("c2"));
- qualifierBuilder.setValue(ByteString.copyFrom(Bytes.toBytes(22L)));
- valueBuilder.addQualifierValue(qualifierBuilder.build());
- mutateBuilder.addColumnValue(valueBuilder.build());
-
- MutationProto proto = mutateBuilder.build();
- // default fields
- assertEquals(MutationProto.Durability.USE_DEFAULT, proto.getDurability());
-
- // set the default value for equal comparison
- mutateBuilder = MutationProto.newBuilder(proto);
- mutateBuilder.setDurability(MutationProto.Durability.USE_DEFAULT);
-
- Increment increment = ProtobufUtil.toIncrement(proto, null);
- mutateBuilder.setTimestamp(increment.getTimeStamp());
- assertEquals(mutateBuilder.build(), ProtobufUtil.toMutation(MutationType.INCREMENT, increment));
- }
-
- /**
- * Test Append Mutate conversions.
- *
- * @throws IOException
- */
- @Test
- public void testAppend() throws IOException {
- long timeStamp = 111111;
- MutationProto.Builder mutateBuilder = MutationProto.newBuilder();
- mutateBuilder.setRow(ByteString.copyFromUtf8("row"));
- mutateBuilder.setMutateType(MutationType.APPEND);
- mutateBuilder.setTimestamp(timeStamp);
- ColumnValue.Builder valueBuilder = ColumnValue.newBuilder();
- valueBuilder.setFamily(ByteString.copyFromUtf8("f1"));
- QualifierValue.Builder qualifierBuilder = QualifierValue.newBuilder();
- qualifierBuilder.setQualifier(ByteString.copyFromUtf8("c1"));
- qualifierBuilder.setValue(ByteString.copyFromUtf8("v1"));
- qualifierBuilder.setTimestamp(timeStamp);
- valueBuilder.addQualifierValue(qualifierBuilder.build());
- qualifierBuilder.setQualifier(ByteString.copyFromUtf8("c2"));
- qualifierBuilder.setValue(ByteString.copyFromUtf8("v2"));
- valueBuilder.addQualifierValue(qualifierBuilder.build());
- mutateBuilder.addColumnValue(valueBuilder.build());
-
- MutationProto proto = mutateBuilder.build();
- // default fields
- assertEquals(MutationProto.Durability.USE_DEFAULT, proto.getDurability());
-
- // set the default value for equal comparison
- mutateBuilder = MutationProto.newBuilder(proto);
- mutateBuilder.setDurability(MutationProto.Durability.USE_DEFAULT);
-
- Append append = ProtobufUtil.toAppend(proto, null);
-
- // append always use the latest timestamp,
- // reset the timestamp to the original mutate
- mutateBuilder.setTimestamp(append.getTimeStamp());
- assertEquals(mutateBuilder.build(), ProtobufUtil.toMutation(MutationType.APPEND, append));
- }
-}
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-shell/src/main/ruby/hbase/admin.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb b/hbase-shell/src/main/ruby/hbase/admin.rb
index 2aacd7f..1dfa0c1 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -1198,12 +1198,12 @@ module Hbase
# List all procedures
def list_procedures
- @admin.listProcedures
+ @admin.getProcedures
end
# List all locks
def list_locks
- @admin.listLocks
+ @admin.getLocks
end
# Parse arguments and update HTableDescriptor accordingly
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-shell/src/main/ruby/shell/commands/list_locks.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/shell/commands/list_locks.rb b/hbase-shell/src/main/ruby/shell/commands/list_locks.rb
index a7f7b73..1bebdd5 100644
--- a/hbase-shell/src/main/ruby/shell/commands/list_locks.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/list_locks.rb
@@ -17,6 +17,8 @@
# limitations under the License.
#
+require 'json'
+
module Shell
module Commands
class ListLocks < Command
@@ -29,27 +31,28 @@ EOF
end
def command
- list = admin.list_locks
+ list = JSON.parse(admin.list_locks)
list.each do |lock|
- formatter.output_strln("#{lock.resourceType}(#{lock.resourceName})")
-
- case lock.lockType
- when org.apache.hadoop.hbase.procedure2.LockInfo::LockType::EXCLUSIVE then
- formatter.output_strln("Lock type: EXCLUSIVE, procedure: #{lock.exclusiveLockOwnerProcedure.procId}")
- when org.apache.hadoop.hbase.procedure2.LockInfo::LockType::SHARED then
- formatter.output_strln("Lock type: SHARED, count: #{lock.sharedLockCount}")
+ formatter.output_strln("#{lock['resourceType']}(#{lock['resourceName']})")
+
+ case lock['lockType']
+ when 'EXCLUSIVE' then
+ formatter.output_strln("Lock type: #{lock['lockType']}, " \
+ "procedure: #{lock['exclusiveLockOwnerProcedure']}")
+ when 'SHARED' then
+ formatter.output_strln("Lock type: #{lock['lockType']}, " \
+ "count: #{lock['sharedLockCount']}")
end
- if lock.waitingProcedures.any?
- formatter.output_strln('Waiting procedures:')
- formatter.header(['Lock type', 'Procedure Id'])
+ if lock['waitingProcedures']
+ formatter.header(['Waiting procedures'])
- lock.waitingProcedures.each do |waitingProcedure|
- formatter.row([waitingProcedure.lockType.to_s, waitingProcedure.procedure.procId.to_s])
+ lock['waitingProcedures'].each do |waiting_procedure|
+ formatter.row([waiting_procedure])
end
- formatter.footer(lock.waitingProcedures.size)
+ formatter.footer(lock['waitingProcedures'].size)
end
formatter.output_strln('')
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-shell/src/main/ruby/shell/commands/list_procedures.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/shell/commands/list_procedures.rb b/hbase-shell/src/main/ruby/shell/commands/list_procedures.rb
index a2bec37..77335b8 100644
--- a/hbase-shell/src/main/ruby/shell/commands/list_procedures.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/list_procedures.rb
@@ -17,6 +17,8 @@
# limitations under the License.
#
+require 'json'
+
module Shell
module Commands
class ListProcedures < Command
@@ -29,13 +31,15 @@ EOF
end
def command
- formatter.header(%w[Id Name State Submitted_Time Last_Update])
+ formatter.header(%w[Id Name State Submitted_Time Last_Update Parameters])
- list = admin.list_procedures
+ list = JSON.parse(admin.list_procedures)
list.each do |proc|
- submitted_time = Time.at(proc.getSubmittedTime / 1000).to_s
- last_update = Time.at(proc.getLastUpdate / 1000).to_s
- formatter.row([proc.getProcId, proc.getProcName, proc.getProcState, submitted_time, last_update])
+ formatter.row([proc])
+ submitted_time = Time.at(Integer(proc['submittedTime']) / 1000).to_s
+ last_update = Time.at(Integer(proc['lastUpdate']) / 1000).to_s
+ formatter.row([proc['procId'], proc['className'], proc['state'],
+ submitted_time, last_update, proc['stateMessage']])
end
formatter.footer(list.size)
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestReplicationShell.java
----------------------------------------------------------------------
diff --git a/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestReplicationShell.java b/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestReplicationShell.java
index 4279d89..645119a 100644
--- a/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestReplicationShell.java
+++ b/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/TestReplicationShell.java
@@ -33,4 +33,4 @@ public class TestReplicationShell extends AbstractTestShell {
// Start all ruby tests
jruby.runScriptlet(PathType.ABSOLUTE, "src/test/ruby/tests_runner.rb");
}
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/procedure/ShellTestProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/procedure/ShellTestProcedure.java b/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/procedure/ShellTestProcedure.java
new file mode 100644
index 0000000..742fd91
--- /dev/null
+++ b/hbase-shell/src/test/java/org/apache/hadoop/hbase/client/procedure/ShellTestProcedure.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client.procedure;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.master.procedure.TableProcedureInterface;
+import org.apache.hadoop.hbase.procedure2.Procedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
+import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
+import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue;
+
+public class ShellTestProcedure extends Procedure<Object> implements TableProcedureInterface {
+ private String tableNameString;
+
+ public ShellTestProcedure() {
+ }
+
+ public ShellTestProcedure(String tableNameString) {
+ setTableNameString(tableNameString);
+ }
+
+ public String getTableNameString() {
+ return tableNameString;
+ }
+
+ public void setTableNameString(String tableNameString) {
+ this.tableNameString = tableNameString;
+ }
+
+ @Override
+ public TableName getTableName() {
+ return TableName.valueOf(tableNameString);
+ }
+
+ @Override
+ public TableOperationType getTableOperationType() {
+ return TableOperationType.EDIT;
+ }
+
+ @Override
+ protected Procedure<Object>[] execute(Object env)
+ throws ProcedureYieldException, ProcedureSuspendedException,
+ InterruptedException {
+ return null;
+ }
+
+ @Override
+ protected void rollback(Object env) throws IOException, InterruptedException {
+ }
+
+ @Override
+ protected boolean abort(Object env) {
+ return false;
+ }
+
+ @Override
+ protected void serializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ StringValue message = StringValue.newBuilder().setValue(tableNameString).build();
+ serializer.serialize(message);
+ }
+
+ @Override
+ protected void deserializeStateData(ProcedureStateSerializer serializer)
+ throws IOException {
+ StringValue message = serializer.deserialize(StringValue.class);
+ tableNameString = message.getValue();
+ }
+}
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-shell/src/test/ruby/shell/list_locks_test.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/test/ruby/shell/list_locks_test.rb b/hbase-shell/src/test/ruby/shell/list_locks_test.rb
index fe132db..f465a6b 100644
--- a/hbase-shell/src/test/ruby/shell/list_locks_test.rb
+++ b/hbase-shell/src/test/ruby/shell/list_locks_test.rb
@@ -20,133 +20,175 @@
require 'hbase_constants'
require 'shell'
-class ListLocksTest < Test::Unit::TestCase
- def setup
- @hbase = ::Hbase::Hbase.new($TEST_CLUSTER.getConfiguration)
- @shell = Shell::Shell.new(@hbase)
- @master = $TEST_CLUSTER.getHBaseClusterInterface.getMaster
- @scheduler = @master.getMasterProcedureExecutor.getEnvironment.getProcedureScheduler
-
- @string_io = StringIO.new
-
- @list_locks = Shell::Commands::ListLocks.new(@shell)
- @list_locks.set_formatter(Shell::Formatter::Base.new({ :output_stream => @string_io }))
- end
-
- def set_field(object, field_name, value)
- field = object.getClass.getDeclaredField(field_name)
- field.setAccessible(true)
- field.set(object, value)
- end
-
- def create_lock(type, proc_id)
- lock = org.apache.hadoop.hbase.master.locking.LockProcedure.new()
- set_field(lock, "type", type)
- lock.procId = proc_id
-
- return lock
- end
-
- def create_exclusive_lock(proc_id)
- return create_lock(org.apache.hadoop.hbase.master.locking.LockProcedure::LockType::EXCLUSIVE, proc_id)
+module Hbase
+ class ListLocksTest < Test::Unit::TestCase
+ include TestHelpers
+
+ # Boot the shared test cluster and grab the master's procedure scheduler so
+ # each test can enqueue/release lock procedures directly.
+ def setup
+ setup_hbase
+
+ @master = $TEST_CLUSTER.getHBaseClusterInterface.getMaster
+ @scheduler = @master.getMasterProcedureExecutor.getEnvironment.getProcedureScheduler
+
+ @list_locks = Shell::Commands::ListLocks.new(@shell)
+ end
+
+ # Shut the test cluster down after each test.
+ def teardown
+ shutdown
+ end
+
+ # Reflectively set a private Java field on +object+; LockProcedure exposes no
+ # public setters for the fields these tests need to fake.
+ def set_field(object, field_name, value)
+ field = object.getClass.getDeclaredField(field_name)
+ field.setAccessible(true)
+ field.set(object, value)
+ end
+
+ # Build a LockProcedure with deterministic id/timestamps (proc_id, time 0) so
+ # the expected shell output can be asserted verbatim in the tests below.
+ def create_lock(type, op_type, proc_id)
+ lock = org.apache.hadoop.hbase.master.locking.LockProcedure.new
+ set_field(lock, 'type', type)
+ set_field(lock, 'opType', op_type)
+ set_field(lock, 'description', 'description')
+ lock.procId = proc_id
+ lock.submittedTime = 0
+ lock.lastUpdate = 0
+
+ lock
+ end
+
+ # Convenience: an EXCLUSIVE lock with an EDIT table operation type.
+ def create_exclusive_lock(proc_id)
+ create_lock(org.apache.hadoop.hbase.procedure2.LockType::EXCLUSIVE,
+ org.apache.hadoop.hbase.master.procedure.TableProcedureInterface::TableOperationType::EDIT,
+ proc_id)
+ end
+
+ # Convenience: a SHARED lock with a READ table operation type.
+ def create_shared_lock(proc_id)
+ create_lock(org.apache.hadoop.hbase.procedure2.LockType::SHARED,
+ org.apache.hadoop.hbase.master.procedure.TableProcedureInterface::TableOperationType::READ,
+ proc_id)
+ end
+
+ # Acquire a server-level exclusive lock, capture `list_locks` output while it
+ # is held, release it, then compare against the exact expected text.
+ define_test 'list server locks' do
+ lock = create_exclusive_lock(0)
+
+ server_name = org.apache.hadoop.hbase.ServerName.valueOf('server1,1234,0')
+
+ @scheduler.waitServerExclusiveLock(lock, server_name)
+ output = capture_stdout { @list_locks.command }
+ @scheduler.wakeServerExclusiveLock(lock, server_name)
+
+ assert_equal(
+ "SERVER(server1,1234,0)\n" \
+ "Lock type: EXCLUSIVE, procedure: {" \
+ "\"className\"=>\"org.apache.hadoop.hbase.master.locking.LockProcedure\", " \
+ "\"procId\"=>\"0\", \"submittedTime\"=>\"0\", \"state\"=>\"RUNNABLE\", " \
+ "\"lastUpdate\"=>\"0\", " \
+ "\"stateMessage\"=>[{\"lockType\"=>\"EXCLUSIVE\", \"description\"=>\"description\"}]" \
+ "}\n\n",
+ output)
+ end
+
+ # Namespace exclusive lock: the expected output also shows the implicit
+ # SHARED lock taken on the hbase:namespace table.
+ define_test 'list namespace locks' do
+ lock = create_exclusive_lock(1)
+
+ @scheduler.waitNamespaceExclusiveLock(lock, 'ns1')
+ output = capture_stdout { @list_locks.command }
+ @scheduler.wakeNamespaceExclusiveLock(lock, 'ns1')
+
+ assert_equal(
+ "NAMESPACE(ns1)\n" \
+ "Lock type: EXCLUSIVE, procedure: {" \
+ "\"className\"=>\"org.apache.hadoop.hbase.master.locking.LockProcedure\", " \
+ "\"procId\"=>\"1\", \"submittedTime\"=>\"0\", \"state\"=>\"RUNNABLE\", " \
+ "\"lastUpdate\"=>\"0\", " \
+ "\"stateMessage\"=>[{\"lockType\"=>\"EXCLUSIVE\", \"description\"=>\"description\"}]" \
+ "}\n\n" \
+ "TABLE(hbase:namespace)\n" \
+ "Lock type: SHARED, count: 1\n\n",
+ output)
+ end
+
+ # Table exclusive lock: the expected output also shows the implicit SHARED
+ # lock taken on the table's namespace.
+ define_test 'list table locks' do
+ lock = create_exclusive_lock(2)
+
+ table_name = org.apache.hadoop.hbase.TableName.valueOf('ns2', 'table2')
+
+ @scheduler.waitTableExclusiveLock(lock, table_name)
+ output = capture_stdout { @list_locks.command }
+ @scheduler.wakeTableExclusiveLock(lock, table_name)
+
+ assert_equal(
+ "NAMESPACE(ns2)\n" \
+ "Lock type: SHARED, count: 1\n\n" \
+ "TABLE(ns2:table2)\n" \
+ "Lock type: EXCLUSIVE, procedure: {" \
+ "\"className\"=>\"org.apache.hadoop.hbase.master.locking.LockProcedure\", " \
+ "\"procId\"=>\"2\", \"submittedTime\"=>\"0\", \"state\"=>\"RUNNABLE\", " \
+ "\"lastUpdate\"=>\"0\", " \
+ "\"stateMessage\"=>[{\"lockType\"=>\"EXCLUSIVE\", \"description\"=>\"description\"}]" \
+ "}\n\n",
+ output)
+ end
+
+ # Region exclusive lock: expected output shows SHARED locks on both the
+ # namespace and the table, plus the EXCLUSIVE lock keyed by encoded region name.
+ define_test 'list region locks' do
+ lock = create_exclusive_lock(3)
+
+ table_name = org.apache.hadoop.hbase.TableName.valueOf('ns3', 'table3')
+ region_info = org.apache.hadoop.hbase.HRegionInfo.new(table_name)
+
+ @scheduler.waitRegion(lock, region_info)
+ output = capture_stdout { @list_locks.command }
+ @scheduler.wakeRegion(lock, region_info)
+
+ assert_equal(
+ "NAMESPACE(ns3)\n" \
+ "Lock type: SHARED, count: 1\n\n" \
+ "TABLE(ns3:table3)\n" \
+ "Lock type: SHARED, count: 1\n\n" \
+ "REGION(" << region_info.getEncodedName << ")\n" \
+ "Lock type: EXCLUSIVE, procedure: {" \
+ "\"className\"=>\"org.apache.hadoop.hbase.master.locking.LockProcedure\", " \
+ "\"procId\"=>\"3\", \"submittedTime\"=>\"0\", \"state\"=>\"RUNNABLE\", " \
+ "\"lastUpdate\"=>\"0\", " \
+ "\"stateMessage\"=>[{\"lockType\"=>\"EXCLUSIVE\", \"description\"=>\"description\"}]" \
+ "}\n\n",
+ output)
+ end
+
+ # A SHARED lock queued behind an EXCLUSIVE one shows up under "Waiting
+ # procedures". The tableName bytes are rendered base64-encoded:
+ # "bnM0" == "ns4", "dGFibGU0" == "table4".
+ define_test 'list waiting locks' do
+ table_name = org.apache.hadoop.hbase.TableName.valueOf('ns4', 'table4')
+
+ lock1 = create_exclusive_lock(1)
+ set_field(lock1, 'tableName', table_name)
+
+ lock2 = create_shared_lock(2)
+ set_field(lock2, 'tableName', table_name)
+
+ @scheduler.waitTableExclusiveLock(lock1, table_name)
+ @scheduler.waitTableSharedLock(lock2, table_name)
+ output = capture_stdout { @list_locks.command }
+ @scheduler.wakeTableExclusiveLock(lock1, table_name)
+ @scheduler.wakeTableSharedLock(lock2, table_name)
+
+ assert_equal(
+ "NAMESPACE(ns4)\n" \
+ "Lock type: SHARED, count: 1\n\n" \
+ "TABLE(ns4:table4)\n" \
+ "Lock type: EXCLUSIVE, procedure: {" \
+ "\"className\"=>\"org.apache.hadoop.hbase.master.locking.LockProcedure\", " \
+ "\"procId\"=>\"1\", \"submittedTime\"=>\"0\", \"state\"=>\"RUNNABLE\", " \
+ "\"lastUpdate\"=>\"0\", \"stateMessage\"=>[{" \
+ "\"lockType\"=>\"EXCLUSIVE\", " \
+ "\"tableName\"=>{\"namespace\"=>\"bnM0\", \"qualifier\"=>\"dGFibGU0\"" \
+ "}, \"description\"=>\"description\"}]}\n" \
+ "Waiting procedures\n" \
+ "{\"className\"=>\"org.apache.hadoop.hbase.master.locking.LockProcedure\", " \
+ "\"procId\"=>\"2\", \"submittedTime\"=>\"0\", \"state\"=>\"RUNNABLE\", " \
+ "\"lastUpdate\"=>\"0\", \"stateMessage\"=>[{" \
+ "\"lockType\"=>\"SHARED\", " \
+ "\"tableName\"=>{\"namespace\"=>\"bnM0\", \"qualifier\"=>\"dGFibGU0\"}, " \
+ "\"description\"=>\"description\"}]}\n" \
+ "1 row(s)\n\n",
+ output)
+ end
end
-
- def create_shared_lock(proc_id)
- return create_lock(org.apache.hadoop.hbase.master.locking.LockProcedure::LockType::SHARED, proc_id)
- end
-
- define_test "list server locks" do
- lock = create_exclusive_lock(0)
-
- server_name = org.apache.hadoop.hbase.ServerName.valueOf("server1,1234,0")
-
- @scheduler.waitServerExclusiveLock(lock, server_name)
- @list_locks.command()
- @scheduler.wakeServerExclusiveLock(lock, server_name)
-
- assert_equal(
- "SERVER(server1,1234,0)\n" <<
- "Lock type: EXCLUSIVE, procedure: 0\n\n",
- @string_io.string)
- end
-
- define_test "list namespace locks" do
- lock = create_exclusive_lock(1)
-
- @scheduler.waitNamespaceExclusiveLock(lock, "ns1")
- @list_locks.command()
- @scheduler.wakeNamespaceExclusiveLock(lock, "ns1")
-
- assert_equal(
- "NAMESPACE(ns1)\n" <<
- "Lock type: EXCLUSIVE, procedure: 1\n\n" <<
- "TABLE(hbase:namespace)\n" <<
- "Lock type: SHARED, count: 1\n\n",
- @string_io.string)
- end
-
- define_test "list table locks" do
- lock = create_exclusive_lock(2)
-
- table_name = org.apache.hadoop.hbase.TableName.valueOf("ns2", "table2")
-
- @scheduler.waitTableExclusiveLock(lock, table_name)
- @list_locks.command()
- @scheduler.wakeTableExclusiveLock(lock, table_name)
-
- assert_equal(
- "NAMESPACE(ns2)\n" <<
- "Lock type: SHARED, count: 1\n\n" <<
- "TABLE(ns2:table2)\n" <<
- "Lock type: EXCLUSIVE, procedure: 2\n\n",
- @string_io.string)
- end
-
- define_test "list region locks" do
- lock = create_exclusive_lock(3)
-
- table_name = org.apache.hadoop.hbase.TableName.valueOf("ns3", "table3")
- region_info = org.apache.hadoop.hbase.HRegionInfo.new(table_name)
-
- @scheduler.waitRegion(lock, region_info)
- @list_locks.command()
- @scheduler.wakeRegion(lock, region_info)
-
- assert_equal(
- "NAMESPACE(ns3)\n" <<
- "Lock type: SHARED, count: 1\n\n" <<
- "TABLE(ns3:table3)\n" <<
- "Lock type: SHARED, count: 1\n\n" <<
- "REGION(" << region_info.getEncodedName << ")\n" <<
- "Lock type: EXCLUSIVE, procedure: 3\n\n",
- @string_io.string)
- end
-
- define_test "list waiting locks" do
- table_name = org.apache.hadoop.hbase.TableName.valueOf("ns4", "table4")
-
- lock1 = create_exclusive_lock(1)
- set_field(lock1, "tableName", table_name)
-
- lock2 = create_shared_lock(2)
- set_field(lock2, "tableName", table_name)
-
- @scheduler.waitTableExclusiveLock(lock1, table_name)
- @scheduler.waitTableSharedLock(lock2, table_name)
- @list_locks.command()
- @scheduler.wakeTableExclusiveLock(lock1, table_name)
- @scheduler.wakeTableSharedLock(lock2, table_name)
-
- assert_equal(
- "NAMESPACE(ns4)\n" <<
- "Lock type: SHARED, count: 1\n\n" <<
- "TABLE(ns4:table4)\n" <<
- "Lock type: EXCLUSIVE, procedure: 1\n" <<
- "Waiting procedures:\n" <<
- "Lock type Procedure Id\n" <<
- " SHARED 2\n" <<
- "1 row(s)\n\n",
- @string_io.string)
- end
-
end
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-shell/src/test/ruby/shell/list_procedures_test.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/test/ruby/shell/list_procedures_test.rb b/hbase-shell/src/test/ruby/shell/list_procedures_test.rb
new file mode 100644
index 0000000..8d5c83d
--- /dev/null
+++ b/hbase-shell/src/test/ruby/shell/list_procedures_test.rb
@@ -0,0 +1,68 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+require 'hbase_constants'
+require 'shell'
+
+include HBaseConstants
+
+module Hbase
+ # Tests the shell's `list_procedures` command by submitting a
+ # ShellTestProcedure and asserting that exactly one output row matches.
+ class ListProceduresTest < Test::Unit::TestCase
+ include TestHelpers
+
+ # Boot the test cluster and grab the master's procedure executor.
+ def setup
+ setup_hbase
+
+ @master = $TEST_CLUSTER.getHBaseClusterInterface.getMaster
+ @executor = @master.getMasterProcedureExecutor
+
+ @list_procedures = Shell::Commands::ListProcedures.new(@shell)
+ end
+
+ def teardown
+ shutdown
+ end
+
+ # Regexp matching one rendered row for a completed ShellTestProcedure.
+ # NOTE(review): the second part is a double-quoted string, so Ruby drops the
+ # backslashes before [ and ] and `[{"value"=>...}]` becomes a regexp
+ # character class rather than literal brackets — the match is looser than it
+ # looks. Verify intent; a single-quoted string would keep the escapes.
+ def create_procedure_regexp(table_name)
+ regexp_string = '[0-9]+ .*ShellTestProcedure SUCCESS.*' \
+ "\[{\"value\"=>\"#{table_name}\"}\]";
+ Regexp.new(regexp_string)
+ end
+
+ define_test 'list procedures' do
+ procedure = org.apache.hadoop.hbase.client.procedure.ShellTestProcedure.new
+ procedure.tableNameString = 'table1'
+
+ @executor.submitProcedure(procedure)
+ output = capture_stdout { @list_procedures.command }
+
+ regexp = create_procedure_regexp('table1')
+ matching_lines = 0
+
+ # Count output lines describing the submitted procedure; expect exactly one.
+ lines = output.split(/\n/)
+ lines.each do |line|
+ if regexp.match(line)
+ matching_lines += 1
+ end
+ end
+
+ assert_equal(1, matching_lines)
+ end
+ end
+end