You are viewing a plain-text version of this content; the canonical HTML version is available from the Apache mailing-list archive (hyperlink removed during text extraction).
Posted to commits@hbase.apache.org by sy...@apache.org on 2015/09/24 06:48:58 UTC
[1/3] hbase git commit: HBASE-14107 Administrative Task: Provide an
API to List all procedures (Stephen Yuan Jiang)
Repository: hbase
Updated Branches:
refs/heads/master 5e26ae043 -> 5b7894f92
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-protocol/src/main/protobuf/Master.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/Master.proto b/hbase-protocol/src/main/protobuf/Master.proto
index 2cd0b5f..c616840 100644
--- a/hbase-protocol/src/main/protobuf/Master.proto
+++ b/hbase-protocol/src/main/protobuf/Master.proto
@@ -30,6 +30,7 @@ import "HBase.proto";
import "Client.proto";
import "ClusterStatus.proto";
import "ErrorHandling.proto";
+import "Procedure.proto";
import "Quota.proto";
/* Column-level protobufs */
@@ -440,6 +441,13 @@ message AbortProcedureResponse {
required bool is_procedure_aborted = 1;
}
+message ListProceduresRequest {
+}
+
+message ListProceduresResponse {
+ repeated Procedure procedure = 1;
+}
+
message SetQuotaRequest {
optional string user_name = 1;
optional string user_group = 2;
@@ -720,4 +728,8 @@ service MasterService {
/** Abort a procedure */
rpc AbortProcedure(AbortProcedureRequest)
returns(AbortProcedureResponse);
+
+ /** returns a list of procedures */
+ rpc ListProcedures(ListProceduresRequest)
+ returns(ListProceduresResponse);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 75fe759..fba6bf5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -65,6 +65,7 @@ import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceNotFoundException;
import org.apache.hadoop.hbase.PleaseHoldException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.ServerName;
@@ -74,7 +75,6 @@ import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.UnknownRegionException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.TableState;
@@ -2486,6 +2486,11 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
}
@Override
+ public List<ProcedureInfo> listProcedures() throws IOException {
+ return this.procedureExecutor.listProcedures();
+ }
+
+ @Override
public List<HTableDescriptor> listTableDescriptorsByNamespace(String name) throws IOException {
ensureNamespaceExists(name);
return listTableDescriptors(name, null, null, true);
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index 85e3acc..0044996 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.PleaseHoldException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
@@ -51,7 +52,6 @@ import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.mob.MobUtils;
import org.apache.hadoop.hbase.procedure.MasterProcedureManager;
import org.apache.hadoop.hbase.procedure2.Procedure;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.RequestConverter;
import org.apache.hadoop.hbase.protobuf.ResponseConverter;
@@ -123,6 +123,8 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneReq
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest;
@@ -1032,15 +1034,15 @@ public class MasterRpcServices extends RSRpcServices
master.checkInitialized();
GetProcedureResultResponse.Builder builder = GetProcedureResultResponse.newBuilder();
- Pair<ProcedureResult, Procedure> v = master.getMasterProcedureExecutor()
+ Pair<ProcedureInfo, Procedure> v = master.getMasterProcedureExecutor()
.getResultOrProcedure(request.getProcId());
if (v.getFirst() != null) {
- ProcedureResult result = v.getFirst();
+ ProcedureInfo result = v.getFirst();
builder.setState(GetProcedureResultResponse.State.FINISHED);
builder.setStartTime(result.getStartTime());
builder.setLastUpdate(result.getLastUpdate());
if (result.isFailed()) {
- builder.setException(result.getException().convert());
+ builder.setException(result.getForeignExceptionMessage());
}
if (result.hasResultData()) {
builder.setResult(ByteStringer.wrap(result.getResult()));
@@ -1089,6 +1091,22 @@ public class MasterRpcServices extends RSRpcServices
}
@Override
+ public ListProceduresResponse listProcedures(
+ RpcController rpcController,
+ ListProceduresRequest request) throws ServiceException {
+ try {
+ ListProceduresResponse.Builder response =
+ ListProceduresResponse.newBuilder();
+ for(ProcedureInfo p: master.listProcedures()) {
+ response.addProcedure(ProcedureInfo.convertToProcedureProto(p));
+ }
+ return response.build();
+ } catch (IOException e) {
+ throw new ServiceException(e);
+ }
+ }
+
+ @Override
public ListTableDescriptorsByNamespaceResponse listTableDescriptorsByNamespace(RpcController c,
ListTableDescriptorsByNamespaceRequest request) throws ServiceException {
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
index e7f4f21..9a8a334 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.TableDescriptors;
import org.apache.hadoop.hbase.TableName;
@@ -344,6 +345,13 @@ public interface MasterServices extends Server {
public List<NamespaceDescriptor> listNamespaceDescriptors() throws IOException;
/**
+ * List procedures
+ * @return procedure list
+ * @throws IOException
+ */
+ public List<ProcedureInfo> listProcedures() throws IOException;
+
+ /**
* Get list of table descriptors by namespace
* @param name namespace name
* @return descriptors
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java
index 1eb0073..6a7e267 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java
@@ -24,12 +24,12 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CoordinatedStateException;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.master.RegionState.State;
import org.apache.hadoop.hbase.master.ServerManager;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
+import org.apache.hadoop.hbase.procedure2.RemoteProcedureException;
import org.apache.hadoop.hbase.quotas.MasterQuotaManager;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Threads;
@@ -78,11 +78,12 @@ public final class ProcedureSyncWait {
// Dev Consideration: are we waiting forever, or we can set up some timeout value?
Threads.sleepWithoutInterrupt(250);
}
- ProcedureResult result = procExec.getResult(procId);
+ ProcedureInfo result = procExec.getResult(procId);
if (result != null) {
if (result.isFailed()) {
// If the procedure fails, we should always have an exception captured. Throw it.
- throw result.getException().unwrapRemoteException();
+ throw RemoteProcedureException.fromProto(
+ result.getForeignExceptionMessage()).unwrapRemoteException();
}
return result.getResult();
} else {
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
index dc06071..6f83d4a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
@@ -26,7 +26,6 @@ import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.List;
import java.util.Random;
-import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.logging.Log;
@@ -41,6 +40,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.NotServingRegionException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
@@ -52,7 +52,6 @@ import org.apache.hadoop.hbase.constraint.ConstraintException;
import org.apache.hadoop.hbase.master.AssignmentManager;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.testclassification.ClientTests;
@@ -738,4 +737,10 @@ public class TestAdmin2 {
boolean abortResult = admin.abortProcedure(procId, true);
assertFalse(abortResult);
}
+
+ @Test(timeout = 300000)
+ public void testListProcedures() throws Exception {
+ ProcedureInfo[] procList = admin.listProcedures();
+ assertTrue(procList.length >= 0);
+ }
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
index c7fe187..e9816e2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MetaMockingUtil;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableDescriptor;
@@ -442,6 +443,11 @@ public class TestCatalogJanitor {
}
@Override
+ public List<ProcedureInfo> listProcedures() throws IOException {
+ return null; //To change body of implemented methods use File | Settings | File Templates.
+ }
+
+ @Override
public List<HTableDescriptor> listTableDescriptorsByNamespace(String name) throws IOException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestAddColumnFamilyProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestAddColumnFamilyProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestAddColumnFamilyProcedure.java
index 5d8da9c..a98d468 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestAddColumnFamilyProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestAddColumnFamilyProcedure.java
@@ -28,9 +28,9 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.InvalidFamilyOperationException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyState;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -153,10 +153,11 @@ public class TestAddColumnFamilyProcedure {
ProcedureTestingUtility.waitProcedure(procExec, procId2);
// Second add should fail with InvalidFamilyOperationException
- ProcedureResult result = procExec.getResult(procId2);
+ ProcedureInfo result = procExec.getResult(procId2);
assertTrue(result.isFailed());
- LOG.debug("Add failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof InvalidFamilyOperationException);
+ LOG.debug("Add failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof InvalidFamilyOperationException);
// Do the same add the existing column family - this time offline
UTIL.getHBaseAdmin().disableTable(tableName);
@@ -170,8 +171,9 @@ public class TestAddColumnFamilyProcedure {
// Second add should fail with InvalidFamilyOperationException
result = procExec.getResult(procId3);
assertTrue(result.isFailed());
- LOG.debug("Add failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof InvalidFamilyOperationException);
+ LOG.debug("Add failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof InvalidFamilyOperationException);
}
@Test(timeout=60000)
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateNamespaceProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateNamespaceProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateNamespaceProcedure.java
index d25e61f..c01755f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateNamespaceProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateNamespaceProcedure.java
@@ -32,9 +32,9 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceExistException;
import org.apache.hadoop.hbase.NamespaceNotFoundException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.constraint.ConstraintException;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.CreateNamespaceState;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -126,10 +126,11 @@ public class TestCreateNamespaceProcedure {
ProcedureTestingUtility.waitProcedure(procExec, procId2);
// Second create should fail with NamespaceExistException
- ProcedureResult result = procExec.getResult(procId2);
+ ProcedureInfo result = procExec.getResult(procId2);
assertTrue(result.isFailed());
- LOG.debug("Create namespace failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof NamespaceExistException);
+ LOG.debug("Create namespace failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof NamespaceExistException);
}
@Test(timeout=60000)
@@ -144,10 +145,11 @@ public class TestCreateNamespaceProcedure {
nonce);
// Wait the completion
ProcedureTestingUtility.waitProcedure(procExec, procId);
- ProcedureResult result = procExec.getResult(procId);
+ ProcedureInfo result = procExec.getResult(procId);
assertTrue(result.isFailed());
- LOG.debug("Create namespace failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof NamespaceExistException);
+ LOG.debug("Create namespace failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof NamespaceExistException);
}
@Test(timeout=60000)
@@ -166,10 +168,10 @@ public class TestCreateNamespaceProcedure {
nonce);
// Wait the completion
ProcedureTestingUtility.waitProcedure(procExec, procId);
- ProcedureResult result = procExec.getResult(procId);
+ ProcedureInfo result = procExec.getResult(procId);
assertTrue(result.isFailed());
- LOG.debug("Create namespace failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof ConstraintException);
+ LOG.debug("Create namespace failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof ConstraintException);
}
@Test(timeout=60000)
@@ -188,10 +190,10 @@ public class TestCreateNamespaceProcedure {
nonce);
// Wait the completion
ProcedureTestingUtility.waitProcedure(procExec, procId);
- ProcedureResult result = procExec.getResult(procId);
+ ProcedureInfo result = procExec.getResult(procId);
assertTrue(result.isFailed());
- LOG.debug("Create namespace failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof ConstraintException);
+ LOG.debug("Create namespace failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof ConstraintException);
}
@Test(timeout=60000)
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedure.java
index ad68354..3980274 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedure.java
@@ -28,9 +28,9 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.InvalidFamilyOperationException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyState;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -151,10 +151,11 @@ public class TestDeleteColumnFamilyProcedure {
ProcedureTestingUtility.waitProcedure(procExec, procId2);
// Second delete should fail with InvalidFamilyOperationException
- ProcedureResult result = procExec.getResult(procId2);
+ ProcedureInfo result = procExec.getResult(procId2);
assertTrue(result.isFailed());
- LOG.debug("Delete online failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof InvalidFamilyOperationException);
+ LOG.debug("Delete online failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof InvalidFamilyOperationException);
// Try again, this time with table disabled.
UTIL.getHBaseAdmin().disableTable(tableName);
@@ -167,8 +168,9 @@ public class TestDeleteColumnFamilyProcedure {
// Expect fail with InvalidFamilyOperationException
result = procExec.getResult(procId2);
assertTrue(result.isFailed());
- LOG.debug("Delete offline failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof InvalidFamilyOperationException);
+ LOG.debug("Delete offline failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof InvalidFamilyOperationException);
}
@Test(timeout=60000)
@@ -219,10 +221,11 @@ public class TestDeleteColumnFamilyProcedure {
// Wait the completion
ProcedureTestingUtility.waitProcedure(procExec, procId1);
- ProcedureResult result = procExec.getResult(procId1);
+ ProcedureInfo result = procExec.getResult(procId1);
assertTrue(result.isFailed());
- LOG.debug("Delete failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof InvalidFamilyOperationException);
+ LOG.debug("Delete failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof InvalidFamilyOperationException);
}
@Test(timeout=60000)
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java
index dd22de7..4c5f87b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java
@@ -32,10 +32,10 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceNotFoundException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.constraint.ConstraintException;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteNamespaceState;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -124,10 +124,11 @@ public class TestDeleteNamespaceProcedure {
// Wait the completion
ProcedureTestingUtility.waitProcedure(procExec, procId);
// Expect fail with NamespaceNotFoundException
- ProcedureResult result = procExec.getResult(procId);
+ ProcedureInfo result = procExec.getResult(procId);
assertTrue(result.isFailed());
- LOG.debug("Delete namespace failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof NamespaceNotFoundException);
+ LOG.debug("Delete namespace failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof NamespaceNotFoundException);
}
@Test(timeout=60000)
@@ -141,10 +142,10 @@ public class TestDeleteNamespaceProcedure {
nonce);
// Wait the completion
ProcedureTestingUtility.waitProcedure(procExec, procId);
- ProcedureResult result = procExec.getResult(procId);
+ ProcedureInfo result = procExec.getResult(procId);
assertTrue(result.isFailed());
- LOG.debug("Delete namespace failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof ConstraintException);
+ LOG.debug("Delete namespace failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof ConstraintException);
}
@Test(timeout=60000)
@@ -163,10 +164,10 @@ public class TestDeleteNamespaceProcedure {
nonce);
// Wait the completion
ProcedureTestingUtility.waitProcedure(procExec, procId);
- ProcedureResult result = procExec.getResult(procId);
+ ProcedureInfo result = procExec.getResult(procId);
assertTrue(result.isFailed());
- LOG.debug("Delete namespace failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof ConstraintException);
+ LOG.debug("Delete namespace failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof ConstraintException);
}
@Test(timeout=60000)
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
index f4afd31..4f62537 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
@@ -25,11 +25,11 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteTableState;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -142,10 +142,10 @@ public class TestDeleteTableProcedure {
UTIL.getHBaseCluster().getMaster(), tableName, regions, "f");
// Second delete should fail with TableNotFound
- ProcedureResult result = procExec.getResult(procId2);
+ ProcedureInfo result = procExec.getResult(procId2);
assertTrue(result.isFailed());
- LOG.debug("Delete failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof TableNotFoundException);
+ LOG.debug("Delete failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof TableNotFoundException);
}
@Test(timeout=60000)
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java
index 6959af9..eb58cd5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java
@@ -26,10 +26,10 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DisableTableState;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -126,10 +126,11 @@ public class TestDisableTableProcedure {
procExec.getEnvironment(), tableName, false), nonceGroup + 1, nonce + 1);
// Wait the completion
ProcedureTestingUtility.waitProcedure(procExec, procId2);
- ProcedureResult result = procExec.getResult(procId2);
+ ProcedureInfo result = procExec.getResult(procId2);
assertTrue(result.isFailed());
- LOG.debug("Disable failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof TableNotEnabledException);
+ LOG.debug("Disable failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof TableNotEnabledException);
// Disable the table - expect failure from ProcedurePrepareLatch
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java
index 0204e52..5c2aa29 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java
@@ -26,10 +26,10 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.EnableTableState;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -141,10 +141,11 @@ public class TestEnableTableProcedure {
new EnableTableProcedure(procExec.getEnvironment(), tableName, false), nonceGroup, nonce);
ProcedureTestingUtility.waitProcedure(procExec, procId1);
- ProcedureResult result = procExec.getResult(procId1);
+ ProcedureInfo result = procExec.getResult(procId1);
assertTrue(result.isFailed());
- LOG.debug("Enable failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof TableNotDisabledException);
+ LOG.debug("Enable failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof TableNotDisabledException);
// Enable the table with skipping table state check flag (simulate recovery scenario)
long procId2 = procExec.submitProcedure(
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyColumnFamilyProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyColumnFamilyProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyColumnFamilyProcedure.java
index 3b40955..e983459 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyColumnFamilyProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyColumnFamilyProcedure.java
@@ -28,9 +28,9 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.InvalidFamilyOperationException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyState;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -146,10 +146,11 @@ public class TestModifyColumnFamilyProcedure {
// Wait the completion
ProcedureTestingUtility.waitProcedure(procExec, procId1);
- ProcedureResult result = procExec.getResult(procId1);
+ ProcedureInfo result = procExec.getResult(procId1);
assertTrue(result.isFailed());
- LOG.debug("Modify failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof InvalidFamilyOperationException);
+ LOG.debug("Modify failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof InvalidFamilyOperationException);
}
@Test(timeout=60000)
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java
index e946043..9208df7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyNamespaceProcedure.java
@@ -29,9 +29,9 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceNotFoundException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.constraint.ConstraintException;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyNamespaceState;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -150,10 +150,11 @@ public class TestModifyNamespaceProcedure {
ProcedureTestingUtility.waitProcedure(procExec, procId);
// Expect fail with NamespaceNotFoundException
- ProcedureResult result = procExec.getResult(procId);
+ ProcedureInfo result = procExec.getResult(procId);
assertTrue(result.isFailed());
- LOG.debug("modify namespace failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof NamespaceNotFoundException);
+ LOG.debug("modify namespace failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof NamespaceNotFoundException);
}
@Test(timeout=60000)
@@ -175,10 +176,10 @@ public class TestModifyNamespaceProcedure {
nonce);
// Wait the completion
ProcedureTestingUtility.waitProcedure(procExec, procId);
- ProcedureResult result = procExec.getResult(procId);
+ ProcedureInfo result = procExec.getResult(procId);
assertTrue(result.isFailed());
- LOG.debug("Modify namespace failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof ConstraintException);
+ LOG.debug("Modify namespace failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof ConstraintException);
}
@Test(timeout=60000)
@@ -200,10 +201,10 @@ public class TestModifyNamespaceProcedure {
nonce);
// Wait the completion
ProcedureTestingUtility.waitProcedure(procExec, procId);
- ProcedureResult result = procExec.getResult(procId);
+ ProcedureInfo result = procExec.getResult(procId);
assertTrue(result.isFailed());
- LOG.debug("Modify namespace failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof ConstraintException);
+ LOG.debug("Modify namespace failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof ConstraintException);
}
@Test(timeout = 60000)
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
index d304ecd..39808db 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.hbase.master.procedure;
import java.util.Random;
+import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -27,11 +28,13 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -181,6 +184,41 @@ public class TestProcedureAdmin {
assertFalse(abortResult);
}
+ @Test(timeout=60000)
+ public void testListProcedure() throws Exception {
+ final TableName tableName = TableName.valueOf("testListProcedure");
+ final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();
+
+ MasterProcedureTestingUtility.createTable(procExec, tableName, null, "f");
+ ProcedureTestingUtility.waitNoProcedureRunning(procExec);
+ ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);
+
+ long procId = procExec.submitProcedure(
+ new DisableTableProcedure(procExec.getEnvironment(), tableName, false), nonceGroup, nonce);
+
+ List<ProcedureInfo> listProcedures = procExec.listProcedures();
+ assertTrue(listProcedures.size() >= 1);
+ boolean found = false;
+ for (ProcedureInfo procInfo: listProcedures) {
+ if (procInfo.getProcId() == procId) {
+ assertTrue(procInfo.getProcState() == ProcedureState.RUNNABLE);
+ found = true;
+ } else {
+ assertTrue(procInfo.getProcState() == ProcedureState.FINISHED);
+ }
+ }
+ assertTrue(found);
+
+ ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, false);
+ ProcedureTestingUtility.restart(procExec);
+ ProcedureTestingUtility.waitNoProcedureRunning(procExec);
+ ProcedureTestingUtility.assertProcNotFailed(procExec, procId);
+ listProcedures = procExec.listProcedures();
+ for (ProcedureInfo procInfo: listProcedures) {
+ assertTrue(procInfo.getProcState() == ProcedureState.FINISHED);
+ }
+ }
+
private ProcedureExecutor<MasterProcedureEnv> getMasterProcedureExecutor() {
return UTIL.getHBaseCluster().getMaster().getMasterProcedureExecutor();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java
index e9f5746..6490a92 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java
@@ -25,11 +25,11 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.TruncateTableState;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -102,10 +102,10 @@ public class TestTruncateTableProcedure {
new TruncateTableProcedure(procExec.getEnvironment(), tableName, true));
// Second delete should fail with TableNotFound
- ProcedureResult result = procExec.getResult(procId);
+ ProcedureInfo result = procExec.getResult(procId);
assertTrue(result.isFailed());
- LOG.debug("Truncate failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof TableNotFoundException);
+ LOG.debug("Truncate failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof TableNotFoundException);
}
@Test(timeout=60000)
@@ -119,10 +119,11 @@ public class TestTruncateTableProcedure {
new TruncateTableProcedure(procExec.getEnvironment(), tableName, false));
// Second delete should fail with TableNotDisabled
- ProcedureResult result = procExec.getResult(procId);
+ ProcedureInfo result = procExec.getResult(procId);
assertTrue(result.isFailed());
- LOG.debug("Truncate failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof TableNotDisabledException);
+ LOG.debug("Truncate failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof TableNotDisabledException);
}
@Test(timeout=60000)
[3/3] hbase git commit: HBASE-14107 Administrative Task: Provide an
API to List all procedures (Stephen Yuan Jiang)
Posted by sy...@apache.org.
HBASE-14107 Administrative Task: Provide an API to List all procedures (Stephen Yuan Jiang)
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5b7894f9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5b7894f9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5b7894f9
Branch: refs/heads/master
Commit: 5b7894f92ba3e9ff700da1e9194ebb4774d8b71e
Parents: 5e26ae0
Author: Stephen Yuan Jiang <sy...@gmail.com>
Authored: Wed Sep 23 21:48:32 2015 -0700
Committer: Stephen Yuan Jiang <sy...@gmail.com>
Committed: Wed Sep 23 21:48:32 2015 -0700
----------------------------------------------------------------------
.../org/apache/hadoop/hbase/client/Admin.java | 9 +
.../hbase/client/ConnectionImplementation.java | 10 +-
.../apache/hadoop/hbase/client/HBaseAdmin.java | 25 +
.../org/apache/hadoop/hbase/ProcedureInfo.java | 224 +++
.../exceptions/IllegalArgumentIOException.java | 46 +
.../hadoop/hbase/procedure2/Procedure.java | 43 +-
.../hbase/procedure2/ProcedureExecutor.java | 63 +-
.../hbase/procedure2/ProcedureResult.java | 115 --
.../procedure2/ProcedureTestingUtility.java | 42 +-
.../procedure2/TestProcedureExecution.java | 46 +-
.../hbase/procedure2/TestProcedureRecovery.java | 13 +-
.../procedure2/TestProcedureReplayOrder.java | 3 -
.../hbase/protobuf/generated/MasterProtos.java | 1754 +++++++++++++++---
hbase-protocol/src/main/protobuf/Master.proto | 12 +
.../org/apache/hadoop/hbase/master/HMaster.java | 7 +-
.../hadoop/hbase/master/MasterRpcServices.java | 26 +-
.../hadoop/hbase/master/MasterServices.java | 8 +
.../master/procedure/ProcedureSyncWait.java | 9 +-
.../apache/hadoop/hbase/client/TestAdmin2.java | 9 +-
.../hadoop/hbase/master/TestCatalogJanitor.java | 6 +
.../procedure/TestAddColumnFamilyProcedure.java | 14 +-
.../procedure/TestCreateNamespaceProcedure.java | 28 +-
.../TestDeleteColumnFamilyProcedure.java | 21 +-
.../procedure/TestDeleteNamespaceProcedure.java | 21 +-
.../procedure/TestDeleteTableProcedure.java | 8 +-
.../procedure/TestDisableTableProcedure.java | 9 +-
.../procedure/TestEnableTableProcedure.java | 9 +-
.../TestModifyColumnFamilyProcedure.java | 9 +-
.../procedure/TestModifyNamespaceProcedure.java | 21 +-
.../master/procedure/TestProcedureAdmin.java | 38 +
.../procedure/TestTruncateTableProcedure.java | 15 +-
31 files changed, 2098 insertions(+), 565 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index 1c6c376..9537424 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
@@ -1045,6 +1046,14 @@ public interface Admin extends Abortable, Closeable {
final boolean mayInterruptIfRunning) throws IOException;
/**
+ * List procedures
+ * @return procedure list
+ * @throws IOException
+ */
+ ProcedureInfo[] listProcedures()
+ throws IOException;
+
+ /**
* Roll the log writer. I.e. for filesystem based write ahead logs, start writing to a new file.
*
* Note that the actual rolling of the log writer is asynchronous and may not be complete when
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
index 2262a0f..ade32a8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
@@ -1419,6 +1419,13 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
}
@Override
+ public MasterProtos.ListProceduresResponse listProcedures(
+ RpcController controller,
+ MasterProtos.ListProceduresRequest request) throws ServiceException {
+ return stub.listProcedures(controller, request);
+ }
+
+ @Override
public MasterProtos.AddColumnResponse addColumn(
RpcController controller,
MasterProtos.AddColumnRequest request) throws ServiceException {
@@ -1933,7 +1940,8 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
* If the method returns it means that there is no error, and the 'results' array will
* contain no exception. On error, an exception is thrown, and the 'results' array will
* contain results and exceptions.
- * @deprecated since 0.96 - Use {@link org.apache.hadoop.hbase.client.HTable#processBatchCallback} instead
+ * @deprecated since 0.96 -
+ * Use {@link org.apache.hadoop.hbase.client.HTable#processBatchCallback} instead
*/
@Override
@Deprecated
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 2268d3e..0d089be 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -52,6 +52,7 @@ import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NotServingRegionException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.RegionLocations;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableExistsException;
@@ -124,6 +125,7 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshot
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest;
@@ -144,6 +146,7 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest;
+import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos;
import org.apache.hadoop.hbase.quotas.QuotaFilter;
import org.apache.hadoop.hbase.quotas.QuotaRetriever;
import org.apache.hadoop.hbase.quotas.QuotaSettings;
@@ -2820,6 +2823,28 @@ public class HBaseAdmin implements Admin {
}
/**
+ * List procedures
+ * @return procedure list
+ * @throws IOException
+ */
+ @Override
+ public ProcedureInfo[] listProcedures() throws IOException {
+ return
+ executeCallable(new MasterCallable<ProcedureInfo[]>(getConnection()) {
+ @Override
+ public ProcedureInfo[] call(int callTimeout) throws Exception {
+ List<ProcedureProtos.Procedure> procList = master.listProcedures(
+ null, ListProceduresRequest.newBuilder().build()).getProcedureList();
+ ProcedureInfo[] procInfoList = new ProcedureInfo[procList.size()];
+ for (int i = 0; i < procList.size(); i++) {
+ procInfoList[i] = ProcedureInfo.convert(procList.get(i));
+ }
+ return procInfoList;
+ }
+ });
+ }
+
+ /**
* Get list of table descriptors by namespace
* @param name namespace name
* @return A descriptor
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
new file mode 100644
index 0000000..9708c31
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
@@ -0,0 +1,224 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage;
+import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos;
+import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState;
+import org.apache.hadoop.hbase.util.ByteStringer;
+import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
+import org.apache.hadoop.hbase.util.NonceKey;
+
+/**
+ * Procedure information
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class ProcedureInfo {
+ private final long procId;
+ private final String procName;
+ private final String procOwner;
+ private final ProcedureState procState;
+ private final long parentId;
+ private final ForeignExceptionMessage exception;
+ private final long lastUpdate;
+ private final long startTime;
+ private final byte[] result;
+
+ private NonceKey nonceKey = null;
+ private long clientAckTime = -1;
+
+ public ProcedureInfo(
+ final long procId,
+ final String procName,
+ final String procOwner,
+ final ProcedureState procState,
+ final long parentId,
+ final ForeignExceptionMessage exception,
+ final long lastUpdate,
+ final long startTime,
+ final byte[] result) {
+ this.procId = procId;
+ this.procName = procName;
+ this.procOwner = procOwner;
+ this.procState = procState;
+ this.parentId = parentId;
+ this.lastUpdate = lastUpdate;
+ this.startTime = startTime;
+
+ // If the procedure is completed, we should treat exception and result differently
+ this.exception = exception;
+ this.result = result;
+ }
+
+ public long getProcId() {
+ return procId;
+ }
+
+ public String getProcName() {
+ return procName;
+ }
+
+ public String getProcOwner() {
+ return procOwner;
+ }
+
+ public ProcedureState getProcState() {
+ return procState;
+ }
+
+ public boolean hasParentId() {
+ return (parentId != -1);
+ }
+
+ public long getParentId() {
+ return parentId;
+ }
+
+ public NonceKey getNonceKey() {
+ return nonceKey;
+ }
+
+ public void setNonceKey(NonceKey nonceKey) {
+ this.nonceKey = nonceKey;
+ }
+
+ public boolean isFailed() {
+ return exception != null;
+ }
+
+ public IOException getException() {
+ if (isFailed()) {
+ return ForeignExceptionUtil.toIOException(exception);
+ }
+ return null;
+ }
+
+ @InterfaceAudience.Private
+ public ForeignExceptionMessage getForeignExceptionMessage() {
+ return exception;
+ }
+
+ public String getExceptionCause() {
+ assert isFailed();
+ return exception.getGenericException().getClassName();
+ }
+
+ public String getExceptionMessage() {
+ assert isFailed();
+ return exception.getGenericException().getMessage();
+ }
+
+ public String getExceptionFullMessage() {
+ assert isFailed();
+ return getExceptionCause() + " - " + getExceptionMessage();
+ }
+
+ public boolean hasResultData() {
+ return result != null;
+ }
+
+ public byte[] getResult() {
+ return result;
+ }
+
+ public long getStartTime() {
+ return startTime;
+ }
+
+ public long getLastUpdate() {
+ return lastUpdate;
+ }
+
+ public long executionTime() {
+ return lastUpdate - startTime;
+ }
+
+ @InterfaceAudience.Private
+ public boolean hasClientAckTime() {
+ return clientAckTime > 0;
+ }
+
+ @InterfaceAudience.Private
+ public long getClientAckTime() {
+ return clientAckTime;
+ }
+
+ @InterfaceAudience.Private
+ public void setClientAckTime(final long timestamp) {
+ this.clientAckTime = timestamp;
+ }
+
+ /**
+ * @return Convert the current {@link ProcedureInfo} into a Protocol Buffers Procedure
+ * instance.
+ */
+ @InterfaceAudience.Private
+ public static ProcedureProtos.Procedure convertToProcedureProto(
+ final ProcedureInfo procInfo) {
+ ProcedureProtos.Procedure.Builder builder = ProcedureProtos.Procedure.newBuilder();
+
+ builder.setClassName(procInfo.getProcName());
+ builder.setProcId(procInfo.getProcId());
+ builder.setStartTime(procInfo.getStartTime());
+ builder.setState(procInfo.getProcState());
+ builder.setLastUpdate(procInfo.getLastUpdate());
+
+ if (procInfo.hasParentId()) {
+ builder.setParentId(procInfo.getParentId());
+ }
+
+ if (procInfo.getProcOwner() != null) {
+ builder.setOwner(procInfo.getProcOwner());
+ }
+
+ if (procInfo.isFailed()) {
+ builder.setException(procInfo.getForeignExceptionMessage());
+ }
+
+ if (procInfo.hasResultData()) {
+ builder.setResult(ByteStringer.wrap(procInfo.getResult()));
+ }
+
+ return builder.build();
+ }
+
+ /**
+ * Helper to convert the protobuf object.
+ * @return Convert the current Protocol Buffers Procedure to {@link ProcedureInfo}
+ * instance.
+ */
+ @InterfaceAudience.Private
+ public static ProcedureInfo convert(final ProcedureProtos.Procedure procProto) {
+ return new ProcedureInfo(
+ procProto.getProcId(),
+ procProto.getClassName(),
+ procProto.getOwner(),
+ procProto.getState(),
+ procProto.hasParentId() ? procProto.getParentId() : -1,
+ procProto.getState() == ProcedureState.ROLLEDBACK ? procProto.getException() : null,
+ procProto.getLastUpdate(),
+ procProto.getStartTime(),
+ procProto.getState() == ProcedureState.FINISHED ? procProto.getResult().toByteArray() : null);
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/IllegalArgumentIOException.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/IllegalArgumentIOException.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/IllegalArgumentIOException.java
new file mode 100644
index 0000000..53ced75
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/IllegalArgumentIOException.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.exceptions;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+/**
+ * Exception thrown when an illegal argument is passed to a function/procedure.
+ */
+@SuppressWarnings("serial")
+@InterfaceAudience.Private
+public class IllegalArgumentIOException extends IOException {
+ public IllegalArgumentIOException() {
+ super();
+ }
+
+ public IllegalArgumentIOException(final String message) {
+ super(message);
+ }
+
+ public IllegalArgumentIOException(final String message, final Throwable t) {
+ super(message, t);
+ }
+
+ public IllegalArgumentIOException(final Throwable t) {
+ super(t);
+ }
+}
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
index a343c89..5545c5c 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
@@ -26,11 +26,12 @@ import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
-import java.util.concurrent.TimeoutException;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
import org.apache.hadoop.hbase.procedure2.util.StringUtils;
import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos;
import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState;
@@ -214,6 +215,13 @@ public abstract class Procedure<TEnvironment> implements Comparable<Procedure> {
return sb.toString();
}
+ protected String toStringClass() {
+ StringBuilder sb = new StringBuilder();
+ toStringClassDetails(sb);
+
+ return sb.toString();
+ }
+
/**
* Called from {@link #toString()} when interpolating {@link Procedure} state
* @param builder Append current {@link ProcedureState}
@@ -395,7 +403,7 @@ public abstract class Procedure<TEnvironment> implements Comparable<Procedure> {
protected synchronized boolean setTimeoutFailure() {
if (state == ProcedureState.WAITING_TIMEOUT) {
long timeDiff = EnvironmentEdgeManager.currentTime() - lastUpdate;
- setFailure("ProcedureExecutor", new TimeoutException(
+ setFailure("ProcedureExecutor", new TimeoutIOException(
"Operation timed out after " + StringUtils.humanTimeDiff(timeDiff)));
return true;
}
@@ -626,6 +634,37 @@ public abstract class Procedure<TEnvironment> implements Comparable<Procedure> {
}
/**
+ * Helper to create the ProcedureInfo from Procedure.
+ */
+ @InterfaceAudience.Private
+ public static ProcedureInfo createProcedureInfo(final Procedure proc, final NonceKey nonceKey) {
+ RemoteProcedureException exception;
+
+ if (proc.hasException()) {
+ exception = proc.getException();
+ } else {
+ exception = null;
+ }
+ ProcedureInfo procInfo = new ProcedureInfo(
+ proc.getProcId(),
+ proc.toStringClass(),
+ proc.getOwner(),
+ proc.getState(),
+ proc.hasParent() ? proc.getParentProcId() : -1,
+ exception != null ?
+ RemoteProcedureException.toProto(exception.getSource(), exception.getCause()) : null,
+ proc.getLastUpdate(),
+ proc.getStartTime(),
+ proc.getResult());
+
+ if (nonceKey != null) {
+ procInfo.setNonceKey(nonceKey);
+ }
+
+ return procInfo;
+ }
+
+ /**
* Helper to convert the procedure to protobuf.
* Used by ProcedureStore implementations.
*/
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index db0fc97..a6673cb 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -40,8 +40,10 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator;
import org.apache.hadoop.hbase.procedure2.util.StringUtils;
@@ -135,13 +137,13 @@ public class ProcedureExecutor<TEnvironment> {
private static final String EVICT_ACKED_TTL_CONF_KEY ="hbase.procedure.cleaner.acked.evict.ttl";
private static final int DEFAULT_ACKED_EVICT_TTL = 5 * 60000; // 5min
- private final Map<Long, ProcedureResult> completed;
+ private final Map<Long, ProcedureInfo> completed;
private final Map<NonceKey, Long> nonceKeysToProcIdsMap;
private final ProcedureStore store;
private final Configuration conf;
public CompletedProcedureCleaner(final Configuration conf, final ProcedureStore store,
- final Map<Long, ProcedureResult> completedMap,
+ final Map<Long, ProcedureInfo> completedMap,
final Map<NonceKey, Long> nonceKeysToProcIdsMap) {
// set the timeout interval that triggers the periodic-procedure
setTimeout(conf.getInt(CLEANER_INTERVAL_CONF_KEY, DEFAULT_CLEANER_INTERVAL));
@@ -163,10 +165,10 @@ public class ProcedureExecutor<TEnvironment> {
final long evictAckTtl = conf.getInt(EVICT_ACKED_TTL_CONF_KEY, DEFAULT_ACKED_EVICT_TTL);
long now = EnvironmentEdgeManager.currentTime();
- Iterator<Map.Entry<Long, ProcedureResult>> it = completed.entrySet().iterator();
+ Iterator<Map.Entry<Long, ProcedureInfo>> it = completed.entrySet().iterator();
while (it.hasNext() && store.isRunning()) {
- Map.Entry<Long, ProcedureResult> entry = it.next();
- ProcedureResult result = entry.getValue();
+ Map.Entry<Long, ProcedureInfo> entry = it.next();
+ ProcedureInfo result = entry.getValue();
// TODO: Select TTL based on Procedure type
if ((result.hasClientAckTime() && (now - result.getClientAckTime()) >= evictAckTtl) ||
@@ -212,12 +214,12 @@ public class ProcedureExecutor<TEnvironment> {
}
/**
- * Map the the procId returned by submitProcedure(), the Root-ProcID, to the ProcedureResult.
+ * Map the procId returned by submitProcedure(), the Root-ProcID, to the ProcedureInfo.
* Once a Root-Procedure completes (success or failure), the result will be added to this map.
* The user of ProcedureExecutor should call getResult(procId) to get the result.
*/
- private final ConcurrentHashMap<Long, ProcedureResult> completed =
- new ConcurrentHashMap<Long, ProcedureResult>();
+ private final ConcurrentHashMap<Long, ProcedureInfo> completed =
+ new ConcurrentHashMap<Long, ProcedureInfo>();
/**
* Map the the procId returned by submitProcedure(), the Root-ProcID, to the RootProcedureState.
@@ -364,7 +366,7 @@ public class ProcedureExecutor<TEnvironment> {
}
assert !rollbackStack.containsKey(proc.getProcId());
procedures.remove(proc.getProcId());
- completed.put(proc.getProcId(), newResultFromProcedure(proc));
+ completed.put(proc.getProcId(), Procedure.createProcedureInfo(proc, proc.getNonceKey()));
continue;
}
@@ -572,6 +574,26 @@ public class ProcedureExecutor<TEnvironment> {
}
/**
+ * List procedures.
+ * @return the procedures in a list
+ */
+ public List<ProcedureInfo> listProcedures() {
+ List<ProcedureInfo> procedureLists =
+ new ArrayList<ProcedureInfo>(procedures.size() + completed.size());
+ for (java.util.Map.Entry<Long, Procedure> p: procedures.entrySet()) {
+ procedureLists.add(Procedure.createProcedureInfo(p.getValue(), null));
+ }
+ for (java.util.Map.Entry<Long, ProcedureInfo> e: completed.entrySet()) {
+ // Note: The procedure could show up twice in the list with different state, as
+ // it could complete after we walk through procedures list and insert into
+ // procedureList - it is ok, as we will use the information in the ProcedureInfo
+ // to figure it out; to prevent this would increase the complexity of the logic.
+ procedureLists.add(e.getValue());
+ }
+ return procedureLists;
+ }
+
+ /**
* Add a new root-procedure to the executor.
* @param proc the new procedure to execute.
* @return the procedure id, that can be used to monitor the operation
@@ -643,7 +665,7 @@ public class ProcedureExecutor<TEnvironment> {
return currentProcId;
}
- public ProcedureResult getResult(final long procId) {
+ public ProcedureInfo getResult(final long procId) {
return completed.get(procId);
}
@@ -676,7 +698,7 @@ public class ProcedureExecutor<TEnvironment> {
* @param procId the ID of the procedure to remove
*/
public void removeResult(final long procId) {
- ProcedureResult result = completed.get(procId);
+ ProcedureInfo result = completed.get(procId);
if (result == null) {
assert !procedures.containsKey(procId) : "procId=" + procId + " is still running";
if (LOG.isDebugEnabled()) {
@@ -718,7 +740,7 @@ public class ProcedureExecutor<TEnvironment> {
return false;
}
- public Map<Long, ProcedureResult> getResults() {
+ public Map<Long, ProcedureInfo> getResults() {
return Collections.unmodifiableMap(completed);
}
@@ -1033,7 +1055,7 @@ public class ProcedureExecutor<TEnvironment> {
if (subproc == null) {
String msg = "subproc[" + i + "] is null, aborting the procedure";
procedure.setFailure(new RemoteProcedureException(msg,
- new IllegalArgumentException(msg)));
+ new IllegalArgumentIOException(msg)));
subprocs = null;
break;
}
@@ -1212,7 +1234,7 @@ public class ProcedureExecutor<TEnvironment> {
}
// update the executor internal state maps
- completed.put(proc.getProcId(), newResultFromProcedure(proc));
+ completed.put(proc.getProcId(), Procedure.createProcedureInfo(proc, proc.getNonceKey()));
rollbackStack.remove(proc.getProcId());
procedures.remove(proc.getProcId());
@@ -1228,8 +1250,8 @@ public class ProcedureExecutor<TEnvironment> {
sendProcedureFinishedNotification(proc.getProcId());
}
- public Pair<ProcedureResult, Procedure> getResultOrProcedure(final long procId) {
- ProcedureResult result = completed.get(procId);
+ public Pair<ProcedureInfo, Procedure> getResultOrProcedure(final long procId) {
+ ProcedureInfo result = completed.get(procId);
Procedure proc = null;
if (result == null) {
proc = procedures.get(procId);
@@ -1239,13 +1261,4 @@ public class ProcedureExecutor<TEnvironment> {
}
return new Pair(result, proc);
}
-
- private static ProcedureResult newResultFromProcedure(final Procedure proc) {
- if (proc.isFailed()) {
- return new ProcedureResult(
- proc.getNonceKey(), proc.getStartTime(), proc.getLastUpdate(), proc.getException());
- }
- return new ProcedureResult(
- proc.getNonceKey(), proc.getStartTime(), proc.getLastUpdate(), proc.getResult());
- }
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureResult.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureResult.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureResult.java
deleted file mode 100644
index ff5407f..0000000
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureResult.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.procedure2;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.util.NonceKey;
-
-/**
- * Once a Procedure completes the ProcedureExecutor takes all the useful
- * information of the procedure (e.g. exception/result) and creates a ProcedureResult.
- * The user of the Procedure framework will get the procedure result with
- * procedureExecutor.getResult(procId)
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public class ProcedureResult {
- private final NonceKey nonceKey;
- private final RemoteProcedureException exception;
- private final long lastUpdate;
- private final long startTime;
- private final byte[] result;
-
- private long clientAckTime = -1;
-
- public ProcedureResult(
- final NonceKey nonceKey,
- final long startTime,
- final long lastUpdate,
- final RemoteProcedureException exception) {
- this(nonceKey, exception, lastUpdate, startTime, null);
- }
-
- public ProcedureResult(
- final NonceKey nonceKey,
- final long startTime,
- final long lastUpdate,
- final byte[] result) {
- this(nonceKey, null, lastUpdate, startTime, result);
- }
-
- public ProcedureResult(
- final NonceKey nonceKey,
- final RemoteProcedureException exception,
- final long lastUpdate,
- final long startTime,
- final byte[] result) {
- this.nonceKey = nonceKey;
- this.exception = exception;
- this.lastUpdate = lastUpdate;
- this.startTime = startTime;
- this.result = result;
- }
-
- public NonceKey getNonceKey() {
- return nonceKey;
- }
-
- public boolean isFailed() {
- return exception != null;
- }
-
- public RemoteProcedureException getException() {
- return exception;
- }
-
- public boolean hasResultData() {
- return result != null;
- }
-
- public byte[] getResult() {
- return result;
- }
-
- public long getStartTime() {
- return startTime;
- }
-
- public long getLastUpdate() {
- return lastUpdate;
- }
-
- public long executionTime() {
- return lastUpdate - startTime;
- }
-
- public boolean hasClientAckTime() {
- return clientAckTime > 0;
- }
-
- public long getClientAckTime() {
- return clientAckTime;
- }
-
- @InterfaceAudience.Private
- protected void setClientAckTime(final long timestamp) {
- this.clientAckTime = timestamp;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java
index 34774ed..9edaec9 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java
@@ -28,15 +28,18 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.util.Threads;
+import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
+import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.procedure2.store.NoopProcedureStore;
import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
+import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
public class ProcedureTestingUtility {
private static final Log LOG = LogFactory.getLog(ProcedureTestingUtility.class);
@@ -166,23 +169,42 @@ public class ProcedureTestingUtility {
public static <TEnv> void assertProcNotFailed(ProcedureExecutor<TEnv> procExecutor,
long procId) {
- ProcedureResult result = procExecutor.getResult(procId);
+ ProcedureInfo result = procExecutor.getResult(procId);
assertTrue("expected procedure result", result != null);
assertProcNotFailed(result);
}
- public static void assertProcNotFailed(final ProcedureResult result) {
- Exception exception = result.getException();
- String msg = exception != null ? exception.toString() : "no exception found";
+ public static void assertProcNotFailed(final ProcedureInfo result) {
+ ForeignExceptionMessage exception = result.getForeignExceptionMessage();
+ String msg = exception != null ? result.getExceptionFullMessage() : "no exception found";
assertFalse(msg, result.isFailed());
}
- public static void assertIsAbortException(final ProcedureResult result) {
- LOG.info(result.getException());
+ public static void assertIsAbortException(final ProcedureInfo result) {
assertEquals(true, result.isFailed());
- Throwable cause = result.getException().getCause();
- assertTrue("expected abort exception, got "+ cause,
- cause instanceof ProcedureAbortedException);
+ LOG.info(result.getExceptionFullMessage());
+ Throwable cause = getExceptionCause(result);
+ assertTrue("expected abort exception, got "+ cause, cause instanceof ProcedureAbortedException);
+ }
+
+ public static void assertIsTimeoutException(final ProcedureInfo result) {
+ assertEquals(true, result.isFailed());
+ LOG.info(result.getExceptionFullMessage());
+ Throwable cause = getExceptionCause(result);
+ assertTrue("expected TimeoutIOException, got " + cause, cause instanceof TimeoutIOException);
+ }
+
+ public static void assertIsIllegalArgumentException(final ProcedureInfo result) {
+ assertEquals(true, result.isFailed());
+ LOG.info(result.getExceptionFullMessage());
+ Throwable cause = ProcedureTestingUtility.getExceptionCause(result);
+ assertTrue("expected IllegalArgumentIOException, got " + cause,
+ cause instanceof IllegalArgumentIOException);
+ }
+
+ public static Throwable getExceptionCause(final ProcedureInfo procInfo) {
+ assert procInfo.getForeignExceptionMessage() != null;
+ return RemoteProcedureException.fromProto(procInfo.getForeignExceptionMessage()).getCause();
}
public static class TestProcedure extends Procedure<Void> {
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java
index 9037e3c..b0cc43d 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java
@@ -21,29 +21,25 @@ package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import java.util.concurrent.TimeoutException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-
import org.junit.After;
import org.junit.Before;
-import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
@Category({MasterTests.class, SmallTests.class})
public class TestProcedureExecution {
@@ -81,7 +77,7 @@ public class TestProcedureExecution {
fs.delete(logDir, true);
}
- private static class TestProcedureException extends Exception {
+ private static class TestProcedureException extends IOException {
public TestProcedureException(String msg) { super(msg); }
}
@@ -142,11 +138,9 @@ public class TestProcedureExecution {
// subProc1 has a "null" subprocedure which is catched as InvalidArgument
// failed state with 2 execute and 2 rollback
LOG.info(state);
- ProcedureResult result = procExecutor.getResult(rootId);
- LOG.info(result.getException());
+ ProcedureInfo result = procExecutor.getResult(rootId);
assertTrue(state.toString(), result.isFailed());
- assertTrue(result.getException().toString(),
- result.getException().getCause() instanceof IllegalArgumentException);
+ ProcedureTestingUtility.assertIsIllegalArgumentException(result);
assertEquals(state.toString(), 4, state.size());
assertEquals("rootProc-execute", state.get(0));
@@ -165,7 +159,7 @@ public class TestProcedureExecution {
// successful state, with 3 execute
LOG.info(state);
- ProcedureResult result = procExecutor.getResult(rootId);
+ ProcedureInfo result = procExecutor.getResult(rootId);
ProcedureTestingUtility.assertProcNotFailed(result);
assertEquals(state.toString(), 3, state.size());
}
@@ -181,11 +175,12 @@ public class TestProcedureExecution {
// the 3rd proc fail, rollback after 2 successful execution
LOG.info(state);
- ProcedureResult result = procExecutor.getResult(rootId);
- LOG.info(result.getException());
+ ProcedureInfo result = procExecutor.getResult(rootId);
assertTrue(state.toString(), result.isFailed());
- assertTrue(result.getException().toString(),
- result.getException().getCause() instanceof TestProcedureException);
+ LOG.info(result.getExceptionFullMessage());
+ Throwable cause = ProcedureTestingUtility.getExceptionCause(result);
+ assertTrue("expected TestProcedureException, got " + cause,
+ cause instanceof TestProcedureException);
assertEquals(state.toString(), 6, state.size());
assertEquals("rootProc-execute", state.get(0));
@@ -224,11 +219,12 @@ public class TestProcedureExecution {
public void testRollbackRetriableFailure() {
long procId = ProcedureTestingUtility.submitAndWait(procExecutor, new TestFaultyRollback());
- ProcedureResult result = procExecutor.getResult(procId);
- LOG.info(result.getException());
+ ProcedureInfo result = procExecutor.getResult(procId);
assertTrue("expected a failure", result.isFailed());
- assertTrue(result.getException().toString(),
- result.getException().getCause() instanceof TestProcedureException);
+ LOG.info(result.getExceptionFullMessage());
+ Throwable cause = ProcedureTestingUtility.getExceptionCause(result);
+ assertTrue("expected TestProcedureException, got " + cause,
+ cause instanceof TestProcedureException);
}
public static class TestWaitingProcedure extends SequentialProcedure<Void> {
@@ -307,11 +303,9 @@ public class TestProcedureExecution {
long execTime = EnvironmentEdgeManager.currentTime() - startTime;
LOG.info(state);
assertTrue("we didn't wait enough execTime=" + execTime, execTime >= PROC_TIMEOUT_MSEC);
- ProcedureResult result = procExecutor.getResult(rootId);
- LOG.info(result.getException());
+ ProcedureInfo result = procExecutor.getResult(rootId);
assertTrue(state.toString(), result.isFailed());
- assertTrue(result.getException().toString(),
- result.getException().getCause() instanceof TimeoutException);
+ ProcedureTestingUtility.assertIsTimeoutException(result);
assertEquals(state.toString(), 2, state.size());
assertEquals("wproc-execute", state.get(0));
assertEquals("wproc-rollback", state.get(1));
@@ -324,11 +318,9 @@ public class TestProcedureExecution {
proc.setTimeout(2500);
long rootId = ProcedureTestingUtility.submitAndWait(procExecutor, proc);
LOG.info(state);
- ProcedureResult result = procExecutor.getResult(rootId);
- LOG.info(result.getException());
+ ProcedureInfo result = procExecutor.getResult(rootId);
assertTrue(state.toString(), result.isFailed());
- assertTrue(result.getException().toString(),
- result.getException().getCause() instanceof TimeoutException);
+ ProcedureTestingUtility.assertIsTimeoutException(result);
assertEquals(state.toString(), 4, state.size());
assertEquals("wproc-execute", state.get(0));
assertEquals("wproc-child-execute", state.get(1));
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java
index 9346ae8..534e56e 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -197,7 +198,7 @@ public class TestProcedureRecovery {
long restartTs = EnvironmentEdgeManager.currentTime();
restart();
waitProcedure(procId);
- ProcedureResult result = procExecutor.getResult(procId);
+ ProcedureInfo result = procExecutor.getResult(procId);
assertTrue(result.getLastUpdate() > restartTs);
ProcedureTestingUtility.assertProcNotFailed(result);
assertEquals(1, Bytes.toInt(result.getResult()));
@@ -236,7 +237,7 @@ public class TestProcedureRecovery {
assertTrue(procExecutor.isRunning());
// The procedure is completed
- ProcedureResult result = procExecutor.getResult(procId);
+ ProcedureInfo result = procExecutor.getResult(procId);
ProcedureTestingUtility.assertProcNotFailed(result);
}
@@ -283,7 +284,7 @@ public class TestProcedureRecovery {
waitProcedure(procId);
// The procedure is completed
- ProcedureResult result = procExecutor.getResult(procId);
+ ProcedureInfo result = procExecutor.getResult(procId);
ProcedureTestingUtility.assertIsAbortException(result);
}
@@ -304,7 +305,7 @@ public class TestProcedureRecovery {
long procId2 = ProcedureTestingUtility.submitAndWait(procExecutor, proc2, nonceGroup, nonce);
assertTrue(procId == procId2);
- ProcedureResult result = procExecutor.getResult(procId2);
+ ProcedureInfo result = procExecutor.getResult(procId2);
ProcedureTestingUtility.assertProcNotFailed(result);
}
@@ -451,7 +452,7 @@ public class TestProcedureRecovery {
assertTrue(procExecutor.isRunning());
// The procedure is completed
- ProcedureResult result = procExecutor.getResult(procId);
+ ProcedureInfo result = procExecutor.getResult(procId);
ProcedureTestingUtility.assertProcNotFailed(result);
assertEquals(15, Bytes.toInt(result.getResult()));
}
@@ -505,7 +506,7 @@ public class TestProcedureRecovery {
assertTrue(procExecutor.isRunning());
// The procedure is completed
- ProcedureResult result = procExecutor.getResult(procId);
+ ProcedureInfo result = procExecutor.getResult(procId);
ProcedureTestingUtility.assertIsAbortException(result);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java
index b1722ab..a7f5d9f 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java
@@ -35,14 +35,11 @@ import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.junit.After;
-import org.junit.Assert;
import org.junit.Before;
-import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
[2/3] hbase git commit: HBASE-14107 Administrative Task: Provide an
API to List all procedures (Stephen Yuan Jiang)
Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/5b7894f9/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
index 9c6b3df..14d7221 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
@@ -48641,6 +48641,1065 @@ public final class MasterProtos {
// @@protoc_insertion_point(class_scope:hbase.pb.AbortProcedureResponse)
}
+ public interface ListProceduresRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ /**
+ * Protobuf type {@code hbase.pb.ListProceduresRequest}
+ */
+ public static final class ListProceduresRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements ListProceduresRequestOrBuilder {
+ // Use ListProceduresRequest.newBuilder() to construct.
+ private ListProceduresRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private ListProceduresRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final ListProceduresRequest defaultInstance;
+ public static ListProceduresRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ListProceduresRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ListProceduresRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<ListProceduresRequest> PARSER =
+ new com.google.protobuf.AbstractParser<ListProceduresRequest>() {
+ public ListProceduresRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new ListProceduresRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<ListProceduresRequest> getParserForType() {
+ return PARSER;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.ListProceduresRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.ListProceduresRequest)
+ }
+
+ static {
+ defaultInstance = new ListProceduresRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.ListProceduresRequest)
+ }
+
+ public interface ListProceduresResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated .hbase.pb.Procedure procedure = 1;
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure>
+ getProcedureList();
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure getProcedure(int index);
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ int getProcedureCount();
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
+ getProcedureOrBuilderList();
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder(
+ int index);
+ }
+ /**
+ * Protobuf type {@code hbase.pb.ListProceduresResponse}
+ */
+ public static final class ListProceduresResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements ListProceduresResponseOrBuilder {
+ // Use ListProceduresResponse.newBuilder() to construct.
+ private ListProceduresResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private ListProceduresResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final ListProceduresResponse defaultInstance;
+ public static ListProceduresResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ListProceduresResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ListProceduresResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ procedure_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure>();
+ mutable_bitField0_ |= 0x00000001;
+ }
+ procedure_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.PARSER, extensionRegistry));
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ procedure_ = java.util.Collections.unmodifiableList(procedure_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<ListProceduresResponse> PARSER =
+ new com.google.protobuf.AbstractParser<ListProceduresResponse>() {
+ public ListProceduresResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new ListProceduresResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<ListProceduresResponse> getParserForType() {
+ return PARSER;
+ }
+
+ // repeated .hbase.pb.Procedure procedure = 1;
+ public static final int PROCEDURE_FIELD_NUMBER = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure> procedure_;
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure> getProcedureList() {
+ return procedure_;
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
+ getProcedureOrBuilderList() {
+ return procedure_;
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public int getProcedureCount() {
+ return procedure_.size();
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure getProcedure(int index) {
+ return procedure_.get(index);
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder(
+ int index) {
+ return procedure_.get(index);
+ }
+
+ private void initFields() {
+ procedure_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ for (int i = 0; i < getProcedureCount(); i++) {
+ if (!getProcedure(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < procedure_.size(); i++) {
+ output.writeMessage(1, procedure_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ for (int i = 0; i < procedure_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, procedure_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse) obj;
+
+ boolean result = true;
+ result = result && getProcedureList()
+ .equals(other.getProcedureList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getProcedureCount() > 0) {
+ hash = (37 * hash) + PROCEDURE_FIELD_NUMBER;
+ hash = (53 * hash) + getProcedureList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.ListProceduresResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getProcedureFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (procedureBuilder_ == null) {
+ procedure_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ procedureBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListProceduresResponse_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse(this);
+ int from_bitField0_ = bitField0_;
+ if (procedureBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ procedure_ = java.util.Collections.unmodifiableList(procedure_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.procedure_ = procedure_;
+ } else {
+ result.procedure_ = procedureBuilder_.build();
+ }
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance()) return this;
+ if (procedureBuilder_ == null) {
+ if (!other.procedure_.isEmpty()) {
+ if (procedure_.isEmpty()) {
+ procedure_ = other.procedure_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureProcedureIsMutable();
+ procedure_.addAll(other.procedure_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.procedure_.isEmpty()) {
+ if (procedureBuilder_.isEmpty()) {
+ procedureBuilder_.dispose();
+ procedureBuilder_ = null;
+ procedure_ = other.procedure_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ procedureBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getProcedureFieldBuilder() : null;
+ } else {
+ procedureBuilder_.addAllMessages(other.procedure_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ for (int i = 0; i < getProcedureCount(); i++) {
+ if (!getProcedure(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // repeated .hbase.pb.Procedure procedure = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure> procedure_ =
+ java.util.Collections.emptyList();
+ private void ensureProcedureIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ procedure_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure>(procedure_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder> procedureBuilder_;
+
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure> getProcedureList() {
+ if (procedureBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(procedure_);
+ } else {
+ return procedureBuilder_.getMessageList();
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public int getProcedureCount() {
+ if (procedureBuilder_ == null) {
+ return procedure_.size();
+ } else {
+ return procedureBuilder_.getCount();
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure getProcedure(int index) {
+ if (procedureBuilder_ == null) {
+ return procedure_.get(index);
+ } else {
+ return procedureBuilder_.getMessage(index);
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public Builder setProcedure(
+ int index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure value) {
+ if (procedureBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureProcedureIsMutable();
+ procedure_.set(index, value);
+ onChanged();
+ } else {
+ procedureBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public Builder setProcedure(
+ int index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) {
+ if (procedureBuilder_ == null) {
+ ensureProcedureIsMutable();
+ procedure_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ procedureBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public Builder addProcedure(org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure value) {
+ if (procedureBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureProcedureIsMutable();
+ procedure_.add(value);
+ onChanged();
+ } else {
+ procedureBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public Builder addProcedure(
+ int index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure value) {
+ if (procedureBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureProcedureIsMutable();
+ procedure_.add(index, value);
+ onChanged();
+ } else {
+ procedureBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public Builder addProcedure(
+ org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) {
+ if (procedureBuilder_ == null) {
+ ensureProcedureIsMutable();
+ procedure_.add(builderForValue.build());
+ onChanged();
+ } else {
+ procedureBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public Builder addProcedure(
+ int index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) {
+ if (procedureBuilder_ == null) {
+ ensureProcedureIsMutable();
+ procedure_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ procedureBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public Builder addAllProcedure(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure> values) {
+ if (procedureBuilder_ == null) {
+ ensureProcedureIsMutable();
+ super.addAll(values, procedure_);
+ onChanged();
+ } else {
+ procedureBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public Builder clearProcedure() {
+ if (procedureBuilder_ == null) {
+ procedure_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ } else {
+ procedureBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public Builder removeProcedure(int index) {
+ if (procedureBuilder_ == null) {
+ ensureProcedureIsMutable();
+ procedure_.remove(index);
+ onChanged();
+ } else {
+ procedureBuilder_.remove(index);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder getProcedureBuilder(
+ int index) {
+ return getProcedureFieldBuilder().getBuilder(index);
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder(
+ int index) {
+ if (procedureBuilder_ == null) {
+ return procedure_.get(index); } else {
+ return procedureBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
+ getProcedureOrBuilderList() {
+ if (procedureBuilder_ != null) {
+ return procedureBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(procedure_);
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder addProcedureBuilder() {
+ return getProcedureFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance());
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder addProcedureBuilder(
+ int index) {
+ return getProcedureFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance());
+ }
+ /**
+ * <code>repeated .hbase.pb.Procedure procedure = 1;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder>
+ getProcedureBuilderList() {
+ return getProcedureFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
+ getProcedureFieldBuilder() {
+ if (procedureBuilder_ == null) {
+ procedureBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>(
+ procedure_,
+ ((bitField0_ & 0x00000001) == 0x00000001),
+ getParentForChildren(),
+ isClean());
+ procedure_ = null;
+ }
+ return procedureBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.ListProceduresResponse)
+ }
+
+ static {
+ defaultInstance = new ListProceduresResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.ListProceduresResponse)
+ }
+
public interface SetQuotaRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
@@ -53599,6 +54658,18 @@ public final class MasterProtos {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse> done);
+ /**
+ * <code>rpc ListProcedures(.hbase.pb.ListProceduresRequest) returns (.hbase.pb.ListProceduresResponse);</code>
+ *
+ * <pre>
+ ** returns a list of procedures
+ * </pre>
+ */
+ public abstract void listProcedures(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse> done);
+
}
public static com.google.protobuf.Service newReflectiveService(
@@ -54012,6 +55083,14 @@ public final class MasterProtos {
impl.abortProcedure(controller, request, done);
}
+ @java.lang.Override
+ public void listProcedures(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse> done) {
+ impl.listProcedures(controller, request, done);
+ }
+
};
}
@@ -54136,6 +55215,8 @@ public final class MasterProtos {
return impl.getSecurityCapabilities(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)request);
case 50:
return impl.abortProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest)request);
+ case 51:
+ return impl.listProcedures(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -54252,6 +55333,8 @@ public final class MasterProtos {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
case 50:
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance();
+ case 51:
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -54368,6 +55451,8 @@ public final class MasterProtos {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
case 50:
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance();
+ case 51:
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -55011,6 +56096,18 @@ public final class MasterProtos {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse> done);
+ /**
+ * <code>rpc ListProcedures(.hbase.pb.ListProceduresRequest) returns (.hbase.pb.ListProceduresResponse);</code>
+ *
+ * <pre>
+ ** returns a list of procedures
+ * </pre>
+ */
+ public abstract void listProcedures(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse> done);
+
public static final
com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
@@ -55288,6 +56385,11 @@ public final class MasterProtos {
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse>specializeCallback(
done));
return;
+ case 51:
+ this.listProcedures(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse>specializeCallback(
+ done));
+ return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -55404,6 +56506,8 @@ public final class MasterProtos {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
case 50:
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance();
+ case 51:
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -55520,6 +56624,8 @@ public final class MasterProtos {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
case 50:
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance();
+ case 51:
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -56305,6 +57411,21 @@ public final class MasterProtos {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.class,
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance()));
}
+
+ public void listProcedures(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(51),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance()));
+ }
}
public static BlockingInterface newBlockingStub(
@@ -56567,6 +57688,11 @@ public final class MasterProtos {
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest request)
throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse listProcedures(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request)
+ throws com.google.protobuf.ServiceException;
}
private static final class BlockingStub implements BlockingInterface {
@@ -57187,6 +58313,18 @@ public final class MasterProtos {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance());
}
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse listProcedures(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(51),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance());
+ }
+
}
// @@protoc_insertion_point(class_scope:hbase.pb.MasterService)
@@ -57643,6 +58781,16 @@ public final class MasterProtos {
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_AbortProcedureResponse_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_ListProceduresRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_ListProceduresRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_ListProceduresResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_ListProceduresResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_SetQuotaRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
@@ -57688,294 +58836,299 @@ public final class MasterProtos {
java.lang.String[] descriptorData = {
"\n\014Master.proto\022\010hbase.pb\032\013HBase.proto\032\014C" +
"lient.proto\032\023ClusterStatus.proto\032\023ErrorH" +
- "andling.proto\032\013Quota.proto\"\234\001\n\020AddColumn" +
- "Request\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.T" +
- "ableName\0225\n\017column_families\030\002 \002(\0132\034.hbas" +
- "e.pb.ColumnFamilySchema\022\026\n\013nonce_group\030\003" +
- " \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"\023\n\021AddColumnRe" +
- "sponse\"}\n\023DeleteColumnRequest\022\'\n\ntable_n" +
- "ame\030\001 \002(\0132\023.hbase.pb.TableName\022\023\n\013column" +
- "_name\030\002 \002(\014\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005n",
- "once\030\004 \001(\004:\0010\"\026\n\024DeleteColumnResponse\"\237\001" +
- "\n\023ModifyColumnRequest\022\'\n\ntable_name\030\001 \002(" +
- "\0132\023.hbase.pb.TableName\0225\n\017column_familie" +
- "s\030\002 \002(\0132\034.hbase.pb.ColumnFamilySchema\022\026\n" +
- "\013nonce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"" +
- "\026\n\024ModifyColumnResponse\"n\n\021MoveRegionReq" +
- "uest\022)\n\006region\030\001 \002(\0132\031.hbase.pb.RegionSp" +
- "ecifier\022.\n\020dest_server_name\030\002 \001(\0132\024.hbas" +
- "e.pb.ServerName\"\024\n\022MoveRegionResponse\"\222\001" +
- "\n\035DispatchMergingRegionsRequest\022+\n\010regio",
- "n_a\030\001 \002(\0132\031.hbase.pb.RegionSpecifier\022+\n\010" +
- "region_b\030\002 \002(\0132\031.hbase.pb.RegionSpecifie" +
- "r\022\027\n\010forcible\030\003 \001(\010:\005false\" \n\036DispatchMe" +
- "rgingRegionsResponse\"@\n\023AssignRegionRequ" +
- "est\022)\n\006region\030\001 \002(\0132\031.hbase.pb.RegionSpe" +
- "cifier\"\026\n\024AssignRegionResponse\"X\n\025Unassi" +
- "gnRegionRequest\022)\n\006region\030\001 \002(\0132\031.hbase." +
- "pb.RegionSpecifier\022\024\n\005force\030\002 \001(\010:\005false" +
- "\"\030\n\026UnassignRegionResponse\"A\n\024OfflineReg" +
- "ionRequest\022)\n\006region\030\001 \002(\0132\031.hbase.pb.Re",
- "gionSpecifier\"\027\n\025OfflineRegionResponse\"\177" +
- "\n\022CreateTableRequest\022+\n\014table_schema\030\001 \002" +
- "(\0132\025.hbase.pb.TableSchema\022\022\n\nsplit_keys\030" +
- "\002 \003(\014\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004" +
- " \001(\004:\0010\"&\n\023CreateTableResponse\022\017\n\007proc_i" +
- "d\030\001 \001(\004\"g\n\022DeleteTableRequest\022\'\n\ntable_n" +
- "ame\030\001 \002(\0132\023.hbase.pb.TableName\022\026\n\013nonce_" +
- "group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"&\n\023Dele" +
- "teTableResponse\022\017\n\007proc_id\030\001 \001(\004\"\207\001\n\024Tru" +
- "ncateTableRequest\022&\n\ttableName\030\001 \002(\0132\023.h",
- "base.pb.TableName\022\035\n\016preserveSplits\030\002 \001(" +
- "\010:\005false\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005nonc" +
- "e\030\004 \001(\004:\0010\"(\n\025TruncateTableResponse\022\017\n\007p" +
- "roc_id\030\001 \001(\004\"g\n\022EnableTableRequest\022\'\n\nta" +
- "ble_name\030\001 \002(\0132\023.hbase.pb.TableName\022\026\n\013n" +
- "once_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"&\n" +
- "\023EnableTableResponse\022\017\n\007proc_id\030\001 \001(\004\"h\n" +
- "\023DisableTableRequest\022\'\n\ntable_name\030\001 \002(\013" +
- "2\023.hbase.pb.TableName\022\026\n\013nonce_group\030\002 \001" +
- "(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"\'\n\024DisableTableR",
- "esponse\022\017\n\007proc_id\030\001 \001(\004\"\224\001\n\022ModifyTable" +
- "Request\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.T" +
- "ableName\022+\n\014table_schema\030\002 \002(\0132\025.hbase.p" +
- "b.TableSchema\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n" +
- "\005nonce\030\004 \001(\004:\0010\"&\n\023ModifyTableResponse\022\017" +
- "\n\007proc_id\030\001 \001(\004\"~\n\026CreateNamespaceReques" +
- "t\022:\n\023namespaceDescriptor\030\001 \002(\0132\035.hbase.p" +
- "b.NamespaceDescriptor\022\026\n\013nonce_group\030\002 \001" +
- "(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"\031\n\027CreateNamespa" +
- "ceResponse\"Y\n\026DeleteNamespaceRequest\022\025\n\r",
- "namespaceName\030\001 \002(\t\022\026\n\013nonce_group\030\002 \001(\004" +
- ":\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"\031\n\027DeleteNamespace" +
- "Response\"~\n\026ModifyNamespaceRequest\022:\n\023na" +
- "mespaceDescriptor\030\001 \002(\0132\035.hbase.pb.Names" +
- "paceDescriptor\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020" +
- "\n\005nonce\030\003 \001(\004:\0010\"\031\n\027ModifyNamespaceRespo" +
- "nse\"6\n\035GetNamespaceDescriptorRequest\022\025\n\r" +
- "namespaceName\030\001 \002(\t\"\\\n\036GetNamespaceDescr" +
- "iptorResponse\022:\n\023namespaceDescriptor\030\001 \002" +
- "(\0132\035.hbase.pb.NamespaceDescriptor\"!\n\037Lis",
- "tNamespaceDescriptorsRequest\"^\n ListName" +
- "spaceDescriptorsResponse\022:\n\023namespaceDes" +
- "criptor\030\001 \003(\0132\035.hbase.pb.NamespaceDescri" +
- "ptor\"?\n&ListTableDescriptorsByNamespaceR" +
- "equest\022\025\n\rnamespaceName\030\001 \002(\t\"U\n\'ListTab" +
- "leDescriptorsByNamespaceResponse\022*\n\013tabl" +
- "eSchema\030\001 \003(\0132\025.hbase.pb.TableSchema\"9\n " +
- "ListTableNamesByNamespaceRequest\022\025\n\rname" +
- "spaceName\030\001 \002(\t\"K\n!ListTableNamesByNames" +
- "paceResponse\022&\n\ttableName\030\001 \003(\0132\023.hbase.",
- "pb.TableName\"\021\n\017ShutdownRequest\"\022\n\020Shutd" +
- "ownResponse\"\023\n\021StopMasterRequest\"\024\n\022Stop" +
- "MasterResponse\"\037\n\016BalanceRequest\022\r\n\005forc" +
- "e\030\001 \001(\010\"\'\n\017BalanceResponse\022\024\n\014balancer_r" +
- "an\030\001 \002(\010\"<\n\031SetBalancerRunningRequest\022\n\n" +
- "\002on\030\001 \002(\010\022\023\n\013synchronous\030\002 \001(\010\"8\n\032SetBal" +
- "ancerRunningResponse\022\032\n\022prev_balance_val" +
- "ue\030\001 \001(\010\"\032\n\030IsBalancerEnabledRequest\",\n\031" +
- "IsBalancerEnabledResponse\022\017\n\007enabled\030\001 \002" +
- "(\010\"\027\n\025RunCatalogScanRequest\"-\n\026RunCatalo",
- "gScanResponse\022\023\n\013scan_result\030\001 \001(\005\"-\n\033En" +
- "ableCatalogJanitorRequest\022\016\n\006enable\030\001 \002(" +
- "\010\"2\n\034EnableCatalogJanitorResponse\022\022\n\npre" +
- "v_value\030\001 \001(\010\" \n\036IsCatalogJanitorEnabled" +
- "Request\"0\n\037IsCatalogJanitorEnabledRespon" +
- "se\022\r\n\005value\030\001 \002(\010\"B\n\017SnapshotRequest\022/\n\010" +
- "snapshot\030\001 \002(\0132\035.hbase.pb.SnapshotDescri" +
- "ption\",\n\020SnapshotResponse\022\030\n\020expected_ti" +
- "meout\030\001 \002(\003\"\036\n\034GetCompletedSnapshotsRequ" +
- "est\"Q\n\035GetCompletedSnapshotsResponse\0220\n\t",
- "snapshots\030\001 \003(\0132\035.hbase.pb.SnapshotDescr" +
- "iption\"H\n\025DeleteSnapshotRequest\022/\n\010snaps" +
- "hot\030\001 \002(\0132\035.hbase.pb.SnapshotDescription" +
- "\"\030\n\026DeleteSnapshotResponse\"I\n\026RestoreSna" +
- "pshotRequest\022/\n\010snapshot\030\001 \002(\0132\035.hbase.p" +
- "b.SnapshotDescription\"\031\n\027RestoreSnapshot" +
- "Response\"H\n\025IsSnapshotDoneRequest\022/\n\010sna" +
- "pshot\030\001 \001(\0132\035.hbase.pb.SnapshotDescripti" +
- "on\"^\n\026IsSnapshotDoneResponse\022\023\n\004done\030\001 \001" +
- "(\010:\005false\022/\n\010snapshot\030\002 \001(\0132\035.hbase.pb.S",
- "napshotDescription\"O\n\034IsRestoreSnapshotD" +
+ "andling.proto\032\017Procedure.proto\032\013Quota.pr" +
+ "oto\"\234\001\n\020AddColumnRequest\022\'\n\ntable_name\030\001" +
+ " \002(\0132\023.hbase.pb.TableName\0225\n\017column_fami" +
+ "lies\030\002 \002(\0132\034.hbase.pb.ColumnFamilySchema" +
+ "\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:" +
+ "\0010\"\023\n\021AddColumnResponse\"}\n\023DeleteColumnR" +
+ "equest\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.Ta" +
+ "bleName\022\023\n\013column_name\030\002 \002(\014\022\026\n\013nonce_gr",
+ "oup\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"\026\n\024Delete" +
+ "ColumnResponse\"\237\001\n\023ModifyColumnRequest\022\'" +
+ "\n\ntable_name\030\001 \002(\0132\023.hbase.pb.TableName\022" +
+ "5\n\017column_families\030\002 \002(\0132\034.hbase.pb.Colu" +
+ "mnFamilySchema\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020" +
+ "\n\005nonce\030\004 \001(\004:\0010\"\026\n\024ModifyColumnResponse" +
+ "\"n\n\021MoveRegionRequest\022)\n\006region\030\001 \002(\0132\031." +
+ "hbase.pb.RegionSpecifier\022.\n\020dest_server_" +
+ "name\030\002 \001(\0132\024.hbase.pb.ServerName\"\024\n\022Move" +
+ "RegionResponse\"\222\001\n\035DispatchMergingRegion",
+ "sRequest\022+\n\010region_a\030\001 \002(\0132\031.hbase.pb.Re" +
+ "gionSpecifier\022+\n\010region_b\030\002 \002(\0132\031.hbase." +
+ "pb.RegionSpecifier\022\027\n\010forcible\030\003 \001(\010:\005fa" +
+ "lse\" \n\036DispatchMergingRegionsResponse\"@\n" +
+ "\023AssignRegionRequest\022)\n\006region\030\001 \002(\0132\031.h" +
+ "base.pb.RegionSpecifier\"\026\n\024AssignRegionR" +
+ "esponse\"X\n\025UnassignRegionRequest\022)\n\006regi" +
+ "on\030\001 \002(\0132\031.hbase.pb.RegionSpecifier\022\024\n\005f" +
+ "orce\030\002 \001(\010:\005false\"\030\n\026UnassignRegionRespo" +
+ "nse\"A\n\024OfflineRegionRequest\022)\n\006region\030\001 ",
+ "\002(\0132\031.hbase.pb.RegionSpecifier\"\027\n\025Offlin" +
+ "eRegionResponse\"\177\n\022CreateTableRequest\022+\n" +
+ "\014table_schema\030\001 \002(\0132\025.hbase.pb.TableSche" +
+ "ma\022\022\n\nsplit_keys\030\002 \003(\014\022\026\n\013nonce_group\030\003 " +
+ "\001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"&\n\023CreateTableR" +
+ "esponse\022\017\n\007proc_id\030\001 \001(\004\"g\n\022DeleteTableR" +
+ "equest\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.Ta" +
+ "bleName\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020\n\005nonce" +
+ "\030\003 \001(\004:\0010\"&\n\023DeleteTableResponse\022\017\n\007proc" +
+ "_id\030\001 \001(\004\"\207\001\n\024TruncateTableRequest\022&\n\tta",
+ "bleName\030\001 \002(\0132\023.hbase.pb.TableName\022\035\n\016pr" +
+ "eserveSplits\030\002 \001(\010:\005false\022\026\n\013nonce_group" +
+ "\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"(\n\025TruncateT" +
+ "ableResponse\022\017\n\007proc_id\030\001 \001(\004\"g\n\022EnableT" +
+ "ableRequest\022\'\n\ntable_name\030\001 \002(\0132\023.hbase." +
+ "pb.TableName\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020\n\005" +
+ "nonce\030\003 \001(\004:\0010\"&\n\023EnableTableResponse\022\017\n" +
+ "\007proc_id\030\001 \001(\004\"h\n\023DisableTableRequest\022\'\n" +
+ "\ntable_name\030\001 \002(\0132\023.hbase.pb.TableName\022\026" +
+ "\n\013nonce_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010",
+ "\"\'\n\024DisableTableResponse\022\017\n\007proc_id\030\001 \001(" +
+ "\004\"\224\001\n\022ModifyTableRequest\022\'\n\ntable_name\030\001" +
+ " \002(\0132\023.hbase.pb.TableName\022+\n\014table_schem" +
+ "a\030\002 \002(\0132\025.hbase.pb.TableSchema\022\026\n\013nonce_" +
+ "group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"&\n\023Modi" +
+ "fyTableResponse\022\017\n\007proc_id\030\001 \001(\004\"~\n\026Crea" +
+ "teNamespaceRequest\022:\n\023namespaceDescripto" +
+ "r\030\001 \002(\0132\035.hbase.pb.NamespaceDescriptor\022\026" +
+ "\n\013nonce_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010" +
+ "\"\031\n\027CreateNamespaceResponse\"Y\n\026DeleteNam",
+ "espaceRequest\022\025\n\rnamespaceName\030\001 \002(\t\022\026\n\013" +
+ "nonce_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"\031" +
+ "\n\027DeleteNamespaceResponse\"~\n\026ModifyNames" +
+ "paceRequest\022:\n\023namespaceDescriptor\030\001 \002(\013" +
+ "2\035.hbase.pb.NamespaceDescriptor\022\026\n\013nonce" +
+ "_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"\031\n\027Mod" +
+ "ifyNamespaceResponse\"6\n\035GetNamespaceDesc" +
+ "riptorRequest\022\025\n\rnamespaceName\030\001 \002(\t\"\\\n\036" +
+ "GetNamespaceDescriptorResponse\022:\n\023namesp" +
+ "aceDescriptor\030\001 \002(\0132\035.hbase.pb.Namespace",
+ "Descriptor\"!\n\037ListNamespaceDescriptorsRe" +
+ "quest\"^\n ListNamespaceDescriptorsRespons" +
+ "e\022:\n\023namespaceDescriptor\030\001 \003(\0132\035.hbase.p" +
+ "b.NamespaceDescriptor\"?\n&ListTableDescri" +
+ "ptorsByNamespaceRequest\022\025\n\rnamespaceName" +
+ "\030\001 \002(\t\"U\n\'ListTableDescriptorsByNamespac" +
+ "eResponse\022*\n\013tableSchema\030\001 \003(\0132\025.hbase.p" +
+ "b.TableSchema\"9\n ListTableNamesByNamespa" +
+ "ceRequest\022\025\n\rnamespaceName\030\001 \002(\t\"K\n!List" +
+ "TableNamesByNamespaceResponse\022&\n\ttableNa",
+ "me\030\001 \003(\0132\023.hbase.pb.TableName\"\021\n\017Shutdow" +
+ "nRequest\"\022\n\020ShutdownResponse\"\023\n\021StopMast" +
+ "erRequest\"\024\n\022StopMasterResponse\"\037\n\016Balan" +
+ "ceRequest\022\r\n\005force\030\001 \001(\010\"\'\n\017BalanceRespo" +
+ "nse\022\024\n\014balancer_ran\030\001 \002(\010\"<\n\031SetBalancer" +
+ "RunningRequest\022\n\n\002on\030\001 \002(\010\022\023\n\013synchronou" +
+ "s\030\002 \001(\010\"8\n\032SetBalancerRunningResponse\022\032\n" +
+ "\022prev_balance_value\030\001 \001(\010\"\032\n\030IsBalancerE" +
+ "nabledRequest\",\n\031IsBalancerEnabledRespon" +
+ "se\022\017\n\007enabled\030\001 \002(\010\"\027\n\025RunCatalogScanReq",
+ "uest\"-\n\026RunCatalogScanResponse\022\023\n\013scan_r" +
+ "esult\030\001 \001(\005\"-\n\033EnableCatalogJanitorReque" +
+ "st\022\016\n\006enable\030\001 \002(\010\"2\n\034EnableCatalogJanit" +
+ "orResponse\022\022\n\nprev_value\030\001 \001(\010\" \n\036IsCata" +
+ "logJanitorEnabledRequest\"0\n\037IsCatalogJan" +
+ "itorEnabledResponse\022\r\n\005value\030\001 \002(\010\"B\n\017Sn" +
+ "apshotRequest\022/\n\010snapshot\030\001 \002(\0132\035.hbase." +
+ "pb.SnapshotDescription\",\n\020SnapshotRespon" +
+ "se\022\030\n\020expected_timeout\030\001 \002(\003\"\036\n\034GetCompl" +
+ "etedSnapshotsRequest\"Q\n\035GetCompletedSnap",
+ "shotsResponse\0220\n\tsnapshots\030\001 \003(\0132\035.hbase" +
+ ".pb.SnapshotDescription\"H\n\025DeleteSnapsho" +
+ "tRequest\022/\n\010snapshot\030\001 \002(\0132\035.hbase.pb.Sn" +
+ "apshotDescription\"\030\n\026DeleteSnapshotRespo" +
+ "nse\"I\n\026RestoreSnapshotRequest\022/\n\010snapsho" +
+ "t\030\001 \002(\0132\035.hbase.pb.SnapshotDescription\"\031" +
+ "\n\027RestoreSnapshotResponse\"H\n\025IsSnapshotD" +
"oneRequest\022/\n\010snapshot\030\001 \001(\0132\035.hbase.pb." +
- "SnapshotDescription\"4\n\035IsRestoreSnapshot" +
- "DoneResponse\022\023\n\004done\030\001 \001(\010:\005false\"F\n\033Get" +
- "SchemaAlterStatusRequest\022\'\n\ntable_name\030\001" +
- " \002(\0132\023.hbase.pb.TableName\"T\n\034GetSchemaAl" +
- "terStatusResponse\022\035\n\025yet_to_update_regio" +
- "ns\030\001 \001(\r\022\025\n\rtotal_regions\030\002 \001(\r\"\213\001\n\032GetT" +
- "ableDescriptorsRequest\022(\n\013table_names\030\001 " +
- "\003(\0132\023.hbase.pb.TableName\022\r\n\005regex\030\002 \001(\t\022",
- "!\n\022include_sys_tables\030\003 \001(\010:\005false\022\021\n\tna" +
- "mespace\030\004 \001(\t\"J\n\033GetTableDescriptorsResp" +
- "onse\022+\n\014table_schema\030\001 \003(\0132\025.hbase.pb.Ta" +
- "bleSchema\"[\n\024GetTableNamesRequest\022\r\n\005reg" +
- "ex\030\001 \001(\t\022!\n\022include_sys_tables\030\002 \001(\010:\005fa" +
- "lse\022\021\n\tnamespace\030\003 \001(\t\"A\n\025GetTableNamesR" +
- "esponse\022(\n\013table_names\030\001 \003(\0132\023.hbase.pb." +
- "TableName\"?\n\024GetTableStateRequest\022\'\n\ntab" +
- "le_name\030\001 \002(\0132\023.hbase.pb.TableName\"B\n\025Ge" +
- "tTableStateResponse\022)\n\013table_state\030\001 \002(\013",
- "2\024.hbase.pb.TableState\"\031\n\027GetClusterStat" +
- "usRequest\"K\n\030GetClusterStatusResponse\022/\n" +
- "\016cluster_status\030\001 \002(\0132\027.hbase.pb.Cluster" +
- "Status\"\030\n\026IsMasterRunningRequest\"4\n\027IsMa" +
- "sterRunningResponse\022\031\n\021is_master_running" +
- "\030\001 \002(\010\"I\n\024ExecProcedureRequest\0221\n\tproced" +
- "ure\030\001 \002(\0132\036.hbase.pb.ProcedureDescriptio" +
- "n\"F\n\025ExecProcedureResponse\022\030\n\020expected_t" +
- "imeout\030\001 \001(\003\022\023\n\013return_data\030\002 \001(\014\"K\n\026IsP" +
- "rocedureDoneRequest\0221\n\tprocedure\030\001 \001(\0132\036",
- ".hbase.pb.ProcedureDescription\"`\n\027IsProc" +
- "edureDoneResponse\022\023\n\004done\030\001 \001(\010:\005false\0220" +
- "\n\010snapshot\030\002 \001(\0132\036.hbase.pb.ProcedureDes" +
- "cription\",\n\031GetProcedureResultRequest\022\017\n" +
- "\007proc_id\030\001 \002(\004\"\371\001\n\032GetProcedureResultRes" +
- "ponse\0229\n\005state\030\001 \002(\0162*.hbase.pb.GetProce" +
- "dureResultResponse.State\022\022\n\nstart_time\030\002" +
- " \001(\004\022\023\n\013last_update\030\003 \001(\004\022\016\n\006result\030\004 \001(" +
- "\014\0224\n\texception\030\005 \001(\0132!.hbase.pb.ForeignE" +
- "xceptionMessage\"1\n\005State\022\r\n\tNOT_FOUND\020\000\022",
- "\013\n\007RUNNING\020\001\022\014\n\010FINISHED\020\002\"M\n\025AbortProce" +
- "dureRequest\022\017\n\007proc_id\030\001 \002(\004\022#\n\025mayInter" +
- "ruptIfRunning\030\002 \001(\010:\004true\"6\n\026AbortProced" +
- "ureResponse\022\034\n\024is_procedure_aborted\030\001 \002(" +
- "\010\"\315\001\n\017SetQuotaRequest\022\021\n\tuser_name\030\001 \001(\t" +
- "\022\022\n\nuser_group\030\002 \001(\t\022\021\n\tnamespace\030\003 \001(\t\022" +
- "\'\n\ntable_name\030\004 \001(\0132\023.hbase.pb.TableName" +
- "\022\022\n\nremove_all\030\005 \001(\010\022\026\n\016bypass_globals\030\006" +
- " \001(\010\022+\n\010throttle\030\007 \001(\0132\031.hbase.pb.Thrott" +
- "leRequest\"\022\n\020SetQuotaResponse\"J\n\037MajorCo",
- "mpactionTimestampRequest\022\'\n\ntable_name\030\001" +
- " \002(\0132\023.hbase.pb.TableName\"U\n(MajorCompac" +
- "tionTimestampForRegionRequest\022)\n\006region\030" +
- "\001 \002(\0132\031.hbase.pb.RegionSpecifier\"@\n Majo" +
- "rCompactionTimestampResponse\022\034\n\024compacti" +
- "on_timestamp\030\001 \002(\003\"\035\n\033SecurityCapabiliti" +
- "esRequest\"\354\001\n\034SecurityCapabilitiesRespon" +
- "se\022G\n\014capabilities\030\001 \003(\01621.hbase.pb.Secu" +
- "rityCapabilitiesResponse.Capability\"\202\001\n\n" +
- "Capability\022\031\n\025SIMPLE_AUTHENTICATION\020\000\022\031\n",
- "\025SECURE_AUTHENTICATION\020\001\022\021\n\rAUTHORIZATIO" +
- "N\020\002\022\026\n\022CELL_AUTHORIZATION\020\003\022\023\n\017CELL_VISI" +
- "BILITY\020\0042\226$\n\rMasterService\022e\n\024GetSchemaA" +
- "lterStatus\022%.hbase.pb.GetSchemaAlterStat" +
- "usRequest\032&.hbase.pb.GetSchemaAlterStatu" +
- "sResponse\022b\n\023GetTableDescriptors\022$.hbase" +
- ".pb.GetTableDescriptorsRequest\032%.hbase.p" +
- "b.GetTableDescriptorsResponse\022P\n\rGetTabl" +
- "eNames\022\036.hbase.pb.GetTableNamesRequest\032\037" +
- ".hbase.pb.GetTableNamesResponse\022Y\n\020GetCl",
- "usterStatus\022!.hbase.pb.GetClusterStatusR" +
- "equest\032\".hbase.pb.GetClusterStatusRespon" +
- "se\022V\n\017IsMasterRunning\022 .hbase.pb.IsMaste" +
- "rRunningRequest\032!.hbase.pb.IsMasterRunni" +
- "ngResponse\022D\n\tAddColumn\022\032.hbase.pb.AddCo" +
- "lumnRequest\032\033.hbase.pb.AddColumnResponse" +
- "\022M\n\014DeleteColumn\022\035.hbase.pb.DeleteColumn" +
- "Request\032\036.hbase.pb.DeleteColumnResponse\022" +
- "M\n\014ModifyColumn\022\035.hbase.pb.ModifyColumnR" +
- "equest\032\036.hbase.pb.ModifyColumnResponse\022G",
- "\n\nMoveRegion\022\033.hbase.pb.MoveRegionReques" +
- "t\032\034.hbase.pb.MoveRegionResponse\022k\n\026Dispa" +
- "tchMergingRegions\022\'.hbase.pb.DispatchMer" +
- "gingRegionsRequest\032(.hbase.pb.DispatchMe" +
- "rgingRegionsResponse\022M\n\014AssignRegion\022\035.h" +
- "base.pb.AssignRegionRequest\032\036.hbase.pb.A" +
- "ssignRegionResponse\022S\n\016UnassignRegion\022\037." +
- "hbase.pb.UnassignRegionRequest\032 .hbase.p" +
- "b.UnassignRegionResponse\022P\n\rOfflineRegio" +
- "n\022\036.hbase.pb.OfflineRegionRequest\032\037.hbas",
- "e.pb.OfflineRegionResponse\022J\n\013DeleteTabl" +
- "e\022\034.hbase.pb.DeleteTableRequest\032\035.hbase." +
- "pb.DeleteTableResponse\022P\n\rtruncateTable\022" +
- "\036.hbase.pb.TruncateTableRequest\032\037.hbase." +
- "pb.TruncateTableResponse\022J\n\013EnableTable\022" +
- "\034.hbase.pb.EnableTableRequest\032\035.hbase.pb" +
- ".EnableTableResponse\022M\n\014DisableTable\022\035.h" +
- "base.pb.DisableTableRequest\032\036.hbase.pb.D" +
- "isableTableResponse\022J\n\013ModifyTable\022\034.hba" +
- "se.pb.ModifyTableRequest\032\035.hbase.pb.Modi",
- "fyTableResponse\022J\n\013CreateTable\022\034.hbase.p" +
- "b.CreateTableRequest\032\035.hbase.pb.CreateTa" +
- "bleResponse\022A\n\010Shutdown\022\031.hbase.pb.Shutd" +
- "ownRequest\032\032.hbase.pb.ShutdownResponse\022G" +
- "\n\nStopMaster\022\033.hbase.pb.StopMasterReques" +
- "t\032\034.hbase.pb.StopMasterResponse\022>\n\007Balan" +
- "ce\022\030.hbase.pb.BalanceRequest\032\031.hbase.pb." +
- "BalanceResponse\022_\n\022SetBalancerRunning\022#." +
- "hbase.pb.SetBalancerRunningRequest\032$.hba" +
- "se.pb.SetBalancerRunningResponse\022\\\n\021IsBa",
- "lancerEnabled\022\".hbase.pb.IsBalancerEnabl" +
- "edRequest\032#.hbase.pb.IsBalancerEnabledRe" +
- "sponse\022S\n\016RunCatalogScan\022\037.hbase.pb.RunC" +
- "atalogScanRequest\032 .hbase.pb.RunCatalogS" +
- "canResponse\022e\n\024EnableCatalogJanitor\022%.hb" +
- "ase.pb.EnableCatalogJanitorRequest\032&.hba" +
- "se.pb.EnableCatalogJanitorResponse\022n\n\027Is" +
- "CatalogJanitorEnabled\022(.hbase.pb.IsCatal" +
- "ogJanitorEnabledRequest\032).hbase.pb.IsCat" +
- "alogJanitorEnabledResponse\022^\n\021ExecMaster",
- "Service\022#.hbase.pb.CoprocessorServiceReq" +
- "uest\032$.hbase.pb.CoprocessorServiceRespon" +
- "se\022A\n\010Snapshot\022\031.hbase.pb.SnapshotReques" +
- "t\032\032.hbase.pb.SnapshotResponse\022h\n\025GetComp" +
- "letedSnapshots\022&.hbase.pb.GetCompletedSn" +
- "apshotsRequest\032\'.hbase.pb.GetCompletedSn" +
- "apshotsResponse\022S\n\016DeleteSnapshot\022\037.hbas" +
- "e.pb.DeleteSnapshotRequest\032 .hbase.pb.De" +
- "leteSnapshotResponse\022S\n\016IsSnapshotDone\022\037" +
- ".hbase.pb.IsSnapshotDoneRequest\032 .hbase.",
- "pb.IsSnapshotDoneResponse\022V\n\017RestoreSnap" +
- "shot\022 .hbase.pb.RestoreSnapshotRequest\032!" +
- ".hbase.pb.RestoreSnapshotResponse\022h\n\025IsR" +
- "estoreSnapshotDone\022&.hbase.pb.IsRestoreS" +
- "napshotDoneRequest\032\'.hbase.pb.IsRestoreS" +
- "napshotDoneResponse\022P\n\rExecProcedure\022\036.h" +
- "base.pb.ExecProcedureRequest\032\037.hbase.pb." +
- "ExecProcedureResponse\022W\n\024ExecProcedureWi" +
- "thRet\022\036.hbase.pb.ExecProcedureRequest\032\037." +
- "hbase.pb.ExecProcedureResponse\022V\n\017IsProc",
- "edureDone\022 .hbase.pb.IsProcedureDoneRequ" +
- "est\032!.hbase.pb.IsProcedureDoneResponse\022V" +
- "\n\017ModifyNamespace\022 .hbase.pb.ModifyNames" +
- "paceRequest\032!.hbase.pb.ModifyNamespaceRe" +
- "sponse\022V\n\017CreateNamespace\022 .hbase.pb.Cre" +
- "ateNamespaceRequest\032!.hbase.pb.CreateNam" +
- "espaceResponse\022V\n\017DeleteNamespace\022 .hbas" +
- "e.pb.DeleteNamespaceRequest\032!.hbase.pb.D" +
- "eleteNamespaceResponse\022k\n\026GetNamespaceDe" +
- "scriptor\022\'.hbase.pb.GetNamespaceDescript",
- "orRequest\032(.hbase.pb.GetNamespaceDescrip" +
- "torResponse\022q\n\030ListNamespaceDescriptors\022" +
- ").hbase.pb.ListNamespaceDescriptorsReque" +
- "st\032*.hbase.pb.ListNamespaceDescriptorsRe" +
- "sponse\022\206\001\n\037ListTableDescriptorsByNamespa" +
- "ce\0220.hbase.pb.ListTableDescriptorsByName" +
- "spaceRequest\0321.hbase.pb.ListTableDescrip" +
- "torsByNamespaceResponse\022t\n\031ListTableName" +
- "sByNamespace\022*.hbase.pb.ListTableNamesBy" +
- "NamespaceRequest\032+.hbase.pb.ListTableNam",
- "esByNamespaceResponse\022P\n\rGetTableState\022\036" +
- ".hbase.pb.GetTableStateRequest\032\037.hbase.p" +
- "b.GetTableStateResponse\022A\n\010SetQuota\022\031.hb" +
- "ase.pb.SetQuotaRequest\032\032.hbase.pb.SetQuo" +
- "taResponse\022x\n\037getLastMajorCompactionTime" +
- "stamp\022).hbase.pb.MajorCompactionTimestam" +
- "pRequest\032*.hbase.pb.MajorCompactionTimes" +
- "tampResponse\022\212\001\n(getLastMajorCompactionT" +
- "imestampForRegion\0222.hbase.pb.MajorCompac" +
- "tionTimestampForRegionRequest\032*.hbase.pb",
- ".MajorCompactionTimestampResponse\022_\n\022get" +
- "ProcedureResult\022#.hbase.pb.GetProcedureR" +
- "esultRequest\032$.hbase.pb.GetProcedureResu" +
- "ltResponse\022h\n\027getSecurityCapabilities\022%." +
- "hbase.pb.SecurityCapabilitiesRequest\032&.h" +
- "base.pb.SecurityCapabilitiesResponse\022S\n\016" +
- "AbortProcedure\022\037.hbase.pb.AbortProcedure" +
- "Request\032 .hbase.pb.AbortProcedureRespons" +
- "eBB\n*org.apache.hadoop.hbase.protobuf.ge" +
- "neratedB\014MasterProtosH\001\210\001\001\240\001\001"
+ "SnapshotDescription\"^\n\026IsSnapshotDoneRes" +
+ "ponse\022\023\n\004done\030\001 \001(\010:\005false\022/\n\010snapshot\030\002",
+ " \001(\0132\035.hbase.pb.SnapshotDescription\"O\n\034I" +
+ "sRestoreSnapshotDoneRequest\022/\n\010snapshot\030" +
+ "\001 \001(\0132\035.hbase.pb.SnapshotDescription\"4\n\035" +
+ "IsRestoreSnapshotDoneResponse\022\023\n\004done\030\001 " +
+ "\001(\010:\005false\"F\n\033GetSchemaAlterStatusReques" +
+ "t\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.pb.TableNa" +
+ "me\"T\n\034GetSchemaAlterStatusResponse\022\035\n\025ye" +
+ "t_to_update_regions\030\001 \001(\r\022\025\n\rtotal_regio" +
+ "ns\030\002 \001(\r\"\213\001\n\032GetTableDescriptorsRequest\022" +
+ "(\n\013table_names\030\001 \003(\0132\023.hbase.pb.TableNam",
+ "e\022\r\n\005regex\030\002 \001(\t\022!\n\022include_sys_tables\030\003" +
+ " \001(\010:\005false\022\021\n\tnamespace\030\004 \001(\t\"J\n\033GetTab" +
+ "leDescriptorsResponse\022+\n\014table_schema\030\001 " +
+ "\003(\0132\025.hbase.pb.TableSchema\"[\n\024GetTableNa" +
+ "mesRequest\022\r\n\005regex\030\001 \001(\t\022!\n\022include_sys" +
+ "_tables\030\002 \001(\010:\005false\022\021\n\tnamespace\030\003 \001(\t\"" +
+ "A\n\025GetTableNamesResponse\022(\n\013table_names\030" +
+ "\001 \003(\0132\023.hbase.pb.TableName\"?\n\024GetTableSt" +
+ "ateRequest\022\'\n\ntable_name\030\001 \002(\0132\023.hbase.p" +
+ "b.TableName\"B\n\025GetTableStateResponse\022)\n\013",
+ "table_state\030\001 \002(\0132\024.hbase.pb.TableState\"" +
+ "\031\n\027GetClusterStatusRequest\"K\n\030GetCluster" +
+ "StatusResponse\022/\n\016cluster_status\030\001 \002(\0132\027" +
+ ".hbase.pb.ClusterStatus\"\030\n\026IsMasterRunni" +
+ "ngRequest\"4\n\027IsMasterRunningResponse\022\031\n\021" +
+ "is_master_running\030\001 \002(\010\"I\n\024ExecProcedure" +
+ "Request\0221\n\tprocedure\030\001 \002(\0132\036.hbase.pb.Pr" +
+ "ocedureDescription\"F\n\025ExecProcedureRespo" +
+ "nse\022\030\n\020expected_timeout\030\001 \001(\003\022\023\n\013return_" +
+ "data\030\002 \001(\014\"K\n\026IsProcedureDoneRequest\0221\n\t",
+ "procedure\030\001 \001(\0132\036.hbase.pb.ProcedureDesc" +
+ "ription\"`\n\027IsProcedureDoneResponse\022\023\n\004do" +
+ "ne\030\001 \001(\010:\005false\0220\n\010snapshot\030\002 \001(\0132\036.hbas" +
+ "e.pb.ProcedureDescription\",\n\031GetProcedur" +
+ "eResultRequest\022\017\n\007proc_id\030\001 \002(\004\"\371\001\n\032GetP" +
+ "rocedureResultResponse\0229\n\005state\030\001 \002(\0162*." +
+ "hbase.pb.GetProcedureResultResponse.Stat" +
+ "e\022\022\n\nstart_time\030\002 \001(\004\022\023\n\013last_update\030\003 \001" +
+ "(\004\022\016\n\006result\030\004 \001(\014\0224\n\texception\030\005 \001(\0132!." +
+ "hbase.pb.ForeignExceptionMessage\"1\n\005Stat",
+ "e\022\r\n\tNOT_FOUND\020\000\022\013\n\007RUNNING\020\001\022\014\n\010FINISHE" +
+ "D\020\002\"M\n\025AbortProcedureRequest\022\017\n\007proc_id\030" +
+ "\001 \002(\004\022#\n\025mayInterruptIfRunning\030\002 \001(\010:\004tr" +
+ "ue\"6\n\026AbortProcedureResponse\022\034\n\024is_proce" +
+ "dure_aborted\030\001 \002(\010\"\027\n\025ListProceduresRequ" +
+ "est\"@\n\026ListProceduresResponse\022&\n\tprocedu" +
+ "re\030\001 \003(\0132\023.hbase.pb.Procedure\"\315\001\n\017SetQuo" +
+ "taRequest\022\021\n\tuser_name\030\001 \001(\t\022\022\n\nuser_gro" +
+ "up\030\002 \001(\t\022\021\n\tnamespace\030\003 \001(\t\022\'\n\ntable_nam" +
+ "e\030\004 \001(\0132\023.hbase.pb.TableName\022\022\n\nremove_a",
+ "ll\030\005 \001(\010\022\026\n\016bypass_globals\030\006 \001(\010\022+\n\010thro" +
+ "ttle\030\007 \001(\0132\031.hbase.pb.ThrottleRequest\"\022\n" +
+ "\020SetQuotaResponse\"J\n\037MajorCompactionTime" +
+ "stampRequest\022\'\n\ntable_name\030\001 \002(\0132\023.hbase" +
+ ".pb.TableName\"U\n(MajorCompactionTimestam" +
+ "pForRegionRequest\022)\n\006region\030\001 \002(\0132\031.hbas" +
+ "e.pb.RegionSpecifier\"@\n MajorCompactionT" +
+ "imestampResponse\022\034\n\024compaction_timestamp" +
+ "\030\001 \002(\003\"\035\n\033SecurityCapabilitiesRequest\"\354\001" +
+ "\n\034SecurityCapabilitiesResponse\022G\n\014capabi",
+ "lities\030\001 \003(\01621.hbase.pb.SecurityCapabili" +
+ "tiesResponse.Capability\"\202\001\n\nCapability\022\031" +
+ "\n\025SIMPLE_AUTHENTICATION\020\000\022\031\n\025SECURE_AUTH" +
+ "ENTICATION\020\001\022\021\n\rAUTHORIZATION\020\002\022\026\n\022CELL_" +
+ "AUTHORIZATION\020\003\022\023\n\017CELL_VISIBILITY\020\0042\353$\n" +
+ "\rMasterService\022e\n\024GetSchemaAlterStatus\022%" +
+ ".hbase.pb.GetSchemaAlterStatusRequest\032&." +
+ "hbase.pb.GetSchemaAlterStatusResponse\022b\n" +
+ "\023GetTableDescriptors\022$.hbase.pb.GetTable" +
+ "DescriptorsRequest\032%.hbase.pb.GetTableDe",
+ "scriptorsResponse\022P\n\rGetTableNames\022\036.hba" +
+ "se.pb.GetTableNamesRequest\032\037.hbase.pb.Ge" +
+ "tTableNamesResponse\022Y\n\020GetClusterStatus\022" +
+ "!.hbase.pb.GetClusterStatusRequest\032\".hba" +
+ "se.pb.GetClusterStatusResponse\022V\n\017IsMast" +
+ "erRunning\022 .hbase.pb.IsMasterRunningRequ" +
+ "est\032!.hbase.pb.IsMasterRunningResponse\022D" +
+ "\n\tAddColumn\022\032.hbase.pb.AddColumnRequest\032" +
+ "\033.hbase.pb.AddColumnResponse\022M\n\014DeleteCo" +
+ "lumn\022\035.hbase.pb.DeleteColumnRequest\032\036.hb",
+ "ase.pb.DeleteColumnResponse\022M\n\014ModifyCol" +
+ "umn\022\035.hbase.pb.ModifyColumnRequest\032\036.hba" +
+ "se.pb.ModifyColumnResponse\022G\n\nMoveRegion" +
+ "\022\033.hbase.pb.MoveRegionRequest\032\034.hbase.pb" +
+ ".MoveRegionResponse\022k\n\026DispatchMergingRe" +
+ "gions\022\'.hbase.pb.DispatchMergingRegionsR" +
+ "equest\032(.hbase.pb.DispatchMergingRegions" +
+ "Response\022M\n\014AssignRegion\022\035.hbase.pb.Assi" +
+ "gnRegionRequest\032\036.hbase.pb.AssignRegionR" +
+ "esponse\022S\n\016UnassignRegion\022\037.hbase.pb.Una",
+ "ssignRegionRequest\032 .hbase.pb.UnassignRe" +
+ "gionResponse\022P\n\rOfflineRegion\022\036.hbase.pb" +
+ ".OfflineRegionRequest\032\037.hbase.pb.Offline" +
+ "RegionResponse\022J\n\013DeleteTable\022\034.hbase.pb" +
+ ".DeleteTableRequest\032\035.hbase.pb.DeleteTab" +
+ "leResponse\022P\n\rtruncateTable\022\036.hbase.pb.T" +
+ "runcateTableRequest\032\037.hbase.pb.TruncateT" +
+ "ableResponse\022J\n\013EnableTable\022\034.hbase.pb.E" +
+ "nableTableRequest\032\035.hbase.pb.EnableTable" +
+ "Response\022M\n\014DisableTable\022\035.hbase.pb.Disa",
+ "bleTableRequest\032\036.hbase.pb.DisableTableR" +
+ "esponse\022J\n\013ModifyTable\022\034.hbase.pb.Modify" +
+ "TableRequest\032\035.hbase.pb.ModifyTableRespo" +
+ "nse\022J\n\013CreateTable\022\034.hbase.pb.CreateTabl" +
+ "eRequest\032\035.hbase.pb.CreateTableResponse\022" +
+ "A\n\010Shutdown\022\031.hbase.pb.ShutdownRequest\032\032" +
+ ".hbase.pb.ShutdownResponse\022G\n\nStopMaster" +
+ "\022\033.hbase.pb.StopMasterRequest\032\034.hbase.pb" +
+ ".StopMasterResponse\022>\n\007Balance\022\030.hbase.p" +
+ "b.BalanceRequest\032\031.hbase.pb.BalanceRespo",
+ "nse\022_\n\022SetBalancerRunning\022#.hbase.pb.Set" +
+ "BalancerRunningRequest\032$.hbase.pb.SetBal" +
+ "ancerRunningResponse\022\\\n\021IsBalancerEnable" +
+ "d\022\
<TRUNCATED>