You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by sy...@apache.org on 2016/01/06 02:45:55 UTC
[1/3] hbase git commit: HBASE-14107 Procedure V2 - Administrative
Task: Provide an API to List all procedures
Repository: hbase
Updated Branches:
refs/heads/branch-1.2 cbd4e5a1f -> 6d25e1119
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-protocol/src/main/protobuf/Master.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/Master.proto b/hbase-protocol/src/main/protobuf/Master.proto
index 9606cb0..7d4f7df 100644
--- a/hbase-protocol/src/main/protobuf/Master.proto
+++ b/hbase-protocol/src/main/protobuf/Master.proto
@@ -29,6 +29,7 @@ import "HBase.proto";
import "Client.proto";
import "ClusterStatus.proto";
import "ErrorHandling.proto";
+import "Procedure.proto";
import "Quota.proto";
/* Column-level protobufs */
@@ -444,6 +445,13 @@ message AbortProcedureResponse {
required bool is_procedure_aborted = 1;
}
+message ListProceduresRequest {
+}
+
+message ListProceduresResponse {
+ repeated Procedure procedure = 1;
+}
+
message SetQuotaRequest {
optional string user_name = 1;
optional string user_group = 2;
@@ -738,4 +746,8 @@ service MasterService {
/** Abort a procedure */
rpc AbortProcedure(AbortProcedureRequest)
returns(AbortProcedureResponse);
+
+ /** returns a list of procedures */
+ rpc ListProcedures(ListProceduresRequest)
+ returns(ListProceduresResponse);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index ef6669b..f3b4fa4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -63,6 +63,7 @@ import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceNotFoundException;
import org.apache.hadoop.hbase.PleaseHoldException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.ServerName;
@@ -2510,6 +2511,11 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
}
@Override
+ public List<ProcedureInfo> listProcedures() throws IOException {
+ return this.procedureExecutor.listProcedures();
+ }
+
+ @Override
public List<HTableDescriptor> listTableDescriptorsByNamespace(String name) throws IOException {
ensureNamespaceExists(name);
return listTableDescriptors(name, null, null, true);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index 613bb9c..fd9ef6c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.PleaseHoldException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
@@ -49,7 +50,6 @@ import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.procedure.MasterProcedureManager;
import org.apache.hadoop.hbase.procedure2.Procedure;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.RequestConverter;
import org.apache.hadoop.hbase.protobuf.ResponseConverter;
@@ -119,6 +119,8 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneReq
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest;
@@ -1027,15 +1029,15 @@ public class MasterRpcServices extends RSRpcServices
master.checkInitialized();
GetProcedureResultResponse.Builder builder = GetProcedureResultResponse.newBuilder();
- Pair<ProcedureResult, Procedure> v = master.getMasterProcedureExecutor()
+ Pair<ProcedureInfo, Procedure> v = master.getMasterProcedureExecutor()
.getResultOrProcedure(request.getProcId());
if (v.getFirst() != null) {
- ProcedureResult result = v.getFirst();
+ ProcedureInfo result = v.getFirst();
builder.setState(GetProcedureResultResponse.State.FINISHED);
builder.setStartTime(result.getStartTime());
builder.setLastUpdate(result.getLastUpdate());
if (result.isFailed()) {
- builder.setException(result.getException().convert());
+ builder.setException(result.getForeignExceptionMessage());
}
if (result.hasResultData()) {
builder.setResult(ByteStringer.wrap(result.getResult()));
@@ -1069,6 +1071,22 @@ public class MasterRpcServices extends RSRpcServices
}
@Override
+ public ListProceduresResponse listProcedures(
+ RpcController rpcController,
+ ListProceduresRequest request) throws ServiceException {
+ try {
+ ListProceduresResponse.Builder response =
+ ListProceduresResponse.newBuilder();
+ for(ProcedureInfo p: master.listProcedures()) {
+ response.addProcedure(ProcedureInfo.convertToProcedureProto(p));
+ }
+ return response.build();
+ } catch (IOException e) {
+ throw new ServiceException(e);
+ }
+ }
+
+ @Override
public ListNamespaceDescriptorsResponse listNamespaceDescriptors(RpcController c,
ListNamespaceDescriptorsRequest request) throws ServiceException {
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
index 2a3f71a..76f6cb4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.TableDescriptors;
import org.apache.hadoop.hbase.TableName;
@@ -292,6 +293,13 @@ public interface MasterServices extends Server {
public boolean abortProcedure(final long procId, final boolean mayInterruptIfRunning);
/**
+ * List procedures
+ * @return procedure list
+ * @throws IOException
+ */
+ public List<ProcedureInfo> listProcedures() throws IOException;
+
+ /**
* Get a namespace descriptor by name
* @param name name of namespace descriptor
* @return A descriptor
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java
index 1e94744..5c03a4a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java
@@ -24,12 +24,12 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CoordinatedStateException;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.master.RegionState.State;
import org.apache.hadoop.hbase.master.ServerManager;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
+import org.apache.hadoop.hbase.procedure2.RemoteProcedureException;
import org.apache.hadoop.hbase.quotas.MasterQuotaManager;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Threads;
@@ -78,11 +78,12 @@ public final class ProcedureSyncWait {
// Dev Consideration: are we waiting forever, or we can set up some timeout value?
Threads.sleepWithoutInterrupt(250);
}
- ProcedureResult result = procExec.getResult(procId);
+ ProcedureInfo result = procExec.getResult(procId);
if (result != null) {
if (result.isFailed()) {
// If the procedure fails, we should always have an exception captured. Throw it.
- throw result.getException().unwrapRemoteException();
+ throw RemoteProcedureException.fromProto(
+ result.getForeignExceptionMessage()).unwrapRemoteException();
}
return result.getResult();
} else {
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
index 83a51a8..b518631 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.NotServingRegionException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
@@ -783,4 +784,10 @@ public class TestAdmin2 {
boolean abortResult = admin.abortProcedure(procId, true);
assertFalse(abortResult);
}
+
+ @Test(timeout = 300000)
+ public void testListProcedures() throws Exception {
+ ProcedureInfo[] procList = admin.listProcedures();
+ assertTrue(procList.length >= 0);
+ }
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
index a19eaf4..80902cb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MetaMockingUtil;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NotAllMetaRegionsOnlineException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -407,6 +408,11 @@ public class TestCatalogJanitor {
}
@Override
+ public List<ProcedureInfo> listProcedures() throws IOException {
+ return null; //To change body of implemented methods use File | Settings | File Templates.
+ }
+
+ @Override
public List<HTableDescriptor> listTableDescriptorsByNamespace(String name) throws IOException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestAddColumnFamilyProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestAddColumnFamilyProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestAddColumnFamilyProcedure.java
index e09c838..97a287e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestAddColumnFamilyProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestAddColumnFamilyProcedure.java
@@ -28,9 +28,9 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.InvalidFamilyOperationException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyState;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -152,10 +152,11 @@ public class TestAddColumnFamilyProcedure {
ProcedureTestingUtility.waitProcedure(procExec, procId2);
// Second add should fail with InvalidFamilyOperationException
- ProcedureResult result = procExec.getResult(procId2);
+ ProcedureInfo result = procExec.getResult(procId2);
assertTrue(result.isFailed());
- LOG.debug("Add failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof InvalidFamilyOperationException);
+ LOG.debug("Add failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof InvalidFamilyOperationException);
// Do the same add the existing column family - this time offline
UTIL.getHBaseAdmin().disableTable(tableName);
@@ -169,8 +170,9 @@ public class TestAddColumnFamilyProcedure {
// Second add should fail with InvalidFamilyOperationException
result = procExec.getResult(procId3);
assertTrue(result.isFailed());
- LOG.debug("Add failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof InvalidFamilyOperationException);
+ LOG.debug("Add failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof InvalidFamilyOperationException);
}
@Test(timeout=60000)
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedure.java
index e7efc2b..d5e79cf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedure.java
@@ -28,9 +28,9 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.InvalidFamilyOperationException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyState;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -150,10 +150,11 @@ public class TestDeleteColumnFamilyProcedure {
ProcedureTestingUtility.waitProcedure(procExec, procId2);
// Second delete should fail with InvalidFamilyOperationException
- ProcedureResult result = procExec.getResult(procId2);
+ ProcedureInfo result = procExec.getResult(procId2);
assertTrue(result.isFailed());
- LOG.debug("Delete online failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof InvalidFamilyOperationException);
+ LOG.debug("Delete online failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof InvalidFamilyOperationException);
// Try again, this time with table disabled.
UTIL.getHBaseAdmin().disableTable(tableName);
@@ -166,8 +167,9 @@ public class TestDeleteColumnFamilyProcedure {
// Expect fail with InvalidFamilyOperationException
result = procExec.getResult(procId2);
assertTrue(result.isFailed());
- LOG.debug("Delete offline failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof InvalidFamilyOperationException);
+ LOG.debug("Delete offline failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof InvalidFamilyOperationException);
}
@Test(timeout=60000)
@@ -218,10 +220,11 @@ public class TestDeleteColumnFamilyProcedure {
// Wait the completion
ProcedureTestingUtility.waitProcedure(procExec, procId1);
- ProcedureResult result = procExec.getResult(procId1);
+ ProcedureInfo result = procExec.getResult(procId1);
assertTrue(result.isFailed());
- LOG.debug("Delete failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof InvalidFamilyOperationException);
+ LOG.debug("Delete failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof InvalidFamilyOperationException);
}
@Test(timeout=60000)
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
index a215c21..4a1c435 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteTableProcedure.java
@@ -25,11 +25,11 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteTableState;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -141,10 +141,10 @@ public class TestDeleteTableProcedure {
UTIL.getHBaseCluster().getMaster(), tableName, regions, "f");
// Second delete should fail with TableNotFound
- ProcedureResult result = procExec.getResult(procId2);
+ ProcedureInfo result = procExec.getResult(procId2);
assertTrue(result.isFailed());
- LOG.debug("Delete failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof TableNotFoundException);
+ LOG.debug("Delete failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof TableNotFoundException);
}
@Test(timeout=60000)
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java
index 7f49e11..078db92 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDisableTableProcedure.java
@@ -26,10 +26,10 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DisableTableState;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -125,10 +125,11 @@ public class TestDisableTableProcedure {
procExec.getEnvironment(), tableName, false), nonceGroup + 1, nonce + 1);
// Wait the completion
ProcedureTestingUtility.waitProcedure(procExec, procId2);
- ProcedureResult result = procExec.getResult(procId2);
+ ProcedureInfo result = procExec.getResult(procId2);
assertTrue(result.isFailed());
- LOG.debug("Disable failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof TableNotEnabledException);
+ LOG.debug("Disable failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof TableNotEnabledException);
// Disable the table - expect failure from ProcedurePrepareLatch
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java
index 7f8687a..8200246 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestEnableTableProcedure.java
@@ -26,10 +26,10 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.EnableTableState;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -140,10 +140,11 @@ public class TestEnableTableProcedure {
new EnableTableProcedure(procExec.getEnvironment(), tableName, false), nonceGroup, nonce);
ProcedureTestingUtility.waitProcedure(procExec, procId1);
- ProcedureResult result = procExec.getResult(procId1);
+ ProcedureInfo result = procExec.getResult(procId1);
assertTrue(result.isFailed());
- LOG.debug("Enable failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof TableNotDisabledException);
+ LOG.debug("Enable failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof TableNotDisabledException);
// Enable the table with skipping table state check flag (simulate recovery scenario)
long procId2 = procExec.submitProcedure(
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyColumnFamilyProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyColumnFamilyProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyColumnFamilyProcedure.java
index c7f05f1..12b2ad8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyColumnFamilyProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyColumnFamilyProcedure.java
@@ -28,9 +28,9 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.InvalidFamilyOperationException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyState;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -145,10 +145,11 @@ public class TestModifyColumnFamilyProcedure {
// Wait the completion
ProcedureTestingUtility.waitProcedure(procExec, procId1);
- ProcedureResult result = procExec.getResult(procId1);
+ ProcedureInfo result = procExec.getResult(procId1);
assertTrue(result.isFailed());
- LOG.debug("Modify failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof InvalidFamilyOperationException);
+ LOG.debug("Modify failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof InvalidFamilyOperationException);
}
@Test(timeout=60000)
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
index eea22ce..a49c23c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.hbase.master.procedure;
import java.util.Random;
+import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -27,9 +28,11 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
+import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.After;
import org.junit.AfterClass;
@@ -180,6 +183,41 @@ public class TestProcedureAdmin {
assertFalse(abortResult);
}
+ @Test(timeout=60000)
+ public void testListProcedure() throws Exception {
+ final TableName tableName = TableName.valueOf("testListProcedure");
+ final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();
+
+ MasterProcedureTestingUtility.createTable(procExec, tableName, null, "f");
+ ProcedureTestingUtility.waitNoProcedureRunning(procExec);
+ ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);
+
+ long procId = procExec.submitProcedure(
+ new DisableTableProcedure(procExec.getEnvironment(), tableName, false), nonceGroup, nonce);
+
+ List<ProcedureInfo> listProcedures = procExec.listProcedures();
+ assertTrue(listProcedures.size() >= 1);
+ boolean found = false;
+ for (ProcedureInfo procInfo: listProcedures) {
+ if (procInfo.getProcId() == procId) {
+ assertTrue(procInfo.getProcState() == ProcedureState.RUNNABLE);
+ found = true;
+ } else {
+ assertTrue(procInfo.getProcState() == ProcedureState.FINISHED);
+ }
+ }
+ assertTrue(found);
+
+ ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, false);
+ ProcedureTestingUtility.restart(procExec);
+ ProcedureTestingUtility.waitNoProcedureRunning(procExec);
+ ProcedureTestingUtility.assertProcNotFailed(procExec, procId);
+ listProcedures = procExec.listProcedures();
+ for (ProcedureInfo procInfo: listProcedures) {
+ assertTrue(procInfo.getProcState() == ProcedureState.FINISHED);
+ }
+ }
+
private ProcedureExecutor<MasterProcedureEnv> getMasterProcedureExecutor() {
return UTIL.getHBaseCluster().getMaster().getMasterProcedureExecutor();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java
index 2500cca..08442ce 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestTruncateTableProcedure.java
@@ -25,11 +25,11 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureResult;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.TruncateTableState;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -101,10 +101,10 @@ public class TestTruncateTableProcedure {
new TruncateTableProcedure(procExec.getEnvironment(), tableName, true));
// Second delete should fail with TableNotFound
- ProcedureResult result = procExec.getResult(procId);
+ ProcedureInfo result = procExec.getResult(procId);
assertTrue(result.isFailed());
- LOG.debug("Truncate failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof TableNotFoundException);
+ LOG.debug("Truncate failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(ProcedureTestingUtility.getExceptionCause(result) instanceof TableNotFoundException);
}
@Test(timeout=60000)
@@ -118,10 +118,11 @@ public class TestTruncateTableProcedure {
new TruncateTableProcedure(procExec.getEnvironment(), tableName, false));
// Second delete should fail with TableNotDisabled
- ProcedureResult result = procExec.getResult(procId);
+ ProcedureInfo result = procExec.getResult(procId);
assertTrue(result.isFailed());
- LOG.debug("Truncate failed with exception: " + result.getException());
- assertTrue(result.getException().getCause() instanceof TableNotDisabledException);
+ LOG.debug("Truncate failed with exception: " + result.getExceptionFullMessage());
+ assertTrue(
+ ProcedureTestingUtility.getExceptionCause(result) instanceof TableNotDisabledException);
}
@Test(timeout=60000)
[2/3] hbase git commit: HBASE-14107 Procedure V2 - Administrative
Task: Provide an API to List all procedures
Posted by sy...@apache.org.
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
index 7daf168..5afa4bc 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
@@ -49127,6 +49127,1065 @@ public final class MasterProtos {
// @@protoc_insertion_point(class_scope:AbortProcedureResponse)
}
+ // NOTE(review): protoc-generated (protobuf 2.5 style) message for the argument of the
+ // ListProcedures RPC declared in Master.proto. The request carries no fields — only
+ // unknown-field passthrough. Do not hand-edit; regenerate from the .proto instead.
+ public interface ListProceduresRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ /**
+ * Protobuf type {@code ListProceduresRequest}
+ */
+ public static final class ListProceduresRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements ListProceduresRequestOrBuilder {
+ // Use ListProceduresRequest.newBuilder() to construct.
+ private ListProceduresRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private ListProceduresRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ // Singleton default instance, created in the static initializer at the bottom of the class.
+ private static final ListProceduresRequest defaultInstance;
+ public static ListProceduresRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ListProceduresRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ // Wire-format parsing constructor: since the message defines no fields, every tag read
+ // is either end-of-message (0) or preserved verbatim in unknownFields.
+ private ListProceduresRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.Builder.class);
+ }
+
+ // Shared parser instance backing all of the static parseFrom()/parseDelimitedFrom()
+ // overloads below (public mutable static is the protobuf-2.5 generated-code convention).
+ public static com.google.protobuf.Parser<ListProceduresRequest> PARSER =
+ new com.google.protobuf.AbstractParser<ListProceduresRequest>() {
+ public ListProceduresRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new ListProceduresRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<ListProceduresRequest> getParserForType() {
+ return PARSER;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ // Field-less message: equality and hashing reduce to the unknown-field set.
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ // Standard generated parse entry points; all delegate to PARSER.
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code ListProceduresRequest}
+ */
+ // Builder carries no field state of its own (the message is empty); it only
+ // merges/propagates unknown fields.
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:ListProceduresRequest)
+ }
+
+ // Eagerly builds the shared default instance used by getDefaultInstance().
+ static {
+ defaultInstance = new ListProceduresRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:ListProceduresRequest)
+ }
+
+ // NOTE(review): protoc-generated accessor interface for ListProceduresResponse
+ // (Master.proto), exposing the standard read-only views of the repeated
+ // `Procedure procedure = 1` field. Do not hand-edit; regenerate from the .proto.
+ public interface ListProceduresResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated .Procedure procedure = 1;
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure>
+ getProcedureList();
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure getProcedure(int index);
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ int getProcedureCount();
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
+ getProcedureOrBuilderList();
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder(
+ int index);
+ }
+ /**
+ * Protobuf type {@code ListProceduresResponse}
+ */
+ public static final class ListProceduresResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements ListProceduresResponseOrBuilder {
+ // Use ListProceduresResponse.newBuilder() to construct.
+ private ListProceduresResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private ListProceduresResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final ListProceduresResponse defaultInstance;
+ public static ListProceduresResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ListProceduresResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ListProceduresResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ procedure_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure>();
+ mutable_bitField0_ |= 0x00000001;
+ }
+ procedure_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.PARSER, extensionRegistry));
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ procedure_ = java.util.Collections.unmodifiableList(procedure_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<ListProceduresResponse> PARSER =
+ new com.google.protobuf.AbstractParser<ListProceduresResponse>() {
+ public ListProceduresResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new ListProceduresResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<ListProceduresResponse> getParserForType() {
+ return PARSER;
+ }
+
+ // repeated .Procedure procedure = 1;
+ public static final int PROCEDURE_FIELD_NUMBER = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure> procedure_;
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure> getProcedureList() {
+ return procedure_;
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
+ getProcedureOrBuilderList() {
+ return procedure_;
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public int getProcedureCount() {
+ return procedure_.size();
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure getProcedure(int index) {
+ return procedure_.get(index);
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder(
+ int index) {
+ return procedure_.get(index);
+ }
+
+ private void initFields() {
+ procedure_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ for (int i = 0; i < getProcedureCount(); i++) {
+ if (!getProcedure(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < procedure_.size(); i++) {
+ output.writeMessage(1, procedure_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ for (int i = 0; i < procedure_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, procedure_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse) obj;
+
+ boolean result = true;
+ result = result && getProcedureList()
+ .equals(other.getProcedureList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getProcedureCount() > 0) {
+ hash = (37 * hash) + PROCEDURE_FIELD_NUMBER;
+ hash = (53 * hash) + getProcedureList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code ListProceduresResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getProcedureFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (procedureBuilder_ == null) {
+ procedure_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ procedureBuilder_.clear();
+ }
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresResponse_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse(this);
+ int from_bitField0_ = bitField0_;
+ if (procedureBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ procedure_ = java.util.Collections.unmodifiableList(procedure_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.procedure_ = procedure_;
+ } else {
+ result.procedure_ = procedureBuilder_.build();
+ }
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance()) return this;
+ if (procedureBuilder_ == null) {
+ if (!other.procedure_.isEmpty()) {
+ if (procedure_.isEmpty()) {
+ procedure_ = other.procedure_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureProcedureIsMutable();
+ procedure_.addAll(other.procedure_);
+ }
+ onChanged();
+ }
+ } else {
+ if (!other.procedure_.isEmpty()) {
+ if (procedureBuilder_.isEmpty()) {
+ procedureBuilder_.dispose();
+ procedureBuilder_ = null;
+ procedure_ = other.procedure_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ procedureBuilder_ =
+ com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+ getProcedureFieldBuilder() : null;
+ } else {
+ procedureBuilder_.addAllMessages(other.procedure_);
+ }
+ }
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ for (int i = 0; i < getProcedureCount(); i++) {
+ if (!getProcedure(i).isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // repeated .Procedure procedure = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure> procedure_ =
+ java.util.Collections.emptyList();
+ private void ensureProcedureIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ procedure_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure>(procedure_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder> procedureBuilder_;
+
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure> getProcedureList() {
+ if (procedureBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(procedure_);
+ } else {
+ return procedureBuilder_.getMessageList();
+ }
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public int getProcedureCount() {
+ if (procedureBuilder_ == null) {
+ return procedure_.size();
+ } else {
+ return procedureBuilder_.getCount();
+ }
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure getProcedure(int index) {
+ if (procedureBuilder_ == null) {
+ return procedure_.get(index);
+ } else {
+ return procedureBuilder_.getMessage(index);
+ }
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public Builder setProcedure(
+ int index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure value) {
+ if (procedureBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureProcedureIsMutable();
+ procedure_.set(index, value);
+ onChanged();
+ } else {
+ procedureBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public Builder setProcedure(
+ int index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) {
+ if (procedureBuilder_ == null) {
+ ensureProcedureIsMutable();
+ procedure_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ procedureBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public Builder addProcedure(org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure value) {
+ if (procedureBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureProcedureIsMutable();
+ procedure_.add(value);
+ onChanged();
+ } else {
+ procedureBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public Builder addProcedure(
+ int index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure value) {
+ if (procedureBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureProcedureIsMutable();
+ procedure_.add(index, value);
+ onChanged();
+ } else {
+ procedureBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public Builder addProcedure(
+ org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) {
+ if (procedureBuilder_ == null) {
+ ensureProcedureIsMutable();
+ procedure_.add(builderForValue.build());
+ onChanged();
+ } else {
+ procedureBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public Builder addProcedure(
+ int index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) {
+ if (procedureBuilder_ == null) {
+ ensureProcedureIsMutable();
+ procedure_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ procedureBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public Builder addAllProcedure(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure> values) {
+ if (procedureBuilder_ == null) {
+ ensureProcedureIsMutable();
+ super.addAll(values, procedure_);
+ onChanged();
+ } else {
+ procedureBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public Builder clearProcedure() {
+ if (procedureBuilder_ == null) {
+ procedure_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ } else {
+ procedureBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public Builder removeProcedure(int index) {
+ if (procedureBuilder_ == null) {
+ ensureProcedureIsMutable();
+ procedure_.remove(index);
+ onChanged();
+ } else {
+ procedureBuilder_.remove(index);
+ }
+ return this;
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder getProcedureBuilder(
+ int index) {
+ return getProcedureFieldBuilder().getBuilder(index);
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder(
+ int index) {
+ if (procedureBuilder_ == null) {
+ return procedure_.get(index); } else {
+ return procedureBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
+ getProcedureOrBuilderList() {
+ if (procedureBuilder_ != null) {
+ return procedureBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(procedure_);
+ }
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder addProcedureBuilder() {
+ return getProcedureFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance());
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder addProcedureBuilder(
+ int index) {
+ return getProcedureFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance());
+ }
+ /**
+ * <code>repeated .Procedure procedure = 1;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder>
+ getProcedureBuilderList() {
+ return getProcedureFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
+ getProcedureFieldBuilder() {
+ if (procedureBuilder_ == null) {
+ procedureBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>(
+ procedure_,
+ ((bitField0_ & 0x00000001) == 0x00000001),
+ getParentForChildren(),
+ isClean());
+ procedure_ = null;
+ }
+ return procedureBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:ListProceduresResponse)
+ }
+
+ static {
+ defaultInstance = new ListProceduresResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:ListProceduresResponse)
+ }
+
public interface SetQuotaRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
@@ -54112,6 +55171,18 @@ public final class MasterProtos {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse> done);
+ /**
+ * <code>rpc ListProcedures(.ListProceduresRequest) returns (.ListProceduresResponse);</code>
+ *
+ * <pre>
+ ** returns a list of procedures
+ * </pre>
+ */
+ public abstract void listProcedures(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse> done);
+
}
public static com.google.protobuf.Service newReflectiveService(
@@ -54541,6 +55612,14 @@ public final class MasterProtos {
impl.abortProcedure(controller, request, done);
}
+ @java.lang.Override
+ public void listProcedures(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse> done) {
+ impl.listProcedures(controller, request, done);
+ }
+
};
}
@@ -54669,6 +55748,8 @@ public final class MasterProtos {
return impl.getSecurityCapabilities(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)request);
case 52:
return impl.abortProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest)request);
+ case 53:
+ return impl.listProcedures(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -54789,6 +55870,8 @@ public final class MasterProtos {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
case 52:
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance();
+ case 53:
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -54909,6 +55992,8 @@ public final class MasterProtos {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
case 52:
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance();
+ case 53:
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -55579,6 +56664,18 @@ public final class MasterProtos {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse> done);
+ /**
+ * <code>rpc ListProcedures(.ListProceduresRequest) returns (.ListProceduresResponse);</code>
+ *
+ * <pre>
+ ** returns a list of procedures
+ * </pre>
+ */
+ public abstract void listProcedures(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse> done);
+
public static final
com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
@@ -55866,6 +56963,11 @@ public final class MasterProtos {
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse>specializeCallback(
done));
return;
+ case 53:
+ this.listProcedures(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse>specializeCallback(
+ done));
+ return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -55986,6 +57088,8 @@ public final class MasterProtos {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
case 52:
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance();
+ case 53:
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -56106,6 +57210,8 @@ public final class MasterProtos {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
case 52:
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance();
+ case 53:
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -56921,6 +58027,21 @@ public final class MasterProtos {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.class,
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance()));
}
+
+ public void listProcedures(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(53),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance()));
+ }
}
public static BlockingInterface newBlockingStub(
@@ -57193,6 +58314,11 @@ public final class MasterProtos {
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest request)
throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse listProcedures(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request)
+ throws com.google.protobuf.ServiceException;
}
private static final class BlockingStub implements BlockingInterface {
@@ -57837,6 +58963,18 @@ public final class MasterProtos {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance());
}
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse listProcedures(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(53),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance());
+ }
+
}
// @@protoc_insertion_point(class_scope:MasterService)
@@ -58313,6 +59451,16 @@ public final class MasterProtos {
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_AbortProcedureResponse_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_ListProceduresRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_ListProceduresRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_ListProceduresResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_ListProceduresResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
internal_static_SetQuotaRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
@@ -58358,263 +59506,267 @@ public final class MasterProtos {
java.lang.String[] descriptorData = {
"\n\014Master.proto\032\013HBase.proto\032\014Client.prot" +
"o\032\023ClusterStatus.proto\032\023ErrorHandling.pr" +
- "oto\032\013Quota.proto\"\212\001\n\020AddColumnRequest\022\036\n" +
- "\ntable_name\030\001 \002(\0132\n.TableName\022,\n\017column_" +
- "families\030\002 \002(\0132\023.ColumnFamilySchema\022\026\n\013n" +
- "once_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"\023\n" +
- "\021AddColumnResponse\"t\n\023DeleteColumnReques" +
- "t\022\036\n\ntable_name\030\001 \002(\0132\n.TableName\022\023\n\013col" +
- "umn_name\030\002 \002(\014\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020" +
- "\n\005nonce\030\004 \001(\004:\0010\"\026\n\024DeleteColumnResponse",
- "\"\215\001\n\023ModifyColumnRequest\022\036\n\ntable_name\030\001" +
- " \002(\0132\n.TableName\022,\n\017column_families\030\002 \002(" +
- "\0132\023.ColumnFamilySchema\022\026\n\013nonce_group\030\003 " +
- "\001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"\026\n\024ModifyColumn" +
- "Response\"\\\n\021MoveRegionRequest\022 \n\006region\030" +
- "\001 \002(\0132\020.RegionSpecifier\022%\n\020dest_server_n" +
- "ame\030\002 \001(\0132\013.ServerName\"\024\n\022MoveRegionResp" +
- "onse\"\200\001\n\035DispatchMergingRegionsRequest\022\"" +
- "\n\010region_a\030\001 \002(\0132\020.RegionSpecifier\022\"\n\010re" +
- "gion_b\030\002 \002(\0132\020.RegionSpecifier\022\027\n\010forcib",
- "le\030\003 \001(\010:\005false\" \n\036DispatchMergingRegion" +
- "sResponse\"7\n\023AssignRegionRequest\022 \n\006regi" +
- "on\030\001 \002(\0132\020.RegionSpecifier\"\026\n\024AssignRegi" +
- "onResponse\"O\n\025UnassignRegionRequest\022 \n\006r" +
- "egion\030\001 \002(\0132\020.RegionSpecifier\022\024\n\005force\030\002" +
- " \001(\010:\005false\"\030\n\026UnassignRegionResponse\"8\n" +
- "\024OfflineRegionRequest\022 \n\006region\030\001 \002(\0132\020." +
- "RegionSpecifier\"\027\n\025OfflineRegionResponse" +
- "\"v\n\022CreateTableRequest\022\"\n\014table_schema\030\001" +
- " \002(\0132\014.TableSchema\022\022\n\nsplit_keys\030\002 \003(\014\022\026",
- "\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010" +
- "\"&\n\023CreateTableResponse\022\017\n\007proc_id\030\001 \001(\004" +
- "\"^\n\022DeleteTableRequest\022\036\n\ntable_name\030\001 \002" +
- "(\0132\n.TableName\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020" +
- "\n\005nonce\030\003 \001(\004:\0010\"&\n\023DeleteTableResponse\022" +
- "\017\n\007proc_id\030\001 \001(\004\"~\n\024TruncateTableRequest" +
- "\022\035\n\ttableName\030\001 \002(\0132\n.TableName\022\035\n\016prese" +
- "rveSplits\030\002 \001(\010:\005false\022\026\n\013nonce_group\030\003 " +
- "\001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"\027\n\025TruncateTabl" +
- "eResponse\"^\n\022EnableTableRequest\022\036\n\ntable",
- "_name\030\001 \002(\0132\n.TableName\022\026\n\013nonce_group\030\002" +
- " \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"&\n\023EnableTable" +
- "Response\022\017\n\007proc_id\030\001 \001(\004\"_\n\023DisableTabl" +
+ "oto\032\017Procedure.proto\032\013Quota.proto\"\212\001\n\020Ad" +
+ "dColumnRequest\022\036\n\ntable_name\030\001 \002(\0132\n.Tab" +
+ "leName\022,\n\017column_families\030\002 \002(\0132\023.Column" +
+ "FamilySchema\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005" +
+ "nonce\030\004 \001(\004:\0010\"\023\n\021AddColumnResponse\"t\n\023D" +
+ "eleteColumnRequest\022\036\n\ntable_name\030\001 \002(\0132\n" +
+ ".TableName\022\023\n\013column_name\030\002 \002(\014\022\026\n\013nonce" +
+ "_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"\026\n\024Del",
+ "eteColumnResponse\"\215\001\n\023ModifyColumnReques" +
+ "t\022\036\n\ntable_name\030\001 \002(\0132\n.TableName\022,\n\017col" +
+ "umn_families\030\002 \002(\0132\023.ColumnFamilySchema\022" +
+ "\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\001" +
+ "0\"\026\n\024ModifyColumnResponse\"\\\n\021MoveRegionR" +
+ "equest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier" +
+ "\022%\n\020dest_server_name\030\002 \001(\0132\013.ServerName\"" +
+ "\024\n\022MoveRegionResponse\"\200\001\n\035DispatchMergin" +
+ "gRegionsRequest\022\"\n\010region_a\030\001 \002(\0132\020.Regi" +
+ "onSpecifier\022\"\n\010region_b\030\002 \002(\0132\020.RegionSp",
+ "ecifier\022\027\n\010forcible\030\003 \001(\010:\005false\" \n\036Disp" +
+ "atchMergingRegionsResponse\"7\n\023AssignRegi" +
+ "onRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecif" +
+ "ier\"\026\n\024AssignRegionResponse\"O\n\025UnassignR" +
+ "egionRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpe" +
+ "cifier\022\024\n\005force\030\002 \001(\010:\005false\"\030\n\026Unassign" +
+ "RegionResponse\"8\n\024OfflineRegionRequest\022 " +
+ "\n\006region\030\001 \002(\0132\020.RegionSpecifier\"\027\n\025Offl" +
+ "ineRegionResponse\"v\n\022CreateTableRequest\022" +
+ "\"\n\014table_schema\030\001 \002(\0132\014.TableSchema\022\022\n\ns",
+ "plit_keys\030\002 \003(\014\022\026\n\013nonce_group\030\003 \001(\004:\0010\022" +
+ "\020\n\005nonce\030\004 \001(\004:\0010\"&\n\023CreateTableResponse" +
+ "\022\017\n\007proc_id\030\001 \001(\004\"^\n\022DeleteTableRequest\022" +
+ "\036\n\ntable_name\030\001 \002(\0132\n.TableName\022\026\n\013nonce" +
+ "_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"&\n\023Del" +
+ "eteTableResponse\022\017\n\007proc_id\030\001 \001(\004\"~\n\024Tru" +
+ "ncateTableRequest\022\035\n\ttableName\030\001 \002(\0132\n.T" +
+ "ableName\022\035\n\016preserveSplits\030\002 \001(\010:\005false\022" +
+ "\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\001" +
+ "0\"\027\n\025TruncateTableResponse\"^\n\022EnableTabl",
"eRequest\022\036\n\ntable_name\030\001 \002(\0132\n.TableName" +
"\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:" +
- "\0010\"\'\n\024DisableTableResponse\022\017\n\007proc_id\030\001 " +
- "\001(\004\"\202\001\n\022ModifyTableRequest\022\036\n\ntable_name" +
- "\030\001 \002(\0132\n.TableName\022\"\n\014table_schema\030\002 \002(\013" +
- "2\014.TableSchema\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020" +
- "\n\005nonce\030\004 \001(\004:\0010\"\025\n\023ModifyTableResponse\"",
- "K\n\026CreateNamespaceRequest\0221\n\023namespaceDe" +
- "scriptor\030\001 \002(\0132\024.NamespaceDescriptor\"\031\n\027" +
- "CreateNamespaceResponse\"/\n\026DeleteNamespa" +
- "ceRequest\022\025\n\rnamespaceName\030\001 \002(\t\"\031\n\027Dele" +
- "teNamespaceResponse\"K\n\026ModifyNamespaceRe" +
- "quest\0221\n\023namespaceDescriptor\030\001 \002(\0132\024.Nam" +
- "espaceDescriptor\"\031\n\027ModifyNamespaceRespo" +
- "nse\"6\n\035GetNamespaceDescriptorRequest\022\025\n\r" +
- "namespaceName\030\001 \002(\t\"S\n\036GetNamespaceDescr" +
- "iptorResponse\0221\n\023namespaceDescriptor\030\001 \002",
- "(\0132\024.NamespaceDescriptor\"!\n\037ListNamespac" +
- "eDescriptorsRequest\"U\n ListNamespaceDesc" +
- "riptorsResponse\0221\n\023namespaceDescriptor\030\001" +
- " \003(\0132\024.NamespaceDescriptor\"?\n&ListTableD" +
- "escriptorsByNamespaceRequest\022\025\n\rnamespac" +
- "eName\030\001 \002(\t\"L\n\'ListTableDescriptorsByNam" +
- "espaceResponse\022!\n\013tableSchema\030\001 \003(\0132\014.Ta" +
- "bleSchema\"9\n ListTableNamesByNamespaceRe" +
- "quest\022\025\n\rnamespaceName\030\001 \002(\t\"B\n!ListTabl" +
- "eNamesByNamespaceResponse\022\035\n\ttableName\030\001",
- " \003(\0132\n.TableName\"\021\n\017ShutdownRequest\"\022\n\020S" +
- "hutdownResponse\"\023\n\021StopMasterRequest\"\024\n\022" +
- "StopMasterResponse\"\020\n\016BalanceRequest\"\'\n\017" +
- "BalanceResponse\022\024\n\014balancer_ran\030\001 \002(\010\"<\n" +
- "\031SetBalancerRunningRequest\022\n\n\002on\030\001 \002(\010\022\023" +
- "\n\013synchronous\030\002 \001(\010\"8\n\032SetBalancerRunnin" +
- "gResponse\022\032\n\022prev_balance_value\030\001 \001(\010\"\032\n" +
- "\030IsBalancerEnabledRequest\",\n\031IsBalancerE" +
- "nabledResponse\022\017\n\007enabled\030\001 \002(\010\"\022\n\020Norma" +
- "lizeRequest\"+\n\021NormalizeResponse\022\026\n\016norm",
- "alizer_ran\030\001 \002(\010\")\n\033SetNormalizerRunning" +
- "Request\022\n\n\002on\030\001 \002(\010\"=\n\034SetNormalizerRunn" +
- "ingResponse\022\035\n\025prev_normalizer_value\030\001 \001" +
- "(\010\"\034\n\032IsNormalizerEnabledRequest\".\n\033IsNo" +
- "rmalizerEnabledResponse\022\017\n\007enabled\030\001 \002(\010" +
- "\"\027\n\025RunCatalogScanRequest\"-\n\026RunCatalogS" +
- "canResponse\022\023\n\013scan_result\030\001 \001(\005\"-\n\033Enab" +
- "leCatalogJanitorRequest\022\016\n\006enable\030\001 \002(\010\"" +
- "2\n\034EnableCatalogJanitorResponse\022\022\n\nprev_" +
- "value\030\001 \001(\010\" \n\036IsCatalogJanitorEnabledRe",
- "quest\"0\n\037IsCatalogJanitorEnabledResponse" +
- "\022\r\n\005value\030\001 \002(\010\"9\n\017SnapshotRequest\022&\n\010sn" +
- "apshot\030\001 \002(\0132\024.SnapshotDescription\",\n\020Sn" +
- "apshotResponse\022\030\n\020expected_timeout\030\001 \002(\003" +
- "\"\036\n\034GetCompletedSnapshotsRequest\"H\n\035GetC" +
- "ompletedSnapshotsResponse\022\'\n\tsnapshots\030\001" +
- " \003(\0132\024.SnapshotDescription\"?\n\025DeleteSnap" +
+ "\0010\"&\n\023EnableTableResponse\022\017\n\007proc_id\030\001 \001" +
+ "(\004\"_\n\023DisableTableRequest\022\036\n\ntable_name\030" +
+ "\001 \002(\0132\n.TableName\022\026\n\013nonce_group\030\002 \001(\004:\001" +
+ "0\022\020\n\005nonce\030\003 \001(\004:\0010\"\'\n\024DisableTableRespo" +
+ "nse\022\017\n\007proc_id\030\001 \001(\004\"\202\001\n\022ModifyTableRequ" +
+ "est\022\036\n\ntable_name\030\001 \002(\0132\n.TableName\022\"\n\014t" +
+ "able_schema\030\002 \002(\0132\014.TableSchema\022\026\n\013nonce" +
+ "_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"\025\n\023Mod",
+ "ifyTableResponse\"K\n\026CreateNamespaceReque" +
+ "st\0221\n\023namespaceDescriptor\030\001 \002(\0132\024.Namesp" +
+ "aceDescriptor\"\031\n\027CreateNamespaceResponse" +
+ "\"/\n\026DeleteNamespaceRequest\022\025\n\rnamespaceN" +
+ "ame\030\001 \002(\t\"\031\n\027DeleteNamespaceResponse\"K\n\026" +
+ "ModifyNamespaceRequest\0221\n\023namespaceDescr" +
+ "iptor\030\001 \002(\0132\024.NamespaceDescriptor\"\031\n\027Mod" +
+ "ifyNamespaceResponse\"6\n\035GetNamespaceDesc" +
+ "riptorRequest\022\025\n\rnamespaceName\030\001 \002(\t\"S\n\036" +
+ "GetNamespaceDescriptorResponse\0221\n\023namesp",
+ "aceDescriptor\030\001 \002(\0132\024.NamespaceDescripto" +
+ "r\"!\n\037ListNamespaceDescriptorsRequest\"U\n " +
+ "ListNamespaceDescriptorsResponse\0221\n\023name" +
+ "spaceDescriptor\030\001 \003(\0132\024.NamespaceDescrip" +
+ "tor\"?\n&ListTableDescriptorsByNamespaceRe" +
+ "quest\022\025\n\rnamespaceName\030\001 \002(\t\"L\n\'ListTabl" +
+ "eDescriptorsByNamespaceResponse\022!\n\013table" +
+ "Schema\030\001 \003(\0132\014.TableSchema\"9\n ListTableN" +
+ "amesByNamespaceRequest\022\025\n\rnamespaceName\030" +
+ "\001 \002(\t\"B\n!ListTableNamesByNamespaceRespon",
+ "se\022\035\n\ttableName\030\001 \003(\0132\n.TableName\"\021\n\017Shu" +
+ "tdownRequest\"\022\n\020ShutdownResponse\"\023\n\021Stop" +
+ "MasterRequest\"\024\n\022StopMasterResponse\"\020\n\016B" +
+ "alanceRequest\"\'\n\017BalanceResponse\022\024\n\014bala" +
+ "ncer_ran\030\001 \002(\010\"<\n\031SetBalancerRunningRequ" +
+ "est\022\n\n\002on\030\001 \002(\010\022\023\n\013synchronous\030\002 \001(\010\"8\n\032" +
+ "SetBalancerRunningResponse\022\032\n\022prev_balan" +
+ "ce_value\030\001 \001(\010\"\032\n\030IsBalancerEnabledReque" +
+ "st\",\n\031IsBalancerEnabledResponse\022\017\n\007enabl" +
+ "ed\030\001 \002(\010\"\022\n\020NormalizeRequest\"+\n\021Normaliz",
+ "eResponse\022\026\n\016normalizer_ran\030\001 \002(\010\")\n\033Set" +
+ "NormalizerRunningRequest\022\n\n\002on\030\001 \002(\010\"=\n\034" +
+ "SetNormalizerRunningResponse\022\035\n\025prev_nor" +
+ "malizer_value\030\001 \001(\010\"\034\n\032IsNormalizerEnabl" +
+ "edRequest\".\n\033IsNormalizerEnabledResponse" +
+ "\022\017\n\007enabled\030\001 \002(\010\"\027\n\025RunCatalogScanReque" +
+ "st\"-\n\026RunCatalogScanResponse\022\023\n\013scan_res" +
+ "ult\030\001 \001(\005\"-\n\033EnableCatalogJanitorRequest" +
+ "\022\016\n\006enable\030\001 \002(\010\"2\n\034EnableCatalogJanitor" +
+ "Response\022\022\n\nprev_value\030\001 \001(\010\" \n\036IsCatalo",
+ "gJanitorEnabledRequest\"0\n\037IsCatalogJanit" +
+ "orEnabledResponse\022\r\n\005value\030\001 \002(\010\"9\n\017Snap" +
"shotRequest\022&\n\010snapshot\030\001 \002(\0132\024.Snapshot" +
- "Description\"\030\n\026DeleteSnapshotResponse\"@\n" +
- "\026RestoreSnapshotRequest\022&\n\010snapshot\030\001 \002(",
- "\0132\024.SnapshotDescription\"\031\n\027RestoreSnapsh" +
- "otResponse\"?\n\025IsSnapshotDoneRequest\022&\n\010s" +
- "napshot\030\001 \001(\0132\024.SnapshotDescription\"U\n\026I" +
- "sSnapshotDoneResponse\022\023\n\004done\030\001 \001(\010:\005fal" +
- "se\022&\n\010snapshot\030\002 \001(\0132\024.SnapshotDescripti" +
- "on\"F\n\034IsRestoreSnapshotDoneRequest\022&\n\010sn" +
- "apshot\030\001 \001(\0132\024.SnapshotDescription\"4\n\035Is" +
- "RestoreSnapshotDoneResponse\022\023\n\004done\030\001 \001(" +
- "\010:\005false\"=\n\033GetSchemaAlterStatusRequest\022" +
- "\036\n\ntable_name\030\001 \002(\0132\n.TableName\"T\n\034GetSc",
- "hemaAlterStatusResponse\022\035\n\025yet_to_update" +
- "_regions\030\001 \001(\r\022\025\n\rtotal_regions\030\002 \001(\r\"\202\001" +
- "\n\032GetTableDescriptorsRequest\022\037\n\013table_na" +
- "mes\030\001 \003(\0132\n.TableName\022\r\n\005regex\030\002 \001(\t\022!\n\022" +
- "include_sys_tables\030\003 \001(\010:\005false\022\021\n\tnames" +
- "pace\030\004 \001(\t\"A\n\033GetTableDescriptorsRespons" +
- "e\022\"\n\014table_schema\030\001 \003(\0132\014.TableSchema\"[\n" +
- "\024GetTableNamesRequest\022\r\n\005regex\030\001 \001(\t\022!\n\022" +
- "include_sys_tables\030\002 \001(\010:\005false\022\021\n\tnames" +
- "pace\030\003 \001(\t\"8\n\025GetTableNamesResponse\022\037\n\013t",
- "able_names\030\001 \003(\0132\n.TableName\"\031\n\027GetClust" +
- "erStatusRequest\"B\n\030GetClusterStatusRespo" +
- "nse\022&\n\016cluster_status\030\001 \002(\0132\016.ClusterSta" +
- "tus\"\030\n\026IsMasterRunningRequest\"4\n\027IsMaste" +
- "rRunningResponse\022\031\n\021is_master_running\030\001 " +
- "\002(\010\"@\n\024ExecProcedureRequest\022(\n\tprocedure" +
- "\030\001 \002(\0132\025.ProcedureDescription\"F\n\025ExecPro" +
- "cedureResponse\022\030\n\020expected_timeout\030\001 \001(\003" +
- "\022\023\n\013return_data\030\002 \001(\014\"B\n\026IsProcedureDone" +
- "Request\022(\n\tprocedure\030\001 \001(\0132\025.ProcedureDe",
- "scription\"W\n\027IsProcedureDoneResponse\022\023\n\004" +
- "done\030\001 \001(\010:\005false\022\'\n\010snapshot\030\002 \001(\0132\025.Pr" +
- "ocedureDescription\",\n\031GetProcedureResult" +
- "Request\022\017\n\007proc_id\030\001 \002(\004\"\347\001\n\032GetProcedur" +
- "eResultResponse\0220\n\005state\030\001 \002(\0162!.GetProc" +
- "edureResultResponse.State\022\022\n\nstart_time\030" +
- "\002 \001(\004\022\023\n\013last_update\030\003 \001(\004\022\016\n\006result\030\004 \001" +
- "(\014\022+\n\texception\030\005 \001(\0132\030.ForeignException" +
- "Message\"1\n\005State\022\r\n\tNOT_FOUND\020\000\022\013\n\007RUNNI" +
- "NG\020\001\022\014\n\010FINISHED\020\002\"M\n\025AbortProcedureRequ",
- "est\022\017\n\007proc_id\030\001 \002(\004\022#\n\025mayInterruptIfRu" +
- "nning\030\002 \001(\010:\004true\"6\n\026AbortProcedureRespo" +
- "nse\022\034\n\024is_procedure_aborted\030\001 \002(\010\"\273\001\n\017Se" +
- "tQuotaRequest\022\021\n\tuser_name\030\001 \001(\t\022\022\n\nuser" +
- "_group\030\002 \001(\t\022\021\n\tnamespace\030\003 \001(\t\022\036\n\ntable" +
- "_name\030\004 \001(\0132\n.TableName\022\022\n\nremove_all\030\005 " +
- "\001(\010\022\026\n\016bypass_globals\030\006 \001(\010\022\"\n\010throttle\030" +
- "\007 \001(\0132\020.ThrottleRequest\"\022\n\020SetQuotaRespo" +
- "nse\"A\n\037MajorCompactionTimestampRequest\022\036" +
- "\n\ntable_name\030\001 \002(\0132\n.TableName\"L\n(MajorC",
- "ompactionTimestampForRegionRequest\022 \n\006re" +
- "gion\030\001 \002(\0132\020.RegionSpecifier\"@\n MajorCom" +
- "pactionTimestampResponse\022\034\n\024compaction_t" +
- "imestamp\030\001 \002(\003\"\035\n\033SecurityCapabilitiesRe" +
- "quest\"\343\001\n\034SecurityCapabilitiesResponse\022>" +
- "\n\014capabilities\030\001 \003(\0162(.SecurityCapabilit" +
- "iesResponse.Capability\"\202\001\n\nCapability\022\031\n" +
- "\025SIMPLE_AUTHENTICATION\020\000\022\031\n\025SECURE_AUTHE" +
- "NTICATION\020\001\022\021\n\rAUTHORIZATION\020\002\022\026\n\022CELL_A" +
- "UTHORIZATION\020\003\022\023\n\017CELL_VISIBILITY\020\0042\231\036\n\r",
- "MasterService\022S\n\024GetSchemaAlterStatus\022\034." +
- "GetSchemaAlterStatusRequest\032\035.GetSchemaA" +
- "lterStatusResponse\022P\n\023GetTableDescriptor" +
- "s\022\033.GetTableDescriptorsRequest\032\034.GetTabl" +
- "eDescriptorsResponse\022>\n\rGetTableNames\022\025." +
- "GetTableNamesRequest\032\026.GetTableNamesResp" +
- "onse\022G\n\020GetClusterStatus\022\030.GetClusterSta" +
- "tusRequest\032\031.GetClusterStatusResponse\022D\n" +
- "\017IsMasterRunning\022\027.IsMasterRunningReques" +
- "t\032\030.IsMasterRunningResponse\0222\n\tAddColumn",
- "\022\021.AddColumnRequest\032\022.AddColumnResponse\022" +
- ";\n\014DeleteColumn\022\024.DeleteColumnRequest\032\025." +
- "DeleteColumnResponse\022;\n\014ModifyColumn\022\024.M" +
- "odifyColumnRequest\032\025.ModifyColumnRespons" +
- "e\0225\n\nMoveRegion\022\022.MoveRegionRequest\032\023.Mo" +
- "veRegionResponse\022Y\n\026DispatchMergingRegio" +
- "ns\022\036.DispatchMergingRegionsRequest\032\037.Dis" +
- "patchMergingRegionsResponse\022;\n\014AssignReg" +
- "ion\022\024.AssignRegionRequest\032\025.AssignRegion" +
- "Response\022A\n\016UnassignRegion\022\026.UnassignReg",
- "ionRequest\032\027.UnassignRegionResponse\022>\n\rO" +
- "fflineRegion\022\025.OfflineRegionRequest\032\026.Of" +
- "flineRegionResponse\0228\n\013DeleteTable\022\023.Del" +
- "eteTableRequest\032\024.DeleteTableResponse\022>\n" +
- "\rtruncateTable\022\025.TruncateTableRequest\032\026." +
- "TruncateTableResponse\0228\n\013EnableTable\022\023.E" +
- "nableTableRequest\032\024.EnableTableResponse\022" +
- ";\n\014DisableTable\022\024.DisableTableRequest\032\025." +
- "DisableTableResponse\0228\n\013ModifyTable\022\023.Mo" +
- "difyTableRequest\032\024.ModifyTableResponse\0228",
- "\n\013CreateTable\022\023.CreateTableRequest\032\024.Cre" +
- "ateTableResponse\022/\n\010Shutdown\022\020.ShutdownR" +
- "equest\032\021.ShutdownResponse\0225\n\nStopMaster\022" +
- "\022.StopMasterRequest\032\023.StopMasterResponse" +
- "\022,\n\007Balance\022\017.BalanceRequest\032\020.BalanceRe" +
- "sponse\022M\n\022SetBalancerRunning\022\032.SetBalanc" +
- "erRunningRequest\032\033.SetBalancerRunningRes" +
- "ponse\022J\n\021IsBalancerEnabled\022\031.IsBalancerE" +
- "nabledRequest\032\032.IsBalancerEnabledRespons" +
- "e\0222\n\tNormalize\022\021.NormalizeRequest\032\022.Norm",
- "alizeResponse\022S\n\024SetNormalizerRunning\022\034." +
- "SetNormalizerRunningRequest\032\035.SetNormali" +
- "zerRunningResponse\022P\n\023IsNormalizerEnable" +
- "d\022\033.IsNormalizerEnabledRequest\032\034.IsNorma" +
- "lizerEnabledResponse\022A\n\016RunCatalogScan\022\026" +
- ".RunCatalogScanRequest\032\027.RunCatalogScanR" +
- "esponse\022S\n\024EnableCatalogJanitor\022\034.Enable" +
- "CatalogJanitorRequest\032\035.EnableCatalogJan" +
- "itorResponse\022\\\n\027IsCatalogJanitorEnabled\022" +
- "\037.IsCatalogJanitorEnabledRequest\032 .IsCat",
- "alogJanitorEnabledResponse\022L\n\021ExecMaster" +
- "Service\022\032.CoprocessorServiceRequest\032\033.Co" +
- "processorServiceResponse\022/\n\010Snapshot\022\020.S" +
- "napshotRequest\032\021.SnapshotResponse\022V\n\025Get" +
- "CompletedSnapshots\022\035.GetCompletedSnapsho" +
- "tsRequest\032\036.GetCompletedSnapshotsRespons" +
- "e\022A\n\016DeleteSnapshot\022\026.DeleteSnapshotRequ" +
- "est\032\027.DeleteSnapshotResponse\022A\n\016IsSnapsh" +
- "otDone\022\026.IsSnapshotDoneRequest\032\027.IsSnaps" +
- "hotDoneResponse\022D\n\017RestoreSnapshot\022\027.Res",
- "toreSnapshotRequest\032\030.RestoreSnapshotRes" +
- "ponse\022V\n\025IsRestoreSnapshotDone\022\035.IsResto" +
- "reSnapshotDoneRequest\032\036.IsRestoreSnapsho" +
- "tDoneResponse\022>\n\rExecProcedure\022\025.ExecPro" +
- "cedureRequest\032\026.ExecProcedureResponse\022E\n" +
- "\024ExecProcedureWithRet\022\025.ExecProcedureReq" +
- "uest\032\026.ExecProcedureResponse\022D\n\017IsProced" +
- "ureDone\022\027.IsProcedureDoneRequest\032\030.IsPro" +
- "cedureDoneResponse\022D\n\017ModifyNamespace\022\027." +
- "ModifyNamespaceRequest\032\030.ModifyNamespace",
- "Response\022D\n\017CreateNamespace\022\027.CreateName" +
- "spaceRequest\032\030.CreateNamespaceResponse\022D" +
- "\n\017DeleteNamespace\022\027.DeleteNamespaceReque" +
- "st\032\030.DeleteNamespaceResponse\022Y\n\026GetNames" +
- "paceDescriptor\022\036.GetNamespaceDescriptorR" +
- "equest\032\037.GetNamespaceDescriptorResponse\022" +
- "_\n\030ListNamespaceDescriptors\022 .ListNamesp" +
- "aceDescriptorsRequest\032!.ListNamespaceDes" +
- "criptorsResponse\022t\n\037ListTableDescriptors" +
- "ByNamespace\022\'.ListTableDescriptorsByName",
- "spaceRequest\032(.ListTableDescriptorsByNam" +
- "espaceResponse\022b\n\031ListTableNamesByNamesp" +
- "ace\022!.ListTableNamesByNamespaceRequest\032\"" +
- ".ListTableNamesByNamespaceResponse\022/\n\010Se" +
- "tQuota\022\020.SetQuotaRequest\032\021.SetQuotaRespo" +
- "nse\022f\n\037getLastMajorCompactionTimestamp\022 " +
- ".MajorCompactionTimestampRequest\032!.Major" +
- "CompactionTimestampResponse\022x\n(getLastMa" +
- "jorCompactionTimestampForRegion\022).MajorC" +
- "ompactionTimestampForRegionRequest\032!.Maj",
- "orCompactionTimestampResponse\022M\n\022getProc" +
- "edureResult\022\032.GetProcedureResultRequest\032" +
- "\033.GetProcedureResultResponse\022V\n\027getSecur" +
- "ityCapabilities\022\034.SecurityCapabilitiesRe" +
- "quest\032\035.SecurityCapabilitiesResponse\022A\n\016" +
- "AbortProcedure\022\026.AbortProcedureRequest\032\027" +
- ".AbortProcedureResponseBB\n*org.apache.ha" +
- "doop.hbase.protobuf.generatedB\014MasterPro" +
- "tosH\001\210\001\001\240\001\001"
+ "Description\",\n\020SnapshotResponse\022\030\n\020expec" +
+ "ted_timeout\030\001 \002(\003\"\036\n\034GetCompletedSnapsho" +
+ "tsRequest\"H\n\035GetCompletedSnapshotsRespon" +
+ "se\022\'\n\tsnapshots\030\001 \003(\0132\024.SnapshotDescript" +
+ "ion\"?\n\025DeleteSnapshotRequest\022&\n\010snapshot" +
+ "\030\001 \002(\0132\024.SnapshotDescription\"\030\n\026DeleteSn" +
+ "apshotResponse\"@\n\026RestoreSnapshotRequest",
+ "\022&\n\010snapshot\030\001 \002(\0132\024.SnapshotDescription" +
+ "\"\031\n\027RestoreSnapshotResponse\"?\n\025IsSnapsho" +
+ "tDoneRequest\022&\n\010snapshot\030\001 \001(\0132\024.Snapsho" +
+ "tDescription\"U\n\026IsSnapshotDoneResponse\022\023" +
+ "\n\004done\030\001 \001(\010:\005false\022&\n\010snapshot\030\002 \001(\0132\024." +
+ "SnapshotDescription\"F\n\034IsRestoreSnapshot" +
+ "DoneRequest\022&\n\010snapshot\030\001 \001(\0132\024.Snapshot" +
+ "Description\"4\n\035IsRestoreSnapshotDoneResp" +
+ "onse\022\023\n\004done\030\001 \001(\010:\005false\"=\n\033GetSchemaAl" +
+ "terStatusRequest\022\036\n\ntable_name\030\001 \002(\0132\n.T",
+ "ableName\"T\n\034GetSchemaAlterStatusResponse" +
+ "\022\035\n\025yet_to_update_regions\030\001 \001(\r\022\025\n\rtotal" +
+ "_regions\030\002 \001(\r\"\202\001\n\032GetTableDescriptorsRe" +
+ "quest\022\037\n\013table_names\030\001 \003(\0132\n.TableName\022\r" +
+ "\n\005regex\030\002 \001(\t\022!\n\022include_sys_tables\030\003 \001(" +
+ "\010:\005false\022\021\n\tnamespace\030\004 \001(\t\"A\n\033GetTableD" +
+ "escriptorsResponse\022\"\n\014table_schema\030\001 \003(\013" +
+ "2\014.TableSchema\"[\n\024GetTableNamesRequest\022\r" +
+ "\n\005regex\030\001 \001(\t\022!\n\022include_sys_tables\030\002 \001(" +
+ "\010:\005false\022\021\n\tnamespace\030\003 \001(\t\"8\n\025GetTableN",
+ "amesResponse\022\037\n\013table_names\030\001 \003(\0132\n.Tabl" +
+ "eName\"\031\n\027GetClusterStatusRequest\"B\n\030GetC" +
+ "lusterStatusResponse\022&\n\016cluster_status\030\001" +
+ " \002(\0132\016.ClusterStatus\"\030\n\026IsMasterRunningR" +
+ "equest\"4\n\027IsMasterRunningResponse\022\031\n\021is_" +
+ "master_running\030\001 \002(\010\"@\n\024ExecProcedureReq" +
+ "uest\022(\n\tprocedure\030\001 \002(\0132\025.ProcedureDescr" +
+ "iption\"F\n\025ExecProcedureResponse\022\030\n\020expec" +
+ "ted_timeout\030\001 \001(\003\022\023\n\013return_data\030\002 \001(\014\"B" +
+ "\n\026IsProcedureDoneRequest\022(\n\tprocedure\030\001 ",
+ "\001(\0132\025.ProcedureDescription\"W\n\027IsProcedur" +
+ "eDoneResponse\022\023\n\004done\030\001 \001(\010:\005false\022\'\n\010sn" +
+ "apshot\030\002 \001(\0132\025.ProcedureDescription\",\n\031G" +
+ "etProcedureResultRequest\022\017\n\007proc_id\030\001 \002(" +
+ "\004\"\347\001\n\032GetProcedureResultResponse\0220\n\005stat" +
+ "e\030\001 \002(\0162!.GetProcedureResultResponse.Sta" +
+ "te\022\022\n\nstart_time\030\002 \001(\004\022\023\n\013last_update\030\003 " +
+ "\001(\004\022\016\n\006result\030\004 \001(\014\022+\n\texception\030\005 \001(\0132\030" +
+ ".ForeignExceptionMessage\"1\n\005State\022\r\n\tNOT" +
+ "_FOUND\020\000\022\013\n\007RUNNING\020\001\022\014\n\010FINISHED\020\002\"M\n\025A",
+ "bortProcedureRequest\022\017\n\007proc_id\030\001 \002(\004\022#\n" +
+ "\025mayInterruptIfRunning\030\002 \001(\010:\004true\"6\n\026Ab" +
+ "ortProcedureResponse\022\034\n\024is_procedure_abo" +
+ "rted\030\001 \002(\010\"\027\n\025ListProceduresRequest\"7\n\026L" +
+ "istProceduresResponse\022\035\n\tprocedure\030\001 \003(\013" +
+ "2\n.Procedure\"\273\001\n\017SetQuotaRequest\022\021\n\tuser" +
+ "_name\030\001 \001(\t\022\022\n\nuser_group\030\002 \001(\t\022\021\n\tnames" +
+ "pace\030\003 \001(\t\022\036\n\ntable_name\030\004 \001(\0132\n.TableNa" +
+ "me\022\022\n\nremove_all\030\005 \001(\010\022\026\n\016bypass_globals" +
+ "\030\006 \001(\010\022\"\n\010throttle\030\007 \001(\0132\020.ThrottleReque",
+ "st\"\022\n\020SetQuotaResponse\"A\n\037MajorCompactio" +
+ "nTimestampRequest\022\036\n\ntable_name\030\001 \002(\0132\n." +
+ "TableName\"L\n(MajorCompactionTimestampFor" +
+ "RegionRequest\022 \n\006region\030\001 \002(\0132\020.RegionSp" +
+ "ecifier\"@\n MajorCompactionTimestampRespo" +
+ "nse\022\034\n\024compaction_timestamp\030\001 \002(\003\"\035\n\033Sec" +
+ "urityCapabilitiesRequest\"\343\001\n\034SecurityCap" +
+ "abilitiesResponse\022>\n\014capabilities\030\001 \003(\0162" +
+ "(.SecurityCapabilitiesResponse.Capabilit" +
+ "y\"\202\001\n\nCapability\022\031\n\025SIMPLE_AUTHENTICATIO",
+ "N\020\000\022\031\n\025SECURE_AUTHENTICATION\020\001\022\021\n\rAUTHOR" +
+ "IZATION\020\002\022\026\n\022CELL_AUTHORIZATION\020\003\022\023\n\017CEL" +
+ "L_VISIBILITY\020\0042\334\036\n\rMasterService\022S\n\024GetS" +
+ "chemaAlterStatus\022\034.GetSchemaAlterStatusR" +
+ "equest\032\035.GetSchemaAlterStatusResponse\022P\n" +
+ "\023GetTableDescriptors\022\033.GetTableDescripto" +
+ "rsRequest\032\034.GetTableDescriptorsResponse\022" +
+ ">\n\rGetTableNames\022\025.GetTableNamesRequest\032" +
+ "\026.GetTableNamesResponse\022G\n\020GetClusterSta" +
+ "tus\022\030.GetClusterStatusRequest\032\031.GetClust",
+ "erStatusResponse\022D\n\017IsMasterRunning\022\027.Is" +
+ "MasterRunningRequest\032\030.IsMasterRunningRe" +
+ "sponse\0222\n\tAddColumn\022\021.AddColumnRequest\032\022" +
+ ".AddColumnResponse\022;\n\014DeleteColumn\022\024.Del" +
+ "eteColumnRequest\032\025.DeleteColumnResponse\022" +
+ ";\n\014ModifyColumn\022\024.ModifyColumnRequest\032\025." +
+ "ModifyColumnResponse\0225\n\nMoveRegion\022\022.Mov" +
+ "eRegionRequest\032\023.MoveRegionResponse\022Y\n\026D" +
+ "ispatchMergingRegions\022\036.DispatchMergingR" +
+ "egionsRequest\032\037.DispatchMergingRegionsRe",
+ "sponse\022;\n\014AssignRegion\022\024.AssignRegionReq" +
+ "uest\032\025.AssignRegionResponse\022A\n\016UnassignR" +
+ "egion\022\026.UnassignRegionRequest\032\027.Unassign" +
+ "RegionResponse\022>\n\rOfflineRegion\022\025.Offlin" +
+ "eRegionRequest\032\026.OfflineRegionResponse\0228" +
+ "\n\013DeleteTable\022\023.DeleteTableRequest\032\024.Del" +
+ "eteTableResponse\022>\n\rtruncateTable\022\025.Trun" +
+ "cateTableRequest\032\026.TruncateTableResponse" +
+ "\0228\n\013EnableTable\022\023.EnableTableRequest\032\024.E" +
+ "nableTableResponse\022;\n\014DisableTable\022\024.Dis",
+ "ableTableRequest\032\025.DisableTableResponse\022" +
+ "8\n\013ModifyTable\022\023.ModifyTableRequest\032\024.Mo" +
+ "difyTableResponse\0228\n\013CreateTable\022\023.Creat" +
+ "eTableRequest\032\024.CreateTableResponse\022/\n\010S" +
+ "hutdown\022\020.ShutdownRequest\032\021.ShutdownResp" +
+ "onse\0225\n\nStopMaster\022\022.StopMasterRequest\032\023" +
+ ".StopMasterResponse\022,\n\007Balance\022\017.Balance" +
+ "Request\032\020.BalanceResponse\022M\n\022SetBalancer" +
+ "Running\022\032.SetBalancerRunningRequest\032\033.Se" +
+ "tBalancerRunningResponse\022J\n\021IsBalancerEn",
+ "abled\022\031.IsBalancerEnabledRequest\032\032.IsBal" +
+ "ancerEnabledResponse\0222\n\tNormalize\022\021.Norm" +
+ "alizeRequest\032\022.NormalizeResponse\022S\n\024SetN" +
+ "ormalizerRunning\022\034.SetNormalizerRunningR" +
+ "equest\032\035.SetNormalizerRunningResponse\022P\n" +
+ "\023IsNormalizerEnabled\022\033.IsNormalizerEnabl" +
+ "edRequest\032\034.IsNormalizerEnabledResponse\022" +
+ "A\n\016RunCatalogScan\022\026.RunCatalogScanReques" +
+ "t\032\027.RunCatalogScanResponse\022S\n\024EnableCata" +
+ "logJanitor\022\034.EnableCatalogJanitorRequest",
+ "\032\035.EnableCatalogJanitorResponse\022\\\n\027IsCat" +
+ "alogJanitorEnabled\022\037.IsCatalogJanitorEna" +
+ "bledRequest\032 .IsCatalogJanitorEnabledRes" +
+ "ponse\022L\n\021ExecMasterService\022\032.Coprocessor" +
+ "ServiceRequest\032\033.CoprocessorServiceRespo" +
+ "nse\022/\n\010Snapshot\022\020.SnapshotRequest\032\021.Snap" +
+ "shotResponse\022V\n\025GetCompletedSnapshots\022\035." +
+ "GetCompletedSnapshotsRequest\032\036.GetComple" +
+ "tedSnapshotsResponse\022A\n\016DeleteSnapshot\022\026" +
+ ".DeleteSnapshotRequest\032\027.DeleteSnapshotR",
+ "esponse\022A\n\016IsSnapshotDone\022\026.IsSnapshotDo" +
+ "neRequest\032\027.IsSnapshotDoneResponse\022D\n\017Re" +
+ "storeSnapshot\022\027.RestoreSnapshotRequest\032\030" +
+ ".RestoreSnapshotResponse\022V\n\025IsRestoreSna" +
+ "pshotDone\022\035.IsRestoreSnapshotDoneRequest" +
+ "\032\036.IsRestoreSnapshotDoneResponse\022>\n\rExec" +
+ "Procedure\022\025.ExecProcedureRequest\032\026.ExecP" +
+ "rocedureResponse\022E\n\024ExecProcedureWithRet" +
+ "\022\025.ExecProcedureRequest\032\026.ExecProcedureR" +
+ "esponse\022D\n\017IsProcedureDone\022\027.IsProcedure",
+ "DoneRequest\032\030.IsProcedureDoneResponse\022D\n" +
+ "\017ModifyNamespace\022\027.ModifyNamespaceReques" +
+ "t\032\030.ModifyNamespaceResponse\022D\n\017CreateNam" +
+ "espace\022\027.CreateNamespaceRequest\032\030.Create" +
+ "NamespaceResponse\022D\n\017DeleteNamespace\022\027.D" +
+ "eleteNamespaceRequest\032\030.DeleteNamespaceR" +
+ "esponse\022Y\n\026GetNamespaceDescriptor\022\036.GetN" +
+ "amespaceDescriptorRequest\032\037.GetNamespace" +
+ "DescriptorResponse\022_\n\030ListNamespaceDescr" +
+ "iptors\022 .ListNamespaceDescriptorsRequest",
+ "\032!.ListNamespaceDescriptorsResponse\022t\n\037L" +
+ "istTableDescriptorsByNamespace\022\'.ListTab" +
+ "leDescriptorsByNamespaceRequest\032(.ListTa" +
+ "bleDescriptorsByNamespaceResponse\022b\n\031Lis" +
+ "tTableNamesByNamespace\022!.ListTableNamesB" +
+ "yNamespaceRequest\032\".ListTableNamesByName" +
+ "spaceResponse\022/\n\010SetQuota\022\020.SetQuotaRequ" +
+ "est\032\021.SetQuotaResponse\022f\n\037getLastMajorCo" +
+ "mpactionTimestamp\022 .MajorCompactionTimes" +
+ "tampRequest\032!.MajorCompactionTimestampRe",
+ "sponse\022x\n(getLastMajorCompactionTimestam" +
+ "pForRegion\022).MajorCompactionTimestampFor" +
+ "RegionRequest\032!.MajorCompactionTimestamp" +
+ "Response\022M\n\022getProcedureResult\022\032.GetProc" +
+ "edureResultRequest\032\033.GetProcedureResultR" +
+ "esponse\022V\n\027getSecurityCapabilities\022\034.Sec" +
+ "urityCapabilitiesRequest\032\035.SecurityCapab" +
+ "ilitiesResponse\022A\n\016AbortProcedure\022\026.Abor" +
+ "tProcedureRequest\032\027.AbortProcedureRespon" +
+ "se\022A\n\016ListProcedures\022\02
<TRUNCATED>
[3/3] hbase git commit: HBASE-14107 Procedure V2 - Administrative
Task: Provide an API to List all procedures
Posted by sy...@apache.org.
HBASE-14107 Procedure V2 - Administrative Task: Provide an API to List all procedures
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6d25e111
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6d25e111
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6d25e111
Branch: refs/heads/branch-1.2
Commit: 6d25e1119ed42f43f1efb781cdb670b4faf0b342
Parents: cbd4e5a
Author: Stephen Yuan Jiang <sy...@gmail.com>
Authored: Thu Sep 24 07:17:23 2015 -0700
Committer: Stephen Yuan Jiang <sy...@gmail.com>
Committed: Tue Jan 5 17:44:04 2016 -0800
----------------------------------------------------------------------
.../org/apache/hadoop/hbase/client/Admin.java | 8 +
.../hadoop/hbase/client/ConnectionManager.java | 6 +
.../apache/hadoop/hbase/client/HBaseAdmin.java | 25 +
.../org/apache/hadoop/hbase/ProcedureInfo.java | 224 +++
.../exceptions/IllegalArgumentIOException.java | 46 +
.../hadoop/hbase/procedure2/Procedure.java | 43 +-
.../hbase/procedure2/ProcedureExecutor.java | 63 +-
.../hbase/procedure2/ProcedureResult.java | 115 --
.../procedure2/ProcedureTestingUtility.java | 43 +-
.../procedure2/TestProcedureExecution.java | 45 +-
.../hbase/procedure2/TestProcedureRecovery.java | 13 +-
.../procedure2/TestProcedureReplayOrder.java | 3 -
.../hbase/protobuf/generated/MasterProtos.java | 1687 +++++++++++++++---
hbase-protocol/src/main/protobuf/Master.proto | 12 +
.../org/apache/hadoop/hbase/master/HMaster.java | 6 +
.../hadoop/hbase/master/MasterRpcServices.java | 26 +-
.../hadoop/hbase/master/MasterServices.java | 8 +
.../master/procedure/ProcedureSyncWait.java | 9 +-
.../apache/hadoop/hbase/client/TestAdmin2.java | 7 +
.../hadoop/hbase/master/TestCatalogJanitor.java | 6 +
.../procedure/TestAddColumnFamilyProcedure.java | 14 +-
.../TestDeleteColumnFamilyProcedure.java | 21 +-
.../procedure/TestDeleteTableProcedure.java | 8 +-
.../procedure/TestDisableTableProcedure.java | 9 +-
.../procedure/TestEnableTableProcedure.java | 9 +-
.../TestModifyColumnFamilyProcedure.java | 9 +-
.../master/procedure/TestProcedureAdmin.java | 38 +
.../procedure/TestTruncateTableProcedure.java | 15 +-
28 files changed, 2024 insertions(+), 494 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index 70fdd53..e1113c3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
@@ -958,6 +959,13 @@ public interface Admin extends Abortable, Closeable {
final boolean mayInterruptIfRunning) throws IOException;
/**
+ * List procedures
+ * @return procedure list
+ * @throws IOException
+ */
+ ProcedureInfo[] listProcedures() throws IOException;
+
+ /**
* Abort a procedure but does not block and wait for it be completely removed.
* You can use Future.get(long, TimeUnit) to wait on the operation to complete.
* It may throw ExecutionException if there was an error while executing the operation
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java
index f6f15ae..2efa1af 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java
@@ -1758,6 +1758,12 @@ class ConnectionManager {
return stub.abortProcedure(controller, request);
}
@Override
+ public MasterProtos.ListProceduresResponse listProcedures(
+ RpcController controller,
+ MasterProtos.ListProceduresRequest request) throws ServiceException {
+ return stub.listProcedures(controller, request);
+ }
+ @Override
public AddColumnResponse addColumn(RpcController controller, AddColumnRequest request)
throws ServiceException {
return stub.addColumn(controller, request);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 86af68e..3668b02 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -52,6 +52,7 @@ import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NotServingRegionException;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.RegionException;
import org.apache.hadoop.hbase.RegionLocations;
import org.apache.hadoop.hbase.ServerName;
@@ -128,6 +129,7 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshot
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest;
@@ -147,6 +149,7 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest;
+import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos;
import org.apache.hadoop.hbase.quotas.QuotaFilter;
import org.apache.hadoop.hbase.quotas.QuotaRetriever;
import org.apache.hadoop.hbase.quotas.QuotaSettings;
@@ -2902,6 +2905,28 @@ public class HBaseAdmin implements Admin {
}
/**
+ * List procedures
+ * @return procedure list
+ * @throws IOException
+ */
+ @Override
+ public ProcedureInfo[] listProcedures() throws IOException {
+ return
+ executeCallable(new MasterCallable<ProcedureInfo[]>(getConnection()) {
+ @Override
+ public ProcedureInfo[] call(int callTimeout) throws Exception {
+ List<ProcedureProtos.Procedure> procList = master.listProcedures(
+ null, ListProceduresRequest.newBuilder().build()).getProcedureList();
+ ProcedureInfo[] procInfoList = new ProcedureInfo[procList.size()];
+ for (int i = 0; i < procList.size(); i++) {
+ procInfoList[i] = ProcedureInfo.convert(procList.get(i));
+ }
+ return procInfoList;
+ }
+ });
+ }
+
+ /**
* Get list of table descriptors by namespace
* @param name namespace name
* @return A descriptor
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
new file mode 100644
index 0000000..9708c31
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
@@ -0,0 +1,224 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage;
+import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos;
+import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState;
+import org.apache.hadoop.hbase.util.ByteStringer;
+import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
+import org.apache.hadoop.hbase.util.NonceKey;
+
+/**
+ * Procedure information
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class ProcedureInfo {
+ private final long procId;
+ private final String procName;
+ private final String procOwner;
+ private final ProcedureState procState;
+ private final long parentId;
+ private final ForeignExceptionMessage exception;
+ private final long lastUpdate;
+ private final long startTime;
+ private final byte[] result;
+
+ private NonceKey nonceKey = null;
+ private long clientAckTime = -1;
+
+ public ProcedureInfo(
+ final long procId,
+ final String procName,
+ final String procOwner,
+ final ProcedureState procState,
+ final long parentId,
+ final ForeignExceptionMessage exception,
+ final long lastUpdate,
+ final long startTime,
+ final byte[] result) {
+ this.procId = procId;
+ this.procName = procName;
+ this.procOwner = procOwner;
+ this.procState = procState;
+ this.parentId = parentId;
+ this.lastUpdate = lastUpdate;
+ this.startTime = startTime;
+
+ // If the procedure is completed, we should treat exception and result differently
+ this.exception = exception;
+ this.result = result;
+ }
+
+ public long getProcId() {
+ return procId;
+ }
+
+ public String getProcName() {
+ return procName;
+ }
+
+ public String getProcOwner() {
+ return procOwner;
+ }
+
+ public ProcedureState getProcState() {
+ return procState;
+ }
+
+ public boolean hasParentId() {
+ return (parentId != -1);
+ }
+
+ public long getParentId() {
+ return parentId;
+ }
+
+ public NonceKey getNonceKey() {
+ return nonceKey;
+ }
+
+ public void setNonceKey(NonceKey nonceKey) {
+ this.nonceKey = nonceKey;
+ }
+
+ public boolean isFailed() {
+ return exception != null;
+ }
+
+ public IOException getException() {
+ if (isFailed()) {
+ return ForeignExceptionUtil.toIOException(exception);
+ }
+ return null;
+ }
+
+ @InterfaceAudience.Private
+ public ForeignExceptionMessage getForeignExceptionMessage() {
+ return exception;
+ }
+
+ public String getExceptionCause() {
+ assert isFailed();
+ return exception.getGenericException().getClassName();
+ }
+
+ public String getExceptionMessage() {
+ assert isFailed();
+ return exception.getGenericException().getMessage();
+ }
+
+ public String getExceptionFullMessage() {
+ assert isFailed();
+ return getExceptionCause() + " - " + getExceptionMessage();
+ }
+
+ public boolean hasResultData() {
+ return result != null;
+ }
+
+ public byte[] getResult() {
+ return result;
+ }
+
+ public long getStartTime() {
+ return startTime;
+ }
+
+ public long getLastUpdate() {
+ return lastUpdate;
+ }
+
+ public long executionTime() {
+ return lastUpdate - startTime;
+ }
+
+ @InterfaceAudience.Private
+ public boolean hasClientAckTime() {
+ return clientAckTime > 0;
+ }
+
+ @InterfaceAudience.Private
+ public long getClientAckTime() {
+ return clientAckTime;
+ }
+
+ @InterfaceAudience.Private
+ public void setClientAckTime(final long timestamp) {
+ this.clientAckTime = timestamp;
+ }
+
+ /**
+ * @return Convert the current {@link ProcedureInfo} into a Protocol Buffers Procedure
+ * instance.
+ */
+ @InterfaceAudience.Private
+ public static ProcedureProtos.Procedure convertToProcedureProto(
+ final ProcedureInfo procInfo) {
+ ProcedureProtos.Procedure.Builder builder = ProcedureProtos.Procedure.newBuilder();
+
+ builder.setClassName(procInfo.getProcName());
+ builder.setProcId(procInfo.getProcId());
+ builder.setStartTime(procInfo.getStartTime());
+ builder.setState(procInfo.getProcState());
+ builder.setLastUpdate(procInfo.getLastUpdate());
+
+ if (procInfo.hasParentId()) {
+ builder.setParentId(procInfo.getParentId());
+ }
+
+ if (procInfo.getProcOwner() != null) {
+ builder.setOwner(procInfo.getProcOwner());
+ }
+
+ if (procInfo.isFailed()) {
+ builder.setException(procInfo.getForeignExceptionMessage());
+ }
+
+ if (procInfo.hasResultData()) {
+ builder.setResult(ByteStringer.wrap(procInfo.getResult()));
+ }
+
+ return builder.build();
+ }
+
+ /**
+ * Helper to convert the protobuf object.
+ * @return Convert the current Protocol Buffers Procedure to {@link ProcedureInfo}
+ * instance.
+ */
+ @InterfaceAudience.Private
+ public static ProcedureInfo convert(final ProcedureProtos.Procedure procProto) {
+ return new ProcedureInfo(
+ procProto.getProcId(),
+ procProto.getClassName(),
+ procProto.getOwner(),
+ procProto.getState(),
+ procProto.hasParentId() ? procProto.getParentId() : -1,
+ procProto.getState() == ProcedureState.ROLLEDBACK ? procProto.getException() : null,
+ procProto.getLastUpdate(),
+ procProto.getStartTime(),
+ procProto.getState() == ProcedureState.FINISHED ? procProto.getResult().toByteArray() : null);
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/IllegalArgumentIOException.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/IllegalArgumentIOException.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/IllegalArgumentIOException.java
new file mode 100644
index 0000000..53ced75
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/IllegalArgumentIOException.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.exceptions;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+
+/**
+ * Exception thrown when an illegal argument is passed to a function/procedure.
+ */
+@SuppressWarnings("serial")
+@InterfaceAudience.Private
+public class IllegalArgumentIOException extends IOException {
+ public IllegalArgumentIOException() {
+ super();
+ }
+
+ public IllegalArgumentIOException(final String message) {
+ super(message);
+ }
+
+ public IllegalArgumentIOException(final String message, final Throwable t) {
+ super(message, t);
+ }
+
+ public IllegalArgumentIOException(final Throwable t) {
+ super(t);
+ }
+}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
index a343c89..5545c5c 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
@@ -26,11 +26,12 @@ import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
-import java.util.concurrent.TimeoutException;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
import org.apache.hadoop.hbase.procedure2.util.StringUtils;
import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos;
import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState;
@@ -214,6 +215,13 @@ public abstract class Procedure<TEnvironment> implements Comparable<Procedure> {
return sb.toString();
}
+ protected String toStringClass() {
+ StringBuilder sb = new StringBuilder();
+ toStringClassDetails(sb);
+
+ return sb.toString();
+ }
+
/**
* Called from {@link #toString()} when interpolating {@link Procedure} state
* @param builder Append current {@link ProcedureState}
@@ -395,7 +403,7 @@ public abstract class Procedure<TEnvironment> implements Comparable<Procedure> {
protected synchronized boolean setTimeoutFailure() {
if (state == ProcedureState.WAITING_TIMEOUT) {
long timeDiff = EnvironmentEdgeManager.currentTime() - lastUpdate;
- setFailure("ProcedureExecutor", new TimeoutException(
+ setFailure("ProcedureExecutor", new TimeoutIOException(
"Operation timed out after " + StringUtils.humanTimeDiff(timeDiff)));
return true;
}
@@ -626,6 +634,37 @@ public abstract class Procedure<TEnvironment> implements Comparable<Procedure> {
}
/**
+ * Helper to create the ProcedureInfo from Procedure.
+ */
+ @InterfaceAudience.Private
+ public static ProcedureInfo createProcedureInfo(final Procedure proc, final NonceKey nonceKey) {
+ RemoteProcedureException exception;
+
+ if (proc.hasException()) {
+ exception = proc.getException();
+ } else {
+ exception = null;
+ }
+ ProcedureInfo procInfo = new ProcedureInfo(
+ proc.getProcId(),
+ proc.toStringClass(),
+ proc.getOwner(),
+ proc.getState(),
+ proc.hasParent() ? proc.getParentProcId() : -1,
+ exception != null ?
+ RemoteProcedureException.toProto(exception.getSource(), exception.getCause()) : null,
+ proc.getLastUpdate(),
+ proc.getStartTime(),
+ proc.getResult());
+
+ if (nonceKey != null) {
+ procInfo.setNonceKey(nonceKey);
+ }
+
+ return procInfo;
+ }
+
+ /**
* Helper to convert the procedure to protobuf.
* Used by ProcedureStore implementations.
*/
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index 1a098e5..3213607 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -40,8 +40,10 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore.ProcedureIterator;
import org.apache.hadoop.hbase.procedure2.util.StringUtils;
@@ -135,13 +137,13 @@ public class ProcedureExecutor<TEnvironment> {
private static final String EVICT_ACKED_TTL_CONF_KEY ="hbase.procedure.cleaner.acked.evict.ttl";
private static final int DEFAULT_ACKED_EVICT_TTL = 5 * 60000; // 5min
- private final Map<Long, ProcedureResult> completed;
+ private final Map<Long, ProcedureInfo> completed;
private final Map<NonceKey, Long> nonceKeysToProcIdsMap;
private final ProcedureStore store;
private final Configuration conf;
public CompletedProcedureCleaner(final Configuration conf, final ProcedureStore store,
- final Map<Long, ProcedureResult> completedMap,
+ final Map<Long, ProcedureInfo> completedMap,
final Map<NonceKey, Long> nonceKeysToProcIdsMap) {
// set the timeout interval that triggers the periodic-procedure
setTimeout(conf.getInt(CLEANER_INTERVAL_CONF_KEY, DEFAULT_CLEANER_INTERVAL));
@@ -163,10 +165,10 @@ public class ProcedureExecutor<TEnvironment> {
final long evictAckTtl = conf.getInt(EVICT_ACKED_TTL_CONF_KEY, DEFAULT_ACKED_EVICT_TTL);
long now = EnvironmentEdgeManager.currentTime();
- Iterator<Map.Entry<Long, ProcedureResult>> it = completed.entrySet().iterator();
+ Iterator<Map.Entry<Long, ProcedureInfo>> it = completed.entrySet().iterator();
while (it.hasNext() && store.isRunning()) {
- Map.Entry<Long, ProcedureResult> entry = it.next();
- ProcedureResult result = entry.getValue();
+ Map.Entry<Long, ProcedureInfo> entry = it.next();
+ ProcedureInfo result = entry.getValue();
// TODO: Select TTL based on Procedure type
if ((result.hasClientAckTime() && (now - result.getClientAckTime()) >= evictAckTtl) ||
@@ -212,12 +214,12 @@ public class ProcedureExecutor<TEnvironment> {
}
/**
- * Map the the procId returned by submitProcedure(), the Root-ProcID, to the ProcedureResult.
+ * Map the procId returned by submitProcedure(), the Root-ProcID, to the ProcedureInfo.
* Once a Root-Procedure completes (success or failure), the result will be added to this map.
* The user of ProcedureExecutor should call getResult(procId) to get the result.
*/
- private final ConcurrentHashMap<Long, ProcedureResult> completed =
- new ConcurrentHashMap<Long, ProcedureResult>();
+ private final ConcurrentHashMap<Long, ProcedureInfo> completed =
+ new ConcurrentHashMap<Long, ProcedureInfo>();
/**
* Map the the procId returned by submitProcedure(), the Root-ProcID, to the RootProcedureState.
@@ -364,7 +366,7 @@ public class ProcedureExecutor<TEnvironment> {
}
assert !rollbackStack.containsKey(proc.getProcId());
procedures.remove(proc.getProcId());
- completed.put(proc.getProcId(), newResultFromProcedure(proc));
+ completed.put(proc.getProcId(), Procedure.createProcedureInfo(proc, proc.getNonceKey()));
continue;
}
@@ -572,6 +574,26 @@ public class ProcedureExecutor<TEnvironment> {
}
/**
+ * List procedures.
+ * @return the procedures in a list
+ */
+ public List<ProcedureInfo> listProcedures() {
+ List<ProcedureInfo> procedureLists =
+ new ArrayList<ProcedureInfo>(procedures.size() + completed.size());
+ for (java.util.Map.Entry<Long, Procedure> p: procedures.entrySet()) {
+ procedureLists.add(Procedure.createProcedureInfo(p.getValue(), null));
+ }
+ for (java.util.Map.Entry<Long, ProcedureInfo> e: completed.entrySet()) {
+ // Note: The procedure could show up twice in the list with different state, as
+ // it could complete after we walk through procedures list and insert into
+ // procedureLists - it is ok, as we will use the information in the ProcedureInfo
+ // to figure it out; to prevent this would increase the complexity of the logic.
+ procedureLists.add(e.getValue());
+ }
+ return procedureLists;
+ }
+
+ /**
* Add a new root-procedure to the executor.
* @param proc the new procedure to execute.
* @return the procedure id, that can be used to monitor the operation
@@ -643,7 +665,7 @@ public class ProcedureExecutor<TEnvironment> {
return currentProcId;
}
- public ProcedureResult getResult(final long procId) {
+ public ProcedureInfo getResult(final long procId) {
return completed.get(procId);
}
@@ -676,7 +698,7 @@ public class ProcedureExecutor<TEnvironment> {
* @param procId the ID of the procedure to remove
*/
public void removeResult(final long procId) {
- ProcedureResult result = completed.get(procId);
+ ProcedureInfo result = completed.get(procId);
if (result == null) {
assert !procedures.containsKey(procId) : "procId=" + procId + " is still running";
if (LOG.isDebugEnabled()) {
@@ -718,7 +740,7 @@ public class ProcedureExecutor<TEnvironment> {
return false;
}
- public Map<Long, ProcedureResult> getResults() {
+ public Map<Long, ProcedureInfo> getResults() {
return Collections.unmodifiableMap(completed);
}
@@ -1033,7 +1055,7 @@ public class ProcedureExecutor<TEnvironment> {
if (subproc == null) {
String msg = "subproc[" + i + "] is null, aborting the procedure";
procedure.setFailure(new RemoteProcedureException(msg,
- new IllegalArgumentException(msg)));
+ new IllegalArgumentIOException(msg)));
subprocs = null;
break;
}
@@ -1212,7 +1234,7 @@ public class ProcedureExecutor<TEnvironment> {
}
// update the executor internal state maps
- completed.put(proc.getProcId(), newResultFromProcedure(proc));
+ completed.put(proc.getProcId(), Procedure.createProcedureInfo(proc, proc.getNonceKey()));
rollbackStack.remove(proc.getProcId());
procedures.remove(proc.getProcId());
@@ -1228,8 +1250,8 @@ public class ProcedureExecutor<TEnvironment> {
sendProcedureFinishedNotification(proc.getProcId());
}
- public Pair<ProcedureResult, Procedure> getResultOrProcedure(final long procId) {
- ProcedureResult result = completed.get(procId);
+ public Pair<ProcedureInfo, Procedure> getResultOrProcedure(final long procId) {
+ ProcedureInfo result = completed.get(procId);
Procedure proc = null;
if (result == null) {
proc = procedures.get(procId);
@@ -1239,13 +1261,4 @@ public class ProcedureExecutor<TEnvironment> {
}
return new Pair(result, proc);
}
-
- private static ProcedureResult newResultFromProcedure(final Procedure proc) {
- if (proc.isFailed()) {
- return new ProcedureResult(
- proc.getNonceKey(), proc.getStartTime(), proc.getLastUpdate(), proc.getException());
- }
- return new ProcedureResult(
- proc.getNonceKey(), proc.getStartTime(), proc.getLastUpdate(), proc.getResult());
- }
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureResult.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureResult.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureResult.java
deleted file mode 100644
index ff5407f..0000000
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureResult.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.procedure2;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.util.NonceKey;
-
-/**
- * Once a Procedure completes the ProcedureExecutor takes all the useful
- * information of the procedure (e.g. exception/result) and creates a ProcedureResult.
- * The user of the Procedure framework will get the procedure result with
- * procedureExecutor.getResult(procId)
- */
-@InterfaceAudience.Private
-@InterfaceStability.Evolving
-public class ProcedureResult {
- private final NonceKey nonceKey;
- private final RemoteProcedureException exception;
- private final long lastUpdate;
- private final long startTime;
- private final byte[] result;
-
- private long clientAckTime = -1;
-
- public ProcedureResult(
- final NonceKey nonceKey,
- final long startTime,
- final long lastUpdate,
- final RemoteProcedureException exception) {
- this(nonceKey, exception, lastUpdate, startTime, null);
- }
-
- public ProcedureResult(
- final NonceKey nonceKey,
- final long startTime,
- final long lastUpdate,
- final byte[] result) {
- this(nonceKey, null, lastUpdate, startTime, result);
- }
-
- public ProcedureResult(
- final NonceKey nonceKey,
- final RemoteProcedureException exception,
- final long lastUpdate,
- final long startTime,
- final byte[] result) {
- this.nonceKey = nonceKey;
- this.exception = exception;
- this.lastUpdate = lastUpdate;
- this.startTime = startTime;
- this.result = result;
- }
-
- public NonceKey getNonceKey() {
- return nonceKey;
- }
-
- public boolean isFailed() {
- return exception != null;
- }
-
- public RemoteProcedureException getException() {
- return exception;
- }
-
- public boolean hasResultData() {
- return result != null;
- }
-
- public byte[] getResult() {
- return result;
- }
-
- public long getStartTime() {
- return startTime;
- }
-
- public long getLastUpdate() {
- return lastUpdate;
- }
-
- public long executionTime() {
- return lastUpdate - startTime;
- }
-
- public boolean hasClientAckTime() {
- return clientAckTime > 0;
- }
-
- public long getClientAckTime() {
- return clientAckTime;
- }
-
- @InterfaceAudience.Private
- protected void setClientAckTime(final long timestamp) {
- this.clientAckTime = timestamp;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java
index 34774ed..45ab4bd 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/ProcedureTestingUtility.java
@@ -28,15 +28,18 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.util.Threads;
+import org.apache.hadoop.hbase.exceptions.IllegalArgumentIOException;
+import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.procedure2.store.NoopProcedureStore;
import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;
+import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
public class ProcedureTestingUtility {
private static final Log LOG = LogFactory.getLog(ProcedureTestingUtility.class);
@@ -166,23 +169,43 @@ public class ProcedureTestingUtility {
public static <TEnv> void assertProcNotFailed(ProcedureExecutor<TEnv> procExecutor,
long procId) {
- ProcedureResult result = procExecutor.getResult(procId);
+ ProcedureInfo result = procExecutor.getResult(procId);
assertTrue("expected procedure result", result != null);
assertProcNotFailed(result);
}
- public static void assertProcNotFailed(final ProcedureResult result) {
- Exception exception = result.getException();
- String msg = exception != null ? exception.toString() : "no exception found";
+ public static void assertProcNotFailed(final ProcedureInfo result) {
+ ForeignExceptionMessage exception = result.getForeignExceptionMessage();
+ String msg = exception != null ? result.getExceptionFullMessage() : "no exception found";
assertFalse(msg, result.isFailed());
}
- public static void assertIsAbortException(final ProcedureResult result) {
- LOG.info(result.getException());
+ public static void assertIsAbortException(final ProcedureInfo result) {
assertEquals(true, result.isFailed());
- Throwable cause = result.getException().getCause();
- assertTrue("expected abort exception, got "+ cause,
- cause instanceof ProcedureAbortedException);
+ LOG.info(result.getExceptionFullMessage());
+ Throwable cause = getExceptionCause(result);
+ assertTrue("expected abort exception, got " + cause,
+ cause instanceof ProcedureAbortedException);
+ }
+
+ public static void assertIsTimeoutException(final ProcedureInfo result) {
+ assertEquals(true, result.isFailed());
+ LOG.info(result.getExceptionFullMessage());
+ Throwable cause = getExceptionCause(result);
+ assertTrue("expected TimeoutIOException, got " + cause, cause instanceof TimeoutIOException);
+ }
+
+ public static void assertIsIllegalArgumentException(final ProcedureInfo result) {
+ assertEquals(true, result.isFailed());
+ LOG.info(result.getExceptionFullMessage());
+ Throwable cause = ProcedureTestingUtility.getExceptionCause(result);
+ assertTrue("expected IllegalArgumentIOException, got " + cause,
+ cause instanceof IllegalArgumentIOException);
+ }
+
+ public static Throwable getExceptionCause(final ProcedureInfo procInfo) {
+ assert procInfo.getForeignExceptionMessage() != null;
+ return RemoteProcedureException.fromProto(procInfo.getForeignExceptionMessage()).getCause();
}
public static class TestProcedure extends Procedure<Void> {
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java
index 0b2a364..a259f49 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureExecution.java
@@ -21,13 +21,13 @@ package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
-import java.util.concurrent.TimeoutException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -35,14 +35,11 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.After;
import org.junit.Before;
-import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
@Category(SmallTests.class)
public class TestProcedureExecution {
@@ -80,7 +77,7 @@ public class TestProcedureExecution {
fs.delete(logDir, true);
}
- private static class TestProcedureException extends Exception {
+ private static class TestProcedureException extends IOException {
public TestProcedureException(String msg) { super(msg); }
}
@@ -141,11 +138,9 @@ public class TestProcedureExecution {
// subProc1 has a "null" subprocedure which is catched as InvalidArgument
// failed state with 2 execute and 2 rollback
LOG.info(state);
- ProcedureResult result = procExecutor.getResult(rootId);
- LOG.info(result.getException());
+ ProcedureInfo result = procExecutor.getResult(rootId);
assertTrue(state.toString(), result.isFailed());
- assertTrue(result.getException().toString(),
- result.getException().getCause() instanceof IllegalArgumentException);
+ ProcedureTestingUtility.assertIsIllegalArgumentException(result);
assertEquals(state.toString(), 4, state.size());
assertEquals("rootProc-execute", state.get(0));
@@ -164,7 +159,7 @@ public class TestProcedureExecution {
// successful state, with 3 execute
LOG.info(state);
- ProcedureResult result = procExecutor.getResult(rootId);
+ ProcedureInfo result = procExecutor.getResult(rootId);
ProcedureTestingUtility.assertProcNotFailed(result);
assertEquals(state.toString(), 3, state.size());
}
@@ -180,11 +175,12 @@ public class TestProcedureExecution {
// the 3rd proc fail, rollback after 2 successful execution
LOG.info(state);
- ProcedureResult result = procExecutor.getResult(rootId);
- LOG.info(result.getException());
+ ProcedureInfo result = procExecutor.getResult(rootId);
assertTrue(state.toString(), result.isFailed());
- assertTrue(result.getException().toString(),
- result.getException().getCause() instanceof TestProcedureException);
+ LOG.info(result.getExceptionFullMessage());
+ Throwable cause = ProcedureTestingUtility.getExceptionCause(result);
+ assertTrue("expected TestProcedureException, got " + cause,
+ cause instanceof TestProcedureException);
assertEquals(state.toString(), 6, state.size());
assertEquals("rootProc-execute", state.get(0));
@@ -223,11 +219,12 @@ public class TestProcedureExecution {
public void testRollbackRetriableFailure() {
long procId = ProcedureTestingUtility.submitAndWait(procExecutor, new TestFaultyRollback());
- ProcedureResult result = procExecutor.getResult(procId);
- LOG.info(result.getException());
+ ProcedureInfo result = procExecutor.getResult(procId);
assertTrue("expected a failure", result.isFailed());
- assertTrue(result.getException().toString(),
- result.getException().getCause() instanceof TestProcedureException);
+ LOG.info(result.getExceptionFullMessage());
+ Throwable cause = ProcedureTestingUtility.getExceptionCause(result);
+ assertTrue("expected TestProcedureException, got " + cause,
+ cause instanceof TestProcedureException);
}
public static class TestWaitingProcedure extends SequentialProcedure<Void> {
@@ -306,11 +303,9 @@ public class TestProcedureExecution {
long execTime = EnvironmentEdgeManager.currentTime() - startTime;
LOG.info(state);
assertTrue("we didn't wait enough execTime=" + execTime, execTime >= PROC_TIMEOUT_MSEC);
- ProcedureResult result = procExecutor.getResult(rootId);
- LOG.info(result.getException());
+ ProcedureInfo result = procExecutor.getResult(rootId);
assertTrue(state.toString(), result.isFailed());
- assertTrue(result.getException().toString(),
- result.getException().getCause() instanceof TimeoutException);
+ ProcedureTestingUtility.assertIsTimeoutException(result);
assertEquals(state.toString(), 2, state.size());
assertEquals("wproc-execute", state.get(0));
assertEquals("wproc-rollback", state.get(1));
@@ -323,11 +318,9 @@ public class TestProcedureExecution {
proc.setTimeout(2500);
long rootId = ProcedureTestingUtility.submitAndWait(procExecutor, proc);
LOG.info(state);
- ProcedureResult result = procExecutor.getResult(rootId);
- LOG.info(result.getException());
+ ProcedureInfo result = procExecutor.getResult(rootId);
assertTrue(state.toString(), result.isFailed());
- assertTrue(result.getException().toString(),
- result.getException().getCause() instanceof TimeoutException);
+ ProcedureTestingUtility.assertIsTimeoutException(result);
assertEquals(state.toString(), 4, state.size());
assertEquals("wproc-execute", state.get(0));
assertEquals("wproc-child-execute", state.get(1));
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java
index 9b3e097..9e01fcf 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureRecovery.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
@@ -196,7 +197,7 @@ public class TestProcedureRecovery {
long restartTs = EnvironmentEdgeManager.currentTime();
restart();
waitProcedure(procId);
- ProcedureResult result = procExecutor.getResult(procId);
+ ProcedureInfo result = procExecutor.getResult(procId);
assertTrue(result.getLastUpdate() > restartTs);
ProcedureTestingUtility.assertProcNotFailed(result);
assertEquals(1, Bytes.toInt(result.getResult()));
@@ -235,7 +236,7 @@ public class TestProcedureRecovery {
assertTrue(procExecutor.isRunning());
// The procedure is completed
- ProcedureResult result = procExecutor.getResult(procId);
+ ProcedureInfo result = procExecutor.getResult(procId);
ProcedureTestingUtility.assertProcNotFailed(result);
}
@@ -282,7 +283,7 @@ public class TestProcedureRecovery {
waitProcedure(procId);
// The procedure is completed
- ProcedureResult result = procExecutor.getResult(procId);
+ ProcedureInfo result = procExecutor.getResult(procId);
ProcedureTestingUtility.assertIsAbortException(result);
}
@@ -303,7 +304,7 @@ public class TestProcedureRecovery {
long procId2 = ProcedureTestingUtility.submitAndWait(procExecutor, proc2, nonceGroup, nonce);
assertTrue(procId == procId2);
- ProcedureResult result = procExecutor.getResult(procId2);
+ ProcedureInfo result = procExecutor.getResult(procId2);
ProcedureTestingUtility.assertProcNotFailed(result);
}
@@ -450,7 +451,7 @@ public class TestProcedureRecovery {
assertTrue(procExecutor.isRunning());
// The procedure is completed
- ProcedureResult result = procExecutor.getResult(procId);
+ ProcedureInfo result = procExecutor.getResult(procId);
ProcedureTestingUtility.assertProcNotFailed(result);
assertEquals(15, Bytes.toInt(result.getResult()));
}
@@ -504,7 +505,7 @@ public class TestProcedureRecovery {
assertTrue(procExecutor.isRunning());
// The procedure is completed
- ProcedureResult result = procExecutor.getResult(procId);
+ ProcedureInfo result = procExecutor.getResult(procId);
ProcedureTestingUtility.assertIsAbortException(result);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6d25e111/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java
index 61c58e1..d476c4f 100644
--- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java
+++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java
@@ -34,14 +34,11 @@ import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.junit.After;
-import org.junit.Assert;
import org.junit.Before;
-import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;