You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by sy...@apache.org on 2015/10/06 18:11:51 UTC
hbase git commit: HBASE-14432 Procedure V2 - enforce ACL on procedure
admin tasks (Stephen Yuan Jiang)
Repository: hbase
Updated Branches:
refs/heads/branch-1 80961187a -> a6d90bcc9
HBASE-14432 Procedure V2 - enforce ACL on procedure admin tasks (Stephen Yuan Jiang)
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a6d90bcc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a6d90bcc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a6d90bcc
Branch: refs/heads/branch-1
Commit: a6d90bcc97ea6e00d2d75381db0b598ab6c71026
Parents: 8096118
Author: Stephen Yuan Jiang <sy...@gmail.com>
Authored: Tue Oct 6 09:11:38 2015 -0700
Committer: Stephen Yuan Jiang <sy...@gmail.com>
Committed: Tue Oct 6 09:11:38 2015 -0700
----------------------------------------------------------------------
.../org/apache/hadoop/hbase/ProcedureInfo.java | 25 +
.../hadoop/hbase/procedure2/Procedure.java | 4 +-
.../hbase/procedure2/ProcedureExecutor.java | 26 +
hbase-server/pom.xml | 1 +
.../BaseMasterAndRegionObserver.java | 26 +
.../hbase/coprocessor/BaseMasterObserver.java | 48 +-
.../hbase/coprocessor/MasterObserver.java | 37 ++
.../org/apache/hadoop/hbase/master/HMaster.java | 27 +-
.../hbase/master/MasterCoprocessorHost.java | 45 ++
.../hadoop/hbase/master/MasterRpcServices.java | 16 +-
.../hadoop/hbase/master/MasterServices.java | 4 +-
.../procedure/AddColumnFamilyProcedure.java | 4 +-
.../procedure/CreateNamespaceProcedure.java | 1 +
.../master/procedure/CreateTableProcedure.java | 4 +-
.../procedure/DeleteColumnFamilyProcedure.java | 4 +-
.../procedure/DeleteNamespaceProcedure.java | 1 +
.../master/procedure/DeleteTableProcedure.java | 4 +-
.../master/procedure/DisableTableProcedure.java | 4 +-
.../master/procedure/EnableTableProcedure.java | 4 +-
.../procedure/ModifyColumnFamilyProcedure.java | 4 +-
.../procedure/ModifyNamespaceProcedure.java | 1 +
.../master/procedure/ModifyTableProcedure.java | 4 +-
.../procedure/TruncateTableProcedure.java | 4 +-
.../hbase/security/access/AccessController.java | 54 ++
.../hbase/coprocessor/TestMasterObserver.java | 88 ++-
.../protobuf/generated/TestProcedureProtos.java | 530 +++++++++++++++++++
.../hadoop/hbase/master/TestCatalogJanitor.java | 3 +-
.../security/access/TestAccessController.java | 120 +++++
.../src/test/protobuf/TestProcedure.proto | 25 +
29 files changed, 1080 insertions(+), 38 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
index 9708c31..4a15857 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage;
import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos;
import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState;
+import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
import org.apache.hadoop.hbase.util.NonceKey;
@@ -71,6 +72,11 @@ public class ProcedureInfo {
this.result = result;
}
+ public ProcedureInfo clone() {
+ return new ProcedureInfo(
+ procId, procName, procOwner, procState, parentId, exception, lastUpdate, startTime, result);
+ }
+
public long getProcId() {
return procId;
}
@@ -221,4 +227,23 @@ public class ProcedureInfo {
procProto.getStartTime(),
procProto.getState() == ProcedureState.FINISHED ? procProto.getResult().toByteArray() : null);
}
+
+ /**
+ * Check if the user is this procedure's owner
+ * @param procInfo the procedure information
+ * @param user the user
+ * @return true if the user is the owner of the procedure,
+ * false otherwise, or if the owner is unknown.
+ */
+ @InterfaceAudience.Private
+ public static boolean isProcedureOwner(final ProcedureInfo procInfo, final User user) {
+ if (user == null) {
+ return false;
+ }
+ String procOwner = procInfo.getProcOwner();
+ if (procOwner == null) {
+ return false;
+ }
+ return procOwner.equals(user.getShortName());
+ }
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
index 5545c5c..8b343d5 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
@@ -360,7 +360,9 @@ public abstract class Procedure<TEnvironment> implements Comparable<Procedure> {
return Math.max(0, timeout - (EnvironmentEdgeManager.currentTime() - startTime));
}
- protected void setOwner(final String owner) {
+ @VisibleForTesting
+ @InterfaceAudience.Private
+ public void setOwner(final String owner) {
this.owner = StringUtils.isEmpty(owner) ? null : owner;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
----------------------------------------------------------------------
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index 3213607..67ab119 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -50,6 +50,7 @@ import org.apache.hadoop.hbase.procedure2.util.StringUtils;
import org.apache.hadoop.hbase.procedure2.util.TimeoutBlockingQueue;
import org.apache.hadoop.hbase.procedure2.util.TimeoutBlockingQueue.TimeoutRetriever;
import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState;
+import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.NonceKey;
import org.apache.hadoop.hbase.util.Pair;
@@ -740,6 +741,31 @@ public class ProcedureExecutor<TEnvironment> {
return false;
}
+ /**
+ * Check if the user is this procedure's owner
+ * @param procId the target procedure
+ * @param user the user
+ * @return true if the user is the owner of the procedure,
+ * false otherwise, or if the owner is unknown.
+ */
+ public boolean isProcedureOwner(final long procId, final User user) {
+ if (user == null) {
+ return false;
+ }
+
+ Procedure proc = procedures.get(procId);
+ if (proc != null) {
+ return proc.getOwner().equals(user.getShortName());
+ }
+ ProcedureInfo procInfo = completed.get(procId);
+ if (procInfo == null) {
+ // Procedure either does not exist or has already completed and got cleaned up.
+ // At this time, we cannot check the owner of the procedure
+ return false;
+ }
+ return ProcedureInfo.isProcedureOwner(procInfo, user);
+ }
+
public Map<Long, ProcedureInfo> getResults() {
return Collections.unmodifiableMap(completed);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml
index b3c9063..dc7d0fb 100644
--- a/hbase-server/pom.xml
+++ b/hbase-server/pom.xml
@@ -868,6 +868,7 @@
<include>IncrementCounterProcessor.proto</include>
<include>PingProtocol.proto</include>
<include>DummyRegionServerEndpoint.proto</include>
+ <include>TestProcedure.proto</include>
<include>test.proto</include>
<include>test_delayed_rpc.proto</include>
<include>test_rpc_service.proto</include>
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
index d1045a2..92b0910 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.coprocessor;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -30,6 +31,8 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.master.RegionPlan;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
@@ -290,6 +293,29 @@ public abstract class BaseMasterAndRegionObserver extends BaseRegionObserver
}
@Override
+ public void preAbortProcedure(
+ ObserverContext<MasterCoprocessorEnvironment> ctx,
+ final ProcedureExecutor<MasterProcedureEnv> procEnv,
+ final long procId) throws IOException {
+ }
+
+ @Override
+ public void postAbortProcedure(ObserverContext<MasterCoprocessorEnvironment> ctx)
+ throws IOException {
+ }
+
+ @Override
+ public void preListProcedures(ObserverContext<MasterCoprocessorEnvironment> ctx)
+ throws IOException {
+ }
+
+ @Override
+ public void postListProcedures(
+ ObserverContext<MasterCoprocessorEnvironment> ctx,
+ List<ProcedureInfo> procInfoList) throws IOException {
+ }
+
+ @Override
public void preAssign(ObserverContext<MasterCoprocessorEnvironment> ctx,
HRegionInfo regionInfo) throws IOException {
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
index b1f6f4b..10d6a96 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.coprocessor;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -30,6 +31,8 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.master.RegionPlan;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
@@ -128,27 +131,37 @@ public class BaseMasterObserver implements MasterObserver {
}
@Override
- public void preCreateNamespace(ObserverContext<MasterCoprocessorEnvironment> ctx, NamespaceDescriptor ns) throws IOException {
+ public void preCreateNamespace(
+ ObserverContext<MasterCoprocessorEnvironment> ctx, NamespaceDescriptor ns)
+ throws IOException {
}
@Override
- public void postCreateNamespace(ObserverContext<MasterCoprocessorEnvironment> ctx, NamespaceDescriptor ns) throws IOException {
+ public void postCreateNamespace(
+ ObserverContext<MasterCoprocessorEnvironment> ctx, NamespaceDescriptor ns)
+ throws IOException {
}
@Override
- public void preDeleteNamespace(ObserverContext<MasterCoprocessorEnvironment> ctx, String namespace) throws IOException {
+ public void preDeleteNamespace(
+ ObserverContext<MasterCoprocessorEnvironment> ctx, String namespace) throws IOException {
}
@Override
- public void postDeleteNamespace(ObserverContext<MasterCoprocessorEnvironment> ctx, String namespace) throws IOException {
+ public void postDeleteNamespace(
+ ObserverContext<MasterCoprocessorEnvironment> ctx, String namespace) throws IOException {
}
@Override
- public void preModifyNamespace(ObserverContext<MasterCoprocessorEnvironment> ctx, NamespaceDescriptor ns) throws IOException {
+ public void preModifyNamespace(
+ ObserverContext<MasterCoprocessorEnvironment> ctx, NamespaceDescriptor ns)
+ throws IOException {
}
@Override
- public void postModifyNamespace(ObserverContext<MasterCoprocessorEnvironment> ctx, NamespaceDescriptor ns) throws IOException {
+ public void postModifyNamespace(
+ ObserverContext<MasterCoprocessorEnvironment> ctx, NamespaceDescriptor ns)
+ throws IOException {
}
@Override
@@ -283,6 +296,29 @@ public class BaseMasterObserver implements MasterObserver {
}
@Override
+ public void preAbortProcedure(
+ ObserverContext<MasterCoprocessorEnvironment> ctx,
+ final ProcedureExecutor<MasterProcedureEnv> procEnv,
+ final long procId) throws IOException {
+ }
+
+ @Override
+ public void postAbortProcedure(ObserverContext<MasterCoprocessorEnvironment> ctx)
+ throws IOException {
+ }
+
+ @Override
+ public void preListProcedures(ObserverContext<MasterCoprocessorEnvironment> ctx)
+ throws IOException {
+ }
+
+ @Override
+ public void postListProcedures(
+ ObserverContext<MasterCoprocessorEnvironment> ctx,
+ List<ProcedureInfo> procInfoList) throws IOException {
+ }
+
+ @Override
public void preAssign(ObserverContext<MasterCoprocessorEnvironment> ctx,
HRegionInfo regionInfo) throws IOException {
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
index 1136cd0..4e373e7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -33,6 +34,8 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.master.RegionPlan;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
@@ -457,6 +460,40 @@ public interface MasterObserver extends Coprocessor {
throws IOException;
/**
+ * Called before an abortProcedure request has been processed.
+ * @param ctx the environment to interact with the framework and master
+ * @throws IOException
+ */
+ public void preAbortProcedure(
+ ObserverContext<MasterCoprocessorEnvironment> ctx,
+ final ProcedureExecutor<MasterProcedureEnv> procEnv,
+ final long procId) throws IOException;
+
+ /**
+ * Called after an abortProcedure request has been processed.
+ * @param ctx the environment to interact with the framework and master
+ */
+ public void postAbortProcedure(ObserverContext<MasterCoprocessorEnvironment> ctx)
+ throws IOException;
+
+ /**
+ * Called before a listProcedures request has been processed.
+ * @param ctx the environment to interact with the framework and master
+ * @throws IOException
+ */
+ void preListProcedures(ObserverContext<MasterCoprocessorEnvironment> ctx)
+ throws IOException;
+
+ /**
+ * Called after a listProcedures request has been processed.
+ * @param ctx the environment to interact with the framework and master
+ * @param procInfoList the list of procedures about to be returned
+ */
+ void postListProcedures(
+ ObserverContext<MasterCoprocessorEnvironment> ctx,
+ List<ProcedureInfo> procInfoList) throws IOException;
+
+ /**
* Called prior to assigning a specific region.
* @param ctx the environment to interact with the framework and master
* @param regionInfo the regionInfo of the region
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 1241416..09c30fb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -2481,13 +2481,34 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
}
@Override
- public boolean abortProcedure(final long procId, final boolean mayInterruptIfRunning) {
- return this.procedureExecutor.abort(procId, mayInterruptIfRunning);
+ public boolean abortProcedure(final long procId, final boolean mayInterruptIfRunning)
+ throws IOException {
+ if (cpHost != null) {
+ cpHost.preAbortProcedure(this.procedureExecutor, procId);
+ }
+
+ final boolean result = this.procedureExecutor.abort(procId, mayInterruptIfRunning);
+
+ if (cpHost != null) {
+ cpHost.postAbortProcedure();
+ }
+
+ return result;
}
@Override
public List<ProcedureInfo> listProcedures() throws IOException {
- return this.procedureExecutor.listProcedures();
+ if (cpHost != null) {
+ cpHost.preListProcedures();
+ }
+
+ final List<ProcedureInfo> procInfoList = this.procedureExecutor.listProcedures();
+
+ if (cpHost != null) {
+ cpHost.postListProcedures(procInfoList);
+ }
+
+ return procInfoList;
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
index c549712..c241def 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
@@ -31,10 +31,13 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.coprocessor.*;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
@@ -582,6 +585,48 @@ public class MasterCoprocessorHost
});
}
+ public boolean preAbortProcedure(
+ final ProcedureExecutor<MasterProcedureEnv> procEnv,
+ final long procId) throws IOException {
+ return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
+ @Override
+ public void call(MasterObserver oserver, ObserverContext<MasterCoprocessorEnvironment> ctx)
+ throws IOException {
+ oserver.preAbortProcedure(ctx, procEnv, procId);
+ }
+ });
+ }
+
+ public void postAbortProcedure() throws IOException {
+ execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
+ @Override
+ public void call(MasterObserver oserver, ObserverContext<MasterCoprocessorEnvironment> ctx)
+ throws IOException {
+ oserver.postAbortProcedure(ctx);
+ }
+ });
+ }
+
+ public boolean preListProcedures() throws IOException {
+ return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
+ @Override
+ public void call(MasterObserver oserver, ObserverContext<MasterCoprocessorEnvironment> ctx)
+ throws IOException {
+ oserver.preListProcedures(ctx);
+ }
+ });
+ }
+
+ public void postListProcedures(final List<ProcedureInfo> procInfoList) throws IOException {
+ execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
+ @Override
+ public void call(MasterObserver oserver, ObserverContext<MasterCoprocessorEnvironment> ctx)
+ throws IOException {
+ oserver.postListProcedures(ctx, procInfoList);
+ }
+ });
+ }
+
public boolean preMove(final HRegionInfo region, final ServerName srcServer,
final ServerName destServer) throws IOException {
return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index f245918..6e2ea28 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -1069,12 +1069,16 @@ public class MasterRpcServices extends RSRpcServices
@Override
public AbortProcedureResponse abortProcedure(
RpcController rpcController,
- AbortProcedureRequest request) {
- AbortProcedureResponse.Builder response = AbortProcedureResponse.newBuilder();
- boolean abortResult =
- master.abortProcedure(request.getProcId(), request.getMayInterruptIfRunning());
- response.setIsProcedureAborted(abortResult);
- return response.build();
+ AbortProcedureRequest request) throws ServiceException {
+ try {
+ AbortProcedureResponse.Builder response = AbortProcedureResponse.newBuilder();
+ boolean abortResult =
+ master.abortProcedure(request.getProcId(), request.getMayInterruptIfRunning());
+ response.setIsProcedureAborted(abortResult);
+ return response.build();
+ } catch (IOException e) {
+ throw new ServiceException(e);
+ }
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
index 4872686..5566042 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
@@ -321,8 +321,10 @@ public interface MasterServices extends Server {
* @param procId ID of the procedure
* @param mayInterruptIfRunning if the proc completed at least one step, should it be aborted?
* @return true if aborted, false if procedure already completed or does not exist
+ * @throws IOException
*/
- public boolean abortProcedure(final long procId, final boolean mayInterruptIfRunning);
+ public boolean abortProcedure(final long procId, final boolean mayInterruptIfRunning)
+ throws IOException;
/**
* List procedures
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
index 8dc0ca1..b6642a0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
@@ -74,6 +74,7 @@ public class AddColumnFamilyProcedure
this.tableName = tableName;
this.cfDescriptor = cfDescriptor;
this.user = env.getRequestUser().getUGI();
+ this.setOwner(this.user.getShortUserName());
this.unmodifiedHTableDescriptor = null;
this.regionInfoList = null;
this.traceEnabled = null;
@@ -236,8 +237,7 @@ public class AddColumnFamilyProcedure
} else {
sb.append("Unknown");
}
- sb.append(") user=");
- sb.append(user);
+ sb.append(")");
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
index c91092a..657bbfb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
@@ -60,6 +60,7 @@ public class CreateNamespaceProcedure
final NamespaceDescriptor nsDescriptor) throws IOException {
this.nsDescriptor = nsDescriptor;
this.traceEnabled = null;
+ this.setOwner(env.getRequestUser().getUGI().getShortUserName());
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
index 150f01b..8bcd3de 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
@@ -88,6 +88,7 @@ public class CreateTableProcedure
this.hTableDescriptor = hTableDescriptor;
this.newRegions = newRegions != null ? Lists.newArrayList(newRegions) : null;
this.user = env.getRequestUser().getUGI();
+ this.setOwner(this.user.getShortUserName());
// used for compatibility with clients without procedures
// they need a sync TableExistsException
@@ -230,8 +231,7 @@ public class CreateTableProcedure
sb.append(getClass().getSimpleName());
sb.append(" (table=");
sb.append(getTableName());
- sb.append(") user=");
- sb.append(user);
+ sb.append(")");
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
index 76bda07..c15ab98 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
@@ -75,6 +75,7 @@ public class DeleteColumnFamilyProcedure
this.tableName = tableName;
this.familyName = familyName;
this.user = env.getRequestUser().getUGI();
+ this.setOwner(this.user.getShortUserName());
this.unmodifiedHTableDescriptor = null;
this.regionInfoList = null;
this.traceEnabled = null;
@@ -252,8 +253,7 @@ public class DeleteColumnFamilyProcedure
} else {
sb.append("Unknown");
}
- sb.append(") user=");
- sb.append(user);
+ sb.append(")");
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
index 8715b0b..5a42614 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
@@ -69,6 +69,7 @@ public class DeleteNamespaceProcedure
this.namespaceName = namespaceName;
this.nsDescriptor = null;
this.traceEnabled = null;
+ this.setOwner(env.getRequestUser().getUGI().getShortUserName());
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
index 3305c73..46345a5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
@@ -83,6 +83,7 @@ public class DeleteTableProcedure
final ProcedurePrepareLatch syncLatch) throws IOException {
this.tableName = tableName;
this.user = env.getRequestUser().getUGI();
+ this.setOwner(this.user.getShortUserName());
// used for compatibility with clients without procedures
// they need a sync TableNotFoundException, TableNotDisabledException, ...
@@ -211,8 +212,7 @@ public class DeleteTableProcedure
sb.append(getClass().getSimpleName());
sb.append(" (table=");
sb.append(getTableName());
- sb.append(") user=");
- sb.append(user);
+ sb.append(")");
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
index efc9846..dbfa694 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
@@ -108,6 +108,7 @@ public class DisableTableProcedure
this.tableName = tableName;
this.skipTableStateCheck = skipTableStateCheck;
this.user = env.getRequestUser().getUGI();
+ this.setOwner(this.user.getShortUserName());
// Compatible with 1.0: We use latch to make sure that this procedure implementation is
// compatible with 1.0 asynchronized operations. We need to lock the table and check
@@ -254,8 +255,7 @@ public class DisableTableProcedure
sb.append(getClass().getSimpleName());
sb.append(" (table=");
sb.append(tableName);
- sb.append(") user=");
- sb.append(user);
+ sb.append(")");
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
index 0063fb9..7201dc7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
@@ -106,6 +106,7 @@ public class EnableTableProcedure
this.tableName = tableName;
this.skipTableStateCheck = skipTableStateCheck;
this.user = env.getRequestUser().getUGI();
+ this.setOwner(this.user.getShortUserName());
// Compatible with 1.0: We use latch to make sure that this procedure implementation is
// compatible with 1.0 asynchronized operations. We need to lock the table and check
@@ -278,8 +279,7 @@ public class EnableTableProcedure
sb.append(getClass().getSimpleName());
sb.append(" (table=");
sb.append(tableName);
- sb.append(") user=");
- sb.append(user);
+ sb.append(")");
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java
index 24b17be..a6f97da 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java
@@ -72,6 +72,7 @@ public class ModifyColumnFamilyProcedure
this.tableName = tableName;
this.cfDescriptor = cfDescriptor;
this.user = env.getRequestUser().getUGI();
+ this.setOwner(this.user.getShortUserName());
this.unmodifiedHTableDescriptor = null;
this.traceEnabled = null;
}
@@ -233,8 +234,7 @@ public class ModifyColumnFamilyProcedure
} else {
sb.append("Unknown");
}
- sb.append(") user=");
- sb.append(user);
+ sb.append(")");
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
index ba5c0ad..30de252 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
@@ -61,6 +61,7 @@ public class ModifyNamespaceProcedure
this.oldNsDescriptor = null;
this.newNsDescriptor = newNsDescriptor;
this.traceEnabled = null;
+ this.setOwner(env.getRequestUser().getUGI().getShortUserName());
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
index f764022..13a2496 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
@@ -77,6 +77,7 @@ public class ModifyTableProcedure
initilize();
this.modifiedHTableDescriptor = htd;
this.user = env.getRequestUser().getUGI();
+ this.setOwner(this.user.getShortUserName());
}
private void initilize() {
@@ -263,8 +264,7 @@ public class ModifyTableProcedure
sb.append(getClass().getSimpleName());
sb.append(" (table=");
sb.append(getTableName());
- sb.append(") user=");
- sb.append(user);
+ sb.append(")");
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
index 1a5b9ae..3c1e593 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/TruncateTableProcedure.java
@@ -65,6 +65,7 @@ public class TruncateTableProcedure
this.tableName = tableName;
this.preserveSplits = preserveSplits;
this.user = env.getRequestUser().getUGI();
+ this.setOwner(this.user.getShortUserName());
}
@Override
@@ -196,8 +197,7 @@ public class TruncateTableProcedure
sb.append(getTableName());
sb.append(" preserveSplits=");
sb.append(preserveSplits);
- sb.append(") user=");
- sb.append(user);
+ sb.append(")");
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 44da8bb..001252a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
@@ -81,6 +82,8 @@ import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.master.MasterServices;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.ResponseConverter;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
@@ -1182,6 +1185,57 @@ public class AccessController extends BaseMasterAndRegionObserver
}
@Override
+ public void preAbortProcedure(
+ ObserverContext<MasterCoprocessorEnvironment> ctx,
+ final ProcedureExecutor<MasterProcedureEnv> procEnv,
+ final long procId) throws IOException {
+ if (!procEnv.isProcedureOwner(procId, getActiveUser())) {
+ // If the user is not the procedure owner, then we should further probe whether
+ // they can abort the procedure.
+ requirePermission("abortProcedure", Action.ADMIN);
+ }
+ }
+
+ @Override
+ public void postAbortProcedure(ObserverContext<MasterCoprocessorEnvironment> ctx)
+ throws IOException {
+ // There is nothing to do at this time after the procedure abort request was sent.
+ }
+
+ @Override
+ public void preListProcedures(ObserverContext<MasterCoprocessorEnvironment> ctx)
+ throws IOException {
+ // We are delegating the authorization check to postListProcedures as we don't have
+ // any concrete set of procedures to work with.
+ }
+
+ @Override
+ public void postListProcedures(
+ ObserverContext<MasterCoprocessorEnvironment> ctx,
+ List<ProcedureInfo> procInfoList) throws IOException {
+ if (procInfoList.isEmpty()) {
+ return;
+ }
+
+ // Retain only those that pass authorization checks, as the checks weren't done as part
+ // of preListProcedures.
+ Iterator<ProcedureInfo> itr = procInfoList.iterator();
+ User user = getActiveUser();
+ while (itr.hasNext()) {
+ ProcedureInfo procInfo = itr.next();
+ try {
+ if (!ProcedureInfo.isProcedureOwner(procInfo, user)) {
+ // If the user is not the procedure owner, then we should further probe whether
+ // they can see the procedure.
+ requirePermission("listProcedures", Action.ADMIN);
+ }
+ } catch (AccessDeniedException e) {
+ itr.remove();
+ }
+ }
+ }
+
+ @Override
public void preMove(ObserverContext<MasterCoprocessorEnvironment> c, HRegionInfo region,
ServerName srcServer, ServerName destServer) throws IOException {
requirePermission("move", region.getTable(), null, null, Action.ADMIN);
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
index be61f5e..6be0f49 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
@@ -50,6 +51,8 @@ import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.master.RegionState;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.RequestConverter;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
@@ -155,6 +158,10 @@ public class TestMasterObserver {
private boolean postDisableTableHandlerCalled;
private boolean preModifyTableHandlerCalled;
private boolean postModifyTableHandlerCalled;
+ private boolean preAbortProcedureCalled;
+ private boolean postAbortProcedureCalled;
+ private boolean preListProceduresCalled;
+ private boolean postListProceduresCalled;
private boolean preGetTableDescriptorsCalled;
private boolean postGetTableDescriptorsCalled;
private boolean postGetTableNamesCalled;
@@ -193,6 +200,10 @@ public class TestMasterObserver {
postEnableTableCalled = false;
preDisableTableCalled = false;
postDisableTableCalled = false;
+ preAbortProcedureCalled = false;
+ postAbortProcedureCalled = false;
+ preListProceduresCalled = false;
+ postListProceduresCalled = false;
preMoveCalled= false;
postMoveCalled = false;
preAssignCalled = false;
@@ -233,8 +244,6 @@ public class TestMasterObserver {
postEnableTableHandlerCalled = false;
preDisableTableHandlerCalled = false;
postDisableTableHandlerCalled = false;
- preModifyTableHandlerCalled = false;
- postModifyTableHandlerCalled = false;
preGetTableDescriptorsCalled = false;
postGetTableDescriptorsCalled = false;
postGetTableNamesCalled = false;
@@ -563,6 +572,49 @@ public class TestMasterObserver {
}
@Override
+ public void preAbortProcedure(
+ ObserverContext<MasterCoprocessorEnvironment> ctx,
+ final ProcedureExecutor<MasterProcedureEnv> procEnv,
+ final long procId) throws IOException {
+ preAbortProcedureCalled = true;
+ }
+
+ @Override
+ public void postAbortProcedure(
+ ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
+ postAbortProcedureCalled = true;
+ }
+
+ public boolean wasAbortProcedureCalled() {
+ return preAbortProcedureCalled && postAbortProcedureCalled;
+ }
+
+ public boolean wasPreAbortProcedureCalledOnly() {
+ return preAbortProcedureCalled && !postAbortProcedureCalled;
+ }
+
+ @Override
+ public void preListProcedures(
+ ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
+ preListProceduresCalled = true;
+ }
+
+ @Override
+ public void postListProcedures(
+ ObserverContext<MasterCoprocessorEnvironment> ctx,
+ List<ProcedureInfo> procInfoList) throws IOException {
+ postListProceduresCalled = true;
+ }
+
+ public boolean wasListProceduresCalled() {
+ return preListProceduresCalled && postListProceduresCalled;
+ }
+
+ public boolean wasPreListProceduresCalledOnly() {
+ return preListProceduresCalled && !postListProceduresCalled;
+ }
+
+ @Override
public void preMove(ObserverContext<MasterCoprocessorEnvironment> env,
HRegionInfo region, ServerName srcServer, ServerName destServer)
throws IOException {
@@ -1660,6 +1712,38 @@ public class TestMasterObserver {
cp.wasGetTableNamesCalled());
}
+ @Test (timeout=180000)
+ public void testAbortProcedureOperation() throws Exception {
+ MiniHBaseCluster cluster = UTIL.getHBaseCluster();
+
+ HMaster master = cluster.getMaster();
+ MasterCoprocessorHost host = master.getMasterCoprocessorHost();
+ CPMasterObserver cp = (CPMasterObserver)host.findCoprocessor(
+ CPMasterObserver.class.getName());
+ cp.resetStates();
+
+ master.abortProcedure(1, true);
+ assertTrue(
+ "Coprocessor should be called on abort procedure request",
+ cp.wasAbortProcedureCalled());
+ }
+
+ @Test (timeout=180000)
+ public void testListProceduresOperation() throws Exception {
+ MiniHBaseCluster cluster = UTIL.getHBaseCluster();
+
+ HMaster master = cluster.getMaster();
+ MasterCoprocessorHost host = master.getMasterCoprocessorHost();
+ CPMasterObserver cp = (CPMasterObserver)host.findCoprocessor(
+ CPMasterObserver.class.getName());
+ cp.resetStates();
+
+ master.listProcedures();
+ assertTrue(
+ "Coprocessor should be called on list procedures request",
+ cp.wasListProceduresCalled());
+ }
+
private void deleteTable(Admin admin, TableName tableName) throws Exception {
// NOTE: We need a latch because admin is not sync,
// so the postOp coprocessor method may be called after the admin operation returned.
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProcedureProtos.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProcedureProtos.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProcedureProtos.java
new file mode 100644
index 0000000..f065550
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProcedureProtos.java
@@ -0,0 +1,530 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: TestProcedure.proto
+
+package org.apache.hadoop.hbase.ipc.protobuf.generated;
+
+public final class TestProcedureProtos {
+ private TestProcedureProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface TestTableDDLStateDataOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string table_name = 1;
+ /**
+ * <code>required string table_name = 1;</code>
+ */
+ boolean hasTableName();
+ /**
+ * <code>required string table_name = 1;</code>
+ */
+ java.lang.String getTableName();
+ /**
+ * <code>required string table_name = 1;</code>
+ */
+ com.google.protobuf.ByteString
+ getTableNameBytes();
+ }
+ /**
+ * Protobuf type {@code TestTableDDLStateData}
+ */
+ public static final class TestTableDDLStateData extends
+ com.google.protobuf.GeneratedMessage
+ implements TestTableDDLStateDataOrBuilder {
+ // Use TestTableDDLStateData.newBuilder() to construct.
+ private TestTableDDLStateData(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private TestTableDDLStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final TestTableDDLStateData defaultInstance;
+ public static TestTableDDLStateData getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public TestTableDDLStateData getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private TestTableDDLStateData(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ tableName_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<TestTableDDLStateData> PARSER =
+ new com.google.protobuf.AbstractParser<TestTableDDLStateData>() {
+ public TestTableDDLStateData parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new TestTableDDLStateData(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<TestTableDDLStateData> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required string table_name = 1;
+ public static final int TABLE_NAME_FIELD_NUMBER = 1;
+ private java.lang.Object tableName_;
+ /**
+ * <code>required string table_name = 1;</code>
+ */
+ public boolean hasTableName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string table_name = 1;</code>
+ */
+ public java.lang.String getTableName() {
+ java.lang.Object ref = tableName_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ tableName_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>required string table_name = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getTableNameBytes() {
+ java.lang.Object ref = tableName_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ tableName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ tableName_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasTableName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getTableNameBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getTableNameBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code TestTableDDLStateData}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateDataOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ tableName_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData build() {
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData buildPartial() {
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData result = new org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.tableName_ = tableName_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData) {
+ return mergeFrom((org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData other) {
+ if (other == org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.getDefaultInstance()) return this;
+ if (other.hasTableName()) {
+ bitField0_ |= 0x00000001;
+ tableName_ = other.tableName_;
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasTableName()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required string table_name = 1;
+ private java.lang.Object tableName_ = "";
+ /**
+ * <code>required string table_name = 1;</code>
+ */
+ public boolean hasTableName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string table_name = 1;</code>
+ */
+ public java.lang.String getTableName() {
+ java.lang.Object ref = tableName_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ tableName_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>required string table_name = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getTableNameBytes() {
+ java.lang.Object ref = tableName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ tableName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>required string table_name = 1;</code>
+ */
+ public Builder setTableName(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ tableName_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string table_name = 1;</code>
+ */
+ public Builder clearTableName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ tableName_ = getDefaultInstance().getTableName();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string table_name = 1;</code>
+ */
+ public Builder setTableNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ tableName_ = value;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:TestTableDDLStateData)
+ }
+
+ static {
+ defaultInstance = new TestTableDDLStateData(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:TestTableDDLStateData)
+ }
+
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_TestTableDDLStateData_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_TestTableDDLStateData_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\023TestProcedure.proto\"+\n\025TestTableDDLSta" +
+ "teData\022\022\n\ntable_name\030\001 \002(\tBH\n.org.apache" +
+ ".hadoop.hbase.ipc.protobuf.generatedB\023Te" +
+ "stProcedureProtos\210\001\001"
+ };
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_TestTableDDLStateData_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_TestTableDDLStateData_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_TestTableDDLStateData_descriptor,
+ new java.lang.String[] { "TableName", });
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
index a2887d3..bb62ccf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
@@ -420,7 +420,8 @@ public class TestCatalogJanitor {
}
@Override
- public boolean abortProcedure(final long procId, final boolean mayInterruptIfRunning) {
+ public boolean abortProcedure(final long procId, final boolean mayInterruptIfRunning)
+ throws IOException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index bab8ada..fcdd334 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -26,6 +26,8 @@ import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Arrays;
@@ -48,6 +50,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.MiniHBaseCluster;
@@ -93,13 +96,20 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
+import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.master.procedure.TableProcedureInterface;
+import org.apache.hadoop.hbase.procedure2.Procedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
+import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.CheckPermissionsRequest;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.Region;
@@ -500,6 +510,116 @@ public class TestAccessController extends SecureTestUtil {
verifyDenied(enableTable, USER_RW, USER_RO, USER_NONE, USER_GROUP_READ, USER_GROUP_WRITE);
}
+ public static class TestTableDDLProcedure extends Procedure<MasterProcedureEnv>
+ implements TableProcedureInterface {
+ private TableName tableName;
+
+ public TestTableDDLProcedure() {
+ }
+
+ public TestTableDDLProcedure(final MasterProcedureEnv env, final TableName tableName)
+ throws IOException {
+ this.tableName = tableName;
+ this.setTimeout(180000); // Timeout in 3 minutes
+ this.setOwner(env.getRequestUser().getUGI().getShortUserName());
+ }
+
+ @Override
+ public TableName getTableName() {
+ return tableName;
+ }
+
+ @Override
+ public TableOperationType getTableOperationType() {
+ return null;
+ }
+
+ @Override
+ protected boolean abort(MasterProcedureEnv env) {
+ return true;
+ }
+
+ @Override
+ protected void serializeStateData(OutputStream stream) throws IOException {
+ TestProcedureProtos.TestTableDDLStateData.Builder testTableDDLMsg =
+ TestProcedureProtos.TestTableDDLStateData.newBuilder()
+ .setTableName(tableName.getNameAsString());
+ testTableDDLMsg.build().writeDelimitedTo(stream);
+ }
+
+ @Override
+ protected void deserializeStateData(InputStream stream) throws IOException {
+ TestProcedureProtos.TestTableDDLStateData testTableDDLMsg =
+ TestProcedureProtos.TestTableDDLStateData.parseDelimitedFrom(stream);
+ tableName = TableName.valueOf(testTableDDLMsg.getTableName());
+ }
+
+ @Override
+ protected Procedure[] execute(MasterProcedureEnv env) throws ProcedureYieldException,
+ InterruptedException {
+ // Not letting the procedure to complete until timed out
+ setState(ProcedureState.WAITING_TIMEOUT);
+ return null;
+ }
+
+ @Override
+ protected void rollback(MasterProcedureEnv env) throws IOException, InterruptedException {
+ }
+ }
+
+ @Test
+ public void testAbortProcedure() throws Exception {
+ final TableName tableName = TableName.valueOf("testAbortProcedure");
+ final ProcedureExecutor<MasterProcedureEnv> procExec =
+ TEST_UTIL.getHBaseCluster().getMaster().getMasterProcedureExecutor();
+ Procedure proc = new TestTableDDLProcedure(procExec.getEnvironment(), tableName);
+ proc.setOwner(USER_OWNER.getShortName());
+ final long procId = procExec.submitProcedure(proc);
+
+ AccessTestAction abortProcedureAction = new AccessTestAction() {
+ @Override
+ public Object run() throws Exception {
+ ACCESS_CONTROLLER
+ .preAbortProcedure(ObserverContext.createAndPrepare(CP_ENV, null), procExec, procId);
+ return null;
+ }
+ };
+
+ verifyAllowed(abortProcedureAction, SUPERUSER, USER_ADMIN, USER_GROUP_ADMIN);
+ verifyAllowed(abortProcedureAction, USER_OWNER);
+ verifyDenied(
+ abortProcedureAction, USER_RW, USER_RO, USER_NONE, USER_GROUP_READ, USER_GROUP_WRITE);
+ }
+
+ @Test
+ public void testListProcedures() throws Exception {
+ final TableName tableName = TableName.valueOf("testAbortProcedure");
+ final ProcedureExecutor<MasterProcedureEnv> procExec =
+ TEST_UTIL.getHBaseCluster().getMaster().getMasterProcedureExecutor();
+ Procedure proc = new TestTableDDLProcedure(procExec.getEnvironment(), tableName);
+ proc.setOwner(USER_OWNER.getShortName());
+ final long procId = procExec.submitProcedure(proc);
+ final List<ProcedureInfo> procInfoList = procExec.listProcedures();
+
+ AccessTestAction listProceduresAction = new AccessTestAction() {
+ @Override
+ public Object run() throws Exception {
+ List<ProcedureInfo> procInfoListClone = new ArrayList<ProcedureInfo>(procInfoList.size());
+ for(ProcedureInfo pi : procInfoList) {
+ procInfoListClone.add(pi.clone());
+ }
+ ACCESS_CONTROLLER
+ .postListProcedures(ObserverContext.createAndPrepare(CP_ENV, null), procInfoListClone);
+ return null;
+ }
+ };
+
+ verifyAllowed(listProceduresAction, SUPERUSER, USER_ADMIN, USER_GROUP_ADMIN);
+ verifyAllowed(listProceduresAction, USER_OWNER);
+ verifyIfNull(
+ listProceduresAction, USER_RW, USER_RO, USER_NONE, USER_GROUP_READ, USER_GROUP_WRITE);
+ }
+
@Test (timeout=180000)
public void testMove() throws Exception {
List<HRegionLocation> regions;
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6d90bcc/hbase-server/src/test/protobuf/TestProcedure.proto
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/protobuf/TestProcedure.proto b/hbase-server/src/test/protobuf/TestProcedure.proto
new file mode 100644
index 0000000..b621f6d
--- /dev/null
+++ b/hbase-server/src/test/protobuf/TestProcedure.proto
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+option java_package = "org.apache.hadoop.hbase.ipc.protobuf.generated";
+option java_outer_classname = "TestProcedureProtos";
+option java_generic_services = true;
+
+message TestTableDDLStateData {
+ required string table_name = 1;
+}