Posted to commits@hbase.apache.org by ap...@apache.org on 2015/08/13 00:22:02 UTC
[2/8] hbase git commit: HBASE-14122 Client API for determining if server side supports cell level security
HBASE-14122 Client API for determining if server side supports cell level security
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5e5bcceb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5e5bcceb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5e5bcceb
Branch: refs/heads/master
Commit: 5e5bcceb533e6e4ded65bc778f05da213c07b688
Parents: beb1f1d
Author: Andrew Purtell <ap...@apache.org>
Authored: Wed Aug 12 13:26:55 2015 -0700
Committer: Andrew Purtell <ap...@apache.org>
Committed: Wed Aug 12 13:26:55 2015 -0700
----------------------------------------------------------------------
.../org/apache/hadoop/hbase/client/Admin.java | 9 +-
.../hadoop/hbase/client/ClusterConnection.java | 1 +
.../hbase/client/ConnectionImplementation.java | 9 +
.../apache/hadoop/hbase/client/HBaseAdmin.java | 21 +
.../client/security/SecurityCapability.java | 63 +
.../hadoop/hbase/protobuf/ProtobufUtil.java | 21 +
.../security/access/AccessControlClient.java | 23 +
.../security/visibility/VisibilityClient.java | 12 +
.../hbase/protobuf/generated/MasterProtos.java | 1306 ++++++++++++++++--
hbase-protocol/src/main/protobuf/Master.proto | 19 +
.../hadoop/hbase/master/MasterRpcServices.java | 48 +
.../hbase/security/access/AccessController.java | 13 +-
.../visibility/VisibilityController.java | 10 +-
.../security/access/TestAccessController.java | 11 +
.../visibility/TestVisibilityLabels.java | 9 +
hbase-shell/src/main/ruby/hbase/admin.rb | 5 +
hbase-shell/src/main/ruby/hbase/security.rb | 19 +-
.../src/main/ruby/hbase/visibility_labels.rb | 27 +-
hbase-shell/src/main/ruby/shell.rb | 1 +
.../commands/list_security_capabilities.rb | 47 +
20 files changed, 1545 insertions(+), 129 deletions(-)
----------------------------------------------------------------------
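The new Admin#getSecurityCapabilities() API lets a client discover, before attempting any cell-level security operation, which security features the cluster advertises. A minimal usage sketch, assuming a reachable cluster and the default client configuration (not part of this commit):

    import java.util.List;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.security.SecurityCapability;

    public class SecurityCapabilityCheck {
      public static void main(String[] args) throws Exception {
        try (Connection connection =
                 ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = connection.getAdmin()) {
          // Ask the master which security features are in effect
          List<SecurityCapability> caps = admin.getSecurityCapabilities();
          System.out.println("cell authorization: "
              + caps.contains(SecurityCapability.CELL_AUTHORIZATION));
          System.out.println("cell visibility: "
              + caps.contains(SecurityCapability.CELL_VISIBILITY));
        }
      }
    }
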
http://git-wip-us.apache.org/repos/asf/hbase/blob/5e5bcceb/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index 46cfa49..f2fc958 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -18,7 +18,6 @@
*/
package org.apache.hadoop.hbase.client;
-
import java.io.Closeable;
import java.io.IOException;
import java.util.List;
@@ -39,6 +38,7 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.client.security.SecurityCapability;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
@@ -1546,4 +1546,11 @@ public interface Admin extends Abortable, Closeable {
*/
AdminProtos.GetRegionInfoResponse.CompactionState getMobCompactionState(final TableName tableName)
throws IOException;
+
+ /**
+ * Return the set of supported security capabilities.
+ * @throws IOException if a remote or network exception occurs
+ * @throws UnsupportedOperationException if the master does not support this operation
+ */
+ List<SecurityCapability> getSecurityCapabilities() throws IOException;
}
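Per the @throws note above, a master predating this patch will not implement the new RPC; a defensive caller can degrade to an empty capability list. An illustrative fragment (the helper name and fallback policy are this sketch's own, not the patch's):

    // Assumes imports of java.util.Collections, java.util.List,
    // org.apache.hadoop.hbase.client.Admin, and
    // org.apache.hadoop.hbase.client.security.SecurityCapability
    static List<SecurityCapability> capabilitiesOrEmpty(Admin admin) throws IOException {
      try {
        return admin.getSecurityCapabilities();
      } catch (UnsupportedOperationException e) {
        // Older master: treat as "no security capabilities advertised"
        return Collections.emptyList();
      }
    }
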
http://git-wip-us.apache.org/repos/asf/hbase/blob/5e5bcceb/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
index 07b055a..b3d99ae 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
@@ -296,4 +296,5 @@ public interface ClusterConnection extends HConnection {
* @return the configured client backoff policy
*/
ClientBackoffPolicy getBackoffPolicy();
+
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/5e5bcceb/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
index 2754997..a0d9955 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
@@ -56,6 +56,8 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse;
import org.apache.hadoop.hbase.quotas.ThrottlingException;
import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
import org.apache.hadoop.hbase.security.User;
@@ -71,6 +73,7 @@ import org.apache.hadoop.ipc.RemoteException;
import org.apache.zookeeper.KeeperException;
import javax.annotation.Nullable;
+
import java.io.Closeable;
import java.io.IOException;
import java.io.InterruptedIOException;
@@ -1728,6 +1731,12 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
IsBalancerEnabledRequest request) throws ServiceException {
return stub.isBalancerEnabled(controller, request);
}
+
+ @Override
+ public SecurityCapabilitiesResponse getSecurityCapabilities(RpcController controller,
+ SecurityCapabilitiesRequest request) throws ServiceException {
+ return stub.getSecurityCapabilities(controller, request);
+ }
};
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/5e5bcceb/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index ac9db75..1df3ffa 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -62,6 +62,7 @@ import org.apache.hadoop.hbase.UnknownRegionException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.client.security.SecurityCapability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
@@ -132,6 +133,7 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRespon
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest;
@@ -4623,4 +4625,23 @@ public class HBaseAdmin implements Admin {
+ " regions are online; retries exhausted.");
}
}
+
+ @Override
+ public List<SecurityCapability> getSecurityCapabilities() throws IOException {
+ try {
+ return executeCallable(new MasterCallable<List<SecurityCapability>>(getConnection()) {
+ @Override
+ public List<SecurityCapability> call(int callTimeout) throws ServiceException {
+ SecurityCapabilitiesRequest req = SecurityCapabilitiesRequest.newBuilder().build();
+ return ProtobufUtil.toSecurityCapabilityList(
+ master.getSecurityCapabilities(null, req).getCapabilitiesList());
+ }
+ });
+ } catch (IOException e) {
+ if (e instanceof RemoteException) {
+ e = ((RemoteException)e).unwrapRemoteException();
+ }
+ throw e;
+ }
+ }
}
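Note the catch block: executeCallable surfaces server-side failures as IOException, and unwrapRemoteException() restores the original exception class so callers are not forced to unpack Hadoop's RemoteException wrapper. The practical effect, sketched with an illustrative exception type:

    try {
      admin.getSecurityCapabilities();
    } catch (org.apache.hadoop.hbase.DoNotRetryIOException e) {
      // Reaches here directly; without unwrapping it would arrive as
      // org.apache.hadoop.ipc.RemoteException.
    }
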
http://git-wip-us.apache.org/repos/asf/hbase/blob/5e5bcceb/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java
new file mode 100644
index 0000000..1847b2e
--- /dev/null
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java
@@ -0,0 +1,63 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client.security;
+
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+
+/**
+ * Available security capabilities
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public enum SecurityCapability {
+ // Note to implementors: These must match the numbering of Capability values in MasterProtos
+ SIMPLE_AUTHENTICATION(0),
+ SECURE_AUTHENTICATION(1),
+ AUTHORIZATION(2),
+ CELL_AUTHORIZATION(3),
+ CELL_VISIBILITY(4);
+
+ private int value;
+
+ public int getValue() {
+ return value;
+ }
+
+ public String getName() {
+ return toString();
+ }
+
+ private SecurityCapability(int value) {
+ this.value = value;
+ }
+
+ public static SecurityCapability valueOf(int value) {
+ switch (value) {
+ case 0: return SIMPLE_AUTHENTICATION;
+ case 1: return SECURE_AUTHENTICATION;
+ case 2: return AUTHORIZATION;
+ case 3: return CELL_AUTHORIZATION;
+ case 4: return CELL_VISIBILITY;
+ default:
+ throw new IllegalArgumentException("Unknown SecurityCapability value " + value);
+ }
+ }
+};
+
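The "Note to implementors" above is load-bearing: ProtobufUtil maps wire numbers straight into this enum, so the two numberings must never drift. A test-style round-trip check (a sketch, not part of the commit):

    for (SecurityCapability cap : SecurityCapability.values()) {
      // Every declared value must survive the int round trip
      assert SecurityCapability.valueOf(cap.getValue()) == cap;
    }
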
http://git-wip-us.apache.org/repos/asf/hbase/blob/5e5bcceb/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index be3fb23..137bbed 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -65,6 +65,7 @@ import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
+import org.apache.hadoop.hbase.client.security.SecurityCapability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.Filter;
@@ -115,6 +116,7 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
import org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService;
@@ -3136,4 +3138,23 @@ public final class ProtobufUtil {
builder.setSrcChecksum(VersionInfo.getSrcChecksum());
return builder.build();
}
+
+ /**
+ * Convert SecurityCapabilitiesResponse.Capability to SecurityCapability
+ * @param capabilities capabilities returned in the SecurityCapabilitiesResponse message
+ * @return the converted list of SecurityCapability elements
+ */
+ public static List<SecurityCapability> toSecurityCapabilityList(
+ List<MasterProtos.SecurityCapabilitiesResponse.Capability> capabilities) {
+ List<SecurityCapability> scList = new ArrayList<>(capabilities.size());
+ for (MasterProtos.SecurityCapabilitiesResponse.Capability c: capabilities) {
+ try {
+ scList.add(SecurityCapability.valueOf(c.getNumber()));
+ } catch (IllegalArgumentException e) {
+ // Unknown capability, just ignore it. We don't understand the new capability
+ // but don't care since by definition we cannot take advantage of it.
+ }
+ }
+ return scList;
+ }
}
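The swallowed IllegalArgumentException is what keeps this conversion forward compatible: if the generated protos ever carry a Capability number that this client's SecurityCapability enum does not yet define, the entry is silently dropped instead of failing the whole getSecurityCapabilities() call. Sketch of the caller's view, assuming a response already in hand:

    // 'response' is a SecurityCapabilitiesResponse obtained from the master;
    // any capability numbers unknown to SecurityCapability are omitted.
    List<SecurityCapability> caps =
        ProtobufUtil.toSecurityCapabilityList(response.getCapabilitiesList());
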
http://git-wip-us.apache.org/repos/asf/hbase/blob/5e5bcceb/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
index 5b41716..c50abc1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.security.SecurityCapability;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
@@ -48,6 +49,28 @@ public class AccessControlClient {
public static final TableName ACL_TABLE_NAME =
TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "acl");
+ /**
+ * Return true if authorization is supported and enabled
+ * @param connection The connection to use
+ * @return true if authorization is supported and enabled, false otherwise
+ * @throws IOException if a remote or network exception occurs
+ */
+ public static boolean isAuthorizationEnabled(Connection connection) throws IOException {
+ return connection.getAdmin().getSecurityCapabilities()
+ .contains(SecurityCapability.AUTHORIZATION);
+ }
+
+ /**
+ * Return true if cell authorization is supported and enabled
+ * @param connection The connection to use
+ * @return true if cell authorization is supported and enabled, false otherwise
+ * @throws IOException if a remote or network exception occurs
+ */
+ public static boolean isCellAuthorizationEnabled(Connection connection) throws IOException {
+ return connection.getAdmin().getSecurityCapabilities()
+ .contains(SecurityCapability.CELL_AUTHORIZATION);
+ }
+
private static BlockingInterface getAccessControlServiceStub(Table ht)
throws IOException {
CoprocessorRpcChannel service = ht.coprocessorService(HConstants.EMPTY_START_ROW);
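These predicates give shell commands and tooling a cheap preflight before attempting grants or per-cell ACLs. A hedged usage sketch ('conf' is a placeholder Configuration):

    try (Connection connection = ConnectionFactory.createConnection(conf)) {
      if (!AccessControlClient.isAuthorizationEnabled(connection)) {
        System.err.println("AccessController not active; skipping grant");
      } else if (AccessControlClient.isCellAuthorizationEnabled(connection)) {
        // Cell ACLs (e.g. Mutation#setACL) are safe to use here
      }
    }
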
http://git-wip-us.apache.org/repos/asf/hbase/blob/5e5bcceb/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
index 8de783c..7527049 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
+import org.apache.hadoop.hbase.client.security.SecurityCapability;
import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.GetAuthsRequest;
@@ -55,6 +56,17 @@ import com.google.protobuf.ServiceException;
public class VisibilityClient {
/**
+ * Return true if cell visibility features are supported and enabled
+ * @param connection The connection to use
+ * @return true if cell visibility features are supported and enabled, false otherwise
+ * @throws IOException if a remote or network exception occurs
+ */
+ public static boolean isCellVisibilityEnabled(Connection connection) throws IOException {
+ return connection.getAdmin().getSecurityCapabilities()
+ .contains(SecurityCapability.CELL_VISIBILITY);
+ }
+
+ /**
* Utility method for adding label to the system.
*
* @param conf
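isCellVisibilityEnabled() mirrors the AccessControlClient predicates for the VisibilityController. Sketch of the intended guard ('conf' again a placeholder Configuration):

    try (Connection connection = ConnectionFactory.createConnection(conf)) {
      if (VisibilityClient.isCellVisibilityEnabled(connection)) {
        // Safe to define labels and set visibility expressions on cells
      }
    }
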
http://git-wip-us.apache.org/repos/asf/hbase/blob/5e5bcceb/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
index 0f74b85..eb98b42 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
@@ -50378,6 +50378,974 @@ public final class MasterProtos {
// @@protoc_insertion_point(class_scope:hbase.pb.MajorCompactionTimestampResponse)
}
+ public interface SecurityCapabilitiesRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ /**
+ * Protobuf type {@code hbase.pb.SecurityCapabilitiesRequest}
+ */
+ public static final class SecurityCapabilitiesRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements SecurityCapabilitiesRequestOrBuilder {
+ // Use SecurityCapabilitiesRequest.newBuilder() to construct.
+ private SecurityCapabilitiesRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private SecurityCapabilitiesRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final SecurityCapabilitiesRequest defaultInstance;
+ public static SecurityCapabilitiesRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public SecurityCapabilitiesRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private SecurityCapabilitiesRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<SecurityCapabilitiesRequest> PARSER =
+ new com.google.protobuf.AbstractParser<SecurityCapabilitiesRequest>() {
+ public SecurityCapabilitiesRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SecurityCapabilitiesRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<SecurityCapabilitiesRequest> getParserForType() {
+ return PARSER;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.SecurityCapabilitiesRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest build() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.SecurityCapabilitiesRequest)
+ }
+
+ static {
+ defaultInstance = new SecurityCapabilitiesRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.SecurityCapabilitiesRequest)
+ }
+
+ public interface SecurityCapabilitiesResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1;
+ /**
+ * <code>repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
+ */
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability> getCapabilitiesList();
+ /**
+ * <code>repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
+ */
+ int getCapabilitiesCount();
+ /**
+ * <code>repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability getCapabilities(int index);
+ }
+ /**
+ * Protobuf type {@code hbase.pb.SecurityCapabilitiesResponse}
+ */
+ public static final class SecurityCapabilitiesResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements SecurityCapabilitiesResponseOrBuilder {
+ // Use SecurityCapabilitiesResponse.newBuilder() to construct.
+ private SecurityCapabilitiesResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private SecurityCapabilitiesResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final SecurityCapabilitiesResponse defaultInstance;
+ public static SecurityCapabilitiesResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public SecurityCapabilitiesResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private SecurityCapabilitiesResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability value = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(1, rawValue);
+ } else {
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ capabilities_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability>();
+ mutable_bitField0_ |= 0x00000001;
+ }
+ capabilities_.add(value);
+ }
+ break;
+ }
+ case 10: {
+ int length = input.readRawVarint32();
+ int oldLimit = input.pushLimit(length);
+ while(input.getBytesUntilLimit() > 0) {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability value = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(1, rawValue);
+ } else {
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ capabilities_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability>();
+ mutable_bitField0_ |= 0x00000001;
+ }
+ capabilities_.add(value);
+ }
+ }
+ input.popLimit(oldLimit);
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ capabilities_ = java.util.Collections.unmodifiableList(capabilities_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<SecurityCapabilitiesResponse> PARSER =
+ new com.google.protobuf.AbstractParser<SecurityCapabilitiesResponse>() {
+ public SecurityCapabilitiesResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SecurityCapabilitiesResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<SecurityCapabilitiesResponse> getParserForType() {
+ return PARSER;
+ }
+
+ /**
+ * Protobuf enum {@code hbase.pb.SecurityCapabilitiesResponse.Capability}
+ */
+ public enum Capability
+ implements com.google.protobuf.ProtocolMessageEnum {
+ /**
+ * <code>SIMPLE_AUTHENTICATION = 0;</code>
+ */
+ SIMPLE_AUTHENTICATION(0, 0),
+ /**
+ * <code>SECURE_AUTHENTICATION = 1;</code>
+ */
+ SECURE_AUTHENTICATION(1, 1),
+ /**
+ * <code>AUTHORIZATION = 2;</code>
+ */
+ AUTHORIZATION(2, 2),
+ /**
+ * <code>CELL_AUTHORIZATION = 3;</code>
+ */
+ CELL_AUTHORIZATION(3, 3),
+ /**
+ * <code>CELL_VISIBILITY = 4;</code>
+ */
+ CELL_VISIBILITY(4, 4),
+ ;
+
+ /**
+ * <code>SIMPLE_AUTHENTICATION = 0;</code>
+ */
+ public static final int SIMPLE_AUTHENTICATION_VALUE = 0;
+ /**
+ * <code>SECURE_AUTHENTICATION = 1;</code>
+ */
+ public static final int SECURE_AUTHENTICATION_VALUE = 1;
+ /**
+ * <code>AUTHORIZATION = 2;</code>
+ */
+ public static final int AUTHORIZATION_VALUE = 2;
+ /**
+ * <code>CELL_AUTHORIZATION = 3;</code>
+ */
+ public static final int CELL_AUTHORIZATION_VALUE = 3;
+ /**
+ * <code>CELL_VISIBILITY = 4;</code>
+ */
+ public static final int CELL_VISIBILITY_VALUE = 4;
+
+
+ public final int getNumber() { return value; }
+
+ public static Capability valueOf(int value) {
+ switch (value) {
+ case 0: return SIMPLE_AUTHENTICATION;
+ case 1: return SECURE_AUTHENTICATION;
+ case 2: return AUTHORIZATION;
+ case 3: return CELL_AUTHORIZATION;
+ case 4: return CELL_VISIBILITY;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<Capability>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<Capability>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<Capability>() {
+ public Capability findValueByNumber(int number) {
+ return Capability.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDescriptor().getEnumTypes().get(0);
+ }
+
+ private static final Capability[] VALUES = values();
+
+ public static Capability valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private Capability(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:hbase.pb.SecurityCapabilitiesResponse.Capability)
+ }
+
+ // repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1;
+ public static final int CAPABILITIES_FIELD_NUMBER = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability> capabilities_;
+ /**
+ * <code>repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability> getCapabilitiesList() {
+ return capabilities_;
+ }
+ /**
+ * <code>repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
+ */
+ public int getCapabilitiesCount() {
+ return capabilities_.size();
+ }
+ /**
+ * <code>repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability getCapabilities(int index) {
+ return capabilities_.get(index);
+ }
+
+ private void initFields() {
+ capabilities_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < capabilities_.size(); i++) {
+ output.writeEnum(1, capabilities_.get(i).getNumber());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ {
+ int dataSize = 0;
+ for (int i = 0; i < capabilities_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeEnumSizeNoTag(capabilities_.get(i).getNumber());
+ }
+ size += dataSize;
+ size += 1 * capabilities_.size();
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse) obj;
+
+ boolean result = true;
+ result = result && getCapabilitiesList()
+ .equals(other.getCapabilitiesList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getCapabilitiesCount() > 0) {
+ hash = (37 * hash) + CAPABILITIES_FIELD_NUMBER;
+ hash = (53 * hash) + hashEnumList(getCapabilitiesList());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.SecurityCapabilitiesResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ capabilities_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse build() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse(this);
+ int from_bitField0_ = bitField0_;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ capabilities_ = java.util.Collections.unmodifiableList(capabilities_);
+ bitField0_ = (bitField0_ & ~0x00000001);
+ }
+ result.capabilities_ = capabilities_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance()) return this;
+ if (!other.capabilities_.isEmpty()) {
+ if (capabilities_.isEmpty()) {
+ capabilities_ = other.capabilities_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureCapabilitiesIsMutable();
+ capabilities_.addAll(other.capabilities_);
+ }
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability> capabilities_ =
+ java.util.Collections.emptyList();
+ private void ensureCapabilitiesIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ capabilities_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability>(capabilities_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+ /**
+ * <code>repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability> getCapabilitiesList() {
+ return java.util.Collections.unmodifiableList(capabilities_);
+ }
+ /**
+ * <code>repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
+ */
+ public int getCapabilitiesCount() {
+ return capabilities_.size();
+ }
+ /**
+ * <code>repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability getCapabilities(int index) {
+ return capabilities_.get(index);
+ }
+ /**
+ * <code>repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
+ */
+ public Builder setCapabilities(
+ int index, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureCapabilitiesIsMutable();
+ capabilities_.set(index, value);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
+ */
+ public Builder addCapabilities(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureCapabilitiesIsMutable();
+ capabilities_.add(value);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
+ */
+ public Builder addAllCapabilities(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability> values) {
+ ensureCapabilitiesIsMutable();
+ super.addAll(values, capabilities_);
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>repeated .hbase.pb.SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
+ */
+ public Builder clearCapabilities() {
+ capabilities_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.SecurityCapabilitiesResponse)
+ }
+
+ static {
+ defaultInstance = new SecurityCapabilitiesResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.SecurityCapabilitiesResponse)
+ }
+
/**
* Protobuf service {@code hbase.pb.MasterService}
*/
@@ -50997,6 +51965,18 @@ public final class MasterProtos {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse> done);
+ /**
+ * <code>rpc getSecurityCapabilities(.hbase.pb.SecurityCapabilitiesRequest) returns (.hbase.pb.SecurityCapabilitiesResponse);</code>
+ *
+ * <pre>
+ ** Returns the security capabilities in effect on the cluster
+ * </pre>
+ */
+ public abstract void getSecurityCapabilities(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse> done);
+
}
public static com.google.protobuf.Service newReflectiveService(
@@ -51394,6 +52374,14 @@ public final class MasterProtos {
impl.getProcedureResult(controller, request, done);
}
+ @java.lang.Override
+ public void getSecurityCapabilities(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse> done) {
+ impl.getSecurityCapabilities(controller, request, done);
+ }
+
};
}
@@ -51514,6 +52502,8 @@ public final class MasterProtos {
return impl.getLastMajorCompactionTimestampForRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest)request);
case 48:
return impl.getProcedureResult(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest)request);
+ case 49:
+ return impl.getSecurityCapabilities(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -51626,6 +52616,8 @@ public final class MasterProtos {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance();
case 48:
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance();
+ case 49:
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -51738,6 +52730,8 @@ public final class MasterProtos {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
case 48:
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance();
+ case 49:
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -52357,6 +53351,18 @@ public final class MasterProtos {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse> done);
+ /**
+ * <code>rpc getSecurityCapabilities(.hbase.pb.SecurityCapabilitiesRequest) returns (.hbase.pb.SecurityCapabilitiesResponse);</code>
+ *
+ * <pre>
+ ** Returns the security capabilities in effect on the cluster
+ * </pre>
+ */
+ public abstract void getSecurityCapabilities(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse> done);
+
public static final
com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
@@ -52624,6 +53630,11 @@ public final class MasterProtos {
com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse>specializeCallback(
done));
return;
+ case 49:
+ this.getSecurityCapabilities(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse>specializeCallback(
+ done));
+ return;
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -52736,6 +53747,8 @@ public final class MasterProtos {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance();
case 48:
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance();
+ case 49:
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -52848,6 +53861,8 @@ public final class MasterProtos {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
case 48:
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance();
+ case 49:
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
@@ -53603,6 +54618,21 @@ public final class MasterProtos {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.class,
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance()));
}
+
+ public void getSecurityCapabilities(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(49),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.class,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance()));
+ }
}
public static BlockingInterface newBlockingStub(
@@ -53855,6 +54885,11 @@ public final class MasterProtos {
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest request)
throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse getSecurityCapabilities(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request)
+ throws com.google.protobuf.ServiceException;
}
private static final class BlockingStub implements BlockingInterface {
@@ -54451,6 +55486,18 @@ public final class MasterProtos {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance());
}
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse getSecurityCapabilities(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(49),
+ controller,
+ request,
+ org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance());
+ }
+
}
// @@protoc_insertion_point(class_scope:hbase.pb.MasterService)
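With the blocking stub addition above, a caller holding a connected
MasterService.BlockingInterface can invoke the new method directly. A sketch
under that assumption (stub acquisition elided; the null controller is a
simplification, real callers pass an implementation-specific controller):

  // Sketch: 'stub' is assumed to be a connected MasterService.BlockingInterface.
  static void printCapabilities(MasterProtos.MasterService.BlockingInterface stub)
      throws com.google.protobuf.ServiceException {
    MasterProtos.SecurityCapabilitiesResponse resp = stub.getSecurityCapabilities(
        null, // RpcController placeholder for this sketch
        MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance());
    for (MasterProtos.SecurityCapabilitiesResponse.Capability c
        : resp.getCapabilitiesList()) {
      System.out.println(c);
    }
  }
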
@@ -54921,6 +55968,16 @@ public final class MasterProtos {
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_hbase_pb_MajorCompactionTimestampResponse_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_SecurityCapabilitiesRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_SecurityCapabilitiesResponse_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@@ -55088,120 +56145,129 @@ public final class MasterProtos {
"mpactionTimestampForRegionRequest\022)\n\006reg" +
"ion\030\001 \002(\0132\031.hbase.pb.RegionSpecifier\"@\n " +
"MajorCompactionTimestampResponse\022\034\n\024comp" +
- "action_timestamp\030\001 \002(\0032\327\"\n\rMasterService" +
- "\022e\n\024GetSchemaAlterStatus\022%.hbase.pb.GetS",
- "chemaAlterStatusRequest\032&.hbase.pb.GetSc" +
- "hemaAlterStatusResponse\022b\n\023GetTableDescr" +
- "iptors\022$.hbase.pb.GetTableDescriptorsReq" +
- "uest\032%.hbase.pb.GetTableDescriptorsRespo" +
- "nse\022P\n\rGetTableNames\022\036.hbase.pb.GetTable" +
- "NamesRequest\032\037.hbase.pb.GetTableNamesRes" +
- "ponse\022Y\n\020GetClusterStatus\022!.hbase.pb.Get" +
- "ClusterStatusRequest\032\".hbase.pb.GetClust" +
- "erStatusResponse\022V\n\017IsMasterRunning\022 .hb" +
- "ase.pb.IsMasterRunningRequest\032!.hbase.pb",
- ".IsMasterRunningResponse\022D\n\tAddColumn\022\032." +
- "hbase.pb.AddColumnRequest\032\033.hbase.pb.Add" +
- "ColumnResponse\022M\n\014DeleteColumn\022\035.hbase.p" +
- "b.DeleteColumnRequest\032\036.hbase.pb.DeleteC" +
- "olumnResponse\022M\n\014ModifyColumn\022\035.hbase.pb" +
- ".ModifyColumnRequest\032\036.hbase.pb.ModifyCo" +
- "lumnResponse\022G\n\nMoveRegion\022\033.hbase.pb.Mo" +
- "veRegionRequest\032\034.hbase.pb.MoveRegionRes" +
- "ponse\022k\n\026DispatchMergingRegions\022\'.hbase." +
- "pb.DispatchMergingRegionsRequest\032(.hbase",
- ".pb.DispatchMergingRegionsResponse\022M\n\014As" +
- "signRegion\022\035.hbase.pb.AssignRegionReques" +
- "t\032\036.hbase.pb.AssignRegionResponse\022S\n\016Una" +
- "ssignRegion\022\037.hbase.pb.UnassignRegionReq" +
- "uest\032 .hbase.pb.UnassignRegionResponse\022P" +
- "\n\rOfflineRegion\022\036.hbase.pb.OfflineRegion" +
- "Request\032\037.hbase.pb.OfflineRegionResponse" +
- "\022J\n\013DeleteTable\022\034.hbase.pb.DeleteTableRe" +
- "quest\032\035.hbase.pb.DeleteTableResponse\022P\n\r" +
- "truncateTable\022\036.hbase.pb.TruncateTableRe",
- "quest\032\037.hbase.pb.TruncateTableResponse\022J" +
- "\n\013EnableTable\022\034.hbase.pb.EnableTableRequ" +
- "est\032\035.hbase.pb.EnableTableResponse\022M\n\014Di" +
- "sableTable\022\035.hbase.pb.DisableTableReques" +
- "t\032\036.hbase.pb.DisableTableResponse\022J\n\013Mod" +
- "ifyTable\022\034.hbase.pb.ModifyTableRequest\032\035" +
- ".hbase.pb.ModifyTableResponse\022J\n\013CreateT" +
- "able\022\034.hbase.pb.CreateTableRequest\032\035.hba" +
- "se.pb.CreateTableResponse\022A\n\010Shutdown\022\031." +
- "hbase.pb.ShutdownRequest\032\032.hbase.pb.Shut",
- "downResponse\022G\n\nStopMaster\022\033.hbase.pb.St" +
- "opMasterRequest\032\034.hbase.pb.StopMasterRes" +
- "ponse\022>\n\007Balance\022\030.hbase.pb.BalanceReque" +
- "st\032\031.hbase.pb.BalanceResponse\022_\n\022SetBala" +
- "ncerRunning\022#.hbase.pb.SetBalancerRunnin" +
- "gRequest\032$.hbase.pb.SetBalancerRunningRe" +
- "sponse\022\\\n\021IsBalancerEnabled\022\".hbase.pb.I" +
- "sBalancerEnabledRequest\032#.hbase.pb.IsBal" +
- "ancerEnabledResponse\022S\n\016RunCatalogScan\022\037" +
- ".hbase.pb.RunCatalogScanRequest\032 .hbase.",
- "pb.RunCatalogScanResponse\022e\n\024EnableCatal" +
- "ogJanitor\022%.hbase.pb.EnableCatalogJanito" +
- "rRequest\032&.hbase.pb.EnableCatalogJanitor" +
- "Response\022n\n\027IsCatalogJanitorEnabled\022(.hb" +
- "ase.pb.IsCatalogJanitorEnabledRequest\032)." +
- "hbase.pb.IsCatalogJanitorEnabledResponse" +
- "\022^\n\021ExecMasterService\022#.hbase.pb.Coproce" +
- "ssorServiceRequest\032$.hbase.pb.Coprocesso" +
- "rServiceResponse\022A\n\010Snapshot\022\031.hbase.pb." +
- "SnapshotRequest\032\032.hbase.pb.SnapshotRespo",
- "nse\022h\n\025GetCompletedSnapshots\022&.hbase.pb." +
- "GetCompletedSnapshotsRequest\032\'.hbase.pb." +
- "GetCompletedSnapshotsResponse\022S\n\016DeleteS" +
- "napshot\022\037.hbase.pb.DeleteSnapshotRequest" +
- "\032 .hbase.pb.DeleteSnapshotResponse\022S\n\016Is" +
- "SnapshotDone\022\037.hbase.pb.IsSnapshotDoneRe" +
- "quest\032 .hbase.pb.IsSnapshotDoneResponse\022" +
- "V\n\017RestoreSnapshot\022 .hbase.pb.RestoreSna" +
- "pshotRequest\032!.hbase.pb.RestoreSnapshotR" +
- "esponse\022h\n\025IsRestoreSnapshotDone\022&.hbase",
- ".pb.IsRestoreSnapshotDoneRequest\032\'.hbase" +
- ".pb.IsRestoreSnapshotDoneResponse\022P\n\rExe" +
- "cProcedure\022\036.hbase.pb.ExecProcedureReque" +
- "st\032\037.hbase.pb.ExecProcedureResponse\022W\n\024E" +
- "xecProcedureWithRet\022\036.hbase.pb.ExecProce" +
- "dureRequest\032\037.hbase.pb.ExecProcedureResp" +
- "onse\022V\n\017IsProcedureDone\022 .hbase.pb.IsPro" +
- "cedureDoneRequest\032!.hbase.pb.IsProcedure" +
- "DoneResponse\022V\n\017ModifyNamespace\022 .hbase." +
- "pb.ModifyNamespaceRequest\032!.hbase.pb.Mod",
- "ifyNamespaceResponse\022V\n\017CreateNamespace\022" +
- " .hbase.pb.CreateNamespaceRequest\032!.hbas" +
- "e.pb.CreateNamespaceResponse\022V\n\017DeleteNa" +
- "mespace\022 .hbase.pb.DeleteNamespaceReques" +
- "t\032!.hbase.pb.DeleteNamespaceResponse\022k\n\026" +
- "GetNamespaceDescriptor\022\'.hbase.pb.GetNam" +
- "espaceDescriptorRequest\032(.hbase.pb.GetNa" +
- "mespaceDescriptorResponse\022q\n\030ListNamespa" +
- "ceDescriptors\022).hbase.pb.ListNamespaceDe" +
- "scriptorsRequest\032*.hbase.pb.ListNamespac",
- "eDescriptorsResponse\022\206\001\n\037ListTableDescri" +
- "ptorsByNamespace\0220.hbase.pb.ListTableDes" +
- "criptorsByNamespaceRequest\0321.hbase.pb.Li" +
- "stTableDescriptorsByNamespaceResponse\022t\n" +
- "\031ListTableNamesByNamespace\022*.hbase.pb.Li" +
- "stTableNamesByNamespaceRequest\032+.hbase.p" +
- "b.ListTableNamesByNamespaceResponse\022P\n\rG" +
- "etTableState\022\036.hbase.pb.GetTableStateReq" +
- "uest\032\037.hbase.pb.GetTableStateResponse\022A\n" +
- "\010SetQuota\022\031.hbase.pb.SetQuotaRequest\032\032.h",
- "base.pb.SetQuotaResponse\022x\n\037getLastMajor" +
- "CompactionTimestamp\022).hbase.pb.MajorComp" +
- "actionTimestampRequest\032*.hbase.pb.MajorC" +
- "ompactionTimestampResponse\022\212\001\n(getLastMa" +
- "jorCompactionTimestampForRegion\0222.hbase." +
- "pb.MajorCompactionTimestampForRegionRequ" +
- "est\032*.hbase.pb.MajorCompactionTimestampR" +
- "esponse\022_\n\022getProcedureResult\022#.hbase.pb" +
- ".GetProcedureResultRequest\032$.hbase.pb.Ge" +
- "tProcedureResultResponseBB\n*org.apache.h",
- "adoop.hbase.protobuf.generatedB\014MasterPr" +
- "otosH\001\210\001\001\240\001\001"
+ "action_timestamp\030\001 \002(\003\"\035\n\033SecurityCapabi" +
+ "litiesRequest\"\354\001\n\034SecurityCapabilitiesRe",
+ "sponse\022G\n\014capabilities\030\001 \003(\01621.hbase.pb." +
+ "SecurityCapabilitiesResponse.Capability\"" +
+ "\202\001\n\nCapability\022\031\n\025SIMPLE_AUTHENTICATION\020" +
+ "\000\022\031\n\025SECURE_AUTHENTICATION\020\001\022\021\n\rAUTHORIZ" +
+ "ATION\020\002\022\026\n\022CELL_AUTHORIZATION\020\003\022\023\n\017CELL_" +
+ "VISIBILITY\020\0042\301#\n\rMasterService\022e\n\024GetSch" +
+ "emaAlterStatus\022%.hbase.pb.GetSchemaAlter" +
+ "StatusRequest\032&.hbase.pb.GetSchemaAlterS" +
+ "tatusResponse\022b\n\023GetTableDescriptors\022$.h" +
+ "base.pb.GetTableDescriptorsRequest\032%.hba",
+ "se.pb.GetTableDescriptorsResponse\022P\n\rGet" +
+ "TableNames\022\036.hbase.pb.GetTableNamesReque" +
+ "st\032\037.hbase.pb.GetTableNamesResponse\022Y\n\020G" +
+ "etClusterStatus\022!.hbase.pb.GetClusterSta" +
+ "tusRequest\032\".hbase.pb.GetClusterStatusRe" +
+ "sponse\022V\n\017IsMasterRunning\022 .hbase.pb.IsM" +
+ "asterRunningRequest\032!.hbase.pb.IsMasterR" +
+ "unningResponse\022D\n\tAddColumn\022\032.hbase.pb.A" +
+ "ddColumnRequest\032\033.hbase.pb.AddColumnResp" +
+ "onse\022M\n\014DeleteColumn\022\035.hbase.pb.DeleteCo",
+ "lumnRequest\032\036.hbase.pb.DeleteColumnRespo" +
+ "nse\022M\n\014ModifyColumn\022\035.hbase.pb.ModifyCol" +
+ "umnRequest\032\036.hbase.pb.ModifyColumnRespon" +
+ "se\022G\n\nMoveRegion\022\033.hbase.pb.MoveRegionRe" +
+ "quest\032\034.hbase.pb.MoveRegionResponse\022k\n\026D" +
+ "ispatchMergingRegions\022\'.hbase.pb.Dispatc" +
+ "hMergingRegionsRequest\032(.hbase.pb.Dispat" +
+ "chMergingRegionsResponse\022M\n\014AssignRegion" +
+ "\022\035.hbase.pb.AssignRegionRequest\032\036.hbase." +
+ "pb.AssignRegionResponse\022S\n\016UnassignRegio",
+ "n\022\037.hbase.pb.UnassignRegionRequest\032 .hba" +
+ "se.pb.UnassignRegionResponse\022P\n\rOfflineR" +
+ "egion\022\036.hbase.pb.OfflineRegionRequest\032\037." +
+ "hbase.pb.OfflineRegionResponse\022J\n\013Delete" +
+ "Table\022\034.hbase.pb.DeleteTableRequest\032\035.hb" +
+ "ase.pb.DeleteTableResponse\022P\n\rtruncateTa" +
+ "ble\022\036.hbase.pb.TruncateTableRequest\032\037.hb" +
+ "ase.pb.TruncateTableResponse\022J\n\013EnableTa" +
+ "ble\022\034.hbase.pb.EnableTableRequest\032\035.hbas" +
+ "e.pb.EnableTableResponse\022M\n\014DisableTable",
+ "\022\035.hbase.pb.DisableTableRequest\032\036.hbase." +
+ "pb.DisableTableResponse\022J\n\013ModifyTable\022\034" +
+ ".hbase.pb.ModifyTableRequest\032\035.hbase.pb." +
+ "ModifyTableResponse\022J\n\013CreateTable\022\034.hba" +
+ "se.pb.CreateTableRequest\032\035.hbase.pb.Crea" +
+ "teTableResponse\022A\n\010Shutdown\022\031.hbase.pb.S" +
+ "hutdownRequest\032\032.hbase.pb.ShutdownRespon" +
+ "se\022G\n\nStopMaster\022\033.hbase.pb.StopMasterRe" +
+ "quest\032\034.hbase.pb.StopMasterResponse\022>\n\007B" +
+ "alance\022\030.hbase.pb.BalanceRequest\032\031.hbase",
+ ".pb.BalanceResponse\022_\n\022SetBalancerRunnin" +
+ "g\022#.hbase.pb.SetBalancerRunningRequest\032$" +
+ ".hbase.pb.SetBalancerRunningResponse\022\\\n\021" +
+ "IsBalancerEnabled\022\".hbase.pb.IsBalancerE" +
+ "nabledRequest\032#.hbase.pb.IsBalancerEnabl" +
+ "edResponse\022S\n\016RunCatalogScan\022\037.hbase.pb." +
+ "RunCatalogScanRequest\032 .hbase.pb.RunCata" +
+ "logScanResponse\022e\n\024EnableCatalogJanitor\022" +
+ "%.hbase.pb.EnableCatalogJanitorRequest\032&" +
+ ".hbase.pb.EnableCatalogJanitorResponse\022n",
+ "\n\027IsCatalogJanitorEnabled\022(.hbase.pb.IsC" +
+ "atalogJanitorEnabledRequest\032).hbase.pb.I" +
+ "sCatalogJanitorEnabledResponse\022^\n\021ExecMa" +
+ "sterService\022#.hbase.pb.CoprocessorServic" +
+ "eRequest\032$.hbase.pb.CoprocessorServiceRe" +
+ "sponse\022A\n\010Snapshot\022\031.hbase.pb.SnapshotRe" +
+ "quest\032\032.hbase.pb.SnapshotResponse\022h\n\025Get" +
+ "CompletedSnapshots\022&.hbase.pb.GetComplet" +
+ "edSnapshotsRequest\032\'.hbase.pb.GetComplet" +
+ "edSnapshotsResponse\022S\n\016DeleteSnapshot\022\037.",
+ "hbase.pb.DeleteSnapshotRequest\032 .hbase.p" +
+ "b.DeleteSnapshotResponse\022S\n\016IsSnapshotDo" +
+ "ne\022\037.hbase.pb.IsSnapshotDoneRequest\032 .hb" +
+ "ase.pb.IsSnapshotDoneResponse\022V\n\017Restore" +
+ "Snapshot\022 .hbase.pb.RestoreSnapshotReque" +
+ "st\032!.hbase.pb.RestoreSnapshotResponse\022h\n" +
+ "\025IsRestoreSnapshotDone\022&.hbase.pb.IsRest" +
+ "oreSnapshotDoneRequest\032\'.hbase.pb.IsRest" +
+ "oreSnapshotDoneResponse\022P\n\rExecProcedure" +
+ "\022\036.hbase.pb.ExecProcedureRequest\032\037.hbase",
+ ".pb.ExecProcedureResponse\022W\n\024ExecProcedu" +
+ "reWithRet\022\036.hbase.pb.ExecProcedureReques" +
+ "t\032\037.hbase.pb.ExecProcedureResponse\022V\n\017Is" +
+ "ProcedureDone\022 .hbase.pb.IsProcedureDone" +
+ "Request\032!.hbase.pb.IsProcedureDoneRespon" +
+ "se\022V\n\017ModifyNamespace\022 .hbase.pb.ModifyN" +
+ "amespaceRequest\032!.hbase.pb.ModifyNamespa" +
+ "ceResponse\022V\n\017CreateNamespace\022 .hbase.pb" +
+ ".CreateNamespaceRequest\032!.hbase.pb.Creat" +
+ "eNamespaceResponse\022V\n\017DeleteNamespace\022 .",
+ "hbase.pb.DeleteNamespaceRequest\032!.hbase." +
+ "pb.DeleteNamespaceResponse\022k\n\026GetNamespa" +
+ "ceDescriptor\022\'.hbase.pb.GetNamespaceDesc" +
+ "riptorRequest\032(.hbase.pb.GetNamespaceDes" +
+ "criptorResponse\022q\n\030ListNamespaceDescript" +
+ "ors\022).hbase.pb.ListNamespaceDescriptorsR" +
+ "equest\032*.hbase.pb.ListNamespaceDescripto" +
+ "rsResponse\022\206\001\n\037ListTableDescriptorsByNam" +
+ "espace\0220.hbase.pb.ListTableDescriptorsBy" +
+ "NamespaceRequest\0321.hbase.pb.ListTableDes",
+ "criptorsByNamespaceResponse\022t\n\031ListTable" +
+ "NamesByNamespace\022*.hbase.pb.ListTableNam" +
+ "esByNamespaceRequest\032+.hbase.pb.ListTabl" +
+ "eNamesByNamespaceResponse\022P\n\rGetTableSta" +
+ "te\022\036.hbase.pb.GetTableStateRequest\032\037.hba" +
+ "se.pb.GetTableStateResponse\022A\n\010SetQuota\022" +
+ "\031.hbase.pb.SetQuotaRequest\032\032.hbase.pb.Se" +
+ "tQuotaResponse\022x\n\037getLastMajorCompaction" +
+ "Timestamp\022).hbase.pb.MajorCompactionTime" +
+ "stampRequest\032*.hbase.pb.MajorCompactionT",
+ "imestampResponse\022\212\001\n(getLastMajorCompact" +
+ "ionTimestampForRegion\0222.hbase.pb.MajorCo" +
+ "mpactionTimestampForRegionRequest\032*.hbas" +
+ "e.pb.MajorCompactionTimestampResponse\022_\n" +
+ "\022getProcedureResult\022#.hbase.pb.GetProced" +
+ "ureResultRequest\032$.hbase.pb.GetProcedure" +
+ "ResultResponse\022h\n\027getSecurityCapabilitie" +
+ "s\022%.hbase.pb.SecurityCapabilitiesRequest" +
+ "\032&.hbase.pb.SecurityCapabilitiesResponse" +
+ "BB\n*org.apache.hadoop.hbase.protobuf.gen",
+ "eratedB\014MasterProtosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -55766,6 +56832,18 @@ public final class MasterProtos {
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_MajorCompactionTimestampResponse_descriptor,
new java.lang.String[] { "CompactionTimestamp", });
+ internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor =
+ getDescriptor().getMessageTypes().get(93);
+ internal_static_hbase_pb_SecurityCapabilitiesRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_SecurityCapabilitiesRequest_descriptor,
+ new java.lang.String[] { });
+ internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor =
+ getDescriptor().getMessageTypes().get(94);
+ internal_static_hbase_pb_SecurityCapabilitiesResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_SecurityCapabilitiesResponse_descriptor,
+ new java.lang.String[] { "Capabilities", });
return null;
}
};
http://git-wip-us.apache.org/repos/asf/hbase/blob/5e5bcceb/hbase-protocol/src/main/protobuf/Master.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/Master.proto b/hbase-protocol/src/main/protobuf/Master.proto
index 10a7854..778a02a 100644
--- a/hbase-protocol/src/main/protobuf/Master.proto
+++ b/hbase-protocol/src/main/protobuf/Master.proto
@@ -450,6 +450,21 @@ message MajorCompactionTimestampResponse {
required int64 compaction_timestamp = 1;
}
+message SecurityCapabilitiesRequest {
+}
+
+message SecurityCapabilitiesResponse {
+ enum Capability {
+ SIMPLE_AUTHENTICATION = 0;
+ SECURE_AUTHENTICATION = 1;
+ AUTHORIZATION = 2;
+ CELL_AUTHORIZATION = 3;
+ CELL_VISIBILITY = 4;
+ }
+
+ repeated Capability capabilities = 1;
+}
+
service MasterService {
/** Used by the client to get the number of regions that have received the updated schema */
rpc GetSchemaAlterStatus(GetSchemaAlterStatusRequest)
@@ -681,4 +696,8 @@ service MasterService {
rpc getProcedureResult(GetProcedureResultRequest)
returns(GetProcedureResultResponse);
+
+ /** Returns the security capabilities in effect on the cluster */
+ rpc getSecurityCapabilities(SecurityCapabilitiesRequest)
+ returns(SecurityCapabilitiesResponse);
}
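
End to end, the Admin method introduced by this patch wraps this RPC, so a
client can probe for cell-level security support before relying on it. A
minimal sketch using standard HBase client boilerplate (assuming the
Admin#getSecurityCapabilities signature added in this change, which returns a
list of the client-side SecurityCapability enum mirroring the proto enum above):

  import java.util.List;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.client.Admin;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;
  import org.apache.hadoop.hbase.client.security.SecurityCapability;

  public class CheckCellSecurity {
    public static void main(String[] args) throws Exception {
      Configuration conf = HBaseConfiguration.create();
      try (Connection conn = ConnectionFactory.createConnection(conf);
           Admin admin = conn.getAdmin()) {
        // One round trip to the master via the new getSecurityCapabilities RPC.
        List<SecurityCapability> caps = admin.getSecurityCapabilities();
        System.out.println("Cell visibility supported: "
            + caps.contains(SecurityCapability.CELL_VISIBILITY));
      }
    }
  }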