Posted to commits@hbase.apache.org by zg...@apache.org on 2017/05/18 09:56:08 UTC
[2/4] hbase git commit: HBASE-11013 Clone Snapshots on Secure Cluster Should provide option to apply Retained User Permissions
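This patch moves the SnapshotDescription message out of HBase.proto into Snapshot.proto, adds an optional users_and_permissions field (hbase.pb.UsersAndPermissions) to it, and adds a restoreACL flag to RestoreSnapshotRequest so that ACLs retained at snapshot time can be re-applied on clone/restore. A minimal sketch of the regenerated API, using only classes and methods that appear in the generated code below (the snapshot name, table, and owner values are placeholders):

    import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions;
    import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;

    public class SnapshotDescriptionExample {
      public static void main(String[] args) throws Exception {
        SnapshotDescription desc = SnapshotDescription.newBuilder()
            .setName("snap1")                              // required string name = 1
            .setTable("t1")
            .setCreationTime(System.currentTimeMillis())
            .setType(SnapshotDescription.Type.FLUSH)
            .setOwner("alice")
            // New in this patch: retained ACLs travel with the snapshot metadata.
            .setUsersAndPermissions(UsersAndPermissions.getDefaultInstance())
            .build();

        // Round-trip through the wire format via the generated parser.
        SnapshotDescription parsed = SnapshotDescription.parseFrom(desc.toByteArray());
        System.out.println(parsed.hasUsersAndPermissions());  // true
      }
    }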
http://git-wip-us.apache.org/repos/asf/hbase/blob/f9dc4cad/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
index 9805d50..2e11b4a 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java
@@ -8,6 +8,1494 @@ public final class SnapshotProtos {
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
+ public interface SnapshotDescriptionOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string name = 1;
+ /**
+ * <code>required string name = 1;</code>
+ */
+ boolean hasName();
+ /**
+ * <code>required string name = 1;</code>
+ */
+ java.lang.String getName();
+ /**
+ * <code>required string name = 1;</code>
+ */
+ com.google.protobuf.ByteString
+ getNameBytes();
+
+ // optional string table = 2;
+ /**
+ * <code>optional string table = 2;</code>
+ *
+ * <pre>
+ * not needed for delete, but checked for in taking snapshot
+ * </pre>
+ */
+ boolean hasTable();
+ /**
+ * <code>optional string table = 2;</code>
+ *
+ * <pre>
+ * not needed for delete, but checked for in taking snapshot
+ * </pre>
+ */
+ java.lang.String getTable();
+ /**
+ * <code>optional string table = 2;</code>
+ *
+ * <pre>
+ * not needed for delete, but checked for in taking snapshot
+ * </pre>
+ */
+ com.google.protobuf.ByteString
+ getTableBytes();
+
+ // optional int64 creation_time = 3 [default = 0];
+ /**
+ * <code>optional int64 creation_time = 3 [default = 0];</code>
+ */
+ boolean hasCreationTime();
+ /**
+ * <code>optional int64 creation_time = 3 [default = 0];</code>
+ */
+ long getCreationTime();
+
+ // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];
+ /**
+ * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
+ */
+ boolean hasType();
+ /**
+ * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type getType();
+
+ // optional int32 version = 5;
+ /**
+ * <code>optional int32 version = 5;</code>
+ */
+ boolean hasVersion();
+ /**
+ * <code>optional int32 version = 5;</code>
+ */
+ int getVersion();
+
+ // optional string owner = 6;
+ /**
+ * <code>optional string owner = 6;</code>
+ */
+ boolean hasOwner();
+ /**
+ * <code>optional string owner = 6;</code>
+ */
+ java.lang.String getOwner();
+ /**
+ * <code>optional string owner = 6;</code>
+ */
+ com.google.protobuf.ByteString
+ getOwnerBytes();
+
+ // optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;
+ /**
+ * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+ */
+ boolean hasUsersAndPermissions();
+ /**
+ * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions getUsersAndPermissions();
+ /**
+ * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder getUsersAndPermissionsOrBuilder();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.SnapshotDescription}
+ *
+ * <pre>
+ **
+ * Description of the snapshot to take
+ * </pre>
+ */
+ public static final class SnapshotDescription extends
+ com.google.protobuf.GeneratedMessage
+ implements SnapshotDescriptionOrBuilder {
+ // Use SnapshotDescription.newBuilder() to construct.
+ private SnapshotDescription(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private SnapshotDescription(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final SnapshotDescription defaultInstance;
+ public static SnapshotDescription getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public SnapshotDescription getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private SnapshotDescription(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ name_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ table_ = input.readBytes();
+ break;
+ }
+ case 24: {
+ bitField0_ |= 0x00000004;
+ creationTime_ = input.readInt64();
+ break;
+ }
+ case 32: {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type value = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(4, rawValue);
+ } else {
+ bitField0_ |= 0x00000008;
+ type_ = value;
+ }
+ break;
+ }
+ case 40: {
+ bitField0_ |= 0x00000010;
+ version_ = input.readInt32();
+ break;
+ }
+ case 50: {
+ bitField0_ |= 0x00000020;
+ owner_ = input.readBytes();
+ break;
+ }
+ case 58: {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000040) == 0x00000040)) {
+ subBuilder = usersAndPermissions_.toBuilder();
+ }
+ usersAndPermissions_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(usersAndPermissions_);
+ usersAndPermissions_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000040;
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<SnapshotDescription> PARSER =
+ new com.google.protobuf.AbstractParser<SnapshotDescription>() {
+ public SnapshotDescription parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SnapshotDescription(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<SnapshotDescription> getParserForType() {
+ return PARSER;
+ }
+
+ /**
+ * Protobuf enum {@code hbase.pb.SnapshotDescription.Type}
+ */
+ public enum Type
+ implements com.google.protobuf.ProtocolMessageEnum {
+ /**
+ * <code>DISABLED = 0;</code>
+ */
+ DISABLED(0, 0),
+ /**
+ * <code>FLUSH = 1;</code>
+ */
+ FLUSH(1, 1),
+ /**
+ * <code>SKIPFLUSH = 2;</code>
+ */
+ SKIPFLUSH(2, 2),
+ ;
+
+ /**
+ * <code>DISABLED = 0;</code>
+ */
+ public static final int DISABLED_VALUE = 0;
+ /**
+ * <code>FLUSH = 1;</code>
+ */
+ public static final int FLUSH_VALUE = 1;
+ /**
+ * <code>SKIPFLUSH = 2;</code>
+ */
+ public static final int SKIPFLUSH_VALUE = 2;
+
+
+ public final int getNumber() { return value; }
+
+ public static Type valueOf(int value) {
+ switch (value) {
+ case 0: return DISABLED;
+ case 1: return FLUSH;
+ case 2: return SKIPFLUSH;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<Type>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<Type>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<Type>() {
+ public Type findValueByNumber(int number) {
+ return Type.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDescriptor().getEnumTypes().get(0);
+ }
+
+ private static final Type[] VALUES = values();
+
+ public static Type valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private Type(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:hbase.pb.SnapshotDescription.Type)
+ }
+
+ private int bitField0_;
+ // required string name = 1;
+ public static final int NAME_FIELD_NUMBER = 1;
+ private java.lang.Object name_;
+ /**
+ * <code>required string name = 1;</code>
+ */
+ public boolean hasName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string name = 1;</code>
+ */
+ public java.lang.String getName() {
+ java.lang.Object ref = name_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ name_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>required string name = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getNameBytes() {
+ java.lang.Object ref = name_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ name_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional string table = 2;
+ public static final int TABLE_FIELD_NUMBER = 2;
+ private java.lang.Object table_;
+ /**
+ * <code>optional string table = 2;</code>
+ *
+ * <pre>
+ * not needed for delete, but checked for in taking snapshot
+ * </pre>
+ */
+ public boolean hasTable() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional string table = 2;</code>
+ *
+ * <pre>
+ * not needed for delete, but checked for in taking snapshot
+ * </pre>
+ */
+ public java.lang.String getTable() {
+ java.lang.Object ref = table_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ table_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>optional string table = 2;</code>
+ *
+ * <pre>
+ * not needed for delete, but checked for in taking snapshot
+ * </pre>
+ */
+ public com.google.protobuf.ByteString
+ getTableBytes() {
+ java.lang.Object ref = table_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ table_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional int64 creation_time = 3 [default = 0];
+ public static final int CREATION_TIME_FIELD_NUMBER = 3;
+ private long creationTime_;
+ /**
+ * <code>optional int64 creation_time = 3 [default = 0];</code>
+ */
+ public boolean hasCreationTime() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional int64 creation_time = 3 [default = 0];</code>
+ */
+ public long getCreationTime() {
+ return creationTime_;
+ }
+
+ // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];
+ public static final int TYPE_FIELD_NUMBER = 4;
+ private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type type_;
+ /**
+ * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
+ */
+ public boolean hasType() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type getType() {
+ return type_;
+ }
+
+ // optional int32 version = 5;
+ public static final int VERSION_FIELD_NUMBER = 5;
+ private int version_;
+ /**
+ * <code>optional int32 version = 5;</code>
+ */
+ public boolean hasVersion() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ /**
+ * <code>optional int32 version = 5;</code>
+ */
+ public int getVersion() {
+ return version_;
+ }
+
+ // optional string owner = 6;
+ public static final int OWNER_FIELD_NUMBER = 6;
+ private java.lang.Object owner_;
+ /**
+ * <code>optional string owner = 6;</code>
+ */
+ public boolean hasOwner() {
+ return ((bitField0_ & 0x00000020) == 0x00000020);
+ }
+ /**
+ * <code>optional string owner = 6;</code>
+ */
+ public java.lang.String getOwner() {
+ java.lang.Object ref = owner_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ owner_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * <code>optional string owner = 6;</code>
+ */
+ public com.google.protobuf.ByteString
+ getOwnerBytes() {
+ java.lang.Object ref = owner_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ owner_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;
+ public static final int USERS_AND_PERMISSIONS_FIELD_NUMBER = 7;
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions usersAndPermissions_;
+ /**
+ * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+ */
+ public boolean hasUsersAndPermissions() {
+ return ((bitField0_ & 0x00000040) == 0x00000040);
+ }
+ /**
+ * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions getUsersAndPermissions() {
+ return usersAndPermissions_;
+ }
+ /**
+ * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder getUsersAndPermissionsOrBuilder() {
+ return usersAndPermissions_;
+ }
+
+ private void initFields() {
+ name_ = "";
+ table_ = "";
+ creationTime_ = 0L;
+ type_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.FLUSH;
+ version_ = 0;
+ owner_ = "";
+ usersAndPermissions_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (hasUsersAndPermissions()) {
+ if (!getUsersAndPermissions().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, getTableBytes());
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeInt64(3, creationTime_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeEnum(4, type_.getNumber());
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ output.writeInt32(5, version_);
+ }
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ output.writeBytes(6, getOwnerBytes());
+ }
+ if (((bitField0_ & 0x00000040) == 0x00000040)) {
+ output.writeMessage(7, usersAndPermissions_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, getTableBytes());
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt64Size(3, creationTime_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeEnumSize(4, type_.getNumber());
+ }
+ if (((bitField0_ & 0x00000010) == 0x00000010)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt32Size(5, version_);
+ }
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(6, getOwnerBytes());
+ }
+ if (((bitField0_ & 0x00000040) == 0x00000040)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(7, usersAndPermissions_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription other = (org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription) obj;
+
+ boolean result = true;
+ result = result && (hasName() == other.hasName());
+ if (hasName()) {
+ result = result && getName()
+ .equals(other.getName());
+ }
+ result = result && (hasTable() == other.hasTable());
+ if (hasTable()) {
+ result = result && getTable()
+ .equals(other.getTable());
+ }
+ result = result && (hasCreationTime() == other.hasCreationTime());
+ if (hasCreationTime()) {
+ result = result && (getCreationTime()
+ == other.getCreationTime());
+ }
+ result = result && (hasType() == other.hasType());
+ if (hasType()) {
+ result = result &&
+ (getType() == other.getType());
+ }
+ result = result && (hasVersion() == other.hasVersion());
+ if (hasVersion()) {
+ result = result && (getVersion()
+ == other.getVersion());
+ }
+ result = result && (hasOwner() == other.hasOwner());
+ if (hasOwner()) {
+ result = result && getOwner()
+ .equals(other.getOwner());
+ }
+ result = result && (hasUsersAndPermissions() == other.hasUsersAndPermissions());
+ if (hasUsersAndPermissions()) {
+ result = result && getUsersAndPermissions()
+ .equals(other.getUsersAndPermissions());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasName()) {
+ hash = (37 * hash) + NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getName().hashCode();
+ }
+ if (hasTable()) {
+ hash = (37 * hash) + TABLE_FIELD_NUMBER;
+ hash = (53 * hash) + getTable().hashCode();
+ }
+ if (hasCreationTime()) {
+ hash = (37 * hash) + CREATION_TIME_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getCreationTime());
+ }
+ if (hasType()) {
+ hash = (37 * hash) + TYPE_FIELD_NUMBER;
+ hash = (53 * hash) + hashEnum(getType());
+ }
+ if (hasVersion()) {
+ hash = (37 * hash) + VERSION_FIELD_NUMBER;
+ hash = (53 * hash) + getVersion();
+ }
+ if (hasOwner()) {
+ hash = (37 * hash) + OWNER_FIELD_NUMBER;
+ hash = (53 * hash) + getOwner().hashCode();
+ }
+ if (hasUsersAndPermissions()) {
+ hash = (37 * hash) + USERS_AND_PERMISSIONS_FIELD_NUMBER;
+ hash = (53 * hash) + getUsersAndPermissions().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.SnapshotDescription}
+ *
+ * <pre>
+ **
+ * Description of the snapshot to take
+ * </pre>
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescriptionOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.class, org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getUsersAndPermissionsFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ name_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ table_ = "";
+ bitField0_ = (bitField0_ & ~0x00000002);
+ creationTime_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ type_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.FLUSH;
+ bitField0_ = (bitField0_ & ~0x00000008);
+ version_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000010);
+ owner_ = "";
+ bitField0_ = (bitField0_ & ~0x00000020);
+ if (usersAndPermissionsBuilder_ == null) {
+ usersAndPermissions_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance();
+ } else {
+ usersAndPermissionsBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000040);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.internal_static_hbase_pb_SnapshotDescription_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription build() {
+ org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription result = new org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.name_ = name_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.table_ = table_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.creationTime_ = creationTime_;
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ result.type_ = type_;
+ if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+ to_bitField0_ |= 0x00000010;
+ }
+ result.version_ = version_;
+ if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
+ to_bitField0_ |= 0x00000020;
+ }
+ result.owner_ = owner_;
+ if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
+ to_bitField0_ |= 0x00000040;
+ }
+ if (usersAndPermissionsBuilder_ == null) {
+ result.usersAndPermissions_ = usersAndPermissions_;
+ } else {
+ result.usersAndPermissions_ = usersAndPermissionsBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.getDefaultInstance()) return this;
+ if (other.hasName()) {
+ bitField0_ |= 0x00000001;
+ name_ = other.name_;
+ onChanged();
+ }
+ if (other.hasTable()) {
+ bitField0_ |= 0x00000002;
+ table_ = other.table_;
+ onChanged();
+ }
+ if (other.hasCreationTime()) {
+ setCreationTime(other.getCreationTime());
+ }
+ if (other.hasType()) {
+ setType(other.getType());
+ }
+ if (other.hasVersion()) {
+ setVersion(other.getVersion());
+ }
+ if (other.hasOwner()) {
+ bitField0_ |= 0x00000020;
+ owner_ = other.owner_;
+ onChanged();
+ }
+ if (other.hasUsersAndPermissions()) {
+ mergeUsersAndPermissions(other.getUsersAndPermissions());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasName()) {
+
+ return false;
+ }
+ if (hasUsersAndPermissions()) {
+ if (!getUsersAndPermissions().isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required string name = 1;
+ private java.lang.Object name_ = "";
+ /**
+ * <code>required string name = 1;</code>
+ */
+ public boolean hasName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required string name = 1;</code>
+ */
+ public java.lang.String getName() {
+ java.lang.Object ref = name_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ name_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>required string name = 1;</code>
+ */
+ public com.google.protobuf.ByteString
+ getNameBytes() {
+ java.lang.Object ref = name_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ name_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>required string name = 1;</code>
+ */
+ public Builder setName(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ name_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string name = 1;</code>
+ */
+ public Builder clearName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ name_ = getDefaultInstance().getName();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required string name = 1;</code>
+ */
+ public Builder setNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ name_ = value;
+ onChanged();
+ return this;
+ }
+
+ // optional string table = 2;
+ private java.lang.Object table_ = "";
+ /**
+ * <code>optional string table = 2;</code>
+ *
+ * <pre>
+ * not needed for delete, but checked for in taking snapshot
+ * </pre>
+ */
+ public boolean hasTable() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional string table = 2;</code>
+ *
+ * <pre>
+ * not needed for delete, but checked for in taking snapshot
+ * </pre>
+ */
+ public java.lang.String getTable() {
+ java.lang.Object ref = table_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ table_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>optional string table = 2;</code>
+ *
+ * <pre>
+ * not needed for delete, but checked for in taking snapshot
+ * </pre>
+ */
+ public com.google.protobuf.ByteString
+ getTableBytes() {
+ java.lang.Object ref = table_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ table_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>optional string table = 2;</code>
+ *
+ * <pre>
+ * not needed for delete, but checked for in taking snapshot
+ * </pre>
+ */
+ public Builder setTable(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ table_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string table = 2;</code>
+ *
+ * <pre>
+ * not needed for delete, but checked for in taking snapshot
+ * </pre>
+ */
+ public Builder clearTable() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ table_ = getDefaultInstance().getTable();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string table = 2;</code>
+ *
+ * <pre>
+ * not needed for delete, but checked for in taking snapshot
+ * </pre>
+ */
+ public Builder setTableBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ table_ = value;
+ onChanged();
+ return this;
+ }
+
+ // optional int64 creation_time = 3 [default = 0];
+ private long creationTime_ ;
+ /**
+ * <code>optional int64 creation_time = 3 [default = 0];</code>
+ */
+ public boolean hasCreationTime() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional int64 creation_time = 3 [default = 0];</code>
+ */
+ public long getCreationTime() {
+ return creationTime_;
+ }
+ /**
+ * <code>optional int64 creation_time = 3 [default = 0];</code>
+ */
+ public Builder setCreationTime(long value) {
+ bitField0_ |= 0x00000004;
+ creationTime_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional int64 creation_time = 3 [default = 0];</code>
+ */
+ public Builder clearCreationTime() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ creationTime_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];
+ private org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type type_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.FLUSH;
+ /**
+ * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
+ */
+ public boolean hasType() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type getType() {
+ return type_;
+ }
+ /**
+ * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
+ */
+ public Builder setType(org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000008;
+ type_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional .hbase.pb.SnapshotDescription.Type type = 4 [default = FLUSH];</code>
+ */
+ public Builder clearType() {
+ bitField0_ = (bitField0_ & ~0x00000008);
+ type_ = org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription.Type.FLUSH;
+ onChanged();
+ return this;
+ }
+
+ // optional int32 version = 5;
+ private int version_ ;
+ /**
+ * <code>optional int32 version = 5;</code>
+ */
+ public boolean hasVersion() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ /**
+ * <code>optional int32 version = 5;</code>
+ */
+ public int getVersion() {
+ return version_;
+ }
+ /**
+ * <code>optional int32 version = 5;</code>
+ */
+ public Builder setVersion(int value) {
+ bitField0_ |= 0x00000010;
+ version_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional int32 version = 5;</code>
+ */
+ public Builder clearVersion() {
+ bitField0_ = (bitField0_ & ~0x00000010);
+ version_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // optional string owner = 6;
+ private java.lang.Object owner_ = "";
+ /**
+ * <code>optional string owner = 6;</code>
+ */
+ public boolean hasOwner() {
+ return ((bitField0_ & 0x00000020) == 0x00000020);
+ }
+ /**
+ * <code>optional string owner = 6;</code>
+ */
+ public java.lang.String getOwner() {
+ java.lang.Object ref = owner_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ owner_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>optional string owner = 6;</code>
+ */
+ public com.google.protobuf.ByteString
+ getOwnerBytes() {
+ java.lang.Object ref = owner_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ owner_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>optional string owner = 6;</code>
+ */
+ public Builder setOwner(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000020;
+ owner_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string owner = 6;</code>
+ */
+ public Builder clearOwner() {
+ bitField0_ = (bitField0_ & ~0x00000020);
+ owner_ = getDefaultInstance().getOwner();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string owner = 6;</code>
+ */
+ public Builder setOwnerBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000020;
+ owner_ = value;
+ onChanged();
+ return this;
+ }
+
+ // optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;
+ private org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions usersAndPermissions_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder> usersAndPermissionsBuilder_;
+ /**
+ * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+ */
+ public boolean hasUsersAndPermissions() {
+ return ((bitField0_ & 0x00000040) == 0x00000040);
+ }
+ /**
+ * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions getUsersAndPermissions() {
+ if (usersAndPermissionsBuilder_ == null) {
+ return usersAndPermissions_;
+ } else {
+ return usersAndPermissionsBuilder_.getMessage();
+ }
+ }
+ /**
+ * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+ */
+ public Builder setUsersAndPermissions(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions value) {
+ if (usersAndPermissionsBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ usersAndPermissions_ = value;
+ onChanged();
+ } else {
+ usersAndPermissionsBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000040;
+ return this;
+ }
+ /**
+ * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+ */
+ public Builder setUsersAndPermissions(
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder builderForValue) {
+ if (usersAndPermissionsBuilder_ == null) {
+ usersAndPermissions_ = builderForValue.build();
+ onChanged();
+ } else {
+ usersAndPermissionsBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000040;
+ return this;
+ }
+ /**
+ * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+ */
+ public Builder mergeUsersAndPermissions(org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions value) {
+ if (usersAndPermissionsBuilder_ == null) {
+ if (((bitField0_ & 0x00000040) == 0x00000040) &&
+ usersAndPermissions_ != org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance()) {
+ usersAndPermissions_ =
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.newBuilder(usersAndPermissions_).mergeFrom(value).buildPartial();
+ } else {
+ usersAndPermissions_ = value;
+ }
+ onChanged();
+ } else {
+ usersAndPermissionsBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000040;
+ return this;
+ }
+ /**
+ * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+ */
+ public Builder clearUsersAndPermissions() {
+ if (usersAndPermissionsBuilder_ == null) {
+ usersAndPermissions_ = org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.getDefaultInstance();
+ onChanged();
+ } else {
+ usersAndPermissionsBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000040);
+ return this;
+ }
+ /**
+ * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder getUsersAndPermissionsBuilder() {
+ bitField0_ |= 0x00000040;
+ onChanged();
+ return getUsersAndPermissionsFieldBuilder().getBuilder();
+ }
+ /**
+ * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder getUsersAndPermissionsOrBuilder() {
+ if (usersAndPermissionsBuilder_ != null) {
+ return usersAndPermissionsBuilder_.getMessageOrBuilder();
+ } else {
+ return usersAndPermissions_;
+ }
+ }
+ /**
+ * <code>optional .hbase.pb.UsersAndPermissions users_and_permissions = 7;</code>
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder>
+ getUsersAndPermissionsFieldBuilder() {
+ if (usersAndPermissionsBuilder_ == null) {
+ usersAndPermissionsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissions.Builder, org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.UsersAndPermissionsOrBuilder>(
+ usersAndPermissions_,
+ getParentForChildren(),
+ isClean());
+ usersAndPermissions_ = null;
+ }
+ return usersAndPermissionsBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.SnapshotDescription)
+ }
+
+ static {
+ defaultInstance = new SnapshotDescription(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.SnapshotDescription)
+ }
+
public interface SnapshotFileInfoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
@@ -4686,6 +6174,11 @@ public final class SnapshotProtos {
}
private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_SnapshotDescription_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
internal_static_hbase_pb_SnapshotFileInfo_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
@@ -4719,39 +6212,52 @@ public final class SnapshotProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
- "\n\016Snapshot.proto\022\010hbase.pb\032\010FS.proto\032\013HB" +
- "ase.proto\"\222\001\n\020SnapshotFileInfo\022-\n\004type\030\001" +
- " \002(\0162\037.hbase.pb.SnapshotFileInfo.Type\022\r\n" +
- "\005hfile\030\003 \001(\t\022\022\n\nwal_server\030\004 \001(\t\022\020\n\010wal_" +
- "name\030\005 \001(\t\"\032\n\004Type\022\t\n\005HFILE\020\001\022\007\n\003WAL\020\002\"\323" +
- "\002\n\026SnapshotRegionManifest\022\017\n\007version\030\001 \001" +
- "(\005\022)\n\013region_info\030\002 \002(\0132\024.hbase.pb.Regio" +
- "nInfo\022B\n\014family_files\030\003 \003(\0132,.hbase.pb.S" +
- "napshotRegionManifest.FamilyFiles\032T\n\tSto" +
- "reFile\022\014\n\004name\030\001 \002(\t\022&\n\treference\030\002 \001(\0132",
- "\023.hbase.pb.Reference\022\021\n\tfile_size\030\003 \001(\004\032" +
- "c\n\013FamilyFiles\022\023\n\013family_name\030\001 \002(\014\022?\n\013s" +
- "tore_files\030\002 \003(\0132*.hbase.pb.SnapshotRegi" +
- "onManifest.StoreFile\"\177\n\024SnapshotDataMani" +
- "fest\022+\n\014table_schema\030\001 \002(\0132\025.hbase.pb.Ta" +
- "bleSchema\022:\n\020region_manifests\030\002 \003(\0132 .hb" +
- "ase.pb.SnapshotRegionManifestBD\n*org.apa" +
- "che.hadoop.hbase.protobuf.generatedB\016Sna" +
- "pshotProtosH\001\210\001\001\240\001\001"
+ "\n\016Snapshot.proto\022\010hbase.pb\032\023AccessContro" +
+ "l.proto\032\010FS.proto\032\013HBase.proto\"\223\002\n\023Snaps" +
+ "hotDescription\022\014\n\004name\030\001 \002(\t\022\r\n\005table\030\002 " +
+ "\001(\t\022\030\n\rcreation_time\030\003 \001(\003:\0010\0227\n\004type\030\004 " +
+ "\001(\0162\".hbase.pb.SnapshotDescription.Type:" +
+ "\005FLUSH\022\017\n\007version\030\005 \001(\005\022\r\n\005owner\030\006 \001(\t\022<" +
+ "\n\025users_and_permissions\030\007 \001(\0132\035.hbase.pb" +
+ ".UsersAndPermissions\".\n\004Type\022\014\n\010DISABLED" +
+ "\020\000\022\t\n\005FLUSH\020\001\022\r\n\tSKIPFLUSH\020\002\"\222\001\n\020Snapsho" +
+ "tFileInfo\022-\n\004type\030\001 \002(\0162\037.hbase.pb.Snaps",
+ "hotFileInfo.Type\022\r\n\005hfile\030\003 \001(\t\022\022\n\nwal_s" +
+ "erver\030\004 \001(\t\022\020\n\010wal_name\030\005 \001(\t\"\032\n\004Type\022\t\n" +
+ "\005HFILE\020\001\022\007\n\003WAL\020\002\"\323\002\n\026SnapshotRegionMani" +
+ "fest\022\017\n\007version\030\001 \001(\005\022)\n\013region_info\030\002 \002" +
+ "(\0132\024.hbase.pb.RegionInfo\022B\n\014family_files" +
+ "\030\003 \003(\0132,.hbase.pb.SnapshotRegionManifest" +
+ ".FamilyFiles\032T\n\tStoreFile\022\014\n\004name\030\001 \002(\t\022" +
+ "&\n\treference\030\002 \001(\0132\023.hbase.pb.Reference\022" +
+ "\021\n\tfile_size\030\003 \001(\004\032c\n\013FamilyFiles\022\023\n\013fam" +
+ "ily_name\030\001 \002(\014\022?\n\013store_files\030\002 \003(\0132*.hb",
+ "ase.pb.SnapshotRegionManifest.StoreFile\"" +
+ "\177\n\024SnapshotDataManifest\022+\n\014table_schema\030" +
+ "\001 \002(\0132\025.hbase.pb.TableSchema\022:\n\020region_m" +
+ "anifests\030\002 \003(\0132 .hbase.pb.SnapshotRegion" +
+ "ManifestBD\n*org.apache.hadoop.hbase.prot" +
+ "obuf.generatedB\016SnapshotProtosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
- internal_static_hbase_pb_SnapshotFileInfo_descriptor =
+ internal_static_hbase_pb_SnapshotDescription_descriptor =
getDescriptor().getMessageTypes().get(0);
+ internal_static_hbase_pb_SnapshotDescription_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_SnapshotDescription_descriptor,
+ new java.lang.String[] { "Name", "Table", "CreationTime", "Type", "Version", "Owner", "UsersAndPermissions", });
+ internal_static_hbase_pb_SnapshotFileInfo_descriptor =
+ getDescriptor().getMessageTypes().get(1);
internal_static_hbase_pb_SnapshotFileInfo_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_SnapshotFileInfo_descriptor,
new java.lang.String[] { "Type", "Hfile", "WalServer", "WalName", });
internal_static_hbase_pb_SnapshotRegionManifest_descriptor =
- getDescriptor().getMessageTypes().get(1);
+ getDescriptor().getMessageTypes().get(2);
internal_static_hbase_pb_SnapshotRegionManifest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_SnapshotRegionManifest_descriptor,
@@ -4769,7 +6275,7 @@ public final class SnapshotProtos {
internal_static_hbase_pb_SnapshotRegionManifest_FamilyFiles_descriptor,
new java.lang.String[] { "FamilyName", "StoreFiles", });
internal_static_hbase_pb_SnapshotDataManifest_descriptor =
- getDescriptor().getMessageTypes().get(2);
+ getDescriptor().getMessageTypes().get(3);
internal_static_hbase_pb_SnapshotDataManifest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_hbase_pb_SnapshotDataManifest_descriptor,
@@ -4780,6 +6286,7 @@ public final class SnapshotProtos {
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
+ org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.FSProtos.getDescriptor(),
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
}, assigner);
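Because SnapshotDescription is now the first message declared in Snapshot.proto, the descriptor indices of the pre-existing messages shift by one, which is what the assigner changes above reflect. A small sketch that prints the new ordering from the generated file descriptor:

    import com.google.protobuf.Descriptors;
    import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos;

    public class DescriptorOrderCheck {
      public static void main(String[] args) {
        for (Descriptors.Descriptor d : SnapshotProtos.getDescriptor().getMessageTypes()) {
          System.out.println(d.getIndex() + " -> " + d.getFullName());
          // Expected after this change:
          // 0 -> hbase.pb.SnapshotDescription
          // 1 -> hbase.pb.SnapshotFileInfo
          // 2 -> hbase.pb.SnapshotRegionManifest
          // 3 -> hbase.pb.SnapshotDataManifest
        }
      }
    }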
http://git-wip-us.apache.org/repos/asf/hbase/blob/f9dc4cad/hbase-protocol/src/main/protobuf/HBase.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/HBase.proto b/hbase-protocol/src/main/protobuf/HBase.proto
index 820dbeb..279eb39 100644
--- a/hbase-protocol/src/main/protobuf/HBase.proto
+++ b/hbase-protocol/src/main/protobuf/HBase.proto
@@ -158,23 +158,6 @@ message NameInt64Pair {
}
/**
- * Description of the snapshot to take
- */
-message SnapshotDescription {
- required string name = 1;
- optional string table = 2; // not needed for delete, but checked for in taking snapshot
- optional int64 creation_time = 3 [default = 0];
- enum Type {
- DISABLED = 0;
- FLUSH = 1;
- SKIPFLUSH = 2;
- }
- optional Type type = 4 [default = FLUSH];
- optional int32 version = 5;
- optional string owner = 6;
-}
-
-/**
* Description of the distributed procedure to take
*/
message ProcedureDescription {
http://git-wip-us.apache.org/repos/asf/hbase/blob/f9dc4cad/hbase-protocol/src/main/protobuf/Master.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/Master.proto b/hbase-protocol/src/main/protobuf/Master.proto
index 1c60465..d19856b 100644
--- a/hbase-protocol/src/main/protobuf/Master.proto
+++ b/hbase-protocol/src/main/protobuf/Master.proto
@@ -32,6 +32,7 @@ import "ClusterStatus.proto";
import "ErrorHandling.proto";
import "Procedure.proto";
import "Quota.proto";
+import "Snapshot.proto";
/* Column-level protobufs */
@@ -391,6 +392,7 @@ message DeleteSnapshotResponse {
message RestoreSnapshotRequest {
required SnapshotDescription snapshot = 1;
+ optional bool restoreACL = 2 [default = false];
}
message RestoreSnapshotResponse {
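With the new optional restoreACL flag (default false), a client can ask the master to re-apply the retained permissions on restore. A sketch of building such a request, not code from this patch; it assumes protoc's usual Java naming for Master.proto (outer class MasterProtos, accessors setRestoreACL/getRestoreACL for the restoreACL field):

    import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest;
    import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;

    public class RestoreRequestExample {
      public static void main(String[] args) {
        RestoreSnapshotRequest req = RestoreSnapshotRequest.newBuilder()
            .setSnapshot(SnapshotDescription.newBuilder().setName("snap1").build())
            .setRestoreACL(true)   // new optional field 2, defaults to false
            .build();
        System.out.println(req.getRestoreACL());  // true
      }
    }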
http://git-wip-us.apache.org/repos/asf/hbase/blob/f9dc4cad/hbase-protocol/src/main/protobuf/Snapshot.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/Snapshot.proto b/hbase-protocol/src/main/protobuf/Snapshot.proto
index ae1a1e6..015787d 100644
--- a/hbase-protocol/src/main/protobuf/Snapshot.proto
+++ b/hbase-protocol/src/main/protobuf/Snapshot.proto
@@ -23,9 +23,28 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;
+import "AccessControl.proto";
import "FS.proto";
import "HBase.proto";
+/**
+ * Description of the snapshot to take
+ */
+message SnapshotDescription {
+ required string name = 1;
+ optional string table = 2; // not needed for delete, but checked for in taking snapshot
+ optional int64 creation_time = 3 [default = 0];
+ enum Type {
+ DISABLED = 0;
+ FLUSH = 1;
+ SKIPFLUSH = 2;
+ }
+ optional Type type = 4 [default = FLUSH];
+ optional int32 version = 5;
+ optional string owner = 6;
+ optional UsersAndPermissions users_and_permissions = 7;
+}
+
message SnapshotFileInfo {
enum Type {
HFILE = 1;
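The defaults declared in the message above ([default = 0], [default = FLUSH]) surface through the generated getters even when a field was never set on the wire, while the has-methods still report absence. A quick sketch against the generated class:

    import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;

    public class DefaultsExample {
      public static void main(String[] args) {
        SnapshotDescription desc = SnapshotDescription.newBuilder().setName("s").build();
        System.out.println(desc.hasType());          // false: never set
        System.out.println(desc.getType());          // FLUSH, from [default = FLUSH]
        System.out.println(desc.getCreationTime());  // 0, from [default = 0]
      }
    }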
http://git-wip-us.apache.org/repos/asf/hbase/blob/f9dc4cad/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
index 0ecc131..5803297 100644
--- a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
+++ b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
@@ -49,7 +49,7 @@ org.apache.hadoop.hbase.HTableDescriptor;
org.apache.hadoop.hbase.HBaseConfiguration;
org.apache.hadoop.hbase.TableName;
org.apache.hadoop.hbase.tool.Canary;
-org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
+org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
org.apache.hadoop.hbase.master.DeadServer;
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
http://git-wip-us.apache.org/repos/asf/hbase/blob/f9dc4cad/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
index 2d8fdba..b2f76d2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
@@ -34,8 +34,8 @@ import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
+import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import java.io.IOException;
import java.util.List;
http://git-wip-us.apache.org/repos/asf/hbase/blob/f9dc4cad/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
index 5fe80c0..f747599 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
@@ -34,8 +34,8 @@ import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
+import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import java.io.IOException;
import java.util.List;
http://git-wip-us.apache.org/repos/asf/hbase/blob/f9dc4cad/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
index 24c62b2..7558147 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
@@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
+import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
/**
* Defines coprocessor hooks for interacting with operations on the
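The observer-facing classes above only swap the import: SnapshotDescription now comes from SnapshotProtos instead of HBaseProtos, so third-party coprocessors must be recompiled against the relocated class. The hook signatures themselves are unchanged; a minimal sketch of an observer built against the new import:

import java.io.IOException;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription; // was HBaseProtos

public class LoggingMasterObserver extends BaseMasterObserver {
  @Override
  public void preCloneSnapshot(ObserverContext<MasterCoprocessorEnvironment> ctx,
      SnapshotDescription snapshot, HTableDescriptor hTableDescriptor) throws IOException {
    // same hook as before; only the SnapshotDescription package changed
  }
}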
http://git-wip-us.apache.org/repos/asf/hbase/blob/f9dc4cad/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
index 75c6fc5..d06fdf9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java
@@ -36,8 +36,8 @@ import org.apache.hadoop.hbase.client.IsolationLevel;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit;
+import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;
http://git-wip-us.apache.org/repos/asf/hbase/blob/f9dc4cad/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
index 086d72a..9fb8d81 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
@@ -39,8 +39,8 @@ import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.coprocessor.*;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas;
+import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
/**
* Provides the coprocessor framework and environment for master oriented
http://git-wip-us.apache.org/repos/asf/hbase/blob/f9dc4cad/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index c678c86..0155e8a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -60,7 +60,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStor
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest;
@@ -186,6 +185,7 @@ import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.Repor
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse;
+import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.RSRpcServices;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.security.User;
@@ -1311,7 +1311,8 @@ public class MasterRpcServices extends RSRpcServices
master.ensureNamespaceExists(dstTable.getNamespaceAsString());
SnapshotDescription reqSnapshot = request.getSnapshot();
- master.snapshotManager.restoreSnapshot(reqSnapshot);
+ master.snapshotManager.restoreSnapshot(reqSnapshot,
+ request.hasRestoreACL() && request.getRestoreACL());
return RestoreSnapshotResponse.newBuilder().build();
} catch (ForeignException e) {
throw new ServiceException(e.getCause());
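The hasRestoreACL() && getRestoreACL() guard keeps older clients working: if the field was never set, ACL restoration is simply skipped. End to end, the option is meant to be driven from the client; assuming the Admin overload this issue introduces elsewhere in the change set (the signature is an assumption here, not shown in this hunk), usage would look roughly like:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

Configuration conf = HBaseConfiguration.create();
try (Connection conn = ConnectionFactory.createConnection(conf);
     Admin admin = conn.getAdmin()) {
  // Restore "snap1" and re-apply the permissions retained in the snapshot.
  admin.restoreSnapshot("snap1", false /* takeFailSafeSnapshot */, true /* restoreAcl */);
}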
http://git-wip-us.apache.org/repos/asf/hbase/blob/f9dc4cad/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java
index 2f769f3..0f1f495 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.master;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
/**
* Watch the current snapshot under process
http://git-wip-us.apache.org/repos/asf/hbase/blob/f9dc4cad/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java
index 2a6dca8..ff59ea1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.master.SnapshotSentinel;
import org.apache.hadoop.hbase.master.handler.CreateTableHandler;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;
@@ -63,6 +63,7 @@ public class CloneSnapshotHandler extends CreateTableHandler implements Snapshot
private final static String NAME = "Master CloneSnapshotHandler";
private final SnapshotDescription snapshot;
+ private final boolean restoreAcl;
private final ForeignExceptionDispatcher monitor;
private final MetricsSnapshot metricsSnapshot = new MetricsSnapshot();
@@ -73,12 +74,14 @@ public class CloneSnapshotHandler extends CreateTableHandler implements Snapshot
private volatile boolean stopped = false;
public CloneSnapshotHandler(final MasterServices masterServices,
- final SnapshotDescription snapshot, final HTableDescriptor hTableDescriptor) {
+ final SnapshotDescription snapshot, final HTableDescriptor hTableDescriptor,
+ final boolean restoreAcl) {
super(masterServices, masterServices.getMasterFileSystem(), hTableDescriptor,
masterServices.getConfiguration(), null, masterServices);
// Snapshot information
this.snapshot = snapshot;
+ this.restoreAcl = restoreAcl;
// Monitor
this.monitor = new ForeignExceptionDispatcher();
@@ -118,6 +121,13 @@ public class CloneSnapshotHandler extends CreateTableHandler implements Snapshot
Preconditions.checkArgument(!metaChanges.hasRegionsToRemove(),
"A clone should not have regions to remove");
+ // Clone acl of snapshot into newly created table.
+ if (restoreAcl && snapshot.hasUsersAndPermissions()
+ && snapshot.getUsersAndPermissions() != null
+ && SnapshotDescriptionUtils.isSecurityAvailable(conf)) {
+ RestoreSnapshotHelper.restoreSnapshotACL(snapshot, tableName, conf);
+ }
+
// At this point the clone is complete. Next step is enabling the table.
String msg = "Clone snapshot="+ snapshot.getName() +" on table=" + tableName + " completed!";
LOG.info(msg);
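The grant is attempted only when the caller opted in, the descriptor actually carries retained permissions, and security is configured (for generated protobufs getUsersAndPermissions() never returns null, so the hasUsersAndPermissions() check alone would suffice). Conceptually, restoring the ACL re-issues grants against the new table; a rough sketch of one such grant through the public AccessControlClient API, with user, table, and actions as illustrative stand-ins for values read from snapshot.getUsersAndPermissions():

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.security.access.AccessControlClient;
import org.apache.hadoop.hbase.security.access.Permission;

// Re-apply one retained permission to the freshly cloned table
// ("connection" is an open Connection; grant declares throws Throwable).
AccessControlClient.grant(connection, TableName.valueOf("t1_clone"), "alice",
    null /* family */, null /* qualifier */,
    Permission.Action.READ, Permission.Action.WRITE);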
http://git-wip-us.apache.org/repos/asf/hbase/blob/f9dc4cad/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
index f574c14..8e40a7d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
@@ -25,8 +25,6 @@ import java.util.concurrent.ThreadPoolExecutor;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -34,7 +32,7 @@ import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.master.MasterServices;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
import org.apache.hadoop.hbase.util.FSUtils;
http://git-wip-us.apache.org/repos/asf/hbase/blob/f9dc4cad/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
index 7e047ac..fa4245a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.procedure.Procedure;
import org.apache.hadoop.hbase.procedure.ProcedureCoordinator;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
import org.apache.hadoop.hbase.util.Pair;
@@ -49,7 +49,7 @@ public class EnabledTableSnapshotHandler extends TakeSnapshotHandler {
private final ProcedureCoordinator coordinator;
public EnabledTableSnapshotHandler(SnapshotDescription snapshot, MasterServices master,
- final SnapshotManager manager) {
+ final SnapshotManager manager) {
super(snapshot, master);
this.coordinator = manager.getCoordinator();
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/f9dc4cad/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
index bb54fc3..73b2198 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.master.MasterServices;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.CorruptedSnapshotException;
http://git-wip-us.apache.org/repos/asf/hbase/blob/f9dc4cad/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
index 56faf76..80f151d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.master.SnapshotSentinel;
import org.apache.hadoop.hbase.master.handler.TableEventHandler;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;
@@ -65,6 +65,7 @@ public class RestoreSnapshotHandler extends TableEventHandler implements Snapsho
private final HTableDescriptor hTableDescriptor;
private final SnapshotDescription snapshot;
+ private final boolean restoreAcl;
private final ForeignExceptionDispatcher monitor;
private final MetricsSnapshot metricsSnapshot = new MetricsSnapshot();
@@ -73,11 +74,13 @@ public class RestoreSnapshotHandler extends TableEventHandler implements Snapsho
private volatile boolean stopped = false;
public RestoreSnapshotHandler(final MasterServices masterServices,
- final SnapshotDescription snapshot, final HTableDescriptor htd) throws IOException {
+ final SnapshotDescription snapshot, final HTableDescriptor htd, final boolean restoreAcl)
+ throws IOException {
super(EventType.C_M_RESTORE_SNAPSHOT, htd.getTableName(), masterServices, masterServices);
// Snapshot information
this.snapshot = snapshot;
+ this.restoreAcl = restoreAcl;
// Monitor
this.monitor = new ForeignExceptionDispatcher();
@@ -166,6 +169,14 @@ public class RestoreSnapshotHandler extends TableEventHandler implements Snapsho
}
metaChanges.updateMetaParentRegions(this.server.getConnection(), hris);
+ // 5. restore acl of snapshot into the table.
+ if (restoreAcl && snapshot.hasUsersAndPermissions()
+ && snapshot.getUsersAndPermissions() != null
+ && SnapshotDescriptionUtils.isSecurityAvailable(server.getConfiguration())) {
+ RestoreSnapshotHelper.restoreSnapshotACL(snapshot, tableName, server.getConfiguration());
+ }
+
// At this point the restore is complete. Next step is enabling the table.
LOG.info("Restore snapshot=" + ClientSnapshotDescriptionUtils.toString(snapshot) +
" on table=" + tableName + " completed!");
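The same guard closes out the restore path. SnapshotDescriptionUtils.isSecurityAvailable(conf) is not shown in this excerpt; the check amounts to verifying that the ACL table is online, i.e. that the AccessController coprocessor is actually deployed. A sketch under that assumption:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.security.access.AccessControlLists;

// Assumed shape of the availability check: ACL grants can only be
// re-applied when the hbase:acl table exists.
public static boolean isSecurityAvailable(Configuration conf) throws IOException {
  try (Connection conn = ConnectionFactory.createConnection(conf);
       Admin admin = conn.getAdmin()) {
    return admin.tableExists(AccessControlLists.ACL_TABLE_NAME);
  }
}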